Rewrite the project
The new code is much more readable and easier to extend. I took
inspiration from Haskell's Parsec project which seems to only
write the happy-path and applied the idea to Erlang's exceptions.
When the parser tries to parse, say, a list, and crashes, it
tries with a table next, and so on until something matches.
Normal paragraphs always match so there can be no parsing
failures.
The parser now has a number of passes: first the block parser,
then lists and tables passes to build a proper tree out of
them and finally an inline pass to apply inline formatting.
The resulting AST can then be modified at will and passed on
to translator modules which output a different format.
The man page translator was also rewritten and has been tested
against both Cowboy and Gun. Numerous issues were fixed as a
result of this rewrite.
Loïc Hoguin
5 years ago
0 | Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | ||
2 | Permission to use, copy, modify, and/or distribute this software for any | |
3 | purpose with or without fee is hereby granted, provided that the above | |
4 | copyright notice and this permission notice appear in all copies. | |
5 | ||
6 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
1 | 1 | |
2 | 2 | PROJECT = asciideck |
3 | 3 | PROJECT_DESCRIPTION = Asciidoc for Erlang. |
4 | PROJECT_VERSION = 0.1.0 | |
5 | ||
6 | # Options. | |
7 | ||
8 | CI_OTP ?= OTP-18.0.3 OTP-18.1.5 OTP-18.2.4.1 OTP-18.3.4.4 OTP-19.0.7 OTP-19.1.5 | |
9 | CI_HIPE ?= $(lastword $(CI_OTP)) | |
10 | CI_ERLLVM ?= $(CI_HIPE) | |
4 | PROJECT_VERSION = 0.2.0 | |
11 | 5 | |
12 | 6 | # Dependencies. |
13 | 7 | |
14 | TEST_DEPS = ct_helper | |
8 | TEST_ERLC_OPTS += +'{parse_transform, eunit_autoexport}' | |
9 | TEST_DEPS = $(if $(CI_ERLANG_MK),ci.erlang.mk) ct_helper | |
15 | 10 | dep_ct_helper = git https://github.com/ninenines/ct_helper master |
16 | 11 | |
12 | # CI configuration. | |
13 | ||
14 | dep_ci.erlang.mk = git https://github.com/ninenines/ci.erlang.mk master | |
15 | DEP_EARLY_PLUGINS = ci.erlang.mk | |
16 | ||
17 | AUTO_CI_OTP ?= OTP-19+ | |
18 | AUTO_CI_HIPE ?= OTP-LATEST | |
19 | # AUTO_CI_ERLLVM ?= OTP-LATEST | |
20 | AUTO_CI_WINDOWS ?= OTP-19+ | |
21 | ||
17 | 22 | include erlang.mk |
0 | {application, asciideck, [ | |
0 | {application, 'asciideck', [ | |
1 | 1 | {description, "Asciidoc for Erlang."}, |
2 | {vsn, "0.1.0"}, | |
3 | {modules, ['asciideck','asciideck_parser','asciideck_to_manpage']}, | |
2 | {vsn, "0.2.0"}, | |
3 | {modules, ['asciideck','asciideck_attributes_parser','asciideck_attributes_pass','asciideck_block_parser','asciideck_inline_pass','asciideck_line_reader','asciideck_lists_pass','asciideck_tables_pass','asciideck_to_manpage']}, | |
4 | 4 | {registered, []}, |
5 | 5 | {applications, [kernel,stdlib]}, |
6 | 6 | {env, []} |
14 | 14 | .PHONY: all app apps deps search rel relup docs install-docs check tests clean distclean help erlang-mk |
15 | 15 | |
16 | 16 | ERLANG_MK_FILENAME := $(realpath $(lastword $(MAKEFILE_LIST))) |
17 | ||
18 | ERLANG_MK_VERSION = 2016.11.03-4-g9e9b7d2 | |
17 | export ERLANG_MK_FILENAME | |
18 | ||
19 | ERLANG_MK_VERSION = 208a116 | |
20 | ERLANG_MK_WITHOUT = | |
19 | 21 | |
20 | 22 | # Make 3.81 and 3.82 are deprecated. |
21 | 23 | |
151 | 153 | $(subst $(space),$(comma),$(strip $(1))) |
152 | 154 | endef |
153 | 155 | |
156 | define escape_dquotes | |
157 | $(subst ",\",$1) | |
158 | endef | |
159 | ||
154 | 160 | # Adding erlang.mk to make Erlang scripts who call init:get_plain_arguments() happy. |
155 | 161 | define erlang |
156 | $(ERL) $(2) -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(subst ",\",$(1)))" -- erlang.mk | |
162 | $(ERL) $2 -pz $(ERLANG_MK_TMP)/rebar/ebin -eval "$(subst $(newline),,$(call escape_dquotes,$1))" -- erlang.mk | |
157 | 163 | endef |
158 | 164 | |
159 | 165 | ifeq ($(PLATFORM),msys2) |
182 | 188 | ERLANG_MK_BUILD_CONFIG ?= build.config |
183 | 189 | ERLANG_MK_BUILD_DIR ?= .erlang.mk.build |
184 | 190 | |
191 | erlang-mk: WITHOUT ?= $(ERLANG_MK_WITHOUT) | |
185 | 192 | erlang-mk: |
193 | ifdef ERLANG_MK_COMMIT | |
186 | 194 | git clone $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) |
187 | ifdef ERLANG_MK_COMMIT | |
188 | 195 | cd $(ERLANG_MK_BUILD_DIR) && git checkout $(ERLANG_MK_COMMIT) |
196 | else | |
197 | git clone --depth 1 $(ERLANG_MK_REPO) $(ERLANG_MK_BUILD_DIR) | |
189 | 198 | endif |
190 | 199 | if [ -f $(ERLANG_MK_BUILD_CONFIG) ]; then cp $(ERLANG_MK_BUILD_CONFIG) $(ERLANG_MK_BUILD_DIR)/build.config; fi |
191 | $(MAKE) -C $(ERLANG_MK_BUILD_DIR) | |
200 | $(MAKE) -C $(ERLANG_MK_BUILD_DIR) WITHOUT='$(strip $(WITHOUT))' | |
192 | 201 | cp $(ERLANG_MK_BUILD_DIR)/erlang.mk ./erlang.mk |
193 | 202 | rm -rf $(ERLANG_MK_BUILD_DIR) |
194 | 203 | |
195 | 204 | # The erlang.mk package index is bundled in the default erlang.mk build. |
196 | 205 | # Search for the string "copyright" to skip to the rest of the code. |
206 | ||
207 | # Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> | |
208 | # This file is part of erlang.mk and subject to the terms of the ISC License. | |
209 | ||
210 | .PHONY: distclean-kerl | |
211 | ||
212 | KERL_INSTALL_DIR ?= $(HOME)/erlang | |
213 | ||
214 | ifeq ($(strip $(KERL)),) | |
215 | KERL := $(ERLANG_MK_TMP)/kerl/kerl | |
216 | endif | |
217 | ||
218 | export KERL | |
219 | ||
220 | KERL_GIT ?= https://github.com/kerl/kerl | |
221 | KERL_COMMIT ?= master | |
222 | ||
223 | KERL_MAKEFLAGS ?= | |
224 | ||
225 | OTP_GIT ?= https://github.com/erlang/otp | |
226 | ||
227 | define kerl_otp_target | |
228 | ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(1)),) | |
229 | $(KERL_INSTALL_DIR)/$(1): $(KERL) | |
230 | MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1) | |
231 | $(KERL) install $(1) $(KERL_INSTALL_DIR)/$(1) | |
232 | endif | |
233 | endef | |
234 | ||
235 | define kerl_hipe_target | |
236 | ifeq ($(wildcard $(KERL_INSTALL_DIR)/$1-native),) | |
237 | $(KERL_INSTALL_DIR)/$1-native: $(KERL) | |
238 | KERL_CONFIGURE_OPTIONS=--enable-native-libs \ | |
239 | MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native | |
240 | $(KERL) install $1-native $(KERL_INSTALL_DIR)/$1-native | |
241 | endif | |
242 | endef | |
243 | ||
244 | $(KERL): | |
245 | $(verbose) mkdir -p $(ERLANG_MK_TMP) | |
246 | $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl | |
247 | $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT) | |
248 | $(verbose) chmod +x $(KERL) | |
249 | ||
250 | distclean:: distclean-kerl | |
251 | ||
252 | distclean-kerl: | |
253 | $(gen_verbose) rm -rf $(KERL) | |
254 | ||
255 | # Allow users to select which version of Erlang/OTP to use for a project. | |
256 | ||
257 | ifneq ($(strip $(LATEST_ERLANG_OTP)),) | |
258 | ERLANG_OTP := $(notdir $(lastword $(sort \ | |
259 | $(filter-out $(KERL_INSTALL_DIR)/OTP_% %-rc1 %-rc2 %-rc3,\ | |
260 | $(wildcard $(KERL_INSTALL_DIR)/*[^-native]))))) | |
261 | endif | |
262 | ||
263 | ERLANG_OTP ?= | |
264 | ERLANG_HIPE ?= | |
265 | ||
266 | # Use kerl to enforce a specific Erlang/OTP version for a project. | |
267 | ifneq ($(strip $(ERLANG_OTP)),) | |
268 | export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_OTP)/bin:$(PATH) | |
269 | SHELL := env PATH=$(PATH) $(SHELL) | |
270 | $(eval $(call kerl_otp_target,$(ERLANG_OTP))) | |
271 | ||
272 | # Build Erlang/OTP only if it doesn't already exist. | |
273 | ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_OTP))$(BUILD_ERLANG_OTP),) | |
274 | $(info Building Erlang/OTP $(ERLANG_OTP)... Please wait...) | |
275 | $(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_OTP) ERLANG_OTP=$(ERLANG_OTP) BUILD_ERLANG_OTP=1 >&2) | |
276 | endif | |
277 | ||
278 | else | |
279 | # Same for a HiPE enabled VM. | |
280 | ifneq ($(strip $(ERLANG_HIPE)),) | |
281 | export PATH := $(KERL_INSTALL_DIR)/$(ERLANG_HIPE)-native/bin:$(PATH) | |
282 | SHELL := env PATH=$(PATH) $(SHELL) | |
283 | $(eval $(call kerl_hipe_target,$(ERLANG_HIPE))) | |
284 | ||
285 | # Build Erlang/OTP only if it doesn't already exist. | |
286 | ifeq ($(wildcard $(KERL_INSTALL_DIR)/$(ERLANG_HIPE))$(BUILD_ERLANG_OTP),) | |
287 | $(info Building HiPE-enabled Erlang/OTP $(ERLANG_OTP)... Please wait...) | |
288 | $(shell $(MAKE) $(KERL_INSTALL_DIR)/$(ERLANG_HIPE) ERLANG_HIPE=$(ERLANG_HIPE) BUILD_ERLANG_OTP=1 >&2) | |
289 | endif | |
290 | ||
291 | endif | |
292 | endif | |
197 | 293 | |
198 | 294 | PACKAGES += aberth |
199 | 295 | pkg_aberth_name = aberth |
403 | 499 | pkg_bootstrap_repo = https://github.com/schlagert/bootstrap |
404 | 500 | pkg_bootstrap_commit = master |
405 | 501 | |
502 | PACKAGES += boss | |
503 | pkg_boss_name = boss | |
504 | pkg_boss_description = Erlang web MVC, now featuring Comet | |
505 | pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss | |
506 | pkg_boss_fetch = git | |
507 | pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss | |
508 | pkg_boss_commit = master | |
509 | ||
406 | 510 | PACKAGES += boss_db |
407 | 511 | pkg_boss_db_name = boss_db |
408 | 512 | pkg_boss_db_description = BossDB: a sharded, caching, pooling, evented ORM for Erlang |
411 | 515 | pkg_boss_db_repo = https://github.com/ErlyORM/boss_db |
412 | 516 | pkg_boss_db_commit = master |
413 | 517 | |
414 | PACKAGES += boss | |
415 | pkg_boss_name = boss | |
416 | pkg_boss_description = Erlang web MVC, now featuring Comet | |
417 | pkg_boss_homepage = https://github.com/ChicagoBoss/ChicagoBoss | |
418 | pkg_boss_fetch = git | |
419 | pkg_boss_repo = https://github.com/ChicagoBoss/ChicagoBoss | |
420 | pkg_boss_commit = master | |
421 | ||
422 | 518 | PACKAGES += brod |
423 | 519 | pkg_brod_name = brod |
424 | 520 | pkg_brod_description = Kafka client in Erlang |
523 | 619 | pkg_chumak_repo = https://github.com/chovencorp/chumak |
524 | 620 | pkg_chumak_commit = master |
525 | 621 | |
526 | PACKAGES += classifier | |
527 | pkg_classifier_name = classifier | |
528 | pkg_classifier_description = An Erlang Bayesian Filter and Text Classifier | |
529 | pkg_classifier_homepage = https://github.com/inaka/classifier | |
530 | pkg_classifier_fetch = git | |
531 | pkg_classifier_repo = https://github.com/inaka/classifier | |
532 | pkg_classifier_commit = master | |
622 | PACKAGES += cl | |
623 | pkg_cl_name = cl | |
624 | pkg_cl_description = OpenCL binding for Erlang | |
625 | pkg_cl_homepage = https://github.com/tonyrog/cl | |
626 | pkg_cl_fetch = git | |
627 | pkg_cl_repo = https://github.com/tonyrog/cl | |
628 | pkg_cl_commit = master | |
533 | 629 | |
534 | 630 | PACKAGES += clique |
535 | 631 | pkg_clique_name = clique |
539 | 635 | pkg_clique_repo = https://github.com/basho/clique |
540 | 636 | pkg_clique_commit = develop |
541 | 637 | |
542 | PACKAGES += cl | |
543 | pkg_cl_name = cl | |
544 | pkg_cl_description = OpenCL binding for Erlang | |
545 | pkg_cl_homepage = https://github.com/tonyrog/cl | |
546 | pkg_cl_fetch = git | |
547 | pkg_cl_repo = https://github.com/tonyrog/cl | |
548 | pkg_cl_commit = master | |
549 | ||
550 | 638 | PACKAGES += cloudi_core |
551 | 639 | pkg_cloudi_core_name = cloudi_core |
552 | 640 | pkg_cloudi_core_description = CloudI internal service runtime |
563 | 651 | pkg_cloudi_service_api_requests_repo = https://github.com/CloudI/cloudi_service_api_requests |
564 | 652 | pkg_cloudi_service_api_requests_commit = master |
565 | 653 | |
654 | PACKAGES += cloudi_service_db | |
655 | pkg_cloudi_service_db_name = cloudi_service_db | |
656 | pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) | |
657 | pkg_cloudi_service_db_homepage = http://cloudi.org/ | |
658 | pkg_cloudi_service_db_fetch = git | |
659 | pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db | |
660 | pkg_cloudi_service_db_commit = master | |
661 | ||
662 | PACKAGES += cloudi_service_db_cassandra | |
663 | pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra | |
664 | pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service | |
665 | pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ | |
666 | pkg_cloudi_service_db_cassandra_fetch = git | |
667 | pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra | |
668 | pkg_cloudi_service_db_cassandra_commit = master | |
669 | ||
566 | 670 | PACKAGES += cloudi_service_db_cassandra_cql |
567 | 671 | pkg_cloudi_service_db_cassandra_cql_name = cloudi_service_db_cassandra_cql |
568 | 672 | pkg_cloudi_service_db_cassandra_cql_description = Cassandra CQL CloudI Service |
571 | 675 | pkg_cloudi_service_db_cassandra_cql_repo = https://github.com/CloudI/cloudi_service_db_cassandra_cql |
572 | 676 | pkg_cloudi_service_db_cassandra_cql_commit = master |
573 | 677 | |
574 | PACKAGES += cloudi_service_db_cassandra | |
575 | pkg_cloudi_service_db_cassandra_name = cloudi_service_db_cassandra | |
576 | pkg_cloudi_service_db_cassandra_description = Cassandra CloudI Service | |
577 | pkg_cloudi_service_db_cassandra_homepage = http://cloudi.org/ | |
578 | pkg_cloudi_service_db_cassandra_fetch = git | |
579 | pkg_cloudi_service_db_cassandra_repo = https://github.com/CloudI/cloudi_service_db_cassandra | |
580 | pkg_cloudi_service_db_cassandra_commit = master | |
581 | ||
582 | 678 | PACKAGES += cloudi_service_db_couchdb |
583 | 679 | pkg_cloudi_service_db_couchdb_name = cloudi_service_db_couchdb |
584 | 680 | pkg_cloudi_service_db_couchdb_description = CouchDB CloudI Service |
603 | 699 | pkg_cloudi_service_db_memcached_repo = https://github.com/CloudI/cloudi_service_db_memcached |
604 | 700 | pkg_cloudi_service_db_memcached_commit = master |
605 | 701 | |
606 | PACKAGES += cloudi_service_db | |
607 | pkg_cloudi_service_db_name = cloudi_service_db | |
608 | pkg_cloudi_service_db_description = CloudI Database (in-memory/testing/generic) | |
609 | pkg_cloudi_service_db_homepage = http://cloudi.org/ | |
610 | pkg_cloudi_service_db_fetch = git | |
611 | pkg_cloudi_service_db_repo = https://github.com/CloudI/cloudi_service_db | |
612 | pkg_cloudi_service_db_commit = master | |
613 | ||
614 | 702 | PACKAGES += cloudi_service_db_mysql |
615 | 703 | pkg_cloudi_service_db_mysql_name = cloudi_service_db_mysql |
616 | 704 | pkg_cloudi_service_db_mysql_description = MySQL CloudI Service |
939 | 1027 | pkg_dnssd_repo = https://github.com/benoitc/dnssd_erlang |
940 | 1028 | pkg_dnssd_commit = master |
941 | 1029 | |
942 | PACKAGES += dtl | |
943 | pkg_dtl_name = dtl | |
944 | pkg_dtl_description = Django Template Language: A full-featured port of the Django template engine to Erlang. | |
945 | pkg_dtl_homepage = https://github.com/oinksoft/dtl | |
946 | pkg_dtl_fetch = git | |
947 | pkg_dtl_repo = https://github.com/oinksoft/dtl | |
948 | pkg_dtl_commit = master | |
949 | ||
950 | 1030 | PACKAGES += dynamic_compile |
951 | 1031 | pkg_dynamic_compile_name = dynamic_compile |
952 | 1032 | pkg_dynamic_compile_description = compile and load erlang modules from string input |
1035 | 1115 | pkg_edown_repo = https://github.com/uwiger/edown |
1036 | 1116 | pkg_edown_commit = master |
1037 | 1117 | |
1118 | PACKAGES += eep | |
1119 | pkg_eep_name = eep | |
1120 | pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy | |
1121 | pkg_eep_homepage = https://github.com/virtan/eep | |
1122 | pkg_eep_fetch = git | |
1123 | pkg_eep_repo = https://github.com/virtan/eep | |
1124 | pkg_eep_commit = master | |
1125 | ||
1038 | 1126 | PACKAGES += eep_app |
1039 | 1127 | pkg_eep_app_name = eep_app |
1040 | 1128 | pkg_eep_app_description = Embedded Event Processing |
1043 | 1131 | pkg_eep_app_repo = https://github.com/darach/eep-erl |
1044 | 1132 | pkg_eep_app_commit = master |
1045 | 1133 | |
1046 | PACKAGES += eep | |
1047 | pkg_eep_name = eep | |
1048 | pkg_eep_description = Erlang Easy Profiling (eep) application provides a way to analyze application performance and call hierarchy | |
1049 | pkg_eep_homepage = https://github.com/virtan/eep | |
1050 | pkg_eep_fetch = git | |
1051 | pkg_eep_repo = https://github.com/virtan/eep | |
1052 | pkg_eep_commit = master | |
1053 | ||
1054 | 1134 | PACKAGES += efene |
1055 | 1135 | pkg_efene_name = efene |
1056 | 1136 | pkg_efene_description = Alternative syntax for the Erlang Programming Language focusing on simplicity, ease of use and programmer UX |
1075 | 1155 | pkg_ehsa_repo = https://bitbucket.org/a12n/ehsa |
1076 | 1156 | pkg_ehsa_commit = default |
1077 | 1157 | |
1158 | PACKAGES += ej | |
1159 | pkg_ej_name = ej | |
1160 | pkg_ej_description = Helper module for working with Erlang terms representing JSON | |
1161 | pkg_ej_homepage = https://github.com/seth/ej | |
1162 | pkg_ej_fetch = git | |
1163 | pkg_ej_repo = https://github.com/seth/ej | |
1164 | pkg_ej_commit = master | |
1165 | ||
1078 | 1166 | PACKAGES += ejabberd |
1079 | 1167 | pkg_ejabberd_name = ejabberd |
1080 | 1168 | pkg_ejabberd_description = Robust, ubiquitous and massively scalable Jabber / XMPP Instant Messaging platform |
1083 | 1171 | pkg_ejabberd_repo = https://github.com/processone/ejabberd |
1084 | 1172 | pkg_ejabberd_commit = master |
1085 | 1173 | |
1086 | PACKAGES += ej | |
1087 | pkg_ej_name = ej | |
1088 | pkg_ej_description = Helper module for working with Erlang terms representing JSON | |
1089 | pkg_ej_homepage = https://github.com/seth/ej | |
1090 | pkg_ej_fetch = git | |
1091 | pkg_ej_repo = https://github.com/seth/ej | |
1092 | pkg_ej_commit = master | |
1093 | ||
1094 | 1174 | PACKAGES += ejwt |
1095 | 1175 | pkg_ejwt_name = ejwt |
1096 | 1176 | pkg_ejwt_description = erlang library for JSON Web Token |
1251 | 1331 | pkg_eredis_pool_repo = https://github.com/hiroeorz/eredis_pool |
1252 | 1332 | pkg_eredis_pool_commit = master |
1253 | 1333 | |
1334 | PACKAGES += erl_streams | |
1335 | pkg_erl_streams_name = erl_streams | |
1336 | pkg_erl_streams_description = Streams in Erlang | |
1337 | pkg_erl_streams_homepage = https://github.com/epappas/erl_streams | |
1338 | pkg_erl_streams_fetch = git | |
1339 | pkg_erl_streams_repo = https://github.com/epappas/erl_streams | |
1340 | pkg_erl_streams_commit = master | |
1341 | ||
1254 | 1342 | PACKAGES += erlang_cep |
1255 | 1343 | pkg_erlang_cep_name = erlang_cep |
1256 | 1344 | pkg_erlang_cep_description = A basic CEP package written in erlang |
1427 | 1515 | pkg_erlport_repo = https://github.com/hdima/erlport |
1428 | 1516 | pkg_erlport_commit = master |
1429 | 1517 | |
1518 | PACKAGES += erlsh | |
1519 | pkg_erlsh_name = erlsh | |
1520 | pkg_erlsh_description = Erlang shell tools | |
1521 | pkg_erlsh_homepage = https://github.com/proger/erlsh | |
1522 | pkg_erlsh_fetch = git | |
1523 | pkg_erlsh_repo = https://github.com/proger/erlsh | |
1524 | pkg_erlsh_commit = master | |
1525 | ||
1430 | 1526 | PACKAGES += erlsha2 |
1431 | 1527 | pkg_erlsha2_name = erlsha2 |
1432 | 1528 | pkg_erlsha2_description = SHA-224, SHA-256, SHA-384, SHA-512 implemented in Erlang NIFs. |
1435 | 1531 | pkg_erlsha2_repo = https://github.com/vinoski/erlsha2 |
1436 | 1532 | pkg_erlsha2_commit = master |
1437 | 1533 | |
1438 | PACKAGES += erlsh | |
1439 | pkg_erlsh_name = erlsh | |
1440 | pkg_erlsh_description = Erlang shell tools | |
1441 | pkg_erlsh_homepage = https://github.com/proger/erlsh | |
1442 | pkg_erlsh_fetch = git | |
1443 | pkg_erlsh_repo = https://github.com/proger/erlsh | |
1444 | pkg_erlsh_commit = master | |
1445 | ||
1446 | 1534 | PACKAGES += erlsom |
1447 | 1535 | pkg_erlsom_name = erlsom |
1448 | 1536 | pkg_erlsom_description = XML parser for Erlang |
1451 | 1539 | pkg_erlsom_repo = https://github.com/willemdj/erlsom |
1452 | 1540 | pkg_erlsom_commit = master |
1453 | 1541 | |
1454 | PACKAGES += erl_streams | |
1455 | pkg_erl_streams_name = erl_streams | |
1456 | pkg_erl_streams_description = Streams in Erlang | |
1457 | pkg_erl_streams_homepage = https://github.com/epappas/erl_streams | |
1458 | pkg_erl_streams_fetch = git | |
1459 | pkg_erl_streams_repo = https://github.com/epappas/erl_streams | |
1460 | pkg_erl_streams_commit = master | |
1461 | ||
1462 | 1542 | PACKAGES += erlubi |
1463 | 1543 | pkg_erlubi_name = erlubi |
1464 | 1544 | pkg_erlubi_description = Ubigraph Erlang Client (and Process Visualizer) |
1515 | 1595 | pkg_erwa_repo = https://github.com/bwegh/erwa |
1516 | 1596 | pkg_erwa_commit = master |
1517 | 1597 | |
1598 | PACKAGES += escalus | |
1599 | pkg_escalus_name = escalus | |
1600 | pkg_escalus_description = An XMPP client library in Erlang for conveniently testing XMPP servers | |
1601 | pkg_escalus_homepage = https://github.com/esl/escalus | |
1602 | pkg_escalus_fetch = git | |
1603 | pkg_escalus_repo = https://github.com/esl/escalus | |
1604 | pkg_escalus_commit = master | |
1605 | ||
1518 | 1606 | PACKAGES += espec |
1519 | 1607 | pkg_espec_name = espec |
1520 | 1608 | pkg_espec_description = ESpec: Behaviour driven development framework for Erlang |
1539 | 1627 | pkg_etap_repo = https://github.com/ngerakines/etap |
1540 | 1628 | pkg_etap_commit = master |
1541 | 1629 | |
1630 | PACKAGES += etest | |
1631 | pkg_etest_name = etest | |
1632 | pkg_etest_description = A lightweight, convention over configuration test framework for Erlang | |
1633 | pkg_etest_homepage = https://github.com/wooga/etest | |
1634 | pkg_etest_fetch = git | |
1635 | pkg_etest_repo = https://github.com/wooga/etest | |
1636 | pkg_etest_commit = master | |
1637 | ||
1542 | 1638 | PACKAGES += etest_http |
1543 | 1639 | pkg_etest_http_name = etest_http |
1544 | 1640 | pkg_etest_http_description = etest Assertions around HTTP (client-side) |
1547 | 1643 | pkg_etest_http_repo = https://github.com/wooga/etest_http |
1548 | 1644 | pkg_etest_http_commit = master |
1549 | 1645 | |
1550 | PACKAGES += etest | |
1551 | pkg_etest_name = etest | |
1552 | pkg_etest_description = A lightweight, convention over configuration test framework for Erlang | |
1553 | pkg_etest_homepage = https://github.com/wooga/etest | |
1554 | pkg_etest_fetch = git | |
1555 | pkg_etest_repo = https://github.com/wooga/etest | |
1556 | pkg_etest_commit = master | |
1557 | ||
1558 | 1646 | PACKAGES += etoml |
1559 | 1647 | pkg_etoml_name = etoml |
1560 | 1648 | pkg_etoml_description = TOML language erlang parser |
1563 | 1651 | pkg_etoml_repo = https://github.com/kalta/etoml |
1564 | 1652 | pkg_etoml_commit = master |
1565 | 1653 | |
1654 | PACKAGES += eunit | |
1655 | pkg_eunit_name = eunit | |
1656 | pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository. | |
1657 | pkg_eunit_homepage = https://github.com/richcarl/eunit | |
1658 | pkg_eunit_fetch = git | |
1659 | pkg_eunit_repo = https://github.com/richcarl/eunit | |
1660 | pkg_eunit_commit = master | |
1661 | ||
1566 | 1662 | PACKAGES += eunit_formatters |
1567 | 1663 | pkg_eunit_formatters_name = eunit_formatters |
1568 | 1664 | pkg_eunit_formatters_description = Because eunit's output sucks. Let's make it better. |
1571 | 1667 | pkg_eunit_formatters_repo = https://github.com/seancribbs/eunit_formatters |
1572 | 1668 | pkg_eunit_formatters_commit = master |
1573 | 1669 | |
1574 | PACKAGES += eunit | |
1575 | pkg_eunit_name = eunit | |
1576 | pkg_eunit_description = The EUnit lightweight unit testing framework for Erlang - this is the canonical development repository. | |
1577 | pkg_eunit_homepage = https://github.com/richcarl/eunit | |
1578 | pkg_eunit_fetch = git | |
1579 | pkg_eunit_repo = https://github.com/richcarl/eunit | |
1580 | pkg_eunit_commit = master | |
1581 | ||
1582 | 1670 | PACKAGES += euthanasia |
1583 | 1671 | pkg_euthanasia_name = euthanasia |
1584 | 1672 | pkg_euthanasia_description = Merciful killer for your Erlang processes |
1715 | 1803 | pkg_fn_repo = https://github.com/reiddraper/fn |
1716 | 1804 | pkg_fn_commit = master |
1717 | 1805 | |
1806 | PACKAGES += folsom | |
1807 | pkg_folsom_name = folsom | |
1808 | pkg_folsom_description = Expose Erlang Events and Metrics | |
1809 | pkg_folsom_homepage = https://github.com/boundary/folsom | |
1810 | pkg_folsom_fetch = git | |
1811 | pkg_folsom_repo = https://github.com/boundary/folsom | |
1812 | pkg_folsom_commit = master | |
1813 | ||
1718 | 1814 | PACKAGES += folsom_cowboy |
1719 | 1815 | pkg_folsom_cowboy_name = folsom_cowboy |
1720 | 1816 | pkg_folsom_cowboy_description = A Cowboy based Folsom HTTP Wrapper. |
1731 | 1827 | pkg_folsomite_repo = https://github.com/campanja/folsomite |
1732 | 1828 | pkg_folsomite_commit = master |
1733 | 1829 | |
1734 | PACKAGES += folsom | |
1735 | pkg_folsom_name = folsom | |
1736 | pkg_folsom_description = Expose Erlang Events and Metrics | |
1737 | pkg_folsom_homepage = https://github.com/boundary/folsom | |
1738 | pkg_folsom_fetch = git | |
1739 | pkg_folsom_repo = https://github.com/boundary/folsom | |
1740 | pkg_folsom_commit = master | |
1741 | ||
1742 | 1830 | PACKAGES += fs |
1743 | 1831 | pkg_fs_name = fs |
1744 | 1832 | pkg_fs_description = Erlang FileSystem Listener |
1907 | 1995 | pkg_gold_fever_repo = https://github.com/inaka/gold_fever |
1908 | 1996 | pkg_gold_fever_commit = master |
1909 | 1997 | |
1910 | PACKAGES += gossiperl | |
1911 | pkg_gossiperl_name = gossiperl | |
1912 | pkg_gossiperl_description = Gossip middleware in Erlang | |
1913 | pkg_gossiperl_homepage = http://gossiperl.com/ | |
1914 | pkg_gossiperl_fetch = git | |
1915 | pkg_gossiperl_repo = https://github.com/gossiperl/gossiperl | |
1916 | pkg_gossiperl_commit = master | |
1917 | ||
1918 | 1998 | PACKAGES += gpb |
1919 | 1999 | pkg_gpb_name = gpb |
1920 | 2000 | pkg_gpb_description = A Google Protobuf implementation for Erlang |
1939 | 2019 | pkg_grapherl_repo = https://github.com/eproxus/grapherl |
1940 | 2020 | pkg_grapherl_commit = master |
1941 | 2021 | |
2022 | PACKAGES += grpc | |
2023 | pkg_grpc_name = grpc | |
2024 | pkg_grpc_description = gRPC server in Erlang | |
2025 | pkg_grpc_homepage = https://github.com/Bluehouse-Technology/grpc | |
2026 | pkg_grpc_fetch = git | |
2027 | pkg_grpc_repo = https://github.com/Bluehouse-Technology/grpc | |
2028 | pkg_grpc_commit = master | |
2029 | ||
2030 | PACKAGES += grpc_client | |
2031 | pkg_grpc_client_name = grpc_client | |
2032 | pkg_grpc_client_description = gRPC client in Erlang | |
2033 | pkg_grpc_client_homepage = https://github.com/Bluehouse-Technology/grpc_client | |
2034 | pkg_grpc_client_fetch = git | |
2035 | pkg_grpc_client_repo = https://github.com/Bluehouse-Technology/grpc_client | |
2036 | pkg_grpc_client_commit = master | |
2037 | ||
1942 | 2038 | PACKAGES += gun |
1943 | 2039 | pkg_gun_name = gun |
1944 | 2040 | pkg_gun_description = Asynchronous SPDY, HTTP and Websocket client written in Erlang. |
2019 | 2115 | pkg_ibrowse_repo = https://github.com/cmullaparthi/ibrowse |
2020 | 2116 | pkg_ibrowse_commit = master |
2021 | 2117 | |
2118 | PACKAGES += idna | |
2119 | pkg_idna_name = idna | |
2120 | pkg_idna_description = Erlang IDNA lib | |
2121 | pkg_idna_homepage = https://github.com/benoitc/erlang-idna | |
2122 | pkg_idna_fetch = git | |
2123 | pkg_idna_repo = https://github.com/benoitc/erlang-idna | |
2124 | pkg_idna_commit = master | |
2125 | ||
2022 | 2126 | PACKAGES += ierlang |
2023 | 2127 | pkg_ierlang_name = ierlang |
2024 | 2128 | pkg_ierlang_description = An Erlang language kernel for IPython. |
2035 | 2139 | pkg_iota_repo = https://github.com/jpgneves/iota |
2036 | 2140 | pkg_iota_commit = master |
2037 | 2141 | |
2142 | PACKAGES += irc_lib | |
2143 | pkg_irc_lib_name = irc_lib | |
2144 | pkg_irc_lib_description = Erlang irc client library | |
2145 | pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib | |
2146 | pkg_irc_lib_fetch = git | |
2147 | pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib | |
2148 | pkg_irc_lib_commit = master | |
2149 | ||
2038 | 2150 | PACKAGES += ircd |
2039 | 2151 | pkg_ircd_name = ircd |
2040 | 2152 | pkg_ircd_description = A pluggable IRC daemon application/library for Erlang. |
2043 | 2155 | pkg_ircd_repo = https://github.com/tonyg/erlang-ircd |
2044 | 2156 | pkg_ircd_commit = master |
2045 | 2157 | |
2046 | PACKAGES += irc_lib | |
2047 | pkg_irc_lib_name = irc_lib | |
2048 | pkg_irc_lib_description = Erlang irc client library | |
2049 | pkg_irc_lib_homepage = https://github.com/OtpChatBot/irc_lib | |
2050 | pkg_irc_lib_fetch = git | |
2051 | pkg_irc_lib_repo = https://github.com/OtpChatBot/irc_lib | |
2052 | pkg_irc_lib_commit = master | |
2053 | ||
2054 | 2158 | PACKAGES += iris |
2055 | 2159 | pkg_iris_name = iris |
2056 | 2160 | pkg_iris_description = Iris Erlang binding |
2123 | 2227 | pkg_joxa_repo = https://github.com/joxa/joxa |
2124 | 2228 | pkg_joxa_commit = master |
2125 | 2229 | |
2230 | PACKAGES += json | |
2231 | pkg_json_name = json | |
2232 | pkg_json_description = a high level json library for erlang (17.0+) | |
2233 | pkg_json_homepage = https://github.com/talentdeficit/json | |
2234 | pkg_json_fetch = git | |
2235 | pkg_json_repo = https://github.com/talentdeficit/json | |
2236 | pkg_json_commit = master | |
2237 | ||
2238 | PACKAGES += json_rec | |
2239 | pkg_json_rec_name = json_rec | |
2240 | pkg_json_rec_description = JSON to erlang record | |
2241 | pkg_json_rec_homepage = https://github.com/justinkirby/json_rec | |
2242 | pkg_json_rec_fetch = git | |
2243 | pkg_json_rec_repo = https://github.com/justinkirby/json_rec | |
2244 | pkg_json_rec_commit = master | |
2245 | ||
2126 | 2246 | PACKAGES += jsone |
2127 | 2247 | pkg_jsone_name = jsone |
2128 | 2248 | pkg_jsone_description = An Erlang library for encoding, decoding JSON data. |
2139 | 2259 | pkg_jsonerl_repo = https://github.com/lambder/jsonerl |
2140 | 2260 | pkg_jsonerl_commit = master |
2141 | 2261 | |
2142 | PACKAGES += json | |
2143 | pkg_json_name = json | |
2144 | pkg_json_description = a high level json library for erlang (17.0+) | |
2145 | pkg_json_homepage = https://github.com/talentdeficit/json | |
2146 | pkg_json_fetch = git | |
2147 | pkg_json_repo = https://github.com/talentdeficit/json | |
2148 | pkg_json_commit = master | |
2149 | ||
2150 | 2262 | PACKAGES += jsonpath |
2151 | 2263 | pkg_jsonpath_name = jsonpath |
2152 | 2264 | pkg_jsonpath_description = Fast Erlang JSON data retrieval and updates via javascript-like notation |
2155 | 2267 | pkg_jsonpath_repo = https://github.com/GeneStevens/jsonpath |
2156 | 2268 | pkg_jsonpath_commit = master |
2157 | 2269 | |
2158 | PACKAGES += json_rec | |
2159 | pkg_json_rec_name = json_rec | |
2160 | pkg_json_rec_description = JSON to erlang record | |
2161 | pkg_json_rec_homepage = https://github.com/justinkirby/json_rec | |
2162 | pkg_json_rec_fetch = git | |
2163 | pkg_json_rec_repo = https://github.com/justinkirby/json_rec | |
2164 | pkg_json_rec_commit = master | |
2165 | ||
2166 | 2270 | PACKAGES += jsonx |
2167 | 2271 | pkg_jsonx_name = jsonx |
2168 | 2272 | pkg_jsonx_description = JSONX is an Erlang library for efficient decode and encode JSON, written in C. |
2291 | 2395 | pkg_kvs_repo = https://github.com/synrc/kvs |
2292 | 2396 | pkg_kvs_commit = master |
2293 | 2397 | |
2398 | PACKAGES += lager | |
2399 | pkg_lager_name = lager | |
2400 | pkg_lager_description = A logging framework for Erlang/OTP. | |
2401 | pkg_lager_homepage = https://github.com/erlang-lager/lager | |
2402 | pkg_lager_fetch = git | |
2403 | pkg_lager_repo = https://github.com/erlang-lager/lager | |
2404 | pkg_lager_commit = master | |
2405 | ||
2294 | 2406 | PACKAGES += lager_amqp_backend |
2295 | 2407 | pkg_lager_amqp_backend_name = lager_amqp_backend |
2296 | 2408 | pkg_lager_amqp_backend_description = AMQP RabbitMQ Lager backend |
2299 | 2411 | pkg_lager_amqp_backend_repo = https://github.com/jbrisbin/lager_amqp_backend |
2300 | 2412 | pkg_lager_amqp_backend_commit = master |
2301 | 2413 | |
2302 | PACKAGES += lager | |
2303 | pkg_lager_name = lager | |
2304 | pkg_lager_description = A logging framework for Erlang/OTP. | |
2305 | pkg_lager_homepage = https://github.com/basho/lager | |
2306 | pkg_lager_fetch = git | |
2307 | pkg_lager_repo = https://github.com/basho/lager | |
2308 | pkg_lager_commit = master | |
2309 | ||
2310 | 2414 | PACKAGES += lager_syslog |
2311 | 2415 | pkg_lager_syslog_name = lager_syslog |
2312 | 2416 | pkg_lager_syslog_description = Syslog backend for lager |
2313 | pkg_lager_syslog_homepage = https://github.com/basho/lager_syslog | |
2417 | pkg_lager_syslog_homepage = https://github.com/erlang-lager/lager_syslog | |
2314 | 2418 | pkg_lager_syslog_fetch = git |
2315 | pkg_lager_syslog_repo = https://github.com/basho/lager_syslog | |
2419 | pkg_lager_syslog_repo = https://github.com/erlang-lager/lager_syslog | |
2316 | 2420 | pkg_lager_syslog_commit = master |
2317 | 2421 | |
2318 | 2422 | PACKAGES += lambdapad |
2483 | 2587 | pkg_mavg_repo = https://github.com/EchoTeam/mavg |
2484 | 2588 | pkg_mavg_commit = master |
2485 | 2589 | |
2590 | PACKAGES += mc_erl | |
2591 | pkg_mc_erl_name = mc_erl | |
2592 | pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. | |
2593 | pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl | |
2594 | pkg_mc_erl_fetch = git | |
2595 | pkg_mc_erl_repo = https://github.com/clonejo/mc-erl | |
2596 | pkg_mc_erl_commit = master | |
2597 | ||
2486 | 2598 | PACKAGES += mcd |
2487 | 2599 | pkg_mcd_name = mcd |
2488 | 2600 | pkg_mcd_description = Fast memcached protocol client in pure Erlang |
2499 | 2611 | pkg_mcerlang_repo = https://github.com/fredlund/McErlang |
2500 | 2612 | pkg_mcerlang_commit = master |
2501 | 2613 | |
2502 | PACKAGES += mc_erl | |
2503 | pkg_mc_erl_name = mc_erl | |
2504 | pkg_mc_erl_description = mc-erl is a server for Minecraft 1.4.7 written in Erlang. | |
2505 | pkg_mc_erl_homepage = https://github.com/clonejo/mc-erl | |
2506 | pkg_mc_erl_fetch = git | |
2507 | pkg_mc_erl_repo = https://github.com/clonejo/mc-erl | |
2508 | pkg_mc_erl_commit = master | |
2509 | ||
2510 | 2614 | PACKAGES += meck |
2511 | 2615 | pkg_meck_name = meck |
2512 | 2616 | pkg_meck_description = A mocking library for Erlang |
2771 | 2875 | pkg_nprocreg_repo = https://github.com/nitrogen/nprocreg |
2772 | 2876 | pkg_nprocreg_commit = master |
2773 | 2877 | |
2878 | PACKAGES += oauth | |
2879 | pkg_oauth_name = oauth | |
2880 | pkg_oauth_description = An Erlang OAuth 1.0 implementation | |
2881 | pkg_oauth_homepage = https://github.com/tim/erlang-oauth | |
2882 | pkg_oauth_fetch = git | |
2883 | pkg_oauth_repo = https://github.com/tim/erlang-oauth | |
2884 | pkg_oauth_commit = master | |
2885 | ||
2774 | 2886 | PACKAGES += oauth2 |
2775 | 2887 | pkg_oauth2_name = oauth2 |
2776 | 2888 | pkg_oauth2_description = Erlang Oauth2 implementation |
2779 | 2891 | pkg_oauth2_repo = https://github.com/kivra/oauth2 |
2780 | 2892 | pkg_oauth2_commit = master |
2781 | 2893 | |
2782 | PACKAGES += oauth | |
2783 | pkg_oauth_name = oauth | |
2784 | pkg_oauth_description = An Erlang OAuth 1.0 implementation | |
2785 | pkg_oauth_homepage = https://github.com/tim/erlang-oauth | |
2786 | pkg_oauth_fetch = git | |
2787 | pkg_oauth_repo = https://github.com/tim/erlang-oauth | |
2788 | pkg_oauth_commit = master | |
2894 | PACKAGES += observer_cli | |
2895 | pkg_observer_cli_name = observer_cli | |
2896 | pkg_observer_cli_description = Visualize Erlang/Elixir Nodes On The Command Line | |
2897 | pkg_observer_cli_homepage = http://zhongwencool.github.io/observer_cli | |
2898 | pkg_observer_cli_fetch = git | |
2899 | pkg_observer_cli_repo = https://github.com/zhongwencool/observer_cli | |
2900 | pkg_observer_cli_commit = master | |
2789 | 2901 | |
2790 | 2902 | PACKAGES += octopus |
2791 | 2903 | pkg_octopus_name = octopus |
2835 | 2947 | pkg_openpoker_repo = https://github.com/hpyhacking/openpoker |
2836 | 2948 | pkg_openpoker_commit = master |
2837 | 2949 | |
2950 | PACKAGES += otpbp | |
2951 | pkg_otpbp_name = otpbp | |
2952 | pkg_otpbp_description = Parse transformer for use new OTP functions in old Erlang/OTP releases (R15, R16, 17, 18, 19) | |
2953 | pkg_otpbp_homepage = https://github.com/Ledest/otpbp | |
2954 | pkg_otpbp_fetch = git | |
2955 | pkg_otpbp_repo = https://github.com/Ledest/otpbp | |
2956 | pkg_otpbp_commit = master | |
2957 | ||
2838 | 2958 | PACKAGES += pal |
2839 | 2959 | pkg_pal_name = pal |
2840 | 2960 | pkg_pal_description = Pragmatic Authentication Library |
2971 | 3091 | pkg_procket_repo = https://github.com/msantos/procket |
2972 | 3092 | pkg_procket_commit = master |
2973 | 3093 | |
3094 | PACKAGES += prop | |
3095 | pkg_prop_name = prop | |
3096 | pkg_prop_description = An Erlang code scaffolding and generator system. | |
3097 | pkg_prop_homepage = https://github.com/nuex/prop | |
3098 | pkg_prop_fetch = git | |
3099 | pkg_prop_repo = https://github.com/nuex/prop | |
3100 | pkg_prop_commit = master | |
3101 | ||
2974 | 3102 | PACKAGES += proper |
2975 | 3103 | pkg_proper_name = proper |
2976 | 3104 | pkg_proper_description = PropEr: a QuickCheck-inspired property-based testing tool for Erlang. |
2979 | 3107 | pkg_proper_repo = https://github.com/manopapad/proper |
2980 | 3108 | pkg_proper_commit = master |
2981 | 3109 | |
2982 | PACKAGES += prop | |
2983 | pkg_prop_name = prop | |
2984 | pkg_prop_description = An Erlang code scaffolding and generator system. | |
2985 | pkg_prop_homepage = https://github.com/nuex/prop | |
2986 | pkg_prop_fetch = git | |
2987 | pkg_prop_repo = https://github.com/nuex/prop | |
2988 | pkg_prop_commit = master | |
2989 | ||
2990 | 3110 | PACKAGES += props |
2991 | 3111 | pkg_props_name = props |
2992 | 3112 | pkg_props_description = Property structure library |
3059 | 3179 | pkg_quickrand_repo = https://github.com/okeuday/quickrand |
3060 | 3180 | pkg_quickrand_commit = master |
3061 | 3181 | |
3182 | PACKAGES += rabbit | |
3183 | pkg_rabbit_name = rabbit | |
3184 | pkg_rabbit_description = RabbitMQ Server | |
3185 | pkg_rabbit_homepage = https://www.rabbitmq.com/ | |
3186 | pkg_rabbit_fetch = git | |
3187 | pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git | |
3188 | pkg_rabbit_commit = master | |
3189 | ||
3062 | 3190 | PACKAGES += rabbit_exchange_type_riak |
3063 | 3191 | pkg_rabbit_exchange_type_riak_name = rabbit_exchange_type_riak |
3064 | 3192 | pkg_rabbit_exchange_type_riak_description = Custom RabbitMQ exchange type for sticking messages in Riak |
3067 | 3195 | pkg_rabbit_exchange_type_riak_repo = https://github.com/jbrisbin/riak-exchange |
3068 | 3196 | pkg_rabbit_exchange_type_riak_commit = master |
3069 | 3197 | |
3070 | PACKAGES += rabbit | |
3071 | pkg_rabbit_name = rabbit | |
3072 | pkg_rabbit_description = RabbitMQ Server | |
3073 | pkg_rabbit_homepage = https://www.rabbitmq.com/ | |
3074 | pkg_rabbit_fetch = git | |
3075 | pkg_rabbit_repo = https://github.com/rabbitmq/rabbitmq-server.git | |
3076 | pkg_rabbit_commit = master | |
3077 | ||
3078 | 3198 | PACKAGES += rack |
3079 | 3199 | pkg_rack_name = rack |
3080 | 3200 | pkg_rack_description = Rack handler for erlang |
3219 | 3339 | pkg_rfc4627_jsonrpc_repo = https://github.com/tonyg/erlang-rfc4627 |
3220 | 3340 | pkg_rfc4627_jsonrpc_commit = master |
3221 | 3341 | |
3342 | PACKAGES += riak_control | |
3343 | pkg_riak_control_name = riak_control | |
3344 | pkg_riak_control_description = Webmachine-based administration interface for Riak. | |
3345 | pkg_riak_control_homepage = https://github.com/basho/riak_control | |
3346 | pkg_riak_control_fetch = git | |
3347 | pkg_riak_control_repo = https://github.com/basho/riak_control | |
3348 | pkg_riak_control_commit = master | |
3349 | ||
3350 | PACKAGES += riak_core | |
3351 | pkg_riak_core_name = riak_core | |
3352 | pkg_riak_core_description = Distributed systems infrastructure used by Riak. | |
3353 | pkg_riak_core_homepage = https://github.com/basho/riak_core | |
3354 | pkg_riak_core_fetch = git | |
3355 | pkg_riak_core_repo = https://github.com/basho/riak_core | |
3356 | pkg_riak_core_commit = master | |
3357 | ||
3358 | PACKAGES += riak_dt | |
3359 | pkg_riak_dt_name = riak_dt | |
3360 | pkg_riak_dt_description = Convergent replicated datatypes in Erlang | |
3361 | pkg_riak_dt_homepage = https://github.com/basho/riak_dt | |
3362 | pkg_riak_dt_fetch = git | |
3363 | pkg_riak_dt_repo = https://github.com/basho/riak_dt | |
3364 | pkg_riak_dt_commit = master | |
3365 | ||
3366 | PACKAGES += riak_ensemble | |
3367 | pkg_riak_ensemble_name = riak_ensemble | |
3368 | pkg_riak_ensemble_description = Multi-Paxos framework in Erlang | |
3369 | pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble | |
3370 | pkg_riak_ensemble_fetch = git | |
3371 | pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble | |
3372 | pkg_riak_ensemble_commit = master | |
3373 | ||
3374 | PACKAGES += riak_kv | |
3375 | pkg_riak_kv_name = riak_kv | |
3376 | pkg_riak_kv_description = Riak Key/Value Store | |
3377 | pkg_riak_kv_homepage = https://github.com/basho/riak_kv | |
3378 | pkg_riak_kv_fetch = git | |
3379 | pkg_riak_kv_repo = https://github.com/basho/riak_kv | |
3380 | pkg_riak_kv_commit = master | |
3381 | ||
3382 | PACKAGES += riak_pg | |
3383 | pkg_riak_pg_name = riak_pg | |
3384 | pkg_riak_pg_description = Distributed process groups with riak_core. | |
3385 | pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg | |
3386 | pkg_riak_pg_fetch = git | |
3387 | pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg | |
3388 | pkg_riak_pg_commit = master | |
3389 | ||
3390 | PACKAGES += riak_pipe | |
3391 | pkg_riak_pipe_name = riak_pipe | |
3392 | pkg_riak_pipe_description = Riak Pipelines | |
3393 | pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe | |
3394 | pkg_riak_pipe_fetch = git | |
3395 | pkg_riak_pipe_repo = https://github.com/basho/riak_pipe | |
3396 | pkg_riak_pipe_commit = master | |
3397 | ||
3398 | PACKAGES += riak_sysmon | |
3399 | pkg_riak_sysmon_name = riak_sysmon | |
3400 | pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages | |
3401 | pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon | |
3402 | pkg_riak_sysmon_fetch = git | |
3403 | pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon | |
3404 | pkg_riak_sysmon_commit = master | |
3405 | ||
3406 | PACKAGES += riak_test | |
3407 | pkg_riak_test_name = riak_test | |
3408 | pkg_riak_test_description = I'm in your cluster, testing your riaks | |
3409 | pkg_riak_test_homepage = https://github.com/basho/riak_test | |
3410 | pkg_riak_test_fetch = git | |
3411 | pkg_riak_test_repo = https://github.com/basho/riak_test | |
3412 | pkg_riak_test_commit = master | |
3413 | ||
3222 | 3414 | PACKAGES += riakc |
3223 | 3415 | pkg_riakc_name = riakc |
3224 | 3416 | pkg_riakc_description = Erlang clients for Riak. |
3227 | 3419 | pkg_riakc_repo = https://github.com/basho/riak-erlang-client |
3228 | 3420 | pkg_riakc_commit = master |
3229 | 3421 | |
3230 | PACKAGES += riak_control | |
3231 | pkg_riak_control_name = riak_control | |
3232 | pkg_riak_control_description = Webmachine-based administration interface for Riak. | |
3233 | pkg_riak_control_homepage = https://github.com/basho/riak_control | |
3234 | pkg_riak_control_fetch = git | |
3235 | pkg_riak_control_repo = https://github.com/basho/riak_control | |
3236 | pkg_riak_control_commit = master | |
3237 | ||
3238 | PACKAGES += riak_core | |
3239 | pkg_riak_core_name = riak_core | |
3240 | pkg_riak_core_description = Distributed systems infrastructure used by Riak. | |
3241 | pkg_riak_core_homepage = https://github.com/basho/riak_core | |
3242 | pkg_riak_core_fetch = git | |
3243 | pkg_riak_core_repo = https://github.com/basho/riak_core | |
3244 | pkg_riak_core_commit = master | |
3245 | ||
3246 | PACKAGES += riak_dt | |
3247 | pkg_riak_dt_name = riak_dt | |
3248 | pkg_riak_dt_description = Convergent replicated datatypes in Erlang | |
3249 | pkg_riak_dt_homepage = https://github.com/basho/riak_dt | |
3250 | pkg_riak_dt_fetch = git | |
3251 | pkg_riak_dt_repo = https://github.com/basho/riak_dt | |
3252 | pkg_riak_dt_commit = master | |
3253 | ||
3254 | PACKAGES += riak_ensemble | |
3255 | pkg_riak_ensemble_name = riak_ensemble | |
3256 | pkg_riak_ensemble_description = Multi-Paxos framework in Erlang | |
3257 | pkg_riak_ensemble_homepage = https://github.com/basho/riak_ensemble | |
3258 | pkg_riak_ensemble_fetch = git | |
3259 | pkg_riak_ensemble_repo = https://github.com/basho/riak_ensemble | |
3260 | pkg_riak_ensemble_commit = master | |
3261 | ||
3262 | 3422 | PACKAGES += riakhttpc |
3263 | 3423 | pkg_riakhttpc_name = riakhttpc |
3264 | 3424 | pkg_riakhttpc_description = Riak Erlang client using the HTTP interface |
3267 | 3427 | pkg_riakhttpc_repo = https://github.com/basho/riak-erlang-http-client |
3268 | 3428 | pkg_riakhttpc_commit = master |
3269 | 3429 | |
3270 | PACKAGES += riak_kv | |
3271 | pkg_riak_kv_name = riak_kv | |
3272 | pkg_riak_kv_description = Riak Key/Value Store | |
3273 | pkg_riak_kv_homepage = https://github.com/basho/riak_kv | |
3274 | pkg_riak_kv_fetch = git | |
3275 | pkg_riak_kv_repo = https://github.com/basho/riak_kv | |
3276 | pkg_riak_kv_commit = master | |
3277 | ||
3278 | 3430 | PACKAGES += riaknostic |
3279 | 3431 | pkg_riaknostic_name = riaknostic |
3280 | 3432 | pkg_riaknostic_description = A diagnostic tool for Riak installations, to find common errors asap |
3283 | 3435 | pkg_riaknostic_repo = https://github.com/basho/riaknostic |
3284 | 3436 | pkg_riaknostic_commit = master |
3285 | 3437 | |
3286 | PACKAGES += riak_pg | |
3287 | pkg_riak_pg_name = riak_pg | |
3288 | pkg_riak_pg_description = Distributed process groups with riak_core. | |
3289 | pkg_riak_pg_homepage = https://github.com/cmeiklejohn/riak_pg | |
3290 | pkg_riak_pg_fetch = git | |
3291 | pkg_riak_pg_repo = https://github.com/cmeiklejohn/riak_pg | |
3292 | pkg_riak_pg_commit = master | |
3293 | ||
3294 | PACKAGES += riak_pipe | |
3295 | pkg_riak_pipe_name = riak_pipe | |
3296 | pkg_riak_pipe_description = Riak Pipelines | |
3297 | pkg_riak_pipe_homepage = https://github.com/basho/riak_pipe | |
3298 | pkg_riak_pipe_fetch = git | |
3299 | pkg_riak_pipe_repo = https://github.com/basho/riak_pipe | |
3300 | pkg_riak_pipe_commit = master | |
3301 | ||
3302 | 3438 | PACKAGES += riakpool |
3303 | 3439 | pkg_riakpool_name = riakpool |
3304 | 3440 | pkg_riakpool_description = erlang riak client pool |
3307 | 3443 | pkg_riakpool_repo = https://github.com/dweldon/riakpool |
3308 | 3444 | pkg_riakpool_commit = master |
3309 | 3445 | |
3310 | PACKAGES += riak_sysmon | |
3311 | pkg_riak_sysmon_name = riak_sysmon | |
3312 | pkg_riak_sysmon_description = Simple OTP app for managing Erlang VM system_monitor event messages | |
3313 | pkg_riak_sysmon_homepage = https://github.com/basho/riak_sysmon | |
3314 | pkg_riak_sysmon_fetch = git | |
3315 | pkg_riak_sysmon_repo = https://github.com/basho/riak_sysmon | |
3316 | pkg_riak_sysmon_commit = master | |
3317 | ||
3318 | PACKAGES += riak_test | |
3319 | pkg_riak_test_name = riak_test | |
3320 | pkg_riak_test_description = I'm in your cluster, testing your riaks | |
3321 | pkg_riak_test_homepage = https://github.com/basho/riak_test | |
3322 | pkg_riak_test_fetch = git | |
3323 | pkg_riak_test_repo = https://github.com/basho/riak_test | |
3324 | pkg_riak_test_commit = master | |
3325 | ||
3326 | 3446 | PACKAGES += rivus_cep |
3327 | 3447 | pkg_rivus_cep_name = rivus_cep |
3328 | 3448 | pkg_rivus_cep_description = Complex event processing in Erlang |
3603 | 3723 | pkg_stripe_repo = https://github.com/mattsta/stripe-erlang |
3604 | 3724 | pkg_stripe_commit = v1 |
3605 | 3725 | |
3726 | PACKAGES += subproc | |
3727 | pkg_subproc_name = subproc | |
3728 | pkg_subproc_description = unix subprocess manager with {active,once|false} modes | |
3729 | pkg_subproc_homepage = http://dozzie.jarowit.net/trac/wiki/subproc | |
3730 | pkg_subproc_fetch = git | |
3731 | pkg_subproc_repo = https://github.com/dozzie/subproc | |
3732 | pkg_subproc_commit = v0.1.0 | |
3733 | ||
3606 | 3734 | PACKAGES += supervisor3 |
3607 | 3735 | pkg_supervisor3_name = supervisor3 |
3608 | 3736 | pkg_supervisor3_description = OTP supervisor with additional strategies |
3643 | 3771 | pkg_switchboard_repo = https://github.com/thusfresh/switchboard |
3644 | 3772 | pkg_switchboard_commit = master |
3645 | 3773 | |
3774 | PACKAGES += syn | |
3775 | pkg_syn_name = syn | |
3776 | pkg_syn_description = A global Process Registry and Process Group manager for Erlang. | |
3777 | pkg_syn_homepage = https://github.com/ostinelli/syn | |
3778 | pkg_syn_fetch = git | |
3779 | pkg_syn_repo = https://github.com/ostinelli/syn | |
3780 | pkg_syn_commit = master | |
3781 | ||
3646 | 3782 | PACKAGES += sync |
3647 | 3783 | pkg_sync_name = sync |
3648 | 3784 | pkg_sync_description = On-the-fly recompiling and reloading in Erlang. |
3651 | 3787 | pkg_sync_repo = https://github.com/rustyio/sync |
3652 | 3788 | pkg_sync_commit = master |
3653 | 3789 | |
3654 | PACKAGES += syn | |
3655 | pkg_syn_name = syn | |
3656 | pkg_syn_description = A global Process Registry and Process Group manager for Erlang. | |
3657 | pkg_syn_homepage = https://github.com/ostinelli/syn | |
3658 | pkg_syn_fetch = git | |
3659 | pkg_syn_repo = https://github.com/ostinelli/syn | |
3660 | pkg_syn_commit = master | |
3661 | ||
3662 | 3790 | PACKAGES += syntaxerl |
3663 | 3791 | pkg_syntaxerl_name = syntaxerl |
3664 | 3792 | pkg_syntaxerl_description = Syntax checker for Erlang |
3731 | 3859 | pkg_tirerl_repo = https://github.com/inaka/tirerl |
3732 | 3860 | pkg_tirerl_commit = master |
3733 | 3861 | |
3862 | PACKAGES += toml | |
3863 | pkg_toml_name = toml | |
3864 | pkg_toml_description = TOML (0.4.0) config parser | |
3865 | pkg_toml_homepage = http://dozzie.jarowit.net/trac/wiki/TOML | |
3866 | pkg_toml_fetch = git | |
3867 | pkg_toml_repo = https://github.com/dozzie/toml | |
3868 | pkg_toml_commit = v0.2.0 | |
3869 | ||
3734 | 3870 | PACKAGES += traffic_tools |
3735 | 3871 | pkg_traffic_tools_name = traffic_tools |
3736 | 3872 | pkg_traffic_tools_description = Simple traffic limiting library |
3774 | 3910 | PACKAGES += triq |
3775 | 3911 | pkg_triq_name = triq |
3776 | 3912 | pkg_triq_description = Trifork QuickCheck |
3777 | pkg_triq_homepage = https://github.com/krestenkrab/triq | |
3913 | pkg_triq_homepage = https://triq.gitlab.io | |
3778 | 3914 | pkg_triq_fetch = git |
3779 | pkg_triq_repo = https://github.com/krestenkrab/triq | |
3915 | pkg_triq_repo = https://gitlab.com/triq/triq.git | |
3780 | 3916 | pkg_triq_commit = master |
3781 | 3917 | |
3782 | 3918 | PACKAGES += tunctl |
4011 | 4147 | pkg_yaws_repo = https://github.com/klacke/yaws |
4012 | 4148 | pkg_yaws_commit = master |
4013 | 4149 | |
4150 | PACKAGES += zab_engine | |
4151 | pkg_zab_engine_name = zab_engine | |
4152 | pkg_zab_engine_description = zab propotocol implement by erlang | |
4153 | pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine | |
4154 | pkg_zab_engine_fetch = git | |
4155 | pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine | |
4156 | pkg_zab_engine_commit = master | |
4157 | ||
4014 | 4158 | PACKAGES += zabbix_sender |
4015 | 4159 | pkg_zabbix_sender_name = zabbix_sender |
4016 | 4160 | pkg_zabbix_sender_description = Zabbix trapper for sending data to Zabbix in pure Erlang |
4018 | 4162 | pkg_zabbix_sender_fetch = git |
4019 | 4163 | pkg_zabbix_sender_repo = https://github.com/stalkermn/zabbix_sender.git |
4020 | 4164 | pkg_zabbix_sender_commit = master |
4021 | ||
4022 | PACKAGES += zab_engine | |
4023 | pkg_zab_engine_name = zab_engine | |
4024 | pkg_zab_engine_description = zab propotocol implement by erlang | |
4025 | pkg_zab_engine_homepage = https://github.com/xinmingyao/zab_engine | |
4026 | pkg_zab_engine_fetch = git | |
4027 | pkg_zab_engine_repo = https://github.com/xinmingyao/zab_engine | |
4028 | pkg_zab_engine_commit = master | |
4029 | 4165 | |
4030 | 4166 | PACKAGES += zeta |
4031 | 4167 | pkg_zeta_name = zeta |
4097 | 4233 | # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> |
4098 | 4234 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
4099 | 4235 | |
4100 | .PHONY: distclean-deps | |
4236 | .PHONY: distclean-deps clean-tmp-deps.log | |
4101 | 4237 | |
4102 | 4238 | # Configuration. |
4103 | 4239 | |
4116 | 4252 | |
4117 | 4253 | REBAR_DEPS_DIR = $(DEPS_DIR) |
4118 | 4254 | export REBAR_DEPS_DIR |
4255 | ||
4256 | # External "early" plugins (see core/plugins.mk for regular plugins). | |
4257 | # They both use the core_dep_plugin macro. | |
4258 | ||
4259 | define core_dep_plugin | |
4260 | ifeq ($(2),$(PROJECT)) | |
4261 | -include $$(patsubst $(PROJECT)/%,%,$(1)) | |
4262 | else | |
4263 | -include $(DEPS_DIR)/$(1) | |
4264 | ||
4265 | $(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; | |
4266 | endif | |
4267 | endef | |
4268 | ||
4269 | DEP_EARLY_PLUGINS ?= | |
4270 | ||
4271 | $(foreach p,$(DEP_EARLY_PLUGINS),\ | |
4272 | $(eval $(if $(findstring /,$p),\ | |
4273 | $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ | |
4274 | $(call core_dep_plugin,$p/early-plugins.mk,$p)))) | |
4119 | 4275 | |
4120 | 4276 | dep_name = $(if $(dep_$(1)),$(1),$(if $(pkg_$(1)_name),$(pkg_$(1)_name),$(1))) |
4121 | 4277 | dep_repo = $(patsubst git://github.com/%,https://github.com/%, \ |
4122 | 4278 | $(if $(dep_$(1)),$(word 2,$(dep_$(1))),$(pkg_$(1)_repo))) |
4123 | 4279 | dep_commit = $(if $(dep_$(1)_commit),$(dep_$(1)_commit),$(if $(dep_$(1)),$(word 3,$(dep_$(1))),$(pkg_$(1)_commit))) |
4124 | 4280 | |
4281 | LOCAL_DEPS_DIRS = $(foreach a,$(LOCAL_DEPS),$(if $(wildcard $(APPS_DIR)/$(a)),$(APPS_DIR)/$(a))) | |
4125 | 4282 | ALL_APPS_DIRS = $(if $(wildcard $(APPS_DIR)/),$(filter-out $(APPS_DIR),$(shell find $(APPS_DIR) -maxdepth 1 -type d))) |
4126 | 4283 | ALL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(foreach dep,$(filter-out $(IGNORE_DEPS),$(BUILD_DEPS) $(DEPS)),$(call dep_name,$(dep)))) |
4127 | 4284 | |
4138 | 4295 | |
4139 | 4296 | # Verbosity. |
4140 | 4297 | |
4141 | dep_verbose_0 = @echo " DEP " $(1); | |
4298 | dep_verbose_0 = @echo " DEP $1 ($(call dep_commit,$1))"; | |
4142 | 4299 | dep_verbose_2 = set -x; |
4143 | 4300 | dep_verbose = $(dep_verbose_$(V)) |
4144 | 4301 | |
4145 | 4302 | # Core targets. |
4146 | 4303 | |
4147 | ifdef IS_APP | |
4148 | apps:: | |
4149 | else | |
4150 | apps:: $(ALL_APPS_DIRS) | |
4304 | apps:: $(ALL_APPS_DIRS) clean-tmp-deps.log | |
4151 | 4305 | ifeq ($(IS_APP)$(IS_DEP),) |
4152 | 4306 | $(verbose) rm -f $(ERLANG_MK_TMP)/apps.log |
4153 | 4307 | endif |
4155 | 4309 | # Create ebin directory for all apps to make sure Erlang recognizes them |
4156 | 4310 | # as proper OTP applications when using -include_lib. This is a temporary |
4157 | 4311 | # fix, a proper fix would be to compile apps/* in the right order. |
4158 | $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ | |
4159 | mkdir -p $$dep/ebin || exit $$?; \ | |
4312 | ifndef IS_APP | |
4313 | $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ | |
4314 | mkdir -p $$dep/ebin; \ | |
4160 | 4315 | done |
4161 | $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ | |
4316 | endif | |
4317 | # at the toplevel: if LOCAL_DEPS is defined with at least one local app, only | |
4318 | # compile that list of apps. otherwise, compile everything. | |
4319 | # within an app: compile all LOCAL_DEPS that are (uncompiled) local apps | |
4320 | $(verbose) set -e; for dep in $(if $(LOCAL_DEPS_DIRS)$(IS_APP),$(LOCAL_DEPS_DIRS),$(ALL_APPS_DIRS)) ; do \ | |
4162 | 4321 | if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/apps.log; then \ |
4163 | 4322 | :; \ |
4164 | 4323 | else \ |
4165 | 4324 | echo $$dep >> $(ERLANG_MK_TMP)/apps.log; \ |
4166 | $(MAKE) -C $$dep IS_APP=1 || exit $$?; \ | |
4325 | $(MAKE) -C $$dep IS_APP=1; \ | |
4167 | 4326 | fi \ |
4168 | 4327 | done |
4328 | ||
4329 | clean-tmp-deps.log: | |
4330 | ifeq ($(IS_APP)$(IS_DEP),) | |
4331 | $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log | |
4169 | 4332 | endif |
4170 | 4333 | |
4171 | 4334 | ifneq ($(SKIP_DEPS),) |
4172 | 4335 | deps:: |
4173 | 4336 | else |
4174 | ifeq ($(ALL_DEPS_DIRS),) | |
4175 | deps:: apps | |
4176 | else | |
4177 | deps:: $(ALL_DEPS_DIRS) apps | |
4178 | ifeq ($(IS_APP)$(IS_DEP),) | |
4179 | $(verbose) rm -f $(ERLANG_MK_TMP)/deps.log | |
4180 | endif | |
4337 | deps:: $(ALL_DEPS_DIRS) apps clean-tmp-deps.log | |
4181 | 4338 | $(verbose) mkdir -p $(ERLANG_MK_TMP) |
4182 | $(verbose) for dep in $(ALL_DEPS_DIRS) ; do \ | |
4339 | $(verbose) set -e; for dep in $(ALL_DEPS_DIRS) ; do \ | |
4183 | 4340 | if grep -qs ^$$dep$$ $(ERLANG_MK_TMP)/deps.log; then \ |
4184 | 4341 | :; \ |
4185 | 4342 | else \ |
4186 | 4343 | echo $$dep >> $(ERLANG_MK_TMP)/deps.log; \ |
4187 | 4344 | if [ -f $$dep/GNUmakefile ] || [ -f $$dep/makefile ] || [ -f $$dep/Makefile ]; then \ |
4188 | $(MAKE) -C $$dep IS_DEP=1 || exit $$?; \ | |
4345 | $(MAKE) -C $$dep IS_DEP=1; \ | |
4189 | 4346 | else \ |
4190 | echo "Error: No Makefile to build dependency $$dep."; \ | |
4347 | echo "Error: No Makefile to build dependency $$dep." >&2; \ | |
4191 | 4348 | exit 2; \ |
4192 | 4349 | fi \ |
4193 | 4350 | fi \ |
4194 | 4351 | done |
4195 | endif | |
4196 | 4352 | endif |
4197 | 4353 | |
4198 | 4354 | # Deps related targets. |
4202 | 4358 | # in practice only Makefile is needed so far. |
4203 | 4359 | define dep_autopatch |
4204 | 4360 | if [ -f $(DEPS_DIR)/$(1)/erlang.mk ]; then \ |
4361 | rm -rf $(DEPS_DIR)/$1/ebin/; \ | |
4205 | 4362 | $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ |
4206 | 4363 | $(call dep_autopatch_erlang_mk,$(1)); \ |
4207 | 4364 | elif [ -f $(DEPS_DIR)/$(1)/Makefile ]; then \ |
4208 | if [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ | |
4365 | if [ -f $(DEPS_DIR)/$1/rebar.lock ]; then \ | |
4366 | $(call dep_autopatch2,$1); \ | |
4367 | elif [ 0 != `grep -c "include ../\w*\.mk" $(DEPS_DIR)/$(1)/Makefile` ]; then \ | |
4209 | 4368 | $(call dep_autopatch2,$(1)); \ |
4210 | elif [ 0 != `grep -ci rebar $(DEPS_DIR)/$(1)/Makefile` ]; then \ | |
4369 | elif [ 0 != `grep -ci "^[^#].*rebar" $(DEPS_DIR)/$(1)/Makefile` ]; then \ | |
4211 | 4370 | $(call dep_autopatch2,$(1)); \ |
4212 | elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i rebar '{}' \;`" ]; then \ | |
4371 | elif [ -n "`find $(DEPS_DIR)/$(1)/ -type f -name \*.mk -not -name erlang.mk -exec grep -i "^[^#].*rebar" '{}' \;`" ]; then \ | |
4213 | 4372 | $(call dep_autopatch2,$(1)); \ |
4214 | else \ | |
4215 | $(call erlang,$(call dep_autopatch_app.erl,$(1))); \ | |
4216 | 4373 | fi \ |
4217 | 4374 | else \ |
4218 | 4375 | if [ ! -d $(DEPS_DIR)/$(1)/src/ ]; then \ |
4224 | 4381 | endef |
4225 | 4382 | |
4226 | 4383 | define dep_autopatch2 |
4384 | ! test -f $(DEPS_DIR)/$1/ebin/$1.app || \ | |
4385 | mv -n $(DEPS_DIR)/$1/ebin/$1.app $(DEPS_DIR)/$1/src/$1.app.src; \ | |
4386 | rm -f $(DEPS_DIR)/$1/ebin/$1.app; \ | |
4227 | 4387 | if [ -f $(DEPS_DIR)/$1/src/$1.app.src.script ]; then \ |
4228 | 4388 | $(call erlang,$(call dep_autopatch_appsrc_script.erl,$(1))); \ |
4229 | 4389 | fi; \ |
4230 | 4390 | $(call erlang,$(call dep_autopatch_appsrc.erl,$(1))); \ |
4231 | if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script ]; then \ | |
4391 | if [ -f $(DEPS_DIR)/$(1)/rebar -o -f $(DEPS_DIR)/$(1)/rebar.config -o -f $(DEPS_DIR)/$(1)/rebar.config.script -o -f $(DEPS_DIR)/$1/rebar.lock ]; then \ | |
4232 | 4392 | $(call dep_autopatch_fetch_rebar); \ |
4233 | 4393 | $(call dep_autopatch_rebar,$(1)); \ |
4234 | 4394 | else \ |
4240 | 4400 | printf "noop:\n" > $(DEPS_DIR)/$(1)/Makefile |
4241 | 4401 | endef |
4242 | 4402 | |
4243 | # Overwrite erlang.mk with the current file by default. | |
4403 | # Replace "include erlang.mk" with a line that will load the parent Erlang.mk | |
4404 | # if given. Do it for all 3 possible Makefile file names. | |
4244 | 4405 | ifeq ($(NO_AUTOPATCH_ERLANG_MK),) |
4245 | 4406 | define dep_autopatch_erlang_mk |
4246 | echo "include $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk" \ | |
4247 | > $(DEPS_DIR)/$1/erlang.mk | |
4407 | for f in Makefile makefile GNUmakefile; do \ | |
4408 | if [ -f $(DEPS_DIR)/$1/$$f ]; then \ | |
4409 | sed -i.bak s/'include *erlang.mk'/'include $$(if $$(ERLANG_MK_FILENAME),$$(ERLANG_MK_FILENAME),erlang.mk)'/ $(DEPS_DIR)/$1/$$f; \ | |
4410 | fi \ | |
4411 | done | |
4248 | 4412 | endef |
4249 | 4413 | else |
4250 | 4414 | define dep_autopatch_erlang_mk |
4309 | 4473 | Write("C_SRC_TYPE = rebar\n"), |
4310 | 4474 | Write("DRV_CFLAGS = -fPIC\nexport DRV_CFLAGS\n"), |
4311 | 4475 | Write(["ERLANG_ARCH = ", rebar_utils:wordsize(), "\nexport ERLANG_ARCH\n"]), |
4476 | ToList = fun | |
4477 | (V) when is_atom(V) -> atom_to_list(V); | |
4478 | (V) when is_list(V) -> "'\\"" ++ V ++ "\\"'" | |
4479 | end, | |
4312 | 4480 | fun() -> |
4313 | 4481 | Write("ERLC_OPTS = +debug_info\nexport ERLC_OPTS\n"), |
4314 | 4482 | case lists:keyfind(erl_opts, 1, Conf) of |
4316 | 4484 | {_, ErlOpts} -> |
4317 | 4485 | lists:foreach(fun |
4318 | 4486 | ({d, D}) -> |
4319 | Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); | |
4487 | Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n"); | |
4488 | ({d, DKey, DVal}) -> | |
4489 | Write("ERLC_OPTS += -D" ++ ToList(DKey) ++ "=" ++ ToList(DVal) ++ "\n"); | |
4320 | 4490 | ({i, I}) -> |
4321 | 4491 | Write(["ERLC_OPTS += -I ", I, "\n"]); |
4322 | 4492 | ({platform_define, Regex, D}) -> |
4323 | 4493 | case rebar_utils:is_arch(Regex) of |
4324 | true -> Write("ERLC_OPTS += -D" ++ atom_to_list(D) ++ "=1\n"); | |
4494 | true -> Write("ERLC_OPTS += -D" ++ ToList(D) ++ "=1\n"); | |
4325 | 4495 | false -> ok |
4326 | 4496 | end; |
4327 | 4497 | ({parse_transform, PT}) -> |
4328 | Write("ERLC_OPTS += +'{parse_transform, " ++ atom_to_list(PT) ++ "}'\n"); | |
4498 | Write("ERLC_OPTS += +'{parse_transform, " ++ ToList(PT) ++ "}'\n"); | |
4329 | 4499 | (_) -> ok |
4330 | 4500 | end, ErlOpts) |
4331 | 4501 | end, |
4332 | 4502 | Write("\n") |
4333 | 4503 | end(), |
4504 | GetHexVsn = fun(N) -> | |
4505 | case file:consult("$(call core_native_path,$(DEPS_DIR)/$1/rebar.lock)") of | |
4506 | {ok, Lock} -> | |
4507 | io:format("~p~n", [Lock]), | |
4508 | case lists:keyfind("1.1.0", 1, Lock) of | |
4509 | {_, LockPkgs} -> | |
4510 | io:format("~p~n", [LockPkgs]), | |
4511 | case lists:keyfind(atom_to_binary(N, latin1), 1, LockPkgs) of | |
4512 | {_, {pkg, _, Vsn}, _} -> | |
4513 | io:format("~p~n", [Vsn]), | |
4514 | {N, {hex, binary_to_list(Vsn)}}; | |
4515 | _ -> | |
4516 | false | |
4517 | end; | |
4518 | _ -> | |
4519 | false | |
4520 | end; | |
4521 | _ -> | |
4522 | false | |
4523 | end | |
4524 | end, | |
4334 | 4525 | fun() -> |
4335 | 4526 | File = case lists:keyfind(deps, 1, Conf) of |
4336 | 4527 | false -> []; |
4337 | 4528 | {_, Deps} -> |
4338 | 4529 | [begin case case Dep of |
4530 | N when is_atom(N) -> GetHexVsn(N); | |
4339 | 4531 | {N, S} when is_atom(N), is_list(S) -> {N, {hex, S}}; |
4340 | 4532 | {N, S} when is_tuple(S) -> {N, S}; |
4341 | 4533 | {N, _, S} -> {N, S}; |
4372 | 4564 | Write("\npre-deps::\n"), |
4373 | 4565 | Write("\npre-app::\n"), |
4374 | 4566 | PatchHook = fun(Cmd) -> |
4375 | case Cmd of | |
4567 | Cmd2 = re:replace(Cmd, "^([g]?make)(.*)( -C.*)", "\\\\1\\\\3\\\\2", [{return, list}]), | |
4568 | case Cmd2 of | |
4376 | 4569 | "make -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); |
4377 | 4570 | "gmake -C" ++ Cmd1 -> "$$\(MAKE) -C" ++ Escape(Cmd1); |
4378 | 4571 | "make " ++ Cmd1 -> "$$\(MAKE) -f Makefile.orig.mk " ++ Escape(Cmd1); |
4487 | 4680 | end, |
4488 | 4681 | [PortSpec(S) || S <- PortSpecs] |
4489 | 4682 | end, |
4490 | Write("\ninclude $(call core_relpath,$(dir $(ERLANG_MK_FILENAME)),$(DEPS_DIR)/app)/erlang.mk"), | |
4683 | Write("\ninclude $$\(if $$\(ERLANG_MK_FILENAME),$$\(ERLANG_MK_FILENAME),erlang.mk)"), | |
4491 | 4684 | RunPlugin = fun(Plugin, Step) -> |
4492 | 4685 | case erlang:function_exported(Plugin, Step, 2) of |
4493 | 4686 | false -> ok; |
4535 | 4728 | halt() |
4536 | 4729 | endef |
4537 | 4730 | |
4538 | define dep_autopatch_app.erl | |
4539 | UpdateModules = fun(App) -> | |
4540 | case filelib:is_regular(App) of | |
4541 | false -> ok; | |
4542 | true -> | |
4543 | {ok, [{application, '$(1)', L0}]} = file:consult(App), | |
4544 | Mods = filelib:fold_files("$(call core_native_path,$(DEPS_DIR)/$1/src)", "\\\\.erl$$", true, | |
4545 | fun (F, Acc) -> [list_to_atom(filename:rootname(filename:basename(F)))|Acc] end, []), | |
4546 | L = lists:keystore(modules, 1, L0, {modules, Mods}), | |
4547 | ok = file:write_file(App, io_lib:format("~p.~n", [{application, '$(1)', L}])) | |
4548 | end | |
4549 | end, | |
4550 | UpdateModules("$(call core_native_path,$(DEPS_DIR)/$1/ebin/$1.app)"), | |
4551 | halt() | |
4552 | endef | |
4553 | ||
4554 | 4731 | define dep_autopatch_appsrc_script.erl |
4555 | 4732 | AppSrc = "$(call core_native_path,$(DEPS_DIR)/$1/src/$1.app.src)", |
4556 | 4733 | AppSrcScript = AppSrc ++ ".script", |
4557 | Bindings = erl_eval:new_bindings(), | |
4558 | {ok, Conf} = file:script(AppSrcScript, Bindings), | |
4734 | {ok, Conf0} = file:consult(AppSrc), | |
4735 | Bindings0 = erl_eval:new_bindings(), | |
4736 | Bindings1 = erl_eval:add_binding('CONFIG', Conf0, Bindings0), | |
4737 | Bindings = erl_eval:add_binding('SCRIPT', AppSrcScript, Bindings1), | |
4738 | Conf = case file:script(AppSrcScript, Bindings) of | |
4739 | {ok, [C]} -> C; | |
4740 | {ok, C} -> C | |
4741 | end, | |
4559 | 4742 | ok = file:write_file(AppSrc, io_lib:format("~p.~n", [Conf])), |
4560 | 4743 | halt() |
4561 | 4744 | endef |
4568 | 4751 | true -> |
4569 | 4752 | {ok, [{application, $(1), L0}]} = file:consult(AppSrcIn), |
4570 | 4753 | L1 = lists:keystore(modules, 1, L0, {modules, []}), |
4571 | L2 = case lists:keyfind(vsn, 1, L1) of {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); _ -> L1 end, | |
4754 | L2 = case lists:keyfind(vsn, 1, L1) of | |
4755 | {_, git} -> lists:keyreplace(vsn, 1, L1, {vsn, "git"}); | |
4756 | {_, {cmd, _}} -> lists:keyreplace(vsn, 1, L1, {vsn, "cmd"}); | |
4757 | _ -> L1 | |
4758 | end, | |
4572 | 4759 | L3 = case lists:keyfind(registered, 1, L2) of false -> [{registered, []}|L2]; _ -> L2 end, |
4573 | 4760 | ok = file:write_file(AppSrcOut, io_lib:format("~p.~n", [{application, $(1), L3}])), |
4574 | 4761 | case AppSrcOut of AppSrcIn -> ok; _ -> ok = file:delete(AppSrcIn) end |
4598 | 4785 | cp -R $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); |
4599 | 4786 | endef |
4600 | 4787 | |
4788 | define dep_fetch_ln | |
4789 | ln -s $(call dep_repo,$(1)) $(DEPS_DIR)/$(call dep_name,$(1)); | |
4790 | endef | |
4791 | ||
4601 | 4792 | # Hex only has a package version. No need to look in the Erlang.mk packages. |
4602 | 4793 | define dep_fetch_hex |
4603 | 4794 | mkdir -p $(ERLANG_MK_TMP)/hex $(DEPS_DIR)/$1; \ |
4604 | 4795 | $(call core_http_get,$(ERLANG_MK_TMP)/hex/$1.tar,\ |
4605 | https://s3.amazonaws.com/s3.hex.pm/tarballs/$1-$(strip $(word 2,$(dep_$1))).tar); \ | |
4796 | https://repo.hex.pm/tarballs/$1-$(strip $(word 2,$(dep_$1))).tar); \ | |
4606 | 4797 | tar -xOf $(ERLANG_MK_TMP)/hex/$1.tar contents.tar.gz | tar -C $(DEPS_DIR)/$1 -xzf -; |
4607 | 4798 | endef |
4608 | 4799 | |
4633 | 4824 | $(eval DEP_NAME := $(call dep_name,$1)) |
4634 | 4825 | $(eval DEP_STR := $(if $(filter-out $1,$(DEP_NAME)),$1,"$1 ($(DEP_NAME))")) |
4635 | 4826 | $(verbose) if test -d $(APPS_DIR)/$(DEP_NAME); then \ |
4636 | echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)."; \ | |
4827 | echo "Error: Dependency" $(DEP_STR) "conflicts with application found in $(APPS_DIR)/$(DEP_NAME)." >&2; \ | |
4637 | 4828 | exit 17; \ |
4638 | 4829 | fi |
4639 | 4830 | $(verbose) mkdir -p $(DEPS_DIR) |
4675 | 4866 | clean:: clean-apps |
4676 | 4867 | |
4677 | 4868 | clean-apps: |
4678 | $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ | |
4679 | $(MAKE) -C $$dep clean IS_APP=1 || exit $$?; \ | |
4869 | $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ | |
4870 | $(MAKE) -C $$dep clean IS_APP=1; \ | |
4680 | 4871 | done |
4681 | 4872 | |
4682 | 4873 | distclean:: distclean-apps |
4683 | 4874 | |
4684 | 4875 | distclean-apps: |
4685 | $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ | |
4686 | $(MAKE) -C $$dep distclean IS_APP=1 || exit $$?; \ | |
4876 | $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ | |
4877 | $(MAKE) -C $$dep distclean IS_APP=1; \ | |
4687 | 4878 | done |
4688 | 4879 | endif |
4689 | 4880 | |
4703 | 4894 | ERLANG_MK_RECURSIVE_TEST_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-test-deps-list.log |
4704 | 4895 | ERLANG_MK_RECURSIVE_SHELL_DEPS_LIST = $(ERLANG_MK_TMP)/recursive-shell-deps-list.log |
4705 | 4896 | |
4706 | # External plugins. | |
4707 | ||
4708 | DEP_PLUGINS ?= | |
4709 | ||
4710 | define core_dep_plugin | |
4711 | -include $(DEPS_DIR)/$(1) | |
4712 | ||
4713 | $(DEPS_DIR)/$(1): $(DEPS_DIR)/$(2) ; | |
4714 | endef | |
4715 | ||
4716 | $(foreach p,$(DEP_PLUGINS),\ | |
4717 | $(eval $(if $(findstring /,$p),\ | |
4718 | $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ | |
4719 | $(call core_dep_plugin,$p/plugins.mk,$p)))) | |
4720 | ||
4721 | # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> | |
4722 | # This file is part of erlang.mk and subject to the terms of the ISC License. | |
4723 | ||
4724 | # Configuration. | |
4725 | ||
4726 | DTL_FULL_PATH ?= | |
4727 | DTL_PATH ?= templates/ | |
4728 | DTL_SUFFIX ?= _dtl | |
4729 | DTL_OPTS ?= | |
4730 | ||
4731 | # Verbosity. | |
4732 | ||
4733 | dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); | |
4734 | dtl_verbose = $(dtl_verbose_$(V)) | |
4735 | ||
4736 | # Core targets. | |
4737 | ||
4738 | DTL_PATH := $(abspath $(DTL_PATH)) | |
4739 | DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl)) | |
4740 | ||
4741 | ifneq ($(DTL_FILES),) | |
4742 | ||
4743 | DTL_NAMES = $(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)) | |
4744 | DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES))) | |
4745 | BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES))) | |
4746 | ||
4747 | ifneq ($(words $(DTL_FILES)),0) | |
4748 | # Rebuild templates when the Makefile changes. | |
4749 | $(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | |
4750 | @mkdir -p $(ERLANG_MK_TMP) | |
4751 | @if test -f $@; then \ | |
4752 | touch $(DTL_FILES); \ | |
4753 | fi | |
4754 | @touch $@ | |
4755 | ||
4756 | ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl | |
4757 | endif | |
4758 | ||
4759 | define erlydtl_compile.erl | |
4760 | [begin | |
4761 | Module0 = case "$(strip $(DTL_FULL_PATH))" of | |
4762 | "" -> | |
4763 | filename:basename(F, ".dtl"); | |
4764 | _ -> | |
4765 | "$(DTL_PATH)/" ++ F2 = filename:rootname(F, ".dtl"), | |
4766 | re:replace(F2, "/", "_", [{return, list}, global]) | |
4767 | end, | |
4768 | Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), | |
4769 | case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of | |
4770 | ok -> ok; | |
4771 | {ok, _} -> ok | |
4772 | end | |
4773 | end || F <- string:tokens("$(1)", " ")], | |
4774 | halt(). | |
4775 | endef | |
4776 | ||
4777 | ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/ | |
4778 | $(if $(strip $?),\ | |
4779 | $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\ | |
4780 | -pa ebin/ $(DEPS_DIR)/erlydtl/ebin/)) | |
4781 | ||
4782 | endif | |
4783 | ||
4784 | 4897 | # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> |
4785 | 4898 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
4786 | 4899 | |
4800 | 4913 | |
4801 | 4914 | define compile_proto.erl |
4802 | 4915 | [begin |
4803 | Dir = filename:dirname(filename:dirname(F)), | |
4804 | 4916 | protobuffs_compile:generate_source(F, |
4805 | [{output_include_dir, Dir ++ "/include"}, | |
4806 | {output_src_dir, Dir ++ "/ebin"}]) | |
4917 | [{output_include_dir, "./include"}, | |
4918 | {output_src_dir, "./ebin"}]) | |
4807 | 4919 | end || F <- string:tokens("$(1)", " ")], |
4808 | 4920 | halt(). |
4809 | 4921 | endef |
4827 | 4939 | ERLC_EXCLUDE ?= |
4828 | 4940 | ERLC_EXCLUDE_PATHS = $(addprefix src/,$(addsuffix .erl,$(ERLC_EXCLUDE))) |
4829 | 4941 | |
4942 | ERLC_ASN1_OPTS ?= | |
4943 | ||
4830 | 4944 | ERLC_MIB_OPTS ?= |
4831 | 4945 | COMPILE_MIB_FIRST ?= |
4832 | 4946 | COMPILE_MIB_FIRST_PATHS = $(addprefix mibs/,$(addsuffix .mib,$(COMPILE_MIB_FIRST))) |
4876 | 4990 | |
4877 | 4991 | ifeq ($(wildcard src/$(PROJECT_MOD).erl),) |
4878 | 4992 | define app_file |
4879 | {application, $(PROJECT), [ | |
4993 | {application, '$(PROJECT)', [ | |
4880 | 4994 | {description, "$(PROJECT_DESCRIPTION)"}, |
4881 | 4995 | {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), |
4882 | 4996 | {id$(comma)$(space)"$(1)"}$(comma)) |
4883 | 4997 | {modules, [$(call comma_list,$(2))]}, |
4884 | 4998 | {registered, []}, |
4885 | {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, | |
4999 | {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]}, | |
4886 | 5000 | {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),) |
4887 | 5001 | ]}. |
4888 | 5002 | endef |
4889 | 5003 | else |
4890 | 5004 | define app_file |
4891 | {application, $(PROJECT), [ | |
5005 | {application, '$(PROJECT)', [ | |
4892 | 5006 | {description, "$(PROJECT_DESCRIPTION)"}, |
4893 | 5007 | {vsn, "$(PROJECT_VERSION)"},$(if $(IS_DEP), |
4894 | 5008 | {id$(comma)$(space)"$(1)"}$(comma)) |
4895 | 5009 | {modules, [$(call comma_list,$(2))]}, |
4896 | 5010 | {registered, [$(call comma_list,$(PROJECT)_sup $(PROJECT_REGISTERED))]}, |
4897 | {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS))]}, | |
5011 | {applications, [$(call comma_list,kernel stdlib $(OTP_DEPS) $(LOCAL_DEPS) $(foreach dep,$(DEPS),$(call dep_name,$(dep))))]}, | |
4898 | 5012 | {mod, {$(PROJECT_MOD), []}}, |
4899 | 5013 | {env, $(subst \,\\,$(PROJECT_ENV))}$(if $(findstring {,$(PROJECT_APP_EXTRA_KEYS)),$(comma)$(newline)$(tab)$(subst \,\\,$(PROJECT_APP_EXTRA_KEYS)),) |
4900 | 5014 | ]}. |
4919 | 5033 | |
4920 | 5034 | define compile_asn1 |
4921 | 5035 | $(verbose) mkdir -p include/ |
4922 | $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(1) | |
5036 | $(asn1_verbose) erlc -v -I include/ -o asn1/ +noobj $(ERLC_ASN1_OPTS) $(1) | |
4923 | 5037 | $(verbose) mv asn1/*.erl src/ |
4924 | 5038 | $(verbose) mv asn1/*.hrl include/ |
4925 | 5039 | $(verbose) mv asn1/*.asn1db include/ |
4959 | 5073 | E = ets:new(makedep, [bag]), |
4960 | 5074 | G = digraph:new([acyclic]), |
4961 | 5075 | ErlFiles = lists:usort(string:tokens("$(ERL_FILES)", " ")), |
5076 | DepsDir = "$(call core_native_path,$(DEPS_DIR))", | |
5077 | AppsDir = "$(call core_native_path,$(APPS_DIR))", | |
5078 | DepsDirsSrc = "$(if $(wildcard $(DEPS_DIR)/*/src), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/src)))", | |
5079 | DepsDirsInc = "$(if $(wildcard $(DEPS_DIR)/*/include), $(call core_native_path,$(wildcard $(DEPS_DIR)/*/include)))", | |
5080 | AppsDirsSrc = "$(if $(wildcard $(APPS_DIR)/*/src), $(call core_native_path,$(wildcard $(APPS_DIR)/*/src)))", | |
5081 | AppsDirsInc = "$(if $(wildcard $(APPS_DIR)/*/include), $(call core_native_path,$(wildcard $(APPS_DIR)/*/include)))", | |
5082 | DepsDirs = lists:usort(string:tokens(DepsDirsSrc++DepsDirsInc, " ")), | |
5083 | AppsDirs = lists:usort(string:tokens(AppsDirsSrc++AppsDirsInc, " ")), | |
4962 | 5084 | Modules = [{list_to_atom(filename:basename(F, ".erl")), F} || F <- ErlFiles], |
4963 | 5085 | Add = fun (Mod, Dep) -> |
4964 | 5086 | case lists:keyfind(Dep, 1, Modules) of |
4973 | 5095 | end, |
4974 | 5096 | AddHd = fun (F, Mod, DepFile) -> |
4975 | 5097 | case file:open(DepFile, [read]) of |
4976 | {error, enoent} -> ok; | |
5098 | {error, enoent} -> | |
5099 | ok; | |
4977 | 5100 | {ok, Fd} -> |
4978 | F(F, Fd, Mod), | |
4979 | 5101 | {_, ModFile} = lists:keyfind(Mod, 1, Modules), |
4980 | ets:insert(E, {ModFile, DepFile}) | |
5102 | case ets:match(E, {ModFile, DepFile}) of | |
5103 | [] -> | |
5104 | ets:insert(E, {ModFile, DepFile}), | |
5105 | F(F, Fd, Mod,0); | |
5106 | _ -> ok | |
5107 | end | |
4981 | 5108 | end |
4982 | 5109 | end, |
5110 | SearchHrl = fun | |
5111 | F(_Hrl, []) -> {error,enoent}; | |
5112 | F(Hrl, [Dir|Dirs]) -> | |
5113 | HrlF = filename:join([Dir,Hrl]), | |
5114 | case filelib:is_file(HrlF) of | |
5115 | true -> | |
5116 | {ok, HrlF}; | |
5117 | false -> F(Hrl,Dirs) | |
5118 | end | |
5119 | end, | |
4983 | 5120 | Attr = fun |
4984 | (F, Mod, behavior, Dep) -> Add(Mod, Dep); | |
4985 | (F, Mod, behaviour, Dep) -> Add(Mod, Dep); | |
4986 | (F, Mod, compile, {parse_transform, Dep}) -> Add(Mod, Dep); | |
4987 | (F, Mod, compile, Opts) when is_list(Opts) -> | |
5121 | (_F, Mod, behavior, Dep) -> | |
5122 | Add(Mod, Dep); | |
5123 | (_F, Mod, behaviour, Dep) -> | |
5124 | Add(Mod, Dep); | |
5125 | (_F, Mod, compile, {parse_transform, Dep}) -> | |
5126 | Add(Mod, Dep); | |
5127 | (_F, Mod, compile, Opts) when is_list(Opts) -> | |
4988 | 5128 | case proplists:get_value(parse_transform, Opts) of |
4989 | 5129 | undefined -> ok; |
4990 | 5130 | Dep -> Add(Mod, Dep) |
4991 | 5131 | end; |
4992 | 5132 | (F, Mod, include, Hrl) -> |
4993 | case filelib:is_file("include/" ++ Hrl) of | |
4994 | true -> AddHd(F, Mod, "include/" ++ Hrl); | |
4995 | false -> | |
4996 | case filelib:is_file("src/" ++ Hrl) of | |
4997 | true -> AddHd(F, Mod, "src/" ++ Hrl); | |
4998 | false -> false | |
4999 | end | |
5133 | case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of | |
5134 | {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl); | |
5135 | {error, _} -> false | |
5000 | 5136 | end; |
5001 | (F, Mod, include_lib, "$1/include/" ++ Hrl) -> AddHd(F, Mod, "include/" ++ Hrl); | |
5002 | (F, Mod, include_lib, Hrl) -> AddHd(F, Mod, "include/" ++ Hrl); | |
5137 | (F, Mod, include_lib, Hrl) -> | |
5138 | case SearchHrl(Hrl, ["src", "include",AppsDir,DepsDir]++AppsDirs++DepsDirs) of | |
5139 | {ok, FoundHrl} -> AddHd(F, Mod, FoundHrl); | |
5140 | {error, _} -> false | |
5141 | end; | |
5003 | 5142 | (F, Mod, import, {Imp, _}) -> |
5004 | case filelib:is_file("src/" ++ atom_to_list(Imp) ++ ".erl") of | |
5143 | IsFile = | |
5144 | case lists:keyfind(Imp, 1, Modules) of | |
5145 | false -> false; | |
5146 | {_, FilePath} -> filelib:is_file(FilePath) | |
5147 | end, | |
5148 | case IsFile of | |
5005 | 5149 | false -> ok; |
5006 | 5150 | true -> Add(Mod, Imp) |
5007 | 5151 | end; |
5008 | 5152 | (_, _, _, _) -> ok |
5009 | 5153 | end, |
5010 | MakeDepend = fun(F, Fd, Mod) -> | |
5011 | case io:parse_erl_form(Fd, undefined) of | |
5012 | {ok, {attribute, _, Key, Value}, _} -> | |
5013 | Attr(F, Mod, Key, Value), | |
5014 | F(F, Fd, Mod); | |
5015 | {eof, _} -> | |
5016 | file:close(Fd); | |
5017 | _ -> | |
5018 | F(F, Fd, Mod) | |
5019 | end | |
5154 | MakeDepend = fun | |
5155 | (F, Fd, Mod, StartLocation) -> | |
5156 | {ok, Filename} = file:pid2name(Fd), | |
5157 | case io:parse_erl_form(Fd, undefined, StartLocation) of | |
5158 | {ok, AbsData, EndLocation} -> | |
5159 | case AbsData of | |
5160 | {attribute, _, Key, Value} -> | |
5161 | Attr(F, Mod, Key, Value), | |
5162 | F(F, Fd, Mod, EndLocation); | |
5163 | _ -> F(F, Fd, Mod, EndLocation) | |
5164 | end; | |
5165 | {eof, _ } -> file:close(Fd); | |
5166 | {error, ErrorDescription } -> | |
5167 | file:close(Fd); | |
5168 | {error, ErrorInfo, ErrorLocation} -> | |
5169 | F(F, Fd, Mod, ErrorLocation) | |
5170 | end, | |
5171 | ok | |
5020 | 5172 | end, |
5021 | 5173 | [begin |
5022 | 5174 | Mod = list_to_atom(filename:basename(F, ".erl")), |
5023 | 5175 | {ok, Fd} = file:open(F, [read]), |
5024 | MakeDepend(MakeDepend, Fd, Mod) | |
5176 | MakeDepend(MakeDepend, Fd, Mod,0) | |
5025 | 5177 | end || F <- ErlFiles], |
5026 | 5178 | Depend = sofs:to_external(sofs:relation_to_family(sofs:relation(ets:tab2list(E)))), |
5027 | 5179 | CompileFirst = [X || X <- lists:reverse(digraph_utils:topsort(G)), [] =/= digraph:in_neighbours(G, X)], |
5180 | TargetPath = fun(Target) -> | |
5181 | case lists:keyfind(Target, 1, Modules) of | |
5182 | false -> ""; | |
5183 | {_, DepFile} -> | |
5184 | DirSubname = tl(string:tokens(filename:dirname(DepFile), "/")), | |
5185 | string:join(DirSubname ++ [atom_to_list(Target)], "/") | |
5186 | end | |
5187 | end, | |
5028 | 5188 | ok = file:write_file("$(1)", [ |
5029 | 5189 | [[F, "::", [[" ", D] || D <- Deps], "; @touch \$$@\n"] || {F, Deps} <- Depend], |
5030 | "\nCOMPILE_FIRST +=", [[" ", atom_to_list(CF)] || CF <- CompileFirst], "\n" | |
5190 | "\nCOMPILE_FIRST +=", [[" ", TargetPath(CF)] || CF <- CompileFirst], "\n" | |
5031 | 5191 | ]), |
5032 | 5192 | halt() |
5033 | 5193 | endef |
5051 | 5211 | ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change |
5052 | 5212 | endif |
5053 | 5213 | |
5054 | -include $(PROJECT).d | |
5214 | include $(wildcard $(PROJECT).d) | |
5055 | 5215 | |
5056 | 5216 | ebin/$(PROJECT).app:: ebin/ |
5057 | 5217 | |
5112 | 5272 | doc-deps: |
5113 | 5273 | else |
5114 | 5274 | doc-deps: $(ALL_DOC_DEPS_DIRS) |
5115 | $(verbose) for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done | |
5275 | $(verbose) set -e; for dep in $(ALL_DOC_DEPS_DIRS) ; do $(MAKE) -C $$dep; done | |
5116 | 5276 | endif |
5117 | 5277 | |
5118 | 5278 | # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> |
5132 | 5292 | rel-deps: |
5133 | 5293 | else |
5134 | 5294 | rel-deps: $(ALL_REL_DEPS_DIRS) |
5135 | $(verbose) for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done | |
5295 | $(verbose) set -e; for dep in $(ALL_REL_DEPS_DIRS) ; do $(MAKE) -C $$dep; done | |
5136 | 5296 | endif |
5137 | 5297 | |
5138 | 5298 | # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> |
5157 | 5317 | test-deps: |
5158 | 5318 | else |
5159 | 5319 | test-deps: $(ALL_TEST_DEPS_DIRS) |
5160 | $(verbose) for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done | |
5320 | $(verbose) set -e; for dep in $(ALL_TEST_DEPS_DIRS) ; do $(MAKE) -C $$dep IS_DEP=1; done | |
5161 | 5321 | endif |
5162 | 5322 | |
5163 | 5323 | ifneq ($(wildcard $(TEST_DIR)),) |
5169 | 5329 | ifeq ($(wildcard src),) |
5170 | 5330 | test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) |
5171 | 5331 | test-build:: clean deps test-deps |
5172 | $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" | |
5332 | $(verbose) $(MAKE) --no-print-directory test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" | |
5173 | 5333 | else |
5174 | 5334 | ifeq ($(wildcard ebin/test),) |
5175 | 5335 | test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) |
5176 | 5336 | test-build:: clean deps test-deps $(PROJECT).d |
5177 | $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" | |
5337 | $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" | |
5178 | 5338 | $(gen_verbose) touch ebin/test |
5179 | 5339 | else |
5180 | 5340 | test-build:: ERLC_OPTS=$(TEST_ERLC_OPTS) |
5181 | 5341 | test-build:: deps test-deps $(PROJECT).d |
5182 | $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(TEST_ERLC_OPTS)" | |
5342 | $(verbose) $(MAKE) --no-print-directory app-build test-dir ERLC_OPTS="$(call escape_dquotes,$(TEST_ERLC_OPTS))" | |
5183 | 5343 | endif |
5184 | 5344 | |
5185 | 5345 | clean:: clean-test-dir |
5276 | 5436 | define asciidoc2man.erl |
5277 | 5437 | try |
5278 | 5438 | [begin |
5439 | io:format(" ADOC ~s~n", [F]), | |
5279 | 5440 | ok = asciideck:to_manpage(asciideck:parse_file(F), #{ |
5280 | 5441 | compress => gzip, |
5281 | 5442 | outdir => filename:dirname(F), |
5284 | 5445 | }) |
5285 | 5446 | end || F <- [$(shell echo $(addprefix $(comma)\",$(addsuffix \",$1)) | sed 's/^.//')]], |
5286 | 5447 | halt(0) |
5287 | catch _:_ -> | |
5448 | catch C:E -> | |
5449 | io:format("Exception ~p:~p~nStacktrace: ~p~n", [C, E, erlang:get_stacktrace()]), | |
5288 | 5450 | halt(1) |
5289 | 5451 | end. |
5290 | 5452 | endef |
5300 | 5462 | install-asciidoc: asciidoc-manual |
5301 | 5463 | $(foreach s,$(MAN_SECTIONS),\ |
5302 | 5464 | mkdir -p $(MAN_INSTALL_PATH)/man$s/ && \ |
5303 | install -g `id -u` -o `id -g` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;) | |
5465 | install -g `id -g` -o `id -u` -m 0644 doc/man$s/*.gz $(MAN_INSTALL_PATH)/man$s/;) | |
5304 | 5466 | |
5305 | 5467 | distclean-asciidoc-manual: |
5306 | 5468 | $(gen_verbose) rm -rf $(addprefix doc/man,$(MAN_SECTIONS)) |
5561 | 5723 | {ok, StateName, StateData}. |
5562 | 5724 | endef |
5563 | 5725 | |
5726 | define tpl_gen_statem | |
5727 | -module($(n)). | |
5728 | -behaviour(gen_statem). | |
5729 | ||
5730 | %% API. | |
5731 | -export([start_link/0]). | |
5732 | ||
5733 | %% gen_statem. | |
5734 | -export([callback_mode/0]). | |
5735 | -export([init/1]). | |
5736 | -export([state_name/3]). | |
5737 | -export([handle_event/4]). | |
5738 | -export([terminate/3]). | |
5739 | -export([code_change/4]). | |
5740 | ||
5741 | -record(state, { | |
5742 | }). | |
5743 | ||
5744 | %% API. | |
5745 | ||
5746 | -spec start_link() -> {ok, pid()}. | |
5747 | start_link() -> | |
5748 | gen_statem:start_link(?MODULE, [], []). | |
5749 | ||
5750 | %% gen_statem. | |
5751 | ||
5752 | callback_mode() -> | |
5753 | state_functions. | |
5754 | ||
5755 | init([]) -> | |
5756 | {ok, state_name, #state{}}. | |
5757 | ||
5758 | state_name(_EventType, _EventData, StateData) -> | |
5759 | {next_state, state_name, StateData}. | |
5760 | ||
5761 | handle_event(_EventType, _EventData, StateName, StateData) -> | |
5762 | {next_state, StateName, StateData}. | |
5763 | ||
5764 | terminate(_Reason, _StateName, _StateData) -> | |
5765 | ok. | |
5766 | ||
5767 | code_change(_OldVsn, StateName, StateData, _Extra) -> | |
5768 | {ok, StateName, StateData}. | |
5769 | endef | |
5770 | ||
5564 | 5771 | define tpl_cowboy_loop |
5565 | 5772 | -module($(n)). |
5566 | 5773 | -behaviour(cowboy_loop_handler). |
5753 | 5960 | ifndef t |
5754 | 5961 | $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) |
5755 | 5962 | endif |
5756 | ifndef tpl_$(t) | |
5757 | $(error Unknown template) | |
5758 | endif | |
5759 | 5963 | ifndef n |
5760 | 5964 | $(error Usage: $(MAKE) new t=TEMPLATE n=NAME [in=APP]) |
5761 | 5965 | endif |
5762 | 5966 | ifdef in |
5763 | $(verbose) $(MAKE) -C $(APPS_DIR)/$(in)/ new t=$t n=$n in= | |
5967 | $(call render_template,tpl_$(t),$(APPS_DIR)/$(in)/src/$(n).erl) | |
5764 | 5968 | else |
5765 | 5969 | $(call render_template,tpl_$(t),src/$(n).erl) |
5766 | 5970 | endif |
5767 | 5971 | |
5768 | 5972 | list-templates: |
5769 | $(verbose) echo Available templates: $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) | |
5973 | $(verbose) @echo Available templates: | |
5974 | $(verbose) printf " %s\n" $(sort $(patsubst tpl_%,%,$(filter tpl_%,$(.VARIABLES)))) | |
5770 | 5975 | |
5771 | 5976 | # Copyright (c) 2014-2016, Loïc Hoguin <essen@ninenines.eu> |
5772 | 5977 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
6003 | 6208 | $(call render_template,bs_erl_nif,src/$n.erl) |
6004 | 6209 | endif |
6005 | 6210 | |
6006 | # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> | |
6211 | # Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> | |
6007 | 6212 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
6008 | 6213 | |
6009 | .PHONY: ci ci-prepare ci-setup distclean-kerl | |
6214 | .PHONY: ci ci-prepare ci-setup | |
6010 | 6215 | |
6011 | 6216 | CI_OTP ?= |
6012 | 6217 | CI_HIPE ?= |
6024 | 6229 | ci:: |
6025 | 6230 | else |
6026 | 6231 | |
6027 | ifeq ($(strip $(KERL)),) | |
6028 | KERL := $(ERLANG_MK_TMP)/kerl/kerl | |
6029 | endif | |
6030 | ||
6031 | export KERL | |
6032 | ||
6033 | KERL_GIT ?= https://github.com/kerl/kerl | |
6034 | KERL_COMMIT ?= master | |
6035 | ||
6036 | KERL_MAKEFLAGS ?= | |
6037 | ||
6038 | OTP_GIT ?= https://github.com/erlang/otp | |
6039 | ||
6040 | CI_INSTALL_DIR ?= $(HOME)/erlang | |
6041 | ||
6042 | 6232 | ci:: $(addprefix ci-,$(CI_OTP) $(addsuffix -native,$(CI_HIPE)) $(addsuffix -erllvm,$(CI_ERLLVM))) |
6043 | 6233 | |
6044 | ci-prepare: $(addprefix $(CI_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE))) | |
6234 | ci-prepare: $(addprefix $(KERL_INSTALL_DIR)/,$(CI_OTP) $(addsuffix -native,$(CI_HIPE))) | |
6045 | 6235 | |
6046 | 6236 | ci-setup:: |
6047 | 6237 | |
6051 | 6241 | ci_verbose = $(ci_verbose_$(V)) |
6052 | 6242 | |
6053 | 6243 | define ci_target |
6054 | ci-$1: $(CI_INSTALL_DIR)/$2 | |
6244 | ci-$1: $(KERL_INSTALL_DIR)/$2 | |
6055 | 6245 | $(verbose) $(MAKE) --no-print-directory clean |
6056 | 6246 | $(ci_verbose) \ |
6057 | PATH="$(CI_INSTALL_DIR)/$2/bin:$(PATH)" \ | |
6247 | PATH="$(KERL_INSTALL_DIR)/$2/bin:$(PATH)" \ | |
6058 | 6248 | CI_OTP_RELEASE="$1" \ |
6059 | 6249 | CT_OPTS="-label $1" \ |
6060 | 6250 | CI_VM="$3" \ |
6066 | 6256 | $(foreach otp,$(CI_HIPE),$(eval $(call ci_target,$(otp)-native,$(otp)-native,native))) |
6067 | 6257 | $(foreach otp,$(CI_ERLLVM),$(eval $(call ci_target,$(otp)-erllvm,$(otp)-native,erllvm))) |
6068 | 6258 | |
6069 | define ci_otp_target | |
6070 | ifeq ($(wildcard $(CI_INSTALL_DIR)/$(1)),) | |
6071 | $(CI_INSTALL_DIR)/$(1): $(KERL) | |
6072 | MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $(1) $(1) | |
6073 | $(KERL) install $(1) $(CI_INSTALL_DIR)/$(1) | |
6074 | endif | |
6075 | endef | |
6076 | ||
6077 | $(foreach otp,$(CI_OTP),$(eval $(call ci_otp_target,$(otp)))) | |
6078 | ||
6079 | define ci_hipe_target | |
6080 | ifeq ($(wildcard $(CI_INSTALL_DIR)/$1-native),) | |
6081 | $(CI_INSTALL_DIR)/$1-native: $(KERL) | |
6082 | KERL_CONFIGURE_OPTIONS=--enable-native-libs \ | |
6083 | MAKEFLAGS="$(KERL_MAKEFLAGS)" $(KERL) build git $(OTP_GIT) $1 $1-native | |
6084 | $(KERL) install $1-native $(CI_INSTALL_DIR)/$1-native | |
6085 | endif | |
6086 | endef | |
6087 | ||
6088 | $(foreach otp,$(sort $(CI_HIPE) $(CI_ERLLLVM)),$(eval $(call ci_hipe_target,$(otp)))) | |
6089 | ||
6090 | $(KERL): | |
6091 | $(verbose) mkdir -p $(ERLANG_MK_TMP) | |
6092 | $(gen_verbose) git clone --depth 1 $(KERL_GIT) $(ERLANG_MK_TMP)/kerl | |
6093 | $(verbose) cd $(ERLANG_MK_TMP)/kerl && git checkout $(KERL_COMMIT) | |
6094 | $(verbose) chmod +x $(KERL) | |
6259 | $(foreach otp,$(CI_OTP),$(eval $(call kerl_otp_target,$(otp)))) | |
6260 | $(foreach otp,$(sort $(CI_HIPE) $(CI_ERLLLVM)),$(eval $(call kerl_hipe_target,$(otp)))) | |
6095 | 6261 | |
6096 | 6262 | help:: |
6097 | 6263 | $(verbose) printf "%s\n" "" \ |
6101 | 6267 | "The CI_OTP variable must be defined with the Erlang versions" \ |
6102 | 6268 | "that must be tested. For example: CI_OTP = OTP-17.3.4 OTP-17.5.3" |
6103 | 6269 | |
6104 | distclean:: distclean-kerl | |
6105 | ||
6106 | distclean-kerl: | |
6107 | $(gen_verbose) rm -rf $(KERL) | |
6108 | 6270 | endif |
6109 | 6271 | |
6110 | 6272 | # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> |
6122 | 6284 | endif |
6123 | 6285 | endif |
6124 | 6286 | CT_SUITES ?= |
6287 | CT_LOGS_DIR ?= $(CURDIR)/logs | |
6125 | 6288 | |
6126 | 6289 | # Core targets. |
6127 | 6290 | |
6144 | 6307 | -noinput \ |
6145 | 6308 | -pa $(CURDIR)/ebin $(DEPS_DIR)/*/ebin $(APPS_DIR)/*/ebin $(TEST_DIR) \ |
6146 | 6309 | -dir $(TEST_DIR) \ |
6147 | -logdir $(CURDIR)/logs | |
6310 | -logdir $(CT_LOGS_DIR) | |
6148 | 6311 | |
6149 | 6312 | ifeq ($(CT_SUITES),) |
6150 | 6313 | ct: $(if $(IS_APP),,apps-ct) |
6151 | 6314 | else |
6315 | # We do not run tests if we are in an apps/* with no test directory. | |
6316 | ifneq ($(IS_APP)$(wildcard $(TEST_DIR)),1) | |
6152 | 6317 | ct: test-build $(if $(IS_APP),,apps-ct) |
6153 | $(verbose) mkdir -p $(CURDIR)/logs/ | |
6318 | $(verbose) mkdir -p $(CT_LOGS_DIR) | |
6154 | 6319 | $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(CT_SUITES)) $(CT_OPTS) |
6320 | endif | |
6155 | 6321 | endif |
6156 | 6322 | |
6157 | 6323 | ifneq ($(ALL_APPS_DIRS),) |
6178 | 6344 | |
6179 | 6345 | define ct_suite_target |
6180 | 6346 | ct-$(1): test-build |
6181 | $(verbose) mkdir -p $(CURDIR)/logs/ | |
6347 | $(verbose) mkdir -p $(CT_LOGS_DIR) | |
6182 | 6348 | $(gen_verbose) $(CT_RUN) -sname ct_$(PROJECT) -suite $(addsuffix _SUITE,$(1)) $(CT_EXTRA) $(CT_OPTS) |
6183 | 6349 | endef |
6184 | 6350 | |
6185 | 6351 | $(foreach test,$(CT_SUITES),$(eval $(call ct_suite_target,$(test)))) |
6186 | 6352 | |
6187 | 6353 | distclean-ct: |
6188 | $(gen_verbose) rm -rf $(CURDIR)/logs/ | |
6354 | $(gen_verbose) rm -rf $(CT_LOGS_DIR) | |
6189 | 6355 | |
6190 | 6356 | # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> |
6191 | 6357 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
6200 | 6366 | PLT_APPS ?= |
6201 | 6367 | DIALYZER_DIRS ?= --src -r $(wildcard src) $(ALL_APPS_DIRS) |
6202 | 6368 | DIALYZER_OPTS ?= -Werror_handling -Wrace_conditions -Wunmatched_returns # -Wunderspecs |
6369 | DIALYZER_PLT_OPTS ?= | |
6203 | 6370 | |
6204 | 6371 | # Core targets. |
6205 | 6372 | |
6231 | 6398 | endef |
6232 | 6399 | |
6233 | 6400 | $(DIALYZER_PLT): deps app |
6234 | $(verbose) dialyzer --build_plt --apps erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) \ | |
6235 | `test -f $(ERLANG_MK_TMP)/deps.log && cat $(ERLANG_MK_TMP)/deps.log` | |
6401 | $(eval DEPS_LOG := $(shell test -f $(ERLANG_MK_TMP)/deps.log && \ | |
6402 | while read p; do test -d $$p/ebin && echo $$p/ebin; done <$(ERLANG_MK_TMP)/deps.log)) | |
6403 | $(verbose) dialyzer --build_plt $(DIALYZER_PLT_OPTS) --apps \ | |
6404 | erts kernel stdlib $(PLT_APPS) $(OTP_DEPS) $(LOCAL_DEPS) $(DEPS_LOG) | |
6236 | 6405 | |
6237 | 6406 | plt: $(DIALYZER_PLT) |
6238 | 6407 | |
6244 | 6413 | else |
6245 | 6414 | dialyze: $(DIALYZER_PLT) |
6246 | 6415 | endif |
6247 | $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(subst ",\",$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) | |
6416 | $(verbose) dialyzer --no_native `$(ERL) -eval "$(subst $(newline),,$(call escape_dquotes,$(call filter_opts.erl)))" -extra $(ERLC_OPTS)` $(DIALYZER_DIRS) $(DIALYZER_OPTS) | |
6248 | 6417 | |
6249 | 6418 | # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> |
6250 | 6419 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
6254 | 6423 | # Configuration. |
6255 | 6424 | |
6256 | 6425 | EDOC_OPTS ?= |
6426 | EDOC_SRC_DIRS ?= | |
6427 | EDOC_OUTPUT ?= doc | |
6428 | ||
6429 | define edoc.erl | |
6430 | SrcPaths = lists:foldl(fun(P, Acc) -> | |
6431 | filelib:wildcard(atom_to_list(P) ++ "/{src,c_src}") ++ Acc | |
6432 | end, [], [$(call comma_list,$(patsubst %,'%',$(EDOC_SRC_DIRS)))]), | |
6433 | DefaultOpts = [{dir, "$(EDOC_OUTPUT)"}, {source_path, SrcPaths}, {subpackages, false}], | |
6434 | edoc:application($(1), ".", [$(2)] ++ DefaultOpts), | |
6435 | halt(0). | |
6436 | endef | |
6257 | 6437 | |
6258 | 6438 | # Core targets. |
6259 | 6439 | |
6260 | ifneq ($(wildcard doc/overview.edoc),) | |
6440 | ifneq ($(strip $(EDOC_SRC_DIRS)$(wildcard doc/overview.edoc)),) | |
6261 | 6441 | docs:: edoc |
6262 | 6442 | endif |
6263 | 6443 | |
6266 | 6446 | # Plugin-specific targets. |
6267 | 6447 | |
6268 | 6448 | edoc: distclean-edoc doc-deps |
6269 | $(gen_verbose) $(ERL) -eval 'edoc:application($(PROJECT), ".", [$(EDOC_OPTS)]), halt().' | |
6449 | $(gen_verbose) $(call erlang,$(call edoc.erl,$(PROJECT),$(EDOC_OPTS))) | |
6270 | 6450 | |
6271 | 6451 | distclean-edoc: |
6272 | $(gen_verbose) rm -f doc/*.css doc/*.html doc/*.png doc/edoc-info | |
6452 | $(gen_verbose) rm -f $(EDOC_OUTPUT)/*.css $(EDOC_OUTPUT)/*.html $(EDOC_OUTPUT)/*.png $(EDOC_OUTPUT)/edoc-info | |
6453 | ||
6454 | # Copyright (c) 2013-2016, Loïc Hoguin <essen@ninenines.eu> | |
6455 | # This file is part of erlang.mk and subject to the terms of the ISC License. | |
6456 | ||
6457 | # Configuration. | |
6458 | ||
6459 | DTL_FULL_PATH ?= | |
6460 | DTL_PATH ?= templates/ | |
6461 | DTL_SUFFIX ?= _dtl | |
6462 | DTL_OPTS ?= | |
6463 | ||
6464 | # Verbosity. | |
6465 | ||
6466 | dtl_verbose_0 = @echo " DTL " $(filter %.dtl,$(?F)); | |
6467 | dtl_verbose = $(dtl_verbose_$(V)) | |
6468 | ||
6469 | # Core targets. | |
6470 | ||
6471 | DTL_PATH := $(abspath $(DTL_PATH)) | |
6472 | DTL_FILES := $(sort $(call core_find,$(DTL_PATH),*.dtl)) | |
6473 | ||
6474 | ifneq ($(DTL_FILES),) | |
6475 | ||
6476 | DTL_NAMES = $(addsuffix $(DTL_SUFFIX),$(DTL_FILES:$(DTL_PATH)/%.dtl=%)) | |
6477 | DTL_MODULES = $(if $(DTL_FULL_PATH),$(subst /,_,$(DTL_NAMES)),$(notdir $(DTL_NAMES))) | |
6478 | BEAM_FILES += $(addsuffix .beam,$(addprefix ebin/,$(DTL_MODULES))) | |
6479 | ||
6480 | ifneq ($(words $(DTL_FILES)),0) | |
6481 | # Rebuild templates when the Makefile changes. | |
6482 | $(ERLANG_MK_TMP)/last-makefile-change-erlydtl: $(MAKEFILE_LIST) | |
6483 | @mkdir -p $(ERLANG_MK_TMP) | |
6484 | @if test -f $@; then \ | |
6485 | touch $(DTL_FILES); \ | |
6486 | fi | |
6487 | @touch $@ | |
6488 | ||
6489 | ebin/$(PROJECT).app:: $(ERLANG_MK_TMP)/last-makefile-change-erlydtl | |
6490 | endif | |
6491 | ||
6492 | define erlydtl_compile.erl | |
6493 | [begin | |
6494 | Module0 = case "$(strip $(DTL_FULL_PATH))" of | |
6495 | "" -> | |
6496 | filename:basename(F, ".dtl"); | |
6497 | _ -> | |
6498 | "$(DTL_PATH)/" ++ F2 = filename:rootname(F, ".dtl"), | |
6499 | re:replace(F2, "/", "_", [{return, list}, global]) | |
6500 | end, | |
6501 | Module = list_to_atom(string:to_lower(Module0) ++ "$(DTL_SUFFIX)"), | |
6502 | case erlydtl:compile(F, Module, [$(DTL_OPTS)] ++ [{out_dir, "ebin/"}, return_errors]) of | |
6503 | ok -> ok; | |
6504 | {ok, _} -> ok | |
6505 | end | |
6506 | end || F <- string:tokens("$(1)", " ")], | |
6507 | halt(). | |
6508 | endef | |
6509 | ||
6510 | ebin/$(PROJECT).app:: $(DTL_FILES) | ebin/ | |
6511 | $(if $(strip $?),\ | |
6512 | $(dtl_verbose) $(call erlang,$(call erlydtl_compile.erl,$(call core_native_path,$?)),\ | |
6513 | -pa ebin/ $(DEPS_DIR)/erlydtl/ebin/)) | |
6514 | ||
6515 | endif | |
6273 | 6516 | |
6274 | 6517 | # Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> |
6275 | 6518 | # Copyright (c) 2014, Dave Cottlehuber <dch@skunkwerks.at> |
6318 | 6561 | $(verbose) chmod +x $(ESCRIPT_FILE) |
6319 | 6562 | |
6320 | 6563 | distclean-escript: |
6321 | $(gen_verbose) rm -f $(ESCRIPT_NAME) | |
6564 | $(gen_verbose) rm -f $(ESCRIPT_FILE) | |
6322 | 6565 | |
6323 | 6566 | # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> |
6324 | 6567 | # Copyright (c) 2014, Enrique Fernandez <enrique.fernandez@erlang-solutions.com> |
6343 | 6586 | # Plugin-specific targets. |
6344 | 6587 | |
6345 | 6588 | define eunit.erl |
6346 | case "$(COVER)" of | |
6347 | "" -> ok; | |
6589 | Enabled = case "$(COVER)" of | |
6590 | "" -> false; | |
6348 | 6591 | _ -> |
6349 | case cover:compile_beam_directory("ebin") of | |
6350 | {error, _} -> halt(1); | |
6351 | _ -> ok | |
6592 | case filelib:is_dir("ebin") of | |
6593 | false -> false; | |
6594 | true -> | |
6595 | case cover:compile_beam_directory("ebin") of | |
6596 | {error, _} -> halt(1); | |
6597 | _ -> true | |
6598 | end | |
6352 | 6599 | end |
6353 | 6600 | end, |
6354 | 6601 | case eunit:test($1, [$(EUNIT_OPTS)]) of |
6355 | 6602 | ok -> ok; |
6356 | 6603 | error -> halt(2) |
6357 | 6604 | end, |
6358 | case "$(COVER)" of | |
6359 | "" -> ok; | |
6605 | case {Enabled, "$(COVER)"} of | |
6606 | {false, _} -> ok; | |
6607 | {_, ""} -> ok; | |
6360 | 6608 | _ -> |
6361 | cover:export("eunit.coverdata") | |
6609 | cover:export("$(COVER_DATA_DIR)/eunit.coverdata") | |
6362 | 6610 | end, |
6363 | 6611 | halt() |
6364 | 6612 | endef |
6367 | 6615 | |
6368 | 6616 | ifdef t |
6369 | 6617 | ifeq (,$(findstring :,$(t))) |
6370 | eunit: test-build | |
6618 | eunit: test-build cover-data-dir | |
6371 | 6619 | $(gen_verbose) $(call erlang,$(call eunit.erl,['$(t)']),$(EUNIT_ERL_OPTS)) |
6372 | 6620 | else |
6373 | eunit: test-build | |
6621 | eunit: test-build cover-data-dir | |
6374 | 6622 | $(gen_verbose) $(call erlang,$(call eunit.erl,fun $(t)/0),$(EUNIT_ERL_OPTS)) |
6375 | 6623 | endif |
6376 | 6624 | else |
6380 | 6628 | EUNIT_MODS = $(foreach mod,$(EUNIT_EBIN_MODS) $(filter-out \ |
6381 | 6629 | $(patsubst %,%_tests,$(EUNIT_EBIN_MODS)),$(EUNIT_TEST_MODS)),'$(mod)') |
6382 | 6630 | |
6383 | eunit: test-build $(if $(IS_APP),,apps-eunit) | |
6631 | eunit: test-build $(if $(IS_APP),,apps-eunit) cover-data-dir | |
6384 | 6632 | $(gen_verbose) $(call erlang,$(call eunit.erl,[$(call comma_list,$(EUNIT_MODS))]),$(EUNIT_ERL_OPTS)) |
6385 | 6633 | |
6386 | 6634 | ifneq ($(ALL_APPS_DIRS),) |
6387 | 6635 | apps-eunit: |
6388 | $(verbose) for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; done | |
6636 | $(verbose) eunit_retcode=0 ; for app in $(ALL_APPS_DIRS); do $(MAKE) -C $$app eunit IS_APP=1; \ | |
6637 | [ $$? -ne 0 ] && eunit_retcode=1 ; done ; \ | |
6638 | exit $$eunit_retcode | |
6639 | endif | |
6640 | endif | |
6641 | ||
6642 | # Copyright (c) 2015-2017, Loïc Hoguin <essen@ninenines.eu> | |
6643 | # This file is part of erlang.mk and subject to the terms of the ISC License. | |
6644 | ||
6645 | ifeq ($(filter proper,$(DEPS) $(TEST_DEPS)),proper) | |
6646 | .PHONY: proper | |
6647 | ||
6648 | # Targets. | |
6649 | ||
6650 | tests:: proper | |
6651 | ||
6652 | define proper_check.erl | |
6653 | code:add_pathsa([ | |
6654 | "$(call core_native_path,$(CURDIR)/ebin)", | |
6655 | "$(call core_native_path,$(DEPS_DIR)/*/ebin)", | |
6656 | "$(call core_native_path,$(TEST_DIR))"]), | |
6657 | Module = fun(M) -> | |
6658 | [true] =:= lists:usort([ | |
6659 | case atom_to_list(F) of | |
6660 | "prop_" ++ _ -> | |
6661 | io:format("Testing ~p:~p/0~n", [M, F]), | |
6662 | proper:quickcheck(M:F(), nocolors); | |
6663 | _ -> | |
6664 | true | |
6665 | end | |
6666 | || {F, 0} <- M:module_info(exports)]) | |
6667 | end, | |
6668 | try | |
6669 | case $(1) of | |
6670 | all -> [true] =:= lists:usort([Module(M) || M <- [$(call comma_list,$(3))]]); | |
6671 | module -> Module($(2)); | |
6672 | function -> proper:quickcheck($(2), nocolors) | |
6673 | end | |
6674 | of | |
6675 | true -> halt(0); | |
6676 | _ -> halt(1) | |
6677 | catch error:undef -> | |
6678 | io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]), | |
6679 | halt(0) | |
6680 | end. | |
6681 | endef | |
6682 | ||
6683 | ifdef t | |
6684 | ifeq (,$(findstring :,$(t))) | |
6685 | proper: test-build | |
6686 | $(verbose) $(call erlang,$(call proper_check.erl,module,$(t))) | |
6687 | else | |
6688 | proper: test-build | |
6689 | $(verbose) echo Testing $(t)/0 | |
6690 | $(verbose) $(call erlang,$(call proper_check.erl,function,$(t)())) | |
6691 | endif | |
6692 | else | |
6693 | proper: test-build | |
6694 | $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ | |
6695 | $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam)))))) | |
6696 | $(gen_verbose) $(call erlang,$(call proper_check.erl,all,undefined,$(MODULES))) | |
6389 | 6697 | endif |
6390 | 6698 | endif |
6391 | 6699 | |
6399 | 6707 | RELX ?= $(ERLANG_MK_TMP)/relx |
6400 | 6708 | RELX_CONFIG ?= $(CURDIR)/relx.config |
6401 | 6709 | |
6402 | RELX_URL ?= https://github.com/erlware/relx/releases/download/v3.19.0/relx | |
6710 | RELX_URL ?= https://erlang.mk/res/relx-v3.24.5 | |
6403 | 6711 | RELX_OPTS ?= |
6404 | 6712 | RELX_OUTPUT_DIR ?= _rel |
6713 | RELX_REL_EXT ?= | |
6714 | RELX_TAR ?= 1 | |
6715 | ||
6716 | ifdef SFX | |
6717 | RELX_TAR = 1 | |
6718 | endif | |
6405 | 6719 | |
6406 | 6720 | ifeq ($(firstword $(RELX_OPTS)),-o) |
6407 | 6721 | RELX_OUTPUT_DIR = $(word 2,$(RELX_OPTS)) |
6424 | 6738 | # Plugin-specific targets. |
6425 | 6739 | |
6426 | 6740 | $(RELX): |
6741 | $(verbose) mkdir -p $(ERLANG_MK_TMP) | |
6427 | 6742 | $(gen_verbose) $(call core_http_get,$(RELX),$(RELX_URL)) |
6428 | 6743 | $(verbose) chmod +x $(RELX) |
6429 | 6744 | |
6430 | 6745 | relx-rel: $(RELX) rel-deps app |
6431 | $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release tar | |
6746 | $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release $(if $(filter 1,$(RELX_TAR)),tar) | |
6432 | 6747 | |
6433 | 6748 | relx-relup: $(RELX) rel-deps app |
6434 | $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release relup tar | |
6749 | $(verbose) $(RELX) -c $(RELX_CONFIG) $(RELX_OPTS) release relup $(if $(filter 1,$(RELX_TAR)),tar) | |
6435 | 6750 | |
6436 | 6751 | distclean-relx-rel: |
6437 | 6752 | $(gen_verbose) rm -rf $(RELX_OUTPUT_DIR) |
6439 | 6754 | # Run target. |
6440 | 6755 | |
6441 | 6756 | ifeq ($(wildcard $(RELX_CONFIG)),) |
6442 | run: | |
6757 | run:: | |
6443 | 6758 | else |
6444 | 6759 | |
6445 | 6760 | define get_relx_release.erl |
6446 | {ok, Config} = file:consult("$(RELX_CONFIG)"), | |
6447 | {release, {Name, Vsn}, _} = lists:keyfind(release, 1, Config), | |
6761 | {ok, Config} = file:consult("$(call core_native_path,$(RELX_CONFIG))"), | |
6762 | {release, {Name, Vsn0}, _} = lists:keyfind(release, 1, Config), | |
6763 | Vsn = case Vsn0 of | |
6764 | {cmd, Cmd} -> os:cmd(Cmd); | |
6765 | semver -> ""; | |
6766 | {semver, _} -> ""; | |
6767 | VsnStr -> Vsn0 | |
6768 | end, | |
6448 | 6769 | io:format("~s ~s", [Name, Vsn]), |
6449 | 6770 | halt(0). |
6450 | 6771 | endef |
6453 | 6774 | RELX_REL_NAME := $(word 1,$(RELX_REL)) |
6454 | 6775 | RELX_REL_VSN := $(word 2,$(RELX_REL)) |
6455 | 6776 | |
6456 | run: all | |
6457 | $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME) console | |
6777 | ifeq ($(PLATFORM),msys2) | |
6778 | RELX_REL_EXT := .cmd | |
6779 | endif | |
6780 | ||
6781 | run:: all | |
6782 | $(verbose) $(RELX_OUTPUT_DIR)/$(RELX_REL_NAME)/bin/$(RELX_REL_NAME)$(RELX_REL_EXT) console | |
6458 | 6783 | |
6459 | 6784 | help:: |
6460 | 6785 | $(verbose) printf "%s\n" "" \ |
6472 | 6797 | # Configuration. |
6473 | 6798 | |
6474 | 6799 | SHELL_ERL ?= erl |
6475 | SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin | |
6800 | SHELL_PATHS ?= $(CURDIR)/ebin $(APPS_DIR)/*/ebin $(DEPS_DIR)/*/ebin $(TEST_DIR) | |
6476 | 6801 | SHELL_OPTS ?= |
6477 | 6802 | |
6478 | 6803 | ALL_SHELL_DEPS_DIRS = $(addprefix $(DEPS_DIR)/,$(SHELL_DEPS)) |
6489 | 6814 | $(foreach dep,$(SHELL_DEPS),$(eval $(call dep_target,$(dep)))) |
6490 | 6815 | |
6491 | 6816 | build-shell-deps: $(ALL_SHELL_DEPS_DIRS) |
6492 | $(verbose) for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done | |
6817 | $(verbose) set -e; for dep in $(ALL_SHELL_DEPS_DIRS) ; do $(MAKE) -C $$dep ; done | |
6493 | 6818 | |
6494 | 6819 | shell: build-shell-deps |
6495 | 6820 | $(gen_verbose) $(SHELL_ERL) -pa $(SHELL_PATHS) $(SHELL_OPTS) |
6496 | 6821 | |
6822 | # Copyright 2017, Stanislaw Klekot <dozzie@jarowit.net> | |
6823 | # This file is part of erlang.mk and subject to the terms of the ISC License. | |
6824 | ||
6825 | .PHONY: distclean-sphinx sphinx | |
6826 | ||
6827 | # Configuration. | |
6828 | ||
6829 | SPHINX_BUILD ?= sphinx-build | |
6830 | SPHINX_SOURCE ?= doc | |
6831 | SPHINX_CONFDIR ?= | |
6832 | SPHINX_FORMATS ?= html | |
6833 | SPHINX_DOCTREES ?= $(ERLANG_MK_TMP)/sphinx.doctrees | |
6834 | SPHINX_OPTS ?= | |
6835 | ||
6836 | #sphinx_html_opts = | |
6837 | #sphinx_html_output = html | |
6838 | #sphinx_man_opts = | |
6839 | #sphinx_man_output = man | |
6840 | #sphinx_latex_opts = | |
6841 | #sphinx_latex_output = latex | |
6842 | ||
6843 | # Helpers. | |
6844 | ||
6845 | sphinx_build_0 = @echo " SPHINX" $1; $(SPHINX_BUILD) -N -q | |
6846 | sphinx_build_1 = $(SPHINX_BUILD) -N | |
6847 | sphinx_build_2 = set -x; $(SPHINX_BUILD) | |
6848 | sphinx_build = $(sphinx_build_$(V)) | |
6849 | ||
6850 | define sphinx.build | |
6851 | $(call sphinx_build,$1) -b $1 -d $(SPHINX_DOCTREES) $(if $(SPHINX_CONFDIR),-c $(SPHINX_CONFDIR)) $(SPHINX_OPTS) $(sphinx_$1_opts) -- $(SPHINX_SOURCE) $(call sphinx.output,$1) | |
6852 | ||
6853 | endef | |
6854 | ||
6855 | define sphinx.output | |
6856 | $(if $(sphinx_$1_output),$(sphinx_$1_output),$1) | |
6857 | endef | |
6858 | ||
6859 | # Targets. | |
6860 | ||
6861 | ifneq ($(wildcard $(if $(SPHINX_CONFDIR),$(SPHINX_CONFDIR),$(SPHINX_SOURCE))/conf.py),) | |
6862 | docs:: sphinx | |
6863 | distclean:: distclean-sphinx | |
6864 | endif | |
6865 | ||
6866 | help:: | |
6867 | $(verbose) printf "%s\n" "" \ | |
6868 | "Sphinx targets:" \ | |
6869 | " sphinx Generate Sphinx documentation." \ | |
6870 | "" \ | |
6871 | "ReST sources and 'conf.py' file are expected in directory pointed by" \ | |
6872 | "SPHINX_SOURCE ('doc' by default). SPHINX_FORMATS lists formats to build (only" \ | |
6873 | "'html' format is generated by default); target directory can be specified by" \ | |
6874 | 'setting sphinx_$${format}_output, for example: sphinx_html_output = output/html' \ | |
6875 | "Additional Sphinx options can be set in SPHINX_OPTS." | |
6876 | ||
6877 | # Plugin-specific targets. | |
6878 | ||
6879 | sphinx: | |
6880 | $(foreach F,$(SPHINX_FORMATS),$(call sphinx.build,$F)) | |
6881 | ||
6882 | distclean-sphinx: | |
6883 | $(gen_verbose) rm -rf $(filter-out $(SPHINX_SOURCE),$(foreach F,$(SPHINX_FORMATS),$(call sphinx.output,$F))) | |
6884 | ||
6885 | # Copyright (c) 2017, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> | |
6886 | # This file is contributed to erlang.mk and subject to the terms of the ISC License. | |
6887 | ||
6888 | .PHONY: show-ERL_LIBS show-ERLC_OPTS show-TEST_ERLC_OPTS | |
6889 | ||
6890 | show-ERL_LIBS: | |
6891 | @echo $(ERL_LIBS) | |
6892 | ||
6893 | show-ERLC_OPTS: | |
6894 | @$(foreach opt,$(ERLC_OPTS) -pa ebin -I include,echo "$(opt)";) | |
6895 | ||
6896 | show-TEST_ERLC_OPTS: | |
6897 | @$(foreach opt,$(TEST_ERLC_OPTS) -pa ebin -I include,echo "$(opt)";) | |
6898 | ||
6497 | 6899 | # Copyright (c) 2015-2016, Loïc Hoguin <essen@ninenines.eu> |
6498 | 6900 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
6499 | 6901 | |
6505 | 6907 | tests:: triq |
6506 | 6908 | |
6507 | 6909 | define triq_check.erl |
6508 | code:add_pathsa(["$(call core_native_path,$(CURDIR)/ebin)", "$(call core_native_path,$(DEPS_DIR)/*/ebin)"]), | |
6910 | code:add_pathsa([ | |
6911 | "$(call core_native_path,$(CURDIR)/ebin)", | |
6912 | "$(call core_native_path,$(DEPS_DIR)/*/ebin)", | |
6913 | "$(call core_native_path,$(TEST_DIR))"]), | |
6509 | 6914 | try |
6510 | 6915 | case $(1) of |
6511 | 6916 | all -> [true] =:= lists:usort([triq:check(M) || M <- [$(call comma_list,$(3))]]); |
6516 | 6921 | true -> halt(0); |
6517 | 6922 | _ -> halt(1) |
6518 | 6923 | catch error:undef -> |
6519 | io:format("Undefined property or module~n"), | |
6924 | io:format("Undefined property or module?~n~p~n", [erlang:get_stacktrace()]), | |
6520 | 6925 | halt(0) |
6521 | 6926 | end. |
6522 | 6927 | endef |
6532 | 6937 | endif |
6533 | 6938 | else |
6534 | 6939 | triq: test-build |
6535 | $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename $(wildcard ebin/*.beam)))))) | |
6940 | $(eval MODULES := $(patsubst %,'%',$(sort $(notdir $(basename \ | |
6941 | $(wildcard ebin/*.beam) $(call core_find,$(TEST_DIR)/,*.beam)))))) | |
6536 | 6942 | $(gen_verbose) $(call erlang,$(call triq_check.erl,all,undefined,$(MODULES))) |
6537 | 6943 | endif |
6538 | 6944 | endif |
6554 | 6960 | XREFR ?= $(CURDIR)/xrefr |
6555 | 6961 | export XREFR |
6556 | 6962 | |
6557 | XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/0.2.2/xrefr | |
6963 | XREFR_URL ?= https://github.com/inaka/xref_runner/releases/download/1.1.0/xrefr | |
6558 | 6964 | |
6559 | 6965 | # Core targets. |
6560 | 6966 | |
6561 | 6967 | help:: |
6562 | $(verbose) printf "%s\n" "" \ | |
6563 | "Xref targets:" \ | |
6564 | " xref Run Xrefr using $XREF_CONFIG as config file if defined" | |
6968 | $(verbose) printf '%s\n' '' \ | |
6969 | 'Xref targets:' \ | |
6970 | ' xref Run Xrefr using $$XREF_CONFIG as config file if defined' | |
6565 | 6971 | |
6566 | 6972 | distclean:: distclean-xref |
6567 | 6973 | |
6581 | 6987 | # Copyright (c) 2015, Viktor Söderqvist <viktor@zuiderkwast.se> |
6582 | 6988 | # This file is part of erlang.mk and subject to the terms of the ISC License. |
6583 | 6989 | |
6584 | COVER_REPORT_DIR = cover | |
6990 | COVER_REPORT_DIR ?= cover | |
6991 | COVER_DATA_DIR ?= $(CURDIR) | |
6585 | 6992 | |
6586 | 6993 | # Hook in coverage to ct |
6587 | 6994 | |
6588 | 6995 | ifdef COVER |
6589 | 6996 | ifdef CT_RUN |
6590 | # All modules in 'ebin' | |
6591 | COVER_MODS = $(notdir $(basename $(call core_ls,ebin/*.beam))) | |
6592 | ||
6997 | ifneq ($(wildcard $(TEST_DIR)),) | |
6593 | 6998 | test-build:: $(TEST_DIR)/ct.cover.spec |
6594 | 6999 | |
6595 | $(TEST_DIR)/ct.cover.spec: | |
6596 | $(verbose) echo Cover mods: $(COVER_MODS) | |
7000 | $(TEST_DIR)/ct.cover.spec: cover-data-dir | |
6597 | 7001 | $(gen_verbose) printf "%s\n" \ |
6598 | '{incl_mods,[$(subst $(space),$(comma),$(COVER_MODS))]}.' \ | |
6599 | '{export,"$(CURDIR)/ct.coverdata"}.' > $@ | |
7002 | "{incl_app, '$(PROJECT)', details}." \ | |
7003 | '{export,"$(abspath $(COVER_DATA_DIR))/ct.coverdata"}.' > $@ | |
6600 | 7004 | |
6601 | 7005 | CT_RUN += -cover $(TEST_DIR)/ct.cover.spec |
7006 | endif | |
6602 | 7007 | endif |
6603 | 7008 | endif |
6604 | 7009 | |
6609 | 7014 | tests:: |
6610 | 7015 | $(verbose) $(MAKE) --no-print-directory cover-report |
6611 | 7016 | endif |
7017 | ||
7018 | cover-data-dir: | $(COVER_DATA_DIR) | |
7019 | ||
7020 | $(COVER_DATA_DIR): | |
7021 | $(verbose) mkdir -p $(COVER_DATA_DIR) | |
7022 | else | |
7023 | cover-data-dir: | |
6612 | 7024 | endif |
6613 | 7025 | |
6614 | 7026 | clean:: coverdata-clean |
6622 | 7034 | "Cover targets:" \ |
6623 | 7035 | " cover-report Generate a HTML coverage report from previously collected" \ |
6624 | 7036 | " cover data." \ |
6625 | " all.coverdata Merge {eunit,ct}.coverdata into one coverdata file." \ | |
7037 | " all.coverdata Merge all coverdata files into all.coverdata." \ | |
6626 | 7038 | "" \ |
6627 | 7039 | "If COVER=1 is set, coverage data is generated by the targets eunit and ct. The" \ |
6628 | 7040 | "target tests additionally generates a HTML coverage report from the combined" \ |
6631 | 7043 | |
6632 | 7044 | # Plugin specific targets |
6633 | 7045 | |
6634 | COVERDATA = $(filter-out all.coverdata,$(wildcard *.coverdata)) | |
7046 | COVERDATA = $(filter-out $(COVER_DATA_DIR)/all.coverdata,$(wildcard $(COVER_DATA_DIR)/*.coverdata)) | |
6635 | 7047 | |
6636 | 7048 | .PHONY: coverdata-clean |
6637 | 7049 | coverdata-clean: |
6638 | $(gen_verbose) rm -f *.coverdata ct.cover.spec | |
7050 | $(gen_verbose) rm -f $(COVER_DATA_DIR)/*.coverdata $(TEST_DIR)/ct.cover.spec | |
6639 | 7051 | |
6640 | 7052 | # Merge all coverdata files into one. |
6641 | all.coverdata: $(COVERDATA) | |
6642 | $(gen_verbose) $(ERL) -eval ' \ | |
6643 | $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) \ | |
6644 | cover:export("$@"), halt(0).' | |
7053 | define cover_export.erl | |
7054 | $(foreach f,$(COVERDATA),cover:import("$(f)") == ok orelse halt(1),) | |
7055 | cover:export("$(COVER_DATA_DIR)/$@"), halt(0). | |
7056 | endef | |
7057 | ||
7058 | all.coverdata: $(COVERDATA) cover-data-dir | |
7059 | $(gen_verbose) $(call erlang,$(cover_export.erl)) | |
6645 | 7060 | |
6646 | 7061 | # These are only defined if COVER_REPORT_DIR is non-empty. Set COVER_REPORT_DIR to |
6647 | 7062 | # empty if you want the coverdata files but not the HTML report. |
6651 | 7066 | |
6652 | 7067 | cover-report-clean: |
6653 | 7068 | $(gen_verbose) rm -rf $(COVER_REPORT_DIR) |
7069 | $(if $(shell ls -A $(COVER_DATA_DIR)/),,$(verbose) rmdir $(COVER_DATA_DIR)) | |
6654 | 7070 | |
6655 | 7071 | ifeq ($(COVERDATA),) |
6656 | 7072 | cover-report: |
6659 | 7075 | # Modules which include eunit.hrl always contain one line without coverage |
6660 | 7076 | # because eunit defines test/0 which is never called. We compensate for this. |
6661 | 7077 | EUNIT_HRL_MODS = $(subst $(space),$(comma),$(shell \ |
6662 | grep -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ | |
7078 | grep -H -e '^\s*-include.*include/eunit\.hrl"' src/*.erl \ | |
6663 | 7079 | | sed "s/^src\/\(.*\)\.erl:.*/'\1'/" | uniq)) |
6664 | 7080 | |
6665 | 7081 | define cover_report.erl |
6694 | 7110 | endef |
6695 | 7111 | |
6696 | 7112 | cover-report: |
6697 | $(gen_verbose) mkdir -p $(COVER_REPORT_DIR) | |
7113 | $(verbose) mkdir -p $(COVER_REPORT_DIR) | |
6698 | 7114 | $(gen_verbose) $(call erlang,$(cover_report.erl)) |
6699 | 7115 | |
6700 | 7116 | endif |
6746 | 7162 | |
6747 | 7163 | endif |
6748 | 7164 | endif |
7165 | ||
7166 | # Copyright (c) 2013-2017, Loïc Hoguin <essen@ninenines.eu> | |
7167 | # This file is part of erlang.mk and subject to the terms of the ISC License. | |
7168 | ||
7169 | # External plugins. | |
7170 | ||
7171 | DEP_PLUGINS ?= | |
7172 | ||
7173 | $(foreach p,$(DEP_PLUGINS),\ | |
7174 | $(eval $(if $(findstring /,$p),\ | |
7175 | $(call core_dep_plugin,$p,$(firstword $(subst /, ,$p))),\ | |
7176 | $(call core_dep_plugin,$p/plugins.mk,$p)))) | |
6749 | 7177 | |
6750 | 7178 | # Copyright (c) 2013-2015, Loïc Hoguin <essen@ninenines.eu> |
6751 | 7179 | # Copyright (c) 2015-2016, Jean-Sébastien Pédron <jean-sebastien@rabbitmq.com> |
6814 | 7242 | $(verbose) rm -f $(ERLANG_MK_RECURSIVE_TMP_LIST) |
6815 | 7243 | endif |
6816 | 7244 | ifndef IS_APP |
6817 | $(verbose) for dep in $(ALL_APPS_DIRS) ; do \ | |
7245 | $(verbose) set -e; for dep in $(ALL_APPS_DIRS) ; do \ | |
6818 | 7246 | $(MAKE) -C $$dep $@ \ |
6819 | 7247 | IS_APP=1 \ |
6820 | ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \ | |
6821 | || exit $$?; \ | |
7248 | ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \ | |
6822 | 7249 | done |
6823 | 7250 | endif |
6824 | $(verbose) for dep in $^ ; do \ | |
7251 | $(verbose) set -e; for dep in $^ ; do \ | |
6825 | 7252 | if ! grep -qs ^$$dep$$ $(ERLANG_MK_RECURSIVE_TMP_LIST); then \ |
6826 | 7253 | echo $$dep >> $(ERLANG_MK_RECURSIVE_TMP_LIST); \ |
6827 | if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk)$$" \ | |
7254 | if grep -qs -E "^[[:blank:]]*include[[:blank:]]+(erlang\.mk|.*/erlang\.mk|.*ERLANG_MK_FILENAME.*)$$" \ | |
6828 | 7255 | $$dep/GNUmakefile $$dep/makefile $$dep/Makefile; then \ |
6829 | 7256 | $(MAKE) -C $$dep fetch-deps \ |
6830 | 7257 | IS_DEP=1 \ |
6831 | ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST) \ | |
6832 | || exit $$?; \ | |
7258 | ERLANG_MK_RECURSIVE_TMP_LIST=$(ERLANG_MK_RECURSIVE_TMP_LIST); \ | |
6833 | 7259 | fi \ |
6834 | 7260 | fi \ |
6835 | 7261 | done |
0 | %% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> | |
0 | %% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | 1 | %% |
2 | 2 | %% Permission to use, copy, modify, and/or distribute this software for any |
3 | 3 | %% purpose with or without fee is hereby granted, provided that the above |
31 | 31 | parse(Data) -> |
32 | 32 | parse(Data, #{}). |
33 | 33 | |
34 | parse(Data, St) when is_binary(Data) -> | |
35 | asciideck_parser:parse(Data, St); | |
34 | parse(Data, _St) when is_binary(Data) -> | |
35 | Passes = [ | |
36 | asciideck_attributes_pass, | |
37 | asciideck_lists_pass, | |
38 | asciideck_tables_pass, | |
39 | asciideck_inline_pass | |
40 | ], | |
41 | lists:foldl(fun(M, AST) -> M:run(AST) end, | |
42 | asciideck_block_parser:parse(Data), Passes); | |
36 | 43 | parse(Data, St) -> |
37 | 44 | parse(iolist_to_binary(Data), St). |
38 | 45 |
0 | %% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | %% Asciidoc User Guide 29 | |
15 | -module(asciideck_attributes_parser). | |
16 | ||
17 | -export([parse/1]). | |
18 | ||
19 | -type attributes() :: #{ | |
20 | %% The raw attribute list. | |
21 | 0 := binary(), | |
22 | %% Positional attributes. | |
23 | pos_integer() => binary(), | |
24 | %% Named attributes. | |
25 | binary() => binary() | |
26 | }. | |
27 | -export_type([attributes/0]). | |
28 | ||
29 | -define(IS_WS(C), (C =:= $\s) or (C =:= $\t)). | |
30 | ||
-spec parse(binary()) -> attributes().
parse(Data) ->
	%% Attribute 0 always holds the raw, unparsed attribute list;
	%% positional attribute numbering starts at 1.
	Acc0 = #{0 => Data},
	parse(Data, Acc0, 1).
34 | ||
%% Loop over the comma-separated attributes, accumulating them
%% into the Attrs map. The third argument is the position for the
%% next positional attribute, or the atom 'undefined' once a named
%% attribute has been seen: from that point on only named attributes
%% are accepted, and a positional attribute (a 2-tuple return from
%% parse_attr/2 with the guard failing) crashes with a case_clause
%% error — intentional happy-path parsing.
parse(<<>>, Attrs, _) ->
	Attrs;
parse(Data, Attrs, Nth) ->
	case parse_attr(Data, <<>>) of
		%% Positional attribute: store under its 1-based position.
		{Value, Rest} when Nth =/= undefined ->
			parse(Rest, Attrs#{Nth => Value}, Nth + 1);
		%% Named attribute: store under its name and stop
		%% accepting positional attributes.
		{Name, Value, Rest} ->
			parse(Rest, Attrs#{Name => Value}, undefined)
	end.
44 | ||
%% Parse one attribute from the comma-separated list.
%% Returns {Value, Rest} for a positional attribute or
%% {Name, Value, Rest} for a named attribute. Clause order
%% matters: the whitespace-skipping and quote clauses only
%% apply while the accumulator is still empty.
parse_attr(<<>>, Acc) ->
	{Acc, <<>>};
%% Skip preceding whitespace.
parse_attr(<<C, R/bits>>, <<>>) when ?IS_WS(C) ->
	parse_attr(R, <<>>);
%% Parse quoted positional attributes in their own function.
parse_attr(<<$", R/bits>>, <<>>) ->
	parse_quoted_attr(R, <<>>);
%% We have a named attribute, parse the value.
parse_attr(<<$=, R/bits>>, Name) when Name =/= <<>> ->
	parse_attr_value(R, asciideck_block_parser:trim(Name, trailing), <<>>);
%% We have a positional attribute.
parse_attr(<<$,, R/bits>>, Value) ->
	{asciideck_block_parser:trim(Value, trailing), R};
%% Continue. The C =/= $= guard means an attribute starting
%% with '=' (empty name) has no matching clause and crashes
%% on purpose (function_clause).
parse_attr(<<C, R/bits>>, Acc) when C =/= $= ->
	parse_attr(R, <<Acc/binary, C>>).
62 | ||
%% Accumulate everything up to the closing double quote,
%% unescaping any \" sequence along the way. An unterminated
%% quote has no matching clause and crashes on purpose.
parse_quoted_attr(<<$\\, $", Rest/bits>>, Acc) ->
	parse_quoted_attr(Rest, <<Acc/binary, $">>);
parse_quoted_attr(<<$", Rest/bits>>, Acc) ->
	parse_quoted_attr_end(Rest, Acc);
parse_quoted_attr(<<C, Rest/bits>>, Acc) ->
	parse_quoted_attr(Rest, <<Acc/binary, C>>).
70 | ||
%% After the closing quote, skip whitespace up to the next comma
%% (or end of input) and return the parsed value together with
%% the remaining data. Any other character means a malformed
%% attribute list and crashes on purpose (function_clause).
parse_quoted_attr_end(<<C, Rest/bits>>, Value) when ?IS_WS(C) ->
	parse_quoted_attr_end(Rest, Value);
parse_quoted_attr_end(<<$,, Rest/bits>>, Value) ->
	{Value, Rest};
parse_quoted_attr_end(<<>>, Value) ->
	{Value, <<>>}.
78 | ||
%% Parse the value of a named attribute (everything after Name=).
%% Returns {Name, Value, Rest}. Mirrors parse_attr/2: whitespace
%% skipping and the quote clause only apply while the value
%% accumulator is still empty.
parse_attr_value(<<>>, Name, Acc) ->
	{Name, Acc, <<>>};
%% Skip preceding whitespace.
parse_attr_value(<<C, R/bits>>, Name, <<>>) when ?IS_WS(C) ->
	parse_attr_value(R, Name, <<>>);
%% Parse quoted positional attributes in their own function.
parse_attr_value(<<$", R/bits>>, Name, <<>>) ->
	{Value, Rest} = parse_quoted_attr(R, <<>>),
	{Name, Value, Rest};
%% Done.
parse_attr_value(<<$,, R/bits>>, Name, Value) ->
	{Name, asciideck_block_parser:trim(Value, trailing), R};
%% Continue.
parse_attr_value(<<C, R/bits>>, Name, Acc) ->
	parse_attr_value(R, Name, <<Acc/binary, C>>).
94 | ||
-ifdef(TEST).
%% Attribute 0 must always contain the raw, unparsed attribute list.
attribute_0_test() ->
	#{0 := <<"Hello,world,width=\"50\"">>} = parse(<<"Hello,world,width=\"50\"">>),
	ok.

parse_test() ->
	#{} = parse(<<>>),
	#{
		1 := <<"Hello">>
	} = parse(<<"Hello">>),
	#{
		1 := <<"quote">>,
		2 := <<"Bertrand Russell">>,
		3 := <<"The World of Mathematics (1956)">>
	} = parse(<<"quote, Bertrand Russell, The World of Mathematics (1956)">>),
	#{
		1 := <<"22 times">>,
		<<"backcolor">> := <<"#0e0e0e">>,
		<<"options">> := <<"noborders,wide">>
	} = parse(<<"\"22 times\", backcolor=\"#0e0e0e\", options=\"noborders,wide\"">>),
	%% Footnote example from the Asciidoc User Guide: commas and
	%% double quotes inside an unquoted attribute must be written
	%% as the &#44; and &#34; character entities so they do not
	%% terminate the attribute. The previous literal comma/quotes
	%% were an HTML-entity-decoding artifact and would have split
	%% the string into several positional attributes.
	#{
		1 := <<"A footnote&#44; &#34;with an image&#34; image:smallnew.png[]">>
	} = parse(<<"A footnote&#44; &#34;with an image&#34; image:smallnew.png[]">>),
	ok.
-endif.
0 | %% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | %% The purpose of this pass is to apply attributes to | |
15 | %% their corresponding blocks. For macros the attributes | |
16 | %% are already applied. For inline elements the inline | |
17 | %% pass is taking care of it. | |
18 | -module(asciideck_attributes_pass). | |
19 | ||
20 | -export([run/1]). | |
21 | ||
%% Fold attribute-carrying elements (block identifiers, block
%% titles and attribute lists) into the block that follows them.
run([]) ->
	[];
%% A block identifier is an alternative way of specifying
%% the id attribute for a block.
run([{block_id, #{id := ID}, <<>>, _}|Rest]) ->
	run(apply_attributes(Rest, #{<<"id">> => ID}));
%% A block title is ultimately treated as an attribute
%% for the following block.
run([{block_title, _, Title, _}|Rest]) ->
	run(apply_attributes(Rest, #{<<"title">> => Title}));
%% An attribute list is applied as-is to the following block.
run([{attribute_list, Attrs, <<>>, _}|Rest]) ->
	run(apply_attributes(Rest, Attrs));
%% Any other element is kept unchanged.
run([Block|Rest]) ->
	[Block|run(Rest)].
39 | ||
%% Walk the AST until we find the block the attributes apply to.
apply_attributes([], _) ->
	[];
apply_attributes(AST=[Element={Type, ElemAttrs, Content, Ann}|Rest], Attrs) ->
	case can_apply(Type) of
		%% Give up: leave the AST untouched from here on.
		drop ->
			AST;
		%% Keep looking past this element.
		skip ->
			[Element|apply_attributes(Rest, Attrs)];
		%% Merge the attributes into this block. The incoming
		%% attributes take precedence over the block's own.
		apply ->
			[{Type, maps:merge(ElemAttrs, Attrs), Content, Ann}|Rest]
	end.
53 | ||
%% Decide what to do with pending attributes for an element type.
%%
%% Block macros already come with a mandatory attribute list, and
%% a list item continuation is not a block: to play it safe we
%% drop the attributes in both cases for now. Attribute-carrying
%% elements are skipped so consecutive attribute elements all end
%% up applied to the same following block. Everything else is a
%% regular block and receives the attributes.
can_apply(Type) when Type =:= block_macro; Type =:= list_item_continuation ->
	drop;
can_apply(Type) when Type =:= block_id; Type =:= attribute_list; Type =:= block_title ->
	skip;
can_apply(_) ->
	apply.
65 | ||
-ifdef(TEST).
%% An attribute_list element is merged into the following block
%% and removed from the AST.
attribute_list_test() ->
	AST0 = [
		{attribute_list, #{
			0 => <<"width=400">>,
			<<"width">> => <<"400">>
		}, <<>>, #{line => 1}},
		{listing_block, #{}, <<"Hello!">>, #{line => 2}}
	],
	AST = [
		{listing_block, #{
			0 => <<"width=400">>,
			<<"width">> => <<"400">>
		}, <<"Hello!">>, #{line => 2}}
	],
	AST = run(AST0),
	ok.

%% A block_id element becomes the <<"id">> attribute of the
%% following block.
block_id_test() ->
	AST0 = [
		{block_id, #{
			id => <<"cowboy_req">>
		}, <<>>, #{line => 1}},
		{listing_block, #{}, <<"Hello!">>, #{line => 2}}
	],
	AST = [
		{listing_block, #{
			<<"id">> => <<"cowboy_req">>
		}, <<"Hello!">>, #{line => 2}}
	],
	AST = run(AST0),
	ok.

%% A block_title element becomes the <<"title">> attribute of the
%% following block.
block_title_test() ->
	AST0 = [
		{block_title, #{}, <<"Title">>, #{line => 1}},
		{listing_block, #{}, <<"Hello!">>, #{line => 2}}
	],
	AST = [
		{listing_block, #{
			<<"title">> => <<"Title">>
		}, <<"Hello!">>, #{line => 2}}
	],
	AST = run(AST0),
	ok.
-endif.
0 | %% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | %% The block parser is the first pass of the parsing of Asciidoc | |
15 | %% files. It only isolates the different top-level blocks and | |
16 | %% produces a representation that can then be manipulated. | |
17 | %% | |
18 | %% Further passes are necessary to propagate the parsed lists | |
19 | %% of attributes to their respective blocks, to create actual | |
20 | %% lists from the parsed list items or to parse the contents | |
21 | %% of tables. Finally a final pass will parse inline elements. | |
22 | %% | |
23 | %% This module may be called again for parsing the content | |
24 | %% of individual table cells. | |
-module(asciideck_block_parser).

-export([parse/1]).

%% @todo Temporary export. Move somewhere else.
-export([trim/1]).
-export([trim/2]).
-export([while/2]).

%% The AST is a list of {Type, Attributes, Content, Annotations}
%% elements. @todo Write a proper type definition.
-type ast() :: list(). %% @todo

%% Parser state: the pid of the line reader process
%% that input lines are pulled from.
-record(state, {
	reader :: pid()
}).

%% Inline whitespace: space or horizontal tab.
-define(IS_WS(C), (C =:= $\s) or (C =:= $\t)).
41 | ||
-ifdef(TEST).
%% Assert that the first block parsed from Value is NOT of the given type.
-define(NOT(Type, Value), true = Type =/= element(1, hd(Value))).

%% Self-test for the ?NOT macro itself.
define_NOT_test() ->
	%% This succeeds.
	?NOT(block_id, parse(<<"[[block,id]]">>)),
	%% This fails.
	{'EXIT', _} = (catch ?NOT(block_id, parse(<<"[[block_id]]">>))),
	ok.
-endif.
52 | ||
%% Parse the given Asciidoc input into a list of top-level blocks.
%% A line reader process is started to feed lines to the parser.
-spec parse(binary()) -> ast().
parse(Data) ->
	%% @todo Might want to start it supervised.
	%% @todo Might want to stop it also.
	{ok, ReaderPid} = asciideck_line_reader:start_link(Data),
	blocks(#state{reader=ReaderPid}).
59 | ||
%% Parse blocks one after another until the end of the
%% input is reached, returning them in document order.
blocks(St) ->
	blocks(St, []).

blocks(St, Acc) ->
	case block(St) of
		eof -> lists:reverse(Acc);
		Parsed -> blocks(St, [Parsed|Acc])
	end.
65 | ||
%% Asciidoc parsing never fails. If a block is not
%% formatted properly, it will be treated as a paragraph.
%%
%% Each parser fun below is tried in order: a parser that
%% does not match crashes, and oneof tries the next one.
%% The order is therefore significant, with the paragraph
%% parsers acting as the catch-all at the very end.
block(St) ->
	skip(fun empty_line/1, St),
	oneof([
		fun eof/1,
		%% Section titles.
		fun section_title/1,
		fun long_section_title/1,
		%% Block macros.
		fun block_id/1,
		fun block_macro/1,
		%% Lists.
		fun bulleted_list/1,
		fun numbered_list/1,
		fun labeled_list/1,
		fun callout_list/1,
		fun list_item_continuation/1,
		%% Delimited blocks.
		fun listing_block/1,
		fun literal_block/1,
		fun sidebar_block/1,
		fun comment_block/1,
		fun passthrough_block/1,
		fun quote_block/1,
		fun example_block/1,
		fun open_block/1,
		%% Table.
		fun table/1,
		%% Attributes.
		fun attribute_entry/1,
		fun attribute_list/1,
		%% Block title.
		fun block_title/1,
		%% Comment lines.
		fun comment_line/1,
		%% Paragraphs.
		fun literal_para/1,
		fun admonition_para/1,
		fun para/1
	], St).
107 | ||
%% Matches only at the end of the input; otherwise the
%% match crashes and the next parser is attempted.
eof(St) ->
	eof = read_line(St).

-ifdef(TEST).
eof_test() ->
	[] = parse(<<>>).
-endif.

%% Matches a line containing only whitespace.
empty_line(St) ->
	<<>> = trim(read_line(St)).

-ifdef(TEST).
empty_line_test() ->
	[] = parse(<<
		"\n"
		" \n"
		" \n"
		"\n"
	>>).
-endif.
128 | ||
%% Asciidoc User Guide 11.2
%%
%% One-line section title: one to five = characters followed
%% by whitespace, the title text and an optional matching
%% trailing delimiter.
section_title(St) ->
	{Level, Title0} = case read_line(St) of
		<<"=", C, R/bits>> when ?IS_WS(C) -> {0, R};
		<<"==", C, R/bits>> when ?IS_WS(C) -> {1, R};
		<<"===", C, R/bits>> when ?IS_WS(C) -> {2, R};
		<<"====", C, R/bits>> when ?IS_WS(C) -> {3, R};
		<<"=====", C, R/bits>> when ?IS_WS(C) -> {4, R}
	end,
	Ann = ann(St),
	Title1 = trim(Title0),
	%% Optional: trailing title delimiter.
	Trailer = case Level of
		0 -> <<"=">>;
		1 -> <<"==">>;
		2 -> <<"===">>;
		3 -> <<"====">>;
		4 -> <<"=====">>
	end,
	%% Length of the title proper if a trailer (Level + 1 characters)
	%% preceded by one whitespace character is present.
	Len = byte_size(Title1) - Level - 2,
	Title = case Title1 of
		<<Title2:Len/binary, WS, Trailer/binary>> when ?IS_WS(WS) -> trim(Title2);
		_ -> trim(Title1)
	end,
	%% Section titles must be followed by at least one empty line.
	_ = empty_line(St),
	%% Good!
	{section_title, #{level => Level}, Title, Ann}.

-ifdef(TEST).
section_title_test() ->
	%% With trailing title delimiter.
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
		= parse(<<"= Document Title (level 0) =">>),
	[{section_title, #{level := 1}, <<"Section Title (level 1)">>, _}]
		= parse(<<"== Section Title (level 1) ==">>),
	[{section_title, #{level := 2}, <<"Section Title (level 2)">>, _}]
		= parse(<<"=== Section Title (level 2) ===">>),
	[{section_title, #{level := 3}, <<"Section Title (level 3)">>, _}]
		= parse(<<"==== Section Title (level 3) ====">>),
	[{section_title, #{level := 4}, <<"Section Title (level 4)">>, _}]
		= parse(<<"===== Section Title (level 4) =====">>),
	%% Without trailing title delimiter.
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
		= parse(<<"= Document Title (level 0)">>),
	[{section_title, #{level := 1}, <<"Section Title (level 1)">>, _}]
		= parse(<<"== Section Title (level 1)">>),
	[{section_title, #{level := 2}, <<"Section Title (level 2)">>, _}]
		= parse(<<"=== Section Title (level 2)">>),
	[{section_title, #{level := 3}, <<"Section Title (level 3)">>, _}]
		= parse(<<"==== Section Title (level 3)">>),
	[{section_title, #{level := 4}, <<"Section Title (level 4)">>, _}]
		= parse(<<"===== Section Title (level 4)">>),
	%% Accept more spaces before/after delimiters.
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
		= parse(<<"= Document Title (level 0)">>),
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
		= parse(<<"= Document Title (level 0) =">>),
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
		= parse(<<"= Document Title (level 0) =">>),
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}]
		= parse(<<"= Document Title (level 0) = ">>),
	%% A space before the first delimiter is not a title.
	?NOT(section_title, parse(<<" = Document Title (level 0)">>)),
	ok.
-endif.
195 | ||
%% Asciidoc User Guide 11.1
%%
%% Two-line section title: the title text followed by an
%% underline whose character determines the section level.
long_section_title(St) ->
	%% Title must be hard against the left margin.
	<<C, _/bits>> = Title0 = read_line(St),
	Ann = ann(St),
	false = ?IS_WS(C),
	Title = trim(Title0),
	%% Read the underline.
	{Level, Char, Underline0} = case read_line(St) of
		U = <<"=", _/bits >> -> {0, $=, U};
		U = <<"-", _/bits >> -> {1, $-, U};
		U = <<"~", _/bits >> -> {2, $~, U};
		U = <<"^", _/bits >> -> {3, $^, U};
		U = <<"+", _/bits >> -> {4, $+, U}
	end,
	Underline = trim(Underline0, trailing),
	%% Underline must be the same character repeated over the entire line.
	repeats(Underline, Char),
	%% Underline must be the same size as the title, +/- 2 characters.
	TLen = byte_size(Title),
	ULen = byte_size(Underline),
	true = (TLen >= ULen - 2) andalso (TLen =< ULen + 2),
	%% Good!
	{section_title, #{level => Level}, Title, Ann}.

-ifdef(TEST).
long_section_title_test() ->
	%% Same amount of characters for the underline.
	[{section_title, #{level := 0}, <<"Document Title (level 0)">>, _}] = parse(<<
		"Document Title (level 0)\n"
		"========================">>),
	[{section_title, #{level := 1}, <<"Section Title (level 1)">>, _}] = parse(<<
		"Section Title (level 1)\n"
		"-----------------------">>),
	[{section_title, #{level := 2}, <<"Section Title (level 2)">>, _}] = parse(<<
		"Section Title (level 2)\n"
		"~~~~~~~~~~~~~~~~~~~~~~~">>),
	[{section_title, #{level := 3}, <<"Section Title (level 3)">>, _}] = parse(<<
		"Section Title (level 3)\n"
		"^^^^^^^^^^^^^^^^^^^^^^^">>),
	[{section_title, #{level := 4}, <<"Section Title (level 4)">>, _}] = parse(<<
		"Section Title (level 4)\n"
		"+++++++++++++++++++++++">>),
	%% A shorter title to confirm we are not cheating.
	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
		"Hello!\n"
		"======">>),
	%% Underline can be +/- 2 characters.
	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
		"Hello!\n"
		"====">>),
	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
		"Hello!\n"
		"=====">>),
	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
		"Hello!\n"
		"=======">>),
	[{section_title, #{level := 0}, <<"Hello!">>, _}] = parse(<<
		"Hello!\n"
		"========">>),
	%% Underline too short/long results in a different block.
	?NOT(section_title, parse(<<
		"Hello!\n"
		"===">>)),
	?NOT(section_title, parse(<<
		"Hello!\n"
		"=========">>)),
	ok.
-endif.
265 | ||
%% Asciidoc User Guide 21.2.1
%%
%% We currently do not implement the <xreflabel> value.
%% I am also not sure what characters are allowed,
%% so what is here is what I came up with guessing.
%%
%% A block id is a [[...]] line hard against the left margin.
block_id(St) ->
	<<"[[", Line0/bits>> = read_line(St),
	Line = trim(Line0),
	Len = byte_size(Line) - 2,
	<<BlockID:Len/binary, "]]">> = Line,
	%% Make sure there are only valid characters.
	{BlockID, <<>>} = while(fun(C) ->
		(C =/= $,) andalso (C =/= $[) andalso (C =/= $])
			andalso (C =/= $\s) andalso (C =/= $\t)
	end, BlockID),
	%% Good!
	{block_id, #{id => BlockID}, <<>>, ann(St)}.

-ifdef(TEST).
block_id_test() ->
	%% Valid.
	[{block_id, #{id := <<"X30">>}, <<>>, _}] = parse(<<"[[X30]]">>),
	%% Invalid.
	?NOT(block_id, parse(<<"[[block,id]]">>)),
	?NOT(block_id, parse(<<"[[block[id]]">>)),
	?NOT(block_id, parse(<<"[[block]id]]">>)),
	?NOT(block_id, parse(<<"[[block id]]">>)),
	?NOT(block_id, parse(<<"[[block\tid]]">>)),
	%% Must be hard on the left of the line.
	?NOT(block_id, parse(<<" [[block_id]]">>)),
	?NOT(block_id, parse(<<"\t[[block_id]]">>)),
	ok.
-endif.
299 | ||
%% Asciidoc User Guide 21.2.3
%%
%% A single comment line starting with // hard against the
%% left margin. Comment blocks (////) are tried before this
%% parser in block/1.
comment_line(St) ->
	<<"//", Comment0/bits>> = read_line(St),
	Comment = trim(Comment0),
	%% Good!
	{comment_line, #{<<"subs">> => <<"verbatim">>}, Comment, ann(St)}.

-ifdef(TEST).
comment_line_test() ->
	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment.">>),
	%% We trim the whitespace around the comment.
	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment.">>),
	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment. ">>),
	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"//\tThis is a comment.">>),
	[{comment_line, _, <<"This is a comment.">>, _}] = parse(<<"// This is a comment.\t">>),
	[
		{comment_line, _, <<"First line.">>, _},
		{comment_line, _, <<"Second line.">>, _}
	] = parse(<<
		"// First line.\n"
		"// Second line.\n">>),
	%% Must be hard on the left of the line.
	?NOT(comment_line, parse(<<" // This is a comment.">>)),
	?NOT(comment_line, parse(<<"\t// This is a comment.">>)),
	ok.
-endif.
326 | ||
%% We currently implement the following block macros
%% from the Asciidoc User Guide:
%%
%% - image (21.2.2)
%% - include (21.3.1)
%% - ifdef (21.3.2)
%% - ifndef (21.3.2)
%% - endif (21.3.2)
%%
%% A block macro is a line of the form name::target[attrlist].
block_macro(St) ->
	Line0 = read_line(St),
	Ann = ann(St),
	%% Name must contain letters, digits or dash characters.
	{Name, <<"::", Line1/bits>>} = while(fun(C) ->
		((C >= $a) andalso (C =< $z))
			orelse ((C >= $A) andalso (C =< $Z))
			orelse ((C >= $0) andalso (C =< $9))
			orelse (C =:= $-)
	end, Line0),
	%% Name must not begin with a dash.
	true = binary:at(Name, 0) =/= $-,
	%% Target must not contain whitespace characters.
	%% It is followed by an [attribute list].
	{Target, AttrList0 = <<"[", _/bits>>} = while(fun(C) ->
		(C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
	end, Line1),
	AttrList1 = trim(AttrList0),
	{attribute_list, AttrList, <<>>, _} = attribute_list(St, AttrList1),
	%% Block macros must be followed by at least one empty line.
	_ = empty_line(St),
	{block_macro, AttrList#{
		name => Name,
		target => Target
	}, <<>>, Ann}.

-ifdef(TEST).
block_macro_image_test() ->
	[{block_macro, #{
		name := <<"image">>,
		target := <<"images/layout.png">>,
		1 := <<"J14P main circuit board">>
	}, <<>>, _}] = parse(<<"image::images/layout.png[J14P main circuit board]">>),
	[{block_macro, #{
		name := <<"image">>,
		target := <<"images/layout.png">>,
		1 := <<"J14P main circuit board">>,
		<<"title">> := <<"Main circuit board">>
	}, <<>>, _}] = parse(
		<<"image::images/layout.png[\"J14P main circuit board\", "
			"title=\"Main circuit board\"]">>),
	ok.

block_macro_include_test() ->
	[{block_macro, #{
		name := <<"include">>,
		target := <<"chapter1.txt">>,
		<<"tabsize">> := <<"4">>
	}, <<>>, _}] = parse(<<"include::chapter1.txt[tabsize=4]">>),
	ok.

block_macro_ifdef_test() ->
	[{block_macro, #{
		name := <<"ifdef">>,
		target := <<"revnumber">>,
		0 := <<>>
	}, <<>>, _}] = parse(<<"ifdef::revnumber[]">>),
	[{block_macro, #{
		name := <<"ifdef">>,
		target := <<"revnumber">>,
		1 := <<"Version number 42">>
	}, <<>>, _}] = parse(<<"ifdef::revnumber[Version number 42]">>),
	ok.

block_macro_ifndef_test() ->
	[{block_macro, #{
		name := <<"ifndef">>,
		target := <<"revnumber">>,
		0 := <<>>
	}, <<>>, _}] = parse(<<"ifndef::revnumber[]">>),
	ok.

block_macro_endif_test() ->
	[{block_macro, #{
		name := <<"endif">>,
		target := <<"revnumber">>,
		0 := <<>>
	}, <<>>, _}] = parse(<<"endif::revnumber[]">>),
	%% Some macros accept an empty target.
	[{block_macro, #{
		name := <<"endif">>,
		target := <<>>,
		0 := <<>>
	}, <<>>, _}] = parse(<<"endif::[]">>),
	ok.
-endif.
421 | ||
%% Asciidoc User Guide 17.1
%%
%% Bulleted list item: a dash or one to five * characters
%% followed by whitespace. The line is trimmed first, so
%% list items may be indented.
bulleted_list(St) ->
	Line0 = read_line(St),
	Line1 = trim(Line0),
	{Type0, Level, ListItem} = case Line1 of
		<<"-", C, R/bits>> when ?IS_WS(C) -> {dash, 1, R};
		<<"*", C, R/bits>> when ?IS_WS(C) -> {star, 1, R};
		<<"**", C, R/bits>> when ?IS_WS(C) -> {star, 2, R};
		<<"***", C, R/bits>> when ?IS_WS(C) -> {star, 3, R};
		<<"****", C, R/bits>> when ?IS_WS(C) -> {star, 4, R};
		<<"*****", C, R/bits>> when ?IS_WS(C) -> {star, 5, R}
	end,
	Type = case Type0 of
		dash -> bulleted_alt;
		star -> bulleted
	end,
	list_item(St, #{
		type => Type,
		level => Level
	}, ListItem).

-ifdef(TEST).
bulleted_list_test() ->
	[{list_item, #{
		type := bulleted_alt,
		level := 1
	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"- List item.">>),
	[{list_item, #{
		type := bulleted,
		level := 1
	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"* List item.">>),
	[{list_item, #{
		type := bulleted,
		level := 2
	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"** List item.">>),
	[{list_item, #{
		type := bulleted,
		level := 3
	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"*** List item.">>),
	[{list_item, #{
		type := bulleted,
		level := 4
	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"**** List item.">>),
	[{list_item, #{
		type := bulleted,
		level := 5
	}, [{paragraph, _, <<"List item.">>, _}], _}] = parse(<<"***** List item.">>),
	%% Two list items one after the other.
	[
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, _, <<"List item 1.">>, _}], _},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, _, <<"List item 2.">>, _}], _}
	] = parse(<<"* List item 1.\n* List item 2.">>),
	ok.
-endif.
478 | ||
%% Asciidoc User Guide 17.2
%%
%% We currently only implement implicit numbering:
%% one to five . characters followed by whitespace.
numbered_list(St) ->
	Line0 = read_line(St),
	Line1 = trim(Line0),
	{Level, ListItem} = case Line1 of
		<<".", C, R/bits>> when ?IS_WS(C) -> {1, R};
		<<"..", C, R/bits>> when ?IS_WS(C) -> {2, R};
		<<"...", C, R/bits>> when ?IS_WS(C) -> {3, R};
		<<"....", C, R/bits>> when ?IS_WS(C) -> {4, R};
		<<".....", C, R/bits>> when ?IS_WS(C) -> {5, R}
	end,
	list_item(St, #{
		type => numbered,
		level => Level
	}, ListItem).

-ifdef(TEST).
numbered_list_test() ->
	[{list_item, #{
		type := numbered,
		level := 1
	}, [{paragraph, _, <<"Arabic (decimal) numbered list item.">>, _}], _}]
		= parse(<<". Arabic (decimal) numbered list item.">>),
	[{list_item, #{
		type := numbered,
		level := 2
	}, [{paragraph, _, <<"Lower case alpha (letter) numbered list item.">>, _}], _}]
		= parse(<<".. Lower case alpha (letter) numbered list item.">>),
	[{list_item, #{
		type := numbered,
		level := 3
	}, [{paragraph, _, <<"Lower case roman numbered list item.">>, _}], _}]
		= parse(<<"... Lower case roman numbered list item.">>),
	[{list_item, #{
		type := numbered,
		level := 4
	}, [{paragraph, _, <<"Upper case alpha (letter) numbered list item.">>, _}], _}]
		= parse(<<".... Upper case alpha (letter) numbered list item.">>),
	[{list_item, #{
		type := numbered,
		level := 5
	}, [{paragraph, _, <<"Upper case roman numbered list item.">>, _}], _}]
		= parse(<<"..... Upper case roman numbered list item.">>),
	%% Two list items one after the other.
	[
		{list_item, #{type := numbered, level := 1},
			[{paragraph, _, <<"List item 1.">>, _}], _},
		{list_item, #{type := numbered, level := 1},
			[{paragraph, _, <<"List item 2.">>, _}], _}
	] = parse(<<". List item 1.\n. List item 2.">>),
	ok.
-endif.
533 | ||
%% Asciidoc User Guide 17.3
%%
%% The Asciidoc User Guide makes it sound like the
%% label must be hard on the left margin but we don't
%% enforce that to simplify the implementation.
%%
%% Labeled list item: a label followed by a :: ::: ::::
%% or ;; separator and the optional item contents.
labeled_list(St) ->
	Line0 = read_line(St),
	%% We can't match directly to find the list separator,
	%% we have to search for it.
	{Label0, Sep, ListItem0} = find_labeled_list(Line0),
	Label = trim(Label0),
	ListItem = trim(ListItem0),
	%% The label must not be empty. Label is already trimmed
	%% so we can compare it directly (the original code
	%% trimmed it a second time here, which was redundant).
	true = Label =/= <<>>,
	list_item(St, #{
		type => labeled,
		separator => Sep,
		label => Label
	}, ListItem).
553 | ||
%% Search a line for a labeled list separator, returning
%% {Label, Separator, ItemContents}. Crashes if none is found.
find_labeled_list(Line) ->
	find_labeled_list(Line, <<>>).

%% We don't have a final clause with an empty binary because
%% we want to crash if we don't find a labeled list.
%%
%% A separator either ends the line or is followed by
%% whitespace. Clause order matters: the end-of-line clauses
%% need fewer bytes and must be tried first.
find_labeled_list(<<"::">>, Acc) -> {Acc, <<"::">>, <<>>};
find_labeled_list(<<":::">>, Acc) -> {Acc, <<":::">>, <<>>};
find_labeled_list(<<"::::">>, Acc) -> {Acc, <<"::::">>, <<>>};
find_labeled_list(<<";;">>, Acc) -> {Acc, <<";;">>, <<>>};
find_labeled_list(<<"::", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<"::">>, R};
find_labeled_list(<<":::", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<":::">>, R};
find_labeled_list(<<"::::", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<"::::">>, R};
find_labeled_list(<<";;", C, R/bits>>, Acc) when ?IS_WS(C) -> {Acc, <<";;">>, R};
find_labeled_list(<<C, R/bits>>, Acc) -> find_labeled_list(R, <<Acc/binary, C>>).
568 | ||
-ifdef(TEST).
%% Labeled lists with the different separators, including
%% items whose contents start on the following line.
labeled_list_test() ->
	[{list_item, #{type := labeled, separator := <<"::">>, label := <<"Question">>},
		[{paragraph, _, <<"Answer!">>, _}], _}] = parse(<<"Question:: Answer!">>),
	[{list_item, #{type := labeled, separator := <<"::">>, label := <<"Question">>},
		[{paragraph, _, <<"Answer!">>, _}], _}] = parse(<<"Question::\n Answer!">>),
	%% Long snippet from the Asciidoc User Guide, minus literal paragraph.
	%% @todo Add the literal paragraph back once they are implemented.
	[
		{list_item, #{type := labeled, separator := <<"::">>, label := <<"In">>},
			[{paragraph, _, <<>>, _}], _},
		{list_item, #{type := labeled, separator := <<"::">>, label := <<"Lorem">>},
			[{paragraph, _, <<"Fusce euismod commodo velit.">>, _}], _},
		{list_item, #{type := labeled, separator := <<"::">>, label := <<"Ipsum">>},
			[{paragraph, _, <<"Vivamus fringilla mi eu lacus.">>, _}], _},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, _, <<"Vivamus fringilla mi eu lacus.">>, _}], _},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, _, <<"Donec eget arcu bibendum nunc consequat lobortis.">>, _}], _},
		{list_item, #{type := labeled, separator := <<"::">>, label := <<"Dolor">>},
			[{paragraph, _, <<"Donec eget arcu bibendum nunc consequat lobortis.">>, _}], _},
		{list_item, #{type := labeled, separator := <<";;">>, label := <<"Suspendisse">>},
			[{paragraph, _, <<"A massa id sem aliquam auctor.">>, _}], _},
		{list_item, #{type := labeled, separator := <<";;">>, label := <<"Morbi">>},
			[{paragraph, _, <<"Pretium nulla vel lorem.">>, _}], _},
		{list_item, #{type := labeled, separator := <<";;">>, label := <<"In">>},
			[{paragraph, _, <<"Dictum mauris in urna.">>, _}], _},
		{list_item, #{type := labeled, separator := <<":::">>, label := <<"Vivamus">>},
			[{paragraph, _, <<"Fringilla mi eu lacus.">>, _}], _},
		{list_item, #{type := labeled, separator := <<":::">>, label := <<"Donec">>},
			[{paragraph, _, <<"Eget arcu bibendum nunc consequat lobortis.">>, _}], _}
	] = parse(<<
		"In::\n"
		"Lorem::\n"
		" Fusce euismod commodo velit.\n"
		%% @todo Add literal paragraph back here.
		"Ipsum:: Vivamus fringilla mi eu lacus.\n"
		" * Vivamus fringilla mi eu lacus.\n"
		" * Donec eget arcu bibendum nunc consequat lobortis.\n"
		"Dolor::\n"
		" Donec eget arcu bibendum nunc consequat lobortis.\n"
		" Suspendisse;;\n"
		" A massa id sem aliquam auctor.\n"
		" Morbi;;\n"
		" Pretium nulla vel lorem.\n"
		" In;;\n"
		" Dictum mauris in urna.\n"
		" Vivamus::: Fringilla mi eu lacus.\n"
		" Donec::: Eget arcu bibendum nunc consequat lobortis.\n">>),
	ok.
-endif.
620 | ||
%% Asciidoc User Guide 20
%%
%% Not implemented yet; the throw makes oneof/2 move on
%% to the next parser.
-spec callout_list(_) -> no_return().
callout_list(St) -> throw({not_implemented, St}). %% @todo
624 | ||
%% Asciidoc User Guide 17
%%
%% We do not apply rules about blocks being contained in
%% the list item at this stage of parsing. We only concern
%% ourselves with identifying blocks, and then another pass
%% will build a tree from the result of this pass.
%%
%% Attrs describes the kind of list item (type/level or
%% type/separator/label); ListItem0 is the contents found
%% on the marker line itself.
list_item(St, Attrs, ListItem0) ->
	ListItem1 = trim(ListItem0),
	Ann = ann(St),
	%% For labeled lists, we may need to skip empty lines
	%% until the start of the list item contents, since
	%% it can begin on a separate line from the label.
	_ = case {ListItem1, Attrs} of
		{<<>>, #{type := labeled}} ->
			read_while(St, fun skip_empty_lines/1, <<>>);
		_ ->
			ok
	end,
	%% A list item ends on end of file, empty line or when a new list starts.
	%% Any indentation is optional and therefore removed.
	ListItem = read_while(St, fun fold_list_item/1, ListItem1),
	{list_item, Attrs, [{paragraph, #{}, ListItem, Ann}], Ann}.
647 | ||
%% Fold fun for read_while/3: keep consuming lines for as
%% long as they are empty; stop on eof or actual content.
skip_empty_lines(eof) ->
	done;
skip_empty_lines(Line) ->
	case trim(Line) =:= <<>> of
		true -> {more, <<>>};
		false -> done
	end.
655 | ||
%% Fold fun for read_while/3: accumulate lines into the
%% current list item until eof, an empty line, a list item
%% continuation, a comment line or a new list item begins.
fold_list_item(eof) ->
	done;
fold_list_item(Line0) ->
	case trim(Line0) of
		<<>> -> done;
		<<"+">> -> done;
		<<"//", _/bits >> -> done;
		<<"-", C, _/bits>> when ?IS_WS(C) -> done;
		<<"*", C, _/bits>> when ?IS_WS(C) -> done;
		<<"**", C, _/bits>> when ?IS_WS(C) -> done;
		<<"***", C, _/bits>> when ?IS_WS(C) -> done;
		<<"****", C, _/bits>> when ?IS_WS(C) -> done;
		<<"*****", C, _/bits>> when ?IS_WS(C) -> done;
		<<".", C, _/bits>> when ?IS_WS(C) -> done;
		<<"..", C, _/bits>> when ?IS_WS(C) -> done;
		<<"...", C, _/bits>> when ?IS_WS(C) -> done;
		<<"....", C, _/bits>> when ?IS_WS(C) -> done;
		<<".....", C, _/bits>> when ?IS_WS(C) -> done;
		Line ->
			%% A labeled list also ends the current item; we rely
			%% on find_labeled_list crashing when there is none.
			try find_labeled_list(Line) of
				{_, _, _} -> done
			catch _:_ ->
				{more, Line}
			end
	end.
681 | ||
-ifdef(TEST).
%% List item boundaries: new markers end the previous item,
%% plain continuation lines are folded into it.
list_item_test() ->
	[
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"List item.">>, _}], _},
		{list_item, #{type := bulleted, level := 2},
			[{paragraph, #{}, <<"List item.">>, _}], _},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"List item.">>, _}], _},
		{list_item, #{type := numbered, level := 1},
			[{paragraph, #{}, <<"List item.">>, _}], _},
		{list_item, #{type := numbered, level := 1},
			[{paragraph, #{}, <<"List item.">>, _}], _},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"List item.">>, _}], _}
	] = parse(<<
		"* List item.\n"
		"** List item.\n"
		"* List item.\n"
		" . List item.\n"
		" . List item.\n"
		"* List item.\n">>),
	%% Properly detect a labeled list.
	[
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"List item.\nMultiline.">>, _}], _},
		{list_item, #{type := labeled, label := <<"Question">>},
			[{paragraph, #{}, <<"Answer!">>, _}], _}
	] = parse(<<
		"* List item.\n"
		"Multiline.\n"
		"Question:: Answer!\n">>),
	ok.
-endif.
716 | ||
%% Asciidoc User Guide 17.7
list_item_continuation(St) ->
	%% Continuations are a single + hard against the left margin.
	<<$+, Whitespace/bits>> = read_line(St),
	<<>> = trim(Whitespace),
	{list_item_continuation, #{}, <<>>, ann(St)}.

-ifdef(TEST).
%% Trailing whitespace after the + is allowed.
list_item_continuation_test() ->
	[{list_item_continuation, _, _, _}] = parse(<<"+">>),
	[{list_item_continuation, _, _, _}] = parse(<<"+ ">>),
	[{list_item_continuation, _, _, _}] = parse(<<"+\n">>),
	ok.
-endif.
731 | ||
%% Asciidoc User Guide 16.2
%%
%% Listing blocks are delimited by lines of - characters
%% and their contents are not processed (verbatim).
listing_block(St) ->
	delimited_block(St, listing_block, $-, #{<<"subs">> => <<"verbatim">>}).

-ifdef(TEST).
listing_block_test() ->
	Block = <<
		"#include <stdio.h>\n"
		"\n"
		"int main() {\n"
		" printf(\"Hello World!\n\");\n"
		" exit(0);\n"
		"}">>,
	[{listing_block, _, Block, _}] = parse(<<
		"--------------------------------------\n",
		Block/binary, "\n"
		"--------------------------------------\n">>),
	ok.
-endif.
751 | ||
%% Asciidoc User Guide 16.3
%%
%% Literal blocks are delimited by lines of . characters
%% and their contents are not processed (verbatim).
literal_block(St) ->
	delimited_block(St, literal_block, $., #{<<"subs">> => <<"verbatim">>}).

-ifdef(TEST).
literal_block_test() ->
	Block = <<
		"Consul *necessitatibus* per id,\n"
		"consetetur, eu pro everti postulant\n"
		"homero verear ea mea, qui.">>,
	[{literal_block, _, Block, _}] = parse(<<
		"...................................\n",
		Block/binary, "\n"
		"...................................\n">>),
	ok.
-endif.
768 | ||
%% Asciidoc User Guide 16.4
%%
%% Sidebar blocks are delimited by lines of * characters.
%% No subs attribute: their contents go through the inline pass.
sidebar_block(St) ->
    delimited_block(St, sidebar_block, $*).

-ifdef(TEST).
sidebar_block_test() ->
    Block = <<
        "Any AsciiDoc SectionBody element (apart from\n"
        "SidebarBlocks) can be placed inside a sidebar.">>,
    [{sidebar_block, _, Block, _}] = parse(<<
        "************************************************\n",
        Block/binary, "\n"
        "************************************************\n">>),
    ok.
-endif.
784 | ||
%% Asciidoc User Guide 16.5
%%
%% Comment blocks are delimited by lines of / characters. They are
%% kept in the AST at this stage; dropping them is left to later
%% passes or translators.
comment_block(St) ->
    delimited_block(St, comment_block, $/).

-ifdef(TEST).
comment_block_test() ->
    Block = <<
        "CommentBlock contents are not processed by\n"
        "asciidoc(1).">>,
    [{comment_block, _, Block, _}] = parse(<<
        "//////////////////////////////////////////\n",
        Block/binary, "\n"
        "//////////////////////////////////////////\n">>),
    ok.
-endif.
800 | ||
%% Asciidoc User Guide 16.6
%%
%% Passthrough blocks are delimited by lines of + characters.
passthrough_block(St) ->
    delimited_block(St, passthrough_block, $+).

-ifdef(TEST).
passthrough_block_test() ->
    Block = <<
        "<table border=\"1\"><tr>\n"
        " <td>*Cell 1*</td>\n"
        " <td>*Cell 2*</td>\n"
        "</tr></table>">>,
    [{passthrough_block, _, Block, _}] = parse(<<
        "++++++++++++++++++++++++++++++++++++++\n",
        Block/binary, "\n"
        "++++++++++++++++++++++++++++++++++++++\n">>),
    ok.
-endif.
818 | ||
%% Asciidoc User Guide 16.7
%%
%% Quote blocks are delimited by lines of _ characters.
quote_block(St) ->
    delimited_block(St, quote_block, $_).

-ifdef(TEST).
quote_block_test() ->
    Block = <<
        "As he spoke there was the sharp sound of horses' hoofs and\n"
        "grating wheels against the curb, followed by a sharp pull at the\n"
        "bell. Holmes whistled.\n"
        "\n"
        "\"A pair, by the sound,\" said he. \"Yes,\" he continued, glancing\n"
        "out of the window. \"A nice little brougham and a pair of\n"
        "beauties. A hundred and fifty guineas apiece. There's money in\n"
        "this case, Watson, if there is nothing else.\"">>,
    [{quote_block, _, Block, _}] = parse(<<
        "____________________________________________________________________\n",
        Block/binary, "\n"
        "____________________________________________________________________\n">>),
    ok.
-endif.
840 | ||
%% Asciidoc User Guide 16.8
%%
%% Example blocks are delimited by lines of = characters.
example_block(St) ->
    delimited_block(St, example_block, $=).

-ifdef(TEST).
example_block_test() ->
    Block = <<
        "Qui in magna commodo, est labitur dolorum an. Est ne magna primis\n"
        "adolescens.">>,
    [{example_block, _, Block, _}] = parse(<<
        "=====================================================================\n",
        Block/binary, "\n"
        "=====================================================================\n">>),
    ok.
-endif.
856 | ||
%% Asciidoc User Guide 16
%%
%% Common implementation for all delimited blocks: an opening
%% delimiter line, free-form contents, a closing delimiter line.
delimited_block(St, Name, Char) ->
    delimited_block(St, Name, Char, #{}, <<Char, Char, Char, Char>>).

delimited_block(St, Name, Char, Attrs) ->
    delimited_block(St, Name, Char, Attrs, <<Char, Char, Char, Char>>).

%% Name is the AST node name, Char the delimiter character and
%% Four the four bytes a delimiter line must start with (normally
%% Char repeated four times; tables use <<"|===">>, see table/1).
delimited_block(St, Name, Char, Attrs, Four) ->
    %% A delimiter block begins by a series of four or more repeated characters.
    <<Four:4/binary, Line0/bits>> = read_line(St),
    Ann = ann(St),
    Line = trim(Line0, trailing),
    %% repeats/2 crashes unless the rest of the line is made of
    %% the delimiter character only, rejecting this parser.
    repeats(Line, Char),
    %% Get the content of the block as-is.
    Block = read_while(St, fun(L) -> fold_delimited_block(L, Four, Char) end, <<>>),
    %% Skip the trailing delimiter line.
    _ = read_line(St),
    {Name, Attrs, Block, Ann}.
875 | ||
%% Fold function deciding whether a line closes the delimited block.
%%
%% Accept eof as a closing delimiter so that an unterminated block
%% runs until the end of the file.
fold_delimited_block(eof, _, _) ->
    done;
fold_delimited_block(Line, Four, Char) ->
    %% A closing delimiter line starts with the same four characters
    %% as the opening line and contains nothing else but more of the
    %% delimiter character and trailing whitespace.
    case Line of
        <<Four:4/binary, Rest/bits>> ->
            case only_repeats(trim(Rest, trailing), Char) of
                true -> done;
                false -> {more, Line}
            end;
        _ ->
            {more, Line}
    end.

%% Boolean variant of repeats/2: true if the binary contains only
%% repetitions of Char. Using a predicate instead of the previous
%% try/catch around repeats/2 avoids exception-based control flow
%% that would also have swallowed unrelated errors (for example a
%% crash inside trim/2).
only_repeats(<<>>, _) -> true;
only_repeats(<<C, Rest/bits>>, C) -> only_repeats(Rest, C);
only_repeats(_, _) -> false.
892 | ||
-ifdef(TEST).
delimited_block_test() ->
    %% Confirm that the block ends at eof.
    %%
    %% We see an extra line break because asciideck_line_reader adds
    %% one at the end of every file to ease processing.
    [{listing_block, _, <<"Hello!\n\n">>, _}] = parse(<<
        "----\n"
        "Hello!\n">>),
    %% Same without a trailing line break.
    %%
    %% We also see an extra line break for the aforementioned reasons.
    [{listing_block, _, <<"Hello!\n">>, _}] = parse(<<
        "----\n"
        "Hello!">>),
    ok.
-endif.
910 | ||
%% Asciidoc User Guide 16.10
%%
%% Open blocks are not implemented yet. Throwing makes oneof/2
%% backtrack and try the next candidate parser.
-spec open_block(_) -> no_return().
open_block(St) -> throw({not_implemented, St}). %% @todo
914 | ||
%% Asciidoc User Guide 23
%%
%% We do not parse the table in this pass. Instead we
%% treat it like any other delimited block. The delimiter
%% line starts with |=== and the remainder of the line must
%% repeat the = character.
table(St) ->
    delimited_block(St, table, $=, #{}, <<"|===">>).

-ifdef(TEST).
table_test() ->
    Block = <<
        "|1 |2 |A\n"
        "|3 |4 |B\n"
        "|5 |6 |C">>,
    [{table, _, Block, _}] = parse(<<
        "|=======\n",
        Block/binary, "\n"
        "|=======\n">>),
    ok.
-endif.
934 | ||
%% Asciidoc User Guide 28
%%
%% Attribute entries are not implemented yet. Throwing makes
%% oneof/2 backtrack and try the next candidate parser.
-spec attribute_entry(_) -> no_return().
attribute_entry(St) -> throw({not_implemented, St}). %% @todo
938 | ||
%% Asciidoc User Guide 14, 29
%%
%% An attribute list is a line of the form [attr1, attr2, ...].
%% Parsing of the bracketed contents themselves is delegated to
%% asciideck_attributes_parser.
attribute_list(St) ->
    attribute_list(St, read_line(St)).

attribute_list(St, Line) ->
    %% Remove the enclosing square brackets; a missing bracket
    %% crashes this parser so that another one may be tried.
    <<$[, Rest0/bits>> = Line,
    Rest = trim(Rest0),
    $] = binary:last(Rest),
    Inner = binary:part(Rest, 0, byte_size(Rest) - 1),
    Attrs = asciideck_attributes_parser:parse(Inner),
    {attribute_list, Attrs, <<>>, ann(St)}.
952 | ||
-ifdef(TEST).
attribute_list_test() ->
    %% A single unnamed attribute appears both at position 0 and 1.
    [{attribute_list, #{0 := <<"Hello">>, 1 := <<"Hello">>}, <<>>, _}]
        = parse(<<"[Hello]">>),
    [{attribute_list, #{
        1 := <<"quote">>,
        2 := <<"Bertrand Russell">>,
        3 := <<"The World of Mathematics (1956)">>
    }, <<>>, _}]
        = parse(<<"[quote, Bertrand Russell, The World of Mathematics (1956)]">>),
    [{attribute_list, #{
        1 := <<"22 times">>,
        <<"backcolor">> := <<"#0e0e0e">>,
        <<"options">> := <<"noborders,wide">>
    }, <<>>, _}]
        = parse(<<"[\"22 times\", backcolor=\"#0e0e0e\", options=\"noborders,wide\"]">>),
    %% Double quotes inside an unquoted positional attribute are
    %% kept as-is. (The quotes here are escaped for Erlang syntax;
    %% the source previously contained raw " HTML entities,
    %% which do not compile.)
    [{attribute_list, #{
        1 := <<"A footnote, \"with an image\" image:smallnew.png[]">>
    }, <<>>, _}]
        = parse(<<"[A footnote, \"with an image\" image:smallnew.png[]]">>),
    ok.
-endif.
975 | ||
%% Asciidoc User Guide 12
block_title(St) ->
    %% A block title line begins with a period and is followed by the title text.
    <<$., Title0/bits>> = read_line(St),
    Ann = ann(St),
    Title = trim(Title0),
    {block_title, #{}, Title, Ann}.

-ifdef(TEST).
block_title_test() ->
    %% Valid. Trailing whitespace is stripped from the title.
    [{block_title, _, <<"Notes">>, _}] = parse(<<".Notes">>),
    [{block_title, _, <<"Notes">>, _}] = parse(<<".Notes ">>),
    %% Invalid. NOTE(review): block_title/1 by itself would accept
    %% ". Notes" (trim/1 strips the leading space), so this assertion
    %% presumably relies on another parser (e.g. numbered list items)
    %% matching first in the oneof/2 ordering — confirm against the
    %% parser list, which is not visible here.
    ?NOT(block_title, parse(<<". Notes">>)),
    ok.
-endif.
993 | ||
%% Asciidoc User Guide 15.2
%%
%% Literal paragraphs are not implemented yet. Throwing makes
%% oneof/2 backtrack and try the next candidate parser.
-spec literal_para(_) -> no_return().
literal_para(St) -> throw({not_implemented, St}). %% @todo

%% Asciidoc User Guide 15.4
%%
%% Admonition paragraphs are not implemented yet.
-spec admonition_para(_) -> no_return().
admonition_para(St) -> throw({not_implemented, St}). %% @todo
1001 | ||
%% Asciidoc User Guide 15.1
%%
%% Normal paragraphs are the catch-all: they always match, so the
%% block parser as a whole can never fail.
para(St) ->
    %% Paragraph must be hard against the left margin.
    <<C, _/bits>> = Para0 = read_line(St),
    Ann = ann(St),
    %% @todo Uncomment this line once everything else has been implemented.
    _ = ?IS_WS(C), % false = ?IS_WS(C),
    Para1 = trim(Para0),
    %% Paragraph ends at blank line, end of file or start of delimited block or list.
    Para = read_while(St, fun fold_para/1, Para1),
    {paragraph, #{}, Para, Ann}.
1013 | ||
%% Decide whether the next line still belongs to the paragraph.
fold_para(eof) ->
    done;
fold_para(Line0) ->
    %% A blank line or a list item continuation (a lone +) ends
    %% the paragraph. Accepted lines are accumulated trimmed.
    %% @todo Detect delimited block or list.
    case trim(Line0) of
        Stop when Stop =:= <<>>; Stop =:= <<"+">> ->
            done;
        Line ->
            {more, Line}
    end.
1023 | ||
1024 | -ifdef(TEST). | |
1025 | para_test() -> | |
1026 | LoremIpsum = << | |
1027 | "Lorem ipsum dolor sit amet, consectetur adipiscing elit,\n" | |
1028 | "sed do eiusmod tempor incididunt ut labore et dolore\n" | |
1029 | "magna aliqua. Ut enim ad minim veniam, quis nostrud\n" | |
1030 | "exercitation ullamco laboris nisi ut aliquip ex ea\n" | |
1031 | "commodo consequat. Duis aute irure dolor in reprehenderit\n" | |
1032 | "in voluptate velit esse cillum dolore eu fugiat nulla\n" | |
1033 | "pariatur. Excepteur sint occaecat cupidatat non proident,\n" | |
1034 | "sunt in culpa qui officia deserunt mollit anim id est laborum." | |
1035 | >>, | |
1036 | %% Paragraph followed by end of file. | |
1037 | [{paragraph, _, LoremIpsum, _}] = parse(<< LoremIpsum/binary, "\n">>), | |
1038 | %% Paragraph followed by end of file with no trailing line break.. | |
1039 | [{paragraph, _, LoremIpsum, _}] = parse(LoremIpsum), | |
1040 | %% Two paragraphs. | |
1041 | [{paragraph, _, LoremIpsum, _}, {paragraph, _, LoremIpsum, _}] | |
1042 | = parse(<< | |
1043 | LoremIpsum/binary, | |
1044 | "\n\n", | |
1045 | LoremIpsum/binary >>), | |
1046 | ok. | |
1047 | -endif. | |
1048 | ||
%% Control functions.

%% Try each parser in turn until one succeeds. Parsers only write
%% the happy path and signal failure by crashing (Parsec-style);
%% on a crash we rewind the reader and try the next parser.
oneof([], St) ->
    throw({error, St}); %% @todo
oneof([Parse|Tail], St=#state{reader=ReaderPid}) ->
    Ln = asciideck_line_reader:get_position(ReaderPid),
    try
        Parse(St)
    catch _:_ ->
        %% Backtrack to the position saved before the attempt.
        asciideck_line_reader:set_position(ReaderPid, Ln),
        oneof(Tail, St)
    end.
1061 | ||
%% Apply a parser repeatedly, discarding its results, until it
%% fails. Always returns ok: matching zero times is acceptable.
skip(Parse, St=#state{reader=ReaderPid}) ->
    Ln = asciideck_line_reader:get_position(ReaderPid),
    try
        _ = Parse(St),
        skip(Parse, St)
    catch _:_ ->
        %% Rewind past the failed attempt only; everything matched
        %% before it stays consumed.
        asciideck_line_reader:set_position(ReaderPid, Ln),
        ok
    end.
1071 | ||
%% Line functions.

%% Read the next line from the reader process; returns eof at
%% the end of the document.
read_line(#state{reader=ReaderPid}) ->
    asciideck_line_reader:read_line(ReaderPid).
1076 | ||
%% Accumulate lines for as long as Fun returns {more, Line}.
%% When Fun returns done, the reader is rewound so the rejected
%% line can be consumed by the next parser, and the accumulated
%% binary (lines joined with \n) is returned.
read_while(St=#state{reader=ReaderPid}, Fun, Acc) ->
    Pos = asciideck_line_reader:get_position(ReaderPid),
    case Fun(read_line(St)) of
        done ->
            asciideck_line_reader:set_position(ReaderPid, Pos),
            Acc;
        {more, Line} when Acc =:= <<>> ->
            read_while(St, Fun, Line);
        {more, Line} ->
            read_while(St, Fun, <<Acc/binary, $\n, Line/binary>>)
    end.
1089 | ||
%% Annotations attached to every AST node. Currently only the
%% line number at the reader's current position.
ann(#state{reader=ReaderPid}) ->
    #{line => asciideck_line_reader:get_position(ReaderPid)}.
1092 | ||
%% Strip whitespace (space, tab, CR, LF) from both ends of a line.
trim(Line) ->
    trim(Line, both).

%% Strip whitespace from the requested end(s) of a line.
%% Implemented as a plain binary scan instead of the previous
%% re:replace/4 call: trimming runs for every line parsed and a
%% regex had to be recompiled and executed on each call.
trim(Line, both) ->
    trim_leading(trim_trailing(Line));
trim(Line, trailing) ->
    trim_trailing(Line).

trim_leading(<<C, Rest/bits>>) when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
    trim_leading(Rest);
trim_leading(Line) ->
    Line.

trim_trailing(Line) ->
    trim_trailing(Line, byte_size(Line)).

%% Scan backwards from the end; Size is the number of bytes kept.
trim_trailing(_, 0) ->
    <<>>;
trim_trailing(Line, Size) ->
    case binary:at(Line, Size - 1) of
        C when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
            trim_trailing(Line, Size - 1);
        _ ->
            binary:part(Line, 0, Size)
    end.
1102 | ||
%% Succeed (return ok) only if the binary consists entirely of
%% repetitions of the character C. There is deliberately no
%% catch-all clause: any other byte crashes with function_clause,
%% which callers rely on to reject a candidate parse.
repeats(<<>>, _) -> ok;
repeats(<<C, Rest/bits>>, C) -> repeats(Rest, C).
1105 | ||
%% Split a binary at the first byte for which Fun returns false.
%% Returns {Taken, Rest} where Taken is the accepted prefix.
while(Fun, Bin) ->
    while(Bin, Fun, <<>>).

while(<<>>, _, Taken) ->
    {Taken, <<>>};
while(Bin = <<Byte, Rest/bits>>, Fun, Taken) ->
    case Fun(Byte) of
        true -> while(Rest, Fun, <<Taken/binary, Byte>>);
        false -> {Taken, Bin}
    end.
0 | %% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | %% This pass walks over the tree and parses inline elements. | |
15 | -module(asciideck_inline_pass). | |
16 | ||
17 | -export([run/1]). | |
18 | ||
19 | -import(asciideck_block_parser, [trim/1, while/2]). | |
20 | ||
21 | -type inline_ast() :: list(). %% @todo | |
22 | -export_type([inline_ast/0]). | |
23 | ||
%% Walk the tree and apply inline formatting to text content.
%% Clause order matters: the verbatim clause must come before
%% the generic tuple clauses.
run([]) ->
    [];
%% Bare binaries (already-extracted text) are formatted directly.
run([Data|Tail]) when is_binary(Data) ->
    [inline(Data)|run(Tail)];
%% We do not do any inline formatting for verbatim blocks,
%% for example listing blocks.
%%
%% @todo subs is a list of values.
run([Item={_, #{<<"subs">> := <<"verbatim">>}, _, _}|Tail]) ->
    [Item|run(Tail)];
%% Labeled lists' labels can also have inline formatting.
run([{Type, Attrs=#{label := Label}, Items, Ann}|Tail]) when is_list(Items) ->
    [{Type, Attrs#{label => inline(Label)}, run(Items), Ann}|run(Tail)];
%% Elements with child lists are recursed into; leaf elements
%% have their binary contents formatted.
run([{Type, Attrs, Items, Ann}|Tail]) when is_list(Items) ->
    [{Type, Attrs, run(Items), Ann}|run(Tail)];
run([{Type, Attrs, Data, Ann}|Tail]) ->
    [{Type, Attrs, inline(Data), Ann}|run(Tail)].
41 | ||
%% We reduce inline content with a single text element
%% with no formatting to a simple binary. The result is
%% therefore either a binary or a list of elements.
inline(<<>>) ->
    <<>>;
inline(Data) ->
    case inline(Data, <<>>, []) of
        [] -> <<>>;
        [Text] when is_binary(Text) -> Text;
        AST -> AST
    end.
52 | ||
%% Main inline loop. BinAcc accumulates plain text bytes until an
%% inline element is recognized; Acc holds the output in reverse.
-spec inline(binary(), binary(), inline_ast()) -> inline_ast().
inline(<<>>, <<>>, Acc) ->
    lists:reverse(Acc);
inline(<<>>, BinAcc, Acc) ->
    lists:reverse([BinAcc|Acc]);
inline(Data, BinAcc, Acc) ->
    %% The candidate inline parsers, tried in this order.
    oneof(Data, BinAcc, Acc, [
        %% Links.
        fun xref/2,
        fun link/2,
        fun http_link/2,
        fun https_link/2,
        %% Quoted text.
        fun emphasized_single_quote/2,
        fun emphasized_underline/2,
        fun strong/2,
        %% Passthrough macros.
        fun inline_literal_passthrough/2
    ]).
72 | ||
73 | %% The inline pass replaces \r\n and \n with a simple space | |
74 | %% when it occurs within normal text. | |
75 | oneof(<<$\r, $\n, Rest/bits>>, BinAcc, Acc, []) -> | |
76 | inline(Rest, <<BinAcc/binary, $\s>>, Acc); | |
77 | oneof(<<$\n, Rest/bits>>, BinAcc, Acc, []) -> | |
78 | inline(Rest, <<BinAcc/binary, $\s>>, Acc); | |
79 | oneof(<<C, Rest/bits>>, BinAcc, Acc, []) -> | |
80 | inline(Rest, <<BinAcc/binary, C>>, Acc); | |
81 | oneof(Data, BinAcc, Acc, [Parse|Tail]) -> | |
82 | Prev = case BinAcc of | |
83 | <<>> -> undefined; | |
84 | _ -> binary:last(BinAcc) | |
85 | end, | |
86 | try Parse(Data, Prev) of | |
87 | {ok, Inline, Rest} when BinAcc =:= <<>> -> | |
88 | inline(Rest, BinAcc, [Inline|Acc]); | |
89 | {ok, Inline, Rest} -> | |
90 | inline(Rest, <<>>, [Inline, BinAcc|Acc]); | |
91 | {skip, Text, Rest} -> | |
92 | oneof(Rest, <<BinAcc/binary, Text/binary>>, Acc, Tail) | |
93 | catch _:_ -> | |
94 | oneof(Data, BinAcc, Acc, Tail) | |
95 | end. | |
96 | ||
-ifdef(TEST).
text_test() ->
    %% Text without any inline element stays a simple binary.
    <<>> = inline(<<>>),
    <<"Hello, Robert">> = inline(<<"Hello, Robert">>),
    ok.
-endif.
103 | ||
104 | -define(IS_BOUNDARY(C), C =:= undefined; C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n; C =:= $(). | |
105 | ||
%% Asciidoc User Guide 21.2.1
%%
%% xref:id[caption] internal cross-reference macro.
%% We currently do not implement the <<...>> form.
xref(<<"xref:", IDAndCaption/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
    %% ID must not contain whitespace characters.
    {ID, <<"[", Caption0/bits>>} = while(fun(C) ->
        (C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
    end, IDAndCaption),
    %% It is followed by a caption.
    {Caption1, <<"]", Rest/bits>>} = while(fun(C) ->
        C =/= $]
    end, Caption0),
    Caption = trim(Caption1),
    {ok, {xref, #{
        id => ID
    }, Caption, inline}, Rest}.

-ifdef(TEST).
xref_test() ->
    [{xref, #{
        id := <<"tiger_image">>
    }, <<"face of a tiger">>, _}] = inline(<<"xref:tiger_image[face of a tiger]">>),
    ok.
-endif.
130 | ||
%% Asciidoc User Guide 21.1.3
%%
%% link:target[caption] macro. The caption is mandatory here;
%% captionless direct links are handled by direct_link/2.
link(<<"link:", TargetAndCaption/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
    %% Target must not contain whitespace characters.
    {Target, <<"[", Caption0/bits>>} = while(fun(C) ->
        (C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
        andalso (C =/= $\r) andalso (C =/= $\n)
    end, TargetAndCaption),
    %% It is followed by a caption.
    {Caption1, <<"]", Rest/bits>>} = while(fun(C) ->
        C =/= $]
    end, Caption0),
    Caption = trim(Caption1),
    {ok, {link, #{
        target => Target
    }, Caption, inline}, Rest}.

-ifdef(TEST).
link_test() ->
    [{link, #{
        target := <<"downloads/foo.zip">>
    }, <<"download foo.zip">>, _}] = inline(<<"link:downloads/foo.zip[download foo.zip]">>),
    [{link, #{
        target := <<"chapter1.asciidoc#fragment">>
    }, <<"Chapter 1.">>, _}] = inline(<<"link:chapter1.asciidoc#fragment[Chapter 1.]">>),
    %% The line break between the two links becomes a space.
    [
        {link, #{target := <<"first.zip">>}, <<"first">>, _},
        <<", ">>,
        {link, #{target := <<"second.zip">>}, <<"second">>, _}
    ] = inline(<<"link:first.zip[first],\nlink:second.zip[second]">>),
    ok.
-endif.
162 | ||
%% Asciidoc User Guide 21.1.3
%%
%% Direct http: link. The matched prefix is passed along so that
%% direct_link/2 can rebuild the full target.
http_link(<<"http:", Rest/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
    direct_link(Rest, <<"http:">>).
166 | ||
%% Common implementation for http:/https: direct links.
%% Prefix is the scheme already consumed by the caller.
direct_link(Data, Prefix) ->
    %% Target must not contain whitespace characters.
    {Target0, Rest0} = while(fun(C) ->
        (C =/= $[) andalso (C =/= $\s) andalso (C =/= $\t)
        andalso (C =/= $\r) andalso (C =/= $\n)
    end, Data),
    Target = <<Prefix/binary, Target0/binary>>,
    %% It is optionally followed by a caption. Otherwise
    %% the link itself is the caption.
    case Rest0 of
        <<"[", Caption0/bits>> ->
            {Caption1, <<"]", Rest/bits>>} = while(fun(C) ->
                C =/= $]
            end, Caption0),
            Caption = trim(Caption1),
            {ok, {link, #{
                target => Target
            }, Caption, inline}, Rest};
        _ ->
            {ok, {link, #{
                target => Target
            }, Target, inline}, Rest0}
    end.
190 | ||
-ifdef(TEST).
http_link_test() ->
    %% Captionless link: the target doubles as the caption.
    [
        <<"If you have ">>,
        {link, #{
            target := <<"http://example.org/hello#fragment">>
        }, <<"http://example.org/hello#fragment">>, _},
        <<" then:">>
    ] = inline(<<"If you have http://example.org/hello#fragment then:">>),
    %% A line break ends the target and becomes a space.
    [
        <<"If you have ">>,
        {link, #{
            target := <<"http://example.org/hello#fragment">>
        }, <<"http://example.org/hello#fragment">>, _},
        <<" then:">>
    ] = inline(<<"If you have http://example.org/hello#fragment\nthen:">>),
    %% Explicit caption.
    [
        <<"Oh, ">>,
        {link, #{
            target := <<"http://example.org/hello#fragment">>
        }, <<"hello there">>, _},
        <<", young lad.">>
    ] = inline(<<"Oh, http://example.org/hello#fragment[hello there], young lad.">>),
    ok.
-endif.
216 | ||
%% Asciidoc User Guide 21.1.3
%%
%% Direct https: link; same handling as http_link/2.
https_link(<<"https:", Rest/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
    direct_link(Rest, <<"https:">>).

-ifdef(TEST).
https_link_test() ->
    [
        <<"If you have ">>,
        {link, #{
            target := <<"https://example.org/hello#fragment">>
        }, <<"https://example.org/hello#fragment">>, _},
        <<" then:">>
    ] = inline(<<"If you have https://example.org/hello#fragment then:">>),
    [
        <<"If you have ">>,
        {link, #{
            target := <<"https://example.org/hello#fragment">>
        }, <<"https://example.org/hello#fragment">>, _},
        <<" then:">>
    ] = inline(<<"If you have https://example.org/hello#fragment\nthen:">>),
    [
        <<"Oh, ">>,
        {link, #{
            target := <<"https://example.org/hello#fragment">>
        }, <<"hello there">>, _},
        <<", young lad.">>
    ] = inline(<<"Oh, https://example.org/hello#fragment[hello there], young lad.">>),
    ok.
-endif.
246 | ||
%% Asciidoc User Guide 10.1
%% @todo <<"\\**"
%% @todo <<"\\*"
%% @todo <<"**"
%%
%% Thin wrappers selecting the element type and the delimiter
%% characters for each kind of quoted text.
emphasized_single_quote(Data, Prev) ->
    quoted_text(Data, Prev, emphasized, $', $').
emphasized_underline(Data, Prev) ->
    quoted_text(Data, Prev, emphasized, $_, $_).
strong(Data, Prev) ->
    quoted_text(Data, Prev, strong, $*, $*).

%% Quoted text must start at a word boundary and runs until the
%% first Right character; a missing Right crashes the parser
%% (backtracking). The contents are themselves inline-formatted.
quoted_text(<<Left, Rest0/bits>>, Prev, Type, Left, Right) when ?IS_BOUNDARY(Prev) ->
    {Content, <<Right, Rest/bits>>} = while(fun(C) -> C =/= Right end, Rest0),
    {ok, {Type, #{
        left => Left,
        right => Right
    }, inline(Content), inline}, Rest}.
264 | ||
-ifdef(TEST).
emphasized_test() ->
    %% Both ' and _ delimiters produce emphasized elements; the
    %% delimiter used is recorded in the left/right attributes.
    [
        <<"Word phrases ">>,
        {emphasized, #{left := $', right := $'},
            <<"enclosed in single quote characters">>, _},
        <<" (acute accents) or ">>,
        {emphasized, #{left := $_, right := $_},
            <<"underline characters">>, _},
        <<" are emphasized.">>
    ] = inline(<<
        "Word phrases 'enclosed in single quote characters' (acute accents) "
        "or _underline characters_ are emphasized."
    >>),
    ok.

strong_test() ->
    [
        <<"Word phrases ">>,
        {strong, #{left := $*, right := $*},
            <<"enclosed in asterisk characters">>, _},
        <<" are rendered in a strong font (usually bold).">>
    ] = inline(<<
        "Word phrases *enclosed in asterisk characters* "
        "are rendered in a strong font (usually bold)."
    >>),
    ok.
-endif.
293 | ||
%% Asciidoc User Guide 21.4
%%
%% Backtick-delimited literal text. Contrary to quoted text, the
%% contents are kept as-is (no recursive inline/1 call).
inline_literal_passthrough(<<"`", Rest0/bits>>, Prev) when ?IS_BOUNDARY(Prev) ->
    {Content, <<"`", Rest/bits>>} = while(fun(C) -> C =/= $` end, Rest0),
    {ok, {inline_literal_passthrough, #{}, Content, inline}, Rest}.

-ifdef(TEST).
inline_literal_passthrough_test() ->
    [
        <<"Word phrases ">>,
        {inline_literal_passthrough, #{}, <<"enclosed in backtick characters">>, _},
        <<" (grave accents)...">>
    ] = inline(<<"Word phrases `enclosed in backtick characters` (grave accents)...">>),
    ok.
-endif.
307 | -endif. |
0 | %% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
%% Process holding the lines of the document being parsed.
%% Parsers read lines one at a time and can save and restore
%% the current position, which is how backtracking is done.
-module(asciideck_line_reader).
-behaviour(gen_server).

%% API.
-export([start_link/1]).
-export([read_line/1]).
-export([get_position/1]).
-export([set_position/2]).

%% gen_server.
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).

-record(state, {
    %% Lines are kept in a tuple so that access to any position
    %% is O(1) via element/2. The previous list representation
    %% required an O(Pos) lists:nth/2 on every read, making a
    %% full sequential read quadratic in the number of lines.
    lines :: tuple(),
    length :: non_neg_integer(),
    pos = 1 :: pos_integer()
}).

%% API.

%% Start a reader holding the given document contents.
-spec start_link(binary()) -> {ok, pid()}.
start_link(Data) ->
    gen_server:start_link(?MODULE, [Data], []).

%% Return the line at the current position and advance,
%% or eof past the last line.
-spec read_line(pid()) -> binary() | eof.
read_line(Pid) ->
    gen_server:call(Pid, read_line).

%% @todo peek_line

%% Return the current position (1-based line number).
-spec get_position(pid()) -> pos_integer().
get_position(Pid) ->
    gen_server:call(Pid, get_position).

%% Move the reader to the given position (used for backtracking).
-spec set_position(pid(), pos_integer()) -> ok.
set_position(Pid, Pos) ->
    gen_server:cast(Pid, {set_position, Pos}).

%% gen_server.

init([Data]) ->
    Lines0 = binary:split(Data, <<"\n">>, [global]),
    %% We add an empty line at the end to simplify parsing.
    %% This has the inconvenient that when parsing blocks
    %% this empty line will be included in the result if
    %% the block is not properly closed.
    Lines = list_to_tuple(lists:append(Lines0, [<<>>])),
    {ok, #state{lines=Lines, length=tuple_size(Lines)}}.

handle_call(read_line, _From, State=#state{length=Length, pos=Pos})
        when Pos > Length ->
    {reply, eof, State};
handle_call(read_line, _From, State=#state{lines=Lines, pos=Pos}) ->
    %% element/2 on the tuple is constant time whatever Pos is.
    {reply, element(Pos, Lines), State#state{pos=Pos + 1}};
handle_call(get_position, _From, State=#state{pos=Pos}) ->
    {reply, Pos, State};
handle_call(_Request, _From, State) ->
    {reply, ignored, State}.

handle_cast({set_position, Pos}, State) ->
    {noreply, State#state{pos=Pos}};
handle_cast(_Msg, State) ->
    {noreply, State}.

handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
0 | %% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | %% The purpose of this pass is to aggregate list_item | |
15 | %% blocks into proper lists. This involves building a | |
16 | %% tree based on the rules for list items. | |
17 | %% | |
18 | %% The general rules are: | |
19 | %% | |
20 | %% - Any list item of different type/level than the | |
21 | %% current list item is a child of the latter. | |
22 | %% | |
23 | %% - The level ultimately does not matter when building | |
24 | %% the tree, * then **** then ** is accepted just fine. | |
25 | %% | |
26 | %% - Lists of the same type as a parent are not allowed. | |
27 | %% On the other hand reusing a type in different parts | |
28 | %% of the tree is not a problem. | |
29 | %% | |
30 | %% - Any literal paragraph following a list item is a | |
31 | %% child of that list item. @todo | |
32 | %% | |
33 | %% - Any other block can be included as a child by using | |
34 | %% list continuations. | |
35 | -module(asciideck_lists_pass). | |
36 | ||
37 | -export([run/1]). | |
38 | ||
%% Entry point of the pass.
run(AST) ->
    list(AST, []).

%% Group consecutive list_item elements into list elements;
%% all other blocks are kept as-is.
list([], Acc) ->
    lists:reverse(Acc);
%% Any trailing block continuation is ignored.
list([{list_item_continuation, _, _, _}], Acc) ->
    lists:reverse(Acc);
%% The first list item contains the attributes for the list.
list([LI={list_item, Attrs, _, Ann}|Tail0], Acc) ->
    {Items, Tail} = item(Tail0, LI, [type(Attrs)], []),
    list(Tail, [{list, Attrs, Items, Ann}|Acc]);
list([Block|Tail], Acc) ->
    list(Tail, [Block|Acc]).
53 | ||
54 | %% Bulleted/numbered list item of the same type. | |
55 | item([NextLI={list_item, #{type := T, level := L}, _, _}|Tail], | |
56 | CurrentLI={list_item, #{type := T, level := L}, _, _}, Parents, Acc) -> | |
57 | item(Tail, NextLI, Parents, [reverse_children(CurrentLI)|Acc]); | |
58 | %% Labeled list item of the same type. | |
59 | item([NextLI={list_item, #{type := T, separator := S}, _, _}|Tail], | |
60 | CurrentLI={list_item, #{type := T, separator := S}, _, _}, Parents, Acc) -> | |
61 | item(Tail, NextLI, Parents, [reverse_children(CurrentLI)|Acc]); | |
62 | %% Other list items are either parent or children lists. | |
63 | item(FullTail=[NextLI={list_item, Attrs, _, Ann}|Tail0], CurrentLI, Parents, Acc) -> | |
64 | case lists:member(type(Attrs), Parents) of | |
65 | %% We have a parent list item. This is the end of this child list. | |
66 | true -> | |
67 | {lists:reverse([reverse_children(CurrentLI)|Acc]), FullTail}; | |
68 | %% We have a child list item. This is the beginning of a new list. | |
69 | false -> | |
70 | {Items, Tail} = item(Tail0, NextLI, [type(Attrs)|Parents], []), | |
71 | item(Tail, add_child(CurrentLI, {list, Attrs, Items, Ann}), Parents, Acc) | |
72 | end; | |
73 | %% Ignore multiple contiguous list continuations. | |
74 | item([LIC={list_item_continuation, _, _, _}, | |
75 | {list_item_continuation, _, _, _}|Tail], CurrentLI, Parents, Acc) -> | |
76 | item([LIC|Tail], CurrentLI, Parents, Acc); | |
77 | %% Blocks that immediately follow list_item_continuation are children, | |
78 | %% unless they are list_item themselves in which case it depends on the | |
79 | %% type and level of the list item. | |
80 | item([{list_item_continuation, _, _, _}, LI={list_item, _, _, _}|Tail], CurrentLI, Parents, Acc) -> | |
81 | item([LI|Tail], CurrentLI, Parents, Acc); | |
82 | item([{list_item_continuation, _, _, _}, Block|Tail], CurrentLI, Parents, Acc) -> | |
83 | item(Tail, add_child(CurrentLI, Block), Parents, Acc); | |
84 | %% Anything else is the end of the list. | |
85 | item(Tail, CurrentLI, _, Acc) -> | |
86 | {lists:reverse([reverse_children(CurrentLI)|Acc]), Tail}. | |
87 | ||
%% A list's identity is the combination of its type, level and
%% separator attributes; all other attributes are irrelevant
%% when deciding whether two items belong to the same list.
type(Attrs) ->
	maps:filter(
		fun(Key, _) -> lists:member(Key, [type, level, separator]) end,
		Attrs).
90 | ||
%% Prepend a child block to a list item. Children are
%% accumulated head-first and flipped by reverse_children/1.
add_child({list_item, Attrs, Children0, Ann}, NewChild) ->
	Children = [NewChild|Children0],
	{list_item, Attrs, Children, Ann}.
93 | ||
%% Children were accumulated in reverse; restore document order.
reverse_children({list_item, Attrs, ChildrenRev, Ann}) ->
	{list_item, Attrs, lists:reverse(ChildrenRev), Ann}.
96 | ||
-ifdef(TEST).
%% Two sibling items of the same type/level form a single list.
list_test() ->
	[{list, #{type := bulleted, level := 1}, [
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"Hello!">>, _}], #{line := 1}},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"World!">>, _}], #{line := 2}}
	], #{line := 1}}] = run([
		{list_item, #{type => bulleted, level => 1},
			[{paragraph, #{}, <<"Hello!">>, #{line => 1}}], #{line => 1}},
		{list_item, #{type => bulleted, level => 1},
			[{paragraph, #{}, <<"World!">>, #{line => 2}}], #{line => 2}}
	]),
	ok.

%% A run of deeper-level items becomes a child list attached
%% to the item that immediately precedes it.
list_of_list_test() ->
	[{list, #{type := bulleted, level := 1}, [
		{list_item, #{type := bulleted, level := 1}, [
			{paragraph, #{}, <<"Hello!">>, _},
			{list, #{type := bulleted, level := 2}, [
				{list_item, #{type := bulleted, level := 2},
					[{paragraph, #{}, <<"Cat!">>, _}], #{line := 2}},
				{list_item, #{type := bulleted, level := 2},
					[{paragraph, #{}, <<"Dog!">>, _}], #{line := 3}}
			], #{line := 2}}
		], #{line := 1}},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"World!">>, _}], #{line := 4}}
	], #{line := 1}}] = run([
		{list_item, #{type => bulleted, level => 1},
			[{paragraph, #{}, <<"Hello!">>, #{line => 1}}], #{line => 1}},
		{list_item, #{type => bulleted, level => 2},
			[{paragraph, #{}, <<"Cat!">>, #{line => 2}}], #{line => 2}},
		{list_item, #{type => bulleted, level => 2},
			[{paragraph, #{}, <<"Dog!">>, #{line => 3}}], #{line => 3}},
		{list_item, #{type => bulleted, level => 1},
			[{paragraph, #{}, <<"World!">>, #{line => 4}}], #{line => 4}}
	]),
	ok.

%% A block following a list_item_continuation is attached as a
%% child of the preceding list item.
list_continuation_test() ->
	[{list, #{type := bulleted, level := 1}, [
		{list_item, #{type := bulleted, level := 1}, [
			{paragraph, #{}, <<"Hello!">>, _},
			{listing_block, #{}, <<"hello() -> world.">>, #{line := 3}}
		], #{line := 1}},
		{list_item, #{type := bulleted, level := 1},
			[{paragraph, #{}, <<"World!">>, _}], #{line := 6}}
	], #{line := 1}}] = run([
		{list_item, #{type => bulleted, level => 1},
			[{paragraph, #{}, <<"Hello!">>, #{line => 1}}], #{line => 1}},
		{list_item_continuation, #{}, <<>>, #{line => 2}},
		{listing_block, #{}, <<"hello() -> world.">>, #{line => 3}},
		{list_item, #{type => bulleted, level => 1},
			[{paragraph, #{}, <<"World!">>, #{line => 6}}], #{line => 6}}
	]),
	ok.
-endif.
0 | %% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | -module(asciideck_parser). | |
15 | ||
16 | -export([parse/2]). | |
17 | ||
18 | %% @todo | |
19 | %% All nodes in the AST are of type {Type, Attrs, Text | Nodes, Ann} | |
20 | %% except for text formatting nodes at the moment. Text formatting | |
21 | %% nodes will be converted to this form in a future change. | |
22 | ||
23 | %% Parsing occurs in a few passes: | |
24 | %% | |
25 | %% * p1: Line-based parsing of the raw Asciidoc document | |
%% * p2: Deal with more complex structures like lists and tables
27 | ||
%% Parse an Asciidoc document: split it into numbered lines,
%% run the line-based first pass, then the structural second
%% pass. Returns the resulting AST.
%% @todo Document header, if any. Recognized by the author info/doc attributes?
%% Alternatively, don't recognize it, and only use attribute entries for the same info.
parse(Data, St) ->
	RawLines = binary:split(Data, <<"\n">>, [global]),
	%% A trailing empty line guarantees every block is terminated.
	Padded = RawLines ++ [<<>>],
	Numbered = lists:zip(lists:seq(1, length(Padded)), Padded),
	p2(p1(Numbered, [], St), []).
37 | ||
38 | %% First pass. | |
39 | ||
40 | %% @todo When a block element is encountered asciidoc(1) determines the type of block by checking in the following order (first to last): (section) Titles, BlockMacros, Lists, DelimitedBlocks, Tables, AttributeEntrys, AttributeLists, BlockTitles, Paragraphs. | |
41 | ||
42 | %% @todo And this function is parsing, not p1ing. | |
%% First pass proper: one clause per recognizable line prefix,
%% most specific first; anything unrecognized falls through to
%% labeled-list/paragraph handling in p1_text/3.
p1([], AST, _St) ->
	lists:reverse(AST);
%% Extra empty lines.
p1([{_, <<>>}|Tail], AST, St) ->
	p1(Tail, AST, St);
%% Comments.
p1([{LN, <<"//", Comment/bits >>}|Tail], AST, St) ->
	p1(Tail, [comment(trim_ws(Comment), ann(LN, St))|AST], St);
%% Section titles. A one-line title must be followed by an empty line.
p1([{LN, <<"= ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
	p1_title_short(Tail, AST, St, LN, Title, 0);
p1([{LN, <<"== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
	p1_title_short(Tail, AST, St, LN, Title, 1);
p1([{LN, <<"=== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
	p1_title_short(Tail, AST, St, LN, Title, 2);
p1([{LN, <<"==== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
	p1_title_short(Tail, AST, St, LN, Title, 3);
p1([{LN, <<"===== ", Title/bits >>}, {_, <<>>}|Tail], AST, St) ->
	p1_title_short(Tail, AST, St, LN, Title, 4);
%% Block titles.
p1([{_LN, <<".", Title/bits >>}|Tail], AST, St) ->
	p1(Tail, [{block_title, Title}|AST], St);
%% Attribute lists.
p1([{_LN, <<"[", Attrs/bits >>}|Tail], AST, St) ->
	p1(Tail, [{attribute_list, p1_attr_list(Attrs)}|AST], St);
%% Listing blocks.
p1([{LN, <<"----", _/bits >>}|Tail], AST, St) ->
	p1_listing(Tail, AST, St, LN, []);
%% Lists. uliN is an unordered list item of level N.
p1([{LN, <<"* ", Text/bits >>}|Tail], AST, St) ->
	p1_li(Tail, AST, St, uli1, {LN, Text});
p1([{LN, <<"** ", Text/bits >>}|Tail], AST, St) ->
	p1_li(Tail, AST, St, uli2, {LN, Text});
p1([{LN, <<"*** ", Text/bits >>}|Tail], AST, St) ->
	p1_li(Tail, AST, St, uli3, {LN, Text});
p1([{LN, <<"**** ", Text/bits >>}|Tail], AST, St) ->
	p1_li(Tail, AST, St, uli4, {LN, Text});
p1([{LN, <<"***** ", Text/bits >>}|Tail], AST, St) ->
	p1_li(Tail, AST, St, uli5, {LN, Text});
%% Tables. "|===" delimits a table, "|" starts a row of cells.
p1([{LN, <<"|===", _/bits >>}|Tail], AST, St) ->
	p1_table(Tail, AST, St, LN);
p1([{LN, <<"|", Text/bits >>}|Tail], AST, St) ->
	p1_cell(Tail, AST, St, LN, Text);
%% Prefix-based or paragraph.
p1(Lines, AST, St) ->
	p1_text(Lines, AST, St).
90 | ||
%% One-line section title "== Title ==": the optional trailer
%% of matching "="s is stripped when present; either way the
%% trimmed text becomes a title node at the given level.
p1_title_short(Tail, AST, St, LN, Text0, Level) ->
	%% Remove the trailer, if any.
	Text1 = trim_ws(Text0),
	Trailer = case Level of
		0 -> <<" =">>;
		1 -> <<" ==">>;
		2 -> <<" ===">>;
		3 -> <<" ====">>;
		4 -> <<" =====">>
	end,
	TrailerSize = byte_size(Trailer),
	Size = byte_size(Text1) - TrailerSize,
	%% Only strip when the binary really ends with the trailer.
	Text3 = case Text1 of
		<< Text2:Size/binary, Trailer:TrailerSize/binary >> -> Text2;
		_ -> Text1
	end,
	Text = trim_ws(Text3),
	p1(Tail, [title(Text, #{level => Level}, ann(LN, St))|AST], St).
109 | ||
%% Everything up to the closing bracket is the attribute list;
%% it is then split once, on the first comma only (same
%% behavior as the original parser: at most two elements).
p1_attr_list(Raw) ->
	[Inside|_AfterBracket] = binary:split(Raw, <<"]">>),
	binary:split(Inside, <<",">>).
113 | ||
%% @todo Parse attributes properly.
%% "|===" either opens or closes a table: it closes one when
%% the most recent node is a cell (possibly behind a nl
%% marker), and opens one otherwise, folding any pending
%% attribute list and block title into the table attributes.
p1_table(Tail, [{attribute_list, Attrs}, {block_title, Title}|AST], St, LN) ->
	p1(Tail, [{begin_table, #{title => Title, todo => Attrs}, ann(LN, St)}|AST], St);
p1_table(Tail, [{attribute_list, Attrs}|AST], St, LN) ->
	p1(Tail, [{begin_table, #{todo => Attrs}, ann(LN, St)}|AST], St);
p1_table(Tail, AST=[nl, {cell, _, _, _}|_], St, _) ->
	p1(Tail, [end_table|AST], St);
p1_table(Tail, AST=[{cell, _, _, _}|_], St, _) ->
	p1(Tail, [end_table|AST], St);
p1_table(Tail, AST, St, LN) ->
	p1(Tail, [{begin_table, #{}, ann(LN, St)}|AST], St).
125 | ||
%% @todo Multiline cells.
%% @todo Styled cells.
%% @todo Strip whitespace at the beginning of the cell if on the same line.
%% Parse the remainder of a "|"-prefixed line into cell nodes.
%% Each cell's text is itself parsed through p1. A nl marker
%% ends the line (two nl markers when the next line is empty,
%% which lets p2 detect the end of the table body).
p1_cell(Tail=[{_, NextLine}|_], AST0, St, LN, Text) ->
	case p1_cell_split(Text, <<>>) of
		[Cell] ->
			AST1 = [nl, cell(p1([{LN, trim_ws(Cell)}, {LN, <<>>}], [], St), ann(LN, St))|AST0],
			AST = case NextLine of
				<<>> -> [nl|AST1];
				_ -> AST1
			end,
			p1(Tail, AST, St);
		[Cell, Rest] ->
			p1_cell(Tail, [cell(p1([{LN, trim_ws(Cell)}, {LN, <<>>}], [], St), ann(LN, St))|AST0], St, LN, Rest)
	end.
141 | ||
%% Split a table line at the first unescaped pipe. Returns
%% [Cell] when no separator remains, [Cell, Rest] otherwise.
%% An escaped pipe ("\\|") is kept as a literal "|".
p1_cell_split(<<>>, Cell) ->
	[Cell];
p1_cell_split(<< $\\, $|, Tail/bits >>, Cell) ->
	p1_cell_split(Tail, << Cell/binary, $| >>);
p1_cell_split(<< $|, Tail/bits >>, Cell) ->
	[Cell, Tail];
p1_cell_split(<< Ch, Tail/bits >>, Cell) ->
	p1_cell_split(Tail, << Cell/binary, Ch >>).
150 | ||
%% Accumulate lines into a listing block until the closing
%% "----" followed by an empty line. Pending block titles and
%% [source,Lang] attribute lists already on the AST are folded
%% into the listing's attributes. The [_|Acc] head drops the
%% trailing newline that was accumulated for the last line.
p1_listing([{_, <<"----", _/bits >>}, {_, <<>>}|Tail], AST0, St, LN, [_|Acc]) ->
	Text = iolist_to_binary(lists:reverse(Acc)),
	case AST0 of
		[{attribute_list, [<<"source">>, Lang]}, {block_title, Title}|AST] ->
			p1(Tail, [listing(Text, #{title => Title, language => Lang}, ann(LN, St))|AST], St);
		[{block_title, Title}, {attribute_list, [<<"source">>, Lang]}|AST] ->
			p1(Tail, [listing(Text, #{title => Title, language => Lang}, ann(LN, St))|AST], St);
		[{attribute_list, [<<"source">>, Lang]}|AST] ->
			p1(Tail, [listing(Text, #{language => Lang}, ann(LN, St))|AST], St);
		[{block_title, Title}|AST] ->
			p1(Tail, [listing(Text, #{title => Title}, ann(LN, St))|AST], St);
		AST ->
			p1(Tail, [listing(Text, #{}, ann(LN, St))|AST], St)
	end;
p1_listing([{_, Line}|Tail], AST, St, LN, Acc) ->
	p1_listing(Tail, AST, St, LN, [<<"\n">>, Line|Acc]).
167 | ||
%% Parse an unordered list item: glob the lines belonging to
%% the item, then parse them recursively as its contents.
p1_li(Lines, AST, St, Type, FirstLine = {LN, _}) ->
	{Tail, Glob} = p1_li_glob(Lines, []),
	p1(Tail, [{Type, p1([FirstLine|Glob], [], St), ann(LN, St)}|AST], St).
171 | ||
%% Glob lines into the current list item until the next list
%% item marker ("*"-prefixed) or an empty line is reached. A
%% lone "+" (list continuation) is turned into an empty line.
%% Returns {RemainingLines, GlobbedLines}; the globbed lines
%% are terminated with an empty line to simplify sub-parsing.
p1_li_glob(Rest = [{LineNum, << $*, _/bits >>}|_], Acc) ->
	{Rest, lists:reverse([{LineNum, <<>>}|Acc])};
p1_li_glob(Rest = [{LineNum, <<>>}|_], Acc) ->
	{Rest, lists:reverse([{LineNum, <<>>}|Acc])};
p1_li_glob([{LineNum, <<"+">>}|Rest], Acc) ->
	p1_li_glob(Rest, [{LineNum, <<>>}|Acc]);
p1_li_glob([Line|Rest], Acc) ->
	p1_li_glob(Rest, [Line|Acc]).
181 | ||
%% Skip initial empty lines and then glob like normal lists.
%% (Lines containing only spaces/tabs count as empty here.)
p1_ll_glob(Lines=[{_, Line}|Tail]) ->
	case trim_ws(Line) of
		<<>> -> p1_ll_glob(Tail);
		_ -> p1_ll_glob(Lines, [])
	end.
188 | ||
%% Glob everything until empty line.
%% @todo Detect next list.
%% Globs the lines of a labeled list item: stops at an empty
%% line or at the next "Label:: " line (detected by looking
%% for the ":: " separator). "+" continuations and indented
%% lines are normalized along the way.
p1_ll_glob(Tail = [{LN, <<>>}|_], Acc) ->
	{Tail, lists:reverse([{LN, <<>>}|Acc])};
p1_ll_glob([{LN, <<"+">>}|Tail], Acc) ->
	p1_ll_glob(Tail, [{LN, <<>>}|Acc]);
p1_ll_glob([{LN, <<" ", Line/bits>>}|Tail], Acc) ->
	p1_ll_glob([{LN, trim_ws(Line)}|Tail], Acc);
p1_ll_glob(Lines=[Line={LN, Text}|Tail], Acc) ->
	%% The appended space lets "Label::" at end-of-line match "::" + space.
	case binary:split(<< Text/binary, $\s >>, <<":: ">>) of
		[_, _] ->
			{Lines, lists:reverse([{LN, <<>>}|Acc])};
		_ ->
			p1_ll_glob(Tail, [Line|Acc])
	end.
204 | ||
205 | p1_text(Lines=[{LN, Line}|Tail], AST, St) -> | |
206 | case binary:split(<< Line/binary, $\s >>, <<":: ">>) of | |
207 | %% Nothing else on the line. | |
208 | [Label, <<>>] -> | |
209 | {Tail1, Glob} = p1_ll_glob(Tail), | |
210 | p1(Tail1, [{label, Label, p1(Glob, [], St), ann(LN, St)}|AST], St); | |
211 | %% Text on the same line. | |
212 | [Label, Text0] -> | |
213 | Size = byte_size(Text0) - 1, | |
214 | << Text:Size/binary, _ >> = Text0, | |
215 | {Tail1, Glob} = p1_ll_glob([{LN, Text}|Tail]), | |
216 | %% Text on the same line is necessarily a paragraph I believe. | |
217 | p1_p(Tail1, [{label, Label, p1(Glob, [], St), ann(LN, St)}|AST], St, LN, []); | |
218 | %% Not a labeled list. | |
219 | _ -> | |
220 | p1_maybe_p(Lines, AST, St) | |
221 | end. | |
222 | ||
%% @todo Literal paragraphs.
%% A line starting with a space must otherwise be whitespace
%% only (asserted by the match) and is skipped; anything else
%% starts a regular paragraph.
p1_maybe_p([{_LN, << " ", Line/bits >>}|Tail], AST, St) ->
	<<>> = trim_ws(Line),
	p1(Tail, AST, St);
p1_maybe_p(Lines=[{LN, _}|_], AST, St) ->
	p1_p(Lines, AST, St, LN, []).
229 | ||
%% Accumulate paragraph lines until an empty line, then emit a
%% paragraph node with inline formatting applied. The [_|Acc]
%% head drops the trailing space appended after the last line.
%% A pending block_title on the AST becomes the paragraph title.
p1_p([{_, <<>>}|Tail], AST0, St, LN, [_|Acc]) ->
	Text = format(iolist_to_binary(lists:reverse(Acc)), LN, St),
	case AST0 of
		[{block_title, Title}|AST] ->
			p1(Tail, [paragraph(Text, #{title => Title}, ann(LN, St))|AST], St);
		AST ->
			p1(Tail, [paragraph(Text, #{}, ann(LN, St))|AST], St)
	end;
%% Ignore comments inside paragraphs.
%% @todo Keep in the AST.
p1_p([{_, <<"//", _/bits>>}|Tail], AST, St, LN, Acc) ->
	p1_p(Tail, AST, St, LN, Acc);
p1_p([{_, Line}|Tail], AST, St, LN, Acc) ->
	%% @todo We need to keep line/col information. To do this
	%% we probably should keep an index of character number -> line/col
	%% that we pass to the format function. Otherwise the line/col
	%% information on text will point to the paragraph start.
	p1_p(Tail, AST, St, LN, [<<" ">>, Line|Acc]).
248 | ||
249 | %% Inline formatting. | |
250 | ||
%% @todo Probably do it as part of the node functions that require it.
%% Apply inline formatting to a paragraph's text. Returns the
%% plain binary when no formatting node was produced, or a
%% list of text/formatting nodes otherwise.
format(Text, LN, St) ->
	case format(Text, LN, St, [], <<>>, $\s) of
		[Bin] when is_binary(Bin) -> Bin;
		Formatted -> Formatted
	end.
257 | ||
%% Inline formatting worker.
%%
%% Acc accumulates finished nodes, BinAcc accumulates plain
%% text, and Prev is the previously seen character: links and
%% quoted text are only recognized after whitespace (a word
%% boundary, Prev =:= $\s).
format(<<>>, _, _, Acc, <<>>, _) ->
	lists:reverse(Acc);
format(<<>>, _, _, Acc, BinAcc, _) ->
	lists:reverse([BinAcc|Acc]);
%% Relative links of the form link:target[caption].
format(<< "link:", Rest0/bits >>, LN, St, Acc0, BinAcc, Prev) when Prev =:= $\s ->
	case re:run(Rest0, "^([^[]*)\\[([^]]*)\\](.*)", [{capture, all, binary}]) of
		nomatch ->
			format(Rest0, LN, St, Acc0, << BinAcc/binary, "link:" >>, $:);
		{match, [_, Link, Text, Rest]} ->
			Acc = case BinAcc of
				<<>> -> Acc0;
				_ -> [BinAcc|Acc0]
			end,
			format(Rest, LN, St, [rel_link(Text, Link, ann(LN, St))|Acc], <<>>, $])
	end;
format(<< C, Rest0/bits >>, LN, St, Acc0, BinAcc, Prev) when Prev =:= $\s ->
	%% @todo In some cases we must format inside the quoted text too.
	%% Therefore we need to have some information about what to do here.
	Quotes = #{
		$* => {strong, text},
		$` => {mono, literal}
	},
	case maps:get(C, Quotes, undefined) of
		undefined ->
			format(Rest0, LN, St, Acc0, << BinAcc/binary, C >>, C);
		{NodeType, QuotedType} ->
			case binary:split(Rest0, << C >>) of
				%% No closing quote: emit the quote character itself as
				%% plain text. (This previously hardcoded $*, which
				%% corrupted unclosed backticks into asterisks.)
				[_] ->
					format(Rest0, LN, St, Acc0, << BinAcc/binary, C >>, C);
				[QuotedText0, Rest] ->
					Acc = case BinAcc of
						<<>> -> Acc0;
						_ -> [BinAcc|Acc0]
					end,
					QuotedText = case QuotedType of
						text -> format(QuotedText0, LN, St);
						literal -> QuotedText0
					end,
					%% The closing quote character becomes the new Prev.
					format(Rest, LN, St, [quoted(NodeType, QuotedText, ann(LN, St))|Acc], <<>>, C)
			end
	end;
format(<< C, Rest/bits >>, LN, St, Acc, BinAcc, _) ->
	format(Rest, LN, St, Acc, << BinAcc/binary, C >>, C).
301 | ||
302 | %% Second pass. | |
303 | ||
%% Second pass: wrap labeled items into ll nodes, group runs
%% of uli1 items into ul nodes, and assemble begin_table/cell/
%% end_table streams into table nodes.
p2([], Acc) ->
	lists:reverse(Acc);
p2([{label, Label, Items, Ann}|Tail], Acc) ->
	%% @todo Handle this like other lists.
	p2(Tail, [ll([li(p2(Items, []), #{label => Label}, Ann)], #{}, Ann)|Acc]);
p2(Tail0=[{uli1, _, UlAnn}|_], Acc) ->
	{LIs0, Tail} = lists:splitwith(fun({uli1, _, _}) -> true; (_) -> false end, Tail0),
	LIs = [li(I, LiAnn) || {uli1, I, LiAnn} <- LIs0],
	p2(Tail, [ul(LIs, #{}, UlAnn)|Acc]);
p2([{begin_table, Attrs, Ann}|Tail0], Acc) ->
	%% @todo Can also get them from Attrs?
	N = count_table_columns(Tail0),
	{Rows, Tail} = p2_rows(Tail0, [], [], N, 1),
	p2(Tail, [table(Rows, Attrs, Ann)|Acc]);
p2([Item|Tail], Acc) ->
	p2(Tail, [Item|Acc]).
320 | ||
%% @todo One cell per line version.
%% The table's column count is taken from its first row: the
%% number of consecutive cell nodes at the head of the stream.
count_table_columns(Nodes) ->
	IsCell = fun({cell, _, _, _}) -> true; (_) -> false end,
	length(lists:takewhile(IsCell, Nodes)).
324 | ||
%% Group the cell stream into rows of NumCols cells each,
%% skipping nl markers, until end_table. Returns
%% {Rows, RemainingNodes}.
p2_rows([nl|Tail], Rows, Cols, NumCols, N) ->
	p2_rows(Tail, Rows, Cols, NumCols, N);
%% The NumCols-th cell completes a row.
p2_rows([Cell = {cell, _, _, Ann}|Tail], Rows, Cols, NumCols, NumCols) ->
	p2_rows(Tail, [row(lists:reverse([Cell|Cols]), Ann)|Rows], [], NumCols, 1);
p2_rows([Cell = {cell, _, _, _}|Tail], Rows, Cols, NumCols, N) ->
	p2_rows(Tail, Rows, [Cell|Cols], NumCols, N + 1);
p2_rows([end_table|Tail], Rows, [], _, _) ->
	{lists:reverse(Rows), Tail}.
333 | ||
334 | %% Annotations. | |
335 | ||
%% Build the annotation attached to every AST node. The
%% two-argument form defaults the column to 1.
ann(Line, St) ->
	ann(Line, 1, St).

%% @todo Take filename too, if any.
ann(Line, Col, _St) ->
	maps:from_list([{line, Line}, {col, Col}]).
342 | ||
343 | %% Nodes. | |
344 | ||
%% Constructors for the common {Type, Attrs, Content, Ann}
%% node shape. Content is either raw text or child nodes.

cell(Children, Ann) ->
	{cell, #{}, Children, Ann}.

comment(Text, Ann) ->
	{comment, #{}, Text, Ann}.

%% List item; the short form carries no attributes.
li(Children, Ann) ->
	li(Children, #{}, Ann).

li(Children, Attrs, Ann) ->
	{li, Attrs, Children, Ann}.

listing(Text, Attrs, Ann) ->
	{listing, Attrs, Text, Ann}.

%% Labeled list.
ll(Children, Attrs, Ann) ->
	{ll, Attrs, Children, Ann}.

paragraph(Text, Attrs, Ann) ->
	{p, Attrs, Text, Ann}.

%% Inline quoted text; NodeType is e.g. strong or mono.
quoted(NodeType, Text, Ann) ->
	{NodeType, #{}, Text, Ann}.

rel_link(Text, Target, Ann) ->
	{rel_link, #{target => Target}, Text, Ann}.

row(Children, Ann) ->
	{row, #{}, Children, Ann}.

table(Children, Attrs, Ann) ->
	{table, Attrs, Children, Ann}.

title(Text, Attrs, Ann) ->
	{title, Attrs, Text, Ann}.

%% Unordered list.
ul(Children, Attrs, Ann) ->
	{ul, Attrs, Children, Ann}.
383 | ||
384 | %% Utility functions. | |
385 | ||
%% Strip leading and trailing spaces/tabs from a binary.
%% Newlines are intentionally left untouched.
trim_ws(Text) ->
	Trimmed = re:replace(Text, "^[ \\t]+|[ \\t]+$", <<>>, [global]),
	iolist_to_binary(Trimmed).
0 | %% Copyright (c) 2017-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | %% | |
2 | %% Permission to use, copy, modify, and/or distribute this software for any | |
3 | %% purpose with or without fee is hereby granted, provided that the above | |
4 | %% copyright notice and this permission notice appear in all copies. | |
5 | %% | |
6 | %% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | |
7 | %% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | |
8 | %% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | |
9 | %% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | |
10 | %% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | |
11 | %% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | |
12 | %% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | |
13 | ||
14 | %% This pass parses and builds a table from the contents | |
15 | %% of a table block. | |
16 | %% | |
17 | %% Asciidoc User Guide 23 | |
18 | %% | |
19 | %% @todo Rows and cells are currently not annotated. | |
20 | -module(asciideck_tables_pass). | |
21 | ||
22 | -export([run/1]). | |
23 | ||
24 | -define(IS_WS(C), (C =:= $\s) or (C =:= $\t) or (C =:= $\n). | |
25 | ||
%% Run the tables pass: every top-level table block has its
%% raw contents parsed into rows of cells; all other blocks
%% pass through untouched.
run(AST) ->
	[case Node of
		{table, _, _, _} -> table(Node);
		_ -> Node
	end || Node <- AST].
32 | ||
%% Parse a table block's raw binary contents into row nodes,
%% using the cols attribute (when present) for the column count.
table({table, Attrs, Contents, Ann}) ->
	{Cells, NumCols} = parse_table(Contents, Attrs),
	Children = rows(Cells, NumCols),
	{table, Attrs, Children, Ann}.
37 | ||
-ifdef(TEST).
%% A 3x3 table is parsed into three rows of three cells.
table_test() ->
	{table, _, [
		{row, _, [
			{cell, _, <<"1">>, _},
			{cell, _, <<"2">>, _},
			{cell, _, <<"A">>, _}
		], _},
		{row, _, [
			{cell, _, <<"3">>, _},
			{cell, _, <<"4">>, _},
			{cell, _, <<"B">>, _}
		], _},
		{row, _, [
			{cell, _, <<"5">>, _},
			{cell, _, <<"6">>, _},
			{cell, _, <<"C">>, _}
		], _}
	], _} = table({table, #{}, <<
		"|1 |2 |A\n"
		"|3 |4 |B\n"
		"|5 |6 |C">>, #{line => 1}}),
	ok.
-endif.
62 | ||
%% Parse the raw table contents. Returns {Cells, NumCols}.
%%
%% If the cols attribute is not specified, the number of
%% columns is the number of cells on the first line.
parse_table(Contents, #{<<"cols">> := Cols}) ->
	{parse_cells(Contents, []), num_cols(Cols)};
%% We get the first line, parse the cells in it then
%% count the number of columns in the table. Finally
%% we parse all the remaining cells.
parse_table(Contents, _) ->
	case binary:split(Contents, <<$\n>>) of
		%% We only have the one line. Who writes tables like this?
		[Line] ->
			Cells = parse_cells(Line, []),
			{Cells, length(Cells)};
		%% We have a useful table with more than one line. Good user!
		[Line, Rest] ->
			Cells0 = parse_cells(Line, []),
			Cells = parse_cells(Rest, lists:reverse(Cells0)),
			{Cells, length(Cells0)}
	end.
82 | ||
%% The cols attribute is a comma-separated list of column
%% specifiers; the column count is the number of entries.
%% @todo Handle column specifiers.
num_cols(Cols) ->
	length(binary:split(Cols, <<$,>>, [global])).
87 | ||
%% Split the contents on unescaped "|" and turn the chunks
%% into cell nodes, prepending onto Acc (a reversed cell list).
parse_cells(Contents, Acc) ->
	Cells = split_cells(Contents),
	do_parse_cells(Cells, Acc).
%% Split on |
%% Look at the end of each element see if there's a cell specifier
%% Add it as an attribute to the cell for now and consolidate
%% when processing rows.
95 | ||
%% Split the table contents on every unescaped "|" character.
%% An escaped pipe ("\\|") is kept as a literal "|". The chunk
%% before the first pipe (usually empty) is returned as well.
split_cells(Contents) ->
	split_cells(Contents, <<>>, []).

split_cells(<<>>, Current, Done) ->
	lists:reverse([Current|Done]);
split_cells(<<$\\, $|, Rest/bits>>, Current, Done) ->
	split_cells(Rest, <<Current/binary, $|>>, Done);
split_cells(<<$|, Rest/bits>>, Current, Done) ->
	split_cells(Rest, <<>>, [Current|Done]);
split_cells(<<Byte, Rest/bits>>, Current, Done) ->
	split_cells(Rest, <<Current/binary, Byte>>, Done).
107 | ||
%% Convert the split chunks into cell nodes. Each chunk before
%% a cell's contents carries that cell's specifiers, which are
%% stored as an attribute for later consolidation.
%%
%% Malformed table (no pipe before cell). Process it like it is a single cell.
do_parse_cells([Contents], Acc) ->
	%% @todo Annotations.
	lists:reverse([{cell, #{specifiers => <<>>}, Contents, #{}}|Acc]);
%% Last cell. There are no further cell specifiers.
do_parse_cells([Specs, Contents0], Acc) ->
	Contents = asciideck_block_parser:trim(Contents0, both),
	%% @todo Annotations.
	Cell = {cell, #{specifiers => Specs}, Contents, #{}},
	lists:reverse([Cell|Acc]);
%% If there are cell specifiers we need to extract them from the cell
%% contents. Cell specifiers are everything from the last whitespace
%% until the end of the binary.
do_parse_cells([Specs, Contents0|Tail], Acc) ->
	NextSpecs = <<>>, %% @todo find_r(Contents0, <<>>),
	Len = byte_size(Contents0) - byte_size(NextSpecs),
	<<Contents1:Len/binary, _/bits>> = Contents0,
	Contents = asciideck_block_parser:trim(Contents1, both),
	%% @todo Annotations.
	Cell = {cell, #{specifiers => Specs}, Contents, #{}},
	do_parse_cells([NextSpecs|Tail], [Cell|Acc]).
129 | ||
130 | %% @todo This is not correct. Not all remaining data is specifiers. | |
131 | %% In addition, for columns at the end of the line this doesn't apply. | |
132 | %% Find the remaining data after the last whitespace character. | |
133 | %find_r(<<>>, Acc) -> | |
134 | % Acc; | |
135 | %find_r(<<C, Rest/bits>>, _) when ?IS_WS(C) -> | |
136 | % find_r(Rest, Rest); | |
137 | %find_r(<<_, Rest/bits>>, Acc) -> | |
138 | % find_r(Rest, Acc). | |
139 | ||
-ifdef(TEST).
%% Without a cols attribute, the column count comes from the
%% number of cells on the first line.
parse_table_test() ->
	{[
		{cell, _, <<"1">>, _},
		{cell, _, <<"2">>, _},
		{cell, _, <<"A">>, _},
		{cell, _, <<"3">>, _},
		{cell, _, <<"4">>, _},
		{cell, _, <<"B">>, _},
		{cell, _, <<"5">>, _},
		{cell, _, <<"6">>, _},
		{cell, _, <<"C">>, _}
	], 3} = parse_table(<<
		"|1 |2 |A\n"
		"|3 |4 |B\n"
		"|5 |6 |C">>, #{}),
	ok.

%% An escaped pipe stays inside the cell contents.
parse_table_escape_pipe_test() ->
	{[
		{cell, _, <<"1">>, _},
		{cell, _, <<"2">>, _},
		{cell, _, <<"3 |4">>, _},
		{cell, _, <<"5">>, _}
	], 2} = parse_table(<<
		"|1 |2\n"
		"|3 \\|4 |5">>, #{}),
	ok.
-endif.
169 | ||
%% @todo We currently don't handle colspans and rowspans.
%% Group a flat list of cells into rows of NumCols cells each.
rows(Cells, NumCols) ->
	rows(Cells, [], NumCols, [], NumCols).

%% The current row is full: flush it and start a fresh one.
rows(Cells, RowsAcc, NumCols, CellsAcc, Left) when Left =< 0 ->
	%% @todo Annotations.
	Row = {row, #{}, lists:reverse(CellsAcc), #{}},
	rows(Cells, [Row|RowsAcc], NumCols, [], NumCols);
%% Move the next cell into the current row.
rows([Cell|Cells], RowsAcc, NumCols, CellsAcc, Left) ->
	rows(Cells, RowsAcc, NumCols, [Cell|CellsAcc], Left - 1);
%% End of a properly formed table: no leftover cells.
rows([], RowsAcc, _, [], _) ->
	lists:reverse(RowsAcc);
%% Malformed table. Even if we expect more columns,
%% if there are no more cells there's nothing we can do:
%% flush the incomplete final row.
rows([], RowsAcc, _, CellsAcc, _) ->
	%% @todo Annotations.
	Row = {row, #{}, lists:reverse(CellsAcc), #{}},
	lists:reverse([Row|RowsAcc]).
0 | %% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> | |
0 | %% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | 1 | %% |
2 | 2 | %% Permission to use, copy, modify, and/or distribute this software for any |
3 | 3 | %% purpose with or without fee is hereby granted, provided that the above |
18 | 18 | -export([translate/2]). |
19 | 19 | |
20 | 20 | translate(AST, Opts) -> |
21 | {Man, Section, Output0} = translate_man(AST, Opts), | |
21 | {Man, Section, Output0} = man(AST, Opts), | |
22 | 22 | {CompressExt, Output} = case Opts of |
23 | 23 | #{compress := gzip} -> {".gz", zlib:gzip(Output0)}; |
24 | 24 | _ -> {"", Output0} |
31 | 31 | Output |
32 | 32 | end. |
33 | 33 | |
34 | translate_man([{title, #{level := 0}, Title0, _Ann}|AST], Opts) -> | |
34 | %% Header of the man page file. | |
35 | ||
36 | man([{section_title, #{level := 0}, Title0, _Ann}|AST], Opts) -> | |
35 | 37 | ensure_name_section(AST), |
36 | 38 | [Title, << Section:1/binary, _/bits >>] = binary:split(Title0, <<"(">>), |
37 | 39 | Extra1 = maps:get(extra1, Opts, today()), |
41 | 43 | ".TH \"", Title, "\" \"", Section, "\" \"", |
42 | 44 | Extra1, "\" \"", Extra2, "\" \"", Extra3, "\"\n" |
43 | 45 | ".ta T 4n\n\\&\n", |
44 | man(AST, []) | |
46 | ast(AST) | |
45 | 47 | ]}. |
46 | 48 | |
47 | ensure_name_section([{title, #{level := 1}, Title, _}|_]) -> | |
49 | ensure_name_section([{section_title, #{level := 1}, Title, _}|_]) -> | |
48 | 50 | case string:to_lower(string:strip(binary_to_list(Title))) of |
49 | 51 | "name" -> ok; |
50 | 52 | _ -> error(badarg) |
56 | 58 | {{Y, M, D}, _} = calendar:universal_time(), |
57 | 59 | io_lib:format("~b-~2.10.0b-~2.10.0b", [Y, M, D]). |
58 | 60 | |
59 | man([], Acc) -> | |
60 | lists:reverse(Acc); | |
61 | man([{title, #{level := 1}, Title, _Ann}|Tail], Acc) -> | |
62 | man(Tail, [[".SH ", string:to_upper(binary_to_list(Title)), "\n"]|Acc]); | |
63 | man([{title, #{level := 2}, Title, _Ann}|Tail], Acc) -> | |
64 | man(Tail, [[".SS ", Title, "\n"]|Acc]); | |
65 | man([{p, _Attrs, Text, _Ann}|Tail], Acc) -> | |
66 | man(Tail, [[".LP\n", man_format(Text), "\n.sp\n"]|Acc]); | |
67 | man([{listing, Attrs, Listing, _Ann}|Tail], Acc0) -> | |
68 | Acc1 = case Attrs of | |
69 | #{title := Title} -> | |
70 | [[".PP\n\\fB", Title, "\\fR\n"]|Acc0]; | |
71 | _ -> | |
72 | Acc0 | |
73 | end, | |
74 | Acc = [[ | |
61 | %% Loop over all types of AST nodes. | |
62 | ||
63 | ast(AST) -> | |
64 | fold(AST, fun ast_node/1). | |
65 | ||
66 | fold(AST, Fun) -> | |
67 | lists:reverse(lists:foldl( | |
68 | fun(Node, Acc) -> [Fun(Node)|Acc] end, | |
69 | [], AST)). | |
70 | ||
71 | ast_node(Node={Type, _, _, _}) -> | |
72 | try | |
73 | case Type of | |
74 | section_title -> section_title(Node); | |
75 | paragraph -> paragraph(Node); | |
76 | listing_block -> listing_block(Node); | |
77 | list -> list(Node); | |
78 | table -> table(Node); | |
79 | comment_line -> comment_line(Node); | |
80 | _ -> | |
81 | io:format("Ignored AST node ~p~n", [Node]), | |
82 | [] | |
83 | end | |
84 | catch _:_ -> | |
85 | io:format("Ignored AST node ~p~n", [Node]), | |
86 | [] | |
87 | end. | |
88 | ||
89 | %% Section titles. | |
90 | ||
91 | section_title({section_title, #{level := 1}, Title, _}) -> | |
92 | [".SH ", string:to_upper(binary_to_list(Title)), "\n"]; | |
93 | section_title({section_title, #{level := 2}, Title, _}) -> | |
94 | [".SS ", Title, "\n"]. | |
95 | ||
96 | %% Paragraphs. | |
97 | ||
98 | paragraph({paragraph, _, Text, _}) -> | |
99 | [".LP\n", inline(Text), "\n.sp\n"]. | |
100 | ||
101 | %% Listing blocks. | |
102 | ||
103 | listing_block({listing_block, Attrs, Listing, _}) -> | |
104 | [ | |
105 | case Attrs of | |
106 | #{<<"title">> := Title} -> | |
107 | [".PP\n\\fB", Title, "\\fR\n"]; | |
108 | _ -> | |
109 | [] | |
110 | end, | |
75 | 111 | ".if n \\{\\\n" |
76 | 112 | ".RS 4\n" |
77 | 113 | ".\\}\n" |
81 | 117 | ".fi\n" |
82 | 118 | ".if n \\{\\\n" |
83 | 119 | ".RE\n" |
84 | ".\\}\n"]|Acc1], | |
85 | man(Tail, Acc); | |
86 | man([{ul, _Attrs, Items, _Ann}|Tail], Acc0) -> | |
87 | Acc = man_ul(Items, Acc0), | |
88 | man(Tail, Acc); | |
89 | man([{ll, _Attrs, Items, _Ann}|Tail], Acc0) -> | |
90 | Acc = man_ll(Items, Acc0), | |
91 | man(Tail, Acc); | |
92 | %% @todo Attributes. | |
93 | %% Currently acts as if options="headers" was always set. | |
94 | man([{table, _TAttrs, [{row, RowAttrs, Headers0, RowAnn}|Rows0], _TAnn}|Tail], Acc0) -> | |
95 | Headers = [{cell, CAttrs, [{p, Attrs, [{strong, #{}, P, CAnn}], Ann}], CAnn} | |
96 | || {cell, CAttrs, [{p, Attrs, P, Ann}], CAnn} <- Headers0], | |
97 | Rows = [{row, RowAttrs, Headers, RowAnn}|Rows0], | |
98 | Acc = [[ | |
99 | ".TS\n" | |
100 | "allbox tab(:);\n", | |
101 | man_table_style(Rows, []), | |
102 | man_table_contents(Rows), | |
103 | ".TE\n" | |
104 | ".sp 1\n"]|Acc0], | |
105 | man(Tail, Acc); | |
106 | %% Skip everything we don't understand. | |
107 | man([_Ignore|Tail], Acc) -> | |
108 | io:format("Ignore ~p~n", [_Ignore]), %% @todo lol io:format | |
109 | man(Tail, Acc). | |
110 | ||
111 | man_ll([], Acc) -> | |
112 | Acc; | |
113 | man_ll([{li, #{label := Label}, Item, _LiAnn}|Tail], Acc0) -> | |
114 | Acc = [[ | |
115 | ".PP\n" | |
116 | "\\fB", Label, "\\fR\n", | |
117 | ".RS 4\n", | |
118 | man_ll_item(Item), | |
119 | ".RE\n"]|Acc0], | |
120 | man_ll(Tail, Acc). | |
121 | ||
122 | man_ll_item([{ul, _Attrs, Items, _Ann}]) -> | |
123 | [man_ul(Items, []), "\n"]; | |
124 | man_ll_item([{p, _PAttrs, Text, _PAnn}]) -> | |
125 | [man_format(Text), "\n"]; | |
126 | man_ll_item([{p, _PAttrs, Text, _PAnn}|Tail]) -> | |
127 | [man_format(Text), "\n\n", man_ll_item(Tail)]. | |
128 | ||
129 | man_ul([], Acc) -> | |
130 | Acc; | |
131 | man_ul([{li, _LiAttrs, [{p, _PAttrs, Text, _PAnn}], _LiAnn}|Tail], Acc0) -> | |
132 | Acc = [[ | |
120 | ".\\}\n" | |
121 | ]. | |
122 | ||
123 | %% Lists. | |
124 | ||
125 | list({list, #{type := bulleted}, Items, _}) -> | |
126 | fold(Items, fun bulleted_list_item/1); | |
127 | list({list, #{type := labeled}, Items, _}) -> | |
128 | fold(Items, fun labeled_list_item/1). | |
129 | ||
130 | bulleted_list_item({list_item, _, [{paragraph, _, Text, _}|AST], _}) -> | |
131 | [ | |
133 | 132 | ".ie n \\{\\\n" |
134 | 133 | ".RS 2\n" |
135 | 134 | "\\h'-02'\\(bu\\h'+01'\\c\n" |
139 | 138 | ".sp -1\n" |
140 | 139 | ".IP \\(bu 2.3\n" |
141 | 140 | ".\\}\n", |
142 | man_format(Text), "\n" | |
143 | ".RE\n"]|Acc0], | |
144 | man_ul(Tail, Acc). | |
145 | ||
146 | man_table_style([], [_|Acc]) -> | |
147 | lists:reverse([".\n"|Acc]); | |
148 | man_table_style([{row, _, Cols, _}|Tail], Acc) -> | |
149 | man_table_style(Tail, [$\n, man_table_style_cols(Cols, [])|Acc]). | |
150 | ||
151 | man_table_style_cols([], [_|Acc]) -> | |
152 | lists:reverse(Acc); | |
153 | man_table_style_cols([{cell, _, _, _}|Tail], Acc) -> | |
154 | man_table_style_cols(Tail, [$\s, "lt"|Acc]). | |
155 | ||
156 | man_table_contents(Rows) -> | |
157 | [man_table_contents_cols(Cols, []) || {row, _, Cols, _} <- Rows]. | |
158 | ||
159 | man_table_contents_cols([], [_|Acc]) -> | |
160 | lists:reverse(["\n"|Acc]); | |
161 | man_table_contents_cols([{cell, _CAttrs, [{p, _PAttrs, Text, _PAnn}], _CAnn}|Tail], Acc) -> | |
162 | man_table_contents_cols(Tail, [$:, "\nT}", man_format(Text), "T{\n"|Acc]). | |
163 | ||
164 | man_format(Text) when is_binary(Text) -> | |
141 | inline(Text), "\n", | |
142 | ast(AST), | |
143 | ".RE\n" | |
144 | ]. | |
145 | ||
146 | labeled_list_item({list_item, #{label := Label}, [{paragraph, _, Text, _}|AST], _}) -> | |
147 | [ | |
148 | ".PP\n" | |
149 | "\\fB", inline(Label), "\\fR\n", | |
150 | ".RS 4\n", | |
151 | inline(Text), "\n", | |
152 | ast(AST), | |
153 | ".RE\n" | |
154 | ]. | |
155 | ||
156 | %% Tables. | |
157 | ||
158 | table({table, _, Rows0, _}) -> | |
159 | Rows = table_apply_options(Rows0), | |
160 | [ | |
161 | ".TS\n" | |
162 | "allbox tab(:);\n", | |
163 | table_style(Rows), ".\n", | |
164 | table_contents(Rows), | |
165 | ".TE\n" | |
166 | ".sp 1\n" | |
167 | ]. | |
168 | ||
169 | %% @todo Currently acts as if options="headers" was always set. | |
170 | table_apply_options([{row, RAttrs, Headers0, RAnn}|Tail]) -> | |
171 | Headers = [{cell, CAttrs, [{strong, #{}, CText, CAnn}], CAnn} | |
172 | || {cell, CAttrs, CText, CAnn} <- Headers0], | |
173 | [{row, RAttrs, Headers, RAnn}|Tail]. | |
174 | ||
175 | table_style(Rows) -> | |
176 | [[table_style_cells(Cells), "\n"] | |
177 | || {row, _, Cells, _} <- Rows]. | |
178 | ||
179 | table_style_cells(Cells) -> | |
180 | ["lt " || {cell, _, _, _} <- Cells]. | |
181 | ||
182 | table_contents(Rows) -> | |
183 | [[table_contents_cells(Cells), "\n"] | |
184 | || {row, _, Cells, _} <- Rows]. | |
185 | ||
186 | table_contents_cells([FirstCell|Cells]) -> | |
187 | [table_contents_cell(FirstCell), | |
188 | [[":", table_contents_cell(Cell)] || Cell <- Cells]]. | |
189 | ||
190 | table_contents_cell({cell, _, Text, _}) -> | |
191 | ["T{\n", inline(Text), "\nT}"]. | |
192 | ||
193 | %% Comment lines are printed in the generated file | |
194 | %% but are not visible in viewers. | |
195 | ||
196 | comment_line({comment_line, _, Text, _}) -> | |
197 | ["\\# ", Text, "\n"]. | |
198 | ||
199 | %% Inline formatting. | |
200 | ||
201 | inline(Text) when is_binary(Text) -> | |
165 | 202 | Text; |
166 | man_format({rel_link, #{target := Link}, Text, _}) -> | |
203 | %% When the link is the text we only print it once. | |
204 | inline({link, #{target := Link}, Link, _}) -> | |
205 | Link; | |
206 | inline({link, #{target := Link}, Text, _}) -> | |
167 | 207 | case re:run(Text, "^([-_:.a-zA-Z0-9]*)(\\([0-9]\\))$", [{capture, all, binary}]) of |
168 | 208 | nomatch -> [Text, " (", Link, ")"]; |
169 | 209 | {match, [_, ManPage, ManSection]} -> ["\\fB", ManPage, "\\fR", ManSection] |
170 | 210 | end; |
171 | man_format({strong, _, Text, _}) -> | |
172 | ["\\fB", man_format(Text), "\\fR"]; | |
211 | inline({emphasized, _, Text, _}) -> | |
212 | ["\\fI", inline(Text), "\\fR"]; | |
213 | inline({strong, _, Text, _}) -> | |
214 | ["\\fB", inline(Text), "\\fR"]; | |
173 | 215 | %% We are already using a monospace font. |
174 | %% @todo Maybe there's a readable formatting we could use to differentiate from normal text? | |
175 | man_format({mono, _, Text, _}) -> | |
176 | man_format(Text); | |
177 | man_format(Text) when is_list(Text) -> | |
178 | [man_format(T) || T <- Text]. | |
216 | inline({inline_literal_passthrough, _, Text, _}) -> | |
217 | inline(Text); | |
218 | %% Xref links appear as plain text in manuals. | |
219 | inline({xref, _, Text, _}) -> | |
220 | inline(Text); | |
221 | inline(Text) when is_list(Text) -> | |
222 | [inline(T) || T <- Text]. |
0 | %% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> | |
0 | %% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | 1 | %% |
2 | 2 | %% Permission to use, copy, modify, and/or distribute this software for any |
3 | 3 | %% purpose with or without fee is hereby granted, provided that the above |
13 | 13 | |
14 | 14 | -module(man_SUITE). |
15 | 15 | -compile(export_all). |
16 | -compile(nowarn_export_all). | |
16 | 17 | |
17 | 18 | -import(ct_helper, [doc/1]). |
18 | 19 |
0 | %% Copyright (c) 2016, Loïc Hoguin <essen@ninenines.eu> | |
0 | %% Copyright (c) 2016-2018, Loïc Hoguin <essen@ninenines.eu> | |
1 | 1 | %% |
2 | 2 | %% Permission to use, copy, modify, and/or distribute this software for any |
3 | 3 | %% purpose with or without fee is hereby granted, provided that the above |
13 | 13 | |
14 | 14 | -module(parser_SUITE). |
15 | 15 | -compile(export_all). |
16 | -compile(nowarn_export_all). | |
16 | 17 | |
17 | 18 | -import(asciideck, [parse/1]). |
18 | 19 | -import(ct_helper, [doc/1]). |
19 | 20 | |
20 | 21 | all() -> |
21 | ct_helper:all(?MODULE). | |
22 | [{group, blocks}]. | |
22 | 23 | |
23 | 24 | %% @todo Test formatting too! |
25 | groups() -> | |
26 | [{blocks, [parallel], ct_helper:all(?MODULE)}]. | |
24 | 27 | |
25 | 28 | %% Empty lines. |
26 | 29 | |
42 | 45 | |
43 | 46 | quoted_text_strong(_) -> |
44 | 47 | doc("Strong text formatting. (10.1)"), |
45 | [{p, _, [{strong, _, <<"Hello beautiful world!">>, _}], _}] = | |
48 | [{paragraph, _, [{strong, _, <<"Hello beautiful world!">>, _}], _}] = | |
46 | 49 | parse("*Hello beautiful world!*"), |
47 | [{p, _, [{strong, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] = | |
50 | [{paragraph, _, [{strong, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] = | |
48 | 51 | parse("*Hello* beautiful world!"), |
49 | [{p, _, [<<"Hello ">>, {strong, _, <<"beautiful">>, _}, <<" world!">>], _}] = | |
52 | [{paragraph, _, [<<"Hello ">>, {strong, _, <<"beautiful">>, _}, <<" world!">>], _}] = | |
50 | 53 | parse("Hello *beautiful* world!"), |
51 | [{p, _, [<<"Hello beautiful ">>, {strong, _, <<"world!">>, _}], _}] = | |
54 | [{paragraph, _, [<<"Hello beautiful ">>, {strong, _, <<"world!">>, _}], _}] = | |
52 | 55 | parse("Hello beautiful *world!*"), |
53 | [{p, _, [<<"Hello beautiful ">>, {strong, _, <<"multiline world!">>, _}, <<" lol">>], _}] = | |
56 | [{paragraph, _, [<<"Hello beautiful ">>, {strong, _, <<"multiline world!">>, _}, <<" lol">>], _}] = | |
54 | 57 | parse("Hello beautiful *multiline\nworld!* lol"), |
55 | 58 | %% Nested formatting. |
56 | [{p, _, [{strong, _, [ | |
59 | [{paragraph, _, [{strong, _, [ | |
57 | 60 | <<"Hello ">>, |
58 | {rel_link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _}, | |
61 | {link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _}, | |
59 | 62 | <<" world!">> |
60 | 63 | ], _}], _}] = |
61 | 64 | parse("*Hello link:downloads/cowboy-2.0.tgz[2.0] world!*"), |
63 | 66 | |
64 | 67 | quoted_text_literal_mono(_) -> |
65 | 68 | doc("Literal monospace text formatting. (10.1)"), |
66 | [{p, _, [{mono, _, <<"Hello beautiful world!">>, _}], _}] = | |
69 | [{paragraph, _, [{inline_literal_passthrough, _, <<"Hello beautiful world!">>, _}], _}] = | |
67 | 70 | parse("`Hello beautiful world!`"), |
68 | [{p, _, [{mono, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] = | |
71 | [{paragraph, _, [{inline_literal_passthrough, _, <<"Hello">>, _}, <<" beautiful world!">>], _}] = | |
69 | 72 | parse("`Hello` beautiful world!"), |
70 | [{p, _, [<<"Hello ">>, {mono, _, <<"beautiful">>, _}, <<" world!">>], _}] = | |
73 | [{paragraph, _, [<<"Hello ">>, {inline_literal_passthrough, _, <<"beautiful">>, _}, <<" world!">>], _}] = | |
71 | 74 | parse("Hello `beautiful` world!"), |
72 | [{p, _, [<<"Hello beautiful ">>, {mono, _, <<"world!">>, _}], _}] = | |
75 | [{paragraph, _, [<<"Hello beautiful ">>, {inline_literal_passthrough, _, <<"world!">>, _}], _}] = | |
73 | 76 | parse("Hello beautiful `world!`"), |
74 | [{p, _, [<<"Hello beautiful ">>, {mono, _, <<"multiline world!">>, _}, <<" lol">>], _}] = | |
77 | [{paragraph, _, [<<"Hello beautiful ">>, {inline_literal_passthrough, _, <<"multiline\nworld!">>, _}, <<" lol">>], _}] = | |
75 | 78 | parse("Hello beautiful `multiline\nworld!` lol"), |
76 | 79 | %% No text formatting must occur inside backticks. |
77 | [{p, _, [{mono, _, <<"Hello *beautiful* world!">>, _}], _}] = | |
80 | [{paragraph, _, [{inline_literal_passthrough, _, <<"Hello *beautiful* world!">>, _}], _}] = | |
78 | 81 | parse("`Hello *beautiful* world!`"), |
79 | 82 | ok. |
80 | 83 | |
85 | 88 | |
86 | 89 | title_short(_) -> |
87 | 90 | doc("The trailing title delimiter is optional. (11.2)"), |
88 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!"), | |
89 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!"), | |
90 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!"), | |
91 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!"), | |
92 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!"), | |
91 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!"), | |
92 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!"), | |
93 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!"), | |
94 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!"), | |
95 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!"), | |
93 | 96 | ok. |
94 | 97 | |
95 | 98 | title_short_no_spaces(_) -> |
96 | 99 | doc("One or more spaces must fall between the title and the delimiter. (11.2)"), |
97 | [{p, _, <<"=Hello world!">>, _}] = parse("=Hello world!"), | |
98 | [{p, _, <<"==Hello world!">>, _}] = parse("==Hello world!"), | |
99 | [{p, _, <<"===Hello world!">>, _}] = parse("===Hello world!"), | |
100 | [{p, _, <<"====Hello world!">>, _}] = parse("====Hello world!"), | |
101 | [{p, _, <<"=====Hello world!">>, _}] = parse("=====Hello world!"), | |
100 | [{paragraph, _, <<"=Hello world!">>, _}] = parse("=Hello world!"), | |
101 | [{paragraph, _, <<"==Hello world!">>, _}] = parse("==Hello world!"), | |
102 | [{paragraph, _, <<"===Hello world!">>, _}] = parse("===Hello world!"), | |
103 | [{paragraph, _, <<"====Hello world!">>, _}] = parse("====Hello world!"), | |
104 | [{paragraph, _, <<"=====Hello world!">>, _}] = parse("=====Hello world!"), | |
102 | 105 | ok. |
103 | 106 | |
104 | 107 | title_short_trim_spaces_before(_) -> |
105 | 108 | doc("Spaces between the title and delimiter must be ignored. (11.2)"), |
106 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!"), | |
107 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!"), | |
108 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!"), | |
109 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!"), | |
110 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!"), | |
109 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world!"), | |
110 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world!"), | |
111 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world!"), | |
112 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world!"), | |
113 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world!"), | |
111 | 114 | ok. |
112 | 115 | |
113 | 116 | title_short_trim_spaces_after(_) -> |
114 | 117 | doc("Spaces after the title must be ignored. (11.2)"), |
115 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! "), | |
116 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! "), | |
117 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! "), | |
118 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! "), | |
119 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! "), | |
118 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! "), | |
119 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! "), | |
120 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! "), | |
121 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! "), | |
122 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! "), | |
120 | 123 | ok. |
121 | 124 | |
122 | 125 | title_short_trim_spaces_before_after(_) -> |
123 | 126 | doc("Spaces before and after the title must be ignored. (11.2)"), |
124 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! "), | |
125 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! "), | |
126 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! "), | |
127 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! "), | |
128 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! "), | |
127 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! "), | |
128 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! "), | |
129 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! "), | |
130 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! "), | |
131 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! "), | |
129 | 132 | ok. |
130 | 133 | |
131 | 134 | title_short_trailer(_) -> |
132 | 135 | doc("The trailing title delimiter is optional. (11.2)"), |
133 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! ="), | |
134 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! =="), | |
135 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ==="), | |
136 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ===="), | |
137 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ====="), | |
136 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! ="), | |
137 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! =="), | |
138 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ==="), | |
139 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ===="), | |
140 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ====="), | |
138 | 141 | ok. |
139 | 142 | |
140 | 143 | title_short_trailer_no_spaces(_) -> |
141 | 144 | doc("One or more spaces must fall between the title and the trailer. (11.2)"), |
142 | [{title, #{level := 0}, <<"Hello world!=">>, _}] = parse("= Hello world!="), | |
143 | [{title, #{level := 1}, <<"Hello world!==">>, _}] = parse("== Hello world!=="), | |
144 | [{title, #{level := 2}, <<"Hello world!===">>, _}] = parse("=== Hello world!==="), | |
145 | [{title, #{level := 3}, <<"Hello world!====">>, _}] = parse("==== Hello world!===="), | |
146 | [{title, #{level := 4}, <<"Hello world!=====">>, _}] = parse("===== Hello world!====="), | |
145 | [{section_title, #{level := 0}, <<"Hello world!=">>, _}] = parse("= Hello world!="), | |
146 | [{section_title, #{level := 1}, <<"Hello world!==">>, _}] = parse("== Hello world!=="), | |
147 | [{section_title, #{level := 2}, <<"Hello world!===">>, _}] = parse("=== Hello world!==="), | |
148 | [{section_title, #{level := 3}, <<"Hello world!====">>, _}] = parse("==== Hello world!===="), | |
149 | [{section_title, #{level := 4}, <<"Hello world!=====">>, _}] = parse("===== Hello world!====="), | |
147 | 150 | ok. |
148 | 151 | |
149 | 152 | title_short_trim_spaces_before_trailer(_) -> |
150 | 153 | doc("Spaces between the title and trailer must be ignored. (11.2)"), |
151 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! ="), | |
152 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! =="), | |
153 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ==="), | |
154 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ===="), | |
155 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ====="), | |
154 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! ="), | |
155 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! =="), | |
156 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! ==="), | |
157 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ===="), | |
158 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ====="), | |
156 | 159 | ok. |
157 | 160 | |
158 | 161 | title_short_trim_spaces_after_trailer(_) -> |
159 | 162 | doc("Spaces after the trailer must be ignored. (11.2)"), |
160 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! = "), | |
161 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! == "), | |
162 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! === "), | |
163 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ==== "), | |
164 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "), | |
163 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! = "), | |
164 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! == "), | |
165 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! === "), | |
166 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ==== "), | |
167 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "), | |
165 | 168 | ok. |
166 | 169 | |
167 | 170 | title_short_trim_spaces_before_after_trailer(_) -> |
168 | 171 | doc("Spaces before and after the trailer must be ignored. (11.2)"), |
169 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! = "), | |
170 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! == "), | |
171 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! === "), | |
172 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ==== "), | |
173 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "), | |
172 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! = "), | |
173 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! == "), | |
174 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! === "), | |
175 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ==== "), | |
176 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "), | |
174 | 177 | ok. |
175 | 178 | |
176 | 179 | title_short_trim_spaces_before_after_title_trailer(_) -> |
177 | 180 | doc("Spaces before and after both the title and the trailer must be ignored. (11.2)"), |
178 | [{title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! = "), | |
179 | [{title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! == "), | |
180 | [{title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! === "), | |
181 | [{title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ==== "), | |
182 | [{title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "), | |
181 | [{section_title, #{level := 0}, <<"Hello world!">>, _}] = parse("= Hello world! = "), | |
182 | [{section_title, #{level := 1}, <<"Hello world!">>, _}] = parse("== Hello world! == "), | |
183 | [{section_title, #{level := 2}, <<"Hello world!">>, _}] = parse("=== Hello world! === "), | |
184 | [{section_title, #{level := 3}, <<"Hello world!">>, _}] = parse("==== Hello world! ==== "), | |
185 | [{section_title, #{level := 4}, <<"Hello world!">>, _}] = parse("===== Hello world! ===== "), | |
183 | 186 | ok. |
184 | 187 | |
185 | 188 | title_short_wrong_trailer(_) -> |
186 | 189 | doc("The delimiters must be the same size when a trailer is present. (11.2)"), |
187 | [{title, #{level := 0}, <<"Hello world! ===">>, _}] = parse("= Hello world! ==="), | |
188 | [{title, #{level := 1}, <<"Hello world! ====">>, _}] = parse("== Hello world! ===="), | |
189 | [{title, #{level := 2}, <<"Hello world! =====">>, _}] = parse("=== Hello world! ====="), | |
190 | [{title, #{level := 3}, <<"Hello world! =">>, _}] = parse("==== Hello world! ="), | |
191 | [{title, #{level := 4}, <<"Hello world! ==">>, _}] = parse("===== Hello world! =="), | |
190 | [{section_title, #{level := 0}, <<"Hello world! ===">>, _}] = parse("= Hello world! ==="), | |
191 | [{section_title, #{level := 1}, <<"Hello world! ====">>, _}] = parse("== Hello world! ===="), | |
192 | [{section_title, #{level := 2}, <<"Hello world! =====">>, _}] = parse("=== Hello world! ====="), | |
193 | [{section_title, #{level := 3}, <<"Hello world! =">>, _}] = parse("==== Hello world! ="), | |
194 | [{section_title, #{level := 4}, <<"Hello world! ==">>, _}] = parse("===== Hello world! =="), | |
192 | 195 | ok. |
193 | 196 | |
194 | 197 | %% Normal paragraphs. |
197 | 200 | |
198 | 201 | paragraph(_) -> |
199 | 202 | doc("Normal paragraph. (15.1)"), |
200 | [{p, _, <<"Hello world this is a paragraph peace.">>, _}] = parse( | |
203 | [{paragraph, _, <<"Hello world this is a paragraph peace.">>, _}] = parse( | |
201 | 204 | "Hello world\n" |
202 | 205 | "this is a paragraph\n" |
203 | 206 | "peace.\n"), |
204 | 207 | [ |
205 | {p, _, <<"Hello world this is a paragraph peace.">>, _}, | |
206 | {p, _, <<"This is another paragraph.">>, _} | |
208 | {paragraph, _, <<"Hello world this is a paragraph peace.">>, _}, | |
209 | {paragraph, _, <<"This is another paragraph.">>, _} | |
207 | 210 | ] = parse( |
208 | 211 | "Hello world\n" |
209 | 212 | "this is a paragraph\n" |
214 | 217 | |
215 | 218 | paragraph_title(_) -> |
216 | 219 | doc("Paragraph preceded by a block title. (12, 15.1)"), |
217 | [{p, #{title := <<"Block title!">>}, <<"Hello world this is a paragraph peace.">>, _}] = parse( | |
220 | [{paragraph, #{<<"title">> := <<"Block title!">>}, <<"Hello world this is a paragraph peace.">>, _}] = parse( | |
218 | 221 | ".Block title!\n" |
219 | 222 | "Hello world\n" |
220 | 223 | "this is a paragraph\n" |
228 | 231 | Source = << |
229 | 232 | "init(Req, State) ->\n" |
230 | 233 | " {ok, Req, State}.">>, |
231 | [{listing, _, Source, _}] = parse(iolist_to_binary([ | |
234 | [{listing_block, _, Source, _}] = parse(iolist_to_binary([ | |
232 | 235 | "----\n", |
233 | 236 | Source, "\n" |
234 | 237 | "----\n"])), |
236 | 239 | |
237 | 240 | listing_title(_) -> |
238 | 241 | doc("Listing block with title. (12, 16.2)"), |
239 | [{listing, #{title := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse( | |
242 | [{listing_block, #{<<"title">> := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse( | |
240 | 243 | ".Block title!\n" |
241 | 244 | "----\n" |
242 | 245 | "1 = 2.\n" |
248 | 251 | Source = << |
249 | 252 | "init(Req, State) ->\n" |
250 | 253 | " {ok, Req, State}.">>, |
251 | [{listing, #{language := <<"erlang">>}, Source, _}] = parse(iolist_to_binary([ | |
254 | [{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>}, Source, _}] = parse(iolist_to_binary([ | |
252 | 255 | "[source,erlang]\n" |
253 | 256 | "----\n", |
254 | 257 | Source, "\n" |
257 | 260 | |
258 | 261 | listing_filter_source_title(_) -> |
259 | 262 | doc("Source code listing filter with title. (12, source-highlight-filter)"), |
260 | [{listing, #{language := <<"erlang">>, title := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse( | |
263 | [{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>, <<"title">> := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse( | |
261 | 264 | ".Block title!\n" |
262 | 265 | "[source,erlang]\n" |
263 | 266 | "----\n" |
264 | 267 | "1 = 2.\n" |
265 | 268 | "----\n"), |
266 | [{listing, #{language := <<"erlang">>, title := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse( | |
269 | [{listing_block, #{1 := <<"source">>, 2 := <<"erlang">>, <<"title">> := <<"Block title!">>}, <<"1 = 2.">>, _}] = parse( | |
267 | 270 | "[source,erlang]\n" |
268 | 271 | ".Block title!\n" |
269 | 272 | "----\n" |
275 | 278 | |
276 | 279 | unordered_list(_) -> |
277 | 280 | doc("Unoredered lists. (17.1)"), |
278 | [{ul, _, [ | |
279 | {li, _, [{p, _, <<"Hello!">>, _}], _} | |
281 | [{list, #{type := bulleted}, [ | |
282 | {list_item, _, [{paragraph, #{}, <<"Hello!">>, _}], _} | |
280 | 283 | ], _}] = parse("* Hello!"), |
281 | [{ul, _, [ | |
282 | {li, _, [{p, _, <<"Hello!">>, _}], _}, | |
283 | {li, _, [{p, _, <<"World!">>, _}], _}, | |
284 | {li, _, [{p, _, <<"Hehe.">>, _}], _} | |
284 | [{list, #{type := bulleted}, [ | |
285 | {list_item, _, [{paragraph, #{}, <<"Hello!">>, _}], _}, | |
286 | {list_item, _, [{paragraph, #{}, <<"World!">>, _}], _}, | |
287 | {list_item, _, [{paragraph, #{}, <<"Hehe.">>, _}], _} | |
285 | 288 | ], _}] = parse( |
286 | 289 | "* Hello!\n" |
287 | 290 | "* World!\n" |
299 | 302 | |
300 | 303 | labeled_list(_) -> |
301 | 304 | doc("Labeled lists. (17.3)"), |
302 | [{ll, _, [ | |
303 | {li, #{label := <<"The label">>}, [{p, _, <<"The value!">>, _}], _} | |
305 | [{list, #{type := labeled}, [ | |
306 | {list_item, #{label := <<"The label">>}, | |
307 | [{paragraph, #{}, <<"The value!">>, _}], _} | |
304 | 308 | ], _}] = parse("The label:: The value!"), |
305 | %% @todo Currently this returns two ll. This is a bug but it gives | |
306 | %% me the result I want, or close enough, for now. | |
307 | [{ll, _, [ | |
308 | {li, #{label := <<"The label">>}, [{p, _, <<"The value!">>, _}], _} | |
309 | ], _}, | |
310 | {ll, _, [ | |
311 | {li, #{label := <<"More labels">>}, [{p, _, <<"More values!">>, _}], _} | |
309 | [{list, #{type := labeled}, [ | |
310 | {list_item, #{label := <<"The label">>}, | |
311 | [{paragraph, #{}, <<"The value!">>, _}], _}, | |
312 | {list_item, #{label := <<"More labels">>}, | |
313 | [{paragraph, #{}, <<"More values!">>, _}], _} | |
312 | 314 | ], _}] = parse( |
313 | 315 | "The label:: The value!\n" |
314 | 316 | "More labels:: More values!\n"), |
315 | [{ll, _, [ | |
316 | {li, #{label := <<"The label">>}, [{p, _, <<"The value!">>, _}], _} | |
317 | [{list, #{type := labeled}, [ | |
318 | {list_item, #{label := <<"The label">>}, | |
319 | [{paragraph, #{}, <<"The value!">>, _}], _} | |
317 | 320 | ], _}] = parse( |
318 | 321 | "The label::\n" |
319 | 322 | "\n" |
320 | 323 | "The value!"), |
321 | ok. | |
322 | ||
323 | %% @todo Very little was implemented from labeled lists. They need more work. | |
324 | [{list, #{type := labeled}, [ | |
325 | {list_item, #{label := <<"The label">>}, | |
326 | [{paragraph, #{}, <<"The value!">>, _}], _} | |
327 | ], _}] = parse( | |
328 | "The label::\n" | |
329 | " The value!"), | |
330 | [{list, #{type := labeled}, [ | |
331 | {list_item, #{label := <<"The label">>}, [ | |
332 | {paragraph, _, <<"The value!">>, _}, | |
333 | {paragraph, _, <<"With continuations!">>, _}, | |
334 | {paragraph, _, <<"OK good.">>, _} | |
335 | ], _} | |
336 | ], _}] = parse( | |
337 | "The label::\n" | |
338 | "\n" | |
339 | "The value!\n" | |
340 | "+\n" | |
341 | "With continuations!\n" | |
342 | "+\n" | |
343 | "OK good."), | |
344 | [{list, #{type := labeled}, [ | |
345 | {list_item, #{label := <<"The label">>}, [ | |
346 | {paragraph, #{}, <<"The value!">>, _}, | |
347 | {list, #{type := bulleted}, [ | |
348 | {list_item, _, [{paragraph, #{}, <<"first list item">>, _}], _}, | |
349 | {list_item, _, [{paragraph, #{}, <<"second list item">>, _}], _}, | |
350 | {list_item, _, [{paragraph, #{}, <<"third list item">>, _}], _} | |
351 | ], _} | |
352 | ], _} | |
353 | ], _}] = parse( | |
354 | "The label::\n" | |
355 | "\n" | |
356 | "The value!\n" | |
357 | "+\n" | |
358 | " * first list item\n" | |
359 | " * second list\n" | |
360 | " item\n" | |
361 | " * third list\n" | |
362 | " item\n" | |
363 | "\n"), | |
364 | ok. | |
324 | 365 | |
325 | 366 | %% Macros. |
326 | 367 | |
rel_link(_) ->
	doc("Relative links are built using the link:Target[Caption] macro. (21.1.3)"),
	%% A link macro that makes up the entire paragraph still produces
	%% a paragraph node wrapping a single link node.
	[{paragraph, _, [
		{link, #{target := <<"downloads/cowboy-2.0.tgz">>}, <<"2.0">>, _}
	], _}] = parse("link:downloads/cowboy-2.0.tgz[2.0]"),
	%% A link macro inline in running text splits the paragraph into
	%% plain binary segments around the link node.
	[{paragraph, _, [
		<<"Download ">>,
		{link, #{target := <<"downloads/cowboy-2.0.zip">>}, <<"Cowboy 2.0">>, _},
		<<" as zip">>
	], _}] = parse("Download link:downloads/cowboy-2.0.zip[Cowboy 2.0] as zip"),
	ok.
338 | 379 | |
comment_line(_) ->
	doc("Lines starting with two slashes are treated as comments. (21.2.3)"),
	%% Whitespace directly after the slashes, and trailing whitespace,
	%% is stripped from the comment text.
	[{comment_line, _, <<"This is a comment.">>, _}] = parse("//This is a comment."),
	[{comment_line, _, <<"This is a comment.">>, _}] = parse("// This is a comment."),
	[{comment_line, _, <<"This is a comment.">>, _}] = parse("// This is a comment. "),
	%% Consecutive comment lines each produce their own node;
	%% they are not merged into a comment block.
	[
		{comment_line, _, <<"First line.">>, _},
		{comment_line, _, <<"Second line.">>, _}
	] = parse(
		"// First line.\n"
		"// Second line.\n"),
	ok.
344 | 392 | |
345 | 393 | %% Tables. (23) |
348 | 396 | %% @todo I think I read somewhere that paragraphs are not allowed in cells... Double check. |
349 | 397 | [{table, _, [ |
350 | 398 | {row, _, [ |
351 | {cell, _, [{p, _, <<"1">>, _}], _}, | |
352 | {cell, _, [{p, _, <<"2">>, _}], _}, | |
353 | {cell, _, [{p, _, <<"A">>, _}], _} | |
399 | {cell, _, <<"1">>, _}, | |
400 | {cell, _, <<"2">>, _}, | |
401 | {cell, _, <<"A">>, _} | |
354 | 402 | ], _}, |
355 | 403 | {row, _, [ |
356 | {cell, _, [{p, _, <<"3">>, _}], _}, | |
357 | {cell, _, [{p, _, <<"4">>, _}], _}, | |
358 | {cell, _, [{p, _, <<"B">>, _}], _} | |
404 | {cell, _, <<"3">>, _}, | |
405 | {cell, _, <<"4">>, _}, | |
406 | {cell, _, <<"B">>, _} | |
359 | 407 | ], _}, |
360 | 408 | {row, _, [ |
361 | {cell, _, [{p, _, <<"5">>, _}], _}, | |
362 | {cell, _, [{p, _, <<"6">>, _}], _}, | |
363 | {cell, _, [{p, _, <<"C">>, _}], _} | |
409 | {cell, _, <<"5">>, _}, | |
410 | {cell, _, <<"6">>, _}, | |
411 | {cell, _, <<"C">>, _} | |
364 | 412 | ], _} |
365 | 413 | ], _}]= parse( |
366 | 414 | "|=======\n" |