-*- mode: org -*- #+TITLE: spine (doc_reform) (project) makefile #+DESCRIPTION: makefile for spine #+FILETAGS: :spine:build:tools: #+AUTHOR: Ralph Amissah #+EMAIL: [[mailto:ralph.amissah@gmail.com][ralph.amissah@gmail.com]] #+COPYRIGHT: Copyright (C) 2015 - 2021 Ralph Amissah #+LANGUAGE: en #+STARTUP: content hideblocks hidestars noindent entitiespretty #+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t #+PROPERTY: header-args :exports code #+PROPERTY: header-args+ :noweb yes #+PROPERTY: header-args+ :eval no #+PROPERTY: header-args+ :results no #+PROPERTY: header-args+ :cache no #+PROPERTY: header-args+ :padline no #+PROPERTY: header-args+ :mkdirp yes [[./spine.org][spine.org]] VERSION [[./spine_info.org][spine_info.org]] [[../org/][org/]] * spine ** version info VERSION :version:set:project: *** project #+NAME: version_spine #+BEGIN_SRC sh 0.11.3 #+END_SRC *** subprojects - d2sqlite3 https://code.dlang.org/packages/d2sqlite3 #+NAME: version_d2sqlite3 #+BEGIN_SRC sh 0.19.1 #+END_SRC - imageformats https://code.dlang.org/packages/imageformats #+NAME: version_imageformats #+BEGIN_SRC sh 7.0.2 #+END_SRC - dyaml https://code.dlang.org/packages/dyaml #+NAME: version_dyaml #+BEGIN_SRC sh 0.8.3 #+END_SRC - tinyendian https://code.dlang.org/packages/tinyendian #+NAME: version_tinyendian #+BEGIN_SRC sh 0.2.0 #+END_SRC *** build tools if specified - meson #+NAME: version_meson #+BEGIN_SRC sh 0.46 #+END_SRC - soversion #+NAME: version_soversion #+BEGIN_SRC sh 0 #+END_SRC ** makefile :makefile: *** tangle #+HEADER: :tangle ../makefile #+BEGIN_SRC makefile <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> <> #+END_SRC *** settings [+2] :settings: **** git version stamp :git:version: #+NAME: make_set_var_0_git_version #+BEGIN_SRC makefile PROG_VER_GIT :=$(shell echo `git describe --long --tags | sed -e "s/^[ 
a-z_-]\+\([0-9.]\+\)/\1/;s/\([^-]*-g\)/r\1/;s/-/./g"`) PROG_VER_DECLARED :=$(shell echo `cat ./views/version.txt | grep --color=never "enum" | sed 's/.\+(\([0-9]\+\),[ \t]\+\([0-9]\+\),[ \t]\+\([0-9]\+\)[ \t]*).\+/\1.\2.\3/g'`) #+END_SRC **** dub (build tool) :dub: #+NAME: make_set_var_1_dub #+BEGIN_SRC makefile DUB=dub DUB_FLAGS=-v --force --compiler= #+END_SRC **** Project Details :project:spine: #+NAME: make_set_var_2_project_details #+BEGIN_SRC makefile PRG_NAME=doc-reform PRG_NICKNAME=spine PRG_NAME_DIR=$(shell echo `echo $(PRG_NAME) | sed -e "s/-/_/g"`) PRG_SRC=$(PRG_NAME).d PRG_SRCDIR=./src PRG_BIN=$(PRG_NAME) PRG_BINDIR=./bin PRG_DOCDIR=./docs #+END_SRC **** Emacs Org settings :settings:emacs:org:tangle: #+NAME: make_set_var_3_emacs_org #+BEGIN_SRC makefile EMACSLISP=/usr/share/emacs/site-lisp ORG_VER_AVAILABLE=$(shell echo `ls -d ~/.emacs.d/elpa/org-???????? | cut -d '-' -f2`) EMACSLISP_ORG=~/.emacs.d/elpa/org-$(ORG_VER_AVAILABLE) ORG_CONTRIB_VER_AVAILABLE=$(shell echo `ls -d ~/.emacs.d/elpa/org-plus-contrib-????????
| cut -d '-' -f2`) EMACSLISP_ORG_CONTRIB=~/.emacs.d/elpa/org-plus-contrib-$(ORG_CONTRIB_VER_AVAILABLE) ORGFILELIST=$(shell echo `ls -1 org/*.org`) ORGFILES="" ORGDIR :=$(shell echo `pwd`) #+END_SRC **** Markup Samples ***** pods #+NAME: make_set_var_4_markup_samples_pods #+BEGIN_SRC makefile SiSU_MARKUP_SAMPLES_FIND_PODS= \ find data/pod -maxdepth 2 -name pod.manifest | cut -f 1-3 -d / | sort SiSU_MARKUP_SAMPLES_PODS_FOUND= \ $(SiSU_MARKUP_SAMPLES_FIND_PODS) | xargs SiSU_MARKUP_SAMPLES_POD = \ data/pod/accelerando.charles_stross \ data/pod/alices_adventures_in_wonderland.lewis_carroll \ data/pod/content.cory_doctorow \ data/pod/democratizing_innovation.eric_von_hippel \ data/pod/down_and_out_in_the_magic_kingdom.cory_doctorow \ data/pod/for_the_win.cory_doctorow \ data/pod/free_as_in_freedom_2.richard_stallman_and_the_free_software_revolution.sam_williams.richard_stallman \ data/pod/free_culture.lawrence_lessig \ data/pod/free_for_all.peter_wayner \ data/pod/gpl2.fsf \ data/pod/gpl3.fsf \ data/pod/gullivers_travels.jonathan_swift \ data/pod/little_brother.cory_doctorow \ data/pod/live-manual \ data/pod/sisu-manual \ data/pod/the_autonomous_contract.ralph_amissah \ data/pod/the_cathedral_and_the_bazaar.eric_s_raymond \ data/pod/the_public_domain.james_boyle \ data/pod/the_wealth_of_networks.yochai_benkler \ data/pod/through_the_looking_glass.lewis_carroll \ data/pod/two_bits.christopher_kelty \ data/pod/un_contracts_international_sale_of_goods_convention_1980 \ data/pod/viral_spiral.david_bollier #+END_SRC ***** dir #+NAME: make_set_var_5_markup_samples_search_dirs #+BEGIN_SRC makefile SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND = \ find data/sisudir/media/text -name "*.ss[tm]" | sort | xargs SiSU_MARKUP_SAMPLES_DIR = \ data/sisudir/media/text/accelerando.charles_stross.sst \ data/sisudir/media/text/alices_adventures_in_wonderland.lewis_carroll.sst \ data/sisudir/media/text/content.cory_doctorow.sst \
data/sisudir/media/text/democratizing_innovation.eric_von_hippel.sst \ data/sisudir/media/text/down_and_out_in_the_magic_kingdom.cory_doctorow.sst \ data/sisudir/media/text/ffa_tmp.sst \ data/sisudir/media/text/for_the_win.cory_doctorow.sst \ data/sisudir/media/text/free_as_in_freedom_2.richard_stallman_and_the_free_software_revolution.sam_williams.richard_stallman.sst \ data/sisudir/media/text/free_culture.lawrence_lessig.sst \ data/sisudir/media/text/free_for_all.peter_wayner.sst \ data/sisudir/media/text/gpl2.fsf.sst \ data/sisudir/media/text/gpl3.fsf.sst \ data/sisudir/media/text/gullivers_travels.jonathan_swift.sst \ data/sisudir/media/text/little_brother.cory_doctorow.sst \ data/sisudir/media/text/sisu_markup.sst \ data/sisudir/media/text/sisu_markup_stress_test.sst \ data/sisudir/media/text/sisu_markup_test.sst \ data/sisudir/media/text/table_special_markup.sst \ data/sisudir/media/text/the_autonomous_contract.ralph_amissah.sst \ data/sisudir/media/text/the_cathedral_and_the_bazaar.eric_s_raymond.sst \ data/sisudir/media/text/the_public_domain.james_boyle.sst \ data/sisudir/media/text/the_wealth_of_networks.yochai_benkler.sst \ data/sisudir/media/text/through_the_looking_glass.lewis_carroll.sst \ data/sisudir/media/text/two_bits.christopher_kelty.sst \ data/sisudir/media/text/un_contracts_international_sale_of_goods_convention_1980.sst \ data/sisudir/media/text/viral_spiral.david_bollier.sst #+END_SRC ****** sample markup file list #+NAME: make_find_markup_samples_0_pod_and_dir #+BEGIN_SRC makefile markup_samples: find data/pod -name pod.manifest | cut -f 1-3 -d / | sort; \ find data/sisudir/media/text -name *.ss[tm] | sort #+END_SRC #+NAME: make_find_markup_samples_1_pod #+BEGIN_SRC makefile markup_pod_samples: find data/pod -name pod.manifest | cut -f 1-3 -d / | sort #+END_SRC #+NAME: make_find_markup_samples_2_dir #+BEGIN_SRC makefile markup_dir_samples: find data/sisudir/media/text -name *.ss[tm] | sort #+END_SRC *** make archive #+NAME: 
make_project_zip_archive #+BEGIN_SRC makefile gitZip: git archive -v --format=tar --prefix=`echo spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)`/ HEAD | gzip > ../spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT).tar.gz && \ echo "to unzip: tar -xzf spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT).tar.gz" gitArchive: git archive -v --format=tar --prefix=spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)/ HEAD | gzip > ../spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT).tar.gz && \ echo "to unzip: tar -xzf spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT).tar.gz" #+END_SRC *** _make_ commands [+2] :make:commands: - build commands - build - debug - release - init, clean, distclean etc. - init - clean - expunge - distclean - distclean_and_init - org babel tangle - tangle - git snapshot - gitsnapshot **** version tag, stamp #+NAME: make_version_tag #+BEGIN_SRC makefile version_tag: echo "DRV=<>; git tag -f doc-reform_v$$\{DRV} -m\"doc-reform $$\{DRV}\" HEAD" && \ echo "git tag -f doc-reform_v<> -m\"doc-reform spine-<>\" HEAD" #+END_SRC **** changelog #+NAME: make_changelog #+BEGIN_SRC makefile changelog: git log --pretty=format:'---%+s %+as %ae%+h%d%+b' --no-merges | sed "/^\\s*$$/d" | sed "s/^---$$//" | sed "s/^\(\*\)\+/-/" | sed "s/ \+$$//" > CHANGELOG_ #+END_SRC **** build commands [+1] :build:compile: ***** _dub_ build rebuild :dub: ****** all compiler builds :all:dmd:gdc:ldc: ******* all builds ******** default #+NAME: make_dub_upgrade #+BEGIN_SRC makefile dub_upgrade: $(DUB) upgrade #+END_SRC #+NAME: make_compile_0_compiler_default #+BEGIN_SRC makefile default: ldc #+END_SRC ******** quick :quick: ********* default :default: #+NAME: make_compile_1_compiler_all #+BEGIN_SRC makefile all: dmd ldc gdc #+END_SRC ********* version :version: #+NAME: make_compile_2_compiler_all_versioned #+BEGIN_SRC makefile all_ver: dmd_ver ldc_ver gdc_ver #+END_SRC ********* get project dependencies #+NAME: make_get_project_dependencies_github #+BEGIN_SRC makefile get_depends: hwd=$$(echo `pwd`) && 
\ gwd="$${hwd}/src/ext_depends" && \ dirs=$$(echo `ls -gx $${gwd}`) && \ license_bsl="Boost Software License 1.0 (BSL-1.0)" && \ echo $${hwd} && \ echo $${gwd} && \ echo $${dirs} && \ dub upgrade; \ cd $${gwd} && \ for dir in $${dirs}; do \ if [ -d $${dir} ]; then \ echo $${dir} && \ if [ "imageformats" == $${dir} ]; then \ echo $${dir} && \ rm -rf $${dir} && \ git clone --depth=1 https://github.com/tjhann/$${dir} && \ cd $${dir} && \ echo $$PWD && \ echo "$${dir} `git rev-parse HEAD | cut -c 1-8`" > ../$${dir}.meta && \ echo "https://github.com/tjhann/$${dir}" >> ../$${dir}.meta && \ echo "$${license_bsl}" >> ../$${dir}.meta && \ cd $${gwd} && \ rm -rf $${dir}/.git; \ elif [[ "d2sqlite3" == $${dir} || "D-YAML" == $${dir} || "tinyendian" == $${dir} ]]; then \ echo $${dir} && \ rm -rf $${dir} && \ git clone --depth=1 https://github.com/dlang-community/$${dir} && \ cd $${dir} && \ echo "$${dir} `git rev-parse HEAD | cut -c 1-8`" > ../$${dir}.meta && \ echo "https://github.com/dlang-community/$${dir}" >> ../$${dir}.meta && \ echo "$${license_bsl}" >> ../$${dir}.meta && \ cd $${gwd} && \ rm -rf $${dir}/.git; \ fi; \ fi; \ done; \ cd $${hwd} set_depends: get_depends dub describe | sed 's~$(shell echo `pwd | sed 's_/_\\/_g'`)~.~g' > dub_describe.json rm_flakelock: git reset HEAD flake.lock && \ touch flake.lock && \ git rm -f flake.lock flake_update: git add .; \ nix flake update && \ git reset HEAD flake.lock; \ cp flake.lock flake.lock_ flake_update_move_lock: flake_update touch flake.lock && \ git rm -f flake.lock set_latest: set_depends flake_update rm_flakelock #+END_SRC gitDir=dub2nix; git clone --depth=1 https://github.com/lionello/${gitDir} && rm -rf ${gitDir}/.git ********* debug :debug: #+NAME: make_compile_3_compiler_all_debug #+BEGIN_SRC makefile all_debug: dmd_debug gdc_debug ldc_debug #+END_SRC ********* _meson_ build :meson: meson using dub - works & looks pretty clean - but subprojects built externally & not suitable for debian packaging #+NAME: make_meson_build
#+BEGIN_SRC makefile meson_clean_build_dir: rm -r build; mkdir build meson_build: meson_clean_build_dir meson --buildtype=debugoptimized build ninja -C build notify-send -t 0 'D meson build ldc compiled test release executable ready' 'spine' meson_redo: meson_clean_build_dir meson_build meson_project_build_clean: clean skel tangle dub_upgrade meson_build meson: meson_clean_build_dir dub_upgrade meson_build #+END_SRC ******** clean & tangle :clean:tangle: ********* default :default: #+NAME: make_clean_tangle_compile_0_all_clean #+BEGIN_SRC makefile all_clean: clean tangle dmd ldc gdc #+END_SRC ********* version :version: #+NAME: make_clean_tangle_compile_1_all_clean_versioned #+BEGIN_SRC makefile all_ver_clean: clean tangle dmd_ver ldc_ver gdc_ver #+END_SRC ********* debug :debug: #+NAME: make_clean_tangle_compile_2_all_clean_debug #+BEGIN_SRC makefile all_debug_ver: dmd_debug_ver gdc_debug_ver ldc_debug_ver all_debug_clean_ver: clean tangle dmd_debug_ver gdc_debug_ver ldc_debug_ver #+END_SRC ****** individual compiler builds :each: ******* dmd :dmd: ******** quick :quick: ********* default :default: #+NAME: make_dub_compile_dmd_0_default #+BEGIN_SRC makefile dmd: dub_upgrade $(DUB) --compiler=dmd --config=dmd --build=release-nobounds notify-send -t 0 'D dmd compiled test release executable ready' 'spine-dmd' #+END_SRC ********* debug :debug: #+NAME: make_dub_compile_dmd_1_debug #+BEGIN_SRC makefile dmd_debug: $(DUB) --compiler=dmd --config=dmd --build=debug #+END_SRC ********* version :version: - assumes git tags with program version #+NAME: make_dub_compile_dmd_2_versioned #+BEGIN_SRC makefile dmd_ver: dub_upgrade $(DUB) --compiler=dmd --config=dmd-version mv bin/spine-dmd-ver bin-archive/spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)-dmd notify-send -t 0 'D dmd compiled test release executable ready' 'spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)-dmd' dmd_clean_ver: clean tangle dmd_ver #+END_SRC ******** clean & tangle :clean:tangle: #+NAME: 
make_dub_compile_dmd_3_release #+BEGIN_SRC makefile dmd_rel: expunge skel tangle $(DUB) --compiler=dmd --config=spine-release dmd_rel_tangle: tangle $(DUB) --compiler=dmd --config=spine-dmd dmd_rel_expunge_tangle: expunge skel tangle dmd_rel_tangle dmd_debug_tangle: tangle $(DUB) --compiler=dmd --config=spine-dmd-debug #+END_SRC ******* gdc :gdc: ******** quick :quick: ********* default :default: #+NAME: make_dub_compile_gdc_0_default #+BEGIN_SRC makefile gdc: dub_upgrade $(DUB) --compiler=gdc --config=gdc notify-send -t 0 'D gdc compiled test release executable ready' 'spine-gdc' #+END_SRC ********* debug :debug: #+NAME: make_dub_compile_gdc_1_debug #+BEGIN_SRC makefile gdc_debug: $(DUB) --compiler=gdc --config=gdc --build=debug #+END_SRC ********* version :version: - assumes git tags with program version #+NAME: make_dub_compile_gdc_2_versioned #+BEGIN_SRC makefile gdc_ver: dub_upgrade $(DUB) --compiler=gdc --config=gdc-version mv bin/spine-gdc-ver bin-archive/spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)-gdc notify-send -t 0 'D gdc compiled test release executable ready' 'spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)-gdc' gdc_clean_ver: clean tangle gdc_ver #+END_SRC ******** clean & tangle :clean:tangle: #+NAME: make_dub_compile_gdc_3_release #+BEGIN_SRC makefile gdc_rel: expunge skel tangle $(DUB) --compiler=gdc --config=spine-release gdc_rel_tangle: tangle $(DUB) --compiler=gdc --config=spine-gdc gdc_rel_expunge_tangle: expunge skel tangle gdc_rel_tangle gdc_debug_tangle: tangle $(DUB) --compiler=gdc --config=spine-gdc-debug #+END_SRC ******* ldc :ldc: ******** quick :quick: ********* default :default: #+NAME: make_dub_compile_ldc_0_default #+BEGIN_SRC makefile ldc: dub_upgrade $(DUB) --compiler=ldc2 --config=ldc --build=release notify-send -t 0 'D ldc compiled test release executable ready' 'spine-ldc' #+END_SRC ********* debug :debug: #+NAME: make_dub_compile_ldc_1_debug #+BEGIN_SRC makefile ldc_debug: $(DUB) --compiler=ldc2 --config=ldc --build=debug 
#+END_SRC ********* version :version: - assumes git tags with program version #+NAME: make_dub_compile_ldc_2_versioned #+BEGIN_SRC makefile ldc_ver: dub_upgrade $(DUB) --compiler=ldc2 --config=ldc-version mv bin/spine-ldc-ver bin-archive/spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)-ldc notify-send -t 0 'D ldc2 compiled test release executable ready' 'spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT)-ldc' ldc_clean_ver: clean tangle ldc_ver #+END_SRC ******** clean & tangle :clean:tangle: #+NAME: make_dub_compile_ldc_3_release #+BEGIN_SRC makefile ldc_rel: expunge skel tangle $(DUB) --compiler=ldc2 --config=spine-release ldc_rel_tangle: tangle $(DUB) --compiler=ldc2 --config=spine-ldc ldc_rel_expunge_tangle: expunge skel tangle ldc_rel_tangle ldc_debug_tangle: tangle $(DUB) --compiler=ldc2 --config=spine-ldc-debug #+END_SRC ****** generic ******* init clean distclean etc. :clean: #+NAME: make_initialization_operations_0_parts #+BEGIN_SRC makefile reinit: clean skel tangle skel: mkdir -p $(PRG_BINDIR); \ mkdir -p $(PRG_BINDIR)-archive; \ mkdir -p build; \ mkdir -p views; \ mkdir -p data; \ mkdir -p sundry/util/d/cgi/search/cgi-bin/src; \ mkdir -p sundry/util/d/tools/markup_conversion; \ mkdir -p sundry/editor-syntax-etc/emacs; \ mkdir -p sundry/editor-syntax-etc/vim/syntax; \ mkdir -p sundry/editor-syntax-etc/vim/colors; \ mkdir -p sundry/editor-syntax-etc/vim/ftplugin; \ mkdir -p sundry/editor-syntax-etc/vim/rc; \ mkdir -p sundry/editor-syntax-etc/vim/templates; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR)/conf; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR)/io_in; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR)/io_out; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR)/meta; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR)/share; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR); \ mkdir -p $(PRG_BINDIR); init: skel clean: rm -rf build; \ rm -rf $(PRG_SRCDIR); \ rm -rf $(PRG_DOCDIR); clean_bin: rm $(PRG_BINDIR)/* clean_src: rm -rf $(PRG_SRCDIR); clean_docs: rm -rf $(PRG_DOCDIR) expunge: rm -f 
dub.selections.json; \ rm -rf build; \ rm -rf $(PRG_SRCDIR); \ rm -rf $(PRG_BINDIR); \ rm -rf $(PRG_DOCDIR); distclean: expunge distclean_and_init: expunge mkdir -p views; \ mkdir -p sundry/util/d/cgi/search/cgi-bin/src; \ mkdir -p sundry/util/d/tools/markup_conversion; \ mkdir -p sundry/editor-syntax-etc/emacs; \ mkdir -p sundry/editor-syntax-etc/vim/syntax; \ mkdir -p sundry/editor-syntax-etc/vim/colors; \ mkdir -p sundry/editor-syntax-etc/vim/ftplugin; \ mkdir -p sundry/editor-syntax-etc/vim/rc; \ mkdir -p sundry/editor-syntax-etc/vim/templates; \ mkdir -p $(PRG_SRCDIR)/$(PRG_NAME_DIR); \ mkdir -p $(PRG_BINDIR); #+END_SRC ******* version :version: #+NAME: make_initialization_operations_1_git_version #+BEGIN_SRC makefile ver: echo spine-$(PROG_VER_DECLARED)-tag-$(PROG_VER_GIT) #+END_SRC ******* tangle build rebuild :clean:tangle:build: #+NAME: make_initialization_operations_2_assemble #+BEGIN_SRC makefile clean_tangle_build: clean tangle build tangle_build: tangle build build: $(PRG_SRCDIR)/$(PRG_SRC) $(DC) $(DC_FLAGS) \ $(DC_FLAG_BINOF)$(PRG_BINDIR)/$(PRG_BIN) \ $(PRG_SRCDIR)/$(PRG_SRC) rebuild: $(PRG_SRCDIR)/$(PRG_SRC) $(PRG_BINDIR)/$(PRG_BIN).o clean build makefile_new: make -k tangle_maker restart: clean tangle #+END_SRC **** testrun (program against document markup) :markup:sample: ***** pods :pod: ******* find files #+NAME: make_project_testrun_0_find_pods #+BEGIN_SRC makefile find_pods: $(SiSU_MARKUP_SAMPLES_FIND_PODS) #+END_SRC ****** dmd ******* find files #+NAME: make_project_testrun_1 #+BEGIN_SRC makefile dmd_testrun_find: ./bin/spine-dmd -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ data/pod/sisu-manual dmd_testrun_find_pod_source: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-ldc -v --source --html --epub \ --output-dir=tmp/program_output_pod dmd_testrun_find_pod_pod: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-dmd -v --pod \ --output-dir=tmp/program_output_pod 
dmd_testrun_find_pod_html: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-dmd -v --html \ --output-dir=tmp/program_output_pod dmd_testrun_find_pod_epub: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-dmd -v --epub \ --output-dir=tmp/program_output_pod dmd_testrun_find_pod_all: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-dmd -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod #+END_SRC ******* path list #+NAME: make_project_testrun_2 #+BEGIN_SRC makefile dmd_testrun_paths_pod_source: ./bin/spine-dmd -v --source \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) dmd_testrun_paths_pod_pod: ./bin/spine-dmd -v --pod \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) dmd_testrun_paths_pod_html: ./bin/spine-dmd -v --html \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) dmd_testrun_paths_pod_epub: ./bin/spine-dmd -v --epub \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) dmd_testrun_paths_pod_all: ./bin/spine-dmd -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) #+END_SRC ****** gdc ******* find files #+NAME: make_project_testrun_3 #+BEGIN_SRC makefile gdc_testrun_find: ./bin/spine-gdc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ data/pod/sisu-manual gdc_testrun_find_pod_source: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-gdc -v --source \ --output-dir=tmp/program_output_pod gdc_testrun_find_pod_pod: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-gdc -v --pod \ --output-dir=tmp/program_output_pod gdc_testrun_find_pod_html: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-gdc -v --html \ --output-dir=tmp/program_output_pod gdc_testrun_find_pod_epub: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-gdc -v --epub \ --output-dir=tmp/program_output_pod 
gdc_testrun_find_pod_all: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-gdc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod #+END_SRC ******* path list #+NAME: make_project_testrun_4 #+BEGIN_SRC makefile gdc_testrun_paths: ./bin/spine-gdc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ data/pod/sisu-manual gdc_testrun_paths_pod_source: ./bin/spine-gdc -v --source \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) gdc_testrun_paths_pod_pod: ./bin/spine-gdc -v --pod \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) gdc_testrun_paths_pod_html: ./bin/spine-gdc -v --html \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) gdc_testrun_paths_pod_epub: ./bin/spine-gdc -v --epub \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) gdc_testrun_paths_pod_all: ./bin/spine-gdc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) #+END_SRC ****** ldc ******* find files #+NAME: make_project_testrun_5 #+BEGIN_SRC makefile ldc_testrun_find: ./bin/spine-ldc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ data/pod/sisu-manual ldc_testrun_find_pod_source: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-ldc -v --source \ --output-dir=tmp/program_output_pod ldc_testrun_find_pod_pod: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-ldc -v --pod \ --output-dir=tmp/program_output_pod ldc_testrun_find_pod_html: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-ldc -v --html \ --output-dir=tmp/program_output_pod ldc_testrun_find_pod_epub: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-ldc -v --epub \ --output-dir=tmp/program_output_pod ldc_testrun_find_pod_all: $(SiSU_MARKUP_SAMPLES_PODS_FOUND) \ ./bin/spine-ldc -v --source --html 
--epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod #+END_SRC ******* path list #+NAME: make_project_testrun_6 #+BEGIN_SRC makefile ldc_testrun_paths: ./bin/spine-ldc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ data/pod/sisu-manual ldc_testrun_paths_pod_source: ./bin/spine-ldc -v --source \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) ldc_testrun_paths_pod_pod: ./bin/spine-ldc -v --pod \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) ldc_testrun_paths_pod_html: ./bin/spine-ldc -v --html \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) ldc_testrun_paths_pod_epub: ./bin/spine-ldc -v --epub \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) ldc_testrun_paths_pod_all: ./bin/spine-ldc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_pod \ $(SiSU_MARKUP_SAMPLES_POD) #+END_SRC ***** dir :dir: ****** dmd ******* find files #+NAME: make_project_testrun_7 #+BEGIN_SRC makefile dmd_testrun_find_dir_source: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-dmd -v --source \ --output-dir=tmp/program_output_dir dmd_testrun_find_dir_pod: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-dmd -v --pod \ --output-dir=tmp/program_output_dir dmd_testrun_find_dir_html: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-dmd -v --html \ --output-dir=tmp/program_output_dir dmd_testrun_find_dir_epub: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-dmd -v --epub \ --output-dir=tmp/program_output_dir dmd_testrun_find_dir_all: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-dmd -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_dir #+END_SRC ******* file list #+NAME: make_project_testrun_8 #+BEGIN_SRC makefile dmd_testrun_filelist_dir_source: ./bin/spine-dmd -v 
--source \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) dmd_testrun_filelist_dir_pod: ./bin/spine-dmd -v --pod \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) dmd_testrun_filelist_dir_html: ./bin/spine-dmd -v --html \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) dmd_testrun_filelist_dir_epub: ./bin/spine-dmd -v --epub \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) dmd_testrun_filelist_dir_all: ./bin/spine-dmd -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) #+END_SRC ****** gdc ******* find files #+NAME: make_project_testrun_9 #+BEGIN_SRC makefile gdc_testrun_find_dir_source: ./bin/spine-gdc -v --source \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) gdc_testrun_find_dir_pod: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-gdc -v --pod \ --output-dir=tmp/program_output_dir gdc_testrun_find_dir_html: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-gdc -v --html \ --output-dir=tmp/program_output_dir gdc_testrun_find_dir_epub: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-gdc -v --epub \ --output-dir=tmp/program_output_dir gdc_testrun_find_dir_all: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-gdc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_dir #+END_SRC ******* file list #+NAME: make_project_testrun_10 #+BEGIN_SRC makefile gdc_testrun_filelist_dir_source: ./bin/spine-gdc -v --source \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) gdc_testrun_filelist_dir_pod: ./bin/spine-gdc -v --pod \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) gdc_testrun_filelist_dir_html: ./bin/spine-gdc -v --html \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) gdc_testrun_filelist_dir_epub: ./bin/spine-gdc -v --epub \ --output-dir=tmp/program_output_dir \ 
$(SiSU_MARKUP_SAMPLES_DIR) gdc_testrun_filelist_dir_all: ./bin/spine-gdc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) #+END_SRC ****** ldc ******* find files #+NAME: make_project_testrun_11 #+BEGIN_SRC makefile ldc_testrun_find_dir_source: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-ldc -v --source \ --output-dir=tmp/program_output_dir ldc_testrun_find_dir_pod: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-ldc -v --pod \ --output-dir=tmp/program_output_dir ldc_testrun_find_dir_html: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-ldc -v --html \ --output-dir=tmp/program_output_dir ldc_testrun_find_dir_epub: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-ldc -v --epub \ --output-dir=tmp/program_output_dir ldc_testrun_find_dir_all: $(SiSU_MARKUP_SAMPLES_DIR_FILES_FOUND) \ ./bin/spine-ldc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_dir #+END_SRC ******* file list #+NAME: make_project_testrun_12 #+BEGIN_SRC makefile ldc_testrun_filelist_dir_source: ./bin/spine-ldc -v --source \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) ldc_testrun_filelist_dir_pod: ./bin/spine-ldc -v --pod \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) ldc_testrun_filelist_dir_html: ./bin/spine-ldc -v --html \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) ldc_testrun_filelist_dir_epub: ./bin/spine-ldc -v --epub \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) ldc_testrun_filelist_dir_all: ./bin/spine-ldc -v --source --html --epub --sqlite-discrete \ --sqlite-create --sqlite-update \ --output-dir=tmp/program_output_dir \ $(SiSU_MARKUP_SAMPLES_DIR) #+END_SRC **** org babel tangle batch process command :tangle: ***** tangle: org babel tangle *.org #+NAME: make_tangle_0 #+BEGIN_SRC makefile tangle: skel for f in $(ORGFILELIST); do \ 
ORGFILES="$$ORGFILES \"$$f\""; \ done; \ emacs --batch -Q -q \ --eval "(progn \ (add-to-list 'load-path \ (expand-file-name \"$(EMACSLISP)\")) \ (add-to-list 'load-path \ (expand-file-name \"$(EMACSLISP_ORG)\" t)) \ (add-to-list 'load-path \ (expand-file-name \"$(EMACSLISP_ORG_CONTRIB)\" t)) \ (require 'org)(require 'ob)(require 'ob-tangle) \ (mapc (lambda (file) \ (find-file (expand-file-name file \"$(ORGDIR)\")) \ (setq-local org-src-preserve-indentation t) \ (org-babel-tangle) \ (kill-buffer)) '($$ORGFILES)))" 2>&1 #+END_SRC ***** tangle maker: org babel tangle makefile new #+NAME: make_tangle_1 #+BEGIN_SRC makefile tangle_maker: for f in $(ORGFILELIST); do \ ORGFILES="\"org/dr_build_scaffold.org\""; \ done; \ emacs --batch -Q -q \ --eval "(progn \ (add-to-list 'load-path \ (expand-file-name \"$(EMACSLISP)\")) \ (add-to-list 'load-path \ (expand-file-name \"$(EMACSLISP_ORG)\" t)) \ (add-to-list 'load-path \ (expand-file-name \"$(EMACSLISP_ORG_CONTRIB)\" t)) \ (require 'org)(require 'ob)(require 'ob-tangle) \ (mapc (lambda (file) \ (find-file (expand-file-name file \".\")) \ (setq-local org-src-preserve-indentation t) \ (org-babel-tangle) \ (kill-buffer)) '($$ORGFILES)))" 2>&1 #+END_SRC ***** tangle nix project #+NAME: make_nix_project #+BEGIN_SRC makefile lorri_shell: lorri shell nix_shell: nix-shell nix_build_default: nix build -f default.nix && ./result/bin/spine -v nix_build_project: nix build -f project.nix && ./result/bin/spine -v nix_build_spine: nix build -f spine.nix && ./result/bin/spine -v nix-build_default: nix-build default.nix #+END_SRC **** git snapshot #+NAME: make_git_snapshot #+BEGIN_SRC makefile gitsnapshot: distclean tangle git commit -a #+END_SRC *** phony :phony: #+NAME: make_phony #+BEGIN_SRC makefile .PHONY : all build rebuild debug release \ distclean init \ tangle gitsnapshot #+END_SRC ** dub *** dub.settings.json :dub:config:json: #+HEADER: :NO-tangle ../dub.settings.json #+BEGIN_SRC json { "defaultArchitecture": "x86_64", 
"defaultCompiler": "ldc" } #+END_SRC *** dub.json :dub:config:json: **** dub.json ***** file-system download external dependencies - d2sqlite3 - dyaml - tinyendian - imageformats #+HEADER: :tangle ../dub.json #+BEGIN_SRC json { "authors": [ "Ralph Amissah" ], "copyright": "Copyright © 2015 - 2021 Ralph Amissah", "name": "spine", "version": "<>", "description": "a sisu like document parser", "homepage": "https://sisudoc.org", "license": "AGPL-3.0+", "targetPath": "./bin", "sourcePaths": [ "./src/doc_reform" ], "stringImportPaths": [ "./views" ], "buildRequirements": [ "allowWarnings" ], "targetType": "executable", "platforms": [ "posix" ], "buildTypes": { "dmd": { "dflags": [ "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "inline" ], "buildRequirements": [ "allowWarnings" ] }, "ldc": { "dflags": [ "-O2", "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "optimize", "inline" ], "buildRequirements": [ "allowWarnings" ] } }, "dependencies": { "spine:d2sqlite3": "*", "spine:imageformats": "*", "spine:dyaml": "*" }, "subPackages": [ { "name": "d2sqlite3", "description": "A thin wrapper around SQLite 3", "homepage": "https://github.com/dlang-community/d2sqlite3", "authors": [ "Nicolas Sicard", "Other contributors: see Github repo" ], "copyright": "Copyright 2011-18 Nicolas Sicard", "license": "BSL-1.0", "sourcePaths": [ "./src/ext_depends/d2sqlite3/source" ], "configurations": [ { "name": "d2sqlite3", "d2sqlite3": "with-lib", "targetType": "library", "systemDependencies": "SQLite version >= 3.8.7", "libs": [ "sqlite3" ], "excludedSourceFiles": [ "source/tests.d" ] } ] }, { "name": "imageformats", "description": "Decoders for PNG, TGA, BMP, JPEG and encoders for PNG, TGA, BMP.", "authors": [ "Tero Hänninen" ], "license": "BSL-1.0", "sourcePaths": [ "./src/ext_depends/imageformats/imageformats" ] }, { "name": "dyaml", "description": "YAML parser and emitter", "authors": [ "Ferdinand Majerech", "Cameron \"Herringway\" Ross" ], "license": 
"BSL-1.0", "homepage": "https://github.com/dlang-community/D-YAML", "copyright": "Copyright © 2011-2018, Ferdinand Majerech", "sourcePaths": [ "./src/ext_depends/D-YAML/source/dyaml", "./src/ext_depends/tinyendian/source" ], "importPaths": [ "./src/ext_depends/D-YAML/source/dyaml", "./src/ext_depends/tinyendian/source" ], "preGenerateCommands": [ "rm -rf ./src/ext_depends/D-YAML/examples", "rm -rf ./src/ext_depends/D-YAML/testsuite" ] } ], "configurations": [ { "name": "default", "targetName": "spine" }, { "name": "ldc", "targetName": "spine-ldc" }, { "name": "dmd", "targetName": "spine-dmd" } ], "subConfigurations": { "d2sqlite3": "with-lib" } } #+END_SRC ***** git (interim) #+BEGIN_SRC json { "authors": [ "Ralph Amissah" ], "copyright": "Copyright © 2015 - 2021 Ralph Amissah", "name": "spine", "version": "<>", "description": "a sisu like document parser", "homepage": "https://sisudoc.org", "license": "AGPL-3.0+", "targetPath": "./bin", "sourcePaths": [ "./src/doc_reform" ], "stringImportPaths": [ "./views" ], "buildRequirements": [ "allowWarnings" ], "targetType": "executable", "platforms": [ "posix" ], "buildTypes": { "dmd": { "dflags": [ "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "inline" ], "buildRequirements": [ "allowWarnings" ] }, "ldc": { "dflags": [ "-O2", "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "optimize", "inline" ], "buildRequirements": [ "allowWarnings" ] } }, "dependencies": { "spine:d2sqlite3": "*", "spine:imageformats": "*", "spine:dyaml": "*" }, "subPackages": [ { "name": "d2sqlite3", "description": "A thin wrapper around SQLite 3", "homepage": "https://github.com/dlang-community/d2sqlite3", "authors": [ "Nicolas Sicard", "Other contributors: see Github repo" ], "copyright": "Copyright 2011-18 Nicolas Sicard", "license": "BSL-1.0", "sourcePaths": [ "./src/ext_depends/d2sqlite3/source" ], "configurations": [ { "name": "with-lib", "targetType": "library", "systemDependencies": "SQLite version >= 3.8.7", 
"libs": [ "sqlite3" ], "excludedSourceFiles": [ "source/tests.d" ] } ] }, { "name": "imageformats", "description": "Decoders for PNG, TGA, BMP, JPEG and encoders for PNG, TGA, BMP.", "authors": [ "Tero Hänninen" ], "license": "BSL-1.0", "sourcePaths": [ "./src/ext_depends/imageformats/imageformats" ] }, { "name": "dyaml", "description": "YAML parser and emitter", "authors": [ "Ferdinand Majerech", "Cameron \"Herringway\" Ross" ], "license": "BSL-1.0", "homepage": "https://github.com/dlang-community/D-YAML", "copyright": "Copyright © 2011-2018, Ferdinand Majerech", "sourcePaths": [ "./src/ext_depends/D-YAML/source/dyaml" ], "importPaths": [ "./src/ext_depends/D-YAML/source/dyaml", "./src/ext_depends/tinyendian/source" ], "dependencies": { "tinyendian" : "~>0.2.0" }, "preGenerateCommands": [ "rm -rf ./src/ext_depends/D-YAML/examples", "rm -rf ./src/ext_depends/D-YAML/testsuite" ] }, { "name": "tinyendian", "description": "Lightweight endianness handling library", "authors": [ "Ferdinand Majerech" ], "license": "Boost 1.0", "copyright": "Copyright © 2014, Ferdinand Majerech", "homepage": "https://github.com/kiith-sa/tinyendian", "importPaths": [ "./src/ext_depends/tinyendian/source" ] } ], "configurations": [ { "name": "default", "targetName": "spine" }, { "name": "ldc", "targetName": "spine-ldc" }, { "name": "dmd", "targetName": "spine-dmd" } ], "subConfigurations": { "d2sqlite3": "with-lib" } } #+END_SRC ***** git fetched remote external dependencies (used previously) #+BEGIN_SRC json { "authors": [ "Ralph Amissah" ], "copyright": "Copyright © 2015 - 2021 Ralph Amissah", "name": "spine", "version": "<>", "description": "a sisu like document parser", "homepage": "https://sisudoc.org", "license": "AGPL-3.0+", "targetPath": "./bin", "sourcePaths": [ "./src/doc_reform" ], "stringImportPaths": [ "./views" ], "buildRequirements": [ "allowWarnings" ], "targetType": "executable", "platforms": [ "posix" ], "buildTypes": { "dmd": { "dflags": [ "-J=views", "-I=src/doc_reform" 
], "buildOptions": [ "verbose", "inline" ], "buildRequirements": [ "allowWarnings" ] }, "ldc": { "dflags": [ "-O2", "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "optimize", "inline" ], "buildRequirements": [ "allowWarnings" ] } }, "dependencies": { "d2sqlite3": "~><>", "imageformats": "~><>", "dyaml": "~><>", "tinyendian": "~><>" }, "configurations": [ { "name": "default", "targetName": "spine" }, { "name": "ldc", "targetName": "spine-ldc" }, { "name": "dmd", "targetName": "spine-dmd" } ], "subConfigurations": { "d2sqlite3": "with-lib" } } #+END_SRC **** +check dub.json+ #+HEADER: :NO-tangle ../dub.json #+BEGIN_SRC sh { "authors": [ "Ralph Amissah" ], "copyright": "Copyright © 2015 - 2021 Ralph Amissah", "name": "spine", "description": "a sisu like document parser", "homepage": "https://sisudoc.org", "license": "AGPL-3.0+", "targetPath": "./bin", "sourcePaths": [ "./src/doc_reform" ], "stringImportPaths": [ "./views" ], "buildRequirements": [ "allowWarnings" ], "targetType": "executable", "platforms": [ "posix" ], "dependencies": { "d2sqlite3": "~><>", "imageformats": "~><>", "dyaml": "~><>", "tinyendian": "~><>" }, "subConfigurations": { "d2sqlite3": "all-included" }, "configurations": [ { "name": "default", "targetName": "spine" }, { "name": "ldc", "targetName": "spine-ldc" }, { "name": "dmd", "targetName": "spine-dmd" } ], "buildTypes": { "dmd": { "dflags": [ "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "inline" ], "buildRequirements": [ "allowWarnings" ] }, "ldc": { "dflags": [ "-O2", "-J=views", "-I=src/doc_reform" ], "buildOptions": [ "verbose", "optimize", "inline" ], "buildRequirements": [ "allowWarnings" ] } } } #+END_SRC *** dub.sdl REFERENCE UNUSED:dub:config:sdl: Every DUB package should contain a [[https://code.dlang.org/package-format?lang=json][dub.json]] (or [[https://code.dlang.org/package-format?lang=sdl][dub.sdl]]) not configured here, using dub.json for the moment **** header (including dependencies) 
:header: #+HEADER: :NO-tangle ../dub.sdl #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang #!/usr/bin/env dub #+BEGIN_SRC sh name "spine" description "sisu document parser" homepage "https://sisudoc.org" authors "Ralph Amissah" copyright "Copyright (C) 2016 - 2021 Ralph Amissah" license "AGPL-3.0+" targetPath "./bin" #sourcePath "./src/doc_reform" stringImportPaths "./views" buildRequirements "allowWarnings" targetType "executable" platforms "posix" dependency "d2sqlite3" version="~><>" # https://code.dlang.org/packages/d2sqlite3 https://github.com/dlang-community/d2sqlite3 subconfiguration "d2sqlite3" "all-included" dependency "imageformats" version="~><>" # https://code.dlang.org/packages/imageformats https://github.com/lgvz/imageformats dependency "dyaml" version="~><>" # https://code.dlang.org/packages/dyaml https://github.com/dlang-community/D-YAML dependency "tinyendian" version="~><>" # https://code.dlang.org/packages/tinyendian https://github.com/dlang-community/tinyendian // dyaml dependency #+END_SRC **** default time (dub --compiler=dmd --build=release) time (dub --compiler=ldc2 --build=release) time (dub --compiler=gdc --build=release) #+HEADER: :NO-tangle ../dub.sdl #+BEGIN_SRC sh configuration "default" { targetName "spine" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine'" } #+END_SRC **** dmd :dmd: time (dub --compiler=dmd -color --config=dmd --build=dmd) time (dub --compiler=dmd -color --config=dmd --build=dmd-release) #+HEADER: :NO-tangle ../dub.sdl #+BEGIN_SRC sh configuration "dmd" { targetName "spine-dmd" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine-dmd'" } buildType "dmd" { dflags "-J=views" "-I=src/doc_reform" buildOptions "verbose" "inline" buildRequirements "allowWarnings" } buildType "dmd-release" { dflags "-J=views" "-I=src/doc_reform" buildOptions "verbose" "releaseMode" "optimize" "inline" } configuration "dmd-version" { dflags "-J=views" "-I=src/doc_reform" buildOptions "verbose" 
"optimize" targetName "spine-dmd-ver" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine-dmd-ver'" } #+END_SRC buildOptions "verbose" "optimize" // ok works quick compile buildOptions "verbose" "optimize" "inline" // ok works takes ages to compile buildOptions "verbose" "releaseMode" "optimize" "inline" // broken, runtime errors **** ldc :ldc: time (dub --compiler=ldc2 -color --config=ldc --build=ldc) time (dub --compiler=ldc2 -color --config=ldc --build=ldc-release) #+HEADER: :NO-tangle ../dub.sdl #+BEGIN_SRC sh configuration "ldc" { targetName "spine-ldc" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine-ldc'" } buildType "ldc" { dflags "-O2" "-J=views" "-I=src/doc_reform" buildOptions "verbose" "optimize" "inline" buildRequirements "allowWarnings" } buildType "ldc-local" { dflags "-O2" "-J=views" "-I=src/doc_reform" "-mcpu=native" buildOptions "verbose" "optimize" "inline" buildRequirements "allowWarnings" } buildType "ldc-release" { dflags "-O2" "-J=views" "-I=src/doc_reform" buildOptions "verbose" "optimize" "inline" "releaseMode" } configuration "ldc-version" { dflags "-O2" "-J=views" "-I=src/doc_reform" buildOptions "verbose" "optimize" "inline" "releaseMode" targetName "spine-ldc-ver" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine-ldc-ver'" } #+END_SRC **** gdc :gdc: time (dub --compiler=gdc --config=gdc --build=gdc) time (dub --compiler=gdc --config=gdc --build=gdc-release) #+HEADER: :NO-tangle ../dub.sdl #+BEGIN_SRC sh configuration "gdc" { targetName "spine-gdc" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine-gdc'" } buildType "gdc" { dflags "-O" "-J=views" "-I=src/doc_reform" lflags "-lz" buildOptions "inline" buildRequirements "allowWarnings" } buildType "gdc-release" { dflags "-O2" "-J=views" "-I=src/doc_reform" lflags "-lz" buildOptions "verbose" "optimize" "inline" "releaseMode" } configuration "gdc-version" { dflags "-O2" "-J=views" "-I=src/doc_reform" lflags "-lz" buildOptions 
"verbose" "optimize" "inline" "releaseMode" targetName "spine-gdc-ver" postGenerateCommands "notify-send -t 0 'D executable ready' 'spine-gdc-ver'" } #+END_SRC ***** generic :generic: ****** build (sdp) :release: #+BEGIN_SRC sh configuration "build" { name "build" targetType "executable" platforms "posix" targetName "bin/spine" dflags "-J=views" "-I=src/doc_reform" buildOptions "verbose" "releaseMode" "optimize" "inline" postGenerateCommands "notify-send -t 0 'D release executable ready' 'spine'" } #+END_SRC ** meson REVISIT :meson:build: *** notes https://mesonbuild.com/D.html https://mesonbuild.com/Dependencies.html#Dub - create and move to subdirectory rm -r build; mkdir build && cd build - build - regular build (faster compile time): meson && ninja - optimized build (faster program run time): meson --buildtype=debugoptimized && ninja - time build time ( meson --buildtype=debugoptimized && ninja ) - other - flags DFLAGS= DC=ldc2 meson .. **** projects :project: ls -1 ./src/*/**/*.d tree -fi ./src |rg "\.d$" fdfind -e .d tree -if src | rg ".+\.d$" > spine_sources.txt && \ for i in spine_sources.txt; do; \ sed -i "s/\(.\+\.d$\)/ '\1',/g" $i; done && \ cat spine_sources.txt ***** project meson.build :filelist: #+HEADER: :tangle ../meson.build #+BEGIN_SRC sh project('spine', 'd', license: 'AGPL-3', version: '<>', meson_version: '>=<>' ) #if meson.get_compiler('d').get_id() == 'gcc' # error('spine cannot be compiled with GDC at time. 
Sorry.') #endif spine_src = [ 'src/doc_reform/spine.d', 'src/doc_reform/conf/compile_time_info.d', 'src/doc_reform/io_in/paths_source.d', 'src/doc_reform/io_in/read_config_files.d', 'src/doc_reform/io_in/read_source_files.d', 'src/doc_reform/io_out/cgi_sqlite_search_form.d', 'src/doc_reform/io_out/create_zip_file.d', 'src/doc_reform/io_out/defaults.d', 'src/doc_reform/io_out/epub3.d', 'src/doc_reform/io_out/html.d', 'src/doc_reform/io_out/hub.d', 'src/doc_reform/io_out/latex.d', 'src/doc_reform/io_out/metadata.d', 'src/doc_reform/io_out/odt.d', 'src/doc_reform/io_out/package.d', 'src/doc_reform/io_out/paths_output.d', 'src/doc_reform/io_out/rgx.d', 'src/doc_reform/io_out/source_pod.d', 'src/doc_reform/io_out/sqlite.d', 'src/doc_reform/io_out/xmls_css.d', 'src/doc_reform/io_out/xmls.d', 'src/doc_reform/meta/conf_make_meta_json.d', 'src/doc_reform/meta/conf_make_meta_structs.d', 'src/doc_reform/meta/conf_make_meta_yaml.d', 'src/doc_reform/meta/defaults.d', 'src/doc_reform/meta/doc_debugs.d', 'src/doc_reform/meta/metadoc.d', 'src/doc_reform/meta/metadoc_from_src.d', 'src/doc_reform/meta/metadoc_harvest.d', 'src/doc_reform/meta/metadoc_harvests_authors.d', 'src/doc_reform/meta/metadoc_harvests_topics.d', 'src/doc_reform/meta/metadoc_object_setter.d', 'src/doc_reform/meta/metadoc_show_config.d', 'src/doc_reform/meta/metadoc_show_make.d', 'src/doc_reform/meta/metadoc_show_metadata.d', 'src/doc_reform/meta/metadoc_show_summary.d', 'src/doc_reform/meta/package.d', 'src/doc_reform/meta/rgx.d', 'src/doc_reform/share/defaults.d', ] source_root = meson.source_root() src_dir = include_directories('src/') # Dependencies dep_tinyendian = dependency('tinyendian', version: '>= <>', fallback: ['tinyendian', 'tinyendian_dep'], method: 'dub') dep_dyaml = dependency('dyaml', version: '>= <>', fallback: ['dyaml', 'dyaml_dep'], method: 'dub') dep_d2sqlite3 = dependency('d2sqlite3', version: '>= <>', fallback: ['d2sqlite3', 'd2sqlite3_dep'], method: 'dub') dep_imageformats = 
dependency('imageformats', version: '>= <>', fallback: ['imageformats', 'imageformats_dep'], method: 'dub') # Executable spine_exe = executable('spine', [spine_src], include_directories : [src_dir], d_import_dirs: [include_directories('views')], dependencies : [ dep_tinyendian, dep_dyaml, dep_d2sqlite3, dep_imageformats, ], install : true ) #+END_SRC *** meson subprojects [taken care of using dub] :subprojects: **** README #+HEADER: :tangle ../subprojects/README #+BEGIN_SRC txt - d2sqlite3 - dub - https://code.dlang.org/packages/d2sqlite3 - upstream - https://github.com/dlang-community/d2sqlite3 - dyaml - dub - https://code.dlang.org/packages/dyaml - upstream - https://github.com/dlang-community/D-YAML - upstream git clone - https://github.com/dlang-community/D-YAML.git - imageformats - dub - https://code.dlang.org/packages/imageformats - upstream git clone - https://github.com/lgvz/imageformats - tinyendian (dyaml dependency) - dub - https://code.dlang.org/packages/tinyendian - upstream git clone - https://github.com/dlang-community/tinyendian.git #+END_SRC **** d2sqlite3 :d2sqlite3: ***** wrap #+HEADER: :tangle ../subprojects/d2sqlite3.wrap #+BEGIN_SRC sh [wrap-git] directory = d2sqlite3 url = https://github.com/dlang-community/d2sqlite3.git revision = head #+END_SRC ***** meson.build #+HEADER: :NO-tangle ../subprojects/d2sqlite3.meson.build #+BEGIN_SRC sh project('d2sqlite3', 'd', meson_version: '>=<>', license: 'BSL-1.0', version: '<>' ) project_soversion = '<>' src_dir = include_directories('source/') pkgc = import('pkgconfig') sqlite_dep = dependency('sqlite3') d2sqlite3_src = [ 'source/d2sqlite3/database.d', 'source/d2sqlite3/internal/memory.d', 'source/d2sqlite3/internal/util.d', 'source/d2sqlite3/library.d', 'source/d2sqlite3/package.d', 'source/d2sqlite3/results.d', 'source/d2sqlite3/sqlite3.d', 'source/d2sqlite3/statement.d', ] install_subdir('source/d2sqlite3/', install_dir: 'include/d/d2sqlite3/') d2sqlite3_lib = library('d2sqlite3', [d2sqlite3_src], 
include_directories: [src_dir], dependencies: [sqlite_dep], install: true, version: meson.project_version(), soversion: project_soversion ) pkgc.generate(name: 'd2sqlite3', libraries: d2sqlite3_lib, subdirs: 'd/d2sqlite3/', version: meson.project_version(), requires: ['sqlite3'], description: 'A small wrapper around SQLite for the D programming language.' ) # for use by others which embed this as subproject d2sqlite3_dep = declare_dependency( link_with: [d2sqlite3_lib], dependencies: [sqlite_dep], include_directories: [src_dir] ) #+END_SRC **** dyaml :dyaml: ***** wrap #+HEADER: :tangle ../subprojects/dyaml.wrap #+BEGIN_SRC sh [wrap-git] directory = dyaml url = https://github.com/dlang-community/D-YAML.git revision = head #+END_SRC ***** meson.build (provided by upstream) Upstream provides meson.build #+HEADER: :NO-tangle ../subprojects/D-YAML/meson.build #+BEGIN_SRC sh project('D-YAML', 'd', meson_version: '>=<>', subproject_dir: 'contrib', version: '<>' ) project_soversion = '<>' src_dir = include_directories('source/') pkgc = import('pkgconfig') dyaml_src = [ 'source/dyaml/composer.d', 'source/dyaml/constructor.d', 'source/dyaml/dumper.d', 'source/dyaml/emitter.d', 'source/dyaml/encoding.d', 'source/dyaml/escapes.d', 'source/dyaml/event.d', 'source/dyaml/exception.d', 'source/dyaml/linebreak.d', 'source/dyaml/loader.d', 'source/dyaml/node.d', 'source/dyaml/package.d', 'source/dyaml/parser.d', 'source/dyaml/queue.d', 'source/dyaml/reader.d', 'source/dyaml/representer.d', 'source/dyaml/resolver.d', 'source/dyaml/scanner.d', 'source/dyaml/serializer.d', 'source/dyaml/style.d', 'source/dyaml/tagdirective.d', 'source/dyaml/test/common.d', 'source/dyaml/test/compare.d', 'source/dyaml/test/constructor.d', 'source/dyaml/test/emitter.d', 'source/dyaml/test/errors.d', 'source/dyaml/test/inputoutput.d', 'source/dyaml/test/reader.d', 'source/dyaml/test/representer.d', 'source/dyaml/test/resolver.d', 'source/dyaml/test/tokens.d', 'source/dyaml/token.d' ] 
install_subdir('source/dyaml', install_dir: 'include/d/yaml/') tinyendian_dep = dependency('tinyendian', version: '>=<>', fallback: ['tinyendian', 'tinyendian_dep']) dyaml_lib = library('dyaml', [dyaml_src], include_directories: [src_dir], dependencies: [tinyendian_dep], install: true, version: meson.project_version(), soversion: project_soversion ) pkgc.generate(name: 'dyaml', libraries: dyaml_lib, subdirs: 'd/yaml/', version: meson.project_version(), description: 'YAML parser and emitter for the D programming language.' ) # Make D-YAML easy to use as subproject dyaml_dep = declare_dependency( link_with: dyaml_lib, include_directories: [src_dir], dependencies: [tinyendian_dep] ) #+END_SRC **** imageformats :imageformats: ***** wrap #+HEADER: :tangle ../subprojects/imageformats.wrap #+BEGIN_SRC sh [wrap-git] directory = imageformats url = https://github.com/lgvz/imageformats.git revision = head #+END_SRC ***** meson.build #+HEADER: :NO-tangle ../subprojects/imageformats.meson.build #+BEGIN_SRC sh project('imageformats', 'd', meson_version: '>=<>', license: 'BSL-1.0', version: '<>' ) project_soversion = '<>' src_dir = include_directories('.') pkgc = import('pkgconfig') imageformats_src = [ 'imageformats/bmp.d', 'imageformats/jpeg.d', 'imageformats/package.d', 'imageformats/png.d', 'imageformats/tga.d' ] install_subdir('imageformats/', install_dir: 'include/d/imageformats-d/') imageformats_lib = library('imageformats-d', [imageformats_src], include_directories: [src_dir], install: true, version: meson.project_version(), soversion: project_soversion ) pkgc.generate(name: 'imageformats-d', libraries: imageformats_lib, subdirs: 'd/imageformats-d/', version: meson.project_version(), description: 'Decoders for PNG, TGA, BMP, JPEG and encoders for PNG, TGA, BMP.' 
) # for use by others which embed this as subproject imageformats_dep = declare_dependency( link_with: [imageformats_lib], include_directories: [src_dir] ) #+END_SRC **** tinyendian (dyaml dependency) :tinyendian: ***** wrap #+HEADER: :tangle ../subprojects/tinyendian.wrap #+BEGIN_SRC sh [wrap-git] directory = tinyendian url = https://github.com/dlang-community/tinyendian.git revision = head #+END_SRC ***** meson.build (provided by upstream) Upstream provides meson.build #+HEADER: :NO-tangle ../subprojects/tinyendian/meson.build #+BEGIN_SRC sh # -*- mode: python; -*- project( 'tinyendian', 'd', meson_version: '>=<>', version: '<>', default_options: ['buildtype=release'], ) src_dir = include_directories('source/') pkgc = import('pkgconfig') tinyendian_src = [ 'source/tinyendian.d' ] install_headers(tinyendian_src, subdir: 'd/') tinyendian_lib = library( meson.project_name(), [tinyendian_src], include_directories: [src_dir], version: meson.project_version(), pic: true, install: true, ) pkgc.generate( name: meson.project_name(), libraries: tinyendian_lib, subdirs: 'd/', version: meson.project_version(), description: 'Lightweight endianness library for D.' ) # Make Tinyendian easy to use as subproject tinyendian_dep = declare_dependency( link_with: tinyendian_lib, include_directories: [src_dir] ) #+END_SRC ** nix :nix: - default.nix - shell.nix *** envrc :envrc: **** .envrc #+HEADER: :tangle ../.envrc #+BEGIN_SRC sh NIX_ENFORCE_PURITY=0 if [ -e .envrc-local ]; then # source an additional user-specific .envrc in ./.envrc-local source .envrc-local fi #+END_SRC - lorri? 
#+HEADER: :NO-tangle ../.envrc #+BEGIN_SRC sh if type lorri &>/dev/null; then echo "direnv: using lorri from PATH ($(type -p lorri))" eval "$(lorri direnv)" else # fallback prevent bootstrapping problems by using direnv's builtin nix support use nix NIX_ENFORCE_PURITY=0 fi if [ -e .envrc-local ]; then # source an additional user-specific .envrc in ./.envrc-local source .envrc-local fi #+END_SRC - enable flakes #+HEADER: :NO-tangle ../.envrc #+BEGIN_SRC sh if [ -f flake.lock ] && [ -f flake.nix ]; then # reload when these files change watch_file flake.nix watch_file flake.lock # load the flake devShell if [ ! -d $(direnv_layout_dir) ]; then mkdir $(direnv_layout_dir) fi eval "$(nix --experimental-features 'nix-command flakes' print-dev-env --profile "$(direnv_layout_dir)/flake-profile")" else # fall back to using direnv's builtin nix support # to prevent bootstrapping problems. # use nix if type lorri &>/dev/null; then echo "direnv: using lorri from PATH ($(type -p lorri))" eval "$(lorri direnv)" else # fall back to using direnv's builtin nix support # to prevent bootstrapping problems. use nix NIX_ENFORCE_PURITY=0 fi # source an additional user-specific .envrc in ./.envrc-local if [ -e .envrc-local ]; then source .envrc-local fi fi #+END_SRC **** .envrc-local CHECK MODIFY - bespoke modify appropriately and generate if needed #+HEADER: :tangle ../.envrc-local_ #+BEGIN_SRC sh export NIX_PATH=<> #export NIX_PATH=<> # reload when these files change watch_file flake.nix # watch_file flake.lock ## load the flake devShell eval "$(nix print-dev-env)" # echo $NIX_BUILD_TOP export SpineVER=$(git describe --long --tags | sed 's/^[ a-z_-]\+\([0-9.]\+\)/\1/;s/\([^-]*-g\)/r\1/;s/-/./g') export SpineSRC=<> export SpineBIN=<> export SpineDOC=<> export SpineOUTversioned=<> export SpineOUTstatic=<> echo '-*- mode: org -*- ,* nixpkgs path? 
eval "$(nix print-dev-env)" ' > nix_note_.org echo " == `nix-instantiate --find-file nixpkgs`" >> nix_note_.org echo ' ,* nix build and show derivation ,#+BEGIN_SRC sh nix-shell --pure nix-build nix build -f default.nix nix shell -f default.nix nix-instantiate | nix-build nix build `nix-instantiate` nix develop nix-instantiate | nix show-derivation | jq nix-instantiate | nix show-derivation --recursive | jq nix search --json 2>/dev/null |jq ,#+END_SRC ,* version and build info ,#+BEGIN_SRC sh' >> nix_note_.org echo 'spine version (git) == $SpineVER' >> nix_note_.org echo "spine version (git) == $SpineVER" >> nix_note_.org echo 'nix-instantiate == `nix-instantiate`' >> nix_note_.org echo "nix-instantiate == `nix-instantiate`" >> nix_note_.org echo "#+END_SRC ,* initialised shell variables ,#+BEGIN_SRC sh SpineSRC=$SpineSRC SpineDOC=$SpineDOC SpineOUTversioned=$SpineOUTversioned SpineOUTstatic=$SpineOUTstatic ,#+END_SRC " >> nix_note_.org echo '* spine run instruction examples ,** parallelized tasks ,*** doc source ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --pod --source --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** html & epub output ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --html --epub --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** sqlite db for each document - populate each db ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --sqlite-discrete --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** doc source; html, epub; sqlite outputs ,#+BEGIN_SRC sh $SpineBIN/spine --verbose --pod --html --epub --sqlite-discrete --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** harvest (authors topics) ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --harvest --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** html, harvest ,#+BEGIN_SRC sh $SpineBIN/spine --verbose --dark --html --html-link-harvest --harvest --output="$SpineOUTversioned" $SpineDOC/markup/pod/* $SpineBIN/spine 
--very-verbose --html --html-link-harvest --harvest --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** composite command: source pod, html, epub, harvest, sqlite ,#+BEGIN_SRC sh $SpineBIN/spine --verbose --dark --pod --epub --html --html-link-harvest --harvest --sqlite-discrete --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,** sequential tasks ,*** sqlite db (shared) - create db ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --sqlite-db-create --output="$SpineOUTversioned" ,#+END_SRC ,*** sqlite db (shared) - populate db ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --sqlite-update --output="$SpineOUTversioned" $SpineDOC/spine-markup-samples/markup/pod/* ,#+END_SRC ,*** sqlite db (shared) - drop db ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --sqlite-db-drop --output="$SpineOUTversioned" ,#+END_SRC ,*** sqlite db (shared) - create & populate db (single step) ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --sqlite-db-create --sqlite-update --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,*** composite command: source pod, html, epub, harvest, sqlite ,#+BEGIN_SRC sh $SpineBIN/spine --verbose --no-parallel --dark --pod --epub --html --html-link-harvest --harvest --sqlite-discrete --output="$SpineOUTversioned" $SpineDOC/markup/pod/* ,#+END_SRC ,** config [./pod/].dr/config_local_site ,#+BEGIN_SRC sh cat $SpineDOC/markup/pod/.dr/config_local_site $SpineBIN/spine --show-config $SpineDOC/markup/pod $SpineBIN/spine --show-config --output="$SpineOUTversioned" $SpineDOC/markup/pod ,#+END_SRC ,** cgi operations (output to $SpineOUTstatic /var/www) ,#+BEGIN_SRC sh $SpineBIN/spine --very-verbose --sqlite-db-create --output="$SpineOUTstatic" $SpineDOC/markup/pod/* $SpineBIN/spine -v --cgi-search-form-codegen --output=$SpineOUTstatic $SpineDOC/markup/pod/* $SpineBIN/spine -v --show-config --config=$SpineDOC/markup/pod/.dr $SpineBIN/spine --html $SpineDOC/markup/pod/* $SpineBIN/spine -v --cgi-search-form-codegen 
--config=$SpineDOC/markup/pod/.dr/config_local_site ,#+END_SRC ,*** generate html linked to search form ,#+BEGIN_SRC sh $SpineBIN/spine -v --html --html-link-search --html-link-harvest --harvest --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ,*** create or re-create sql db (--sqlite-db-create or --sqlite-db-recreate) ,#+BEGIN_SRC sh $SpineBIN/spine -v --sqlite-db-create --sqlite-db-filename="<>" --output="$SpineOUTstatic" $SpineBIN/spine -v --sqlite-db-recreate --sqlite-db-filename="<>" --output="$SpineOUTstatic" ,#+END_SRC ,*** populate sqlite db ,#+BEGIN_SRC sh $SpineBIN/spine -v --sqlite-update --sqlite-db-filename="<>" --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ,*** generate html (linked to search form), sql output, harvest COMPOSITE ,#+BEGIN_SRC sh $SpineBIN/spine -v --html --html-link-search --html-link-harvest --harvest --sqlite-update --sqlite-db-filename="<>" --cgi-sqlite-search-filename="<>" --output="$SpineOUTstatic" $SpineDOC/markup/pod/* ,#+END_SRC ,*** make search form ,#+BEGIN_SRC sh $SpineBIN/spine -v --cgi-search-form-codegen --config=$SpineDOC/markup/pod/.dr/config_local_site ,#+END_SRC ' >> nix_note_.org cat nix_note_.org echo "emacs nix_note_.org" echo "cat nix_note_.org" #+END_SRC ***** nixpkgs_path SETUP ****** nixpkgs select path SELECT #+NAME: nixpkgs_path #+BEGIN_SRC nix <> #+END_SRC ****** nixpkgs path options - #+NAME: nixpkgs_path_default #+BEGIN_SRC nix #+END_SRC - local path SET WARN #+NAME: nixpkgs_path_local #+BEGIN_SRC nix /nixpkgs-ra/nixpkgs #+END_SRC ****** project path options - local path SET WARN #+NAME: nix_path_channel #+BEGIN_SRC nix /nix/var/nix/profiles/per-user/root/channels/nixos #+END_SRC #+NAME: nix_path_directory #+BEGIN_SRC nix nixpkgs=<> #+END_SRC #+NAME: project_path_local_src #+BEGIN_SRC nix /grotto-ra/repo/git.repo/projects/project-spine/doc-reform #+END_SRC #+NAME: project_path_local_bin #+BEGIN_SRC nix <>/result/bin #+END_SRC #+NAME: project_path_cgi_bin #+BEGIN_SRC nix 
/var/www/cgi/cgi-bin #+END_SRC #+NAME: project_path_local_doc #+BEGIN_SRC nix /grotto-ra/repo/git.repo/projects/project-spine/doc-reform-markup/spine-markup-samples #+END_SRC #+NAME: project_path_local_out #+BEGIN_SRC nix /srv/www/spine/$SpineVER #+END_SRC #+NAME: project_path_local_out_ #+BEGIN_SRC nix /tmp/spine/$SpineVER/www #+END_SRC #+NAME: project_path_local_out_static #+BEGIN_SRC nix /srv/www/spine/static #+END_SRC #+NAME: project_path_local_out_static_ #+BEGIN_SRC nix /var/www #+END_SRC *** flake :flake: - flake.nix /nixpkgs-ra/nixpkgs ./shell.nix - shell.nix ./pkgs/shell-pkgs.nix - pkgs/shell-pkgs.nix - default.nix ./nix/dub.selections.nix # - dub.selections.nix nix develop nix flake update #+NAME: nixpkgs_url #+BEGIN_SRC nix github:nixos/nixpkgs #+END_SRC #+BEGIN_SRC nix <> #+END_SRC #+HEADER: :tangle ../flake.nix #+BEGIN_SRC nix { description = "a sisu like document parser"; inputs = { flake-utils.url = "github:numtide/flake-utils"; nixpkgs.url = "<>"; }; outputs = { self, nixpkgs, flake-utils }: let pkgs = nixpkgs.legacyPackages.x86_64-linux; in { packages.x86_64-linux.spine = pkgs.stdenv.mkDerivation { name = "spine"; inherit self; src = self; shell = ./shell.nix; installPhase = '' install -m755 spine $out/bin/spine ''; }; defaultPackage.x86_64-linux = self.packages.x86_64-linux.spine; }; } #+END_SRC #+BEGIN_SRC nix { description = "a sisu like document parser"; inputs = { flake-utils.url = "github:numtide/flake-utils"; nixpkgs.url = "<>"; d2sqlite3 = { url = "github:dlang-community/d2sqlite3"; flake = false; }; tinyendian = { url = "github:kiith-sa/tinyendian"; flake = false; }; dyaml = { url = "github:dlang-community/D-YAML"; flake = false; }; imageformats = { url = "github:lgvz/imageformats"; flake = false; }; }; outputs = { self, d2sqlite3, tinyendian, dyaml, imageformats, nixpkgs, flake-utils }: let pkgs = nixpkgs.legacyPackages.x86_64-linux; in { packages.x86_64-linux.spine = pkgs.stdenv.mkDerivation { name = "spine"; inherit self; src = self; 
shell = ./shell.nix; installPhase = '' install -m755 spine $out/bin/spine ''; }; defaultPackage.x86_64-linux = self.packages.x86_64-linux.spine; }; } #+END_SRC #+HEADER: :NO-tangle ../flake.nix #+BEGIN_SRC nix { description = "a sisu like document parser"; inputs = { flake-utils.url = "github:numtide/flake-utils"; nixpkgs.url = "<>"; # pkgs /nixpkgs-ra/nixpkgs {} #spine.url = "/grotto-ra/repo/git.repo/projects/project-spine/doc-reform"; #spine.url = "./."; }; outputs = { self, nixpkgs, flake-utils }: #flake-utils.lib.eachSystem [ "x86_64-linux" "defaultPackage.x86_64-linux" "packages.x86_64-linux.defaultPackage.x86_64-linux" ] (system: flake-utils.lib.eachDefaultSystem (system: let #pkgs = ${system}; pkgs = nixpkgs.legacyPackages.${system}; # project = pkgs.spine.project' { # src = ./.; # compiler-nix-name = "ldc"; # }; in { #project.flake {} // { #devShell = project.shellFor { # (import ./shell.nix self { inherit pkgs; }); #}; devShell = import ./default.nix self { inherit pkgs; }; } ); } #+END_SRC *** shell.nix :shell: nix-shell shell.nix --pure shell.nix --pure -I .envrc nix-shell --pure -I nixpkgs=/nixpkgs-ra/nixpkgs nix-shell --pure -p "with import /nixpkgs-ra/nixpkgs {}; ldc" nix-shell --pure -p "with import /nixpkgs-ra/nixpkgs {}; [dub ldc]" shell.nix direnv: export +IN_NIX_SHELL nix-shell -p nixFlakes nix-shell -p nixFlakes --pure nix-shell -p nixFlakes --pure -p "with import /nixpkgs-ra/nixpkgs {}; [dub ldc]" shell.nix -I .envrc shell.nix nix develop nix-build nix-build -I nixpkgs= nix-build -I .envrc nix build nix build -f default.nix && noti nix build -f default.nix && ./result/bin/spine -v && noti -t "spine build" -m "see ./result/bin/spine" nix build -f spine.nix && ./result/bin/spine -v && noti -t "spine build" -m "see ./result/bin/spine" nix build -f project.nix nix build -f spine.nix nix-shell shell.nix --pure nix build -f default.nix ./result/bin/spine #+HEADER: :tangle ../shell.nix #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang 
"#!/usr/bin/env -S nix-shell --pure" #+BEGIN_SRC nix { pkgs ? import {} }: pkgs.mkShell { buildInputs = with pkgs; [( with pkgs; [ nixFlakes rund dub ldc sqlite nix-prefetch-git validatePkgConfig jq git ] )]; shellHook = '' if [[ -e ".envrc" ]]; then source .envrc fi ''; } #+END_SRC #+HEADER: :NO-tangle ../shell.nix #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang "#!/usr/bin/env -S nix-shell" #+BEGIN_SRC nix { pkgs ? import {} }: pkgs.mkShell { buildInputs = with pkgs; [ (import ./nix/pkglst/shell-pkgs.nix { inherit pkgs; }) ]; buildPhase = "nix build -f default.nix"; shellHook = '' echo "built spine @ ./result/bin/spine" if [[ -e ".envrc" ]]; then source .envrc fi nix-instantiate | nix show-derivation | jq ''; } #+END_SRC *** default.nix :default: - default.nix -I nixpkgs= e.g. default.nix -I nixpkgs=/nixpkgs-ra/nixpkgs - nix-build -I nixpkgs= e.g. nix-build -I nixpkgs=/nixpkgs-ra/nixpkgs nix-build -I nixpkgs=/nixpkgs-ra/nixpkgs nix-build -I .envrc-local nix-build -I .envrc default.nix -I .envrc default.nix nix build nix build -f default.nix nix build -f project.nix nix build -f spine.nix #+HEADER: :tangle ../default.nix #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang "#!/usr/bin/env -S nix-build" #+BEGIN_SRC nix { pkgs ? import {}, stdenv ? pkgs.stdenv, lib ? pkgs.lib, ldc ? null, dcompiler ? pkgs.ldc, dub ? pkgs.dub }: assert dcompiler != null; with ( assert dcompiler != null; with lib; let # Filter function to remove the .dub package folder from src filterDub = name: type: let baseName = baseNameOf (toString name); in ! ( type == "directory" && baseName == ".dub" ); targetOf = package: "${package.targetPath or "."}/${package.targetName or package.name}"; # Remove reference to build tools and library sources disallowedReferences = deps: [ dcompiler dub ]; removeExpr = refs: ''remove-references-to ${lib.concatMapStrings (ref: " -t ${ref}") refs}''; in { mkDubDerivation = lib.makeOverridable ({ src, nativeBuildInputs ? [], dubJSON ? 
src + "/dub.json", passthru ? {}, package ? lib.importJSON dubJSON, ... } @ attrs: stdenv.mkDerivation (attrs // { pname = package.name; nativeBuildInputs = [ dcompiler dub pkgs.removeReferencesTo ] ++ nativeBuildInputs; disallowedReferences = disallowedReferences deps; passthru = passthru // { inherit dub dcompiler pkgs; }; src = lib.cleanSourceWith { filter = filterDub; src = lib.cleanSource src; }; preFixup = '' find $out/bin -type f -exec ${removeExpr (disallowedReferences deps)} '{}' + || true ''; buildPhase = '' runHook preBuild export HOME=$PWD for dc_ in dmd ldmd2 gdmd; do echo "- check for D compiler $dc_" dc=$(type -P $dc_ || echo "") if [ ! "$dc" == "" ]; then break fi done if [ "$dc" == "" ]; then exit "Error: could not find D compiler" fi echo "$dc_ used as D compiler to build $pname" dub build --compiler=$dc --build=release --combined --skip-registry=all runHook postBuild ''; checkPhase = '' runHook preCheck export HOME=$PWD dub test --combined --skip-registry=all runHook postCheck ''; installPhase = '' runHook preInstall mkdir -p $out/bin cp -r "${targetOf package}" $out/bin runHook postInstall ''; meta = lib.optionalAttrs (package ? description) { description = package.description; } // attrs.meta or {}; } // lib.optionalAttrs (!(attrs ? 
version)) { # Use name from dub.json, unless pname and version are specified name = package.name; })); } ); mkDubDerivation rec { name = "spine-${version}"; version = "<>"; src = ./.; buildInputs = [ pkgs.sqlite ( with pkgs; [ nixFlakes rund dub ldc sqlite ] ) ]; # buildPhase = [ ]; installPhase = '' install -m755 -D spine $out/bin/spine echo "built $out/bin/spine" ''; <> } #+END_SRC - unused - installPhase #+BEGIN_SRC txt installPhase = '' mkdir -p $out/bin cp spine $out/bin chmod +x $out/bin/spine ''; #+END_SRC *** project meta #+NAME: nix_project_meta #+BEGIN_SRC nix meta = with pkgs.lib; { homepage = https://sisudoc.org; description = "a sisu like document parser"; license = licenses.agpl3Plus; platforms = platforms.linux; maintainers = [ RalphAmissah ]; }; #+END_SRC *** dub.selections.json #+HEADER: :NO-tangle ../dub.selections.json #+BEGIN_SRC nix { "fileVersion": 1, "versions": { "d2sqlite3": "0.19.1", "dyaml": "0.8.3", "imageformats": "7.0.2", "tinyendian": "0.2.0" } } #+END_SRC *** dub.selections.nix #+HEADER: :NO-tangle ../nix/dub.selections.nix #+BEGIN_SRC nix # This file was generated by https://github.com/lionello/dub2nix v0.2.3 [ { fetch = { type = "git"; url = "https://github.com/dlang-community/d2sqlite3.git"; rev = "v0.19.1"; sha256 = "0rnsgla6xyr8r34knf7v6dwhacra96q1b5rhxcz9246inwhvrk5k"; fetchSubmodules = false; date = "2020-07-21T12:32:51+02:00"; deepClone = false; leaveDotGit = false; path = "/nix/store/hsi8xvl15w6fwlqvs042m1z5i88yc72i-d2sqlite3"; }; } { fetch = { type = "git"; url = "https://github.com/kiith-sa/tinyendian.git"; rev = "v0.2.0"; sha256 = "086gf5aga52wr5rj2paq54daj8lafn980x77b706vvvqaz2mlis8"; fetchSubmodules = false; date = "2018-06-10T11:04:28+02:00"; deepClone = false; leaveDotGit = false; path = "/nix/store/9c7fsmi5am84j6dq2mp3va306x3ay291-tinyendian"; }; } { fetch = { type = "git"; url = "https://github.com/kiith-sa/D-YAML.git"; rev = "v0.8.3"; sha256 = "13wy304xjbwkpgg7ilql1lkxkm83s87jm59ffnrg26slp7cx149q"; fetchSubmodules 
= false; date = "2020-09-19T23:46:57+02:00"; deepClone = false; leaveDotGit = false; path = "/nix/store/3i8i56lkmw2xq3lxr5h66v909waq2mqg-D-YAML"; }; } { fetch = { type = "git"; url = "https://github.com/lgvz/imageformats.git"; rev = "v7.0.2"; sha256 = "1mfbsmi4fs1xny4zqs6jyr04d5f4h03r9f6jadvkdqj5kd1k0ws7"; fetchSubmodules = false; date = "2019-10-10T07:54:45+03:00"; deepClone = false; leaveDotGit = false; path = "/nix/store/wn554pn21nzmpvw2hs7hvv9v9y0sgarg-imageformats"; }; } ] #+END_SRC *** dub2nix & shared pkgs SHARED **** dub2nix with pkgs shared #+NAME: nix_with_pkgs #+BEGIN_SRC nix with pkgs; [ <> ] #+END_SRC **** with pkgs list #+NAME: nix_shell_with_pkgs_list #+BEGIN_SRC nix nixFlakes rund dub ldc sqlite nix-prefetch-git validatePkgConfig jq git #+END_SRC ** .gitignore :gitignore: #+HEADER: :tangle "../.gitignore" #+BEGIN_SRC sh # git ls-files --others --exclude-from=.git/info/exclude ,* !.gitignore !README.md !COPYRIGHT !CHANGELOG !makefile !version.txt !configuration.txt !*.json !*.sdl !meson.build !tangle !*.org !*.d !*.rb !*.txt !conf.sdl !doc !doc/** !man !man/** !org !sundry !sundry/** !*.nix !nix !nix/** !.envrc !src !src/** !data !data/* !data/src/** !data/src_yaml/** !data/pod/** !data/pod_yaml/** !*.sst !*.ssm !**/*.sst !**/*.ssm !subprojects !subprojects/*.wrap #!subprojects/*.meson.build #!subprojects/** !pod.manifest !sisu_document_make !config_local_site !sisudoc !views !docs/*.html !notes notes/** !notes/*.org !notes/sisu-description.sst !notes/reminders !notes/reminders/** .dub/** ,**/.dub/** ,**/cgi-bin/** tmp/** ,*_.org ,*_.d ,*_.txt ,*_ ,*.swp ,*~ ,*~ \#* ,*.\#* !.envrc-local_ #!*/ #\#* #*.\#* #.reggae/** #+END_SRC * sundry misc ** spine search cgi (in ./sundry) *** shell.nix #+HEADER: :tangle "../sundry/spine_search_cgi/shell.nix" #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang "#!/usr/bin/env -S nix-shell --pure" #+BEGIN_SRC nix { pkgs ? 
import {} }: pkgs.mkShell { buildInputs = with pkgs; [( with pkgs; [ nixFlakes rund dub ldc sqlite nix-prefetch-git validatePkgConfig jq git ] )]; shellHook = '' if [[ -e ".envrc" ]]; then source .envrc fi ''; } #+END_SRC *** default.nix #+HEADER: :tangle "../sundry/spine_search_cgi/default.nix" #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang "#!/usr/bin/env -S nix-build" #+BEGIN_SRC nix { pkgs ? import {}, stdenv ? pkgs.stdenv, lib ? pkgs.lib, ldc ? null, dcompiler ? pkgs.ldc, dub ? pkgs.dub }: assert dcompiler != null; with ( assert dcompiler != null; with lib; let # Filter function to remove the .dub package folder from src filterDub = name: type: let baseName = baseNameOf (toString name); in ! ( type == "directory" && baseName == ".dub" ); targetOf = package: "${package.targetPath or "."}/${package.targetName or package.name}"; # Remove reference to build tools and library sources disallowedReferences = deps: [ dcompiler dub ]; removeExpr = refs: ''remove-references-to ${lib.concatMapStrings (ref: " -t ${ref}") refs}''; in { mkDubDerivation = lib.makeOverridable ({ src, nativeBuildInputs ? [], dubJSON ? src + "/dub.json", passthru ? {}, package ? lib.importJSON dubJSON, ... } @ attrs: stdenv.mkDerivation (attrs // { pname = package.name; nativeBuildInputs = [ dcompiler dub pkgs.removeReferencesTo ] ++ nativeBuildInputs; disallowedReferences = disallowedReferences deps; passthru = passthru // { inherit dub dcompiler pkgs; }; src = lib.cleanSourceWith { filter = filterDub; src = lib.cleanSource src; }; preFixup = '' find $out/share/cgi-bin -type f -exec ${removeExpr (disallowedReferences deps)} '{}' + || true ''; buildPhase = '' runHook preBuild export HOME=$PWD for dc_ in dmd ldmd2 gdmd; do echo "- check for D compiler $dc_" dc=$(type -P $dc_ || echo "") if [ ! 
"$dc" == "" ]; then break fi done if [ "$dc" == "" ]; then exit "Error: could not find D compiler" fi echo "$dc_ used as D compiler to build $pname" dub build --compiler=$dc --build=release --combined --skip-registry=all runHook postBuild ''; checkPhase = '' runHook preCheck export HOME=$PWD dub test --combined --skip-registry=all runHook postCheck ''; installPhase = '' runHook preInstall mkdir -p $out/share/cgi-bin cp -r "${targetOf package}" $out/share/cgi-bin install -m755 -D $out/share/cgi-bin/spine_search spine_search runHook postInstall ''; postInstall = '' echo "HERE ${targetOf package} $out/share/cgi-bin" echo `ls -la $out/share/cgi-bin/spine_search` ''; meta = lib.optionalAttrs (package ? description) { description = package.description; } // attrs.meta or {}; } // lib.optionalAttrs (!(attrs ? version)) { # Use name from dub.json, unless pname and version are specified name = package.name; })); } ); mkDubDerivation rec { name = "spine-search-${version}"; version = "0.11.3"; src = ./.; buildInputs = [ pkgs.sqlite ( with pkgs; [ nixFlakes rund dub ldc sqlite ] ) ]; # # buildPhase = [ ]; # installPhase = '' # install -m755 -D spine_search $out/bin/spine-search # echo "built $out/bin/spine-search" # ''; meta = with pkgs.lib; { homepage = https://sisudoc.org; description = "a sisu like document parser"; license = licenses.agpl3Plus; platforms = platforms.linux; maintainers = [ RalphAmissah ]; }; } #+END_SRC *** .envrc #+HEADER: :tangle "../sundry/spine_search_cgi/.envrc" #+BEGIN_SRC sh NIX_ENFORCE_PURITY=0 if [ -e .envrc-local ]; then # source an additional user-specific .envrc in ./.envrc-local source .envrc-local fi #+END_SRC *** .envrc-local #+HEADER: :tangle "../sundry/spine_search_cgi/.envrc-local_" #+BEGIN_SRC sh export NIX_PATH=/nix/var/nix/profiles/per-user/root/channels/nixos #export NIX_PATH=nixpkgs=/nixpkgs-ra/nixpkgs # reload when these files change watch_file flake.nix # watch_file flake.lock ## load the flake devShell eval "$(nix print-dev-env)" # 
echo $NIX_BUILD_TOP export SpineVER=$(git describe --long --tags | sed 's/^[ a-z_-]\+\([0-9.]\+\)/\1/;s/\([^-]*-g\)/r\1/;s/-/./g') export SpineSRC=<> export SpineBIN=<> export SpineDOC=<> #export SpineOUTversioned=<> export SpineOUTstatic=<> export SpineCgiBIN=<> echo '-*- mode: org -*- ,* nixpkgs path? eval "$(nix print-dev-env)" ' > nix_note_.org echo " == `nix-instantiate --find-file nixpkgs`" >> nix_note_.org echo ' ,* nix build and show derivation ,#+BEGIN_SRC sh nix-shell --pure nix-build nix build -f default.nix nix shell -f default.nix nix-instantiate | nix-build nix build `nix-instantiate` nix develop nix-instantiate | nix show-derivation | jq nix-instantiate | nix show-derivation --recursive | jq nix search --json 2>/dev/null |jq ,#+END_SRC ,* version and build info ,#+BEGIN_SRC sh' >> nix_note_.org echo 'spine version (git) == $SpineVER' >> nix_note_.org echo "spine version (git) == $SpineVER" >> nix_note_.org echo 'nix-instantiate == `nix-instantiate`' >> nix_note_.org echo "nix-instantiate == `nix-instantiate`" >> nix_note_.org echo "#+END_SRC ,* initialised shell variables ,#+BEGIN_SRC sh SpineSRC=$SpineSRC SpineBIN=$SpineBIN SpineDOC=$SpineDOC #SpineOUTversioned=$SpineOUTversioned SpineOUTstatic=$SpineOUTstatic SpineCgiBIN=$SpineCgiBIN ,#+END_SRC " >> nix_note_.org echo '* cgi form spine_search nix build and copy to spine-search ,#+BEGIN_SRC sh nix-build sudo cp -vi result/share/cgi-bin/spine_search $SpineCgiBIN/. 
,#+END_SRC ,* spine run instruction examples ,** cgi operations (output to $SpineOUTstatic) ,*** configuration ,#+BEGIN_SRC sh $SpineBIN/spine -v --show-config --config=$SpineDOC/markup/pod/.dr ,#+END_SRC ,*** generate html (linked to search form & harvest) ,#+BEGIN_SRC sh $SpineBIN/spine -v --html --html-link-search --html-link-harvest --sqlite-db-filename="<>" --cgi-sqlite-search-filename="<>" --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ,*** generate sql output ,#+BEGIN_SRC sh $SpineBIN/spine -v --sqlite-update --sqlite-db-filename="<>" --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ,*** generate harvest ,#+BEGIN_SRC sh $SpineBIN/spine -v --harvest --output=$SpineOUTstatic $SpineDOC/markup/pod/* $SpineBIN/spine -v --html --html-link-harvest --harvest --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ,*** create or re-create sql db (--sqlite-db-create or --sqlite-db-recreate) ,#+BEGIN_SRC sh $SpineBIN/spine -v --sqlite-db-recreate --sqlite-db-filename="<>" --output="$SpineOUTstatic" ,#+END_SRC ,*** generate html (linked to search form), sql output, harvest ,#+BEGIN_SRC sh $SpineBIN/spine -v --html --html-link-search --html-link-harvest --harvest --sqlite-db-filename="<>" --cgi-sqlite-search-filename="<>" --sqlite-update --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ,*** make search form ,#+BEGIN_SRC sh $SpineBIN/spine -v --cgi-search-form-codegen --config=$SpineDOC/markup/pod/.dr/config_local_site #$SpineBIN/spine -v --cgi-search-form-codegen --output=$SpineOUTstatic $SpineDOC/markup/pod/* ,#+END_SRC ' >> nix_note_.org cat nix_note_.org echo "emacs nix_note_.org" echo "cat nix_note_.org" #+END_SRC *** dub.json #+HEADER: :tangle "../sundry/spine_search_cgi/dub.json" #+BEGIN_SRC json { "authors": [ "Ralph Amissah" ], "copyright": "Copyright © 2015 - 2021 Ralph Amissah", "name": "spine_search", "version": "0.11.3", "description": "cgi search for spine, a sisu like document parser", "homepage": "https://sisudoc.org", 
"license": "AGPL-3.0+", "targetPath": "./cgi-bin", "sourcePaths": [ "./src" ], "stringImportPaths": [ "./views" ], "buildRequirements": [ "allowWarnings" ], "targetType": "executable", "platforms": [ "posix" ], "buildTypes": { "dmd": { "dflags": [ "-J=views", "-I=src" ], "buildOptions": [ "verbose", "inline" ], "buildRequirements": [ "allowWarnings" ] }, "ldc": { "dflags": [ "-O2", "-J=views", "-I=src" ], "buildOptions": [ "verbose", "optimize", "inline" ], "buildRequirements": [ "allowWarnings" ] } }, "dependencies": { "spine_search:arsd.cgi": "*", "spine_search:d2sqlite3": "*" }, "subPackages": [ { "name": "arsd.cgi", "description": "cgi", "homepage": "https://github.com/dlang-community/d2sqlite3", "authors": [ "Aadam Ruppee" ], "copyright": "Copyright 2011-18 Aadam Ruppee", "license": "BSL-1.0", "sourcePaths": [ "./src/ext_depends_cgi/arsd" ], "configurations": [ { "name": "cgi", "cgi": "with-lib", "targetType": "library", "systemDependencies": "Arsd version >= 0.8.7" } ] }, { "name": "d2sqlite3", "description": "A thin wrapper around SQLite 3", "homepage": "https://github.com/dlang-community/d2sqlite3", "authors": [ "Nicolas Sicard", "Other contributors: see Github repo" ], "copyright": "Copyright 2011-18 Nicolas Sicard", "license": "BSL-1.0", "sourcePaths": [ "./src/ext_depends_cgi/d2sqlite3/source" ], "configurations": [ { "name": "d2sqlite3", "d2sqlite3": "with-lib", "targetType": "library", "systemDependencies": "SQLite version >= 3.8.7", "libs": [ "sqlite3" ], "excludedSourceFiles": [ "source/tests.d" ] } ] } ], "configurations": [ { "name": "default", "targetName": "spine_search" }, { "name": "ldc", "targetName": "spine-search-ldc.cgi" }, { "name": "dmd", "targetName": "spine-search-dmd.cgi" } ], "subConfigurations": { "d2sqlite3": "with-lib" } } #+END_SRC *** spine_search.d **** settings forf spine search SET #+NAME: url_doc_root #+BEGIN_SRC text /srv/www #+END_SRC #+NAME: doc_root_path #+BEGIN_SRC text _cfg.doc_root_www #+END_SRC #+BEGIN_SRC text 
/var/www/html #+END_SRC #+NAME: cgi_root_path #+BEGIN_SRC text _cfg.doc_root_cgi #+END_SRC #+BEGIN_SRC text /usr/lib/cgi-bin/ #+END_SRC #+NAME: db_root_path #+BEGIN_SRC text _cfg.doc_root_db #+END_SRC #+BEGIN_SRC text <>/spine/static/sqlite/ #+END_SRC **** spine_search configuration.txt #+HEADER: :tangle "../sundry/spine_search_cgi/views/configuration_suggested.txt" #+BEGIN_SRC d /+ obt - org-mode generated file +/ struct Cfg { string doc_root_www = "/srv/www/spine/static"; string doc_root_cgi = "/var/www/cgi/cgi-bin/"; string doc_root_db = "/srv/www/spine/static/sqlite/"; string filename_cgi = "spine_search"; string filename_db = "spine.search.db"; } enum _cfg = Cfg(); #+END_SRC **** spine_search.d #+HEADER: :tangle "../sundry/spine_search_cgi/src/spine_search.d_" #+BEGIN_SRC d /+ dub.sdl name "spine search" description "spine cgi search" /+ obt - org-mode generated file +/ +/ import std.format; import std.range; import std.regex; import arsd.cgi; import d2sqlite3; import std.process : environment; void cgi_function_intro(Cgi cgi) { mixin(import("configuration.txt")); string header; string table; string form; struct Config { string http_request_type; string http_host; // string server_name; string web_doc_root_path; string doc_collection_sub_root; string cgi_root; string cgi_script; string data_path_html; string db_path; string query_string; string http_url; string request_method; } auto conf = Config(); // SET conf.http_request_type = environment.get("REQUEST_SCHEME", "http"); conf.http_host = environment.get("HTTP_HOST", "localhost"); // conf.server_name = environment.get("SERVER_NAME", "localhost"); conf.web_doc_root_path = environment.get("DOCUMENT_ROOT", <>); conf.doc_collection_sub_root = <>; // (output_path - web_doc_root_path) // conf.doc_collection_sub_root = "<>/spine/static"; // (output_path - web_doc_root_path) // problem FIX conf.cgi_root = environment.get("CONTEXT_DOCUMENT_ROOT", <>); // conf.cgi_script = environment.get("SCRIPT_NAME", 
"/cgi-bin/spine-search"); conf.query_string = environment.get("QUERY_STRING", ""); conf.http_url = environment.get("HTTP_REFERER", conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ conf.query_string); conf.db_path = <>; // (output_path + /sqlite) conf.request_method = environment.get("REQUEST_METHOD", "POST"); struct CGI_val { string db_selected = ""; string sql_match_limit = ""; // radio: ( 1000 | 2500 ) string sql_match_offset = ""; string search_text = ""; string results_type = ""; // index bool checked_echo = false; bool checked_stats = false; bool checked_url = false; bool checked_searched = false; bool checked_tip = false; bool checked_sql = false; } auto cv = CGI_val(); cv.db_selected = _cfg.filename_db; // cv.db_selected = "spine.search.db"; auto text_fields() { string canned_query_str = environment.get("QUERY_STRING", ""); if ("query_string" in cgi.post) { canned_query_str = environment.get("QUERY_STRING", ""); } string[string] canned_query; if (conf.request_method == "POST") { } else if (conf.request_method == "GET") { foreach (pair_str; canned_query_str.split("&")) { // cgi.write(pair_str ~ "
"); string[] pair = pair_str.split("="); canned_query[pair[0]] = pair[1]; } // foreach (field, content; canned_query) { // cgi.write(field ~ ": " ~ content ~ "
"); // } } static struct Rgx { // static canned_query = ctRegex!(`\A(?P.+)\Z`, "m"); static search_text_area = ctRegex!(`\A(?P.+)\Z`, "m"); // static fulltext = ctRegex!(`\A(?P.+)\Z`, "m"); static line = ctRegex!(`^(?P.+?)(?: ~|$)`, "m"); static text = ctRegex!(`(?:^|\s~\s*)text:\s+(?P.+?)(?: ~|$)`, "m"); static author = ctRegex!(`(?:^|\s~\s*)author:\s+(?P.+)$`, "m"); static title = ctRegex!(`(?:^|\s~\s*)title:\s+(?P.+)$`, "m"); static uid = ctRegex!(`(?:^|\s~\s*)uid:\s+(?P.+)$`, "m"); static fn = ctRegex!(`(?:^|\s~\s*)fn:\s+(?P.+)$`, "m"); static keywords = ctRegex!(`(?:^|\s~\s*)keywords:\s+(?P.+)$`, "m"); static topic_register = ctRegex!(`(?:^|\s~\s*)topic_register:\s+(?P.+)$`, "m"); static subject = ctRegex!(`(?:^|\s~\s*)subject:\s+(?P.+)$`, "m"); static description = ctRegex!(`(?:^|\s~\s*)description:\s+(?P.+)$`, "m"); static publisher = ctRegex!(`(?:^|\s~\s*)publisher:\s+(?P.+)$`, "m"); static editor = ctRegex!(`(?:^|\s~\s*)editor:\s+(?P.+)$`, "m"); static contributor = ctRegex!(`(?:^|\s~\s*)contributor:\s+(?P.+)$`, "m"); static date = ctRegex!(`(?:^|\s~\s*)date:\s+(?P.+)$`, "m"); static results_type = ctRegex!(`(?:^|\s~\s*)type:\s+(?P.+)$`, "m"); static format = ctRegex!(`(?:^|\s~\s*)format:\s+(?P.+)$`, "m"); static source = ctRegex!(`(?:^|\s~\s*)source:\s+(?P.+)$`, "m"); static language = ctRegex!(`(?:^|\s~\s*)language:\s+(?P.+)$`, "m"); static relation = ctRegex!(`(?:^|\s~\s*)relation:\s+(?P.+)$`, "m"); static coverage = ctRegex!(`(?:^|\s~\s*)coverage:\s+(?P.+)$`, "m"); static rights = ctRegex!(`(?:^|\s~\s*)rights:\s+(?P.+)$`, "m"); static comment = ctRegex!(`(?:^|\s~\s*)comment:\s+(?P.+)$`, "m"); // static abstract_ = ctRegex!(`(?:^|\s~\s*)abstract:\s+(?P.+)$`, "m"); static src_filename_base = ctRegex!(`^src_filename_base:\s+(?P.+)$`, "m"); } struct searchFields { string canned_query = ""; // GET canned_query == cq string search_text_area = ""; // POST search_text_area == tsa string text = ""; // text == txt string author = ""; // author == au string title 
= ""; // title == ti string uid = ""; // uid == uid string fn = ""; // fn == fn string keywords = ""; // keywords == kw string topic_register = ""; // topic_register == tr string subject = ""; // subject == su string description = ""; // description == de string publisher = ""; // publisher == pb string editor = ""; // editor == ed string contributor = ""; // contributor == ct string date = ""; // date == dt string format = ""; // format == fmt string source = ""; // source == src sfn string language = ""; // language == lng string relation = ""; // relation == rl string coverage = ""; // coverage == cv string rights = ""; // rights == rgt string comment = ""; // comment == cmt // string abstract = ""; string src_filename_base = ""; // src_filename_base == bfn string results_type = ""; // results_type == rt radio string sql_match_limit = ""; // sql_match_limit == sml radio string sql_match_offset = ""; // sql_match_offset == smo string stats = ""; // stats == sts checked string echo = ""; // echo == ec checked string url = ""; // url == url checked string searched = ""; // searched == se checked string sql = ""; // sql == sql checked } auto rgx = Rgx(); auto got = searchFields(); if (environment.get("REQUEST_METHOD", "POST") == "POST") { if ("sf" in cgi.post) { got.search_text_area = cgi.post["sf"]; if (auto m = got.search_text_area.matchFirst(rgx.text)) { got.text = m["matched"]; got.canned_query ~= "sf=" ~ m["matched"]; } else if (auto m = got.search_text_area.matchFirst(rgx.line)) { if ( !(m["matched"].matchFirst(rgx.author)) && !(m["matched"].matchFirst(rgx.title)) ) { got.text = m["matched"]; got.canned_query ~= "sf=" ~ m["matched"]; } } if (auto m = got.search_text_area.matchFirst(rgx.author)) { got.author = m["matched"]; got.canned_query ~= "&au=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.title)) { got.title = m["matched"]; got.canned_query ~= "&ti=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.uid)) { got.uid = 
m["matched"]; got.canned_query ~= "&uid=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.fn)) { got.fn = m["matched"]; got.canned_query ~= "&fn=" ~ m["matched"]; } else if ("fn" in cgi.post) { got.search_text_area ~= "\nfn: " ~ cgi.post["fn"] ~ "\n"; } if (auto m = got.search_text_area.matchFirst(rgx.keywords)) { got.keywords = m["matched"]; got.canned_query ~= "&kw=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.topic_register)) { got.topic_register = m["matched"]; got.canned_query ~= "&tr=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.subject)) { got.subject = m["matched"]; got.canned_query ~= "&su=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.description)) { got.description = m["matched"]; got.canned_query ~= "&de=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.publisher)) { got.publisher = m["matched"]; got.canned_query ~= "&pb=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.editor)) { got.editor = m["matched"]; got.canned_query ~= "&ed=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.contributor)) { got.contributor = m["matched"]; got.canned_query ~= "&ct=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.date)) { got.date = m["matched"]; got.canned_query ~= "&dt=" ~ m["matched"]; } // if (auto m = got.search_text_area.matchFirst(rgx.results_type)) { // got.results_type = m["matched"]; // got.canned_query ~= "&rt=" ~ m["matched"]; // } if (auto m = got.search_text_area.matchFirst(rgx.format)) { got.format = m["matched"]; got.canned_query ~= "&fmt=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.source)) { got.source = m["matched"]; got.canned_query ~= "&src=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.language)) { got.language = m["matched"]; got.canned_query ~= "&lng=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.relation)) { 
got.relation = m["matched"]; got.canned_query ~= "&rl=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.coverage)) { got.coverage = m["matched"]; got.canned_query ~= "&cv=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.rights)) { got.rights = m["matched"]; got.canned_query ~= "&rgt=" ~ m["matched"]; } if (auto m = got.search_text_area.matchFirst(rgx.comment)) { got.comment = m["matched"]; got.canned_query ~= "&cmt=" ~ m["matched"]; } // if (auto m = search_text_area.matchFirst(rgx.abstract)) { // got.abstract = m["matched"]; // } if (auto m = got.search_text_area.matchFirst(rgx.src_filename_base)) { got.src_filename_base = m["matched"]; got.canned_query ~= "&bfn=" ~ m["matched"]; } } if ("fn" in cgi.post) { got.fn = cgi.post["fn"]; got.canned_query ~= "&fn=" ~ cgi.post["fn"]; } if ("rt" in cgi.post) { got.results_type = cgi.post["rt"]; got.canned_query ~= "&rt=" ~ cgi.post["rt"]; } if ("sts" in cgi.post) { got.stats = cgi.post["sts"]; got.canned_query ~= "&sts=" ~ cgi.post["sts"]; } if ("ec" in cgi.post) { got.echo = cgi.post["ec"]; got.canned_query ~= "&ec=" ~ cgi.post["ec"]; } if ("url" in cgi.post) { got.url = cgi.post["url"]; got.canned_query ~= "&url=" ~ cgi.post["url"]; } if ("se" in cgi.post) { got.searched = cgi.post["se"]; got.canned_query ~= "&se=" ~ cgi.post["se"]; } if ("sql" in cgi.post) { got.sql = cgi.post["sql"]; got.canned_query ~= "&sql=" ~ cgi.post["sql"]; } if ("sml" in cgi.post) { got.sql_match_limit = cgi.post["sml"]; got.canned_query ~= "&sml=" ~ cgi.post["sml"]; } if ("smo" in cgi.post) { got.sql_match_offset = "0"; // cgi.post["smo"]; got.canned_query ~= "&smo=0"; // ~ cgi.post["smo"]; } got.canned_query = got.canned_query.strip.split(" ").join("%20"); conf.query_string = got.canned_query; // cgi.write("f.canned_query: " ~ got.canned_query ~ "
"); } else if (environment.get("REQUEST_METHOD", "POST") == "GET") { got.canned_query = environment.get("QUERY_STRING", ""); // cgi.write("f.canned_query: " ~ got.canned_query ~ "
"); got.search_text_area = ""; if ("sf" in canned_query && !(canned_query["sf"]).empty) { got.text = canned_query["sf"].split("%20").join(" "); got.search_text_area ~= "text: " ~ got.text ~ "\n"; } if ("au" in canned_query && !(canned_query["au"]).empty) { got.author = canned_query["au"].split("%20").join(" "); got.search_text_area ~= "author: " ~ got.author ~ "\n"; } if ("ti" in canned_query && !(canned_query["ti"]).empty) { got.title = canned_query["ti"].split("%20").join(" "); got.search_text_area ~= "title: " ~ got.title ~ "\n"; } if ("uid" in canned_query && !(canned_query["uid"]).empty) { got.uid = canned_query["uid"].split("%20").join(" "); got.search_text_area ~= "uid: " ~ got.uid ~ "\n"; } if ("fn" in canned_query && !(canned_query["fn"]).empty) { got.fn = canned_query["fn"].split("%20").join(" "); got.search_text_area ~= "fn: " ~ got.fn ~ "\n"; } if ("kw" in canned_query && !(canned_query["kw"]).empty) { got.keywords = canned_query["kw"].split("%20").join(" "); got.search_text_area ~= "keywords: " ~ got.keywords ~ "\n"; } if ("tr" in canned_query && !(canned_query["tr"]).empty) { got.topic_register = canned_query["tr"].split("%20").join(" "); got.search_text_area ~= "topic_register: " ~ got.topic_register ~ "\n"; } if ("su" in canned_query && !(canned_query["su"]).empty) { got.subject = canned_query["su"].split("%20").join(" "); got.search_text_area ~= "subject: " ~ got.subject ~ "\n"; } if ("de" in canned_query && !(canned_query["de"]).empty) { got.description = canned_query["de"].split("%20").join(" "); got.search_text_area ~= "description: " ~ got.description ~ "\n"; } if ("pb" in canned_query && !(canned_query["pb"]).empty) { got.publisher = canned_query["pb"].split("%20").join(" "); got.search_text_area ~= "publisher: " ~ got.publisher ~ "\n"; } if ("ed" in canned_query && !(canned_query["ed"]).empty) { got.editor = canned_query["ed"].split("%20").join(" "); got.search_text_area ~= "editor: " ~ got.editor ~ "\n"; } if ("ct" in canned_query && 
!(canned_query["ct"]).empty) { got.contributor = canned_query["ct"].split("%20").join(" "); got.search_text_area ~= "contributor: " ~ got.contributor ~ "\n"; } if ("dt" in canned_query && !(canned_query["dt"]).empty) { got.date = canned_query["dt"].split("%20").join(" "); got.search_text_area ~= "date: " ~ got.date ~ "\n"; } if ("rt" in canned_query && !(canned_query["rt"]).empty) { got.results_type = canned_query["rt"].split("%20").join(" "); // got.search_text_area ~= "results_type: " ~ got.results_type ~ "\n"; } if ("fmt" in canned_query && !(canned_query["fmt"]).empty) { got.format = canned_query["fmt"].split("%20").join(" "); got.search_text_area ~= "format: " ~ got.format ~ "\n"; } if ("src" in canned_query && !(canned_query["src"]).empty) { got.source = canned_query["src"].split("%20").join(" "); got.search_text_area ~= "source: " ~ got.source ~ "\n"; } if ("lng" in canned_query && !(canned_query["lng"]).empty) { got.language = canned_query["lng"].split("%20").join(" "); got.search_text_area ~= "language: " ~ got.language ~ "\n"; } if ("rl" in canned_query && !(canned_query["rl"]).empty) { got.relation = canned_query["rl"].split("%20").join(" "); got.search_text_area ~= "relation: " ~ got.relation ~ "\n"; } if ("cv" in canned_query && !(canned_query["cv"]).empty) { got.coverage = canned_query["cv"].split("%20").join(" "); got.search_text_area ~= "coverage: " ~ got.coverage ~ "\n"; } if ("rgt" in canned_query && !(canned_query["rgt"]).empty) { got.rights = canned_query["rgt"].split("%20").join(" "); got.search_text_area ~= "rights: " ~ got.rights ~ "\n"; } if ("cmt" in canned_query && !(canned_query["cmt"]).empty) { got.comment = canned_query["cmt"].split("%20").join(" "); got.search_text_area ~= "comment: " ~ got.comment ~ "\n"; } // if ("abstract" in canned_query && !(canned_query["abstract"]).empty) { // got.abstract = canned_query["abstract"]; // } if ("bfn" in canned_query && !(canned_query["bfn"]).empty) { // search_field got.src_filename_base = 
canned_query["bfn"].split("%20").join(" "); got.search_text_area ~= "src_filename_base: " ~ got.src_filename_base ~ "\n"; } if ("sml" in canned_query && !(canned_query["sml"]).empty) { got.sql_match_limit = canned_query["sml"].split("%20").join(" "); // got.search_text_area ~= "sql_match_limit: " ~ got.sql_match_limit ~ "\n"; } // cgi.write("f.search_text_area: " ~ got.search_text_area ~ "
"); } return got; } auto tf = text_fields; // struct SQL_select { string the_body = ""; string the_range = ""; } auto sql_select = SQL_select(); string canned_url () { string _url = ""; if (environment.get("REQUEST_METHOD", "POST") == "POST") { _url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ tf.canned_query; } else if (environment.get("REQUEST_METHOD", "POST") == "GET") { _url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ environment.get("QUERY_STRING", ""); } return _url; } auto regex_canned_search () { static struct RgxCS { static track_offset = ctRegex!(`(?P[&]smo=)(?P[0-9]+)`); static results_type = ctRegex!(`[&]rt=(?Pidx|txt)`); static results_type_index = ctRegex!(`[&]rt=idx`); static results_type_text = ctRegex!(`[&]rt=txt`); static fn = ctRegex!(`[&]fn=(?P[^&]+)`); } return RgxCS(); } string show_matched_objects (string fn) { auto rgx = regex_canned_search; string _matched_objects_text = ""; string _url = canned_url; string _url_new = ""; string _matches_show_text = "&rt=txt"; string _matches_show_index = "&rt=idx"; string _fn = "&fn=" ~ fn; _url_new = _url; if (_url_new.match(rgx.results_type_index)) { _url_new = _url_new.replace(rgx.results_type_index, _matches_show_text); } else if (_url.match(rgx.results_type_text)) { _url_new = _url_new.replace(rgx.results_type_text, _matches_show_index); } else { if (!(_url.match(rgx.results_type))) { _url_new = _url ~ _matches_show_text; } } if (!(_url_new.match(rgx.fn))) { _url_new = _url_new ~ _fn; } _matched_objects_text = "" ~ "" ~ "※" ~ ""; return _matched_objects_text; } string base ; // = ""; string tip ; // = ""; string search_note ; // = ""; uint sql_match_offset_count = 0; string previous_next () { auto rgx = regex_canned_search; string _previous_next = ""; int _current_offset_value = 0; string _set_offset_next = ""; string _set_offset_previous = ""; string _url = canned_url; string _url_previous = ""; string _url_next = ""; string 
arrow_previous = ""; string arrow_next = ""; if (auto m = _url.matchFirst(rgx.track_offset)) { _current_offset_value = m.captures["offset_val"].to!int; _set_offset_next = m.captures["offset_key"] ~ ((m.captures["offset_val"]).to!int + cv.sql_match_limit.to!int).to!string; _url_next = _url.replace(rgx.track_offset, _set_offset_next); if (_current_offset_value < cv.sql_match_limit.to!int) { _url_previous = ""; } else { _url_previous = ""; _set_offset_previous = m.captures["offset_key"] ~ ((m.captures["offset_val"]).to!int - cv.sql_match_limit.to!int).to!string; _url_previous = _url.replace(rgx.track_offset, _set_offset_previous); } } else {// _current_offset_value = 0; _url_next = _url ~= "&smo=" ~ cv.sql_match_limit.to!string; } if (_url_previous.empty) { arrow_previous = ""; } else { arrow_previous = "" ~ "" ~ "<< prev" ~ " || "; } arrow_next = "" ~ "" ~ "next >>" ~ ""; _previous_next = "
" ~ arrow_previous ~ arrow_next; return _previous_next; } { header = format(q"┃ ≅ SiSU spine search form ┃", conf.http_host, ); } { table = format(q"┃
≅ SiSU spine search form
┃"); } { string post_value(string field_name, string type="box", string set="on") { string val = ""; switch (type) { case "field": val = ((field_name in cgi.post && !(cgi.post[field_name]).empty) ? cgi.post[field_name] : (field_name in cgi.get) ? cgi.get[field_name] : ""); val = tf.search_text_area; break; case "box": // generic for checkbox or radio; checkbox set == "on" radio set == "name set" val = ((field_name in cgi.post && !(cgi.post[field_name]).empty) ? (cgi.post[field_name] == set ? "checked" : "off") : (field_name in cgi.get) ? (cgi.get[field_name] == set ? "checked" : "off") : "off"); break; case "radio": // used generic bo val = ((field_name in cgi.post && !(cgi.post[field_name]).empty) ? (cgi.post[field_name] == set ? "checked" : "off") : (field_name in cgi.get) ? (cgi.get[field_name] == set ? "checked" : "off") : "checked"); break; case "checkbox": // used generic bo val = ((field_name in cgi.post && !(cgi.post[field_name]).empty) ? (cgi.post[field_name] == set ? "checked" : "off") : (field_name in cgi.get) ? (cgi.get[field_name] == set ? "checked" : "off") : "checked"); break; default: } return val; } string the_can(string fv) { string show_the_can = post_value("url"); string _the_can = ""; if (show_the_can == "checked") { tf = text_fields; string method_get_url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ environment.get("QUERY_STRING", ""); string method_post_url_construct = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ tf.canned_query; // assert(method_get_url == environment.get("HTTP_REFERER", conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ "?" ~ conf.query_string)); if (conf.request_method == "POST") { _the_can = "" ~ "POST: " ~ "" ~ method_post_url_construct ~ "" ~ "
"; } else if (conf.request_method == "GET") { _the_can = "" ~ "GET:  " ~ "" ~ method_get_url ~ ""; } conf.http_url = conf.http_request_type ~ "://" ~ conf.http_host ~ conf.cgi_script ~ tf.canned_query; } return _the_can; } string provide_tip() { string searched_tip = post_value("se"); string tip = ""; if (searched_tip == "checked") { string search_field = post_value("sf", "field"); tf = text_fields; tip = format(q"┃ database: %s; selected view: index search string: %s %s %s %s %s %s
%s %s %s %s %s %s
┃", cv.db_selected, (tf.text.empty ? "" : "\"text: " ~ tf.text ~ "; "), (tf.title.empty ? "" : "\"title: " ~ tf.title ~ "; "), (tf.author.empty ? "" : "\"author: " ~ tf.author ~ "; "), (tf.date.empty ? "" : "\"date " ~ tf.date ~ "; "), (tf.uid.empty ? "" : "\"uid: " ~ tf.uid ~ "; "), (tf.fn.empty ? "" : "\"fn: " ~ tf.fn ~ "; "), (tf.text.empty ? "" : "text: " ~ tf.text ~ "
"), (tf.title.empty ? "" : "title: " ~ tf.title ~ "
"), (tf.author.empty ? "" : "author: " ~ tf.author ~ "
"), (tf.date.empty ? "" : "date: " ~ tf.date ~ "
"), (tf.uid.empty ? "" : "\"uid: " ~ tf.uid ~ "; "), (tf.fn.empty ? "" : "\"fn: " ~ tf.fn ~ "; "), ); } return tip; } form = format(q"┃
%s %s %s
index text / grep; match limit: 1,000 2,500
echo query search url searched sql statement
┃", _cfg.filename_cgi, (post_value("ec") == "checked") ? post_value("sf", "field") : "", provide_tip, search_note, the_can(post_value("sf", "field")), cv.db_selected, post_value("rt", "box", "idx"), post_value("rt", "box", "txt"), post_value("sml", "box", "1000"), post_value("sml", "box", "2500"), post_value("ec"), post_value("url"), post_value("se"), post_value("sql"), ); { string set_value(string field_name, string default_val) { string val; if (field_name in cgi.post) { val = cgi.post[field_name]; } else if (field_name in cgi.get) { val = cgi.get[field_name]; } else { val = default_val; } return val; } bool set_bool(string field_name) { bool val; if (field_name in cgi.post && cgi.post[field_name] == "on") { val = true; } else if (field_name in cgi.get && cgi.get[field_name] == "on") { val = true; } else { val = false; } return val; } cv.db_selected = set_value("selected_db", _cfg.filename_db); // selected_db_name == db (spine.search.db or whatever) cv.sql_match_limit = set_value("sml", "1000"); cv.sql_match_offset = set_value("smo", "0"); cv.search_text = set_value("sf", ""); cv.results_type = set_value("rt", "idx"); cv.checked_echo = set_bool("ec"); cv.checked_stats = set_bool("sts"); cv.checked_url = set_bool("url"); cv.checked_searched = set_bool("se"); cv.checked_tip = set_bool("tip"); cv.checked_sql = set_bool("sql"); tf = text_fields; } } { cgi.write(header); cgi.write(table); cgi.write(form); // cgi.write(previous_next); { // debug environment // foreach (k, d; environment.toAA) { // cgi.write(k ~ ": " ~ d ~ "
"); // } } { // debug cgi info // cgi.write("db_selected: " ~ cv.db_selected ~ "
\n"); // cgi.write("search_text: " ~ cv.search_text ~ "
\n"); // cgi.write("sql_match_limit: " ~ cv.sql_match_limit ~ ";\n"); // cgi.write("sql_match_offset: " ~ cv.sql_match_offset ~ ";\n"); // cgi.write("results_type: " ~ cv.results_type ~ "
\n"); // cgi.write("cv.checked_echo: " ~ (cv.checked_echo ? "checked" : "off") ~ "; \n"); // cgi.write("cv.checked_stats: " ~ (cv.checked_stats ? "checked" : "off") ~ "; \n"); // cgi.write("cv.checked_url: " ~ (cv.checked_url ? "checked" : "off") ~ "; \n"); // cgi.write("cv.checked_searched: " ~ (cv.checked_searched ? "checked" : "off") ~ ";
\n"); // cgi.write("cv.checked_tip: " ~ (cv.checked_tip ? "checked" : "off") ~ "; \n"); // cgi.write("cv.checked_sql: " ~ (cv.checked_sql ? "checked" : "off") ~ "
\n"); } } auto db = Database(conf.db_path ~ cv.db_selected); { uint sql_match_offset_counter(T)(T cv) { sql_match_offset_count += cv.sql_match_limit.to!uint; return sql_match_offset_count; } void sql_search_query() { string highlight_text_matched(string _txt, string search_field) { string _mark_open = "┤"; string _mark_close = "├"; string _span_match = ""; string _span_close = ""; string _sf_str = search_field.strip.split("%20").join(" ").strip; string[] _sf_arr = _sf_str.split(regex(r"\s+AND\s+|\s+OR\s+")); auto rgx_url = regex(r"]+?>"); foreach (_sf; _sf_arr) { auto rgx_matched_text = regex(_sf, "i"); auto rgx_marked_pair = regex(r"┤(?P" ~ _sf ~ ")├", "i"); if (auto m = _txt.matchFirst(rgx_url)) { _txt = replaceAll!(m => _mark_open ~ m.captures[0] ~ _mark_close )(_txt, rgx_matched_text); _txt = replaceAll!(m => replaceAll!(u => u["keep"] )(m.hit, rgx_marked_pair) )(_txt, rgx_url); _txt = replaceAll!(m => _span_match ~ m["keep"] ~ _span_close )(_txt, rgx_marked_pair); } else { _txt = replaceAll!(m => _span_match ~ m.captures[0] ~ _span_close )(_txt, rgx_matched_text); } } return _txt; } string select_field_like(string db_field, string search_field) { string where_ = ""; if (!(search_field.empty)) { string _sf = search_field.strip.split("%20").join(" "); if (_sf.match(r" OR ")) { _sf = _sf.split(" OR ").join("%' OR " ~ db_field ~ " LIKE '%"); } if (_sf.match(r" AND ")) { _sf = _sf.split(" AND ").join("%' AND " ~ db_field ~ " LIKE '%"); } _sf = "( " ~ db_field ~ " LIKE\n '%" ~ _sf ~ "%' )"; where_ ~= format(q"┃ %s ┃", _sf ); } return where_; } string[] _fields; _fields ~= select_field_like("doc_objects.clean", tf.text); _fields ~= select_field_like("metadata_and_text.title", tf.title); _fields ~= select_field_like("metadata_and_text.creator_author", tf.author); _fields ~= select_field_like("metadata_and_text.uid", tf.uid); _fields ~= select_field_like("metadata_and_text.src_filename_base", tf.fn); _fields ~= select_field_like("metadata_and_text.src_filename_base", 
tf.src_filename_base); _fields ~= select_field_like("metadata_and_text.language_document_char", tf.language); _fields ~= select_field_like("metadata_and_text.date_published", tf.date); _fields ~= select_field_like("metadata_and_text.classify_keywords", tf.keywords); _fields ~= select_field_like("metadata_and_text.classify_topic_register", tf.topic_register); string[] fields; foreach (f; _fields) { if (!(f.empty)) { fields ~= f; } } string fields_str = ""; fields_str ~= fields.join(" AND "); sql_select.the_body ~= format(q"┃ SELECT metadata_and_text.uid, metadata_and_text.title, metadata_and_text.creator_author_last_first, metadata_and_text.creator_author, metadata_and_text.src_filename_base, metadata_and_text.language_document_char, metadata_and_text.date_published, metadata_and_text.classify_keywords, metadata_and_text.classify_topic_register, doc_objects.body, doc_objects.seg_name, doc_objects.ocn, metadata_and_text.uid FROM doc_objects, metadata_and_text WHERE ( %s ) AND doc_objects.uid_metadata_and_text = metadata_and_text.uid ORDER BY metadata_and_text.creator_author_last_first, metadata_and_text.date_published DESC, metadata_and_text.title, metadata_and_text.language_document_char, metadata_and_text.src_filename_base, doc_objects.ocn LIMIT %s OFFSET %s ;┃", fields_str, cv.sql_match_limit, cv.sql_match_offset, ); (cv.checked_sql) ? cgi.write(previous_next ~ "
" ~ sql_select.the_body.strip.split("\n ").join(" ").split("\n").join("
") ~ "
\n" ) : ""; cgi.write(previous_next); auto select_query_results = db.execute(sql_select.the_body).cached; string _old_uid = ""; if (!select_query_results.empty) { string _date_published = "0000"; string _close_para = ""; string _matched_ocn_open = ""; foreach (idx, row; select_query_results) { if (row["uid"].as!string != _old_uid) { _close_para = (idx == 1) ? "" : "

"; _matched_ocn_open = (idx == 1) ? "" : "

"; _old_uid = row["uid"].as!string; _date_published = (row["date_published"].as!string.match(regex(r"^([0-9]{4})"))) ? row["date_published"].as!string : "0000"; // used in regex that breaks if no match auto m = _date_published.match(regex(r"^([0-9]{4})")); string _date = (m.hit == "0000") ? "(year?) " : "(" ~ m.hit ~ ") "; cgi.write( _close_para ~ "


" ~ "

\"" ~ row["title"].as!string ~ "\"" ~ " " ~ _date ~ "[" ~ row["language_document_char"].as!string ~ "] " ~ row["creator_author_last_first"].as!string ~ " " ~ show_matched_objects(row["src_filename_base"].as!string) ~ "

" ~ "
" ); } if (cv.results_type == "txt") { if (row["ocn"].as!string != "0") { cgi.write( "
" ~ "" ~ "
" ~ highlight_text_matched(row["body"].as!string, tf.text) ~ "
" ~ "
" ); } else { cgi.write( "
" ~ "" ~ "
" ~ highlight_text_matched(row["body"].as!string, tf.text) ~ "
" ~ "
" ); } } else { if (row["ocn"].as!string != "0") { cgi.write( _matched_ocn_open ~ "" ~ row["ocn"].as!string ~ ", " ); } else { cgi.write( _matched_ocn_open ~ "" ~ row["ocn"].as!string ~ ", " ); } _matched_ocn_open = ""; } } cgi.write( previous_next); } else { // offset_not_beyond_limit = false; cgi.write("select_query_results empty

\n"); } cgi.write("


git

"); } sql_search_query; } { db.close; } { string tail = format(q"┃ ┃"); cgi.write(tail); } } mixin GenericMain!cgi_function_intro; #+END_SRC *** .gitignore :gitignore: #+HEADER: :tangle "../sundry/spine_search_cgi/.gitignore" #+BEGIN_SRC sh # git ls-files --others --exclude-from=.git/info/exclude ,* !.gitignore !README.md !COPYRIGHT !CHANGELOG !makefile !version.txt !*.json !*.sdl !meson.build !tangle !*.org !*.d !*.rb !*.txt !conf.sdl !*.nix !nix !nix/** !.envrc !src !src/** !*.sst !*.ssm !**/*.sst !**/*.ssm !config_local_site !views .dub/** ,**/.dub/** ,**/cgi-bin/** tmp/** ,*_.org ,*_.d ,*_.txt ,*_ ,*.swp ,*~ ,*~ \#* ,*.\#* !.envrc-local_ !src/spine_search.d_ #!*/ #\#* #*.\#* #.reggae/** #+END_SRC *** set names SET #+NAME: spine_search_db #+BEGIN_SRC sh spine.search.db #+END_SRC #+NAME: spine_search_cgi #+BEGIN_SRC sh spine_search #+END_SRC ** sh script to batch process _emacs org babel tangle_ :shell_script:tangle: [[https://orgmode.org/manual/Batch-execution.html]] creates a shell batch script called "tangle", that will tangle (emacs org babel tangle) org files in ./org/ to create .d source files in ./src/doc_reform/ (similar functionality is contained within the "makefile" created by this "dr_build_scaffold.org" file make tangle) #+HEADER: :tangle ../tangle #+HEADER: :tangle-mode (identity #o755) #+HEADER: :shebang "#!/bin/sh" #+BEGIN_SRC sh # -*- mode: shell-script -*- # tangle files with org-mode DIR=`pwd` ORGFILES="" EMACSLISP=/usr/share/emacs/site-lisp ORG_VER_AVAILABLE=$(shell echo `ls -d ~/.emacs.d/elpa/org-???????? | cut -d '-' -f2`) EMACSLISP_ORG=~/.emacs.d/elpa/org-$($(shell echo $(ORG_VER_AVAILABLE))) ORG_CONTRIB_VER_AVAILABLE=$(shell echo `ls -d ~/.emacs.d/elpa/org-plus-???????? 
| cut -d '-' -f2`) EMACSLISP_ORG_CONTRIB=~/.emacs.d/elpa/org-plus-contrib-$($(shell echo $(ORG_CONTRIB_VER_AVAILABLE))) # wrap each argument in the code required to call tangle on it for i in $@; do ORGFILES="$ORGFILES \"$i\"" done emacs --batch -Q -q \ --eval "(progn (add-to-list 'load-path (expand-file-name \"$EMACSLISP\")) (add-to-list 'load-path (expand-file-name \"$EMACSLISP_ORG\" t)) (add-to-list 'load-path (expand-file-name \"$EMACSLISP_ORG_CONTRIB\" t)) (require 'org)(require 'ob)(require 'ob-tangle) (mapc (lambda (file) (find-file (expand-file-name file \"$DIR\")) (setq-local org-src-preserve-indentation t) (org-babel-tangle) (kill-buffer)) '($ORGFILES)))" 2>&1 #|grep tangled #+END_SRC ** build *** nix **** nix-shell - default.nix #+BEGIN_SRC sh nix-shell nix-build #+END_SRC - project.nix #+BEGIN_SRC sh nix-shell nix-build project.nix #+END_SRC **** nix-shell --pure - default.nix #+BEGIN_SRC sh nix-shell --pure nix-build -I nixpkgs=/nixpkgs-ra/nixpkgs #+END_SRC - project.nix #+BEGIN_SRC sh nix-shell --pure nix-build -I nixpkgs=/nixpkgs-ra/nixpkgs #+END_SRC #+BEGIN_SRC sh nix build nix build -f default.nix nix build -f project.nix nix build -f spine.nix #+END_SRC **** derivation .drv #+BEGIN_SRC sh nix show-derivation /nix/store/q7n14bm8j5vzm62qaraczdb4bpyf59vv-spine-0.11.1.drv #+END_SRC *** dub #+BEGIN_SRC sh dub build -h #+END_SRC #+BEGIN_SRC sh time dub --compiler=ldc2 -v --force time (dub --compiler=ldc2 --config=spine-ldc --debug=steps) time (dub --compiler=ldc2 --config=spine-ldc --debug=checkdoc --debug=summary --debug=dumpdoc) time (dub --compiler=ldc2 --config=spine-ldc --debug=io) time (dub --compiler=ldc2 --config=spine-ldc --debug=checkdoc --debug=summary --debug=dumpdoc --debug=io) time (dub --compiler=ldc2 --config=spine-ldc-debug --debug=io) time dub --compiler=dmd -v --force time (dub --compiler=dmd --config=spine-dmd --debug=steps) time (dub --compiler=dmd --config=spine-dmd-debug --debug=io) time dub --compiler=gdc -v --force time (dub 
--compiler=gdc --config=spine-gdc --debug=steps) time (dub --compiler=gdc --config=spine-gdc-debug --debug=io) #+END_SRC *** make #+BEGIN_SRC sh time make dmd time make gdc time make ldc time make gdc ldc time make all time make all_ver time make dmd_ver ldc_ver gdc_ver time make restart time make restart ldc time make tangle ldc time make ldc_testrun_find_pod_epub #+END_SRC ** git *** project version #+BEGIN_SRC sh echo $(git describe --long --tags | sed 's/^[ a-z_-]\+\([0-9.]\+\)/\1/;s/\([^-]*-g\)/r\1/;s/-/./g') #+END_SRC *** what files changed #+BEGIN_SRC sh git whatchanged --since="1 day ago" --oneline --name-only --pretty=format: | sort -u git log --since="1 day ago" --name-only --pretty=format: | sort -u #+END_SRC ** test run *** e.g. #+BEGIN_SRC sh time (./result/bin/spine --source --html -v --output-dir=tmp/program-output data/pod/sisu-manual/media/text/en/sisu_markup.sst ) time (./bin/spine-ldc --source --html -v --output-dir=tmp/program-output data/pod/sisu-manual/media/text/en/sisu_markup.sst ) time (./bin/spine-ldc --source --html -v --output-dir=tmp/program-output data/pod/sisu-manual ) time (./bin/spine-ldc --source --html -v --output-dir=tmp/program-output data/pod/the_wealth_of_networks.yochai_benkler ) time (./bin/spine-ldc --source --html -v --output-dir=tmp/program-output data/pod/live-manual ) time (~sdp2/bin/spine-ldc --pod --source --html --epub --sqlite-create --sqlite-update -v sisudoc/media/text/en/sisu_markup_stress_test.sst sisudoc/media/text/en/the_wealth_of_networks.yochai_benkler.sst ) can point to directory containing sisudoc.txt file (listing which files to process) time (~sdp2/bin/spine-ldc --html -v sisudoc) # will process source files listed in sisudoc.txt for appropriate files and dir structure e.g. 
live-manual a multilingual document (with source file inserts .ssi) time (~sdp2/bin/spine-ldc --html --lang=en,es -v sisudoc) time ( find data/pod -name pod.manifest | sort | xargs ./bin/spine-ldc --source --html --epub -v --output-dir=tmp/program-output ) time ( find data/pod -maxdepth 2 -name pod.manifest | sort | xargs ./bin/spine-ldc --source --html --epub -v --output-dir=tmp/program-output ) time ( find data/sisudir/media/text -name '*.ss[tm]' | sort | xargs ./bin/spine-ldc --source --html --epub -v --output-dir=tmp/program-output ) find data/pod -maxdepth 2 -name pod.manifest | sort | xargs find data/sisudir/media/text -name '*.ss[tm]' | sort | xargs #+END_SRC *** sort #+BEGIN_SRC sh ~dr/bin/spine-ldc -v --sqlite-db-create --sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=/var/www/html \ ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/bin/spine-ldc -v --sqlite-update --sqlite-db-filename="spine.search.db" --output=/var/www/html \ ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/bin/spine-ldc -v --html --harvest-link --output=/var/www/html \ ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/bin/spine-ldc -q --show-config --html --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod ~dr/bin/spine-ldc -q --show-config ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod ~dr/result/bin/spine --html --output=/var/www/html /grotto-ra/repo/git.repo/projects/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/result/bin/spine --very-verbose --sqlite-db-create --sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=/var/www /grotto-ra/repo/git.repo/projects/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/result/bin/spine --very-verbose --sqlite-update
--sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=/var/www /grotto-ra/repo/git.repo/projects/project-spine/doc-reform-markup/markup_samples/markup/pod/* #+END_SRC - produces: - sqlite db @: /var/www/html/sqlite/spine.search.db - search script in D @: /var/www/html/cgi/src/spine_search.d - html output * /var/www/html/en/html/[filename] #+BEGIN_SRC sh cd /var/www/html/cgi cp arsd/cgi.d /var/www/html/cgi/. *-[needs to be implemented as part of code] dub --force --compiler=ldc2 && sudo cp -v cgi-bin/spine-search /usr/lib/cgi-bin/. #+END_SRC *** list markup files find data/pod -name pod.manifest | sort find data/sisudir/media/text -name *.ss[tm] | sort time make ldc_testrun_paths_pod_pod time make ldc_testrun_paths_pod_pod | ag "^\".+\"|NOT found" time make ldc_testrun_find_pod_pod time make ldc_testrun_find_dir_pod time make ldc_testrun_filelist_dir_pod ** compilers - [X] Set D_COMPILER (one of DMD LDC or GDC) - [X] Set debug flags (using DMD standard flag -deb [[https://wiki.dlang.org/Compilers][D Compilers wiki https://wiki.dlang.org/Compilers]] [[https://dlang.org/download.html][D Compilers download https://dlang.org/download.html]] ** version info REVIEW :version: - https://dlang.org/download.html *** compilers (set latest versions) :compiler:version: **** ldc VERSION :version:set:ldc: - https://github.com/ldc-developers/ldc/releases ***** 1.24.0 HACK - https://github.com/ldc-developers/ldc/releases/tag/v1.24.0 - nix-prefetch-url https://github.com/ldc-developers/ldc/releases/download/v1.24.0/ldc-1.24.0-src.tar.gz #+NAME: ldc_version_info #+BEGIN_SRC nix version = "1.24.0"; sha256 = "0g5svf55i0kq55q49awmwqj9qi1n907cyrn1vjdjgs8nx6nn35gx"; #+END_SRC - ldc-1.24.0 overlay does not build with latest nixos version, nixos-20.09 nixos at ldc-1.20.0 **** dmd :dmd: - https://dlang.org/changelog/index.html - https://dlang.org/changelog/pending.html - https://downloads.dlang.org/releases/ - https://downloads.dlang.org/releases/2.x/ ** 
dub :dub: https://github.com/dlang/dub/ https://code.dlang.org/getting_started https://code.dlang.org/docs/commandline *** setup **** dub json or sdlang Every DUB package should contain a [[https://code.dlang.org/package-format?lang=json][dub.json]] or [[https://code.dlang.org/package-format?lang=sdl][dub.sdl]] https://code.dlang.org/package-format?lang=sdl https://code.dlang.org/packages/sdlang-d https://github.com/Abscissa/SDLang-D/blob/master/HOWTO.md https://code.dlang.org/docs/commandline **** dub dependencies dub list dub upgrade dub fetch taggedalgebraic dub fetch libinputvisitor dub fetch unit-threaded ? sdp ~master: /home/ralph/grotto/repo/git.repo/utils/spine-x/ /home/ralph/.dub/packages/* *** build **** debug flags #+BEGIN_SRC sh biblio biblio_sorted bibliosorted block bookindex bookindexmatch bookindexraw check checkdoc code comment dumpdoc write out document contents, with object type and ocn endnotes endnotes_build footnotes footnotesdone group header header_and_content header1 headerjson headermakejson headermetadatajson heading headings headingsfound insert io munge node objectrelated1 objectrelated2 objects ocnoff para parabullet parabulletindent paraindent paraindenthang parent poem quote raw source srclines structattrib summary table toc #+END_SRC ** dub REFERENCE *** SET version #+NAME: version_dub #+BEGIN_SRC nix <> #+END_SRC #+NAME: nix_sha_dub #+BEGIN_SRC nix <> #+END_SRC *** SELECT version **** dub 1.24.0 #+NAME: version_dub_next #+BEGIN_SRC nix 1.24.0 #+END_SRC #+NAME: nix_sha_dub_next #+BEGIN_SRC nix 0wirm6msz8kw2a7v2ax9ab6i48cmfv6qjwpfrl9vrr4k4xdg3vn6 #+END_SRC **** dub 1.23.0 #+NAME: version_dub_current #+BEGIN_SRC nix 1.23.0 #+END_SRC #+NAME: nix_sha_dub_current #+BEGIN_SRC nix 06a4whsl1m600k096nwif83n7za3vr7pj1xwapncy5fcad1gmady #+END_SRC *** dub (using ldc rather than dmd) overlay REFERENCE :overlay: #+HEADER: :NO-tangle ~/nixpkgs/pkgs/development/tools/build-managers/dub/default.nix #+BEGIN_SRC nix { lib, stdenv, fetchFromGitHub, 
curl, libevent, rsync, ldc, dcompiler ? ldc }: assert dcompiler != null; stdenv.mkDerivation rec { pname = "dub"; version = "<>"; enableParallelBuilding = true; src = fetchFromGitHub { owner = "dlang"; repo = "dub"; rev = "v${version}"; sha256 = "<>"; }; postUnpack = '' patchShebangs . ''; # Can be removed with https://github.com/dlang/dub/pull/1368 dubvar = "\\$DUB"; postPatch = '' substituteInPlace test/fetchzip.sh \ --replace "dub remove" "\"${dubvar}\" remove" ''; nativeBuildInputs = [ dcompiler libevent rsync ]; buildInputs = [ curl ]; buildPhase = '' for dc_ in dmd ldmd2 gdmd; do echo "... check for D compiler $dc_ ..." dc=$(type -P $dc_ || echo "") if [ ! "$dc" == "" ]; then break fi done if [ "$dc" == "" ]; then exit "Error: could not find D compiler" fi export DC_NAME=$dc_ echo "$dc_ found and used as D compiler to build $pname" $dc ./build.d ./build ''; doCheck = !stdenv.isDarwin; checkPhase = '' export DUB=$NIX_BUILD_TOP/source/bin/dub export PATH=$PATH:$NIX_BUILD_TOP/source/bin/ export DC=${dcompiler.out}/bin/$DC_NAME echo "DC out --> $DC" export HOME=$TMP <> ./test/run-unittest.sh ''; installPhase = '' mkdir -p $out/bin cp bin/dub $out/bin ''; meta = with lib; { description = "Package and build manager for D applications and libraries"; homepage = "https://code.dlang.org/"; license = licenses.mit; maintainers = with maintainers; [ ThomasMader ]; platforms = [ "x86_64-linux" "i686-linux" "x86_64-darwin" ]; }; } #+END_SRC ***** removed failing tests #+NAME: build_dub_tests #+BEGIN_SRC nix rm test/0-init-multi.sh rm test/0-init-multi-json.sh rm test/5-convert-stdout.sh rm test/dc-env.sh rm test/ddox.sh rm test/feat663-search.sh rm test/fetchzip.sh rm test/interactive-remove.sh rm test/issue672-upgrade-optional.sh rm test/issue674-concurrent-dub.sh rm test/issue820-extra-fields-after-convert.sh rm test/issue877-auto-fetch-package-on-run.sh rm test/issue990-download-optional-selected.sh rm test/issue1003-check-empty-ld-flags.sh rm 
test/issue1037-better-dependency-messages.sh rm test/issue1040-run-with-ver.sh rm test/issue1091-bogus-rebuild.sh rm test/issue1180-local-cache-broken.sh rm test/issue1194-warn-wrong-subconfig.sh rm test/issue1277.sh rm test/issue1372-ignore-files-in-hidden-dirs.sh rm test/issue1416-maven-repo-pkg-supplier.sh rm test/issue1447-build-settings-vars.sh rm test/issue1574-addcommand.sh rm test/issue1524-maven-upgrade-dependency-tree.sh rm test/issue1773-lint.sh rm test/removed-dub-obj.sh rm test/version-spec.sh rm -r test/git-dependency rm -r test/issue502-root-import #+END_SRC * NOTE ** manually set version DRV=0.10.0; git tag -f doc-reform_v\${DRV} -m\"doc-reform spine-\${DRV}\" e5452d46475785c78fe4462717bdb74f17c4197 V_MAJOR=0; V_MINOR=10; V_PATCH=0 for i in org/spine.org; do; sed -i "s/Version(\s*[0-9]\+,\s\+[0-9]\+,\s\+[0-9]\+\s*)/Version(${V_MAJOR}, ${V_MINOR}, ${V_PATCH})/g" $i; done for i in org/spine_build_scaffold.org; do; sed -i "0,/\(version:\)\s\+['][0-9]\+\.[0-9]\+\.[0-9]\+['],/s//\1 \'${V_MAJOR}.${V_MINOR}.${V_PATCH}\',/g" $i; done ** check cgi build operations ~dr/bin/spine-ldc -v --cgi-search-form-codegen --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod ~dr/bin/spine-ldc -v --show-config --config=/grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/.dr ~dr/bin/spine-ldc --show-config --html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/bin/spine-ldc -v --sqlite-db-create --sqlite-db-filename="spine.search.db" --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod ~dr/bin/spine-ldc -v --sqlite-db-create --sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod ~dr/bin/spine-ldc -v --sqlite-db-recreate 
--sqlite-db-filename="spine.search.db" --cgi-sqlite-search-filename="spine-search" --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod ~dr/bin/spine-ldc -v --sqlite-update --sqlite-db-filename="spine.search.db" --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/* ~dr/bin/spine-ldc -v --cgi-search-form-codegen --config=/grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/.dr/config_local_site ~dr/bin/spine-ldc -v --html --html-link-search --html-link-harvest --harvest --output=/var/www/html ~grotto/repo/git.repo/code/project-spine/doc-reform-markup/markup_samples/markup/pod/*