From b230c345a061517a246bcdaf465faaf84258df8b Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Fri, 20 Jan 2017 10:49:44 -0500 Subject: sdp, remove some intermediate steps --- org/sdp.org | 69 +++++++++++++++++++++++++++++-------------------------------- src/sdp.d | 50 +++++++++++++++++++++++--------------------- 2 files changed, 60 insertions(+), 59 deletions(-) diff --git a/org/sdp.org b/org/sdp.org index 9452b57..663ead0 100644 --- a/org/sdp.org +++ b/org/sdp.org @@ -47,8 +47,7 @@ void main(string[] args) { foreach(fn_src; fns_src) { if (!empty(fn_src)) { <> - <> - <> + <> <> <> <> @@ -384,80 +383,78 @@ enforce( #+END_SRC *** 0. prepare document, _document abstraction_ -**** 0. _read in raw file_ (using filename with path) :file:read: +**** 0. (a) _read in raw file_ & (b) split content into: _doc header & doc content_ - [[./ao_read_source_files.org][ao_read_source_files]] - -#+NAME: sdp_each_file_do_read_sisu_markup_file -#+BEGIN_SRC d -/+ ↓ read file +/ -auto read_in_file_string = raw.sourceContent(fn_src); -#+END_SRC - -**** 1. split raw content file into: _doc header & doc content_ - [[./ao_read_source_files.org][ao_read_source_files]] -#+NAME: sdp_each_file_do_split_sisu_markup_file_content_into_header_and_body +#+NAME: sdp_each_file_do_read_and_split_sisu_markup_file_content_into_header_and_body #+BEGIN_SRC d +/+ ↓ read file (filename with path) +/ /+ ↓ file tuple of header and content +/ -auto header_and_body_tuple = raw.sourceContentSplitIntoHeaderAndBody(read_in_file_string, fn_src); -auto header = header_and_body_tuple[0]; -auto content_body = header_and_body_tuple[1]; -auto _file_insert_list = header_and_body_tuple[2]; +auto _0_header_1_body_content_2_insert_filelist_tuple = + raw.sourceContentSplitIntoHeaderAndBody(raw.sourceContent(fn_src), fn_src); +static assert(!isTypeTuple!(_0_header_1_body_content_2_insert_filelist_tuple)); +static assert(_0_header_1_body_content_2_insert_filelist_tuple.length==3); debug(header_and_body) { writeln(header); - writeln(header_and_body_tuple.length); - writeln(content_body[0]); + writeln(_0_header_1_body_content_2_insert_filelist_tuple.length); + writeln(_0_header_1_body_content_2_insert_filelist_tuple.length[1][0]); } #+END_SRC -**** 2. split doc header into: _metadata & make_ :doc:header:metadata:make: +**** 1. split doc header into: _metadata & make_ :doc:header:metadata:make: - [[./ao_conf_make_meta.org][ao_conf_make_meta]] #+NAME: sdp_each_file_do_split_sisu_markup_file_header_into_make_and_meta #+BEGIN_SRC d /+ ↓ split header into make and meta +/ -auto header_make_and_meta_tuple = head.headerContentAA(header, conf_doc_make_aa); -static assert(!isTypeTuple!(header_make_and_meta_tuple)); -string[string][string] _dochead_make = header_make_and_meta_tuple[0]; -string[string][string] _dochead_meta = header_make_and_meta_tuple[1]; +auto _0_make_1_dochead_meta_tuple = + head.headerContentAA(_0_header_1_body_content_2_insert_filelist_tuple[0], conf_doc_make_aa); +static assert(!isTypeTuple!(_0_make_1_dochead_meta_tuple)); +static assert(_0_make_1_dochead_meta_tuple.length==2); #+END_SRC -**** 3. processing: _document abstraction, tuple_ :processing: +**** 2. 
processing: _document abstraction, tuple_ :processing: - [[./ao_abstract_doc_source.org][ao_abstract_doc_source]] #+NAME: sdp_each_file_do_document_abstraction #+BEGIN_SRC d /+ ↓ document abstraction: process document, return abstraction as tuple +/ -auto t = abs.abstract_doc_source(content_body, _dochead_make, _dochead_meta, _opt_action_bool); +auto t = abs.abstract_doc_source( + (_0_header_1_body_content_2_insert_filelist_tuple[1]), + (_0_make_1_dochead_meta_tuple[0]), + (_0_make_1_dochead_meta_tuple[1]), + _opt_action_bool +); static assert(!isTypeTuple!(t)); auto doc_abstraction = t[0]; // head ~ toc ~ contents ~ endnotes_seg ~ glossary ~ bibliography ~ bookindex ~blurb; -string[][string] document_section_keys_sequenced = t[1]; -string[] doc_html_segnames = t[2]; +string[][string] _document_section_keys_sequenced = t[1]; +string[] _doc_html_segnames = t[2]; #+END_SRC -**** 4. _document matters_ (compiled from various sources) +**** 3. _document matters_ (compiled from various sources) #+NAME: sdp_each_file_do_document_abstraction #+BEGIN_SRC d struct DocumentMatters { string[] keys_seq_seg() { - string[] _k = document_section_keys_sequenced["seg"]; + string[] _k = _document_section_keys_sequenced["seg"]; return _k; } string[] keys_seq_scroll() { - string[] _k = document_section_keys_sequenced["scroll"]; + string[] _k = _document_section_keys_sequenced["scroll"]; return _k; } string[] segnames() { - string[] _k = doc_html_segnames; + string[] _k = _doc_html_segnames; return _k; } auto dochead_make() { - string[string][string] _k = _dochead_make; + string[string][string] _k = _0_make_1_dochead_meta_tuple[0]; return _k; } auto dochead_meta() { - string[string][string] _k = _dochead_meta; + string[string][string] _k = _0_make_1_dochead_meta_tuple[1]; return _k; } auto source_filename() { @@ -465,7 +462,7 @@ struct DocumentMatters { return _k; } auto file_insert_list() { - string[] _k = _file_insert_list; + string[] _k = _0_header_1_body_content_2_insert_filelist_tuple[2]; return _k; } auto opt_action_bool() { @@ -516,10 +513,10 @@ scope(exit) { fn_src ); } - destroy(content_body); + destroy(_0_header_1_body_content_2_insert_filelist_tuple); destroy(t); destroy(doc_abstraction); - destroy(doc_html_segnames); + destroy(_doc_html_segnames); destroy(fn_src); } #+END_SRC diff --git a/src/sdp.d b/src/sdp.d index 3a754b9..ba27789 100755 --- a/src/sdp.d +++ b/src/sdp.d @@ -204,48 +204,52 @@ void main(string[] args) { match(fn_src, rgx.src_pth), "not a sisu markup filename" ); - /+ ↓ read file +/ - auto read_in_file_string = raw.sourceContent(fn_src); + /+ ↓ read file (filename with path) +/ /+ ↓ file tuple of header and content +/ - auto header_and_body_tuple = raw.sourceContentSplitIntoHeaderAndBody(read_in_file_string, fn_src); - auto header = header_and_body_tuple[0]; - auto content_body = header_and_body_tuple[1]; - auto _file_insert_list = header_and_body_tuple[2]; + auto _0_header_1_body_content_2_insert_filelist_tuple = + raw.sourceContentSplitIntoHeaderAndBody(raw.sourceContent(fn_src), fn_src); + static assert(!isTypeTuple!(_0_header_1_body_content_2_insert_filelist_tuple)); + static assert(_0_header_1_body_content_2_insert_filelist_tuple.length==3); debug(header_and_body) { writeln(header); - writeln(header_and_body_tuple.length); - writeln(content_body[0]); + writeln(_0_header_1_body_content_2_insert_filelist_tuple.length); + writeln(_0_header_1_body_content_2_insert_filelist_tuple.length[1][0]); } /+ ↓ split header into make and meta +/ - auto header_make_and_meta_tuple = 
head.headerContentAA(header, conf_doc_make_aa); - static assert(!isTypeTuple!(header_make_and_meta_tuple)); - string[string][string] _dochead_make = header_make_and_meta_tuple[0]; - string[string][string] _dochead_meta = header_make_and_meta_tuple[1]; + auto _0_make_1_dochead_meta_tuple = + head.headerContentAA(_0_header_1_body_content_2_insert_filelist_tuple[0], conf_doc_make_aa); + static assert(!isTypeTuple!(_0_make_1_dochead_meta_tuple)); + static assert(_0_make_1_dochead_meta_tuple.length==2); /+ ↓ document abstraction: process document, return abstraction as tuple +/ - auto t = abs.abstract_doc_source(content_body, _dochead_make, _dochead_meta, _opt_action_bool); + auto t = abs.abstract_doc_source( + (_0_header_1_body_content_2_insert_filelist_tuple[1]), + (_0_make_1_dochead_meta_tuple[0]), + (_0_make_1_dochead_meta_tuple[1]), + _opt_action_bool + ); static assert(!isTypeTuple!(t)); auto doc_abstraction = t[0]; // head ~ toc ~ contents ~ endnotes_seg ~ glossary ~ bibliography ~ bookindex ~blurb; - string[][string] document_section_keys_sequenced = t[1]; - string[] doc_html_segnames = t[2]; + string[][string] _document_section_keys_sequenced = t[1]; + string[] _doc_html_segnames = t[2]; struct DocumentMatters { string[] keys_seq_seg() { - string[] _k = document_section_keys_sequenced["seg"]; + string[] _k = _document_section_keys_sequenced["seg"]; return _k; } string[] keys_seq_scroll() { - string[] _k = document_section_keys_sequenced["scroll"]; + string[] _k = _document_section_keys_sequenced["scroll"]; return _k; } string[] segnames() { - string[] _k = doc_html_segnames; + string[] _k = _doc_html_segnames; return _k; } auto dochead_make() { - string[string][string] _k = _dochead_make; + string[string][string] _k = _0_make_1_dochead_meta_tuple[0]; return _k; } auto dochead_meta() { - string[string][string] _k = _dochead_meta; + string[string][string] _k = _0_make_1_dochead_meta_tuple[1]; return _k; } auto source_filename() { @@ -253,7 +257,7 @@ void main(string[] args) { return _k; } auto file_insert_list() { - string[] _k = _file_insert_list; + string[] _k = _0_header_1_body_content_2_insert_filelist_tuple[2]; return _k; } auto opt_action_bool() { @@ -283,10 +287,10 @@ void main(string[] args) { fn_src ); } - destroy(content_body); + destroy(_0_header_1_body_content_2_insert_filelist_tuple); destroy(t); destroy(doc_abstraction); - destroy(doc_html_segnames); + destroy(_doc_html_segnames); destroy(fn_src); } } else { -- cgit v1.2.3
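
A minimal sketch (not from the sdp sources) of the pattern this commit moves to: each preparation step returns a std.typecons tuple and later steps read the parts by index ([0], [1], [2]) instead of binding named intermediates such as read_in_file_string, header or content_body. The toy markup and the function names splitIntoHeaderAndBody_ and headerMakeAndMeta_ below are hypothetical stand-ins, not the real raw.sourceContentSplitIntoHeaderAndBody / head.headerContentAA interfaces.

#+BEGIN_SRC d
/+ illustrative sketch only; function names and toy markup are hypothetical +/
import std.stdio;
import std.algorithm.searching : startsWith;
import std.string : splitLines;
import std.typecons : tuple, isTuple;

/+ ↓ split raw source into (header lines, body lines, insert-file list) +/
auto splitIntoHeaderAndBody_(string raw_src) {
  string[] header, body_content, insert_filelist;
  foreach (line; raw_src.splitLines) {
    if (line.startsWith("@"))
      header ~= line;                    /+ toy header marker +/
    else if (line.startsWith("<< "))
      insert_filelist ~= line[3 .. $];   /+ toy insert-file marker +/
    else
      body_content ~= line;
  }
  return tuple(header, body_content, insert_filelist);
}

/+ ↓ split header lines into (make, meta) groups +/
auto headerMakeAndMeta_(string[] header) {
  string[] make, meta;
  foreach (line; header) {
    if (line.startsWith("@make:")) make ~= line;
    else                           meta ~= line;
  }
  return tuple(make, meta);
}

void main() {
  string raw_src = "@title: example\n@make: breaks\nparagraph one\n<< insert.sst";
  /+ ↓ one expression per step; parts are read by tuple index, as in the commit +/
  auto _0_header_1_body_2_inserts = splitIntoHeaderAndBody_(raw_src);
  static assert(isTuple!(typeof(_0_header_1_body_2_inserts)));
  static assert(_0_header_1_body_2_inserts.length == 3);
  auto _0_make_1_meta = headerMakeAndMeta_(_0_header_1_body_2_inserts[0]);
  static assert(_0_make_1_meta.length == 2);
  writeln("header:  ", _0_header_1_body_2_inserts[0]);
  writeln("body:    ", _0_header_1_body_2_inserts[1]);
  writeln("inserts: ", _0_header_1_body_2_inserts[2]);
  writeln("make:    ", _0_make_1_meta[0]);
  writeln("meta:    ", _0_make_1_meta[1]);
}
#+END_SRC

The trade-off is the one visible in the diff: fewer throwaway variables to destroy in scope(exit), at the cost of long positional names such as _0_header_1_body_content_2_insert_filelist_tuple standing in for what the indices mean; the static asserts on tuple length guard those positions at compile time.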