Diffstat (limited to 'org')
-rw-r--r--  org/meta_read_source_files.org  131
-rw-r--r--  org/sdp.org                      71
2 files changed, 141 insertions, 61 deletions
diff --git a/org/meta_read_source_files.org b/org/meta_read_source_files.org
index 6b44dfb..8062f46 100644
--- a/org/meta_read_source_files.org
+++ b/org/meta_read_source_files.org
@@ -26,7 +26,7 @@ import
std.path;
#+END_SRC
-* 1. get _config file_ (read in) :module:sdp:meta_read_config_files:
+* A. get _config file_ (read in) :module:sdp:meta_read_config_files:
** 0. module template
#+BEGIN_SRC d :tangle ../src/sdp/meta/read_config_files.d
@@ -142,8 +142,8 @@ final auto configRead(C,E)(C conf_sdl, E env) {
}
#+END_SRC
-* 2. get _markup source_, read file :module:sdp:meta_read_source_files:
-** 0. module template
+* B. get _markup source_, read file :module:sdp:meta_read_source_files:
+** 0. module template (includes tuple)
#+BEGIN_SRC d :tangle ../src/sdp/meta/read_source_files.d
/++
@@ -158,10 +158,19 @@ static template SiSUrawMarkupContent() {
<<imports_std>>
mixin SiSUrgxInit;
static auto rgx = Rgx();
+ string[] _images=[];
+ auto _extract_images(S)(S content_block) {
+ string[] images_;
+ auto _content_block = content_block.to!string;
+ if (auto m = _content_block.matchAll(rgx.image)) {
+ images_ ~= m.captures[1].to!string;
+ }
+ return images_;
+ }
auto rawsrc = RawMarkupContent();
- auto SiSUrawMarkupContent(Fn)(Fn fn_src) {
+ auto SiSUrawMarkupContent(O,Fn)(O _opt_action, Fn fn_src) {
auto _0_header_1_body_content_2_insert_filelist_tuple =
- rawsrc.sourceContentSplitIntoHeaderAndBody(rawsrc.sourceContent(fn_src), fn_src);
+ rawsrc.sourceContentSplitIntoHeaderAndBody(_opt_action, rawsrc.sourceContent(fn_src), fn_src);
return _0_header_1_body_content_2_insert_filelist_tuple;
}
struct RawMarkupContent {
@@ -171,9 +180,10 @@ static template SiSUrawMarkupContent() {
raw.markupSourceReadIn(fn_src);
return source_txt_str;
}
- final auto sourceContentSplitIntoHeaderAndBody(in string source_txt_str, in string fn_src="") {
+ final auto sourceContentSplitIntoHeaderAndBody(O)(O _opt_action, in string source_txt_str, in string fn_src="") {
auto raw = MarkupRawUnit();
string[] insert_file_list;
+ string[] images_list;
auto t =
raw.markupSourceHeaderContentRawLineTupleArray(source_txt_str);
auto header_raw = t[0];
@@ -181,17 +191,25 @@ static template SiSUrawMarkupContent() {
if (fn_src.match(rgx.src_fn_master)) { // filename with path needed if master file (.ssm) not otherwise
auto ins = Inserts();
auto tu =
- ins.scan_master_src_for_insert_files_and_import_content(sourcefile_body_content, fn_src);
+ ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src);
static assert(!isTypeTuple!(tu));
sourcefile_body_content = tu[0];
insert_file_list = tu[1].dup;
+ images_list = tu[2].dup;
+ } else if (_opt_action.source || _opt_action.sisupod) {
+ auto ins = Inserts();
+ auto tu =
+ ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src);
+ static assert(!isTypeTuple!(tu));
+ images_list = tu[2].dup;
}
t = tuple(
header_raw,
sourcefile_body_content,
- insert_file_list
+ insert_file_list,
+ images_list
);
- static assert(t.length==3);
+ static assert(t.length==4);
return t;
}
}
@@ -206,9 +224,10 @@ static template SiSUrawMarkupContent() {
}
struct Inserts {
import sdp.meta.defaults;
- auto scan_subdoc_source(
+ auto scan_subdoc_source(O)(
+ O _opt_action,
char[][] markup_sourcefile_insert_content,
- string fn_src
+ string fn_src
) {
mixin SiSUrgxInitFlags;
<<meta_inserts_scan>>
@@ -217,10 +236,12 @@ static template SiSUrawMarkupContent() {
} // end src subdoc (inserts) loop
<<meta_inserts_scan_post>>
}
- auto scan_master_src_for_insert_files_and_import_content(
+ auto scan_master_src_for_insert_files_and_import_content(O)(
+ O _opt_action,
char[][] sourcefile_body_content,
- string fn_src
+ string fn_src
) {
+ import std.algorithm;
mixin SiSUrgxInitFlags;
<<meta_master_doc_scan_for_insert_filenames>>
foreach (line; sourcefile_body_content) {
@@ -321,12 +342,18 @@ auto markupSourceReadIn(in string fn_src) {
}
#+END_SRC
-**** tuple header and body content
+**** tuple (a) header, (b) body content, (c) file insert list & (d) image list?
+
+- header
+- body content
+- file insert list
+- [image list?]
#+name: meta_markup_source_raw_tuple_of_header_and_body
#+BEGIN_SRC d
auto markupSourceHeaderContentRawLineTupleArray(in string source_txt_str) {
string[] file_insert_list = [];
+ string[] images_list = [];
auto hc = header0Content1(source_txt_str);
auto header = hc[0];
char[] source_txt = hc[1];
@@ -334,7 +361,8 @@ auto markupSourceHeaderContentRawLineTupleArray(in string source_txt_str) {
auto t = tuple(
header,
source_line_arr,
- file_insert_list
+ file_insert_list,
+ images_list
);
return t;
}
@@ -432,20 +460,30 @@ if (type1["curly_code"] == 1) {
markup_sourcesubfile_insert_content.length
);
}
+ if (_opt_action.source || _opt_action.sisupod) {
+ _images ~= _extract_images(markup_sourcesubfile_insert_content);
+ }
auto ins = Inserts();
/+
- 1. load file,
- 2. read lines;
- 3. scan lines,
- 4. if filename insert, and insert filename
- 5. repeat 1
- 6. else
- 7. add line to new array;
+ - 1. load file
+ - 2. read lines
+ - 3. scan lines
+ - a. if the line is a file-insert directive, insert the named file's content
+ - repeat 1
+ - b. else
+ - add line to new array;
+ - build image list, search for any image files to add to image list
+/
} else {
type1["header_make"] = 0;
type1["header_meta"] = 0;
- contents_insert ~= line;
+ contents_insert ~= line; // images to extract for image list?
+ if (_opt_action.source || _opt_action.sisupod) {
+ auto _image_linelist = _extract_images(line);
+ if (_image_linelist.length > 0) {
+ _images ~= _image_linelist;
+ }
+ }
}
#+END_SRC
@@ -453,7 +491,11 @@ if (type1["curly_code"] == 1) {
#+name: meta_inserts_scan_post
#+BEGIN_SRC d
-return contents_insert;
+auto t = tuple(
+ contents_insert,
+ _images
+);
+return t;
#+END_SRC
*** scan document source :scan_src:
@@ -465,6 +507,8 @@ char[][] contents;
auto type = flags_type_init;
auto fn_pth_full = fn_src.match(rgx.src_pth);
auto markup_src_file_path = fn_pth_full.captures[1];
+char[][] contents_insert;
+string[] _images =[];
string[] insert_file_list =[];
#+END_SRC
@@ -511,22 +555,36 @@ if (type["curly_code"] == 1) {
);
}
auto ins = Inserts();
- auto contents_insert = ins.scan_subdoc_source(
+ auto contents_insert_tu = ins.scan_subdoc_source(
+ _opt_action,
markup_sourcefile_insert_content,
to!string(fn_src_insert)
);
- contents ~= contents_insert;
+ contents ~= contents_insert_tu[0]; // images to extract for image list?
+ if (_opt_action.source || _opt_action.sisupod) {
+ auto _image_linelist = _extract_images(contents_insert_tu[0]);
+ if (_image_linelist.length > 0) {
+ _images ~= _image_linelist;
+ }
+ }
/+
- 1. load file,
- 2. read lines;
- 3. scan lines,
- 4. if filename insert, and insert filename
- 5. repeat 1
- 6. else
- 7. add line to new array;
+ - 1. load file
+ - 2. read lines
+ - 3. scan lines
+ - a. if the line is a file-insert directive, insert the named file's content
+ - repeat 1
+ - b. else
+ - add line to new array;
+ - build image list, search for any image files to add to image list
+/
} else {
contents ~= line;
+ if (_opt_action.source || _opt_action.sisupod) {
+ auto _image_linelist = _extract_images(line);
+ if (_image_linelist.length > 0) {
+ _images ~= _image_linelist;
+ }
+ }
}
#+END_SRC
@@ -534,13 +592,18 @@ if (type["curly_code"] == 1) {
#+name: meta_master_doc_scan_for_insert_filenames_post
#+BEGIN_SRC d
+string[] images = [];
+foreach(i; uniq(_images.sort())) {
+ images ~= i;
+}
debug(insert_file) {
writeln(__LINE__);
writeln(contents.length);
}
auto t = tuple(
contents,
- insert_file_list
+ insert_file_list,
+ images
);
return t;
#+END_SRC
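The _extract_images helper added above appends only captures[1] of the first match per content block. If a single block can reference several images, iterating the matchAll range collects every occurrence; a minimal sketch of that variant (illustrative only, not part of this commit), reusing the rgx.image pattern and folding in the sort/uniq de-duplication done before the tuple is returned:

#+BEGIN_SRC d
// sketch: collect every image capture in a content block, de-duplicated
auto _extract_images_all(S)(S content_block) {
  import std.algorithm : sort, uniq;
  import std.array : array;
  import std.conv : to;
  import std.regex : matchAll;
  string[] images_;
  foreach (m; content_block.to!string.matchAll(rgx.image)) {
    images_ ~= m[1].to!string;       // capture group 1: the image filename
  }
  return images_.sort().uniq.array;  // same effect as the foreach/uniq pass above
}
#+END_SRC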
diff --git a/org/sdp.org b/org/sdp.org
index 650fb21..27b6cc8 100644
--- a/org/sdp.org
+++ b/org/sdp.org
@@ -26,7 +26,7 @@ struct Version {
int minor;
int patch;
}
-enum ver = Version(0, 22, 0);
+enum ver = Version(0, 23, 0);
#+END_SRC
** compilation restrictions (supported compilers)
@@ -78,7 +78,7 @@ void main(string[] args) {
<<sdp_args>>
<<sdp_env>>
<<sdp_do_selected>>
- if (_manifests.length > 1) { // _manifests[0] is dummy element used in initialization to be removed
+ if (_manifests.length > 1) { // _manifests[0] is an initialization dummy element
foreach(manifest; _manifests[1..$]) {
if (!empty(manifest.src_fn)) {
<<sdp_each_file_do_scope>>
@@ -477,7 +477,7 @@ struct OptActions {
auto _opt_action = OptActions();
#+END_SRC
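The image-list gathering in both files is gated on _opt_action.source || _opt_action.sisupod. The OptActions struct itself is not shown in this diff; a minimal sketch of the two members that gate assumes (member names taken from the usage above, everything else omitted):

#+BEGIN_SRC d
// sketch only: the option flags relied on by the image-list gating in this commit
struct OptActions {
  bool source;  // markup source output requested
  bool sisupod; // sisupod (document source bundle) requested
  // ... remaining action flags not shown in this diff
}
#+END_SRC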
-***** getopt processing path
+***** getopt processing path, _manifest[] >>
#+NAME: sdp_args
#+BEGIN_SRC d
@@ -485,19 +485,19 @@ auto _env = [
"pwd" : environment["PWD"],
"home" : environment["HOME"],
];
-auto _manifest = PodManifest!()();
-auto _manifest_plus = PodMatters!()(_opt_action, _env);
-auto _manifests = [ _manifest_plus ];
+auto _manifest_start = PodManifest!()();
+auto _manifest_matter = PodMatters!()(_opt_action, _env);
+auto _manifests = [ _manifest_matter ];
foreach(arg; args[1..$]) {
- _manifest = PodManifest!()(arg);
+ _manifest_start = PodManifest!()(arg);
if (arg.match(rgx.flag_action)) {
flag_action ~= " " ~ arg; // flags not taken by getopt
} else if (arg.match(rgx.src_pth)) {
_manifests ~= PodMatters!()(_opt_action, _env, arg, arg); // gather input markup source file names for processing
- } else if (_manifest.pod_manifest_file_with_path) {
+ } else if (_manifest_start.pod_manifest_file_with_path) {
string contents_location_raw_;
string contents_location_;
- string sisudoc_txt_ = _manifest.pod_manifest_file_with_path;
+ string sisudoc_txt_ = _manifest_start.pod_manifest_file_with_path;
enforce(
exists(sisudoc_txt_)!=0,
"file not found: «" ~
@@ -536,8 +536,8 @@ foreach(arg; args[1..$]) {
|| (contents_location_pth_).match(lang_rgx_)
) {
auto _fns = (((tmp_dir_).chainPath(contents_location_pth_)).array).to!(string);
- _manifest_plus = PodMatters!()(_opt_action, _env, arg, _fns, contents_locations_arr);
- _manifests ~= _manifest_plus; // TODO how to capture?
+ _manifest_matter = PodMatters!()(_opt_action, _env, arg, _fns, contents_locations_arr);
+ _manifests ~= _manifest_matter; // TODO how to capture?
}
}
} else if (arg.match(rgx.src_pth_zip)) {
@@ -676,7 +676,7 @@ module sdp.meta.metadoc;
template SiSUabstraction() {
<<imports_sdp>>
<<sdp_mixin>>
- enum headBody { header, body_content, insert_filelist }
+ enum headBody { header, body_content, insert_file_list, image_list }
enum makeMeta { make, meta }
enum docAbst { doc_abstraction, section_keys, segnames, segnames_0_4, images }
static auto rgx = Rgx();
@@ -697,37 +697,52 @@ template SiSUabstraction() {
}
#+END_SRC
-** 1. (a) _read in raw file_ (b) split content into: _doc header & doc content_
+** 1. raw file content split, doc: _header_, _content_ +(lists: subdocs? images?) >>
- [[./meta_read_source_files.org][meta_read_source_files]]
- read in the _marked up source document_ and
- split the document into:
- document header
- document body
- - if a master document make a list of insert files
+ - from markup source
+ - if a master document, also from sub-document content
+ - if a master document
+ - make a list of insert files
+ - if build source pod requested
+ - scan for list of images
+ (this scan is skipped if it is not needed at this stage)
+
- _return a tuple of_:
- header
- body
- insert file list
+ - image list (if build source pod requested)
+
+if a build of the source pod is requested, all information needed to build it is available at this point (see the sketch below):
+ - manifest related information: _manifest
+ - insert file list: _header_body_insertfilelist_imagelist[headBody.insert_file_list]
+ - image list: _header_body_insertfilelist_imagelist[headBody.image_list]
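A minimal sketch of pulling those pieces out of the step 1 return value with the headBody enum indices declared above (the pod-building call at the end is hypothetical, not part of this commit):

#+BEGIN_SRC d
// sketch: everything a source pod build needs once step 1 has returned
auto _t = SiSUrawMarkupContent!()(_opt_action, _manifest.src_fn);
if (_opt_action.source || _opt_action.sisupod) {
  string[] _pod_inserts = _t[headBody.insert_file_list].dup; // sub-document list (master docs)
  string[] _pod_images  = _t[headBody.image_list].dup;       // images referenced in the markup
  // buildSourcePod(_manifest, _pod_inserts, _pod_images);   // hypothetical pod builder
}
#+END_SRC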
#+NAME: sdp_each_file_do_read_and_split_sisu_markup_file_content_into_header_and_body
#+BEGIN_SRC d
/+ ↓ read file (filename with path) +/
/+ ↓ file tuple of header and content +/
debug(steps) {
- writeln(__LINE__, ":", __FILE__, ": step1 commence → (get document header & body & insert files)");
+ writeln(__LINE__, ":", __FILE__,
+ ": step1 commence → (get document header & body & insert file list & if needed image list)"
+ );
}
-auto _header_body_inserts =
- SiSUrawMarkupContent!()(_manifest.src_fn);
-static assert(!isTypeTuple!(_header_body_inserts));
-static assert(_header_body_inserts.length==3);
+auto _header_body_insertfilelist_imagelist =
+ SiSUrawMarkupContent!()(_opt_action, _manifest.src_fn);
+static assert(!isTypeTuple!(_header_body_insertfilelist_imagelist));
+static assert(_header_body_insertfilelist_imagelist.length==4);
debug(steps) {
writeln(__LINE__, ":", __FILE__, ": step1 complete");
}
debug(header_and_body) {
writeln(header);
- writeln(_header_body_inserts.length);
- writeln(_header_body_inserts.length[headBody.body_content][0]);
+ writeln(_header_body_insertfilelist_imagelist.length);
+ writeln(_header_body_insertfilelist_imagelist[headBody.body_content][0]);
}
#+END_SRC
@@ -748,16 +763,18 @@ debug(header_and_body) {
#+BEGIN_SRC d
/+ ↓ split header into make and meta +/
debug(steps) {
- writeln(__LINE__, ":", __FILE__, ": step2 commence → (doc header: make & meta as struct)");
+ writeln(__LINE__, ":", __FILE__,
+ ": step2 commence → (doc header: make & meta as struct)"
+ );
}
auto _make_and_meta_struct =
- docHeaderMakeAndMetaTupExtractAndConvertToStruct!()(conf_files_composite_make, _header_body_inserts[headBody.header]); // breakage ...
+ docHeaderMakeAndMetaTupExtractAndConvertToStruct!()(conf_files_composite_make, _header_body_insertfilelist_imagelist[headBody.header]); // breakage ...
debug(steps) {
writeln(__LINE__, ":", __FILE__, ": step2 complete");
}
#+END_SRC
-** 3. _document abstraction, tuple_ (pre-output-processing) :processing:
+** 3. _document abstraction, tuple_ (output-pre-processing) >>
- [[./meta_abstraction.org][meta_abstraction]]
- prepare the document abstraction used in downstream processing
@@ -778,7 +795,7 @@ debug(steps) {
writeln(__LINE__, ":", __FILE__, ": step3 commence → (document abstraction (da); da keys; segnames; doc_matters)");
}
auto da = SiSUdocAbstraction!()(
- _header_body_inserts[headBody.body_content],
+ _header_body_insertfilelist_imagelist[headBody.body_content],
_make_and_meta_struct,
_opt_action,
);
@@ -794,7 +811,7 @@ debug(steps) {
}
#+END_SRC
-** 4. _document matters_ (doc info gathered, various sources)
+** 4. _document matters_ (doc info gathered, various sources) >>
- prepare document_matters, miscellany about processing and the document, of use
in downstream processing
@@ -865,7 +882,7 @@ struct DocumentMatters {
return _manifest.pod_image_dirs;
}
auto file_insert_list() {
- string[] _k = _header_body_inserts[headBody.insert_filelist];
+ string[] _k = _header_body_insertfilelist_imagelist[headBody.insert_file_list];
return _k;
}
auto image_list() {