From ae23669169b32d4986af06c1ae9483cc9c52d39d Mon Sep 17 00:00:00 2001
From: Ralph Amissah
Date: Sat, 7 Jul 2018 13:55:43 -0400
Subject: 0.26.4 file renames, cleaning, reorganisation

---
 org/source_files_read.org | 733 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 733 insertions(+)
 create mode 100644 org/source_files_read.org

diff --git a/org/source_files_read.org b/org/source_files_read.org
new file mode 100644
index 0000000..899e628
--- /dev/null
+++ b/org/source_files_read.org
@@ -0,0 +1,733 @@
+#+TITLE: sdp markup source raw
+#+AUTHOR: Ralph Amissah
+#+EMAIL: [[mailto:ralph.amissah@gmail.com][ralph.amissah@gmail.com]]
+#+DESCRIPTION: documents - structuring, publishing in multiple formats & search
+#+KEYWORDS
+#+LANGUAGE: en
+#+STARTUP: indent content
+#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t
+#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc
+#+OPTIONS: author:nil email:nil creator:nil timestamp:nil
+#+PROPERTY: header-args :padline no :exports code :noweb yes
+#+EXPORT_SELECT_TAGS: export
+#+EXPORT_EXCLUDE_TAGS: noexport
+#+FILETAGS: :sdp:rel:meta:read:file
+#+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n)
+
+[[./sdp.org][sdp]] [[./][org/]]
+* imports
+
+#+name: imports_std
+#+BEGIN_SRC d
+import
+  sdp.meta,
+  sdp.source.paths_source,
+  std.file,
+  std.path;
+#+END_SRC
+
+* A. get _config file_ (read in) :module:sdp:source_read_config_files:
+** 0. module template
+
+#+BEGIN_SRC d :tangle ../src/sdp/source/read_config_files.d
+/++
+  read configuration files
+ - read config files
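+  - config files: config_local_site & sisu_document_make (toml)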
+ meta_config_files.d ++/ +module sdp.source.read_config_files; +<> +<> +<> +#+END_SRC + +*** 0. read config files (config_local_site & sisu_document_make) toml +**** 1. site configuration + +#+name: meta_config_file_hub +#+BEGIN_SRC d +static template readConfigSite() { + <> + final auto readConfigSite(M,E)(M _manifest, E _env) { + string config_file_str; + string conf_filename = "NONE"; + auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); + auto possible_config_path_locations = _conf_file_details.possible_config_path_locations.config_local_site; + foreach(conf_fn; [_conf_file_details.config_filename_site_toml]) { + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_fn)).array; + conf_filename = conf_fn; + if (config_file_str.length > 0) { + // conf_filename = conf_fn; + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + // writeln(__LINE__, ": found: ", conf_file, " in ", pth); + } + config_file_str = conf_file.readText; + break; + } + } catch (ErrnoException ex) { + } catch (FileException ex) { + } + } + if (config_file_str.length > 0) { break; } + } + struct _ConfContent { + string filename() { + return conf_filename; + } + string filetype() { + return conf_filename.extension.chompPrefix("."); + } + auto content() { + return config_file_str; + } + } + return _ConfContent(); + } +} +#+END_SRC + +**** 2. document make/config + +#+name: meta_config_file_hub +#+BEGIN_SRC d +static template readConfigDoc() { + <> + final auto readConfigDoc(M,E)(M _manifest, E _env) { + string config_file_str; + string conf_filename = "NONE"; + auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); + auto possible_config_path_locations = _conf_file_details.possible_config_path_locations.sisu_document_make; + foreach(conf_fn; [_conf_file_details.config_filename_document_toml]) { + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_fn)).array; + conf_filename = conf_fn; + if (config_file_str.length > 0) { + // conf_filename = conf_fn; + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + } + config_file_str = conf_file.readText; + break; + } + } + catch (ErrnoException ex) { + } + catch (FileException ex) { + } + } + if (config_file_str.length > 0) { break; } + } + struct _ConfContent { + string filename() { + return conf_filename; + } + string filetype() { + return conf_filename.extension.chompPrefix("."); + } + auto content() { + return config_file_str; + } + } + return _ConfContent(); + } +} +#+END_SRC + +** A. TOML +*** 1. 
TOML read config files (config_local_site & sisu_document_make) :file:config: +**** TOML config_local_site + +#+name: meta_config_file_in +#+BEGIN_SRC d +static template configReadInSiteTOML() { + <> + final string configReadInSiteTOML(M,E)(M manifest, E env) { + auto conf_file_details = ConfigFilePaths!()(manifest, env); + string conf_toml = conf_file_details.config_filename_site_toml; + auto possible_config_path_locations = conf_file_details.possible_config_path_locations.config_local_site; + string config_file_str; + debug(io) { + writeln("WARNING (io debug) in config filename: ", conf_toml); + writeln("WARNING (io debug) in config possible path locations: ", possible_config_path_locations); + } + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_toml)).array; + if (config_file_str.length > 0) { + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + } + config_file_str = conf_file.readText; + break; + } + } + catch (ErrnoException ex) { + } + catch (FileException ex) { + } + } + return config_file_str; + } +} +#+END_SRC + +**** TOML sisu_document_make + +#+name: meta_config_file_in +#+BEGIN_SRC d +static template configReadInDocTOML() { + <> + final string configReadInDocTOML(M,E)(M manifest, E env) { + auto conf_file_details = ConfigFilePaths!()(manifest, env); + string conf_toml = conf_file_details.config_filename_document_toml; + auto possible_config_path_locations = conf_file_details.possible_config_path_locations.sisu_document_make; + string config_file_str; + debug(io) { + writeln("WARNING (io debug) in config filename: ", conf_toml); + writeln("WARNING (io debug) in config possible path locations: ", possible_config_path_locations); + } + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_toml)).array; + if (config_file_str.length > 0) { + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + } + config_file_str = conf_file.readText; + break; + } + } + catch (ErrnoException ex) { + } + catch (FileException ex) { + } + } + return config_file_str; + } +} +#+END_SRC + +*** 2. TOML config files get + +#+name: meta_config_file_toml +#+BEGIN_SRC d +static template configTOML() { + import toml; // + <> + auto configTOML(string configuration, string conf_toml_filename) { + TOMLDocument _toml_conf; + try { + _toml_conf = parseTOML(configuration); // parseTOML(cast(string)(configuration)); + } + catch(ErrnoException e) { + stderr.writeln("Toml problem with content for ", conf_toml_filename); + stderr.writeln(e.msg); + } + return _toml_conf; + } +} +#+END_SRC + +*** 3. 
TOML config (config_local_site & sisu_document_make) :file:config:hub:
+
+#+name: meta_config_file_hub
+#+BEGIN_SRC d
+static template configReadSiteTOML() {
+  <>
+  import toml;
+  final auto configReadSiteTOML(M,E)(M _manifest, E _env) {
+    auto _configuration = configReadInSiteTOML!()(_manifest, _env);
+    auto _conf_file_details = ConfigFilePaths!()(_manifest, _env);
+    string _conf_toml = _conf_file_details.config_filename_site_toml;
+    auto _toml_conf = configTOML!()(_configuration, _conf_toml);
+    return _toml_conf;
+  }
+}
+static template configReadDocTOML() {
+  <>
+  import toml;
+  final auto configReadDocTOML(M,E)(M _manifest, E _env) {
+    auto _configuration = configReadInDocTOML!()(_manifest, _env);
+    auto _conf_file_details = ConfigFilePaths!()(_manifest, _env);
+    string _conf_toml = _conf_file_details.config_filename_document_toml;
+    auto _toml_conf = configTOML!()(_configuration, _conf_toml);
+    return _toml_conf;
+  }
+}
+#+END_SRC
+
+* B. get _markup source_, read file :module:sdp:source_read_source_files:
+** 0. module template (includes tuple)
+
+#+BEGIN_SRC d :tangle ../src/sdp/source/read_source_files.d
+/++
+  module sdp.source.read_source_files
+ - open markup files
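+  - split raw markup into document header and body content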
+ - if master file scan for addional files to import/insert ++/ +module sdp.source.read_source_files; +static template SiSUrawMarkupContent() { + import + sdp.meta.rgx; + <> + mixin SiSUrgxInit; + static auto rgx = Rgx(); + string[] _images=[]; + auto _extract_images(S)(S content_block) { + string[] images_; + auto _content_block = content_block.to!string; + if (auto m = _content_block.matchAll(rgx.image)) { + images_ ~= m.captures[1].to!string; + } + return images_; + } + auto rawsrc = RawMarkupContent(); + auto SiSUrawMarkupContent(O,Fn)(O _opt_action, Fn fn_src) { + auto _0_header_1_body_content_2_insert_filelist_tuple + = rawsrc.sourceContentSplitIntoHeaderAndBody(_opt_action, rawsrc.sourceContent(fn_src), fn_src); + return _0_header_1_body_content_2_insert_filelist_tuple; + } + struct RawMarkupContent { + final sourceContent(in string fn_src) { + auto raw = MarkupRawUnit(); + auto source_txt_str + = raw.markupSourceReadIn(fn_src); + return source_txt_str; + } + final auto sourceContentSplitIntoHeaderAndBody(O)(O _opt_action, in string source_txt_str, in string fn_src="") { + auto raw = MarkupRawUnit(); + string[] insert_file_list; + string[] images_list; + auto t + = raw.markupSourceHeaderContentRawLineTupleArray(source_txt_str); + auto header_raw = t[0]; + auto sourcefile_body_content = t[1]; + if (fn_src.match(rgx.src_fn_master)) { // filename with path needed if master file (.ssm) not otherwise + auto ins = Inserts(); + auto tu + = ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src); + static assert(!isTypeTuple!(tu)); + sourcefile_body_content = tu[0]; + insert_file_list = tu[1].dup; + images_list = tu[2].dup; + } else if (_opt_action.source || _opt_action.sisupod) { + auto ins = Inserts(); + auto tu + = ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src); + static assert(!isTypeTuple!(tu)); + images_list = tu[2].dup; + } + t = tuple( + header_raw, + sourcefile_body_content, + insert_file_list, + images_list + ); + static assert(t.length==4); + return t; + } + } + struct MarkupRawUnit { + import std.file; + <> + <> + <> + <> + <> + <> + } + struct Inserts { + auto scan_subdoc_source(O)( + O _opt_action, + char[][] markup_sourcefile_insert_content, + string fn_src + ) { + mixin SiSUrgxInitFlags; + <> + foreach (line; markup_sourcefile_insert_content) { + <> + } // end src subdoc (inserts) loop + <> + } + auto scan_master_src_for_insert_files_and_import_content(O)( + O _opt_action, + char[][] sourcefile_body_content, + string fn_src + ) { + import std.algorithm; + mixin SiSUrgxInitFlags; + <> + foreach (line; sourcefile_body_content) { + <> + } // end src doc loop + <> + } + } +} +#+END_SRC + +** get markup source, read file :source:markup: +*** read file, source string [#A] :string: + +#+name: meta_markup_source_raw_read_file_source_string +#+BEGIN_SRC d +final private string readInMarkupSource(in char[] fn_src) { + enforce( + exists(fn_src)!=0, + "file not found: «" ~ + fn_src ~ "»" + ); + string source_txt_str; + try { + if (exists(fn_src)) { + debug(io) { + writeln("in src, markup source file found: ", fn_src); + } + source_txt_str = fn_src.readText; + } + } + catch (ErrnoException ex) { + } + catch (UTFException ex) { + // Handle validation errors + } + catch (FileException ex) { + // Handle errors + } + std.utf.validate(source_txt_str); + return source_txt_str; +} +#+END_SRC + +*** document header & content, array.length == 2 [#A] :array: + +here you split document header and 
body, an array.length == 2 +split is on first match of level A~ (which is required) + +#+name: meta_markup_source_raw_doc_header_and_content_split +#+BEGIN_SRC d +final private char[][] header0Content1(in string src_text) { + /+ split string on _first_ match of "^:?A~\s" into [header, content] array/tuple +/ + char[][] header_and_content; + auto m = (cast(char[]) src_text).matchFirst(rgx.heading_a); + header_and_content ~= m.pre; + header_and_content ~= m.hit ~ m.post; + assert(header_and_content.length == 2, + "document markup is broken, header body split == " + ~ header_and_content.length.to!string + ~ "; (header / body array split should == 2 (split is on level A~))" + ); + return header_and_content; +} +#+END_SRC + +*** source line array :array: + +#+name: meta_markup_source_raw_source_line_array +#+BEGIN_SRC d +final private char[][] markupSourceLineArray(in char[] src_text) { + char[][] source_line_arr + = (cast(char[]) src_text).split(rgx.newline_eol_strip_preceding); + return source_line_arr; +} +#+END_SRC + +*** source content raw line array :array: +- used for regular .sst files; master .ssm files and; .ssi inserts +- regex is passed for relevant enforce match + +**** read in file + +#+name: meta_markup_source_raw_read_in_file +#+BEGIN_SRC d +auto markupSourceReadIn(in string fn_src) { + static auto rgx = Rgx(); + enforce( + fn_src.match(rgx.src_pth_sst_or_ssm), + "not a sisu markup filename: «" ~ + fn_src ~ "»" + ); + auto source_txt_str = readInMarkupSource(fn_src); + return source_txt_str; +} +#+END_SRC + +**** tuple (a) header, (b) body content, (c) file insert list & (d) image list? + +- header +- body content +- file insert list +- [image list?] + +#+name: meta_markup_source_raw_tuple_of_header_and_body +#+BEGIN_SRC d +auto markupSourceHeaderContentRawLineTupleArray(in string source_txt_str) { + string[] file_insert_list = []; + string[] images_list = []; + auto hc = header0Content1(source_txt_str); + auto header = hc[0]; + char[] source_txt = hc[1]; + auto source_line_arr = markupSourceLineArray(source_txt); + auto t = tuple( + header, + source_line_arr, + file_insert_list, + images_list + ); + return t; +} +#+END_SRC + +**** get insert source line array + +#+name: meta_markup_source_raw_get_insert_source_line_array +#+BEGIN_SRC d +final char[][] getInsertMarkupSourceContentRawLineArray( + in char[] fn_src_insert, + Regex!(char) rgx_file +) { + enforce( + fn_src_insert.match(rgx_file), + "not a sisu markup filename: «" ~ + fn_src_insert ~ "»" + ); + auto source_txt_str = readInMarkupSource(fn_src_insert); + auto source_line_arr = markupSourceLineArray(source_txt_str); + return source_line_arr; +} +#+END_SRC + +** get markup source, master file & inserts :masterfile:inserts: +[[./sdp.org][sdp]] [[./][org/]] + +*** scan inserts (sub-document) source :scan_insert_src: +**** scan subdoc source + +#+name: meta_inserts_scan +#+BEGIN_SRC d +char[][] contents_insert; +auto type1 = flags_type_init; +auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm); +auto markup_src_file_path = fn_pth_full.captures[1]; +#+END_SRC + +**** loop insert (sub-document) + +#+name: meta_inserts_scan_loop +#+BEGIN_SRC d +if (type1["curly_code"] == 1) { + type1["header_make"] = 0; + type1["header_meta"] = 0; + if (line.matchFirst(rgx.block_curly_code_close)) { + type1["curly_code"] = 0; + } + contents_insert ~= line; +} else if (line.matchFirst(rgx.block_curly_code_open)) { + type1["curly_code"] = 1; + type1["header_make"] = 0; + type1["header_meta"] = 0; + contents_insert ~= line; +} else if 
(type1["tic_code"] == 1) { + type1["header_make"] = 0; + type1["header_meta"] = 0; + if (line.matchFirst(rgx.block_tic_close)) { + type1["tic_code"] = 0; + } + contents_insert ~= line; +} else if (line.matchFirst(rgx.block_tic_code_open)) { + type1["tic_code"] = 1; + type1["header_make"] = 0; + type1["header_meta"] = 0; + contents_insert ~= line; +} else if ( + (type1["header_make"] == 1) + && line.matchFirst(rgx.native_header_sub) +) { + type1["header_make"] = 1; + type1["header_meta"] = 0; +} else if ( + (type1["header_meta"] == 1) + && line.matchFirst(rgx.native_header_sub) +) { + type1["header_meta"] = 1; + type1["header_make"] = 0; +} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) { + type1["header_make"] = 0; + type1["header_meta"] = 0; + auto insert_fn = m.captures[2]; + auto insert_sub_pth = m.captures[1]; + auto fn_src_insert + = chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array; + auto raw = MarkupRawUnit(); + auto markup_sourcesubfile_insert_content + = raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts); + debug(insert_file) { + tell_l("red", line); + tell_l("red", fn_src_insert); + tell_l("fuchsia", "ERROR"); + writeln( + " length contents insert array: ", + markup_sourcesubfile_insert_content.length + ); + } + if (_opt_action.source || _opt_action.sisupod) { + _images ~= _extract_images(markup_sourcesubfile_insert_content); + } + auto ins = Inserts(); + /+ + - 1. load file + - 2. read lines + - 3. scan lines + - a. if filename insert, and insert filename + - repeat 1 + - b. else + - add line to new array; + - build image list, search for any image files to add to image list + +/ +} else { + type1["header_make"] = 0; + type1["header_meta"] = 0; + contents_insert ~= line; // images to extract for image list? 
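+    // when source or sisupod output is requested (_opt_action.source || _opt_action.sisupod),
+    // also scan this line for image references and add any matches to the image list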
+ if (_opt_action.source || _opt_action.sisupod) { + auto _image_linelist = _extract_images(line); + if (_image_linelist.length > 0) { + _images ~= _image_linelist; + } + } +} +#+END_SRC + +**** post loop + +#+name: meta_inserts_scan_post +#+BEGIN_SRC d +auto t = tuple( + contents_insert, + _images +); +return t; +#+END_SRC + +*** scan document source :scan_src: +**** scan doc source + +#+name: meta_master_doc_scan_for_insert_filenames +#+BEGIN_SRC d +char[][] contents; +auto type = flags_type_init; +auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm); +auto markup_src_file_path = fn_pth_full.captures[1]; +char[][] contents_insert; +string[] _images =[]; +string[] insert_file_list =[]; +#+END_SRC + +**** include inserts: _loop master_ scan for inserts (insert documents) + +#+name: meta_master_doc_scan_for_insert_filenames_loop +#+BEGIN_SRC d +if (type["curly_code"] == 1) { + if (line.matchFirst(rgx.block_curly_code_close)) { + type["curly_code"] = 0; + } + contents ~= line; +} else if (line.matchFirst(rgx.block_curly_code_open)) { + type["curly_code"] = 1; + contents ~= line; +} else if (type["tic_code"] == 1) { + if (line.matchFirst(rgx.block_tic_close)) { + type["tic_code"] = 0; + } + contents ~= line; +} else if (line.matchFirst(rgx.block_tic_code_open)) { + type["tic_code"] = 1; + contents ~= line; +} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) { + auto insert_fn = m.captures[2]; + auto insert_sub_pth = m.captures[1]; + auto fn_src_insert + = chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array; + insert_file_list ~= fn_src_insert.to!string; + auto raw = MarkupRawUnit(); + /+ TODO +/ + auto markup_sourcefile_insert_content + = raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts); + debug(insert_file) { + tell_l("red", line); + tell_l("red", fn_src_insert); + writeln( + " length contents insert array: ", + markup_sourcefile_insert_content.length + ); + } + auto ins = Inserts(); + auto contents_insert_tu = ins.scan_subdoc_source( + _opt_action, + markup_sourcefile_insert_content, + fn_src_insert.to!string + ); + contents ~= contents_insert_tu[0]; // images to extract for image list? + if (_opt_action.source || _opt_action.sisupod) { + auto _image_linelist = _extract_images(contents_insert_tu[0]); + if (_image_linelist.length > 0) { + _images ~= _image_linelist; + } + } + /+ + - 1. load file + - 2. read lines + - 3. scan lines + - a. if filename insert, and insert filename + - repeat 1 + - b. else + - add line to new array; + - build image list, search for any image files to add to image list + +/ +} else { + contents ~= line; + if (_opt_action.source || _opt_action.sisupod) { + auto _image_linelist = _extract_images(line); + if (_image_linelist.length > 0) { + _images ~= _image_linelist; + } + } +} +#+END_SRC + +**** post loop + +#+name: meta_master_doc_scan_for_insert_filenames_post +#+BEGIN_SRC d +string[] images = []; +foreach(i; uniq(_images.sort())) { + images ~= i; +} +debug(insert_file) { + writeln(__LINE__); + writeln(contents.length); +} +auto t = tuple( + contents, + insert_file_list, + images +); +return t; +#+END_SRC + +* __END__ -- cgit v1.2.3