diff --git a/lib/core/pathlib/path_copy.v b/lib/core/pathlib/path_copy.v
index 496a457b..8b71d993 100644
--- a/lib/core/pathlib/path_copy.v
+++ b/lib/core/pathlib/path_copy.v
@@ -61,4 +61,5 @@ pub fn (mut path Path) copy(args_ CopyArgs) ! {
dest.check()
}
+
}
diff --git a/lib/core/pathlib/path_list.v b/lib/core/pathlib/path_list.v
index 34048873..79a00bb1 100644
--- a/lib/core/pathlib/path_list.v
+++ b/lib/core/pathlib/path_list.v
@@ -126,10 +126,17 @@ fn (mut path Path) list_internal(args ListArgsInternal) ![]Path {
}
}
- mut addthefile := true
- for r in args.regex {
- if !(r.matches_string(item)) {
- addthefile = false
+ mut addthefile := false
+ // If no regex patterns provided, include all files
+ if args.regex.len == 0 {
+ addthefile = true
+ } else {
+ // Include file if ANY regex pattern matches (OR operation)
+ for r in args.regex {
+ if r.matches_string(item) {
+ addthefile = true
+ break
+ }
}
}
if addthefile && !args.dirs_only {
diff --git a/lib/core/pathlib/readme.md b/lib/core/pathlib/readme.md
index c8f8522f..5d76d456 100644
--- a/lib/core/pathlib/readme.md
+++ b/lib/core/pathlib/readme.md
@@ -43,7 +43,52 @@ if path.is_dir() { /* is directory */ }
if path.is_link() { /* is symlink */ }
```
-## 3. Common File Operations
+## 3. File Listing and Filtering
+
+```v
+// List all files in a directory (recursive by default)
+mut dir := pathlib.get('/some/dir')
+mut pathlist := dir.list()!
+
+// List only files matching specific extensions using regex
+mut pathlist_images := dir.list(
+ regex: [r'.*\.png$', r'.*\.jpg$', r'.*\.svg$', r'.*\.jpeg$'],
+ recursive: true
+)!
+
+// List only directories
+mut pathlist_dirs := dir.list(
+ dirs_only: true,
+ recursive: true
+)!
+
+// List only files
+mut pathlist_files := dir.list(
+ files_only: true,
+ recursive: false // only in current directory
+)!
+
+// Include symlinks in the results
+mut pathlist_with_links := dir.list(
+ include_links: true
+)!
+
+// Don't ignore hidden files (those starting with . or _)
+mut pathlist_all := dir.list(
+ ignoredefault: false
+)!
+
+// Access the resulting paths
+for path in pathlist.paths {
+ println(path.path)
+}
+
+// Perform operations on all paths in the list
+pathlist.copy('/destination/dir')!
+pathlist.delete()!
+```
+
+## 4. Common File Operations
```v
// Empty a directory
diff --git a/lib/data/markdownparser/elements/base.v b/lib/data/markdownparser/elements/base.v
index 371f2c35..fd0e5495 100644
--- a/lib/data/markdownparser/elements/base.v
+++ b/lib/data/markdownparser/elements/base.v
@@ -29,9 +29,9 @@ fn (mut self DocBase) process_base() ! {
fn (mut self DocBase) parent_doc() &Doc {
mut pd := self.parent_doc_ or {
e := doc_new() or { panic('bug') }
+ self.parent_doc_ = &e
&e
}
-
return pd
}
diff --git a/lib/data/markdownparser/elements/base_add_methods.v b/lib/data/markdownparser/elements/base_add_methods.v
index 7a9b38d3..b033bc75 100644
--- a/lib/data/markdownparser/elements/base_add_methods.v
+++ b/lib/data/markdownparser/elements/base_add_methods.v
@@ -117,6 +117,18 @@ pub fn (mut base DocBase) frontmatter_new(mut docparent ?&Doc, content string) &
return &fm
}
+pub fn (mut base DocBase) frontmatter2_new(mut docparent ?&Doc, content string) &Frontmatter2 {
+ mut fm := Frontmatter2{
+ content: content
+ type_name: 'frontmatter2'
+ parent_doc_: docparent
+ }
+
+ base.children << fm
+ return &fm
+}
+
+
pub fn (mut base DocBase) link_new(mut docparent ?&Doc, content string) &Link {
mut a := Link{
content: content
diff --git a/lib/data/markdownparser/elements/doc.v b/lib/data/markdownparser/elements/doc.v
index 14629b84..990d042c 100644
--- a/lib/data/markdownparser/elements/doc.v
+++ b/lib/data/markdownparser/elements/doc.v
@@ -79,3 +79,13 @@ pub fn (self Doc) pug() !string {
return ":markdown-it(linkify langPrefix='highlight-')\n${texttools.indent(self.markdown()!,
' ')}"
}
+
+
+pub fn (self Doc) frontmatter2() !&Frontmatter2 {
+ for item in self.children_recursive(){
+ if item is Frontmatter2{
+ return item
+ }
+ }
+ return error("can't find frontmatter in ${self}")
+}
diff --git a/lib/data/markdownparser/elements/element_frontmatter2.v b/lib/data/markdownparser/elements/element_frontmatter2.v
new file mode 100644
index 00000000..7d65a440
--- /dev/null
+++ b/lib/data/markdownparser/elements/element_frontmatter2.v
@@ -0,0 +1,80 @@
+module elements
+
+
+// Frontmatter2 struct
+@[heap]
+pub struct Frontmatter2 {
+ DocBase
+pub mut:
+ args map[string]string
+}
+
+pub fn (mut self Frontmatter2) process() !int {
+ if self.processed {
+ return 0
+ }
+ for line in self.content.split_into_lines(){
+ if line.trim_space()==""{
+ continue
+ }
+ if line.contains(":"){
+			splitted:=line.split_nth(":", 2)
+ if splitted.len !=2{
+ return error("syntax error in frontmatter 2 in \n${self.content}")
+ }
+ pre:=splitted[0].trim_space()
+ post:=splitted[1].trim_space().trim(" '\"").trim_space()
+ self.args[pre]=post
+ }
+ }
+ // Clear content after parsing
+ self.content = ''
+ self.processed = true
+ return 1
+}
+
+pub fn (self Frontmatter2) markdown() !string {
+ mut out := '---\n'
+ for key, value in self.args{
+ if value.contains(" "){
+ out += '${key} : \'${value}\'\n'
+ }else{
+ out += '${key} : ${value}\n'
+ }
+ }
+ out += '---\n'
+ return out
+}
+
+pub fn (self Frontmatter2) html() !string {
+	mut out := '<div class="Frontmatter2">\n'
+	for key, value in self.args {
+		out += '<p><strong>${key}</strong>: ${value}</p>\n'
+	}
+	out += '</div>'
+	return out
+}
+
+pub fn (self Frontmatter2) pug() !string {
+ mut out := ''
+ out += 'div(class="Frontmatter2")\n'
+ for key, value in self.args {
+ out += ' p\n'
+ out += ' strong ${key}: ${value}\n'
+ }
+ return out
+}
+
+pub fn (self Frontmatter2) get_string(key string) !string {
+ // Retrieve a value using a query string
+ return self.args[key] or { return error('Key "${key}" not found in Frontmatter2') }
+}
+
+pub fn (self Frontmatter2) get_bool(key string) !bool {
+ return self.get_string(key)!.bool()
+}
+
+pub fn (self Frontmatter2) get_int(key string) !int {
+ return self.get_string(key)!.int()
+}
diff --git a/lib/data/markdownparser/elements/parser_paragraph.v b/lib/data/markdownparser/elements/parser_paragraph.v
index 66d91591..c5cec182 100644
--- a/lib/data/markdownparser/elements/parser_paragraph.v
+++ b/lib/data/markdownparser/elements/parser_paragraph.v
@@ -8,14 +8,26 @@ import freeflowuniverse.herolib.core.texttools
// adds the found links, text, comments to the paragraph
fn (mut paragraph Paragraph) paragraph_parse() ! {
mut parser := parser_char_new_text(paragraph.content)
-
- // mut d := para.doc or { panic('no doc') }
- paragraph.text_new(mut paragraph.parent_doc(), '') // the initial one
+ // Safely get the parent document
+ mut parent_doc := paragraph.parent_doc_ or {
+ // If parent_doc is not set, create a new one
+		mut new_doc := doc_new() or { panic('Failed to create new doc') }
+ paragraph.parent_doc_ = &new_doc
+ &new_doc
+ }
+
+ // Create initial text element to ensure we have at least one child
+ paragraph.text_new(mut parent_doc, '') // the initial one
mut potential_link := false
mut link_in_link := false
for {
+ // Ensure we have at least one child before accessing last()
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut parent_doc, '')
+ }
+
mut llast := paragraph.children.last()
mut char_ := parser.char_current()
@@ -25,12 +37,16 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
if mut llast is Def {
if (char_ == '' || char_ == ' ' || char_ == '\n') && parser.char_prev() != '*' {
if llast.content.len < 3 {
- paragraph.children.pop()
- mut llast2 := paragraph.children.last()
- if mut llast2 is Text {
+ paragraph.children.pop()
+ // Ensure we have at least one child after popping
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut parent_doc, '')
+ }
+ mut llast2 := paragraph.children.last()
+ if mut llast2 is Text {
llast2.content += llast.content + char_
} else {
- paragraph.text_new(mut paragraph.parent_doc(), llast.content + char_)
+ paragraph.text_new(mut parent_doc, llast.content + char_)
}
parser.next()
char_ = ''
@@ -38,7 +54,7 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
} else {
// means we did find a def, we can stop
// console.print_debug(" -- end def")
- paragraph.text_new(mut paragraph.parent_doc(), char_)
+ paragraph.text_new(mut parent_doc, char_)
parser.next()
char_ = ''
continue
@@ -48,13 +64,17 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
// console.print_debug(' -- no def: ${char_}')
paragraph.children.pop()
// console.print_debug(' -- no def: ${paragraph.children.last()}')
+ // Ensure we have at least one child after popping
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut parent_doc, '')
+ }
mut llast2 := paragraph.children.last()
if mut llast2 is Text {
llast2_content := llast2.content
llast2.content = llast2_content + llast.content + char_
// llast2.content += llast.content + char_
} else {
- paragraph.text_new(mut paragraph.parent_doc(), llast.content + char_)
+ paragraph.text_new(mut parent_doc, llast.content + char_)
}
parser.next()
char_ = ''
@@ -73,7 +93,7 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
if char_ == '\n' {
if llast.singleline {
// means we are at end of line of a single line comment
- paragraph.text_new(mut paragraph.parent_doc(), '\n')
+ paragraph.text_new(mut parent_doc, '\n')
parser.next()
char_ = ''
continue
@@ -87,7 +107,7 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
llast.content += char_ // need to add current content
// need to move forward not to have the 3 next
parser.forward(3)
- paragraph.text_new(mut paragraph.parent_doc(), '')
+ paragraph.text_new(mut parent_doc, '')
parser.next()
char_ = ''
continue
@@ -109,7 +129,11 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
mut c := llast.content
paragraph.children.delete_last() // remove the link
- paragraph.text_new(mut paragraph.parent_doc(), '')
+ paragraph.text_new(mut parent_doc, '')
+ // Ensure we have at least one child after deleting
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut parent_doc, '')
+ }
llast = paragraph.children.last() // fetch last again
llast_content := llast.content
llast.content = llast_content + c + char_ // need to add current content
@@ -124,13 +148,13 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
// end of link
if link_in_link {
// the parsed content was actually the child links in the description
- llast.link_new(mut paragraph.parent_doc(), '${llast.content.trim_string_left('[')})')
+ llast.link_new(mut parent_doc, '${llast.content.trim_string_left('[')})')
link_in_link = false
potential_link = false
continue
} else {
llast.content += char_ // need to add current content
- paragraph.text_new(mut paragraph.parent_doc(), '')
+ paragraph.text_new(mut parent_doc, '')
parser.next()
char_ = ''
potential_link = false
@@ -142,7 +166,11 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
if mut llast is Text {
if char_ != '' {
if char_ == '*' {
- paragraph.def_new(mut paragraph.parent_doc(), '*')
+ paragraph.def_new(mut parent_doc, '*')
+ // Ensure we have at least one child after adding a definition
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut parent_doc, '')
+ }
parser.next()
char_ = ''
continue
@@ -153,7 +181,12 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
is_url := llast.content.ends_with(':') && totry == '//'
if parser.text_next_is(totry, 0) && !is_url {
// we are now in comment
- paragraph.comment_new(mut paragraph.parent_doc(), '')
+ paragraph.comment_new(mut parent_doc, '')
+ // Ensure we have at least one child after adding a comment
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut parent_doc, '')
+ continue
+ }
mut llast2 := paragraph.children.last()
if totry == '//' {
if mut llast2 is Comment {
@@ -169,6 +202,11 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
for totry in ['![', '['] {
if parser.text_next_is(totry, 0) {
paragraph.link_new(mut paragraph.parent_doc(), totry)
+ // Ensure we have at least one child after adding a link
+ if paragraph.children.len == 0 {
+ paragraph.text_new(mut paragraph.parent_doc(), '')
+ continue
+ }
parser.forward(totry.len - 1)
char_ = ''
break
@@ -176,7 +214,11 @@ fn (mut paragraph Paragraph) paragraph_parse() ! {
}
}
}
- llast.content += char_
+ // Make sure llast is still valid before appending to it
+ if paragraph.children.len > 0 {
+ llast = paragraph.children.last()
+ llast.content += char_
+ }
parser.next()
}
paragraph.remove_empty_children()
diff --git a/lib/data/markdownparser/parsers/parse_doc.v b/lib/data/markdownparser/parsers/parse_doc.v
index f48cf4ed..cd7a8645 100644
--- a/lib/data/markdownparser/parsers/parse_doc.v
+++ b/lib/data/markdownparser/parsers/parse_doc.v
@@ -77,6 +77,18 @@ pub fn parse_doc(mut doc elements.Doc) ! {
continue
}
+		// NOTE(review): V only smart-casts on a single `is` check, so
+		// `a is X || a is Y` leaves llast un-cast and `.content` unresolved;
+		// handle Frontmatter and Frontmatter2 in separate branches.
+		if mut llast is elements.Frontmatter {
+			if trimmed_line == '---' || trimmed_line == '+++' {
+				parser.next_start_lf()!
+				parser.frontmatter = true
+				continue
+			}
+			llast.content += '${line}\n'
+			parser.next()
+			continue
+		}
+		if mut llast is elements.Frontmatter2 {
+			if trimmed_line == '---' || trimmed_line == '+++' {
+				parser.next_start_lf()!
+				parser.frontmatter = true
+				continue
+			}
+			llast.content += '${line}\n'
+			parser.next()
+			continue
+		}
+
+
if mut llast is elements.Paragraph {
if elements.line_is_list(line) {
doc.list_new(mut &doc, line)!
@@ -99,6 +111,19 @@ pub fn parse_doc(mut doc elements.Doc) ! {
continue
}
+		if line.starts_with('+++') && parser.frontmatter == false{
+			doc.frontmatter_new(mut &doc, '')
+			parser.next()
+			continue
+		}
+
+		if line.starts_with('---') && parser.frontmatter == false{
+			doc.frontmatter2_new(mut &doc, '')
+			parser.next()
+			continue
+		}
+
+
// process headers (# is 35)
if line.len > 0 && line[0] == 35 {
mut d := 0
diff --git a/lib/data/markdownparser/parsers/parser_line.v b/lib/data/markdownparser/parsers/parser_line.v
index 5c08a148..140af9d1 100644
--- a/lib/data/markdownparser/parsers/parser_line.v
+++ b/lib/data/markdownparser/parsers/parser_line.v
@@ -18,6 +18,7 @@ mut:
lines []string
errors []ParserError
endlf bool // if there is a linefeed or \n at end
+ frontmatter bool
}
fn parser_line_new(mut doc elements.Doc) !Parser {
@@ -25,26 +26,6 @@ fn parser_line_new(mut doc elements.Doc) !Parser {
doc: doc
}
- // Parse frontmatter if present
- if doc.content.starts_with('+++') {
- mut frontmatter_content := ''
- mut lines := doc.content.split_into_lines()
- lines = lines[1..].clone() // Skip the opening '+++' with explicit clone
-
- for line in lines {
- if line.trim_space() == '+++' {
- // End of frontmatter
- doc.content = lines.join('\n') // Update content to exclude frontmatter
- break
- }
- frontmatter_content += '${line}\n'
- }
-
- // Create and process the Frontmatter element
- mut frontmatter := doc.frontmatter_new(mut &doc, frontmatter_content)
- frontmatter.process() or { return error('Failed to parse frontmatter: ${err.msg()}') }
- }
-
doc.paragraph_new(mut parser.doc, '')
parser.lines = doc.content.split_into_lines()
if doc.content.ends_with('\n') {
diff --git a/lib/data/markdownparser/readme.md b/lib/data/markdownparser/readme.md
index 133a359d..a139c397 100644
--- a/lib/data/markdownparser/readme.md
+++ b/lib/data/markdownparser/readme.md
@@ -48,6 +48,7 @@
- Html
- cannot have children
- markdown() -> returns the original
+- Frontmatter
- Paragraph
- has children which were parsed with the char parser
- children
diff --git a/lib/web/docusaurus/cfg/main.json b/lib/web/docusaurus/cfg/main.json
index 44afc4d5..8fdf9c69 100644
--- a/lib/web/docusaurus/cfg/main.json
+++ b/lib/web/docusaurus/cfg/main.json
@@ -13,5 +13,10 @@
},
"buildDest":["root@info.ourworld.tf:/root/hero/www/info"],
"buildDestDev":["root@info.ourworld.tf:/root/hero/www/infodev"],
+ "import":[{
+ "url":"",
+ "dest":"",
+ "visible":true
+ }],
"copyright": "someone"
}
diff --git a/lib/web/docusaurus/config.v b/lib/web/docusaurus/config.v
index ba8f21ec..813151bf 100644
--- a/lib/web/docusaurus/config.v
+++ b/lib/web/docusaurus/config.v
@@ -46,8 +46,18 @@ pub mut:
build_dest []string @[json: 'buildDest']
build_dest_dev []string @[json: 'buildDestDev']
copyright string = "someone"
+ to_import []MyImport @[json: 'import']
}
+pub struct MyImport {
+pub mut:
+ url string
+ dest string
+ visible bool
+ replace map[string]string
+}
+
+
// Navbar config structures
pub struct NavbarItem {
pub mut:
diff --git a/lib/web/docusaurus/dsite.v b/lib/web/docusaurus/dsite.v
index 0c2218ca..774c365a 100644
--- a/lib/web/docusaurus/dsite.v
+++ b/lib/web/docusaurus/dsite.v
@@ -4,9 +4,10 @@ import freeflowuniverse.herolib.osal.screen
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.texttools
-import freeflowuniverse.herolib.core.base
+//import freeflowuniverse.herolib.core.base
+import freeflowuniverse.herolib.data.markdownparser
import freeflowuniverse.herolib.develop.gittools
-import json
+//import json
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
@@ -146,13 +147,6 @@ pub fn (mut site DocSite) generate() ! {
console.print_header(' site source on ${site.path_src.path}')
site.check()!
site.template_install()!
- // osal.exec(
- // cmd: '
- // cd ${site.path_build.path}
- // #Docusaurus build --dest-dir ${site.path_publish.path}
- // '
- // retry: 0
- // )!
// Now copy all directories that exist in src to build
for item in ['src', 'static', 'cfg'] {
@@ -167,6 +161,49 @@ pub fn (mut site DocSite) generate() ! {
aa.copy(dest: '${site.path_build.path}/${item}', delete: true)!
}
}
+
+ mut gs := gittools.new()!
+
+ for item in site.config.main.to_import {
+ mypath:=gs.get_path(
+ pull:false,
+ reset:false,
+ url:item.url
+ )!
+ mut mypatho:=pathlib.get(mypath)
+ site.process_md(mut mypatho,item)!
+ }
+}
+
+fn (mut site DocSite) process_md(mut path pathlib.Path, args MyImport)!{
+
+ if path.is_dir(){
+ mut pathlist_images:=path.list(regex: [r'.*\.png$',r'.*\.jpg$',r'.*\.svg$',r'.*\.jpeg$'],recursive:true)!
+ for mut mypatho_img in pathlist_images.paths{
+ //now copy the image to the dest
+ dest:='${site.path_build.path}/docs/${args.dest}/img/${texttools.name_fix(mypatho_img.name())}'
+ //println("image copy: ${dest}")
+ mypatho_img.copy(dest:dest,rsync:false)!
+ }
+
+ mut pathlist:=path.list()!
+ for mut mypatho2 in pathlist.paths{
+ site.process_md(mut mypatho2,args)!
+ }
+ return
+ }
+	// only markdown files can be parsed below; skip other assets
+	// (images were already copied in the directory branch above)
+	if !path.path.to_lower().ends_with('.md') {
+		return
+	}
+	mydest:='${site.path_build.path}/docs/${args.dest}/${texttools.name_fix(path.name())}'
+	mut mydesto:=pathlib.get_file(path:mydest,create:true)!
+
+	mut mymd:=markdownparser.new(path:path.path)!
+	if ! args.visible{
+		// frontmatter is only required when the page must be marked draft
+		mut myfm:=mymd.frontmatter2()!
+		myfm.args["draft"]= 'true'
+	}
+ //println(myfm)
+ //println(mymd.markdown()!)
+ mydesto.write(mymd.markdown()!)!
+ //exit(0)
}
fn (mut site DocSite) template_install() ! {