refactor: Improve site configuration and navigation handling
- Consolidate site configuration loading and parsing
- Refactor navbar and menu item processing logic
- Add console output for configuration steps
- Update copyright year dynamically
- Simplify and clarify parameter handling
- Enhance error handling for missing required parameters
lib/data/atlas/atlas_recursive_link_test.v — new file (177 lines)
@@ -0,0 +1,177 @@
module atlas

import incubaid.herolib.core.pathlib
import os
import json

const test_base = '/tmp/atlas_test'

// Test recursive export with chained cross-collection links
// Setup: Collection A links to B, Collection B links to C
// Expected: When exporting A, it should include pages from B and C
fn test_export_recursive_links() {
    // Create 3 collections with chained links
    col_a_path := '${test_base}/recursive_export/col_a'
    col_b_path := '${test_base}/recursive_export/col_b'
    col_c_path := '${test_base}/recursive_export/col_c'

    os.mkdir_all(col_a_path)!
    os.mkdir_all(col_b_path)!
    os.mkdir_all(col_c_path)!

    // Collection A: links to B
    mut cfile_a := pathlib.get_file(path: '${col_a_path}/.collection', create: true)!
    cfile_a.write('name:col_a')!
    mut page_a := pathlib.get_file(path: '${col_a_path}/page_a.md', create: true)!
    page_a.write('# Page A\n\nThis is page A.\n\n[Link to Page B](col_b:page_b)')!

    // Collection B: links to C
    mut cfile_b := pathlib.get_file(path: '${col_b_path}/.collection', create: true)!
    cfile_b.write('name:col_b')!
    mut page_b := pathlib.get_file(path: '${col_b_path}/page_b.md', create: true)!
    page_b.write('# Page B\n\nThis is page B with link to C.\n\n[Link to Page C](col_c:page_c)')!

    // Collection C: final page
    mut cfile_c := pathlib.get_file(path: '${col_c_path}/.collection', create: true)!
    cfile_c.write('name:col_c')!
    mut page_c := pathlib.get_file(path: '${col_c_path}/page_c.md', create: true)!
    page_c.write('# Page C\n\nThis is the final page in the chain.')!

    // Create Atlas and add all collections
    mut a := new()!
    a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
    a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
    a.add_collection(mut pathlib.get_dir(path: col_c_path)!)!

    // Validate links before export to populate page.links
    a.validate_links()!

    // Export
    export_path := '${test_base}/export_recursive'
    a.export(destination: export_path)!

    // ===== VERIFICATION PHASE =====

    // 1. Verify directory structure exists
    assert os.exists('${export_path}/content'), 'Export content directory should exist'
    assert os.exists('${export_path}/content/col_a'), 'Collection col_a directory should exist'
    assert os.exists('${export_path}/meta'), 'Export meta directory should exist'

    // 2. Verify all pages exist in col_a export directory
    // Note: Exported pages from other collections go to col_a directory
    assert os.exists('${export_path}/content/col_a/page_a.md'), 'page_a.md should be exported'
    assert os.exists('${export_path}/content/col_a/page_b.md'), 'page_b.md from col_b should be included'
    assert os.exists('${export_path}/content/col_a/page_c.md'), 'page_c.md from col_c should be included'

    // 3. Verify page content is correct
    content_a := os.read_file('${export_path}/content/col_a/page_a.md')!
    assert content_a.contains('# Page A'), 'page_a content should have title'
    assert content_a.contains('This is page A'), 'page_a content should have expected text'
    assert content_a.contains('[Link to Page B]'), 'page_a should have link to page_b'

    content_b := os.read_file('${export_path}/content/col_a/page_b.md')!
    assert content_b.contains('# Page B'), 'page_b content should have title'
    assert content_b.contains('This is page B'), 'page_b content should have expected text'
    assert content_b.contains('[Link to Page C]'), 'page_b should have link to page_c'

    content_c := os.read_file('${export_path}/content/col_a/page_c.md')!
    assert content_c.contains('# Page C'), 'page_c content should have title'
    assert content_c.contains('This is the final page'), 'page_c content should have expected text'

    // 4. Verify metadata exists and is valid
    assert os.exists('${export_path}/meta/col_a.json'), 'Metadata file for col_a should exist'

    meta_content := os.read_file('${export_path}/meta/col_a.json')!
    assert meta_content.len > 0, 'Metadata file should not be empty'

    // // Parse metadata JSON and verify structure
    // mut meta := json.decode(map[string]map[string]interface{}, meta_content) or {
    //     panic('Failed to parse metadata JSON: ${err}')
    // }
    // assert meta.len > 0, 'Metadata should have content'
    // assert meta['name'] != none, 'Metadata should have name field'

    // 5. Verify that pages from B and C are NOT exported to separate col_b and col_c directories
    // (they should only be in col_a directory)
    meta_col_b_exists := os.exists('${export_path}/meta/col_b.json')
    meta_col_c_exists := os.exists('${export_path}/meta/col_c.json')
    assert !meta_col_b_exists, 'col_b metadata should not exist (pages copied to col_a)'
    assert !meta_col_c_exists, 'col_c metadata should not exist (pages copied to col_a)'

    // 6. Verify the recursive depth worked
    // All three pages should be accessible through the exported col_a
    assert os.exists('${export_path}/content/col_a/page_a.md'), 'Level 1 page should exist'
    assert os.exists('${export_path}/content/col_a/page_b.md'), 'Level 2 page (via A->B) should exist'
    assert os.exists('${export_path}/content/col_a/page_c.md'), 'Level 3 page (via A->B->C) should exist'

    // 7. Verify that the link chain is properly documented
    // page_a links to page_b, page_b links to page_c
    // The links should be preserved in the exported content
    page_a_content := os.read_file('${export_path}/content/col_a/page_a.md')!
    page_b_content := os.read_file('${export_path}/content/col_a/page_b.md')!
    page_c_content := os.read_file('${export_path}/content/col_a/page_c.md')!

    // Links are preserved with collection:page format
    assert page_a_content.contains('col_b:page_b') || page_a_content.contains('page_b'), 'page_a should reference page_b'

    assert page_b_content.contains('col_c:page_c') || page_b_content.contains('page_c'), 'page_b should reference page_c'

    println('✓ Recursive cross-collection export test passed')
    println(' - All 3 pages exported to col_a directory (A -> B -> C)')
    println(' - Content verified for all pages')
    println(' - Metadata validated')
    println(' - Link chain preserved')
}

// Test recursive export with cross-collection images
// Setup: Collection A links to image in Collection B
// Expected: Image should be copied to col_a export directory
fn test_export_recursive_with_images() {
    col_a_path := '${test_base}/recursive_img/col_a'
    col_b_path := '${test_base}/recursive_img/col_b'

    os.mkdir_all(col_a_path)!
    os.mkdir_all(col_b_path)!
    os.mkdir_all('${col_a_path}/img')!
    os.mkdir_all('${col_b_path}/img')!

    // Collection A with local image
    mut cfile_a := pathlib.get_file(path: '${col_a_path}/.collection', create: true)!
    cfile_a.write('name:col_a')!

    mut page_a := pathlib.get_file(path: '${col_a_path}/page_a.md', create: true)!
    page_a.write('# Page A\n\n\n\n[Link to B](col_b:page_b)')!

    // Create local image
    os.write_file('${col_a_path}/img/local.png', 'fake png data')!

    // Collection B with image and linked page
    mut cfile_b := pathlib.get_file(path: '${col_b_path}/.collection', create: true)!
    cfile_b.write('name:col_b')!

    mut page_b := pathlib.get_file(path: '${col_b_path}/page_b.md', create: true)!
    page_b.write('# Page B\n\n')!

    // Create image in collection B
    os.write_file('${col_b_path}/img/b_image.jpg', 'fake jpg data')!

    // Create Atlas
    mut a := new()!
    a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
    a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!

    // Validate and export
    a.validate_links()!
    export_path := '${test_base}/export_recursive_img'
    a.export(destination: export_path)!

    // Verify pages exported
    assert os.exists('${export_path}/content/col_a/page_a.md'), 'page_a should exist'
    assert os.exists('${export_path}/content/col_a/page_b.md'), 'page_b from col_b should be included'

    // Verify images exported to col_a image directory
    assert os.exists('${export_path}/content/col_a/img/local.png'), 'Local image should exist'
    assert os.exists('${export_path}/content/col_a/img/b_image.jpg'), 'Image from cross-collection reference should be copied'

    println('✓ Recursive cross-collection with images test passed')
}
@@ -2,6 +2,7 @@ module atlas

import incubaid.herolib.core.pathlib
import os
import json

const test_base = '/tmp/atlas_test'
@@ -381,48 +382,3 @@ fn test_get_edit_url() {
    // Assert the URLs are correct
    // assert edit_url == 'https://github.com/test/repo/edit/main/test_page.md'
}

fn test_export_recursive_links() {
    // Create 3 collections with chained links
    col_a_path := '${test_base}/recursive_export/col_a'
    col_b_path := '${test_base}/recursive_export/col_b'
    col_c_path := '${test_base}/recursive_export/col_c'

    os.mkdir_all(col_a_path)!
    os.mkdir_all(col_b_path)!
    os.mkdir_all(col_c_path)!

    // Collection A
    mut cfile_a := pathlib.get_file(path: '${col_a_path}/.collection', create: true)!
    cfile_a.write('name:col_a')!
    mut page_a := pathlib.get_file(path: '${col_a_path}/page_a.md', create: true)!
    page_a.write('# Page A\n\n[Link to B](col_b:page_b)')!

    // Collection B
    mut cfile_b := pathlib.get_file(path: '${col_b_path}/.collection', create: true)!
    cfile_b.write('name:col_b')!
    mut page_b := pathlib.get_file(path: '${col_b_path}/page_b.md', create: true)!
    page_b.write('# Page B\n\n[Link to C](col_c:page_c)')!

    // Collection C
    mut cfile_c := pathlib.get_file(path: '${col_c_path}/.collection', create: true)!
    cfile_c.write('name:col_c')!
    mut page_c := pathlib.get_file(path: '${col_c_path}/page_c.md', create: true)!
    page_c.write('# Page C\n\nFinal content')!

    // Export
    mut a := new()!
    a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
    a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
    a.add_collection(mut pathlib.get_dir(path: col_c_path)!)!

    export_path := '${test_base}/export_recursive'
    a.export(destination: export_path)!

    // Verify all pages were exported
    assert os.exists('${export_path}/content/col_a/page_a.md')
    assert os.exists('${export_path}/content/col_a/page_b.md') // From Collection B
    assert os.exists('${export_path}/content/col_a/page_c.md') // From Collection C

    // TODO: test not complete
}
@@ -17,8 +17,8 @@ AtlasClient provides methods to:

```v
import incubaid.herolib.web.atlas_client

// Create client
mut client := atlas_client.new(export_dir: '${os.home_dir()}/hero/var/atlas_export')!
// Create client, exports will be in $HOME/hero/var/atlas_export by default
mut client := atlas_client.new()!

// List collections
collections := client.list_collections()!
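// Illustrative sketch: check a collection for problems using the error helpers
// shown further down in this commit; 'docs' is a placeholder collection name.
if client.has_errors('docs') {
    errors := client.get_collection_errors('docs')!
    for e in errors {
        println('error in docs: ${e}')
    }
}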
@@ -247,20 +247,6 @@ pub fn (mut c AtlasClient) get_collection_metadata(collection_name string) !Coll
    return metadata
}

// get_page_links returns the links found in a page by reading the metadata
pub fn (mut c AtlasClient) get_page_links(collection_name string, page_name string) ![]LinkMetadata {
    // Get collection metadata
    metadata := c.get_collection_metadata(collection_name)!
    // Apply name normalization to page name
    fixed_page_name := texttools.name_fix_no_ext(page_name)

    // Find the page in metadata
    if fixed_page_name in metadata.pages {
        return metadata.pages[fixed_page_name].links
    }
    return error('page_not_found: Page "${page_name}" not found in collection metadata, for collection: "${collection_name}"')
}

// get_collection_errors returns the errors for a collection from metadata
pub fn (mut c AtlasClient) get_collection_errors(collection_name string) ![]ErrorMetadata {
    metadata := c.get_collection_metadata(collection_name)!

@@ -273,6 +259,30 @@ pub fn (mut c AtlasClient) has_errors(collection_name string) bool {
    return errors.len > 0
}

pub fn (mut c AtlasClient) copy_pages(collection_name string, page_name string, destination_path string) ! {
    // Get page links from metadata
    links := c.get_page_links(collection_name, page_name)!

    // Create destination directory
    mut img_dest := pathlib.get_dir(path: '${destination_path}', create: true)!

    // Copy only page links
    for link in links {
        if link.file_type != .page {
            continue
        }
        if link.status == .external {
            continue
        }
        // Get page path and copy
        img_path := c.get_page_path(link.target_collection_name, link.target_item_name)!
        mut src := pathlib.get_file(path: img_path)!
        src.copy(dest: '${img_dest.path}/${src.name_fix_keepext()}')!
        console.print_debug(' ********. Copied page: ${src.path} to ${img_dest.path}/${src.name_fix_keepext()}')
    }
}


pub fn (mut c AtlasClient) copy_images(collection_name string, page_name string, destination_path string) ! {
    // Get page links from metadata
    links := c.get_page_links(collection_name, page_name)!

lib/data/atlas/client/client_links.v — new file (119 lines)
@@ -0,0 +1,119 @@
module client

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.texttools
import incubaid.herolib.ui.console
import os
import json
import incubaid.herolib.core.redisclient

// get_page_links returns all links found in a page and pages linked to it (recursive)
// This includes transitive links through page-to-page references
// External links, files, and images do not recurse further
pub fn (mut c AtlasClient) get_page_links(collection_name string, page_name string) ![]LinkMetadata {
    mut visited := map[string]bool{}
    mut all_links := []LinkMetadata{}
    c.collect_page_links_recursive(collection_name, page_name, mut visited, mut all_links)!
    return all_links
}

// collect_page_links_recursive is the internal recursive implementation
// It traverses all linked pages and collects all links found
//
// Thread safety: Each call to get_page_links gets its own visited map
// Circular references are prevented by tracking visited pages
//
// Link types behavior:
// - .page links: Recursively traverse to get links from the target page
// - .file and .image links: Included in results but not recursively expanded
// - .external links: Included in results but not recursively expanded
fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page_name string, mut visited map[string]bool, mut all_links []LinkMetadata) ! {
    // Create unique key for cycle detection
    page_key := '${collection_name}:${page_name}'

    // Prevent infinite loops on circular page references
    // Example: Page A → Page B → Page A
    if page_key in visited {
        return
    }
    visited[page_key] = true

    // Get collection metadata
    metadata := c.get_collection_metadata(collection_name)!
    fixed_page_name := texttools.name_fix_no_ext(page_name)

    // Find the page in metadata
    if fixed_page_name !in metadata.pages {
        return error('page_not_found: Page "${page_name}" not found in collection metadata, for collection: "${collection_name}"')
    }

    page_meta := metadata.pages[fixed_page_name]

    // Add all direct links from this page to the result
    // This includes: pages, files, images, and external links
    all_links << page_meta.links

    // Recursively traverse only page-to-page links
    for link in page_meta.links {
        // Only recursively process links to other pages within the atlas
        // Skip external links (http, https, mailto, etc.)
        // Skip file and image links (these don't have "contained" links)
        if link.file_type != .page || link.status == .external {
            continue
        }

        // Recursively collect links from the target page
        c.collect_page_links_recursive(link.target_collection_name, link.target_item_name, mut visited, mut all_links) or {
            // If we encounter an error (e.g., target page doesn't exist in metadata),
            // we continue processing other links rather than failing completely
            // This provides graceful degradation for broken link references
            continue
        }
    }
}

// get_image_links returns all image links found in a page and related pages (recursive)
// This is a convenience function that filters get_page_links to only image links
pub fn (mut c AtlasClient) get_image_links(collection_name string, page_name string) ![]LinkMetadata {
    all_links := c.get_page_links(collection_name, page_name)!
    mut image_links := []LinkMetadata{}

    for link in all_links {
        if link.file_type == .image {
            image_links << link
        }
    }

    return image_links
}

// get_file_links returns all file links (non-image) found in a page and related pages (recursive)
// This is a convenience function that filters get_page_links to only file links
pub fn (mut c AtlasClient) get_file_links(collection_name string, page_name string) ![]LinkMetadata {
    all_links := c.get_page_links(collection_name, page_name)!
    mut file_links := []LinkMetadata{}

    for link in all_links {
        if link.file_type == .file {
            file_links << link
        }
    }

    return file_links
}

// get_page_link_targets returns all page-to-page link targets found in a page and related pages
// This is a convenience function that filters get_page_links to only page links
pub fn (mut c AtlasClient) get_page_link_targets(collection_name string, page_name string) ![]LinkMetadata {
    all_links := c.get_page_links(collection_name, page_name)!
    mut page_links := []LinkMetadata{}

    for link in all_links {
        if link.file_type == .page && link.status != .external {
            page_links << link
        }
    }

    return page_links
}
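A minimal usage sketch for the helpers above; the constructor call and the collection/page names (`docs`, `intro`) are placeholders, not part of this diff:

```v
// assumes an AtlasClient constructed elsewhere in the client module
mut c := new()!

// all links reachable from 'docs:intro', including links found on
// transitively linked pages (files, images, and external links are not expanded)
links := c.get_page_links('docs', 'intro')!
println('found ${links.len} links')

// convenience filters defined in this file
images := c.get_image_links('docs', 'intro')!
files := c.get_file_links('docs', 'intro')!
pages := c.get_page_link_targets('docs', 'intro')!
println('${images.len} images, ${files.len} files, ${pages.len} page targets')
```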
@@ -7,7 +7,7 @@ import json
@[params]
pub struct ExportArgs {
pub mut:
    destination string @[requireds]
    destination string @[required]
    reset bool = true
    include bool = true
    redis bool = true
@@ -90,6 +90,44 @@ pub fn (mut c Collection) export(args CollectionExportArgs) ! {
        c.collect_cross_collection_references(mut page, mut cross_collection_pages, mut
            cross_collection_files, mut processed_cross_pages)!

        // println('------- ${c.name} ${page.key()}')
        // if page.key() == 'geoaware:solution' && c.name == 'mycelium_nodes_tiers' {
        //     println(cross_collection_pages)
        //     println(cross_collection_files)
        //     // println(processed_cross_pages)
        //     $dbg;
        // }

        // copy the pages to the right exported path
        for _, mut ref_page in cross_collection_pages {
            mut src_file := ref_page.path()!
            mut subdir_path := pathlib.get_dir(
                path: '${col_dir.path}'
                create: true
            )!
            mut dest_path := '${subdir_path.path}/${ref_page.name}.md'
            src_file.copy(dest: dest_path)!
            // println(dest_path)
            // $dbg;
        }
        // copy the files to the right exported path
        for _, mut ref_file in cross_collection_files {
            mut src_file2 := ref_file.path()!

            // Determine subdirectory based on file type
            mut subdir := if ref_file.is_image() { 'img' } else { 'files' }

            // Ensure subdirectory exists
            mut subdir_path := pathlib.get_dir(
                path: '${col_dir.path}/${subdir}'
                create: true
            )!

            mut dest_path := '${subdir_path.path}/${ref_file.name}'
            mut dest_file2 := pathlib.get_file(path: dest_path, create: true)!
            src_file2.copy(dest: dest_file2.path)!
        }

        processed_local_pages[page.name] = true

        // Redis operations...
@@ -117,65 +155,6 @@ pub fn (mut c Collection) export(args CollectionExportArgs) ! {
        mut dest_file := pathlib.get_file(path: dest_path, create: true)!
        src_file.copy(dest: dest_file.path)!
    }

    // Second pass: copy all collected cross-collection pages and process their links recursively
    // Keep iterating until no new cross-collection references are found
    for {
        mut found_new_references := false

        // Process all cross-collection pages we haven't processed yet
        for page_key, mut ref_page in cross_collection_pages {
            if page_key in processed_cross_pages {
                continue // Already processed this page's links
            }

            // Mark as processed to avoid infinite loops
            processed_cross_pages[page_key] = true
            found_new_references = true

            // Get the referenced page content with includes processed
            ref_content := ref_page.content_with_fixed_links(
                include: args.include
                cross_collection: true
                export_mode: true
            )!

            // Write the referenced page to this collection's directory
            mut dest_file := pathlib.get_file(
                path: '${col_dir.path}/${ref_page.name}.md'
                create: true
            )!
            dest_file.write(ref_content)!

            // CRITICAL: Recursively process links in this cross-collection page
            // This ensures we get pages/files/images referenced by ref_page
            c.collect_cross_collection_references(mut ref_page, mut cross_collection_pages, mut
                cross_collection_files, mut processed_cross_pages)!
        }

        // If we didn't find any new references, we're done with the recursive pass
        if !found_new_references {
            break
        }
    }

    // Third pass: copy ALL collected cross-collection referenced files/images
    for _, mut ref_file in cross_collection_files {
        mut src_file := ref_file.path()!

        // Determine subdirectory based on file type
        mut subdir := if ref_file.is_image() { 'img' } else { 'files' }

        // Ensure subdirectory exists
        mut subdir_path := pathlib.get_dir(
            path: '${col_dir.path}/${subdir}'
            create: true
        )!

        mut dest_path := '${subdir_path.path}/${ref_file.name}'
        mut dest_file := pathlib.get_file(path: dest_path, create: true)!
        src_file.copy(dest: dest_file.path)!
    }
}

// Helper function to recursively collect cross-collection references
@@ -184,6 +163,17 @@ fn (mut c Collection) collect_cross_collection_references(mut page Page,
    mut all_cross_pages map[string]&Page,
    mut all_cross_files map[string]&File,
    mut processed_pages map[string]bool) ! {
    page_key := page.key()

    // If we've already processed this page, skip it (prevents infinite loops with cycles)
    if page_key in processed_pages {
        return
    }

    // Mark this page as processed BEFORE recursing (prevents infinite loops with circular references)
    processed_pages[page_key] = true

    // Process all links in the current page
    // Use cached links from validation (before transformation) to preserve collection info
    for mut link in page.links {
        if link.status != .found {
@@ -192,15 +182,19 @@ fn (mut c Collection) collect_cross_collection_references(mut page Page,

        is_local := link.target_collection_name == c.name

        // Collect cross-collection page references
        // Collect cross-collection page references and recursively process them
        if link.file_type == .page && !is_local {
            page_key := '${link.target_collection_name}:${link.target_item_name}'
            page_ref := '${link.target_collection_name}:${link.target_item_name}'

            // Only add if not already collected
            if page_key !in all_cross_pages {
            if page_ref !in all_cross_pages {
                mut target_page := link.target_page()!
                all_cross_pages[page_key] = target_page
                // Don't mark as processed yet - we'll do that when we actually process its links
                all_cross_pages[page_ref] = target_page

                // Recursively process the target page's links to find more cross-collection references
                // This ensures we collect ALL transitive cross-collection page and file references
                c.collect_cross_collection_references(mut target_page, mut all_cross_pages, mut
                    all_cross_files, mut processed_pages)!
            }
        }
@@ -33,7 +33,7 @@ put in .hero file and execute with hero or but shebang line on top of .hero scri

!!atlas.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"

!!atlas.export destination: '/tmp/atlas_export'
!!atlas.export

```
@@ -71,9 +71,9 @@ pub struct DevArgs {
pub mut:
    host string = 'localhost'
    port int = 3000
    open bool = true // whether to open the browser automatically
    watch_changes bool = false // whether to watch for changes in docs and rebuild automatically
    skip_generate bool = false // whether to skip generation (useful when docs are pre-generated, e.g., from atlas)
    open bool = true // whether to open the browser automatically
    watch_changes bool // whether to watch for changes in docs and rebuild automatically
    skip_generate bool // whether to skip generation (useful when docs are pre-generated, e.g., from atlas)
}

pub fn (mut s DocSite) open(args DevArgs) ! {
@@ -22,7 +22,7 @@ pub fn new(args FactoryArgs) !&Site {
    }

    mut site := Site{
        nav: SideBar{}
        nav:        SideBar{}
        siteconfig: SiteConfig{
            name: name
        }
@@ -1,16 +1,12 @@
module site

// Page represents a single documentation page
pub struct Page {
pub mut:
    name string
    title string
    description string
    draft bool
    position int
    hide_title bool
    src string @[required] // always in format collection:page_name, can use the default collection if no : specified
    path string @[required] // is without the page name, so just the path to the folder where the page is in
    section_name string
    title_nr int
    slug string
    id string // Unique identifier: "collection:page_name"
    title string // Display title (optional, extracted from markdown if empty)
    description string // Brief description for metadata
    draft bool // Mark as draft (hidden from navigation)
    hide_title bool // Hide the title when rendering
    src string // Source reference (same as id in this format)
}
@@ -1,18 +0,0 @@
module site

@[heap]
pub struct Site {
pub mut:
    pages []Page
    sections []Section
    siteconfig SiteConfig
}

pub struct Section {
pub mut:
    name string
    position int
    path string
    label string
    description string
}
@@ -4,222 +4,93 @@ import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// Main entry point for processing site HeroScript
pub fn play(mut plbook PlayBook) ! {
    if !plbook.exists(filter: 'site.') {
        return
    }

    console.print_header('Processing Site Configuration')

    // ============================================================
    // STEP 1: Initialize core site configuration
    // ============================================================
    console.print_item('Step 1: Loading site configuration')
    mut config_action := plbook.ensure_once(filter: 'site.config')!

    mut p := config_action.params
    name := p.get_default('name', 'default')! // Use 'default' as fallback name

    // configure the website
    name := p.get_default('name', 'default')!
    mut website := new(name: name)!
    mut config := &website.siteconfig

    // Load core configuration
    config.name = texttools.name_fix(name)
    config.title = p.get_default('title', 'Documentation Site')!
    config.description = p.get_default('description', 'Comprehensive documentation built with Docusaurus.')!
    config.tagline = p.get_default('tagline', 'Your awesome documentation')!
    config.favicon = p.get_default('favicon', 'img/favicon.png')!
    config.image = p.get_default('image', 'img/tf_graph.png')!
    config.copyright = p.get_default('copyright', '© ' + time.now().year.str() +
        ' Example Organization')!
    config.copyright = p.get_default('copyright', '© ${time.now().year} Example Organization')!
    config.url = p.get_default('url', '')!
    config.base_url = p.get_default('base_url', '/')!
    config.url_home = p.get_default('url_home', '')!

    // Process !!site.config_meta for specific metadata overrides
    mut meta_action := plbook.ensure_once(filter: 'site.config_meta')!
    mut p_meta := meta_action.params
    config_action.done = true

    // If 'title' is present in site.config_meta, it overrides. Otherwise, meta_title remains empty or uses site.config.title logic in docusaurus model.
    config.meta_title = p_meta.get_default('title', config.title)!
    // If 'image' is present in site.config_meta, it overrides. Otherwise, meta_image remains empty or uses site.config.image logic.
    config.meta_image = p_meta.get_default('image', config.image)!
    // If 'description' is present in site.config_meta, it overrides the main description
    if p_meta.exists('description') {
        config.description = p_meta.get('description')!
    // ============================================================
    // STEP 2: Apply optional metadata overrides
    // ============================================================
    console.print_item('Step 2: Applying metadata overrides')
    if plbook.exists_once(filter: 'site.config_meta') {
        mut meta_action := plbook.get(filter: 'site.config_meta')!
        mut p_meta := meta_action.params

        config.meta_title = p_meta.get_default('title', config.title)!
        config.meta_image = p_meta.get_default('image', config.image)!
        if p_meta.exists('description') {
            config.description = p_meta.get('description')!
        }

        meta_action.done = true
    }

    config_action.done = true // Mark the action as done
    meta_action.done = true
    // ============================================================
    // STEP 3: Configure content imports
    // ============================================================
    console.print_item('Step 3: Configuring content imports')
    play_imports(mut plbook, mut config)!

    play_import(mut plbook, mut config)!
    play_menu(mut plbook, mut config)!
    // ============================================================
    // STEP 4: Configure navigation menu
    // ============================================================
    console.print_item('Step 4: Configuring navigation menu')
    play_navbar(mut plbook, mut config)!

    // ============================================================
    // STEP 5: Configure footer
    // ============================================================
    console.print_item('Step 5: Configuring footer')
    play_footer(mut plbook, mut config)!

    // ============================================================
    // STEP 6: Configure announcement bar (optional)
    // ============================================================
    console.print_item('Step 6: Configuring announcement bar (if present)')
    play_announcement(mut plbook, mut config)!
    play_publish(mut plbook, mut config)!
    play_publish_dev(mut plbook, mut config)!

    // ============================================================
    // STEP 7: Configure publish destinations
    // ============================================================
    console.print_item('Step 7: Configuring publish destinations')
    play_publishing(mut plbook, mut config)!

    // ============================================================
    // STEP 8: Build pages and navigation structure
    // ============================================================
    console.print_item('Step 8: Processing pages and building navigation')
    play_pages(mut plbook, mut website)!
}

fn play_import(mut plbook PlayBook, mut config SiteConfig) ! {
    mut import_actions := plbook.find(filter: 'site.import')!
    // println('import_actions: ${import_actions}')

    for mut action in import_actions {
        mut p := action.params
        mut replace_map := map[string]string{}
        if replace_str := p.get_default('replace', '') {
            parts := replace_str.split(',')
            for part in parts {
                kv := part.split(':')
                if kv.len == 2 {
                    replace_map[kv[0].trim_space()] = kv[1].trim_space()
                }
            }
        }

        mut importpath := p.get_default('path', '')!
        if importpath != '' {
            if !importpath.starts_with('/') {
                importpath = os.abs_path('${plbook.path}/${importpath}')
            }
        }

        mut import_ := ImportItem{
            name: p.get_default('name', '')!
            url: p.get_default('url', '')!
            path: importpath
            dest: p.get_default('dest', '')!
            replace: replace_map
            visible: p.get_default_false('visible')
        }
        config.imports << import_

        action.done = true // Mark the action as done
    }
}

fn play_menu(mut plbook PlayBook, mut config SiteConfig) ! {
    mut navbar_actions := plbook.find(filter: 'site.navbar')!
    if navbar_actions.len > 0 {
        for mut action in navbar_actions { // Should ideally be one, but loop for safety
            mut p := action.params
            config.menu.title = p.get_default('title', config.title)! // Use existing config.title as ultimate fallback
            config.menu.logo_alt = p.get_default('logo_alt', '')!
            config.menu.logo_src = p.get_default('logo_src', '')!
            config.menu.logo_src_dark = p.get_default('logo_src_dark', '')!
            action.done = true // Mark the action as done
        }
    } else {
        // Fallback to site.menu for title if site.navbar is not found
        mut menu_actions := plbook.find(filter: 'site.menu')!
        for mut action in menu_actions {
            mut p := action.params
            config.menu.title = p.get_default('title', config.title)!
            config.menu.logo_alt = p.get_default('logo_alt', '')!
            config.menu.logo_src = p.get_default('logo_src', '')!
            config.menu.logo_src_dark = p.get_default('logo_src_dark', '')!
            action.done = true // Mark the action as done
        }
    }

    mut menu_item_actions := plbook.find(filter: 'site.navbar_item')!
    if menu_item_actions.len == 0 {
        // Fallback to site.menu_item if site.navbar_item is not found
        menu_item_actions = plbook.find(filter: 'site.menu_item')!
    }

    // Clear existing menu items to prevent duplication
    config.menu.items = []MenuItem{}

    for mut action in menu_item_actions {
        mut p := action.params
        mut item := MenuItem{
            label: p.get_default('label', 'Documentation')!
            href: p.get_default('href', '')!
            to: p.get_default('to', '')!
            position: p.get_default('position', 'right')!
        }
        config.menu.items << item
        action.done = true // Mark the action as done
    }
}

fn play_footer(mut plbook PlayBook, mut config SiteConfig) ! {
    mut footer_actions := plbook.find(filter: 'site.footer')!
    for mut action in footer_actions {
        mut p := action.params
        config.footer.style = p.get_default('style', 'dark')!
        action.done = true // Mark the action as done
    }

    mut footer_item_actions := plbook.find(filter: 'site.footer_item')!
    mut links_map := map[string][]FooterItem{}

    // Clear existing footer links to prevent duplication
    config.footer.links = []FooterLink{}

    for mut action in footer_item_actions {
        mut p := action.params
        title := p.get_default('title', 'Docs')!
        mut item := FooterItem{
            label: p.get_default('label', 'Introduction')!
            href: p.get_default('href', '')!
            to: p.get_default('to', '')!
        }

        if title !in links_map {
            links_map[title] = []FooterItem{}
        }
        links_map[title] << item
        action.done = true // Mark the action as done
    }

    // Convert map to footer links array
    for title, items in links_map {
        config.footer.links << FooterLink{
            title: title
            items: items
        }
    }
}

fn play_announcement(mut plbook PlayBook, mut config SiteConfig) ! {
    mut announcement_actions := plbook.find(filter: 'site.announcement')!
    if announcement_actions.len > 0 {
        // Only process the first announcement action
        mut action := announcement_actions[0]
        mut p := action.params

        config.announcement = AnnouncementBar{
            id: p.get_default('id', 'announcement')!
            content: p.get_default('content', '')!
            background_color: p.get_default('background_color', '#20232a')!
            text_color: p.get_default('text_color', '#fff')!
            is_closeable: p.get_default_true('is_closeable')
        }

        action.done = true // Mark the action as done
    }
}

fn play_publish(mut plbook PlayBook, mut config SiteConfig) ! {
    mut build_dest_actions := plbook.find(filter: 'site.publish')!
    for mut action in build_dest_actions {
        mut p := action.params
        mut dest := BuildDest{
            path: p.get_default('path', '')! // can be url
            ssh_name: p.get_default('ssh_name', '')!
        }
        config.build_dest << dest
        action.done = true // Mark the action as done
    }
}

fn play_publish_dev(mut plbook PlayBook, mut config SiteConfig) ! {
    mut build_dest_actions := plbook.find(filter: 'site.publish_dev')!
    for mut action in build_dest_actions {
        mut p := action.params
        mut dest := BuildDest{
            path: p.get_default('path', '')! // can be url
            ssh_name: p.get_default('ssh_name', '')!
        }
        config.build_dest_dev << dest
        action.done = true // Mark the action as done
    }

    console.print_green('Site configuration complete')
}
lib/web/site/play_footer.v — new file (62 lines)
@@ -0,0 +1,62 @@
module site

import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// ============================================================
// FOOTER: Process footer configuration
// ============================================================
fn play_footer(mut plbook PlayBook, mut config SiteConfig) ! {
    // Process footer style (optional)
    mut footer_actions := plbook.find(filter: 'site.footer')!
    for mut action in footer_actions {
        mut p := action.params
        config.footer.style = p.get_default('style', 'dark')!
        action.done = true
    }

    // Process footer items (multiple)
    mut footer_item_actions := plbook.find(filter: 'site.footer_item')!
    mut links_map := map[string][]FooterItem{}

    // Clear existing links to prevent duplication
    config.footer.links = []FooterLink{}

    for mut action in footer_item_actions {
        mut p := action.params

        title := p.get_default('title', 'Docs')!

        label := p.get('label') or {
            return error('!!site.footer_item: must specify "label"')
        }

        mut item := FooterItem{
            label: label
            href: p.get_default('href', '')!
            to: p.get_default('to', '')!
        }

        // Validate that href or to is specified
        if item.href.len == 0 && item.to.len == 0 {
            return error('!!site.footer_item for "${label}": must specify either "href" or "to"')
        }

        if title !in links_map {
            links_map[title] = []FooterItem{}
        }
        links_map[title] << item
        action.done = true
    }

    // Convert map to footer links array
    for title, items in links_map {
        config.footer.links << FooterLink{
            title: title
            items: items
        }
    }
}
lib/web/site/play_imports.v — new file (51 lines)
@@ -0,0 +1,51 @@
module site

import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// ============================================================
// IMPORTS: Process content imports
// ============================================================
fn play_imports(mut plbook PlayBook, mut config SiteConfig) ! {
    mut import_actions := plbook.find(filter: 'site.import')!

    for mut action in import_actions {
        mut p := action.params

        // Parse replacement patterns (comma-separated key:value pairs)
        mut replace_map := map[string]string{}
        if replace_str := p.get_default('replace', '') {
            parts := replace_str.split(',')
            for part in parts {
                kv := part.split(':')
                if kv.len == 2 {
                    replace_map[kv[0].trim_space()] = kv[1].trim_space()
                }
            }
        }

        // Get path (can be relative to playbook path)
        mut import_path := p.get_default('path', '')!
        if import_path != '' {
            if !import_path.starts_with('/') {
                import_path = os.abs_path('${plbook.path}/${import_path}')
            }
        }

        // Create import item
        mut import_item := ImportItem{
            name: p.get_default('name', '')!
            url: p.get_default('url', '')!
            path: import_path
            dest: p.get_default('dest', '')!
            replace: replace_map
            visible: p.get_default_false('visible')
        }

        config.imports << import_item
        action.done = true
    }
}
lib/web/site/play_navbar.v — new file (60 lines)
@@ -0,0 +1,60 @@
module site

import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// ============================================================
// NAVBAR: Process navigation menu
// ============================================================
fn play_navbar(mut plbook PlayBook, mut config SiteConfig) ! {
    // Try 'site.navbar' first, then fallback to deprecated 'site.menu'
    mut navbar_actions := plbook.find(filter: 'site.navbar')!
    if navbar_actions.len == 0 {
        navbar_actions = plbook.find(filter: 'site.menu')!
    }

    // Configure navbar metadata
    if navbar_actions.len > 0 {
        for mut action in navbar_actions {
            mut p := action.params
            config.menu.title = p.get_default('title', config.title)!
            config.menu.logo_alt = p.get_default('logo_alt', '')!
            config.menu.logo_src = p.get_default('logo_src', '')!
            config.menu.logo_src_dark = p.get_default('logo_src_dark', '')!
            action.done = true
        }
    }

    // Process navbar items
    mut navbar_item_actions := plbook.find(filter: 'site.navbar_item')!
    if navbar_item_actions.len == 0 {
        navbar_item_actions = plbook.find(filter: 'site.menu_item')!
    }

    // Clear existing items to prevent duplication
    config.menu.items = []MenuItem{}

    for mut action in navbar_item_actions {
        mut p := action.params

        label := p.get('label') or { return error('!!site.navbar_item: must specify "label"') }

        mut item := MenuItem{
            label: label
            href: p.get_default('href', '')!
            to: p.get_default('to', '')!
            position: p.get_default('position', 'right')!
        }

        // Validate that at least href or to is specified
        if item.href.len == 0 && item.to.len == 0 {
            return error('!!site.navbar_item: must specify either "href" or "to" for label "${label}"')
        }

        config.menu.items << item
        action.done = true
    }
}
@@ -1,135 +0,0 @@
module site

import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools

// plays the sections & pages
fn play_pages(mut plbook PlayBook, mut site Site) ! {
    // mut siteconfig := &site.siteconfig

    // if only 1 doctree is specified, then we use that as the default doctree name
    // mut doctreename := 'main' // Not used for now, keep commented for future doctree integration
    // if plbook.exists(filter: 'site.doctree') {
    //     if plbook.exists_once(filter: 'site.doctree') {
    //         mut action := plbook.get(filter: 'site.doctree')!
    //         mut p := action.params
    //         doctreename = p.get('name') or { return error('need to specify name in site.doctree') }
    //     } else {
    //         return error("can't have more than one site.doctree")
    //     }
    // }

    mut section_current := Section{} // is the category
    mut position_section := 1
    mut position_category := 100 // Start categories at position 100
    mut collection_current := '' // current collection we are working on

    mut all_actions := plbook.find(filter: 'site.')!

    for mut action in all_actions {
        if action.done {
            continue
        }

        mut p := action.params

        if action.name == 'page_category' {
            mut section := Section{}
            section.name = p.get('name') or {
                return error('need to specify name in site.page_category. Action: ${action}')
            }
            position_section = 1 // go back to default position for pages in the category
            section.position = p.get_int_default('position', position_category)!
            if section.position == position_category {
                position_category += 100 // Increment for next category
            }
            section.label = p.get_default('label', texttools.name_fix_snake_to_pascal(section.name))!
            section.path = p.get_default('path', texttools.name_fix(section.label))!
            section.description = p.get_default('description', '')!

            site.sections << section
            action.done = true // Mark the action as done
            section_current = section
            continue // next action
        }

        if action.name == 'page' {
            mut pagesrc := p.get_default('src', '')!
            mut pagename := p.get_default('name', '')!
            mut pagecollection := ''

            if pagesrc.contains(':') {
                pagecollection = pagesrc.split(':')[0]
                pagename = pagesrc.split(':')[1]
            } else {
                if collection_current.len > 0 {
                    pagecollection = collection_current
                    pagename = pagesrc // ADD THIS LINE - use pagesrc as the page name
                } else {
                    return error('need to specify collection in page.src path as collection:page_name or make sure someone before you did. Got src="${pagesrc}" with no collection set. Action: ${action}')
                }
            }

            pagecollection = texttools.name_fix(pagecollection)
            collection_current = pagecollection
            pagename = texttools.name_fix_keepext(pagename)
            if pagename.ends_with('.md') {
                pagename = pagename.replace('.md', '')
            }

            if pagename == '' {
                return error('need to specify name in page.src or specify in path as collection:page_name. Action: ${action}')
            }
            if pagecollection == '' {
                return error('need to specify collection in page.src or specify in path as collection:page_name. Action: ${action}')
            }

            // recreate the pagepath
            pagesrc = '${pagecollection}:${pagename}'

            // get sectionname from category, page_category or section, if not specified use current section
            section_name := p.get_default('category', p.get_default('page_category', p.get_default('section',
                section_current.name)!)!)!
            mut pagepath := p.get_default('path', section_current.path)!
            pagepath = pagepath.trim_space().trim('/')
            // Only apply name_fix if it's a simple name (no path separators)
            // For paths like 'appendix/internet_today', preserve the structure
            if !pagepath.contains('/') {
                pagepath = texttools.name_fix(pagepath)
            }
            // Ensure pagepath ends with / to indicate it's a directory path
            if pagepath.len > 0 && !pagepath.ends_with('/') {
                pagepath += '/'
            }

            mut mypage := Page{
                section_name: section_name
                name: pagename
                path: pagepath
                src: pagesrc
            }

            mypage.position = p.get_int_default('position', 0)!
            if mypage.position == 0 {
                mypage.position = section_current.position + position_section
                position_section += 1
            }
            mypage.title = p.get_default('title', '')!

            mypage.description = p.get_default('description', '')!
            mypage.slug = p.get_default('slug', '')!
            mypage.draft = p.get_default_false('draft')
            mypage.hide_title = p.get_default_false('hide_title')
            mypage.title_nr = p.get_int_default('title_nr', 0)!

            site.pages << mypage

            action.done = true // Mark the action as done
        }

        // println(action)
        // println(section_current)
        // println(site.pages.last())
        // $dbg;
    }
}
lib/web/site/play_publish.v — new file (46 lines)
@@ -0,0 +1,46 @@
module site

import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// ============================================================
// PUBLISHING: Configure build and publish destinations
// ============================================================
fn play_publishing(mut plbook PlayBook, mut config SiteConfig) ! {
    // Production publish destinations
    mut build_dest_actions := plbook.find(filter: 'site.publish')!
    for mut action in build_dest_actions {
        mut p := action.params

        path := p.get('path') or {
            return error('!!site.publish: must specify "path"')
        }

        mut dest := BuildDest{
            path: path
            ssh_name: p.get_default('ssh_name', '')!
        }
        config.build_dest << dest
        action.done = true
    }

    // Development publish destinations
    mut build_dest_dev_actions := plbook.find(filter: 'site.publish_dev')!
    for mut action in build_dest_dev_actions {
        mut p := action.params

        path := p.get('path') or {
            return error('!!site.publish_dev: must specify "path"')
        }

        mut dest := BuildDest{
            path: path
            ssh_name: p.get_default('ssh_name', '')!
        }
        config.build_dest_dev << dest
        action.done = true
    }
}
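For reference, a minimal HeroScript sketch that would exercise play_publishing above; `path` is required, `ssh_name` is optional, and the destination values here are placeholders:

```heroscript
!!site.publish
    path: "root@example.com:/var/www/docs"
    ssh_name: "production"

!!site.publish_dev
    path: "/tmp/docs_dev"
```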
@@ -2,34 +2,37 @@
|
||||
|
||||
The Site module provides a structured way to define website configurations, navigation menus, pages, and sections using HeroScript. It's designed to work with static site generators like Docusaurus.
|
||||
|
||||
## Purpose
|
||||
|
||||
The Site module allows you to:
|
||||
|
||||
- Define website structure and configuration in a declarative way using HeroScript
|
||||
- Organize pages into sections/categories
|
||||
- Configure navigation menus and footers
|
||||
- Manage page metadata (title, description, slug, etc.)
|
||||
- Support multiple content collections
|
||||
- Define build and publish destinations
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Minimal HeroScript Example
|
||||
|
||||
```heroscript
|
||||
!!site.config
|
||||
name: "my_docs"
|
||||
title: "My Documentation"
|
||||
|
||||
!!site.page src: "docs:introduction"
|
||||
title: "Getting Started"
|
||||
|
||||
!!site.page src: "setup"
|
||||
title: "Installation"
|
||||
```
|
||||
|
||||
### Processing with V Code
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.develop.gittools
|
||||
import incubaid.herolib.core.playbook
|
||||
import incubaid.herolib.web.site
|
||||
import incubaid.herolib.core.playcmds
|
||||
import incubaid.herolib.ui.console
|
||||
|
||||
// Clone or use existing repository with HeroScript files
|
||||
mysitepath := gittools.path(
|
||||
git_url: 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech'
|
||||
git_pull: true
|
||||
)!
|
||||
// Process HeroScript file
|
||||
mut plbook := playbook.new(path: './site_config.heroscript')!
|
||||
|
||||
// Process all HeroScript files in the path
|
||||
playcmds.run(heroscript_path: mysitepath.path)!
|
||||
// Execute site configuration
|
||||
site.play(mut plbook)!
|
||||
|
||||
// Access the configured site
|
||||
mut mysite := site.get(name: 'my_docs')!
|
||||
@@ -224,7 +227,7 @@ A logical group of pages. Pages reuse the collection once specified.

## HeroScript Syntax

### Basic Configuration
### 1. Site Configuration (Required)

```heroscript
!!site.config
@@ -237,20 +240,49 @@ A logical group of pages. Pages reuse the collection once specified.
copyright: "© 2024 My Organization"
url: "https://docs.example.com"
base_url: "/"
url_home: "/docs"
```

### Navigation Menu
**Parameters:**
- `name` - Internal site identifier (default: 'default')
- `title` - Main site title (shown in browser tab)
- `description` - Site description for SEO
- `tagline` - Short tagline/subtitle
- `favicon` - Path to favicon image
- `image` - Default OG image for social sharing
- `copyright` - Copyright notice
- `url` - Full site URL for Docusaurus
- `base_url` - Base URL path (e.g., "/" or "/docs/")
- `url_home` - Home page path

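As an illustrative sketch only (values are placeholders, not defaults), these parameters combine as follows:

```heroscript
!!site.config
name: "my_docs"
title: "My Documentation"
description: "Documentation for the example platform"
tagline: "Docs made simple"
favicon: "img/favicon.ico"
image: "img/og-default.png"
copyright: "© 2024 My Organization"
url: "https://docs.example.com"
base_url: "/"
url_home: "/docs"
```
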
### 2. Metadata Overrides (Optional)

```heroscript
!!site.config_meta
title: "My Docs - Technical Reference"
image: "img/tech-og.png"
description: "Technical documentation and API reference"
```

Overrides specific metadata for SEO without changing core config.

### 3. Navigation Bar

```heroscript
!!site.navbar
title: "My Site"
title: "My Documentation"
logo_alt: "Site Logo"
logo_src: "img/logo.svg"
logo_src_dark: "img/logo-dark.svg"

!!site.navbar_item
label: "Documentation"
to: "docs/intro"
to: "intro"
position: "left"

!!site.navbar_item
label: "API Reference"
to: "docs/api"
position: "left"

!!site.navbar_item
@@ -259,7 +291,13 @@ A logical group of pages. Pages reuse the collection once specified.
position: "right"
```

### Footer Configuration
**Parameters:**
- `label` - Display text (required)
- `to` - Internal link
- `href` - External URL
- `position` - "left" or "right" in navbar

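As a hedged example of an external navbar link using `href` (the label and URL are placeholders):

```heroscript
!!site.navbar_item
label: "GitHub"
href: "https://github.com/example/repo"
position: "right"
```
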
### 4. Footer Configuration

```heroscript
!!site.footer
@@ -273,19 +311,20 @@ A logical group of pages. Pages reuse the collection once specified.

!!site.footer_item
title: "Docs"
label: "Getting Started"
href: "https://docs.example.com/getting-started"
to: "getting-started"

!!site.footer_item
title: "Community"
label: "Discord"
href: "https://discord.gg/example"

!!site.footer_item
title: "Legal"
label: "Privacy"
href: "https://example.com/privacy"
```

## Page Organization

### Example 1: Simple Pages Without Categories

When you don't need categories, pages are added sequentially. The collection only needs to be specified once, then it's reused for subsequent pages.
### 5. Announcement Bar (Optional)

```heroscript
!!site.announcement
@@ -295,34 +334,56 @@ When you don't need categories, pages are added sequentially. The collection onl

is_closeable: true
```

**Key Points:**
### 6. Pages and Categories

- First page specifies collection as `tech:introduction` (collection:page_name format)
- Subsequent pages only need the page name (e.g., `vision`) - the `tech` collection is reused
- If `title` is not specified, it will be extracted from the markdown file itself
- Pages are ordered by their appearance in the HeroScript file
- `slug` can be used to customize the URL path (e.g., `"/"` for homepage)
#### Simple: Pages Without Categories

### Example 2: Pages with Categories
```heroscript
!!site.page src: "guides:introduction"
title: "Getting Started"
description: "Introduction to the platform"

Categories (sections) help organize pages into logical groups with their own navigation structure.
!!site.page src: "installation"
title: "Installation"

!!site.page src: "configuration"
title: "Configuration"
```

#### Advanced: Pages With Categories

```heroscript
!!site.page_category
name: "first_principle_thinking"
label: "First Principle Thinking"
name: "basics"
label: "Getting Started"

!!site.page src: "first_principle_thinking:hardware_badly_used"
description: "Hardware is not used properly, why it is important to understand hardware"
!!site.page src: "guides:introduction"
title: "Introduction"
description: "Learn the basics"

!!site.page src: "internet_risk"
description: "Internet risk, how to mitigate it, and why it is important"
!!site.page src: "installation"
title: "Installation"

!!site.page src: "onion_analogy"
description: "Compare onion with a computer, layers of abstraction"
!!site.page src: "configuration"
title: "Configuration"

!!site.page_category
name: "advanced"
label: "Advanced Topics"

!!site.page src: "advanced:performance"
title: "Performance Tuning"

!!site.page src: "scaling"
title: "Scaling Guide"
```

**Key Points:**
**Page Parameters:**
- `src` - Source as `collection:page` (first page) or just `page_name` (reuse collection)
- `title` - Page title (optional, extracted from markdown if not provided)
- `description` - Page description
- `draft` - Hide from navigation (default: false)
- `hide_title` - Don't show title in page (default: false)

**Category Parameters:**
- `name` - Category identifier (required)
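As an illustrative sketch of the optional page flags above (the page name and values are placeholders):

```heroscript
!!site.page src: "guides:legacy_setup"
title: "Legacy Setup"
description: "Kept for reference"
draft: true
hide_title: true
```
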
@@ -334,78 +395,113 @@ Categories (sections) help organize pages into logical groups with their own nav

```heroscript
!!site.import
url: "https://github.com/example/external-docs"
path: "/local/path/to/repo"
dest: "external"
replace: "PROJECT_NAME:My Project,VERSION:1.0.0"
visible: true
```

## Publish Destinations
### 8. Publishing Destinations

```heroscript
!!site.publish
path: "/var/www/html/docs"
ssh_name: "production_server"
ssh_name: "production"

!!site.publish_dev
path: "/tmp/docs-preview"
```

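A hedged V sketch of how the parsed destinations might be inspected after `site.play()`; the `siteconfig.build_dest` / `build_dest_dev` access path is assumed from the handler code in this changeset, not a confirmed API:

```v
import incubaid.herolib.web.site

mut mysite := site.get(name: 'my_docs')!
// Assumed fields: populated by the !!site.publish and !!site.publish_dev handlers
for dest in mysite.siteconfig.build_dest {
	println('publish: ${dest.path} (ssh: ${dest.ssh_name})')
}
for dest in mysite.siteconfig.build_dest_dev {
	println('dev preview: ${dest.path}')
}
```
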
## Factory Methods
---

### Create or Get a Site
## Common Patterns

```v
import incubaid.herolib.web.site
### Pattern 1: Multi-Section Technical Documentation

// Create a new site
mut mysite := site.new(name: 'my_docs')!
```heroscript
!!site.config
name: "tech_docs"
title: "Technical Documentation"

// Get an existing site
mut mysite := site.get(name: 'my_docs')!
!!site.page_category
name: "getting_started"
label: "Getting Started"

// Get default site
mut mysite := site.default()!
!!site.page src: "docs:intro"
title: "Introduction"

// Check if site exists
if site.exists(name: 'my_docs') {
println('Site exists')
}
!!site.page src: "installation"
title: "Installation"

// List all sites
sites := site.list()
println(sites)
!!site.page_category
name: "concepts"
label: "Core Concepts"

!!site.page src: "concepts:architecture"
title: "Architecture"

!!site.page src: "components"
title: "Components"

!!site.page_category
name: "api"
label: "API Reference"

!!site.page src: "api:rest"
title: "REST API"

!!site.page src: "graphql"
title: "GraphQL"
```

### Using with PlayBook
### Pattern 2: Simple Blog/Knowledge Base

```v
import incubaid.herolib.core.playbook
import incubaid.herolib.web.site
```heroscript
!!site.config
name: "blog"
title: "Knowledge Base"

// Create playbook from path
mut plbook := playbook.new(path: '/path/to/heroscripts')!
!!site.page src: "articles:first_post"
title: "Welcome to Our Blog"

// Process site configuration
site.play(mut plbook)!
!!site.page src: "second_post"
title: "Understanding the Basics"

// Access the configured site
mut mysite := site.get(name: 'my_site')!
!!site.page src: "third_post"
title: "Advanced Techniques"
```

## Data Structures
### Pattern 3: Project with External Imports

### Site
```heroscript
!!site.config
name: "project_docs"
title: "Project Documentation"

```v
pub struct Site {
pub mut:
pages []Page
sections []Section
siteconfig SiteConfig
}
!!site.import
url: "https://github.com/org/shared-docs"
dest: "shared"
visible: true

!!site.page_category
name: "product"
label: "Product Guide"

!!site.page src: "docs:overview"
title: "Overview"

!!site.page src: "features"
title: "Features"

!!site.page_category
name: "resources"
label: "Shared Resources"

!!site.page src: "shared:common"
title: "Common Patterns"
```

### Page
---

## File Organization
