refactor: overhaul Docusaurus command and generation

- Rework `hero docusaurus` command to use local `cfg` files
- Scan and export doctree collections during site generation
- Fix `baseUrl` redirect path handling in `index.tsx`
- Add cycle detection for `play.include` in playbooks (see the sketch below)
- Improve site config processing to prevent duplicate items (see the sketch after the diff)
Mahmoud-Emad committed 2025-08-07 10:46:57 +03:00
parent 2667856633
commit 82a46e8149
15 changed files with 291 additions and 140 deletions
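
The `play.include` cycle detection mentioned above lives in the playbook handling code and is not part of the hunks shown below. As a rough illustration of the idea only (all names and the map-based fixture are hypothetical, not taken from herolib):

```v
// Walk includes depth-first and fail when a path repeats in the chain that is
// currently being expanded, e.g. a.md -> b.md -> a.md.
fn expand_includes(includes map[string][]string, path string, chain []string) ! {
	if path in chain {
		cycle := chain.join(' -> ')
		return error('play.include cycle: ${cycle} -> ${path}')
	}
	mut next_chain := chain.clone()
	next_chain << path
	for target in includes[path] {
		expand_includes(includes, target, next_chain)!
	}
}

fn main() {
	includes := {
		'a.md': ['b.md']
		'b.md': ['a.md'] // cycles back to a.md
	}
	expand_includes(includes, 'a.md', []string{}) or { println(err) }
}
```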

@@ -7,8 +7,7 @@ import json
import os
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools.regext
// import freeflowuniverse.herolib.data.doctree
import freeflowuniverse.herolib.data.doctree
import freeflowuniverse.herolib.web.site as sitegen
@@ -40,14 +39,8 @@ pub fn (mut site DocSite) generate() ! {
}
}
mut main_file := pathlib.get_file(path: '${cfg_path}/main.json', create: true)!
main_file.write(json.encode_pretty(site.config.main))!
mut navbar_file := pathlib.get_file(path: '${cfg_path}/navbar.json', create: true)!
navbar_file.write(json.encode_pretty(site.config.navbar))!
mut footer_file := pathlib.get_file(path: '${cfg_path}/footer.json', create: true)!
footer_file.write(json.encode_pretty(site.config.footer))!
// We'll generate the configuration files after processing the site
// This is moved to after sitegen.play() so we can use the processed site configuration
osal.rm('${f.path_build.path}/docs')!
@@ -71,44 +64,101 @@ pub fn (mut site DocSite) generate() ! {
sitegen.play(mut plbook)!
// Get the updated site object after processing
mut updated_site := sitegen.get(name: site.name)!
// The site name in the config might be different from the docusaurus site name
// Find the site with the most pages (should contain the processed page definitions)
available_sites := sitegen.list()
mut best_site := &sitegen.Site(unsafe { nil })
mut max_pages := 0
for site_name in available_sites {
	mut test_site := sitegen.get(name: site_name) or { continue }
	if test_site.pages.len > max_pages {
		max_pages = test_site.pages.len
		best_site = test_site
	}
}
if best_site == unsafe { nil } || max_pages == 0 {
	return error('No sites with pages found after processing playbook. Available sites: ${available_sites}')
}
mut updated_site := best_site
// Generate the configuration files using the processed site configuration
mut updated_config := new_configuration(updated_site.siteconfig)!
mut main_file := pathlib.get_file(path: '${cfg_path}/main.json', create: true)!
main_file.write(json.encode_pretty(updated_config.main))!
mut navbar_file := pathlib.get_file(path: '${cfg_path}/navbar.json', create: true)!
navbar_file.write(json.encode_pretty(updated_config.navbar))!
mut footer_file := pathlib.get_file(path: '${cfg_path}/footer.json', create: true)!
footer_file.write(json.encode_pretty(updated_config.footer))!
// Fix the index.tsx redirect to handle baseUrl properly
// When baseUrl is not '/', we need to use an absolute redirect path
if updated_config.main.base_url != '/' {
	index_tsx_path := '${f.path_build.path}/src/pages/index.tsx'
	if os.exists(index_tsx_path) {
		// Create the corrected index.tsx content
		fixed_index_content := "import React from 'react';
import { Redirect } from '@docusaurus/router';
import main from '../../cfg/main.json';
export default function Home() {
  // Use absolute redirect path when baseUrl is not root
  const redirectPath = main.baseUrl + main.url_home;
  return <Redirect to={redirectPath} />;
}"
		mut index_file := pathlib.get_file(path: index_tsx_path, create: false)!
		index_file.write(fixed_index_content)!
	}
}
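// Worked example for the rewrite above (hypothetical values): with baseUrl
// '/mysite/' and url_home 'intro', redirectPath becomes '/mysite/intro';
// when baseUrl is '/', the template index.tsx is left untouched.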
// Scan and export doctree collections to Redis before generating docs
// This ensures the doctreeclient can access the collections when generating pages
console.print_header(' scanning doctree collections for site: ${site.name}')
// Find the collections directory relative to the source path
// The collections should be in the parent directory of the ebooks
mut collections_path := ''
// Try to find collections directory by going up from the source path
mut current_path := pathlib.get_dir(path: site.path_src.path)!
for _ in 0 .. 5 { // Search up to 5 levels up
	collections_candidate := '${current_path.path}/collections'
	if os.exists(collections_candidate) {
		collections_path = collections_candidate
		break
	}
	parent := current_path.parent() or { break } // reached root or error
	if parent.path == current_path.path {
		break // reached root
	}
	current_path = parent
}
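// Illustrative layout this search assumes (paths are hypothetical): starting
// from site.path_src and walking up at most five levels, e.g.
//   <repo>/collections/    <- found here, then scanned into the doctree below
//   <repo>/ebooks/<site>/  <- site.path_src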
if collections_path != '' {
	// Create a doctree and scan the collections
	mut tree := doctree.new(name: site.name)!
	tree.scan(path: collections_path)!
	// Export to Redis and temporary location for doctreeclient access
	tree.export(
		destination: '/tmp/doctree_export_${site.name}'
		reset: true
		exclude_errors: false
	)!
}
// Generate the actual docs content from the processed site configuration
docs_path := '${f.path_build.path}/docs'
generate(
console.print_header(' generating docs from site pages to: ${docs_path}')
generate_docs(
	path: docs_path
	site: updated_site
)!
// site.process_imports()!
}
// pub fn (mut site DocSite) process_imports() ! {
// 	mut gs := gittools.new()!
// 	mut f := factory_get()!
// 	for item in site.siteconfig.imports {
// 		mypath := gs.get_path(
// 			pull: false
// 			reset: false
// 			url: item.url
// 		)!
// 		mut mypatho := pathlib.get(mypath)
// 		mypatho.copy(dest: '${f.path_build.path}/docs/${item.dest}', delete: true)!
// 		// println(item)
// 		// replace: {'NAME': 'MyName', 'URGENCY': 'red'}
// 		mut ri := regext.regex_instructions_new()
// 		for key, val in item.replace {
// 			ri.add_item('\{${key}\}', val)!
// 		}
// 		mypatho.copy(dest: '${f.path_build.path}/docs/${item.dest}', delete: true)!
// 		ri.replace_in_dir(
// 			path: '${f.path_build.path}/docs/${item.dest}'
// 			extensions: [
// 				'md',
// 			]
// 		)!
// 	}
// }
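
The "prevent duplicate items" part of this commit touches the site config processing in another of the changed files, so it does not appear in this diff. A minimal sketch of the idea, assuming a hypothetical `NavbarItem` struct (the real config types may differ):

```v
// Keep only the first occurrence of each item, keyed on label plus target,
// so reprocessing the playbook cannot append the same navbar entry twice.
struct NavbarItem {
	label string
	href  string
}

fn dedup_items(items []NavbarItem) []NavbarItem {
	mut seen := map[string]bool{}
	mut out := []NavbarItem{}
	for item in items {
		key := '${item.label}|${item.href}'
		if key in seen {
			continue
		}
		seen[key] = true
		out << item
	}
	return out
}
```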