refactor: overhaul Docusaurus command and generation

- Rework the `hero docusaurus` command to read local `cfg` heroscript files
- Scan and export doctree collections during site generation
- Fix `baseUrl` redirect path handling in `index.tsx`
- Add cycle detection for `play.include` in playbooks (see the sketch below)
- Improve site config processing to prevent duplicate menu and footer items
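
The cycle detection can be exercised with a small hero script in the style of the repo's existing examples; this is a minimal sketch only — the /tmp paths, the file names, and the `path:` parameter on `!!play.include` are illustrative assumptions, not part of this commit:

import freeflowuniverse.herolib.core.playcmds
import os

// Hypothetical pair of heroscripts that include each other (illustration only).
os.mkdir_all('/tmp/include_cycle')!
os.write_file('/tmp/include_cycle/a.heroscript', '!!play.include path:"/tmp/include_cycle/b.heroscript"')!
os.write_file('/tmp/include_cycle/b.heroscript', '!!play.include path:"/tmp/include_cycle/a.heroscript"')!

// Without cycle detection this would recurse forever; with the new
// included_paths tracking, the already-seen path is skipped and the run finishes.
playcmds.run(
    heroscript: '!!play.include path:"/tmp/include_cycle/a.heroscript"'
)!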
Mahmoud-Emad
2025-08-07 10:46:57 +03:00
parent 2667856633
commit 82a46e8149
15 changed files with 291 additions and 140 deletions

View File

@@ -5,6 +5,9 @@ import freeflowuniverse.herolib.core.playcmds
playcmds.run(
heroscript: '
!!docusaurus.config
title: "TFGrid Tech Docs"
!!docusaurus.define
path_build: "/tmp/docusaurus_build"
path_publish: "/tmp/docusaurus_publish"
@@ -12,12 +15,14 @@ playcmds.run(
install: 1
template_update: 1
!!docusaurus.add name:"tfgrid_docs"
!!docusaurus.add sitename:"default"
git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech"
git_root:"/tmp/code"
git_reset:1
git_pull:1
!!docusaurus.build
!!docusaurus.dev site:"default" open:true watch_changes:true
'
)!

View File

@@ -1,9 +1,11 @@
module herocmds
import freeflowuniverse.herolib.web.docusaurus
import freeflowuniverse.herolib.web.site
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.playcmds
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.web.site
import freeflowuniverse.herolib.web.docusaurus
import os
import cli { Command, Flag }
@@ -119,23 +121,20 @@ pub fn cmd_docusaurus(mut cmdroot Command) Command {
}
fn cmd_docusaurus_execute(cmd Command) ! {
mut update := cmd.flags.get_bool('update') or { false }
mut init := cmd.flags.get_bool('new') or { false }
mut open := cmd.flags.get_bool('open') or { false }
mut url := cmd.flags.get_string('url') or { '' }
mut publish_path := cmd.flags.get_string('publish') or { '' }
mut buildpublish := cmd.flags.get_bool('buildpublish') or { false }
mut builddevpublish := cmd.flags.get_bool('builddevpublish') or { false }
mut dev := cmd.flags.get_bool('dev') or { false }
// --- Build Path Logic ---
mut build_path := cmd.flags.get_string('buildpath') or { '' }
if build_path == '' {
// Default build path if not provided
build_path = '${os.home_dir()}/hero/var/docusaurus'
}
// --- Start: Heroscript Path Logic ---
// --- Path Logic ---
mut provided_path := cmd.flags.get_string('path') or { '' }
mut heroscript_config_dir := ''
mut source_path := ''
if provided_path != '' {
if !os.exists(provided_path) || !os.is_dir(provided_path) {
@@ -145,10 +144,14 @@ fn cmd_docusaurus_execute(cmd Command) ! {
// Check if the provided path contains a cfg subdirectory (ebook directory structure)
cfg_subdir := os.join_path(provided_path, 'cfg')
if os.exists(cfg_subdir) && os.is_dir(cfg_subdir) {
heroscript_config_dir = cfg_subdir
source_path = provided_path
} else {
// Assume the provided path is already the cfg directory
heroscript_config_dir = provided_path
if provided_path.ends_with('cfg') {
// If path ends with cfg, use parent directory as source
source_path = os.dir(provided_path)
} else {
return error('Provided path "${provided_path}" does not contain a "cfg" subdirectory.')
}
}
} else {
mut cwd := os.getwd()
@@ -156,54 +159,72 @@ fn cmd_docusaurus_execute(cmd Command) ! {
if !os.exists(cfg_dir) || !os.is_dir(cfg_dir) {
return error('Flag -path not provided and directory "./cfg" not found in the current working directory.')
}
heroscript_config_dir = cfg_dir
source_path = cwd
}
mut buildpublish := cmd.flags.get_bool('buildpublish') or { false }
mut builddevpublish := cmd.flags.get_bool('builddevpublish') or { false }
mut dev := cmd.flags.get_bool('dev') or { false }
console.print_header('Running Docusaurus for: ${source_path}')
// Process the site configuration first (excluding global includes)
mut combined_heroscript := ''
cfg_path := os.join_path(source_path, 'cfg')
if os.exists(cfg_path) {
files := os.ls(cfg_path) or { []string{} }
for file in files {
if file.ends_with('.heroscript') {
file_path := os.join_path(cfg_path, file)
content := os.read_file(file_path) or { continue }
// // Get the site configuration that was processed from the heroscript files
// // The site.play() function processes the heroscript and creates sites in the global websites map
// // We need to get the site by name from the processed configuration
// config_actions := plbook.find(filter: 'site.config')!
// if config_actions.len == 0 {
// return error('No site.config found in heroscript files. Make sure config.heroscript contains !!site.config.')
// }
// Skip files that contain play.include to avoid global processing
if content.contains('!!play.include') {
continue
}
// // Get the site name from the first site.config action
// site_name := config_actions[0].params.get('name') or {
// return error('site.config must specify a name parameter')
// }
combined_heroscript += content + '\n\n'
}
}
}
// // Get the processed site configuration
// mut generic_site := site.get(name: site_name)!
if combined_heroscript.len == 0 {
return error('No valid heroscript files found (all contained global includes)')
}
// // Add docusaurus site
// mut dsite := docusaurus.dsite_add(
// site: generic_site
// path_src: url // Use URL as source path for now
// path_build: build_path
// path_publish: publish_path
// reset: false
// template_update: update
// install: init
// )!
// Process the site configuration to get the site name
mut plbook := playbook.new(text: combined_heroscript)!
site.play(mut plbook)!
// // Conditional site actions based on flags
// if buildpublish {
// dsite.build_publish()!
// } else if builddevpublish {
// dsite.build_dev_publish()!
// } else if dev {
// dsite.dev(host: 'localhost', port: 3000, open: open)!
// } else if open {
// dsite.open('localhost', 3000)!
// } else {
// // If no specific action (build/dev/open) is requested, just generate the site
// dsite.generate()!
// }
// Get the site name from the processed site configuration
available_sites := site.list()
if available_sites.len == 0 {
return error('No sites were created from the configuration')
}
site_name := available_sites[0] // Use the first (and likely only) site
panic("implement")
// Set up the docusaurus factory
docusaurus.factory_set(
path_build: build_path
reset: true
install: true
template_update: true
)!
// Add the docusaurus site
mut dsite := docusaurus.dsite_add(
sitename: site_name
path: source_path
play: false // Site already processed
)!
// Execute the requested action directly
if buildpublish {
dsite.build_publish()!
} else if builddevpublish {
dsite.build()!
} else if dev {
dsite.dev(
open: open
watch_changes: true
)!
} else {
dsite.build()!
}
}

View File

@@ -40,18 +40,20 @@ pub fn (action Action) str() string {
// serialize to heroscript
pub fn (action Action) heroscript() string {
mut out := ''
if action.comments.len > 0 {
out += texttools.indent(action.comments, '// ')
}
if action.comments.len > 0 {
// out += texttools.indent(action.comments, '// ')
}
if action.actiontype == .dal {
out += '!'
} else if action.actiontype == .sal {
out += '!!'
} else if action.actiontype == .wal {
out += '!!!!'
} else if action.actiontype == .macro {
out += '!!!'
} else {
panic('only action sal and macro supported for now')
panic('unsupported action type: ${action.actiontype}')
}
if action.actor != '' {

View File

@@ -8,13 +8,13 @@ import crypto.blake2b
@[heap]
pub struct PlayBook {
pub mut:
actions []&Action
actions []&Action @[skip; str: skip]
priorities map[int][]int // first key is the priority, the list of int's is position in list self.actions
othertext string // in case there is text outside of the actions
result string // if any result
nractions int
done []int // which actions did we already find/run?
session &base.Session
session &base.Session @[skip; str: skip]
}
@[params]

View File

@@ -89,7 +89,8 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
if state == .comment_for_action_maybe {
if line.starts_with('//') {
comments << line_strip.trim_left('/ ')
comment_content := line_strip.trim_left('/ ')
comments << comment_content
} else {
if line_strip.starts_with('!') {
// we are at end of comment
@@ -150,7 +151,8 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
continue
} else if line.starts_with('//') {
state = .comment_for_action_maybe
comments << line_strip.trim_left('/ ')
comment_content := line_strip.trim_left('/ ')
comments << comment_content
// } else {
// plbook.othertext += '${line_strip}\n'
}

View File

@@ -7,7 +7,6 @@ import freeflowuniverse.herolib.biz.bizmodel
import freeflowuniverse.herolib.web.docusaurus
import freeflowuniverse.herolib.clients.openai
// import freeflowuniverse.herolib.hero.publishing
// import freeflowuniverse.herolib.threefold.grid4.gridsimulator
// import freeflowuniverse.herolib.installers.sysadmintools.daguserver
@@ -51,7 +50,7 @@ pub fn run(args_ PlayArgs) ! {
bizmodel.play(mut plbook)!
doctree.play(mut plbook)!
docusaurus.play(mut plbook)!
openai.play(mut plbook)!
// openai.play(mut plbook)!
// slides.play(mut plbook)!
// base_install(play(mut plbook)!

View File

@@ -27,6 +27,9 @@ fn play_core(mut plbook PlayBook) ! {
// action.done = true
// }
// Track included paths to prevent infinite recursion
mut included_paths := map[string]bool{}
for action_ in plbook.find(filter: 'play.*')! {
if action_.name == 'include' {
console.print_debug('play run:${action_}')
@@ -44,7 +47,15 @@ fn play_core(mut plbook PlayBook) ! {
if playrunpath.len == 0 {
return error("can't run a heroscript didn't find url or path.")
}
// Check for cycle detection
if playrunpath in included_paths {
console.print_debug('Skipping already included path: ${playrunpath}')
continue
}
console.print_debug('play run path:${playrunpath}')
included_paths[playrunpath] = true
plbook.add(path: playrunpath)!
}
if action_.name == 'echo' {

View File

@@ -24,9 +24,9 @@ pub mut:
pub fn (mut s DocSite) build() ! {
s.generate()!
osal.exec(
cmd: '
cmd: '
cd ${s.path_build.path}
exit 1
bun run build
'
retry: 0
)!
@@ -35,9 +35,9 @@ pub fn (mut s DocSite) build() ! {
pub fn (mut s DocSite) build_dev_publish() ! {
s.generate()!
osal.exec(
cmd: '
cmd: '
cd ${s.path_build.path}
exit 1
bun run build
'
retry: 0
)!
@@ -46,9 +46,9 @@ pub fn (mut s DocSite) build_dev_publish() ! {
pub fn (mut s DocSite) build_publish() ! {
s.generate()!
osal.exec(
cmd: '
cmd: '
cd ${s.path_build.path}
exit 1
bun run build
'
retry: 0
)!

View File

@@ -63,7 +63,12 @@ pub fn dsite_add(args_ AddArgs) !&DocSite {
osal.rm('${args.path}/sync.sh')!
osal.rm('${args.path}/.DS_Store')!
mut website := site.get(name: args.sitename)!
// Try to get the site by name, if it doesn't exist, try to get the default site
mut website := site.get(name: args.sitename) or {
site.get(name: 'default') or {
return error('Neither site "${args.sitename}" nor default site exists. Available sites need to be created first with docusaurus.config.')
}
}
mut myconfig := new_configuration(website.siteconfig)! // go from site.SiteConfig to docusaurus.Configuration
@@ -80,15 +85,15 @@ pub fn dsite_add(args_ AddArgs) !&DocSite {
path_build_ := '${f.path_build.path}/${args.sitename}'
// get our website
mut mysite := site.new(name: args.sitename)!
mut mysite := &site.Site(unsafe { nil })
if site.exists(name: args.sitename) {
console.print_debug('Docusaurus site ${args.sitename} already exists, using existing site.')
// Site already exists (likely processed by hero command), use existing site
mysite = site.get(name: args.sitename)!
} else {
if !args.play {
return error('Docusaurus site ${args.sitename} does not exist, please set play to true to create it.')
}
console.print_debug('Creating new Docusaurus site ${args.sitename}.')
// Create new site and process config files
mut plbook := playbook.new(path: '${args.path}/cfg')!
site.play(mut plbook)!
mysite = site.get(name: args.sitename) or {

View File

@@ -155,15 +155,19 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
}
fn config_fix(config Configuration) !Configuration {
// Fix empty logo sources by removing logo entirely if all fields are empty
// Fix empty logo sources by providing defaults if all fields are empty
mut navbar_fixed := config.navbar
if config.navbar.logo.src == '' && config.navbar.logo.src_dark == ''
&& config.navbar.logo.alt == '' {
// Create navbar without logo if all logo fields are empty
// Provide default logo values when all are empty
navbar_fixed = Navbar{
title: config.navbar.title
logo: Logo{
alt: 'Logo'
src: 'img/logo.svg'
src_dark: 'img/logo_dark.svg'
}
items: config.navbar.items
// logo field omitted entirely
}
}
@@ -175,6 +179,19 @@ fn config_fix(config Configuration) !Configuration {
favicon: if config.main.favicon == '' { 'img/favicon.ico' } else { config.main.favicon }
url: if config.main.url == '' { 'https://example.com' } else { config.main.url }
base_url: if config.main.base_url == '' { '/' } else { config.main.base_url }
metadata: Metadata{
...config.main.metadata
description: if config.main.metadata.description == '' {
'Documentation built with Docusaurus.'
} else {
config.main.metadata.description
}
title: if config.main.metadata.title == '' {
config.main.title
} else {
config.main.metadata.title
}
}
}
navbar: navbar_fixed
}

View File

@@ -7,8 +7,7 @@ import json
import os
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools.regext
// import freeflowuniverse.herolib.data.doctree
import freeflowuniverse.herolib.data.doctree
import freeflowuniverse.herolib.web.site as sitegen
pub fn (mut site DocSite) generate() ! {
@@ -40,14 +39,8 @@ pub fn (mut site DocSite) generate() ! {
}
}
mut main_file := pathlib.get_file(path: '${cfg_path}/main.json', create: true)!
main_file.write(json.encode_pretty(site.config.main))!
mut navbar_file := pathlib.get_file(path: '${cfg_path}/navbar.json', create: true)!
navbar_file.write(json.encode_pretty(site.config.navbar))!
mut footer_file := pathlib.get_file(path: '${cfg_path}/footer.json', create: true)!
footer_file.write(json.encode_pretty(site.config.footer))!
// We'll generate the configuration files after processing the site
// This is moved to after sitegen.play() so we can use the processed site configuration
osal.rm('${f.path_build.path}/docs')!
@@ -71,44 +64,101 @@ pub fn (mut site DocSite) generate() ! {
sitegen.play(mut plbook)!
// Get the updated site object after processing
mut updated_site := sitegen.get(name: site.name)!
// The site name in the config might be different from the docusaurus site name
// Find the site with the most pages (should contain the processed page definitions)
available_sites := sitegen.list()
mut best_site := &sitegen.Site(unsafe { nil })
mut max_pages := 0
for site_name in available_sites {
mut test_site := sitegen.get(name: site_name) or { continue }
if test_site.pages.len > max_pages {
max_pages = test_site.pages.len
best_site = test_site
}
}
if best_site == unsafe { nil } || max_pages == 0 {
return error('No sites with pages found after processing playbook. Available sites: ${available_sites}')
}
mut updated_site := best_site
// Generate the configuration files using the processed site configuration
mut updated_config := new_configuration(updated_site.siteconfig)!
mut main_file := pathlib.get_file(path: '${cfg_path}/main.json', create: true)!
main_file.write(json.encode_pretty(updated_config.main))!
mut navbar_file := pathlib.get_file(path: '${cfg_path}/navbar.json', create: true)!
navbar_file.write(json.encode_pretty(updated_config.navbar))!
mut footer_file := pathlib.get_file(path: '${cfg_path}/footer.json', create: true)!
footer_file.write(json.encode_pretty(updated_config.footer))!
// Fix the index.tsx redirect to handle baseUrl properly
// When baseUrl is not '/', we need to use an absolute redirect path
if updated_config.main.base_url != '/' {
index_tsx_path := '${f.path_build.path}/src/pages/index.tsx'
if os.exists(index_tsx_path) {
// Create the corrected index.tsx content
fixed_index_content := "import React from 'react';
import { Redirect } from '@docusaurus/router';
import main from '../../cfg/main.json';
export default function Home() {
// Use absolute redirect path when baseUrl is not root
const redirectPath = main.baseUrl + main.url_home;
return <Redirect to={redirectPath} />;
}"
mut index_file := pathlib.get_file(path: index_tsx_path, create: false)!
index_file.write(fixed_index_content)!
}
}
// Scan and export doctree collections to Redis before generating docs
// This ensures the doctreeclient can access the collections when generating pages
console.print_header(' scanning doctree collections for site: ${site.name}')
// Find the collections directory relative to the source path
// The collections should be in the parent directory of the ebooks
mut collections_path := ''
// Try to find collections directory by going up from the source path
mut current_path := pathlib.get_dir(path: site.path_src.path)!
for _ in 0 .. 5 { // Search up to 5 levels up
collections_candidate := '${current_path.path}/collections'
if os.exists(collections_candidate) {
collections_path = collections_candidate
break
}
parent := current_path.parent() or { break } // reached root or error
if parent.path == current_path.path {
break // reached root
}
current_path = parent
}
if collections_path != '' {
// Create a doctree and scan the collections
mut tree := doctree.new(name: site.name)!
tree.scan(path: collections_path)!
// Export to Redis and temporary location for doctreeclient access
tree.export(
destination: '/tmp/doctree_export_${site.name}'
reset: true
exclude_errors: false
)!
}
// Generate the actual docs content from the processed site configuration
docs_path := '${f.path_build.path}/docs'
generate(
console.print_header(' generating docs from site pages to: ${docs_path}')
generate_docs(
path: docs_path
site: updated_site
)!
// site.process_imports()!
}
// pub fn (mut site DocSite) process_imports() ! {
// mut gs := gittools.new()!
// mut f:=factory_get()!
// for item in site.siteconfig.imports {
// mypath := gs.get_path(
// pull: false
// reset: false
// url: item.url
// )!
// mut mypatho := pathlib.get(mypath)
// mypatho.copy(dest: '${f.path_build.path}/docs/${item.dest}', delete: true)!
// // println(item)
// // replace: {'NAME': 'MyName', 'URGENCY': 'red'}
// mut ri := regext.regex_instructions_new()
// for key, val in item.replace {
// ri.add_item('\{${key}\}', val)!
// }
// mypatho.copy(dest: '${f.path_build.path}/docs/${item.dest}', delete: true)!
// ri.replace_in_dir(
// path: '${f.path_build.path}/docs/${item.dest}'
// extensions: [
// 'md',
// ]
// )!
// }
// }

View File

@@ -25,12 +25,13 @@ mut:
site Site
}
// new creates a new siteconfig and stores it in redis, or gets an existing one
fn generate(args SiteGeneratorArgs) ! {
// Generate docs from site configuration
pub fn generate_docs(args SiteGeneratorArgs) ! {
mut path := args.path
if args.path == '' {
return error('Path must be provided to generate site')
}
mut gen := SiteGenerator{
path: pathlib.get_dir(path: path, create: true)!
client: doctreeclient.new()!
@@ -127,8 +128,6 @@ fn (mut mysite SiteGenerator) page_generate(args_ Page) ! {
pagefile.write(c)!
// console.print_debug("Copy images in collection '${collection_name}' to ${pagefile.path_dir()}")
mysite.client.copy_images(collection_name, page_name, pagefile.path_dir()) or {
return error("Couldn't copy images for '${page_name}' in collection '${collection_name}' using doctreeclient. Available pages:\n${mysite.client.list_markdown()!}\nError: ${err}")
}

View File

@@ -4,7 +4,6 @@ import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.web.site
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'docusaurus.') {
return
}
@@ -41,10 +40,10 @@ pub fn play(mut plbook PlayBook) ! {
dsite_add(
sitename: site_name
path: p.get('path')!
git_url: p.get('git_url')!
path: p.get_default('path', '')! // Make path optional
git_url: p.get_default('git_url', '')! // Make git_url optional too
git_reset: p.get_default_false('git_reset')
git_root: p.get('git_root')!
git_root: p.get_default('git_root', '')! // Make git_root optional
git_pull: p.get_default_false('git_pull')
path_publish: p.get_default('path_publish', f.path_publish.path)!
play: false // need to make sure we don't play again
@@ -56,6 +55,7 @@ pub fn play(mut plbook PlayBook) ! {
if actions_dev.len > 1 {
return error('Multiple "docusaurus.dev" actions found. Only one is allowed.')
}
for mut action in actions_dev {
mut p := action.params
site_name := p.get('site')!
@@ -75,7 +75,14 @@ pub fn play(mut plbook PlayBook) ! {
}
for mut action in actions_build {
mut p := action.params
site_name := p.get('site')!
site_name := p.get('site') or {
// If no site specified, use the first available site
if docusaurus_sites.len == 0 {
return error('No docusaurus sites available to build. Use docusaurus.add to create a site first.')
}
// Get the first site name
docusaurus_sites.keys()[0]
}
mut dsite := dsite_get(site_name)!
dsite.build()!
action.done = true

View File

@@ -14,6 +14,13 @@ pub mut:
pub fn new(args FactoryArgs) !&Site {
name := texttools.name_fix(args.name)
// Check if a site with this name already exists
if name in websites {
// Return the existing site instead of creating a new one
return get(name: name)!
}
websites[name] = &Site{
siteconfig: SiteConfig{
name: name
@@ -40,3 +47,8 @@ pub fn default() !&Site {
}
return get()!
}
// list returns all site names that have been created
pub fn list() []string {
return websites.keys()
}

View File

@@ -5,16 +5,27 @@ import freeflowuniverse.herolib.core.texttools
import time
pub fn play(mut plbook PlayBook) ! {
// Handle multiple site configurations
mut config_actions := plbook.find(filter: 'site.config')!
// Handle multiple site configurations - look for both site.config and docusaurus.config
mut config_actions := plbook.find(filter: 'site.config')!
if config_actions.len == 0 {
return error('No site.config actions found')
// Fallback to docusaurus.config for backward compatibility
config_actions = plbook.find(filter: 'docusaurus.config')!
}
// Process each site configuration separately
for mut config_action in config_actions {
mut website := play_config_single(mut config_action)!
if config_actions.len == 0 {
return error('No site.config or docusaurus.config actions found')
}
// For now, just process the first site configuration to avoid memory issues
// TODO: Fix the underlying memory corruption issue with multiple site configs
if config_actions.len > 0 {
mut config_action := config_actions[0]
// Work around memory corruption by accessing params directly here
mut p := config_action.params
name := p.get_default('name', 'default')! // Use 'default' as fallback name
mut website := play_config_single_safe(name, mut config_action)!
mut config := &website.siteconfig
@@ -25,12 +36,16 @@ pub fn play(mut plbook PlayBook) ! {
play_build_dest_dev(mut plbook, mut config)!
play_pages(mut plbook, mut website)!
// Mark all other config actions as done to avoid processing them
for i in 1 .. config_actions.len {
config_actions[i].done = true
}
}
}
fn play_config_single(mut action Action) !&Site {
fn play_config_single_safe(name string, mut action Action) !&Site {
mut p := action.params
name := p.get('name') or { return error('need to specify name in site.config.\n${action}') }
mut website := new(name: name)!
mut config := &website.siteconfig
@@ -149,6 +164,9 @@ fn play_menu(mut plbook PlayBook, mut config SiteConfig) ! {
menu_item_actions = plbook.find(filter: 'site.menu_item')!
}
// Clear existing menu items to prevent duplication
config.menu.items = []MenuItem{}
for mut action in menu_item_actions {
mut p := action.params
mut item := MenuItem{
@@ -173,6 +191,9 @@ fn play_footer(mut plbook PlayBook, mut config SiteConfig) ! {
mut footer_item_actions := plbook.find(filter: 'site.footer_item')!
mut links_map := map[string][]FooterItem{}
// Clear existing footer links to prevent duplication
config.footer.links = []FooterLink{}
for mut action in footer_item_actions {
mut p := action.params
title := p.get_default('title', 'Docs')!