Merge pull request #221 from Incubaid/dev_docusaurus
Docusaurus Landing Page Slug Handling & Documentation Updates
@@ -2,13 +2,38 @@
This manual provides a comprehensive guide on how to leverage HeroLib's Docusaurus integration, Doctree, and HeroScript to create and manage technical ebooks, optimized for AI-driven content generation and project management.

## Quick Start - Recommended Ebook Structure

The recommended directory structure for an ebook:

```
my_ebook/
├── scan.hero              # Atlas collection scanning
├── config.hero            # Site configuration
├── menus.hero             # Navbar and footer configuration
├── include.hero           # Docusaurus define and atlas export
├── 1_intro.heroscript     # Page definitions (numbered for ordering)
├── 2_concepts.heroscript  # More page definitions
└── 3_advanced.heroscript  # Additional pages
```

**Running an ebook:**

```bash
# Start development server
hero docs -d -p /path/to/my_ebook

# Build for production
hero docs -p /path/to/my_ebook
```

## 1. Core Concepts

To effectively create ebooks with HeroLib, it's crucial to understand the interplay of three core components:

* **HeroScript**: A concise scripting language used to define the structure, configuration, and content flow of your Docusaurus site. It acts as the declarative interface for the entire process.
* **HeroScript**: A concise scripting language used to define the structure, configuration, and content flow of your Docusaurus site. It acts as the declarative interface for the entire process. Files use the `.hero` extension for configuration and `.heroscript` for page definitions.
* **Docusaurus**: A popular open-source static site generator. HeroLib uses Docusaurus as the underlying framework to render your ebook content into a navigable website.
* **Atlas (and Doctree)**: HeroLib's document collection layer. In the current pipeline, Atlas exports markdown "collections" and "pages" that Docusaurus consumes via the Atlas client. Doctree and `doctreeclient` are legacy/alternative ways to provide the same collections.
* **Atlas**: HeroLib's document collection layer. Atlas scans and exports markdown "collections" and "pages" that Docusaurus consumes.
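
To make the relationship concrete, here is a minimal, hypothetical sketch (collection path and names are placeholders) of how the three layers meet in a single HeroScript flow; each action is covered in detail in the sections below.

```heroscript
// Hypothetical end-to-end sketch (paths and names are placeholders):
// Atlas scans a collection, the site module declares config and pages,
// and Docusaurus renders the result.
!!atlas.scan path:"../collections/my_collection"

!!site.config name:"my_ebook" title:"My Ebook"

!!site.page src:"my_collection:intro" title:"Introduction"

!!docusaurus.define name:'my_ebook'
```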
## 2. Setting Up a Docusaurus Project with HeroLib

@@ -246,32 +271,36 @@ This is where you define the actual content pages and how they are organized int

```heroscript
// Define a category
!!site.page_category path:'introduction' label:"Introduction to Ebook" position:10
!!site.page_category name:'introduction' label:"Introduction to Ebook"

// Define a page within that category, linking to Doctree content
!!site.page path:'introduction' src:"my_doctree_collection:chapter_1_overview"
// Define pages - first page specifies collection, subsequent pages reuse it
!!site.page src:"my_collection:chapter_1_overview"
    title:"Chapter 1: Overview"
    description:"A brief introduction to the ebook's content."
    position:1      // Order within the category
    hide_title:true // Hide the title on the page itself

!!site.page src:"chapter_2_basics"
    title:"Chapter 2: Basics"

// New category with new collection
!!site.page_category name:'advanced' label:"Advanced Topics"

!!site.page src:"advanced_collection:performance"
    title:"Performance Tuning"
    hide_title:true
```

**Arguments:**

* **`site.page_category`**:
  * `path` (string, required): The path to the category directory within your Docusaurus `docs` folder (e.g., `introduction` will create `docs/introduction/_category_.json`).
  * `name` (string, required): Category identifier (used internally).
  * `label` (string, required): The display name for the category in the sidebar.
  * `position` (int, optional): The order of the category in the sidebar.
  * `sitename` (string, optional): If you have multiple Docusaurus sites defined, specify which site this category belongs to. Defaults to the current site's name.
  * `position` (int, optional): The order of the category in the sidebar (auto-incremented if omitted).
* **`site.page`**:
  * `src` (string, required): **Crucial for Doctree integration.** This specifies the source of the page content in the format `collection_name:page_name`. HeroLib will fetch the markdown content from the specified Doctree collection and page.
  * `path` (string, required): The relative path and filename for the generated markdown file within your Docusaurus `docs` folder (e.g., `introduction/chapter_1.md`). If only a directory is provided (e.g., `introduction/`), the `page_name` from `src` will be used as the filename.
  * `title` (string, optional): The title of the page. If not provided, HeroLib will attempt to extract it from the markdown content or use the `page_name`.
  * `src` (string, required): **Crucial for Atlas/collection integration.** Format: `collection_name:page_name` for the first page, or just `page_name` to reuse the previous collection.
  * `title` (string, optional): The title of the page. If not provided, HeroLib extracts it from the markdown `# Heading` or uses the page name.
  * `description` (string, optional): A short description for the page, used in frontmatter.
  * `position` (int, optional): The order of the page within its category.
  * `hide_title` (boolean, optional): If `true`, the title will not be displayed on the page itself.
  * `draft` (boolean, optional): If `true`, the page will be marked as a draft and not included in production builds.
  * `title_nr` (int, optional): If set, HeroLib will re-number the markdown headings (e.g., `title_nr:3` will make `# Heading` become `### Heading`). Useful for consistent heading levels across imported content (see the short example after this list).
  * `draft` (boolean, optional): If `true`, the page will be hidden from navigation.
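
A short, hypothetical example combining the optional arguments above (collection and page names are placeholders):

```heroscript
// Hypothetical example: imported appendix content is demoted two heading levels,
// placed last in its category, and hidden from navigation.
!!site.page src:"my_collection:appendix_notes"
    title:"Appendix: Notes"
    position:99
    title_nr:2
    draft:true
```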
### 3.7. Collections and Atlas/Doctree Integration

@@ -10,6 +10,7 @@ fp.version('v0.1.0')
fp.description('Compile hero binary in debug or production mode')
fp.skip_executable()

prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
help_requested := fp.bool('help', `h`, false, 'Show help message')

@@ -61,6 +62,8 @@ compile_cmd := if os.user_os() == 'macos' {
        'v -enable-globals -g -w -n -prod hero.v'
    } else {
        'v -n -g -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
        // 'v -n -g -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
        // 'v -cg -enable-globals -parallel-cc -w -n -d use_openssl hero.v'
    }
} else {
    if prod_mode {

@@ -13,7 +13,7 @@ import incubaid.herolib.installers.lang.python
import os

fn startupcmd() ![]startupmanager.ZProcessNewArgs {
    mut installer := get()!
    _ := get()!
    mut res := []startupmanager.ZProcessNewArgs{}
    // THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
    // res << startupmanager.ZProcessNewArgs{

@@ -28,7 +28,7 @@ fn startupcmd() ![]startupmanager.ZProcessNewArgs {
}

fn running() !bool {
    mut installer := get()!
    _ := get()!
    // THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
    // this checks health of erpnext
    // curl http://localhost:3333/api/v1/s --oauth2-bearer 1234 works

@@ -16,7 +16,7 @@ pub mut:

pub fn (b BizModel) export(args ExportArgs) ! {
    name := if args.name != '' { args.name } else { texttools.snake_case(args.title) }
    path := pathlib.get_dir(
    pathlib.get_dir(
        path: os.join_path(os.home_dir(), 'hero/var/bizmodel/exports/${name}')
        create: true
        empty: true

@@ -52,7 +52,7 @@ pub fn (model BizModel) write_operational_plan(args ExportArgs) ! {
    mut hr_page := pathlib.get_file(path: '${hr_dir.path}/human_resources.md')!
    hr_page.template_write($tmpl('./templates/human_resources.md'), true)!

    for key, employee in model.employees {
    for _, employee in model.employees {
        mut employee_page := pathlib.get_file(
            path: '${hr_dir.path}/${texttools.snake_case(employee.name)}.md'
        )!

@@ -73,7 +73,7 @@ pub fn (model BizModel) write_operational_plan(args ExportArgs) ! {
        }
    }

    for key, department in model.departments {
    for _, department in model.departments {
        dept := department
        mut dept_page := pathlib.get_file(
            path: '${depts_dir.path}/${texttools.snake_case(department.name)}.md'

@@ -94,7 +94,7 @@ pub fn (model BizModel) write_revenue_model(args ExportArgs) ! {
    products_page.template_write('# Products', true)!

    name1 := 'example'
    for key, product in model.products {
    for _, product in model.products {
        mut product_page := pathlib.get_file(
            path: '${products_dir.path}/${texttools.snake_case(product.name)}.md'
        )!

@@ -7,7 +7,7 @@ import incubaid.herolib.core.pathlib
pub struct ExportCSVArgs {
pub mut:
    path string
    include_empty bool = false // whether to include empty cells or not
    include_empty bool // whether to include empty cells or not
    separator string = '|' // separator character for CSV
}
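
The dropped `= false` initializer is redundant because V zero-initializes struct fields; a minimal standalone sketch illustrating this (names are illustrative only, not from the repository):

```v
// Standalone sketch with illustrative names: V zero-initializes struct fields,
// so a `bool` field is false unless set, which makes `= false` redundant.
struct ExportFlags {
pub mut:
	include_empty bool // defaults to false
	separator     string = '|' // non-zero defaults still need an explicit initializer
}

fn main() {
	f := ExportFlags{}
	assert f.include_empty == false
	assert f.separator == '|'
	println(f)
}
```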

@@ -22,7 +22,7 @@ pub fn play(mut plbook PlayBook) ! {
    })

    // play actions for each biz in plbook
    for biz, actions in actions_by_biz {
    for biz, _ in actions_by_biz {
        mut model := getset(biz)!
        model.play(mut plbook)!
    }

@@ -8,7 +8,7 @@ import incubaid.herolib.core.playbook { Action }
// title:'Engineering Division'
// avg_monthly_cost:'6000USD' avg_indexation:'5%'
fn (mut m BizModel) department_define_action(action Action) !Action {
    bizname := action.params.get_default('bizname', '')!
    _ := action.params.get_default('bizname', '')!
    mut name := action.params.get('name') or { return error('department name is required') }
    mut descr := action.params.get_default('descr', '')!
    if descr.len == 0 {

@@ -74,7 +74,7 @@ fn (mut m BizModel) employee_define_action(action Action) !Action {
    mut curcost := -costpeople_row.cells[x].val
    mut curpeople := nrpeople_row.cells[x].val
    mut currev := revtotal.cells[x].val
    // println("currev: ${currev}, curcost: ${curcost}, curpeople: ${curpeople}, costpercent_revenue: ${cost_percent_revenue}")
    println("currev: ${currev}, curcost: ${curcost}, curpeople: ${curpeople}, costpercent_revenue: ${cost_percent_revenue}")
    if currev * cost_percent_revenue > curcost {
        costpeople_row.cells[x].val = -currev * cost_percent_revenue
        nrpeople_row.cells[x].val = f64(currev * cost_percent_revenue / costperson_default.usd())

@@ -10,7 +10,7 @@ fn (mut sim BizModel) pl_total() ! {

    // sheet.pprint(nr_columns: 10)!

    mut pl_total := sheet.group2row(
    _ := sheet.group2row(
        name: 'pl_summary'
        include: ['pl']
        tags: 'summary'

@@ -77,7 +77,7 @@ fn (mut m BizModel) revenue_action(action Action) !Action {
        product.has_revenue = true
    }

    mut margin := revenue.action(
    _ := revenue.action(
        name: '${r.name}_margin'
        descr: 'Margin for ${r.name}'
        action: .substract

@@ -6,7 +6,7 @@ import incubaid.herolib.core.texttools
// see lib/biz/bizmodel/docs/revenue.md
fn (mut m BizModel) revenue_item_action(action Action) !Action {
    mut r := get_action_descr(action)!
    mut product := m.products[r.name]
    mut product := m.products[r.name] or { return error('Product "${r.name}" not found for revenue item action') }
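
The added `or` block guards against a missing map key; a minimal V sketch of the idiom with hypothetical data:

```v
// Minimal sketch of the `or` idiom used above (hypothetical data):
// indexing a map can miss, so handle the absent key explicitly
// instead of silently getting the zero value.
fn main() {
	products := {
		'vm': 'Virtual Machine'
	}
	name := 'storage'
	product := products[name] or { 'unknown product: ${name}' }
	println(product) // unknown product: storage
}
```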

    mut nr_sold := m.sheet.row_new(
        name: '${r.name}_nr_sold'

@@ -193,7 +193,7 @@ fn (mut m BizModel) revenue_item_action(action Action) !Action {
        tags: 'name:${r.name}'
    )!

    mut margin := margin_setup.action(
    _ := margin_setup.action(
        name: '${r.name}_margin'
        descr: 'Margin for ${r.name}'
        action: .add

@@ -6,19 +6,19 @@ import incubaid.herolib.core.playbook
fn (mut sim BizModel) revenue_total() ! {
    mut sheet := sim.sheet

    mut revenue_total := sheet.group2row(
    _ := sheet.group2row(
        name: 'revenue_total'
        include: ['rev']
        tags: 'total revtotal pl'
        descr: 'Revenue Total'
    )!
    mut cogs_total := sheet.group2row(
    _ := sheet.group2row(
        name: 'cogs_total'
        include: ['cogs']
        tags: 'total cogstotal pl'
        descr: 'Cost of Goods Total.'
    )!
    mut margin_total := sheet.group2row(
    _ := sheet.group2row(
        name: 'margin_total'
        include: ['margin']
        tags: 'total margintotal'

@@ -7,7 +7,7 @@ import incubaid.herolib.core.pathlib
pub struct ExportCSVArgs {
pub mut:
    path string
    include_empty bool = false // whether to include empty cells or not
    include_empty bool // whether to include empty cells or not
    separator string = '|' // separator character for CSV
}

@@ -118,23 +118,23 @@ pub fn (s Sheet) data_get_as_string(args RowGetArgs) !string {
    }
    nryears := 5
    err_pre := "Can't get data for sheet:${s.name} row:${args.rowname}.\n"
    mut s2 := s
    mut s2 := s

    if args.period_type == .year {
        s2 = s.toyear(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
        s2 = *s.toyear(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
    }
    if args.period_type == .quarter {
        s2 = s.toquarter(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
        s2 = *s.toquarter(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
    }
    mut out := ''

@@ -20,7 +20,7 @@ fn pad_right(s string, length int) string {
pub struct PPrintArgs {
pub mut:
    group_months int = 1 // e.g. if 2 then will group by 2 months
    nr_columns int = 0 // number of columns to show in the table, 0 is all
    nr_columns int // number of columns to show in the table, 0 is all
    description bool // show description in the table
    aggrtype bool = true // show aggregate type in the table
    tags bool = true // show tags in the table

@@ -151,7 +151,7 @@ pub fn (mut s Sheet) pprint(args PPrintArgs) ! {
    }
    max_cols := data_start_index + args.nr_columns
    mut new_all_rows := [][]string{}
    for i, row in all_rows {
    for _, row in all_rows {
        if row.len > max_cols {
            new_all_rows << row[0..max_cols]
        } else {

@@ -228,7 +228,7 @@ pub fn (mut client MeilisearchClient) similar_documents(uid string, args Similar
        method: .post
        data: json.encode(args)
    }
    res := client.enable_eperimental_feature(vector_store: true)! // Enable the feature first.
    client.enable_eperimental_feature(vector_store: true)! // Enable the feature first.
    mut http := client.httpclient()!
    rsponse := http.post_json_str(req)!
    println('rsponse: ${rsponse}')

@@ -19,7 +19,7 @@ pub mut:
    user string = 'root'
    port int = 5432
    host string = 'localhost'
    password string = ''
    password string
    dbname string = 'postgres'
}

@@ -52,8 +52,7 @@ pub fn heroscript_dumps(obj PostgresqlClient) !string {
}

pub fn heroscript_loads(heroscript string) !PostgresqlClient {
    mut obj := encoderhero.decode[PostgresqlClient](heroscript)!
    return PostgresqlClient{
        db_: pg.DB{}
    }
    mut client := encoderhero.decode[PostgresqlClient](heroscript)!
    client.db_ = pg.DB{}
    return client
}

@@ -114,5 +114,5 @@ fn (q QueryBuilder) build_query(args BuildQueryArgs) string {

fn type_to_map[T](t T) !map[string]json2.Any {
    encoded_input := json2.encode(t)
    return json2.raw_decode(encoded_input)!.as_map()
    return json2.decode[json2.Any](encoded_input)!.as_map()
}
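
Mirroring the replacement call above, a small standalone V sketch (assuming the standard `x.json2` module) of decoding arbitrary JSON into `json2.Any` and viewing it as a map:

```v
// Small sketch mirroring the change above: decode into the dynamic
// json2.Any type, then inspect it as a map.
import x.json2

fn main() {
	raw := '{"name":"hero","port":3000}'
	data := json2.decode[json2.Any](raw) or { panic(err) }
	m := data.as_map()
	name := m['name'] or { json2.Any('unknown') }
	println(name)
}
```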

@@ -114,7 +114,7 @@ fn cmd_docusaurus_execute(cmd Command) ! {
    // ---------- FLAGS ----------
    mut open_ := cmd.flags.get_bool('open') or { false }
    mut buildpublish := cmd.flags.get_bool('buildpublish') or { false }
    mut builddevpublish := cmd.flags.get_bool('builddevpublish') or { false }
    _ := cmd.flags.get_bool('builddevpublish') or { false }
    mut dev := cmd.flags.get_bool('dev') or { false }
    mut reset := cmd.flags.get_bool('reset') or { false }
    mut update := cmd.flags.get_bool('update') or { false }

@@ -217,7 +217,7 @@ fn cmd_git_execute(cmd Command) ! {
    mut gs := gittools.new(coderoot: coderoot)!

    // create the filter for doing group actions, or action on 1 repo
    mut filter := ''
    _ := ''
    mut url := ''
    mut path := ''

@@ -164,7 +164,7 @@ pub fn plbook_run(cmd Command) !(&playbook.PlayBook, string) {
        playbook.new(path: path)!
    }

    dagu := cmd.flags.get_bool('dagu') or { false }
    _ := cmd.flags.get_bool('dagu') or { false }

    playcmds.run(plbook: plbook)!

@@ -11,7 +11,7 @@ pub fn play_ssh(mut plbook PlayBook) ! {
    }

    // Get or create a single SSH agent instance
    mut agent := sshagent.new_single(sshagent.SSHAgentNewArgs{})!
    _ := sshagent.new_single(sshagent.SSHAgentNewArgs{})!

    // TO IMPLEMENT:

@@ -23,7 +23,7 @@ pub fn escape_regex_chars(s string) string {
// This function does not add implicit ^ and $ anchors, allowing for substring matches.
fn wildcard_to_regex(wildcard_pattern string) string {
    mut regex_pattern := ''
    for i, r in wildcard_pattern.runes() {
    for _, r in wildcard_pattern.runes() {
        match r {
            `*` {
                regex_pattern += '.*'

@@ -38,7 +38,7 @@ pub fn set_titles(page string, maxnr int) string {
    for line in lines {
        mut hash_count := 0
        mut first_char_idx := 0
        for char_idx, r in line.runes() {
        for _, r in line.runes() {
            if r == ` ` {
                first_char_idx++
                continue

@@ -89,7 +89,7 @@ pub fn set_titles(page string, maxnr int) string {
        // Remove existing numbering (e.g., "1. ", "1.1. ")
        mut skip_chars := 0
        mut in_numbering := true
        for r_idx, r in original_title_text.runes() {
        for _, r in original_title_text.runes() {
            if in_numbering {
                if (r >= `0` && r <= `9`) || r == `.` || r == ` ` {
                    skip_chars++

@@ -17,7 +17,7 @@ pub mut:
    apikey string
    apisecret string @[secret]
    configpath string
    nr int = 0 // each specific instance onto this server needs to have a unique nr
    nr int // each specific instance onto this server needs to have a unique nr
}

fn obj_init(obj_ LivekitServer) !LivekitServer {

@@ -11,7 +11,7 @@ import incubaid.herolib.installers.virt.qemu
import os

fn startupcmd() ![]startupmanager.ZProcessNewArgs {
    mut installer := get()!
    _ := get()!
    mut res := []startupmanager.ZProcessNewArgs{}
    // THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
    // res << startupmanager.ZProcessNewArgs{

@@ -26,7 +26,7 @@ fn startupcmd() ![]startupmanager.ZProcessNewArgs {
}

fn running() !bool {
    mut installer := get()!
    _ := get()!
    // THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
    // this checks health of lima
    // curl http://localhost:3333/api/v1/s --oauth2-bearer 1234 works

@@ -147,7 +147,7 @@ fn destroy() ! {
    lima
    limactl
    ${os.home_dir()}/bin/*.lima
    ${os.home_dir()}/bin/*.lima
    ${os.home_dir()}/bin/*.lima
    ${os.home_dir()}/share/doc/lima
    ${os.home_dir()}/share/lima
    ${os.home_dir()}/share/man/lima*
@@ -9,7 +9,7 @@ import incubaid.herolib.installers.ulist
import os

fn startupcmd() ![]startupmanager.ZProcessNewArgs {
    mut installer := get()!
    _ := get()!
    mut res := []startupmanager.ZProcessNewArgs{}
    res << startupmanager.ZProcessNewArgs{
        name: 'traefik'

@@ -50,7 +50,7 @@ pub fn (mut self SystemdProcess) start() ! {
    systemctl start ${self.name}
    '

    job := osal.exec(cmd: cmd, stdout: false)!
    osal.exec(cmd: cmd, stdout: false)!

    // Wait for service to start with timeout
    mut attempts := 0

@@ -251,7 +251,7 @@ pub fn new_response_generic[D](id int, result D) ResponseGeneric[D] {
// Returns:
// - A ResponseGeneric object with result of type D, or an error if parsing fails
pub fn decode_response_generic[D](data string) !ResponseGeneric[D] {
    raw := json2.raw_decode(data)!
    raw := json2.decode[json2.Any](data)!
    raw_map := raw.as_map()

    // Validate that the response contains either result or error, but not both or neither

@@ -28,7 +28,7 @@ pub fn (sim Simulation) generate_market_cap_chart() !echarts.EChartsOption {
        curr: sim.params.simulation.currency
    )!

    for name, scenario in sim.scenarios {
    for name, _ in sim.scenarios {
        mut mc_row := mc_sheet.row_new(
            name: 'scenario_${name}_mc'
            tags: 'scenario:${name} type:market_cap'

@@ -41,7 +41,7 @@ pub fn (mut sim Simulation) create_vesting_schedules() ! {
    }

    // Create total unlocked row
    mut total_row := vesting_sheet.group2row(
    _ := vesting_sheet.group2row(
        name: 'total_unlocked'
        include: ['type:vesting']
        tags: 'summary type:total_vesting'
@@ -2,100 +2,256 @@

This module allows you to build and manage Docusaurus websites using a generic configuration layer provided by `lib/web/site`.

### Workflow

1. **Configure Your Site**: Define your site's metadata, navigation, footer, pages, and content sources using `!!site.*` actions in a `.heroscript` file. This creates a generic site definition.
2. **Define Docusaurus Build**: Use `!!docusaurus.define` to specify build paths and other factory-level settings.
3. **Link Site to Docusaurus**: Use `!!docusaurus.add` to link your generic site configuration to the Docusaurus factory. This tells HeroLib to build this specific site using Docusaurus.
4. **Run Actions**: Use actions like `!!docusaurus.dev` or `!!docusaurus.build` to generate and serve your site.

### Hero Command (Recommended)

For quick setup and development, use the hero command:

```bash
# Start development server
hero docs -d -path /path/to/your/site
hero docs -d -p /path/to/your/ebook

# Build for production
hero docs -b -path /path/to/your/site
hero docs -p /path/to/your/ebook

# Build and publish
hero docs -bp -path /path/to/your/site
hero docs -bp -p /path/to/your/ebook
```

### Example HeroScript

---

## Ebook Directory Structure

The recommended structure for an ebook follows this pattern:

```
my_ebook/
├── scan.hero              # Atlas collection scanning
├── config.hero            # Site configuration
├── menus.hero             # Navbar and footer configuration
├── include.hero           # Docusaurus define and atlas export
├── 1_intro.heroscript     # Page definitions (numbered for ordering)
├── 2_concepts.heroscript  # More page definitions
└── 3_advanced.heroscript  # Additional pages
```

### File Descriptions

#### `scan.hero` - Scan Collections

Defines which collections to scan for content:

```heroscript
// Scan local collections
!!atlas.scan path:"../../collections/my_collection"

// Define the Docusaurus build environment, is optional
// Scan remote collections from git
!!atlas.scan git_url:"https://git.example.com/org/repo/src/branch/main/collections/docs"
```

#### `config.hero` - Site Configuration

Core site settings:

```heroscript
!!site.config
    name:"my_ebook"
    title:"My Awesome Ebook"
    tagline:"Documentation made easy"
    url:"https://docs.example.com"
    url_home:"docs/"
    base_url:"/my_ebook/"
    favicon:"img/favicon.png"
    copyright:"© 2024 My Organization"
    default_collection:"my_collection"

!!site.config_meta
    description:"Comprehensive documentation for my project"
    title:"My Ebook - Documentation"
    keywords:"docs, ebook, tutorial"
```

**Note:** When `url_home` ends with `/` (e.g., `docs/`), the first page in the sidebar automatically becomes the landing page. This means both `/docs/` and `/docs/intro` will work.

#### `menus.hero` - Navigation Configuration

```heroscript
!!site.navbar
    title:"My Ebook"

!!site.navbar_item
    label:"Documentation"
    to:"docs/"
    position:"left"

!!site.navbar_item
    label:"GitHub"
    href:"https://github.com/myorg/myrepo"
    position:"right"

!!site.footer
    style:"dark"

!!site.footer_item
    title:"Docs"
    label:"Getting Started"
    to:"docs/"

!!site.footer_item
    title:"Community"
    label:"GitHub"
    href:"https://github.com/myorg/myrepo"
```

#### `include.hero` - Docusaurus Setup

Links to shared configuration or defines docusaurus directly:

```heroscript
// Option 1: Include shared configuration with variable replacement
!!play.include path:'../../heroscriptall' replace:'SITENAME:my_ebook'

// Option 2: Define directly
!!docusaurus.define name:'my_ebook'

!!atlas.export include:true
```
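
A hypothetical sketch of what the shared `heroscriptall` file could contain; `SITENAME` is substituted at include time via `replace:'SITENAME:my_ebook'` (this file layout is an assumption, not part of the module):

```heroscript
// Hypothetical content of the shared file referenced by Option 1.
// 'SITENAME' is replaced with 'my_ebook' through replace:'SITENAME:my_ebook'.
!!docusaurus.define name:'SITENAME'

!!atlas.export include:true
```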

#### Page Definition Files (`*.heroscript`)

Define pages and categories:

```heroscript
// Define a category
!!site.page_category name:'getting_started' label:"Getting Started"

// Define pages (first page specifies collection, subsequent pages reuse it)
!!site.page src:"my_collection:intro"
    title:"Introduction"

!!site.page src:"installation"
    title:"Installation Guide"

!!site.page src:"configuration"
    title:"Configuration"

// New category
!!site.page_category name:'advanced' label:"Advanced Topics"

!!site.page src:"my_collection:performance"
    title:"Performance Tuning"
```

---

## Collections

Collections are directories containing markdown files. They're scanned by Atlas and referenced in page definitions.

```
collections/
├── my_collection/
│   ├── .collection          # Marker file (empty)
│   ├── intro.md
│   ├── installation.md
│   └── configuration.md
└── another_collection/
    ├── .collection
    └── overview.md
```

Pages reference collections using `collection:page` format:

```heroscript
!!site.page src:"my_collection:intro"          # Specifies collection
!!site.page src:"installation"                 # Reuses previous collection
!!site.page src:"another_collection:overview"  # Switches collection
```

---

## Legacy Configuration

The older approach using `!!docusaurus.add` is still supported but not recommended:

```heroscript
!!docusaurus.define
    path_build: "/tmp/docusaurus_build"
    path_publish: "/tmp/docusaurus_publish"
    reset: 1
    install: 1
    template_update: 1

!!docusaurus.add
    sitename:"my_site"
    path:"./path/to/my/site/source"
    path_publish: "/tmp/docusaurus_publish" //optional
    git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech" //optional: can use git to pull the site source
    git_root:"/tmp/code" //optional: where to clone git repo
    git_reset:1 //optional: reset git repo
    git_pull:1 //optional: pull latest changes
    play:true //required when using git_url: process heroscript files from source path

// Run the development server
!!docusaurus.dev site:"my_site" open:true watch_changes:true
    path:"./path/to/site"

!!docusaurus.dev site:"my_site" open:true
```
## see sites to define a site

---

The site needs to be defined following the generic site definition, see the `lib/web/site` module for more details.

## HeroScript Actions Reference

```heroscript
### `!!atlas.scan`

//Configure the site using the generic 'site' module
!!site.config
    name: "my_site"
    title: "My Awesome Docs"
    tagline: "The best docs ever"
    url: "https://docs.example.com"
    base_url: "/"
    copyright: "Example Corp"

Scans a directory for markdown collections:

!!site.menu_item
    label: "Homepage"
    href: "https://example.com"
    position: "right"

- `path` (string): Local path to scan
- `git_url` (string): Git URL to clone and scan
- `name` (string): Atlas instance name (default: `main`)
- `ignore` (list): Directory names to skip

// ... add footer, pages, etc. using !!site.* actions ...

### `!!atlas.export`

```

Exports scanned collections:

### Heroscript Actions

- `include` (bool): Include content in export (default: `true`)
- `destination` (string): Export directory

- `!!docusaurus.define`: Configures a Docusaurus factory instance.
  - `name` (string): Name of the factory (default: `default`).
  - `path_build` (string): Path to build the site.
  - `path_publish` (string): Path to publish the final build.
  - `reset` (bool): If `true`, clean the build directory before starting.
  - `template_update` (bool): If `true`, update the Docusaurus template.
  - `install` (bool): If `true`, run `bun install`.

### `!!docusaurus.define`

- `!!docusaurus.add`: Links a configured site to the Docusaurus factory.
  - `site` (string, required): The name of the site defined in `!!site.config`.
  - `path` (string, required): The local filesystem path to the site's source directory (e.g., for `static/` folder).

Configures the Docusaurus build environment:

- `!!docusaurus.dev`: Runs the Docusaurus development server.
  - `site` (string, required): The name of the site to run.
  - `host` (string): Host to bind to (default: `localhost`).
  - `port` (int): Port to use (default: `3000`).
  - `open` (bool): Open the site in a browser.
  - `watch_changes` (bool): Watch for source file changes and auto-reload.

- `name` (string, required): Site name (must match `!!site.config` name)
- `path_build` (string): Build directory path
- `path_publish` (string): Publish directory path
- `reset` (bool): Clean build directory before starting
- `template_update` (bool): Update Docusaurus template
- `install` (bool): Run `bun install`
- `atlas_dir` (string): Atlas export directory

- `!!docusaurus.build`: Builds the static site for production.
  - `site` (string, required): The name of the site to build.

### `!!site.config`

Core site configuration:

- `name` (string, required): Unique site identifier
- `title` (string): Site title
- `tagline` (string): Site tagline
- `url` (string): Full site URL
- `base_url` (string): Base URL path (e.g., `/my_ebook/`)
- `url_home` (string): Home page path (e.g., `docs/`)
- `default_collection` (string): Default collection for pages
- `favicon` (string): Favicon path
- `copyright` (string): Copyright notice

### `!!site.page`

Defines a documentation page:

- `src` (string, required): Source as `collection:page` or just `page` (reuses previous collection)
- `title` (string): Page title
- `description` (string): Page description
- `draft` (bool): Hide from navigation
- `hide_title` (bool): Don't show title on page

### `!!site.page_category`

Defines a sidebar category:

- `name` (string, required): Category identifier
- `label` (string): Display label
- `position` (int): Sort order
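
A short, hypothetical example combining the two actions documented above:

```heroscript
// Hypothetical example combining !!site.page_category and !!site.page:
!!site.page_category name:'guides' label:"Guides" position:20

!!site.page src:"my_collection:quickstart"
    title:"Quickstart"
    description:"Get going in five minutes"
```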

---

## See Also

- `lib/web/site` - Generic site configuration module
- `lib/data/atlas` - Atlas collection management
@@ -17,9 +17,7 @@ pub mut:
    reset bool
    template_update bool
    coderoot string
    // Client configuration
    use_atlas bool // true = atlas_client, false = doctreeclient
    atlas_dir string // Required when use_atlas = true
    atlas_dir string
}

@[params]

@@ -31,9 +29,7 @@ pub mut:
    reset bool
    template_update bool
    coderoot string
    // Client configuration
    use_atlas bool // true = atlas_client, false = doctreeclient
    atlas_dir string // Required when use_atlas = true
    atlas_dir string
}

// return the last known config

@@ -42,8 +38,8 @@ pub fn config() !DocusaurusConfig {
        docusaurus_config << DocusaurusConfigParams{}
    }
    mut args := docusaurus_config[0] or { panic('bug in docusaurus config') }
    if args.use_atlas && args.atlas_dir == '' {
        return error('use_atlas is true but atlas_dir is not set')
    if args.atlas_dir == '' {
        return error('atlas_dir is not set')
    }
    if args.path_build == '' {
        args.path_build = '${os.home_dir()}/hero/var/docusaurus/build'

@@ -62,7 +58,6 @@ pub fn config() !DocusaurusConfig {
        install: args.install
        reset: args.reset
        template_update: args.template_update
        use_atlas: args.use_atlas
        atlas_dir: args.atlas_dir
    }
    if c.install {
@@ -6,10 +6,11 @@ import incubaid.herolib.web.site

pub struct Configuration {
pub mut:
    main Main
    navbar Navbar
    footer Footer
    announcement AnnouncementBar
    main Main
    navbar Navbar
    footer Footer
    sidebar_json_txt string // will hold the sidebar.json content
    announcement AnnouncementBar
}

pub struct Main {

@@ -78,18 +79,17 @@ pub mut:

pub struct AnnouncementBar {
pub mut:
    id string @[json: 'id']
    // id string @[json: 'id']
    content string @[json: 'content']
    background_color string @[json: 'backgroundColor']
    text_color string @[json: 'textColor']
    is_closeable bool @[json: 'isCloseable']
}

// ... (struct definitions remain the same) ...

// This function is now a pure transformer: site.SiteConfig -> docusaurus.Configuration
fn new_configuration(site_cfg site.SiteConfig) !Configuration {
// This function is a pure transformer: site.SiteConfig -> docusaurus.Configuration
fn new_configuration(mysite site.Site) !Configuration {
    // Transform site.SiteConfig to docusaurus.Configuration
    mut site_cfg := mysite.siteconfig
    mut nav_items := []NavbarItem{}
    for item in site_cfg.menu.items {
        nav_items << NavbarItem{

@@ -116,8 +116,10 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
        }
    }

    sidebar_json_txt := mysite.nav.sidebar_to_json()!

    cfg := Configuration{
        main: Main{
        main: Main{
            title: site_cfg.title
            tagline: site_cfg.tagline
            favicon: site_cfg.favicon

@@ -147,7 +149,7 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
            copyright: site_cfg.copyright
            name: site_cfg.name
        }
        navbar: Navbar{
        navbar: Navbar{
            title: site_cfg.menu.title
            logo: Logo{
                alt: site_cfg.menu.logo_alt

@@ -156,18 +158,20 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
            }
            items: nav_items
        }
        footer: Footer{
        footer: Footer{
            style: site_cfg.footer.style
            links: footer_links
        }
        announcement: AnnouncementBar{
            id: site_cfg.announcement.id
        announcement: AnnouncementBar{
            // id: site_cfg.announcement.id
            content: site_cfg.announcement.content
            background_color: site_cfg.announcement.background_color
            text_color: site_cfg.announcement.text_color
            is_closeable: site_cfg.announcement.is_closeable
        }
        sidebar_json_txt: sidebar_json_txt
    }

    return config_fix(cfg)!
}

@@ -33,7 +33,11 @@ pub fn (mut docsite DocSite) generate() ! {
    mut announcement_file := pathlib.get_file(path: '${cfg_path}/announcement.json', create: true)!
    announcement_file.write(json.encode_pretty(docsite.config.announcement))!

    docsite.generate_docs()!
    // generate sidebar.json, the new way to drive docusaurus navigation
    mut sidebar_file := pathlib.get_file(path: '${cfg_path}/sidebar.json', create: true)!
    sidebar_file.write(docsite.config.sidebar_json_txt)!

    docsite.link_docs()!

    docsite.import()!
}
@@ -1,438 +0,0 @@
module docusaurus

import incubaid.herolib.core.pathlib
import incubaid.herolib.data.atlas.client as atlas_client
import incubaid.herolib.web.site { Page, Section, Site }
import incubaid.herolib.data.markdown.tools as markdowntools
import incubaid.herolib.ui.console

struct SiteGenerator {
mut:
    siteconfig_name string
    path pathlib.Path
    client IDocClient
    flat bool // if flat then won't use sitenames as subdir's
    site Site
    errors []string // collect errors here
}

// Generate docs from site configuration
pub fn (mut docsite DocSite) generate_docs() ! {
    c := config()!

    // we generate the docs in the build path
    docs_path := '${c.path_build.path}/docs'

    // Create the appropriate client based on configuration
    mut client_instance := atlas_client.new(export_dir: c.atlas_dir)!
    mut client := IDocClient(client_instance)

    mut gen := SiteGenerator{
        path: pathlib.get_dir(path: docs_path, create: true)!
        client: client
        flat: true
        site: docsite.website
    }

    for section in gen.site.sections {
        gen.section_generate(section)!
    }

    for page in gen.site.pages {
        gen.page_generate(page)!
    }

    if gen.errors.len > 0 {
        println('Page List: is header collection and page name per collection.\nAvailable pages:\n${gen.client.list_markdown()!}')
        return error('Errors occurred during site generation:\n${gen.errors.join('\n\n')}\n')
    }
}

fn (mut generator SiteGenerator) error(msg string) ! {
    console.print_stderr('Error: ${msg}')
    generator.errors << msg
}

fn (mut generator SiteGenerator) page_generate(args_ Page) ! {
    mut args := args_

    mut content := ['---']

    mut parts := args.src.split(':')
    if parts.len != 2 {
        generator.error("Invalid src format for page '${args.src}', expected format: collection:page_name, TODO: fix in ${args.path}, check the collection & page_name exists in the pagelist")!
        return
    }
    collection_name := parts[0]
    page_name := parts[1]

    mut page_content := generator.client.get_page_content(collection_name, page_name) or {
        generator.error("Couldn't find page '${collection_name}:${page_name}' is formatted as collectionname:pagename. TODO: fix in ${args.path}, check the collection & page_name exists in the pagelist. ")!
        return
    }

    if args.description.len == 0 {
        descnew := markdowntools.extract_title(page_content)
        if descnew != '' {
            args.description = descnew
        } else {
            args.description = page_name
        }
    }

    if args.title.len == 0 {
        descnew := markdowntools.extract_title(page_content)
        if descnew != '' {
            args.title = descnew
        } else {
            args.title = page_name
        }
    }
    // Escape single quotes in YAML by doubling them
    escaped_title := args.title.replace("'", "''")
    content << "title: '${escaped_title}'"

    if args.description.len > 0 {
        escaped_description := args.description.replace("'", "''")
        content << "description: '${escaped_description}'"
    }

    if args.slug.len > 0 {
        escaped_slug := args.slug.replace("'", "''")
        content << "slug: '${escaped_slug}'"
    }

    if args.hide_title {
        content << 'hide_title: ${args.hide_title}'
    }

    if args.draft {
        content << 'draft: ${args.draft}'
    }

    if args.position > 0 {
        content << 'sidebar_position: ${args.position}'
    }

    content << '---'

    mut c := content.join('\n')

    if args.title_nr > 0 {
        // Set the title number in the page content
        page_content = markdowntools.set_titles(page_content, args.title_nr)
    }

    // Fix links to account for nested categories
    page_content = generator.fix_links(page_content, args.path)

    c += '\n${page_content}\n'

    if args.path.ends_with('/') || args.path.trim_space() == '' {
        // means is dir
        args.path += page_name
    }

    if !args.path.ends_with('.md') {
        args.path += '.md'
    }

    mut pagepath := '${generator.path.path}/${args.path}'
    mut pagefile := pathlib.get_file(path: pagepath, create: true)!

    pagefile.write(c)!

    generator.client.copy_images(collection_name, page_name, pagefile.path_dir()) or {
        generator.error("Couldn't copy images for page:'${page_name}' in collection:'${collection_name}'\nERROR:${err}")!
        return
    }
    generator.client.copy_files(collection_name, page_name, pagefile.path_dir()) or {
        generator.error("Couldn't copy files for page:'${page_name}' in collection:'${collection_name}'\nERROR:${err}")!
        return
    }
}

fn (mut generator SiteGenerator) section_generate(args_ Section) ! {
    mut args := args_

    mut c := ''
    if args.description.len > 0 {
        c = '{
    "label": "${args.label}",
    "position": ${args.position},
    "link": {
        "type": "generated-index",
        "description": "${args.description}"
    }
}'
    } else {
        c = '{
    "label": "${args.label}",
    "position": ${args.position},
    "link": {
        "type": "generated-index"
    }
}'
    }

    mut category_path := '${generator.path.path}/${args.path}/_category_.json'
    mut catfile := pathlib.get_file(path: category_path, create: true)!

    catfile.write(c)!
}

// Strip numeric prefix from filename (e.g., "03_linux_installation" -> "linux_installation")
// Docusaurus automatically strips these prefixes from URLs
fn strip_numeric_prefix(name string) string {
    // Match pattern: digits followed by underscore at the start
    if name.len > 2 && name[0].is_digit() {
        for i := 1; i < name.len; i++ {
            if name[i] == `_` {
                // Found the underscore, return everything after it
                return name[i + 1..]
            }
            if !name[i].is_digit() {
                // Not a numeric prefix pattern, return as-is
                return name
            }
        }
    }
    return name
}
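
Illustrative usage of the function above, following the example given in its comment (a hedged sketch, not part of the original file):

```v
// Illustrative checks of the behaviour described in the comment above:
assert strip_numeric_prefix('03_linux_installation') == 'linux_installation'
assert strip_numeric_prefix('2024_report') == 'report' // digits + underscore stripped
assert strip_numeric_prefix('intro') == 'intro' // no numeric prefix
assert strip_numeric_prefix('a1_b') == 'a1_b' // prefix must be digits only
```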

// Calculate relative path from current directory to target directory
// current_dir: directory of the current page (e.g., '' for root, 'tokens' for tokens/, 'farming/advanced' for nested)
// target_dir: directory of the target page
// page_name: name of the target page
// Returns: relative path (e.g., './page', '../dir/page', '../../page')
fn calculate_relative_path(current_dir string, target_dir string, page_name string) string {
    // Both at root level
    if current_dir == '' && target_dir == '' {
        return './${page_name}'
    }

    // Current at root, target in subdirectory
    if current_dir == '' && target_dir != '' {
        return './${target_dir}/${page_name}'
    }

    // Current in subdirectory, target at root
    if current_dir != '' && target_dir == '' {
        // Count directory levels to go up
        levels := current_dir.split('/').len
        up := '../'.repeat(levels)
        return '${up}${page_name}'
    }

    // Both in subdirectories
    current_parts := current_dir.split('/')
    target_parts := target_dir.split('/')

    // Find common prefix
    mut common_len := 0
    for i := 0; i < current_parts.len && i < target_parts.len; i++ {
        if current_parts[i] == target_parts[i] {
            common_len++
        } else {
            break
        }
    }

    // Calculate how many levels to go up
    up_levels := current_parts.len - common_len
    mut path_parts := []string{}

    // Add ../ for each level up
    for _ in 0 .. up_levels {
        path_parts << '..'
    }

    // Add remaining target path parts
    for i in common_len .. target_parts.len {
        path_parts << target_parts[i]
    }

    // Add page name
    path_parts << page_name

    return path_parts.join('/')
}
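
A few hedged, illustrative checks of the expected return values, derived from the doc comment above (not part of the original file):

```v
// Illustrative checks matching the doc comment above:
assert calculate_relative_path('', '', 'intro') == './intro'
assert calculate_relative_path('', 'tokens', 'overview') == './tokens/overview'
assert calculate_relative_path('farming/advanced', '', 'intro') == '../../intro'
assert calculate_relative_path('tokens', 'farming', 'overview') == '../farming/overview'
```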

// Fix links to account for nested categories and Docusaurus URL conventions
fn (generator SiteGenerator) fix_links(content string, current_page_path string) string {
    mut result := content

    // Extract current page's directory path
    mut current_dir := current_page_path.trim('/')
    if current_dir.contains('/') && !current_dir.ends_with('/') {
        last_part := current_dir.all_after_last('/')
        if last_part.contains('.') {
            current_dir = current_dir.all_before_last('/')
        }
    }
    // If path is just a filename or empty, current_dir should be empty (root level)
    if !current_dir.contains('/') && current_dir.contains('.') {
        current_dir = ''
    }

    // Build maps for link fixing
    mut collection_paths := map[string]string{} // collection -> directory path (for nested collections)
    mut page_to_path := map[string]string{} // page_name -> full directory path in Docusaurus
    mut collection_page_map := map[string]string{} // "collection:page" -> directory path

    for page in generator.site.pages {
        parts := page.src.split(':')
        if parts.len != 2 {
            continue
        }
        collection := parts[0]
        page_name := parts[1]

        // Extract directory path from page.path
        mut dir_path := page.path.trim('/')
        if dir_path.contains('/') && !dir_path.ends_with('/') {
            last_part := dir_path.all_after_last('/')
            if last_part.contains('.') || last_part == page_name {
                dir_path = dir_path.all_before_last('/')
            }
        }

        // Store collection -> directory mapping for nested collections
        if dir_path != collection && dir_path != '' {
            collection_paths[collection] = dir_path
        }

        // Store page_name -> directory path for fixing same-collection links
        // Strip numeric prefix from page_name for the map key
        clean_page_name := strip_numeric_prefix(page_name)
        page_to_path[clean_page_name] = dir_path

        // Store collection:page -> directory path for fixing collection:page format links
        collection_page_map['${collection}:${clean_page_name}'] = dir_path
    }

    // STEP 1: Strip numeric prefixes from all page references in links FIRST
    mut lines := result.split('\n')
    for i, line in lines {
        if !line.contains('](') {
            continue
        }

        mut new_line := line
        parts := line.split('](')
        if parts.len < 2 {
            continue
        }

        for j := 1; j < parts.len; j++ {
            close_idx := parts[j].index(')') or { continue }
            link_url := parts[j][..close_idx]

            mut new_url := link_url
            if link_url.contains('/') {
                path_part := link_url.all_before_last('/')
                file_part := link_url.all_after_last('/')
                new_file := strip_numeric_prefix(file_part)
                if new_file != file_part {
                    new_url = '${path_part}/${new_file}'
                }
            } else {
                new_url = strip_numeric_prefix(link_url)
            }

            if new_url != link_url {
                new_line = new_line.replace('](${link_url})', '](${new_url})')
            }
        }
        lines[i] = new_line
    }
    result = lines.join('\n')

    // STEP 2: Replace ../collection/ with ../actual/nested/path/ for cross-collection links
    for collection, actual_path in collection_paths {
        result = result.replace('../${collection}/', '../${actual_path}/')
    }

    // STEP 3: Fix same-collection links: ./page -> correct path based on Docusaurus structure
    for page_name, target_dir in page_to_path {
        old_link := './${page_name}'
        if result.contains(old_link) {
            new_link := calculate_relative_path(current_dir, target_dir, page_name)
            result = result.replace(old_link, new_link)
        }
    }

    // STEP 4: Convert collection:page format to proper relative paths
    // Calculate relative path from current page to target page
    for collection_page, target_dir in collection_page_map {
        old_pattern := collection_page
        if result.contains(old_pattern) {
            // Extract just the page name from "collection:page"
            page_name := collection_page.all_after(':')
            new_link := calculate_relative_path(current_dir, target_dir, page_name)
            result = result.replace(old_pattern, new_link)
        }
    }

    // STEP 5: Fix bare page references (from atlas self-contained exports)
    // Atlas exports convert cross-collection links to simple relative links like "token_system2.md"
    // We need to transform these to proper relative paths based on Docusaurus structure
    for page_name, target_dir in page_to_path {
        // Match links in the format ](page_name) or ](page_name.md)
        old_link_with_md := '](${page_name}.md)'
        old_link_without_md := '](${page_name})'

        if result.contains(old_link_with_md) || result.contains(old_link_without_md) {
            new_link := calculate_relative_path(current_dir, target_dir, page_name)
            // Replace both .md and non-.md versions
            result = result.replace(old_link_with_md, '](${new_link})')
            result = result.replace(old_link_without_md, '](${new_link})')
        }
    }

    // STEP 6: Remove .md extensions from all remaining links (Docusaurus doesn't use them in URLs)
    result = result.replace('.md)', ')')

    // STEP 7: Fix image links to point to img/ subdirectory
    // Images are copied to img/ subdirectory by copy_images(), so we need to update the links
    // Transform ![alt](image.png) to ![alt](img/image.png) for local images only
    mut image_lines := result.split('\n')
    for i, line in image_lines {
        // Find image links ![alt](url), but skip external URLs
        if line.contains('![') {
            mut pos := 0
            for {
                img_start := line.index_after('![', pos) or { break }
                alt_end := line.index_after(']', img_start) or { break }
                if alt_end + 1 >= line.len || line[alt_end + 1] != `(` {
                    pos = alt_end + 1
                    continue
                }
                url_start := alt_end + 2
                url_end := line.index_after(')', url_start) or { break }
                url := line[url_start..url_end]

                // Skip external URLs and already-prefixed img/ paths
                if url.starts_with('http://') || url.starts_with('https://')
                    || url.starts_with('img/') || url.starts_with('./img/') {
                    pos = url_end + 1
                    continue
                }

                // Skip absolute paths and paths with ../
                if url.starts_with('/') || url.starts_with('../') {
                    pos = url_end + 1
                    continue
                }

                // This is a local image reference - add img/ prefix
                new_url := 'img/${url}'
                image_lines[i] = line[0..url_start] + new_url + line[url_end..]
                break
            }
        }
    }
    result = image_lines.join('\n')

    return result
}
lib/web/docusaurus/dsite_generate_docs__.v (new file, 442 lines)
@@ -0,0 +1,442 @@
module docusaurus

import incubaid.herolib.core.pathlib
// import incubaid.herolib.data.atlas.client as atlas_client
// import incubaid.herolib.web.site { Page, Section, Site }
// import incubaid.herolib.data.markdown.tools as markdowntools
// import incubaid.herolib.ui.console

// struct SiteGenerator {
// mut:
//     siteconfig_name string
//     path pathlib.Path
//     client IDocClient
//     flat bool // if flat then won't use sitenames as subdir's
//     site Site
//     errors []string // collect errors here
// }

// // Generate docs from site configuration
// pub fn (mut docsite DocSite) generate_docs() ! {
//     c := config()!

//     // we generate the docs in the build path
//     docs_path := '${c.path_build.path}/docs'

//     // Create the appropriate client based on configuration
//     mut client_instance := atlas_client.new(export_dir: c.atlas_dir)!
//     mut client := IDocClient(client_instance)

//     mut gen := SiteGenerator{
//         path: pathlib.get_dir(path: docs_path, create: true)!
//         client: client
//         flat: true
//         site: docsite.website
//     }

//     for section in gen.site.sections {
//         gen.section_generate(section)!
//     }

//     for page in gen.site.pages {
//         gen.page_generate(page)!
//     }

//     if gen.errors.len > 0 {
//         println('Page List: is header collection and page name per collection.\nAvailable pages:\n${gen.client.list_markdown()!}')
//         return error('Errors occurred during site generation:\n${gen.errors.join('\n\n')}\n')
//     }
// }

// fn (mut generator SiteGenerator) error(msg string) ! {
//     console.print_stderr('Error: ${msg}')
//     generator.errors << msg
// }

// fn (mut generator SiteGenerator) page_generate(args_ Page) ! {
//     mut args := args_

//     mut content := ['---']

//     mut parts := args.src.split(':')
//     if parts.len != 2 {
//         generator.error("Invalid src format for page '${args.src}', expected format: collection:page_name, TODO: fix in ${args.path}, check the collection & page_name exists in the pagelist")!
//         return
//     }
//     collection_name := parts[0]
//     page_name := parts[1]

//     mut page_content := generator.client.get_page_content(collection_name, page_name) or {
//         generator.error("Couldn't find page '${collection_name}:${page_name}' is formatted as collectionname:pagename. TODO: fix in ${args.path}, check the collection & page_name exists in the pagelist. ")!
//         return
//     }

//     if args.description.len == 0 {
//         descnew := markdowntools.extract_title(page_content)
//         if descnew != '' {
//             args.description = descnew
//         } else {
//             args.description = page_name
//         }
//     }

//     if args.title.len == 0 {
//         descnew := markdowntools.extract_title(page_content)
//         if descnew != '' {
//             args.title = descnew
//         } else {
//             args.title = page_name
//         }
//     }
//     // Escape single quotes in YAML by doubling them
//     escaped_title := args.title.replace("'", "''")
//     content << "title: '${escaped_title}'"

//     if args.description.len > 0 {
//         escaped_description := args.description.replace("'", "''")
//         content << "description: '${escaped_description}'"
//     }

//     if args.slug.len > 0 {
//         escaped_slug := args.slug.replace("'", "''")
//         content << "slug: '${escaped_slug}'"
//     }

//     if args.hide_title {
//         content << 'hide_title: ${args.hide_title}'
//     }

//     if args.draft {
//         content << 'draft: ${args.draft}'
//     }

//     if args.position > 0 {
//         content << 'sidebar_position: ${args.position}'
//     }

//     content << '---'

//     mut c := content.join('\n')

//     if args.title_nr > 0 {
//         // Set the title number in the page content
//         page_content = markdowntools.set_titles(page_content, args.title_nr)
//     }

//     // Fix links to account for nested categories
//     page_content = generator.fix_links(page_content, args.path)

//     c += '\n${page_content}\n'

//     if args.path.ends_with('/') || args.path.trim_space() == '' {
//         // means is dir
//         args.path += page_name
//     }

//     if !args.path.ends_with('.md') {
//         args.path += '.md'
//     }

//     mut pagepath := '${generator.path.path}/${args.path}'
//     mut pagefile := pathlib.get_file(path: pagepath, create: true)!

//     pagefile.write(c)!

//     generator.client.copy_pages(collection_name, page_name, pagefile.path_dir()) or {
//         generator.error("Couldn't copy pages for page:'${page_name}' in collection:'${collection_name}'\nERROR:${err}")!
//         return
//     }
//     generator.client.copy_images(collection_name, page_name, pagefile.path_dir()) or {
//         generator.error("Couldn't copy images for page:'${page_name}' in collection:'${collection_name}'\nERROR:${err}")!
//         return
//     }
//     generator.client.copy_files(collection_name, page_name, pagefile.path_dir()) or {
//         generator.error("Couldn't copy files for page:'${page_name}' in collection:'${collection_name}'\nERROR:${err}")!
//         return
//     }
// }

// fn (mut generator SiteGenerator) section_generate(args_ Section) ! {
//     mut args := args_

//     mut c := ''
//     if args.description.len > 0 {
// c = '{
|
||||
// "label": "${args.label}",
|
||||
// "position": ${args.position},
|
||||
// "link": {
|
||||
// "type": "generated-index",
|
||||
// "description": "${args.description}"
|
||||
// }
|
||||
// }'
|
||||
// } else {
|
||||
// c = '{
|
||||
// "label": "${args.label}",
|
||||
// "position": ${args.position},
|
||||
// "link": {
|
||||
// "type": "generated-index"
|
||||
// }
|
||||
// }'
|
||||
// }
|
||||
|
||||
// mut category_path := '${generator.path.path}/${args.path}/_category_.json'
|
||||
// mut catfile := pathlib.get_file(path: category_path, create: true)!
|
||||
|
||||
// catfile.write(c)!
|
||||
// }
|
||||
|
||||
// // Strip numeric prefix from filename (e.g., "03_linux_installation" -> "linux_installation")
|
||||
// // Docusaurus automatically strips these prefixes from URLs
|
||||
// fn strip_numeric_prefix(name string) string {
|
||||
// // Match pattern: digits followed by underscore at the start
|
||||
// if name.len > 2 && name[0].is_digit() {
|
||||
// for i := 1; i < name.len; i++ {
|
||||
// if name[i] == `_` {
|
||||
// // Found the underscore, return everything after it
|
||||
// return name[i + 1..]
|
||||
// }
|
||||
// if !name[i].is_digit() {
|
||||
// // Not a numeric prefix pattern, return as-is
|
||||
// return name
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// return name
|
||||
// }
|
||||
|
||||
// // Calculate relative path from current directory to target directory
|
||||
// // current_dir: directory of the current page (e.g., '' for root, 'tokens' for tokens/, 'farming/advanced' for nested)
|
||||
// // target_dir: directory of the target page
|
||||
// // page_name: name of the target page
|
||||
// // Returns: relative path (e.g., './page', '../dir/page', '../../page')
|
||||
// fn calculate_relative_path(current_dir string, target_dir string, page_name string) string {
|
||||
// // Both at root level
|
||||
// if current_dir == '' && target_dir == '' {
|
||||
// return './${page_name}'
|
||||
// }
|
||||
|
||||
// // Current at root, target in subdirectory
|
||||
// if current_dir == '' && target_dir != '' {
|
||||
// return './${target_dir}/${page_name}'
|
||||
// }
|
||||
|
||||
// // Current in subdirectory, target at root
|
||||
// if current_dir != '' && target_dir == '' {
|
||||
// // Count directory levels to go up
|
||||
// levels := current_dir.split('/').len
|
||||
// up := '../'.repeat(levels)
|
||||
// return '${up}${page_name}'
|
||||
// }
|
||||
|
||||
// // Both in subdirectories
|
||||
// current_parts := current_dir.split('/')
|
||||
// target_parts := target_dir.split('/')
|
||||
|
||||
// // Find common prefix
|
||||
// mut common_len := 0
|
||||
// for i := 0; i < current_parts.len && i < target_parts.len; i++ {
|
||||
// if current_parts[i] == target_parts[i] {
|
||||
// common_len++
|
||||
// } else {
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
|
||||
// // Calculate how many levels to go up
|
||||
// up_levels := current_parts.len - common_len
|
||||
// mut path_parts := []string{}
|
||||
|
||||
// // Add ../ for each level up
|
||||
// for _ in 0 .. up_levels {
|
||||
// path_parts << '..'
|
||||
// }
|
||||
|
||||
// // Add remaining target path parts
|
||||
// for i in common_len .. target_parts.len {
|
||||
// path_parts << target_parts[i]
|
||||
// }
|
||||
|
||||
// // Add page name
|
||||
// path_parts << page_name
|
||||
|
||||
// return path_parts.join('/')
|
||||
// }
|
||||
|
||||
// // Fix links to account for nested categories and Docusaurus URL conventions
|
||||
// fn (generator SiteGenerator) fix_links(content string, current_page_path string) string {
|
||||
// mut result := content
|
||||
|
||||
// // Extract current page's directory path
|
||||
// mut current_dir := current_page_path.trim('/')
|
||||
// if current_dir.contains('/') && !current_dir.ends_with('/') {
|
||||
// last_part := current_dir.all_after_last('/')
|
||||
// if last_part.contains('.') {
|
||||
// current_dir = current_dir.all_before_last('/')
|
||||
// }
|
||||
// }
|
||||
// // If path is just a filename or empty, current_dir should be empty (root level)
|
||||
// if !current_dir.contains('/') && current_dir.contains('.') {
|
||||
// current_dir = ''
|
||||
// }
|
||||
|
||||
// // Build maps for link fixing
|
||||
// mut collection_paths := map[string]string{} // collection -> directory path (for nested collections)
|
||||
// mut page_to_path := map[string]string{} // page_name -> full directory path in Docusaurus
|
||||
// mut collection_page_map := map[string]string{} // "collection:page" -> directory path
|
||||
|
||||
// for page in generator.site.pages {
|
||||
// parts := page.src.split(':')
|
||||
// if parts.len != 2 {
|
||||
// continue
|
||||
// }
|
||||
// collection := parts[0]
|
||||
// page_name := parts[1]
|
||||
|
||||
// // Extract directory path from page.path
|
||||
// mut dir_path := page.path.trim('/')
|
||||
// if dir_path.contains('/') && !dir_path.ends_with('/') {
|
||||
// last_part := dir_path.all_after_last('/')
|
||||
// if last_part.contains('.') || last_part == page_name {
|
||||
// dir_path = dir_path.all_before_last('/')
|
||||
// }
|
||||
// }
|
||||
|
||||
// // Store collection -> directory mapping for nested collections
|
||||
// if dir_path != collection && dir_path != '' {
|
||||
// collection_paths[collection] = dir_path
|
||||
// }
|
||||
|
||||
// // Store page_name -> directory path for fixing same-collection links
|
||||
// // Strip numeric prefix from page_name for the map key
|
||||
// clean_page_name := strip_numeric_prefix(page_name)
|
||||
// page_to_path[clean_page_name] = dir_path
|
||||
|
||||
// // Store collection:page -> directory path for fixing collection:page format links
|
||||
// collection_page_map['${collection}:${clean_page_name}'] = dir_path
|
||||
// }
|
||||
|
||||
// // STEP 1: Strip numeric prefixes from all page references in links FIRST
|
||||
// mut lines := result.split('\n')
|
||||
// for i, line in lines {
|
||||
// if !line.contains('](') {
|
||||
// continue
|
||||
// }
|
||||
|
||||
// mut new_line := line
|
||||
// parts := line.split('](')
|
||||
// if parts.len < 2 {
|
||||
// continue
|
||||
// }
|
||||
|
||||
// for j := 1; j < parts.len; j++ {
|
||||
// close_idx := parts[j].index(')') or { continue }
|
||||
// link_url := parts[j][..close_idx]
|
||||
|
||||
// mut new_url := link_url
|
||||
// if link_url.contains('/') {
|
||||
// path_part := link_url.all_before_last('/')
|
||||
// file_part := link_url.all_after_last('/')
|
||||
// new_file := strip_numeric_prefix(file_part)
|
||||
// if new_file != file_part {
|
||||
// new_url = '${path_part}/${new_file}'
|
||||
// }
|
||||
// } else {
|
||||
// new_url = strip_numeric_prefix(link_url)
|
||||
// }
|
||||
|
||||
// if new_url != link_url {
|
||||
// new_line = new_line.replace('](${link_url})', '](${new_url})')
|
||||
// }
|
||||
// }
|
||||
// lines[i] = new_line
|
||||
// }
|
||||
// result = lines.join('\n')
|
||||
|
||||
// // STEP 2: Replace ../collection/ with ../actual/nested/path/ for cross-collection links
|
||||
// for collection, actual_path in collection_paths {
|
||||
// result = result.replace('../${collection}/', '../${actual_path}/')
|
||||
// }
|
||||
|
||||
// // STEP 3: Fix same-collection links: ./page -> correct path based on Docusaurus structure
|
||||
// for page_name, target_dir in page_to_path {
|
||||
// old_link := './${page_name}'
|
||||
// if result.contains(old_link) {
|
||||
// new_link := calculate_relative_path(current_dir, target_dir, page_name)
|
||||
// result = result.replace(old_link, new_link)
|
||||
// }
|
||||
// }
|
||||
|
||||
// // STEP 4: Convert collection:page format to proper relative paths
|
||||
// // Calculate relative path from current page to target page
|
||||
// for collection_page, target_dir in collection_page_map {
|
||||
// old_pattern := collection_page
|
||||
// if result.contains(old_pattern) {
|
||||
// // Extract just the page name from "collection:page"
|
||||
// page_name := collection_page.all_after(':')
|
||||
// new_link := calculate_relative_path(current_dir, target_dir, page_name)
|
||||
// result = result.replace(old_pattern, new_link)
|
||||
// }
|
||||
// }
|
||||
|
||||
// // STEP 5: Fix bare page references (from atlas self-contained exports)
|
||||
// // Atlas exports convert cross-collection links to simple relative links like "token_system2.md"
|
||||
// // We need to transform these to proper relative paths based on Docusaurus structure
|
||||
// for page_name, target_dir in page_to_path {
|
||||
// // Match links in the format ](page_name) or ](page_name.md)
|
||||
// old_link_with_md := '](${page_name}.md)'
|
||||
// old_link_without_md := '](${page_name})'
|
||||
|
||||
// if result.contains(old_link_with_md) || result.contains(old_link_without_md) {
|
||||
// new_link := calculate_relative_path(current_dir, target_dir, page_name)
|
||||
// // Replace both .md and non-.md versions
|
||||
// result = result.replace(old_link_with_md, '](${new_link})')
|
||||
// result = result.replace(old_link_without_md, '](${new_link})')
|
||||
// }
|
||||
// }
|
||||
|
||||
// // STEP 6: Remove .md extensions from all remaining links (Docusaurus doesn't use them in URLs)
|
||||
// result = result.replace('.md)', ')')
|
||||
|
||||
// // STEP 7: Fix image links to point to img/ subdirectory
|
||||
// // Images are copied to img/ subdirectory by copy_images(), so we need to update the links
|
||||
// // Transform  to  for local images only
|
||||
// mut image_lines := result.split('\n')
|
||||
// for i, line in image_lines {
|
||||
// // Find image links:  but skip external URLs
|
||||
// if line.contains('![') {
|
||||
// mut pos := 0
|
||||
// for {
|
||||
// img_start := line.index_after('![', pos) or { break }
|
||||
// alt_end := line.index_after(']', img_start) or { break }
|
||||
// if alt_end + 1 >= line.len || line[alt_end + 1] != `(` {
|
||||
// pos = alt_end + 1
|
||||
// continue
|
||||
// }
|
||||
// url_start := alt_end + 2
|
||||
// url_end := line.index_after(')', url_start) or { break }
|
||||
// url := line[url_start..url_end]
|
||||
|
||||
// // Skip external URLs and already-prefixed img/ paths
|
||||
// if url.starts_with('http://') || url.starts_with('https://')
|
||||
// || url.starts_with('img/') || url.starts_with('./img/') {
|
||||
// pos = url_end + 1
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // Skip absolute paths and paths with ../
|
||||
// if url.starts_with('/') || url.starts_with('../') {
|
||||
// pos = url_end + 1
|
||||
// continue
|
||||
// }
|
||||
|
||||
// // This is a local image reference - add img/ prefix
|
||||
// new_url := 'img/${url}'
|
||||
// image_lines[i] = line[0..url_start] + new_url + line[url_end..]
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// result = image_lines.join('\n')
|
||||
|
||||
// return result
|
||||
// }
|
||||
178
lib/web/docusaurus/dsite_link_docs.v
Normal file
@@ -0,0 +1,178 @@
module docusaurus

import incubaid.herolib.core.pathlib
import incubaid.herolib.data.atlas.client as atlas_client
import incubaid.herolib.data.markdown.tools as markdowntools
import incubaid.herolib.ui.console
import incubaid.herolib.web.site
import os

// ============================================================================
// Doc Linking - Generate Docusaurus docs from Atlas collections
// ============================================================================

// get_first_doc_from_sidebar recursively finds the first doc ID in the sidebar.
// Used to determine which page should get slug: / in frontmatter when url_home ends with "/".
fn get_first_doc_from_sidebar(items []site.NavItem) string {
	for item in items {
		match item {
			site.NavDoc {
				return site.extract_page_id(item.id)
			}
			site.NavCat {
				// Recursively search in category items
				doc := get_first_doc_from_sidebar(item.items)
				if doc.len > 0 {
					return doc
				}
			}
			site.NavLink {
				// Skip links, we want docs
				continue
			}
		}
	}
	return ''
}

// link_docs generates markdown files from site page definitions.
// Pages are fetched from Atlas collections and written with frontmatter.
pub fn (mut docsite DocSite) link_docs() ! {
	c := config()!
	docs_path := '${c.path_build.path}/docs'

	reset_docs_dir(docs_path)!
	console.print_header('Linking docs to ${docs_path}')

	mut client := atlas_client.new(export_dir: c.atlas_dir)!
	mut errors := []string{}

	// Determine if we need to set a docs landing page (when url_home ends with "/")
	first_doc_page := if docsite.website.siteconfig.url_home.ends_with('/') {
		get_first_doc_from_sidebar(docsite.website.nav.my_sidebar)
	} else {
		''
	}

	for _, page in docsite.website.pages {
		process_page(mut client, docs_path, page, first_doc_page, mut errors)
	}

	if errors.len > 0 {
		report_errors(mut client, errors)!
	}

	console.print_green('Successfully linked ${docsite.website.pages.len} pages to docs folder')
}

fn reset_docs_dir(docs_path string) ! {
	if os.exists(docs_path) {
		os.rmdir_all(docs_path) or {}
	}
	os.mkdir_all(docs_path)!
}

fn report_errors(mut client atlas_client.AtlasClient, errors []string) ! {
	available := client.list_markdown() or { 'Could not list available pages' }
	console.print_stderr('Available pages:\n${available}')
	return error('Errors during doc generation:\n${errors.join('\n\n')}')
}

// ============================================================================
// Page Processing
// ============================================================================

fn process_page(mut client atlas_client.AtlasClient, docs_path string, page site.Page, first_doc_page string, mut errors []string) {
	collection, page_name := parse_page_src(page.src) or {
		errors << err.msg()
		return
	}

	content := client.get_page_content(collection, page_name) or {
		errors << "Page not found: '${collection}:${page_name}'"
		return
	}

	// Check if this page is the docs landing page
	is_landing_page := first_doc_page.len > 0 && page_name == first_doc_page

	write_page(docs_path, page_name, page, content, is_landing_page) or {
		errors << "Failed to write page '${page_name}': ${err.msg()}"
		return
	}

	copy_page_assets(mut client, docs_path, collection, page_name)
	console.print_item('Generated: ${page_name}.md')
}

fn parse_page_src(src string) !(string, string) {
	parts := src.split(':')
	if parts.len != 2 {
		return error("Invalid src format '${src}' - expected 'collection:page_name'")
	}
	return parts[0], parts[1]
}

fn write_page(docs_path string, page_name string, page site.Page, content string, is_landing_page bool) ! {
	frontmatter := build_frontmatter(page, content, is_landing_page)
	final_content := frontmatter + '\n\n' + content

	output_path := '${docs_path}/${page_name}.md'
	mut file := pathlib.get_file(path: output_path, create: true)!
	file.write(final_content)!
}

fn copy_page_assets(mut client atlas_client.AtlasClient, docs_path string, collection string, page_name string) {
	client.copy_images(collection, page_name, docs_path) or {}
	client.copy_files(collection, page_name, docs_path) or {}
}

// ============================================================================
// Frontmatter Generation
// ============================================================================

fn build_frontmatter(page site.Page, content string, is_landing_page bool) string {
	title := get_title(page, content)
	description := get_description(page, title)

	mut lines := ['---']
	lines << "title: '${escape_yaml(title)}'"
	lines << "description: '${escape_yaml(description)}'"

	// Add slug: / for the docs landing page so /docs/ works directly
	if is_landing_page {
		lines << 'slug: /'
	}

	if page.draft {
		lines << 'draft: true'
	}
	if page.hide_title {
		lines << 'hide_title: true'
	}

	lines << '---'
	return lines.join('\n')
}

fn get_title(page site.Page, content string) string {
	if page.title.len > 0 {
		return page.title
	}
	extracted := markdowntools.extract_title(content)
	if extracted.len > 0 {
		return extracted
	}
	return page.src.split(':').last()
}

fn get_description(page site.Page, title string) string {
	if page.description.len > 0 {
		return page.description
	}
	return title
}

fn escape_yaml(s string) string {
	return s.replace("'", "''")
}
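For orientation, a minimal sketch of the frontmatter `build_frontmatter` emits (hypothetical page values, not taken from this diff): a page defined as `!!site.page src:"test_collection:page1" title:"Introduction"` that is also selected as the docs landing page would start roughly like:

```yaml
---
title: 'Introduction'
description: 'Introduction'
slug: /
---
```

`description` falls back to the title when none is given, and the `draft` / `hide_title` lines only appear when those flags are set on the page.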
@@ -1,40 +0,0 @@
module docusaurus

import incubaid.herolib.core.base
import incubaid.herolib.core.texttools

// // Store the Docusaurus site structure in Redis for link processing
// // This maps collection:page to their actual Docusaurus paths
// pub fn (mut docsite DocSite) store_site_structure() ! {
// mut context := base.context()!
// mut redis := context.redis()!

// // Store mapping of collection:page to docusaurus path (without .md extension)
// for page in docsite.website.pages {
// parts := page.src.split(':')
// if parts.len != 2 {
// continue
// }
// collection_name := texttools.name_fix(parts[0])
// page_name := texttools.name_fix(parts[1])

// // Calculate the docusaurus path (without .md extension for URLs)
// mut doc_path := page.path

// // Handle empty or root path
// if doc_path.trim_space() == '' || doc_path == '/' {
// doc_path = page_name
// } else if doc_path.ends_with('/') {
// doc_path += page_name
// }

// // Remove .md extension if present for URL paths
// if doc_path.ends_with('.md') {
// doc_path = doc_path[..doc_path.len - 3]
// }

// // Store in Redis with key format: collection:page.md
// key := '${collection_name}:${page_name}.md'
// redis.hset('doctree_docusaurus_paths', key, doc_path)!
// }
// }
@@ -15,7 +15,7 @@ pub fn dsite_define(sitename string) ! {
	console.print_header('Add Docusaurus Site: ${sitename}')
	mut c := config()!

	path_publish := '${c.path_publish.path}/${sitename}'
	_ := '${c.path_publish.path}/${sitename}'
	path_build_ := '${c.path_build.path}'

	// Get the site object after processing, this is the website which is a generic definition of a site
@@ -26,7 +26,7 @@ pub fn dsite_define(sitename string) ! {
		name: sitename
		path_publish: pathlib.get_dir(path: '${path_build_}/build', create: true)!
		path_build: pathlib.get_dir(path: path_build_, create: true)!
		config: new_configuration(website.siteconfig)!
		config: new_configuration(website)!
		website: website
	}

82
lib/web/docusaurus/for_testing/README.md
Normal file
@@ -0,0 +1,82 @@
# Docusaurus Link Resolution Test

This directory contains a comprehensive test for the herolib documentation linking mechanism.

## Structure

```
for_testing/
├── README.md                     # This file
├── collections/
│   └── test_collection/          # Markdown source files
│       ├── .collection           # Collection metadata
│       ├── page1.md              # Introduction
│       ├── page2.md              # Basic Concepts
│       ├── page3.md              # Configuration
│       ├── page4.md              # Advanced Features
│       ├── page5.md              # Troubleshooting
│       ├── page6.md              # Best Practices
│       └── page7.md              # Conclusion
└── ebooks/
    └── test_site/                # Heroscript configuration
        ├── heroscriptall         # Master configuration (entry point)
        ├── config.heroscript     # Site configuration
        ├── pages.heroscript      # Page definitions
        └── docusaurus.heroscript # Docusaurus settings
```

## What This Tests

1. **Link Resolution** - Each page contains links using the `[text](collection:page)` format
2. **Navigation Chain** - Pages link sequentially: 1 → 2 → 3 → 4 → 5 → 6 → 7
3. **Sidebar Generation** - All 7 pages should appear in the sidebar
4. **Category Support** - Pages are organized into categories (root, basics, advanced, reference)

## Running the Test

From the herolib root directory:

```bash
# Build herolib first
./cli/compile.vsh

# Run the test site
hero docs -d -p lib/web/docusaurus/for_testing/ebooks/test_site
```

## Expected Results

When the test runs successfully:

1. ✅ All 7 pages are generated in `~/hero/var/docusaurus/build/docs/`
2. ✅ Sidebar shows all pages organized by category
3. ✅ Clicking navigation links works (page1 → page2 → ... → page7)
4. ✅ No broken links or 404 errors
5. ✅ Back-links also work (e.g., page7 → page1)

## Link Syntax Being Tested

```markdown
[Next Page](test_collection:page2)
```

This should resolve to a proper Docusaurus link when the site is built.

## Verification

After running the test:

1. Open http://localhost:3000/test/ in your browser
2. Click through all navigation links from Page 1 to Page 7
3. Verify the back-link on Page 7 returns to Page 1
4. Check the sidebar displays all pages correctly

## Troubleshooting

If links don't resolve:

1. Check that the collection is registered in the atlas
2. Verify page names match (no typos)
3. Run with debug flag (`-d`) to see detailed output
4. Check `~/hero/var/docusaurus/build/docs/` for generated files

@@ -0,0 +1,3 @@
name: test_collection
description: Test collection for link resolution testing

@@ -0,0 +1,21 @@
# Page 1: Introduction

Welcome to the documentation linking test. This page serves as the entry point for testing herolib's link resolution mechanism.

## Overview

This test suite consists of 7 interconnected pages that form a chain. Each page links to the next, demonstrating that the `collection:page_name` link syntax works correctly across multiple layers.

## What We're Testing

- Link resolution using `collection:page_name` format
- Proper generation of Docusaurus-compatible links
- Navigation chain integrity from page 1 through page 7
- Sidebar generation with all pages

## Navigation

Continue to the next section to learn about the basic concepts.

**Next:** [Page 2: Basic Concepts](test_collection:page2)

@@ -0,0 +1,30 @@
# Page 2: Basic Concepts

This page covers the basic concepts of the documentation system.

## Link Syntax

In herolib, links between pages use the format:

```
[Link Text](collection_name:page_name)
```

For example, to link to `page3` in `test_collection`:

```markdown
[Go to Page 3](test_collection:page3)
```

## How It Works

1. The parser identifies links with the `collection:page` format
2. During site generation, these are resolved to actual file paths
3. Docusaurus receives properly formatted relative links

## Navigation

**Previous:** [Page 1: Introduction](test_collection:page1)

**Next:** [Page 3: Configuration](test_collection:page3)

@@ -0,0 +1,39 @@
# Page 3: Configuration

This page explains configuration options for the documentation system.

## Site Configuration

The site is configured using heroscript files:

```heroscript
!!site.config
    name:"test_site"
    title:"Test Documentation"
    base_url:"/test/"
    url_home:"docs/page1"
```

## Page Definitions

Each page is defined using the `!!site.page` action:

```heroscript
!!site.page src:"test_collection:page1"
    title:"Introduction"
```

## Important Settings

| Setting | Description |
|---------|-------------|
| `src` | Source page in `collection:page_name` form |
| `title` | Display title in the sidebar |
| `description` | Short description used in the page frontmatter |

## Navigation

**Previous:** [Page 2: Basic Concepts](test_collection:page2)

**Next:** [Page 4: Advanced Features](test_collection:page4)

@@ -0,0 +1,37 @@
# Page 4: Advanced Features

This page covers advanced features of the linking mechanism.

## Cross-Collection Links

You can link to pages in different collections:

```markdown
[Link to other collection](other_collection:some_page)
```

## Categories

Pages can be organized into categories:

```heroscript
!!site.page_category name:'advanced' label:"Advanced Topics"

!!site.page src:'test_collection:page4'
    title:"Advanced Features"
```

## Multiple Link Formats

The system supports various link formats:

1. **Collection links:** `[text](collection:page)`
2. **Relative links:** `[text](./other_page.md)`
3. **External links:** `[text](https://example.com)`

## Navigation

**Previous:** [Page 3: Configuration](test_collection:page3)

**Next:** [Page 5: Troubleshooting](test_collection:page5)

@@ -0,0 +1,43 @@
# Page 5: Troubleshooting

This page helps you troubleshoot common issues.

## Common Issues

### Broken Links

If links appear broken, check:

1. The collection name is correct
2. The page name matches the markdown filename (without `.md`)
3. The collection is properly registered in the atlas

### Page Not Found

Ensure the page is defined in your heroscript:

```heroscript
!!site.page src:'test_collection:page5'
    title:"Troubleshooting"
```

## Debugging Tips

- Run with debug flag: `hero docs -d -p .`
- Check the generated `sidebar.json`
- Verify the docs output in `~/hero/var/docusaurus/build/docs/`

## Error Messages

| Error | Solution |
|-------|----------|
| "Page not found" | Check page name spelling |
| "Collection not found" | Verify atlas configuration |
| "Link resolution failed" | Check link syntax |

## Navigation

**Previous:** [Page 4: Advanced Features](test_collection:page4)

**Next:** [Page 6: Best Practices](test_collection:page6)

@@ -0,0 +1,44 @@
# Page 6: Best Practices

This page outlines best practices for documentation.

## Naming Conventions

- Use lowercase for page names: `page_name.md`
- Use underscores for multi-word names: `my_long_page_name.md`
- Keep names short but descriptive

## Link Organization

### Do This ✓

```markdown
See the [configuration guide](test_collection:page3) for details.
```

### Avoid This ✗

```markdown
Click [here](test_collection:page3) for more.
```

## Documentation Structure

A well-organized documentation site should:

1. **Start with an introduction** - Explain what the documentation covers
2. **Progress logically** - Each page builds on the previous
3. **End with reference material** - API docs, troubleshooting, etc.

## Content Guidelines

- Keep paragraphs short
- Use code blocks for examples
- Include navigation links at the bottom of each page

## Navigation

**Previous:** [Page 5: Troubleshooting](test_collection:page5)

**Next:** [Page 7: Conclusion](test_collection:page7)

@@ -0,0 +1,37 @@
# Page 7: Conclusion

Congratulations! You've reached the final page of the documentation linking test.

## Summary

This test suite demonstrated:

- ✅ Link resolution using `collection:page_name` format
- ✅ Navigation chain across 7 pages
- ✅ Proper sidebar generation
- ✅ Docusaurus-compatible output

## Test Verification

If you've reached this page by clicking through all the navigation links, the linking mechanism is working correctly!

### Link Chain Verified

1. [Page 1: Introduction](test_collection:page1) → Entry point
2. [Page 2: Basic Concepts](test_collection:page2) → Link syntax
3. [Page 3: Configuration](test_collection:page3) → Site setup
4. [Page 4: Advanced Features](test_collection:page4) → Cross-collection links
5. [Page 5: Troubleshooting](test_collection:page5) → Common issues
6. [Page 6: Best Practices](test_collection:page6) → Guidelines
7. **Page 7: Conclusion** → You are here!

## What's Next

You can now use the herolib documentation system with confidence that links will resolve correctly across your entire documentation site.

## Navigation

**Previous:** [Page 6: Best Practices](test_collection:page6)

**Back to Start:** [Page 1: Introduction](test_collection:page1)

16
lib/web/docusaurus/for_testing/ebooks/test_site/config.hero
Normal file
@@ -0,0 +1,16 @@
!!site.config
    name:"test_site"
    title:"Link Resolution Test"
    tagline:"Testing herolib documentation linking mechanism"
    url:"http://localhost:3000"
    url_home:"docs/"
    base_url:"/test/"
    favicon:"img/favicon.png"
    copyright:"© 2024 Herolib Test"
    default_collection:"test_collection"

!!site.config_meta
    description:"Test suite for verifying herolib documentation link resolution across multiple pages"
    title:"Link Resolution Test - Herolib"
    keywords:"herolib, docusaurus, testing, links, documentation"

@@ -0,0 +1,4 @@
!!docusaurus.define name:'test_site'

!!atlas.export include:true

33
lib/web/docusaurus/for_testing/ebooks/test_site/menus.hero
Normal file
@@ -0,0 +1,33 @@
// Navbar configuration
!!site.navbar
    title:"Link Test"

!!site.navbar_item
    label:"Documentation"
    to:"docs/"
    position:"left"

!!site.navbar_item
    label:"GitHub"
    href:"https://github.com/incubaid/herolib"
    position:"right"

// Footer configuration
!!site.footer
    style:"dark"

!!site.footer_item
    title:"Docs"
    label:"Introduction"
    to:"docs/"

!!site.footer_item
    title:"Docs"
    label:"Configuration"
    to:"docs/page3"

!!site.footer_item
    title:"Community"
    label:"GitHub"
    href:"https://github.com/incubaid/herolib"

@@ -0,0 +1,34 @@
// Page Definitions for Link Resolution Test
// Each page maps to a markdown file in the test_collection

// Root pages (no category)
!!site.page src:"test_collection:page1"
    title:"Introduction"

!!site.page src:"page2"
    title:"Basic Concepts"

// Basics category
!!site.page_category name:'basics' label:"Getting Started"

!!site.page src:"page3"
    title:"Configuration"

!!site.page src:"page4"
    title:"Advanced Features"

// Advanced category
!!site.page_category name:'advanced' label:"Advanced Topics"

!!site.page src:"page5"
    title:"Troubleshooting"

!!site.page src:"page6"
    title:"Best Practices"

// Reference category
!!site.page_category name:'reference' label:"Reference"

!!site.page src:"page7"
    title:"Conclusion"

@@ -0,0 +1,2 @@
!!atlas.scan path:"../../collections/test_collection"

@@ -1,106 +0,0 @@
|
||||
module docusaurus
|
||||
|
||||
import os
|
||||
import incubaid.herolib.core.pathlib
|
||||
import incubaid.herolib.core.base // For context and Redis, if test needs to manage it
|
||||
import time
|
||||
|
||||
const test_heroscript_content = '!!site.config\n name:"Kristof"\n title:"Internet Geek"\n tagline:"Internet Geek"\n url:"https://friends.threefold.info"\n url_home:"docs/"\n base_url:"/kristof/"\n favicon:"img/favicon.png"\n image:"img/tf_graph.png"\n copyright:"Kristof"\n\n!!site.config_meta\n description:"ThreeFold is laying the foundation for a geo aware Web 4, the next generation of the Internet."\n image:"https://threefold.info/kristof/img/tf_graph.png"\n title:"ThreeFold Technology Vision"\n\n!!site.build_dest\n ssh_name:"production"\n path:"/root/hero/www/info/kristof"\n\n!!site.navbar\n title:"Kristof = Chief Executive Geek"\n logo_alt:"Kristof Logo"\n logo_src:"img/logo.svg"\n logo_src_dark:"img/logo.svg"\n\n!!site.navbar_item\n label:"ThreeFold Technology"\n href:"https://threefold.info/kristof/"\n position:"right"\n\n!!site.navbar_item\n label:"ThreeFold.io"\n href:"https://threefold.io"\n position:"right"\n\n!!site.footer\n style:"dark"\n\n!!site.footer_item\n title:"Docs"\n label:"Introduction"\n href:"/docs"\n\n!!site.footer_item\n title:"Docs"\n label:"TFGrid V4 Docs"\n href:"https://docs.threefold.io/"\n\n!!site.footer_item\n title:"Community"\n label:"Telegram"\n href:"https://t.me/threefold"\n\n!!site.footer_item\n title:"Community"\n label:"X"\n href:"https://x.com/threefold_io"\n\n!!site.footer_item\n title:"Links"\n label:"ThreeFold.io"\n href:"https://threefold.io"\n'
|
||||
|
||||
fn test_load_configuration_from_heroscript() ! {
|
||||
// Ensure context is initialized for Redis connection if siteconfig.new() needs it implicitly
|
||||
base.context()!
|
||||
|
||||
temp_cfg_dir := os.join_path(os.temp_dir(), 'test_docusaurus_cfg_${time.ticks()}')
|
||||
os.mkdir_all(temp_cfg_dir)!
|
||||
defer {
|
||||
os.rmdir_all(temp_cfg_dir) or { eprintln('Error removing temp dir.') }
|
||||
}
|
||||
|
||||
heroscript_path := os.join_path(temp_cfg_dir, 'config.heroscript')
|
||||
os.write_file(heroscript_path, test_heroscript_content)!
|
||||
|
||||
config := load_configuration(temp_cfg_dir)!
|
||||
|
||||
// Main assertions
|
||||
assert config.main.name == 'kristof' // texttools.name_fix converts to lowercase
|
||||
assert config.main.title == 'Internet Geek'
|
||||
assert config.main.tagline == 'Internet Geek'
|
||||
assert config.main.url == 'https://friends.threefold.info'
|
||||
assert config.main.url_home == 'docs/'
|
||||
assert config.main.base_url == '/kristof/'
|
||||
assert config.main.favicon == 'img/favicon.png'
|
||||
assert config.main.image == 'img/tf_graph.png'
|
||||
assert config.main.copyright == 'Kristof'
|
||||
|
||||
// Metadata assertions
|
||||
assert config.main.metadata.title == 'ThreeFold Technology Vision'
|
||||
assert config.main.metadata.description == 'ThreeFold is laying the foundation for a geo aware Web 4, the next generation of the Internet.'
|
||||
assert config.main.metadata.image == 'https://threefold.info/kristof/img/tf_graph.png'
|
||||
|
||||
// Build Dest assertions
|
||||
assert config.main.build_dest.len == 1
|
||||
assert config.main.build_dest[0] == '/root/hero/www/info/kristof'
|
||||
|
||||
// Navbar assertions
|
||||
assert config.navbar.title == 'Kristof = Chief Executive Geek'
|
||||
assert config.navbar.logo.alt == 'Kristof Logo'
|
||||
assert config.navbar.logo.src == 'img/logo.svg'
|
||||
assert config.navbar.logo.src_dark == 'img/logo.svg'
|
||||
assert config.navbar.items.len == 2
|
||||
assert config.navbar.items[0].label == 'ThreeFold Technology'
|
||||
assert config.navbar.items[0].href == 'https://threefold.info/kristof/'
|
||||
assert config.navbar.items[0].position == 'right'
|
||||
assert config.navbar.items[1].label == 'ThreeFold.io'
|
||||
assert config.navbar.items[1].href == 'https://threefold.io'
|
||||
assert config.navbar.items[1].position == 'right'
|
||||
|
||||
// Footer assertions
|
||||
assert config.footer.style == 'dark'
|
||||
assert config.footer.links.len == 3 // 'Docs', 'Community', 'Links'
|
||||
|
||||
// Check 'Docs' footer links
|
||||
mut docs_link_found := false
|
||||
for link in config.footer.links {
|
||||
if link.title == 'Docs' {
|
||||
docs_link_found = true
|
||||
assert link.items.len == 2
|
||||
assert link.items[0].label == 'Introduction'
|
||||
assert link.items[0].href == '/docs'
|
||||
assert link.items[1].label == 'TFGrid V4 Docs'
|
||||
assert link.items[1].href == 'https://docs.threefold.io/'
|
||||
break
|
||||
}
|
||||
}
|
||||
assert docs_link_found
|
||||
|
||||
// Check 'Community' footer links
|
||||
mut community_link_found := false
|
||||
for link in config.footer.links {
|
||||
if link.title == 'Community' {
|
||||
community_link_found = true
|
||||
assert link.items.len == 2
|
||||
assert link.items[0].label == 'Telegram'
|
||||
assert link.items[0].href == 'https://t.me/threefold'
|
||||
assert link.items[1].label == 'X'
|
||||
assert link.items[1].href == 'https://x.com/threefold_io'
|
||||
break
|
||||
}
|
||||
}
|
||||
assert community_link_found
|
||||
|
||||
// Check 'Links' footer links
|
||||
mut links_link_found := false
|
||||
for link in config.footer.links {
|
||||
if link.title == 'Links' {
|
||||
links_link_found = true
|
||||
assert link.items.len == 1
|
||||
assert link.items[0].label == 'ThreeFold.io'
|
||||
assert link.items[0].href == 'https://threefold.io'
|
||||
break
|
||||
}
|
||||
}
|
||||
assert links_link_found
|
||||
|
||||
println('test_load_configuration_from_heroscript passed successfully.')
|
||||
}
|
||||
@@ -1,6 +1,8 @@
module docusaurus

import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.data.atlas
import incubaid.herolib.ui.console
import os

pub fn play(mut plbook PlayBook) ! {
@@ -8,62 +10,78 @@ pub fn play(mut plbook PlayBook) ! {
		return
	}

	// there should be 1 define section
	mut action_define := plbook.ensure_once(filter: 'docusaurus.define')!
	mut param_define := action_define.params

	config_set(
		path_build: param_define.get_default('path_build', '')!
		path_publish: param_define.get_default('path_publish', '')!
		reset: param_define.get_default_false('reset')
		template_update: param_define.get_default_false('template_update')
		install: param_define.get_default_false('install')
		atlas_dir: param_define.get_default('atlas_dir', '${os.home_dir()}/hero/var/atlas_export')!
		use_atlas: param_define.get_default_false('use_atlas')
	)!

	site_name := param_define.get('name') or {
		return error('In docusaurus.define, param "name" is required.')
	}

	dsite_define(site_name)!

	action_define.done = true
	mut dsite := dsite_get(site_name)!

	mut dsite := process_define(mut plbook)!
	dsite.generate()!

	mut actions_build := plbook.find(filter: 'docusaurus.build')!
	if actions_build.len > 1 {
		return error('Multiple "docusaurus.build" actions found. Only one is allowed.')
	}
	for mut action in actions_build {
		dsite.build()!
		action.done = true
	}

	mut actions_export := plbook.find(filter: 'docusaurus.publish')!
	if actions_export.len > 1 {
		return error('Multiple "docusaurus.publish" actions found. Only one is allowed.')
	}
	for mut action in actions_export {
		dsite.build_publish()!
		action.done = true
	}

	mut actions_dev := plbook.find(filter: 'docusaurus.dev')!
	if actions_dev.len > 1 {
		return error('Multiple "docusaurus.dev" actions found. Only one is allowed.')
	}
	for mut action in actions_dev {
		mut p := action.params
		dsite.dev(
			host: p.get_default('host', 'localhost')!
			port: p.get_int_default('port', 3000)!
			open: p.get_default_false('open')
		)!
		action.done = true
	}
	process_build(mut plbook, mut dsite)!
	process_publish(mut plbook, mut dsite)!
	process_dev(mut plbook, mut dsite)!

	plbook.ensure_processed(filter: 'docusaurus.')!
}

fn process_define(mut plbook PlayBook) !&DocSite {
	mut action := plbook.ensure_once(filter: 'docusaurus.define')!
	p := action.params

	atlas_dir := p.get_default('atlas_dir', '${os.home_dir()}/hero/var/atlas_export')!

	config_set(
		path_build: p.get_default('path_build', '')!
		path_publish: p.get_default('path_publish', '')!
		reset: p.get_default_false('reset')
		template_update: p.get_default_false('template_update')
		install: p.get_default_false('install')
		atlas_dir: atlas_dir
	)!

	site_name := p.get('name') or { return error('docusaurus.define: "name" is required') }
	atlas_name := p.get_default('atlas', 'main')!

	export_atlas(atlas_name, atlas_dir)!
	dsite_define(site_name)!
	action.done = true

	return dsite_get(site_name)!
}

fn process_build(mut plbook PlayBook, mut dsite DocSite) ! {
	if !plbook.max_once(filter: 'docusaurus.build')! {
		return
	}
	mut action := plbook.get(filter: 'docusaurus.build')!
	dsite.build()!
	action.done = true
}

fn process_publish(mut plbook PlayBook, mut dsite DocSite) ! {
	if !plbook.max_once(filter: 'docusaurus.publish')! {
		return
	}
	mut action := plbook.get(filter: 'docusaurus.publish')!
	dsite.build_publish()!
	action.done = true
}

fn process_dev(mut plbook PlayBook, mut dsite DocSite) ! {
	if !plbook.max_once(filter: 'docusaurus.dev')! {
		return
	}
	mut action := plbook.get(filter: 'docusaurus.dev')!
	p := action.params
	dsite.dev(
		host: p.get_default('host', 'localhost')!
		port: p.get_int_default('port', 3000)!
		open: p.get_default_false('open')
	)!
	action.done = true
}

fn export_atlas(name string, dir string) ! {
	if !atlas.exists(name) {
		return
	}
	console.print_debug('Auto-exporting Atlas "${name}" to ${dir}')
	mut a := atlas.get(name)!
	a.export(destination: dir, reset: true, include: true, redis: false)!
}

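A rough heroscript sketch of how these playbook handlers are driven (illustrative values; only `name` is required, everything else falls back to the defaults read above):

```heroscript
!!docusaurus.define name:'my_ebook' atlas:'main' reset:true

!!docusaurus.dev port:3001 open:true
```

`process_define` exports the named Atlas (when it exists) to `atlas_dir` before the site is defined, and at most one `build`, `publish`, and `dev` action is processed per playbook.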
@@ -3,7 +3,7 @@ module site
import incubaid.herolib.core.texttools

__global (
	websites map[string]&Site
	sites_global map[string]&Site
)

@[params]
@@ -15,28 +15,38 @@ pub mut:
pub fn new(args FactoryArgs) !&Site {
	name := texttools.name_fix(args.name)

	websites[name] = &Site{
	// Check if a site with this name already exists
	if name in sites_global {
		// Return the existing site instead of creating a new one
		return get(name: name)!
	}

	mut site := Site{
		nav: SideBar{}
		siteconfig: SiteConfig{
			name: name
		}
	}
	sites_global[name] = &site
	return get(name: name)!
}

pub fn get(args FactoryArgs) !&Site {
	name := texttools.name_fix(args.name)
	mut sc := websites[name] or { return error('siteconfig with name "${name}" does not exist') }
	return sc
	// mut sc := sites_global[name] or { return error('siteconfig with name "${name}" does not exist') }
	return sites_global[name] or {
		print_backtrace()
		return error('could not get site with name:${name}')
	}
}

pub fn exists(args FactoryArgs) bool {
	name := texttools.name_fix(args.name)
	mut sc := websites[name] or { return false }
	return true
	return name in sites_global
}

pub fn default() !&Site {
	if websites.len == 0 {
	if sites_global.len == 0 {
		return new(name: 'default')!
	}
	return get()!
@@ -44,5 +54,5 @@ pub fn default() !&Site {

// list returns all site names that have been created
pub fn list() []string {
	return websites.keys()
	return sites_global.keys()
}

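A small usage sketch of the changed factory behaviour (assumed call site, not part of this diff): creating a site twice with the same name now returns the existing entry from `sites_global` instead of silently overwriting it.

```v
mut a := site.new(name: 'test_site')!
mut b := site.new(name: 'test_site')! // returns the already-registered site
assert site.exists(name: 'test_site')
assert site.list() == ['test_site']
```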
104
lib/web/site/model_sidebar.v
Normal file
@@ -0,0 +1,104 @@
module site

import json

// ============================================================================
// Sidebar Navigation Models (Domain Types)
// ============================================================================

pub struct SideBar {
pub mut:
	my_sidebar []NavItem
}

pub type NavItem = NavDoc | NavCat | NavLink

pub struct NavDoc {
pub:
	id string
	label string
}

pub struct NavCat {
pub mut:
	label string
	collapsible bool = true
	collapsed bool
	items []NavItem
}

pub struct NavLink {
pub:
	label string
	href string
	description string
}

// ============================================================================
// JSON Serialization Struct (unified to avoid sum type _type field)
// ============================================================================

struct SidebarItem {
	typ string @[json: 'type']
	id string @[omitempty]
	label string
	href string @[omitempty]
	description string @[omitempty]
	collapsible bool @[json: 'collapsible'; omitempty]
	collapsed bool @[json: 'collapsed'; omitempty]
	items []SidebarItem @[omitempty]
}

// ============================================================================
// JSON Serialization
// ============================================================================

pub fn (sb SideBar) sidebar_to_json() !string {
	items := sb.my_sidebar.map(to_sidebar_item(it))
	return json.encode_pretty(items)
}

fn to_sidebar_item(item NavItem) SidebarItem {
	return match item {
		NavDoc { from_doc(item) }
		NavLink { from_link(item) }
		NavCat { from_category(item) }
	}
}

fn from_doc(doc NavDoc) SidebarItem {
	return SidebarItem{
		typ: 'doc'
		id: extract_page_id(doc.id)
		label: doc.label
	}
}

fn from_link(link NavLink) SidebarItem {
	return SidebarItem{
		typ: 'link'
		label: link.label
		href: link.href
		description: link.description
	}
}

fn from_category(cat NavCat) SidebarItem {
	return SidebarItem{
		typ: 'category'
		label: cat.label
		collapsible: cat.collapsible
		collapsed: cat.collapsed
		items: cat.items.map(to_sidebar_item(it))
	}
}

// extract_page_id extracts the page name from a "collection:page_name" format.
// If the id doesn't contain a colon, returns the id as-is.
pub fn extract_page_id(id string) string {
	parts := id.split(':')
	if parts.len == 2 {
		return parts[1]
	}
	return id
}
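As an illustration of the shape `sidebar_to_json` produces (hypothetical ids and labels), one root doc plus one category would serialize roughly to:

```json
[
	{ "type": "doc", "id": "page1", "label": "Introduction" },
	{
		"type": "category",
		"label": "Getting Started",
		"collapsible": true,
		"items": [
			{ "type": "doc", "id": "page3", "label": "Configuration" }
		]
	}
]
```

Fields marked `omitempty` (for example `href`, or `collapsed` when false) are dropped from the output.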
9
lib/web/site/model_site.v
Normal file
@@ -0,0 +1,9 @@
module site

@[heap]
pub struct Site {
pub mut:
	pages map[string]Page // key: "collection:page_name"
	nav SideBar // Navigation sidebar configuration
	siteconfig SiteConfig // Full site configuration
}
@@ -34,7 +34,7 @@ pub mut:
// Announcement bar config structure
pub struct AnnouncementBar {
pub mut:
	id string @[json: 'id']
	// id string @[json: 'id']
	content string @[json: 'content']
	background_color string @[json: 'backgroundColor']
	text_color string @[json: 'textColor']

34
lib/web/site/play_announcement.v
Normal file
@@ -0,0 +1,34 @@
module site

import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// ============================================================
// ANNOUNCEMENT: Process announcement bar (optional)
// ============================================================
fn play_announcement(mut plbook PlayBook, mut config SiteConfig) ! {
	mut announcement_actions := plbook.find(filter: 'site.announcement')!

	if announcement_actions.len > 0 {
		// Only process the first announcement action
		mut action := announcement_actions[0]
		mut p := action.params

		content := p.get('content') or {
			return error('!!site.announcement: must specify "content"')
		}

		config.announcement = AnnouncementBar{
			// id: p.get('id')!
			content: content
			background_color: p.get_default('background_color', '#20232a')!
			text_color: p.get_default('text_color', '#fff')!
			is_closeable: p.get_default_true('is_closeable')
		}

		action.done = true
	}
}
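A hedged example of the heroscript this handler consumes (values are illustrative; only `content` is required, the colors default to the values shown above):

```heroscript
!!site.announcement
    content:"This documentation set is still a work in progress"
    background_color:"#20232a"
    text_color:"#fff"
    is_closeable:true
```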
208
lib/web/site/play_pages.v
Normal file
@@ -0,0 +1,208 @@
module site

import os
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.core.texttools
import time
import incubaid.herolib.ui.console

// ============================================================
// Helper function: normalize name while preserving .md extension handling
// ============================================================
fn normalize_page_name(name string) string {
	mut result := name
	// Remove .md extension if present for processing
	if result.ends_with('.md') {
		result = result[0..result.len - 3]
	}
	// Apply name fixing
	return texttools.name_fix(result)
}

// ============================================================
// Internal structure for tracking category information
// ============================================================
struct CategoryInfo {
pub mut:
	name      string
	label     string
	position  int
	nav_items []NavItem
}

// ============================================================
// PAGES: Process pages and build navigation structure
// ============================================================
fn play_pages(mut plbook PlayBook, mut website Site) ! {
	mut collection_current := '' // Track current collection for reuse
	mut categories := map[string]CategoryInfo{} // Map of category name -> info
	mut category_current := '' // Track current active category
	mut root_nav_items := []NavItem{} // Root-level items (pages without category)
	mut next_category_position := 100 // Auto-increment position for categories

	// ============================================================
	// PASS 1: Process all page and category actions
	// ============================================================
	mut all_actions := plbook.find(filter: 'site.')!

	for mut action in all_actions {
		if action.done {
			continue
		}

		// ========== PAGE CATEGORY ==========
		if action.name == 'page_category' {
			mut p := action.params

			category_name := p.get('name') or {
				return error('!!site.page_category: must specify "name"')
			}

			category_name_fixed := texttools.name_fix(category_name)

			// Get label (derive from name if not specified)
			mut label := p.get_default('label', texttools.name_fix_snake_to_pascal(category_name_fixed))!
			mut position := p.get_int_default('position', next_category_position)!

			// Auto-increment position if using default
			if position == next_category_position {
				next_category_position += 100
			}

			// Create and store category info
			categories[category_name_fixed] = CategoryInfo{
				name: category_name_fixed
				label: label
				position: position
				nav_items: []NavItem{}
			}

			category_current = category_name_fixed
			console.print_item('Created page category: "${label}" (${category_name_fixed})')
			action.done = true
			continue
		}

		// ========== PAGE ==========
		if action.name == 'page' {
			mut p := action.params

			mut page_src := p.get_default('src', '')!
			mut page_collection := ''
			mut page_name := ''

			// Parse collection:page format from src
			if page_src.contains(':') {
				parts := page_src.split(':')
				page_collection = texttools.name_fix(parts[0])
				page_name = normalize_page_name(parts[1])
			} else {
				// Use previously specified collection if available
				if collection_current.len > 0 {
					page_collection = collection_current
					page_name = normalize_page_name(page_src)
				} else {
					return error('!!site.page: must specify source as "collection:page_name" in "src".\nGot src="${page_src}" with no collection previously set.\nEither specify "collection:page_name" or define a collection first.')
				}
			}

			// Validation
			if page_name.len == 0 {
				return error('!!site.page: could not extract valid page name from src="${page_src}"')
			}
			if page_collection.len == 0 {
				return error('!!site.page: could not determine collection')
			}

			// Store collection for subsequent pages
			collection_current = page_collection

			// Build page ID
			page_id := '${page_collection}:${page_name}'

			// Get optional page metadata
			page_title := p.get_default('title', '')!
			page_description := p.get_default('description', '')!
			page_draft := p.get_default_false('draft')
			page_hide_title := p.get_default_false('hide_title')

			// Create page
			mut page := Page{
				id: page_id
				title: page_title
				description: page_description
				draft: page_draft
				hide_title: page_hide_title
				src: page_id
			}

			website.pages[page_id] = page

			// Create navigation item with human-readable label
			nav_label := if page_title.len > 0 {
				page_title
			} else {
				texttools.title_case(page_name)
			}
			nav_doc := NavDoc{
				id: page_id
				label: nav_label
			}

			// Add to appropriate category or root
			if category_current.len > 0 {
				if category_current in categories {
					mut cat_info := categories[category_current]
					cat_info.nav_items << nav_doc
					categories[category_current] = cat_info
					console.print_debug('Added page "${page_id}" to category "${category_current}"')
				}
			} else {
				root_nav_items << nav_doc
				console.print_debug('Added root page "${page_id}"')
			}

			action.done = true
			continue
		}
	}

	// ============================================================
	// PASS 2: Build final navigation structure from categories
	// ============================================================
	console.print_item('Building navigation structure...')

	mut final_nav_items := []NavItem{}

	// Add root items first
	for item in root_nav_items {
		final_nav_items << item
	}

	// Sort categories by position and add them
	mut sorted_categories := []CategoryInfo{}
	for _, cat_info in categories {
		sorted_categories << cat_info
	}

	// Sort by position
	sorted_categories.sort(a.position < b.position)

	// Convert categories to NavCat items and add to navigation
	for cat_info in sorted_categories {
		// Unwrap NavDoc items from cat_info.nav_items (they're already NavItem)
		nav_cat := NavCat{
			label: cat_info.label
			collapsible: true
			collapsed: false
			items: cat_info.nav_items
		}
		final_nav_items << nav_cat
		console.print_debug('Added category to nav: "${cat_info.label}" with ${cat_info.nav_items.len} items')
	}

	// Update website navigation
	website.nav.my_sidebar = final_nav_items

	console.print_green('Navigation structure built with ${website.pages.len} pages in ${categories.len} categories')
}
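
As a sketch of the input this two-pass builder expects (collection and page names are hypothetical): a page defined before any `!!site.page_category` becomes a root-level sidebar item, the first `src` with a `collection:` prefix sets the collection that later bare names reuse, and each category collects the pages that follow it until the next category is declared.

```heroscript
!!site.page src:"docs:welcome"              # no category yet -> root-level nav item

!!site.page_category name:"guides" label:"Guides"
!!site.page src:"getting_started"           # reuses collection "docs", nested under "Guides"
!!site.page src:"faq" title:"FAQ"
```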
@@ -31,11 +31,197 @@ mysitepath := gittools.path(
// Process all HeroScript files in the path
playcmds.run(heroscript_path: mysitepath.path)!

// Get the configured site
mut mysite := site.get(name: 'tfgrid_tech')!
println(mysite)
// Access the configured site
mut mysite := site.get(name: 'my_docs')!

// Print available pages
for page_id, page in mysite.pages {
    console.print_item('Page: ${page_id} - "${page.title}"')
}

println('Site has ${mysite.pages.len} pages')
```

---

## API Reference

### Site Factory

Factory functions to create and retrieve site instances:

```v
// Create a new site
mut mysite := site.new(name: 'my_docs')!

// Get existing site
mut mysite := site.get(name: 'my_docs')!

// Check if site exists
if site.exists(name: 'my_docs') {
    println('Site exists')
}

// Get all site names
site_names := site.list() // Returns []string

// Get default site (creates if needed)
mut default := site.default()!
```

### Site Object Structure

```v
pub struct Site {
pub mut:
    pages      map[string]Page // key: "collection:page_name"
    nav        NavConfig       // Navigation sidebar
    siteconfig SiteConfig      // Full configuration
}
```

### Accessing Pages

```v
// Access all pages
pages := mysite.pages // map[string]Page

// Get specific page
page := mysite.pages['docs:introduction']

// Page structure
pub struct Page {
pub mut:
    id          string // "collection:page_name"
    title       string // Display title
    description string // SEO metadata
    draft       bool   // Hidden if true
    hide_title  bool   // Don't show title in rendering
    src         string // Source reference
}
```

### Navigation Structure

```v
// Access sidebar navigation
sidebar := mysite.nav.my_sidebar // []NavItem

// NavItem is a sum type (can be one of three types):
pub type NavItem = NavDoc | NavCat | NavLink

// Navigation items:

pub struct NavDoc {
pub:
    id    string // page id
    label string // display name
}

pub struct NavCat {
pub mut:
    label       string
    collapsible bool
    collapsed   bool
    items       []NavItem // nested NavDoc/NavCat/NavLink
}

pub struct NavLink {
pub:
    label       string
    href        string
    description string
}

// Example: iterate navigation
for item in mysite.nav.my_sidebar {
    match item {
        NavDoc {
            println('Page: ${item.label} (${item.id})')
        }
        NavCat {
            println('Category: ${item.label} (${item.items.len} items)')
        }
        NavLink {
            println('Link: ${item.label} -> ${item.href}')
        }
    }
}
```

### Site Configuration

```v
pub struct SiteConfig {
pub mut:
    // Core
    name        string
    title       string
    description string
    tagline     string
    favicon     string
    image       string
    copyright   string

    // URLs (Docusaurus)
    url      string // Full site URL
    base_url string // Base path (e.g., "/" or "/docs/")
    url_home string // Home page path

    // SEO Metadata
    meta_title string // SEO title override
    meta_image string // OG image override

    // Publishing
    build_dest     []BuildDest // Production destinations
    build_dest_dev []BuildDest // Development destinations

    // Navigation & Footer
    footer       Footer
    menu         Menu
    announcement AnnouncementBar

    // Imports
    imports []ImportItem
}

pub struct BuildDest {
pub mut:
    path     string
    ssh_name string
}
```
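
As a quick orientation, the sketch below shows how these `SiteConfig` fields are typically populated from HeroScript (field names follow the struct above; the values are illustrative only):

```heroscript
!!site.config
    name: "my_docs"
    title: "My Documentation Site"
    tagline: "Docs built with HeroLib"
    url: "https://docs.example.com"
    base_url: "/"
    url_home: "/docs"
```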

---

## Core Concepts

### Site
A website configuration that contains pages, navigation structure, and metadata.

### Page
A single page with:
- **ID**: `collection:page_name` format
- **Title**: Display name (optional, extracted from markdown if not provided)
- **Description**: SEO metadata
- **Draft**: Hidden from navigation if true

### Category (Section)
Groups related pages together in the navigation sidebar. Categories are collapsible and rendered expanded by default.
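
A minimal sketch of a category and the pages that fall under it (collection and page names are hypothetical):

```heroscript
!!site.page_category name:'getting_started' label:"Getting Started"

!!site.page src:"guides:introduction" title:"Introduction"
!!site.page src:"installation" title:"Installation Guide"
```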

### Collection
A logical group of pages. Pages reuse the collection once specified.

```heroscript
!!site.page src: "tech:intro"       # Specifies collection "tech"
!!site.page src: "benefits"         # Reuses collection "tech"
!!site.page src: "components"       # Still uses collection "tech"
!!site.page src: "api:reference"    # Switches to collection "api"
!!site.page src: "endpoints"        # Uses collection "api"
```

---

## HeroScript Syntax

### Basic Configuration

@@ -102,21 +288,11 @@ println(mysite)
When you don't need categories, pages are added sequentially. The collection only needs to be specified once, then it's reused for subsequent pages.

```heroscript
!!site.page src: "mycelium_tech:introduction"
    description: "Introduction to ThreeFold Technology"
    slug: "/"

!!site.page src: "vision"
    description: "Our Vision for the Future Internet"

!!site.page src: "what"
    description: "What ThreeFold is Building"

!!site.page src: "presentation"
    description: "ThreeFold Technology Presentation"

!!site.page src: "status"
    description: "Current Development Status"
!!site.announcement
    content: "🎉 Version 2.0 is now available!"
    background_color: "#20232a"
    text_color: "#fff"
    is_closeable: true
```

**Key Points:**

@@ -148,63 +324,12 @@ Categories (sections) help organize pages into logical groups with their own nav

**Key Points:**

- `!!site.page_category` creates a new section/category
- `name` is the internal identifier (snake_case)
- `label` is the display name (automatically derived from `name` if not specified)
- Category name is converted to title case: `first_principle_thinking` → "First Principle Thinking"
- Once a category is defined, all subsequent pages belong to it until a new category is declared
- Collection persistence works the same: specify once (e.g., `first_principle_thinking:hardware_badly_used`), then reuse
**Category Parameters:**
- `name` - Category identifier (required)
- `label` - Display label (auto-generated from name if omitted)
- `position` - Sort order (auto-incremented if omitted)

### Example 3: Advanced Page Configuration

```heroscript
!!site.page_category
    name: "components"
    label: "System Components"
    position: 100

!!site.page src: "mycelium_tech:mycelium"
    title: "Mycelium Network"
    description: "Peer-to-peer overlay network"
    slug: "mycelium-network"
    position: 1
    draft: false
    hide_title: false

!!site.page src: "fungistor"
    title: "Fungistor Storage"
    description: "Distributed storage system"
    position: 2
```

**Available Page Parameters:**

- `src`: Source reference as `collection:page_name` (required for first page in collection)
- `title`: Page title (optional, extracted from markdown if not provided)
- `description`: Page description for metadata
- `slug`: Custom URL slug
- `position`: Manual ordering (auto-incremented if not specified)
- `draft`: Mark page as draft (default: false)
- `hide_title`: Hide the page title in rendering (default: false)
- `path`: Custom path for the page (defaults to category name)
- `category`: Override the current category for this page
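
The `path` and `category` parameters are not exercised in the examples above; assuming they behave as listed, an override might look like the following sketch (the page name is hypothetical):

```heroscript
!!site.page src: "mycelium_tech:roadmap"
    title: "Roadmap"
    category: "components"    # attach to an already-defined category
    path: "planning"          # custom URL path instead of the category name
```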

## File Organization

HeroScript files should be organized with numeric prefixes to control execution order:

```
docs/
├── 0_config.heroscript       # Site configuration
├── 1_menu.heroscript         # Navigation and footer
├── 2_intro_pages.heroscript  # Introduction pages
├── 3_tech_pages.heroscript   # Technical documentation
└── 4_api_pages.heroscript    # API reference
```

**Important:** Files are processed in alphabetical order, so use numeric prefixes (0_, 1_, 2_, etc.) to ensure correct execution sequence.

## Import External Content
### 7. Content Imports

```heroscript
!!site.import

@@ -282,47 +407,55 @@ pub mut:

### Page

```v
pub struct Page {
pub mut:
    name         string // Page identifier
    title        string // Display title
    description  string // Page description
    draft        bool   // Draft status
    position     int    // Sort order
    hide_title   bool   // Hide title in rendering
    src          string // Source as collection:page_name
    path         string // URL path (without page name)
    section_name string // Category/section name
    title_nr     int    // Title numbering level
    slug         string // Custom URL slug
}
## File Organization

### Recommended Ebook Structure

The modern ebook structure uses `.hero` files for configuration and `.heroscript` files for page definitions:

```
my_ebook/
├── scan.hero              # !!atlas.scan - collection scanning
├── config.hero            # !!site.config - site configuration
├── menus.hero             # !!site.navbar and !!site.footer
├── include.hero           # !!docusaurus.define and !!atlas.export
├── 1_intro.heroscript     # Page definitions (categories + pages)
├── 2_concepts.heroscript  # More page definitions
└── 3_advanced.heroscript  # Additional pages
```

### Section
### File Types

```v
pub struct Section {
pub mut:
    name     string // Internal identifier
    position int    // Sort order
    path     string // URL path
    label    string // Display name
}
- **`.hero` files**: Configuration files processed in any order
- **`.heroscript` files**: Page definition files processed alphabetically

Use numeric prefixes on `.heroscript` files to control page/category ordering in the sidebar.

### Example scan.hero

```heroscript
!!atlas.scan path:"../../collections/my_collection"
```

## Best Practices
### Example include.hero

1. **File Naming**: Use numeric prefixes (0_, 1_, 2_) to control execution order
2. **Collection Reuse**: Specify the collection once, then reuse it for subsequent pages
3. **Category Organization**: Group related pages under categories for better navigation
4. **Title Extraction**: Let titles be extracted from markdown files when possible
5. **Position Management**: Use automatic positioning unless you need specific ordering
6. **Description**: Always provide descriptions for better SEO and navigation
7. **Draft Status**: Use `draft: true` for work-in-progress pages
```heroscript
// Include shared configuration (optional)
!!play.include path:'../../heroscriptall' replace:'SITENAME:my_ebook'

## Complete Example
// Or define directly
!!docusaurus.define name:'my_ebook'

See `examples/web/site/site_example.vsh` for a complete working example.
!!atlas.export include:true
```

### Running an Ebook

```bash
# Development server
hero docs -d -p /path/to/my_ebook

# Build for production
hero docs -p /path/to/my_ebook
```

For a real-world example, check: <https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech>
lib/web/site/siteplay_test.v (new file, 447 lines)
@@ -0,0 +1,447 @@
module site

import incubaid.herolib.core.playbook
import incubaid.herolib.ui.console
import os

// Big comprehensive HeroScript for testing
const test_heroscript = '
!!site.config
    name: "test_docs"
    title: "Test Documentation Site"
    description: "A comprehensive test documentation site"
    tagline: "Testing everything"
    favicon: "img/favicon.png"
    image: "img/test-og.png"
    copyright: "© 2024 Test Organization"
    url: "https://test.example.com"
    base_url: "/"
    url_home: "/docs"

!!site.config_meta
    title: "Test Docs - Advanced"
    image: "img/test-og-alternative.png"
    description: "Advanced test documentation"

!!site.navbar
    title: "Test Documentation"
    logo_alt: "Test Logo"
    logo_src: "img/logo.svg"
    logo_src_dark: "img/logo-dark.svg"

!!site.navbar_item
    label: "Getting Started"
    to: "intro"
    position: "left"

!!site.navbar_item
    label: "API Reference"
    to: "api"
    position: "left"

!!site.navbar_item
    label: "GitHub"
    href: "https://github.com/example/test"
    position: "right"

!!site.navbar_item
    label: "Blog"
    href: "https://blog.example.com"
    position: "right"

!!site.footer
    style: "dark"

!!site.footer_item
    title: "Documentation"
    label: "Introduction"
    to: "intro"

!!site.footer_item
    title: "Documentation"
    label: "Getting Started"
    to: "getting-started"

!!site.footer_item
    title: "Documentation"
    label: "Advanced Topics"
    to: "advanced"

!!site.footer_item
    title: "Community"
    label: "Discord"
    href: "https://discord.gg/example"

!!site.footer_item
    title: "Community"
    label: "Twitter"
    href: "https://twitter.com/example"

!!site.footer_item
    title: "Legal"
    label: "Privacy Policy"
    href: "https://example.com/privacy"

!!site.footer_item
    title: "Legal"
    label: "Terms of Service"
    href: "https://example.com/terms"

!!site.announcement
    content: "🎉 Version 2.0 is now available! Check out the new features."
    background_color: "#1a472a"
    text_color: "#fff"
    is_closeable: true

!!site.page_category
    name: "getting_started"
    label: "Getting Started"
    position: 10

!!site.page src: "guides:introduction"
    title: "Introduction to Test Docs"
    description: "Learn what this project is about"

!!site.page src: "installation"
    title: "Installation Guide"
    description: "How to install and setup"

!!site.page src: "quick_start"
    title: "Quick Start"
    description: "5 minute quick start guide"

!!site.page_category
    name: "concepts"
    label: "Core Concepts"
    position: 20

!!site.page src: "concepts:architecture"
    title: "Architecture Overview"
    description: "Understanding the system architecture"

!!site.page src: "components"
    title: "Key Components"
    description: "Learn about the main components"

!!site.page src: "workflow"
    title: "Typical Workflow"
    description: "How to use the system"

!!site.page_category
    name: "api"
    label: "API Reference"
    position: 30

!!site.page src: "api:rest"
    title: "REST API"
    description: "Complete REST API reference"

!!site.page src: "graphql"
    title: "GraphQL API"
    description: "GraphQL API documentation"

!!site.page src: "webhooks"
    title: "Webhooks"
    description: "Webhook configuration and examples"

!!site.page_category
    name: "advanced"
    label: "Advanced Topics"
    position: 40

!!site.page src: "advanced:performance"
    title: "Performance Optimization"
    description: "Tips for optimal performance"

!!site.page src: "scaling"
    title: "Scaling Guide"
    description: "How to scale the system"

!!site.page src: "security"
    title: "Security Best Practices"
    description: "Security considerations and best practices"

!!site.page src: "troubleshooting"
    title: "Troubleshooting"
    description: "Common issues and solutions"
    draft: false

!!site.publish
    path: "/var/www/html/docs"
    ssh_name: "production-server"

!!site.publish_dev
    path: "/tmp/docs-dev"
'

fn test_site1() ! {
	console.print_header('Site Module Comprehensive Test')
	console.lf()

	// ========================================================
	// TEST 1: Create playbook from heroscript
	// ========================================================
	console.print_item('TEST 1: Creating playbook from HeroScript')
	mut plbook := playbook.new(text: test_heroscript)!
	console.print_green('✓ Playbook created successfully')
	console.lf()

	// ========================================================
	// TEST 2: Process site configuration
	// ========================================================
	console.print_item('TEST 2: Processing site.play()')
	play(mut plbook)!
	console.print_green('✓ Site configuration processed successfully')
	console.lf()

	// ========================================================
	// TEST 3: Retrieve site and validate
	// ========================================================
	console.print_item('TEST 3: Retrieving configured site')
	mut test_site := site.get(name: 'test_docs')!
	console.print_green('✓ Site retrieved successfully')
	console.lf()

	// ========================================================
	// TEST 4: Validate SiteConfig
	// ========================================================
	console.print_header('Validating SiteConfig')
	mut config := &test_site.siteconfig

	help_test_string('Site Name', config.name, 'test_docs')
	help_test_string('Site Title', config.title, 'Test Documentation Site')
	help_test_string('Site Description', config.description, 'Advanced test documentation')
	help_test_string('Site Tagline', config.tagline, 'Testing everything')
	help_test_string('Copyright', config.copyright, '© 2024 Test Organization')
	help_test_string('Base URL', config.base_url, '/')
	help_test_string('URL Home', config.url_home, '/docs')

	help_test_string('Meta Title', config.meta_title, 'Test Docs - Advanced')
	help_test_string('Meta Image', config.meta_image, 'img/test-og-alternative.png')

	assert config.build_dest.len == 1, 'Should have 1 production build destination'
	console.print_green('✓ Production build dest: ${config.build_dest[0].path}')

	assert config.build_dest_dev.len == 1, 'Should have 1 dev build destination'
	console.print_green('✓ Dev build dest: ${config.build_dest_dev[0].path}')

	console.lf()

	// ========================================================
	// TEST 5: Validate Menu Configuration
	// ========================================================
	console.print_header('Validating Menu Configuration')
	mut menu := config.menu

	help_test_string('Menu Title', menu.title, 'Test Documentation')
	help_test_string('Menu Logo Alt', menu.logo_alt, 'Test Logo')
	help_test_string('Menu Logo Src', menu.logo_src, 'img/logo.svg')
	help_test_string('Menu Logo Src Dark', menu.logo_src_dark, 'img/logo-dark.svg')

	assert menu.items.len == 4, 'Should have 4 navbar items, got ${menu.items.len}'
	console.print_green('✓ Menu has 4 navbar items')

	// Validate navbar items
	help_test_navbar_item(menu.items[0], 'Getting Started', 'intro', '', 'left')
	help_test_navbar_item(menu.items[1], 'API Reference', 'api', '', 'left')
	help_test_navbar_item(menu.items[2], 'GitHub', '', 'https://github.com/example/test',
		'right')
	help_test_navbar_item(menu.items[3], 'Blog', '', 'https://blog.example.com', 'right')

	console.lf()

	// ========================================================
	// TEST 6: Validate Footer Configuration
	// ========================================================
	console.print_header('Validating Footer Configuration')
	mut footer := config.footer

	help_test_string('Footer Style', footer.style, 'dark')
	assert footer.links.len == 3, 'Should have 3 footer link groups, got ${footer.links.len}'
	console.print_green('✓ Footer has 3 link groups')

	// Validate footer structure
	for link_group in footer.links {
		console.print_item('Footer group: "${link_group.title}" has ${link_group.items.len} items')
	}

	// Detailed footer validation
	mut doc_links := footer.links.filter(it.title == 'Documentation')
	assert doc_links.len == 1, 'Should have 1 Documentation link group'
	assert doc_links[0].items.len == 3, 'Documentation should have 3 items'
	console.print_green('✓ Documentation footer: 3 items')

	mut community_links := footer.links.filter(it.title == 'Community')
	assert community_links.len == 1, 'Should have 1 Community link group'
	assert community_links[0].items.len == 2, 'Community should have 2 items'
	console.print_green('✓ Community footer: 2 items')

	mut legal_links := footer.links.filter(it.title == 'Legal')
	assert legal_links.len == 1, 'Should have 1 Legal link group'
	assert legal_links[0].items.len == 2, 'Legal should have 2 items'
	console.print_green('✓ Legal footer: 2 items')

	console.lf()

	// ========================================================
	// TEST 7: Validate Announcement Bar
	// ========================================================
	console.print_header('Validating Announcement Bar')
	mut announcement := config.announcement

	help_test_string('Announcement Content', announcement.content, '🎉 Version 2.0 is now available! Check out the new features.')
	help_test_string('Announcement BG Color', announcement.background_color, '#1a472a')
	help_test_string('Announcement Text Color', announcement.text_color, '#fff')
	assert announcement.is_closeable == true, 'Announcement should be closeable'
	console.print_green('✓ Announcement bar configured correctly')

	console.lf()
}

fn test_site2() ! {
	console.print_header('Site Module Comprehensive Test')
	console.lf()

	mut plbook := playbook.new(text: test_heroscript)!
	mut test_site := site.get(name: 'test_docs')!

	// ========================================================
	// TEST 8: Validate Pages
	// ========================================================
	console.print_header('Validating Pages')
	mut pages := test_site.pages.clone()

	assert pages.len == 13, 'Should have 13 pages, got ${pages.len}'
	console.print_green('✓ Total pages: ${pages.len}')

	// List and validate pages
	mut page_ids := pages.keys()
	page_ids.sort()

	for page_id in page_ids {
		mut page := pages[page_id]
		console.print_debug('  Page: ${page_id} - "${page.title}"')
	}

	// Validate specific pages
	assert 'guides:introduction' in pages, 'guides:introduction page not found'
	console.print_green('✓ Found guides:introduction')

	assert 'concepts:architecture' in pages, 'concepts:architecture page not found'
	console.print_green('✓ Found concepts:architecture')

	assert 'api:rest' in pages, 'api:rest page not found'
	console.print_green('✓ Found api:rest')

	console.lf()

	// ========================================================
	// TEST 9: Validate Navigation Structure
	// ========================================================
	console.print_header('Validating Navigation Structure')
	mut sidebar := unsafe { test_site.nav.my_sidebar.clone() }

	console.print_item('Navigation sidebar has ${sidebar.len} items')

	// Count categories
	mut category_count := 0
	mut doc_count := 0

	for item in sidebar {
		match item {
			NavCat {
				category_count++
				console.print_debug('  Category: "${item.label}" with ${item.items.len} sub-items')
			}
			NavDoc {
				doc_count++
				console.print_debug('  Doc: "${item.label}" (${item.id})')
			}
			NavLink {
				console.print_debug('  Link: "${item.label}" -> ${item.href}')
			}
		}
	}

	assert category_count == 4, 'Should have 4 categories, got ${category_count}'
	console.print_green('✓ Navigation has 4 categories')

	// Validate category structure
	for item in sidebar {
		match item {
			NavCat {
				console.print_item('Category: "${item.label}"')
				println('  Collapsible: ${item.collapsible}, Collapsed: ${item.collapsed}')
				println('  Items: ${item.items.len}')

				// Validate sub-items
				for sub_item in item.items {
					match sub_item {
						NavDoc {
							println('    - ${sub_item.label} (${sub_item.id})')
						}
						else {
							println('    - Unexpected item type')
						}
					}
				}
			}
			else {}
		}
	}

	console.lf()

	// ========================================================
	// TEST 10: Validate Site Factory
	// ========================================================
	console.print_header('Validating Site Factory')

	mut all_sites := list()
	console.print_item('Total sites registered: ${all_sites.len}')
	for site_name in all_sites {
		console.print_debug('  - ${site_name}')
	}

	assert all_sites.contains('test_docs'), 'test_docs should be in sites list'
	console.print_green('✓ test_docs found in factory')

	assert exists(name: 'test_docs'), 'test_docs should exist'
	console.print_green('✓ test_docs verified to exist')

	console.lf()

	// println(test_site)
	// if true{panic("ss33")}
}

// ============================================================
// Helper Functions for Testing
// ============================================================

fn help_test_string(label string, actual string, expected string) {
	if actual == expected {
		console.print_green('✓ ${label}: "${actual}"')
	} else {
		console.print_stderr('✗ ${label}: expected "${expected}", got "${actual}"')
		panic('Test failed: ${label}')
	}
}

fn help_test_navbar_item(item MenuItem, label string, to string, href string, position string) {
	assert item.label == label, 'Expected label "${label}", got "${item.label}"'
	assert item.to == to, 'Expected to "${to}", got "${item.to}"'
	assert item.href == href, 'Expected href "${href}", got "${item.href}"'
	assert item.position == position, 'Expected position "${position}", got "${item.position}"'
	console.print_green('✓ Navbar item: "${label}"')
}