...
@@ -35,7 +35,7 @@ For any domain, **code and module-level docs are authoritative**:
 - Core install & usage: `herolib/README.md`, scripts under `scripts/`
 - Site module: `lib/web/site/ai_instructions.md`, `lib/web/site/readme.md`
 - Docusaurus module: `lib/web/docusaurus/README.md`, `lib/web/docusaurus/*.v`
-- Atlas client: `lib/data/atlas/client/README.md`
+- DocTree client: `lib/data/doctree/client/README.md`
 - HeroModels: `lib/hero/heromodels/*.v` + tests
 
 `aiprompts/` files **must not contradict** these. When in doubt, follow the code / module docs first and treat prompts as guidance.
@@ -47,7 +47,7 @@ For any domain, **code and module-level docs are authoritative**:
 - `herolib_core/` & `herolib_advanced/`
 Per-module instructions for core/advanced HeroLib features.
 - `docusaurus/`
-AI manual for building Hero docs/ebooks with the Docusaurus + Site + Atlas pipeline.
+AI manual for building Hero docs/ebooks with the Docusaurus + Site + DocTree pipeline.
 - `instructions/`
 Active, higher-level instructions (e.g. HeroDB base filesystem).
 - `instructions_archive/`
@@ -63,7 +63,7 @@ For any domain, **code and module-level docs are authoritative**:
 
 - Content under `instructions_archive/` is **kept for reference** and may describe older flows (e.g. older documentation or prompt pipelines).
 Do **not** use it as a primary source for new work unless explicitly requested.
-- Some prompts mention **Doctree**; the current default docs pipeline uses **Atlas**. Doctree/`doctreeclient` is an alternative/legacy backend.
+- Some prompts mention **Doctree**; the current default docs pipeline uses **DocTree**. Doctree/`doctreeclient` is an alternative/legacy backend.
 
 ## Guidelines for AI Agents
 
@@ -1,4 +1,4 @@
-> NOTE: Atlas is the default document collections/export pipeline used by the current Docusaurus integration (see `lib/data/atlas/client`). The Doctree module described here is an alternative/legacy export mechanism that still exists but is not the primary path. Use Atlas by default unless you explicitly need Doctree.
+> NOTE: DocTree is the default document collections/export pipeline used by the current Docusaurus integration (see `lib/data/doctree/client`). The Doctree module described here is an alternative/legacy export mechanism that still exists but is not the primary path. Use DocTree by default unless you explicitly need Doctree.
 
 # Doctree Export Specification
 
@@ -8,10 +8,10 @@ The recommended directory structure for an ebook:
 
 ```
 my_ebook/
-├── scan.hero # Atlas collection scanning
+├── scan.hero # DocTree collection scanning
 ├── config.hero # Site configuration
 ├── menus.hero # Navbar and footer configuration
-├── include.hero # Docusaurus define and atlas export
+├── include.hero # Docusaurus define and doctree export
 ├── 1_intro.heroscript # Page definitions (numbered for ordering)
 ├── 2_concepts.heroscript # More page definitions
 └── 3_advanced.heroscript # Additional pages
@@ -33,7 +33,7 @@ To effectively create ebooks with HeroLib, it's crucial to understand the interp
 
 * **HeroScript**: A concise scripting language used to define the structure, configuration, and content flow of your Docusaurus site. It acts as the declarative interface for the entire process. Files use `.hero` extension for configuration and `.heroscript` for page definitions.
 * **Docusaurus**: A popular open-source static site generator. HeroLib uses Docusaurus as the underlying framework to render your ebook content into a navigable website.
-* **Atlas**: HeroLib's document collection layer. Atlas scans and exports markdown "collections" and "pages" that Docusaurus consumes.
+* **DocTree**: HeroLib's document collection layer. DocTree scans and exports markdown "collections" and "pages" that Docusaurus consumes.
 
 ## 2. Setting Up a Docusaurus Project with HeroLib
 
@@ -53,8 +53,8 @@ The `docusaurus.define` HeroScript directive configures the global settings for
 reset: true // clean build dir before building (optional)
 install: true // run bun install if needed (optional)
 template_update: true // update the Docusaurus template (optional)
-atlas_dir: "/tmp/atlas_export" // where Atlas exports collections
-use_atlas: true // use Atlas as content backend
+doctree_dir: "/tmp/doctree_export" // where DocTree exports collections
+use_doctree: true // use DocTree as content backend
 ```
 
 **Arguments:**
@@ -65,8 +65,8 @@ The `docusaurus.define` HeroScript directive configures the global settings for
 * `reset` (boolean, optional): If `true`, clean the build directory before starting.
 * `install` (boolean, optional): If `true`, run dependency installation (e.g., `bun install`).
 * `template_update` (boolean, optional): If `true`, update the Docusaurus template.
-* `atlas_dir` (string, optional): Directory where Atlas exports collections (used by the Atlas client in `lib/data/atlas/client`).
-* `use_atlas` (boolean, optional): If `true`, use the Atlas client as the content backend (default behavior).
+* `doctree_dir` (string, optional): Directory where DocTree exports collections (used by the DocTree client in `lib/data/doctree/client`).
+* `use_doctree` (boolean, optional): If `true`, use the DocTree client as the content backend (default behavior).
 
 ### 2.2. Adding a Docusaurus Site (`docusaurus.add`)
 
@@ -296,21 +296,21 @@ This is where you define the actual content pages and how they are organized int
 * `label` (string, required): The display name for the category in the sidebar.
 * `position` (int, optional): The order of the category in the sidebar (auto-incremented if omitted).
 * **`site.page`**:
-* `src` (string, required): **Crucial for Atlas/collection integration.** Format: `collection_name:page_name` for the first page, or just `page_name` to reuse the previous collection.
+* `src` (string, required): **Crucial for DocTree/collection integration.** Format: `collection_name:page_name` for the first page, or just `page_name` to reuse the previous collection.
 * `title` (string, optional): The title of the page. If not provided, HeroLib extracts it from the markdown `# Heading` or uses the page name.
 * `description` (string, optional): A short description for the page, used in frontmatter.
 * `hide_title` (boolean, optional): If `true`, the title will not be displayed on the page itself.
 * `draft` (boolean, optional): If `true`, the page will be hidden from navigation.
 
-### 3.7. Collections and Atlas/Doctree Integration
+### 3.7. Collections and DocTree/Doctree Integration
 
 The `site.page` directive's `src` parameter (`collection_name:page_name`) is the bridge to your content collections.
 
-**Current default: Atlas export**
+**Current default: DocTree export**
 
-1. **Collections**: Atlas exports markdown files into collections under an `export_dir` (see `lib/data/atlas/client`).
-2. **Export step**: A separate process (Atlas) writes the collections into `atlas_dir` (e.g., `/tmp/atlas_export`), following the `content/` + `meta/` structure.
-3. **Docusaurus consumption**: The Docusaurus module uses the Atlas client (`atlas_client`) to resolve `collection_name:page_name` into markdown content and assets when generating docs.
+1. **Collections**: DocTree exports markdown files into collections under an `export_dir` (see `lib/data/doctree/client`).
+2. **Export step**: A separate process (DocTree) writes the collections into `doctree_dir` (e.g., `/tmp/doctree_export`), following the `content/` + `meta/` structure.
+3. **Docusaurus consumption**: The Docusaurus module uses the DocTree client (`doctree_client`) to resolve `collection_name:page_name` into markdown content and assets when generating docs.
 
 **Alternative: Doctree/`doctreeclient`**
 
@@ -324,7 +324,7 @@ In older setups, or when explicitly configured, Doctree and `doctreeclient` can
 ```
 
 This will pull the `collections` directory from the specified Git URL and make its contents available to Doctree.
-3. **Page Retrieval**: When `site.page` references `src:"my_collection:my_page"`, the client (`atlas_client` or `doctreeclient`, depending on configuration) fetches the content of `my_page.md` from the `my_collection` collection.
+3. **Page Retrieval**: When `site.page` references `src:"my_collection:my_page"`, the client (`doctree_client` or `doctreeclient`, depending on configuration) fetches the content of `my_page.md` from the `my_collection` collection.
 
 ## 4. Building and Developing Your Ebook
 
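For orientation, a minimal V sketch of the consumption side described above, using only calls that appear elsewhere in this commit (`doctree_client.new`, `list_collections`, `get_page_path`); the export directory and the collection/page names are placeholders:

```v
import incubaid.herolib.web.doctree_client
import os

// Open the DocTree export that `!!doctree.export` produced.
mut client := doctree_client.new(export_dir: '/tmp/doctree_export')!

// List what was exported.
collections := client.list_collections()!
println('exported collections: ${collections}')

// Resolve `my_collection:my_page` the way a `site.page` src reference is resolved,
// then read the markdown from the exported content/ tree.
page_path := client.get_page_path('my_collection', 'my_page')!
println(os.read_file(page_path)!)
```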
@@ -39,7 +39,7 @@ pub fn play(mut plbook PlayBook) ! {
 path_build := p.get_default('path_build', '')!
 path_publish := p.get_default('path_publish', '')!
 reset := p.get_default_false('reset')
-use_atlas := p.get_default_false('use_atlas')
+use_doctree := p.get_default_false('use_doctree')
 }
 
 // Process 'docusaurus.add' actions to configure individual Docusaurus sites
@@ -126,7 +126,7 @@ pub fn play(mut plbook PlayBook) ! {
 path_build := p.get_default('path_build', '')!
 path_publish := p.get_default('path_publish', '')!
 reset := p.get_default_false('reset')
-use_atlas := p.get_default_false('use_atlas')
+use_doctree := p.get_default_false('use_doctree')
 }
 
 // Process 'docusaurus.add' actions to configure individual Docusaurus sites
@@ -90,7 +90,7 @@ fn do() ! {
 herocmds.cmd_docusaurus(mut cmd)
 herocmds.cmd_web(mut cmd)
 herocmds.cmd_sshagent(mut cmd)
-herocmds.cmd_atlas(mut cmd)
+herocmds.cmd_doctree(mut cmd)
 
 cmd.setup()
 cmd.parse(os.args)
@@ -1,12 +1,12 @@
 #!/usr/bin/env hero
 
-!!atlas.scan
+!!doctree.scan
 git_url: 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/mycelium_economics'
 
-!!atlas.scan
+!!doctree.scan
 git_url: 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/authentic_web'
 
-// !!atlas.scan
+// !!doctree.scan
 // git_url: 'https://git.ourworld.tf/geomind/docs_geomind/src/branch/main/collections/usecases'
 
-!!atlas.export destination: '/tmp/atlas_export'
+!!doctree.export destination: '/tmp/doctree_export'
@@ -1,15 +1,15 @@
 #!/usr/bin/env hero
 
-!!atlas.scan
-git_url: 'https://git.ourworld.tf/geomind/atlas_geomind/src/branch/main/content'
-meta_path: '/tmp/atlas_export_meta'
+!!doctree.scan
+git_url: 'https://git.ourworld.tf/geomind/doctree_geomind/src/branch/main/content'
+meta_path: '/tmp/doctree_export_meta'
 
-!!atlas.scan
-git_url: 'https://git.ourworld.tf/tfgrid/atlas_threefold/src/branch/main/content'
-meta_path: '/tmp/atlas_export_meta'
+!!doctree.scan
+git_url: 'https://git.ourworld.tf/tfgrid/doctree_threefold/src/branch/main/content'
+meta_path: '/tmp/doctree_export_meta'
 ignore3: 'static,templates,groups'
 
-!!atlas.export
-destination: '/tmp/atlas_export_test'
+!!doctree.export
+destination: '/tmp/doctree_export_test'
 include: true
 redis: true
@@ -1,5 +1,5 @@
 #!/usr/bin/env hero
 
-!!atlas.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"
+!!doctree.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"
 
-!!atlas.export destination: '/tmp/atlas_export'
+!!doctree.export destination: '/tmp/doctree_export'
@@ -1,14 +1,14 @@
 #!/usr/bin/env -S vrun
 
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree
 import incubaid.herolib.ui.console
 import os
 
 fn main() {
 println('=== ATLAS DEBUG SCRIPT ===\n')
 
-// Create and scan atlas
-mut a := atlas.new(name: 'main')!
+// Create and scan doctree
+mut a := doctree.new(name: 'main')!
 
 // Scan the collections
 println('Scanning collections...\n')
@@ -29,7 +29,7 @@ fn main() {
 path: '/Users/despiegk/code/git.ourworld.tf/tfgrid/docs_tfgrid4/collections/mycelium_cloud_tech'
 )!
 
-// Initialize atlas (post-scanning validation)
+// Initialize doctree (post-scanning validation)
 a.init_post()!
 
 // Print all pages per collection
@@ -190,7 +190,7 @@ fn main() {
 println('\n\n=== EXPORT AND FILE VERIFICATION TEST ===\n')
 
 // Create export directory
-export_path := '/tmp/atlas_debug_export'
+export_path := '/tmp/doctree_debug_export'
 if os.exists(export_path) {
 os.rmdir_all(export_path)!
 }
@@ -1,18 +1,18 @@
 #!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
 
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree
 import incubaid.herolib.core.pathlib
-import incubaid.herolib.web.atlas_client
+import incubaid.herolib.web.doctree_client
 import os
 
-// Example: Atlas Export and AtlasClient Usage
+// Example: DocTree Export and AtlasClient Usage
 
-println('Atlas Export & Client Example')
+println('DocTree Export & Client Example')
 println('============================================================')
 
 // Setup test directory
-test_dir := '/tmp/atlas_example'
-export_dir := '/tmp/atlas_export'
+test_dir := '/tmp/doctree_example'
+export_dir := '/tmp/doctree_export'
 os.rmdir_all(test_dir) or {}
 os.rmdir_all(export_dir) or {}
 os.mkdir_all(test_dir)!
@@ -30,9 +30,9 @@ page1.write('# Introduction\n\nWelcome to the docs!')!
 mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
 page2.write('# Guide\n\n!!include docs:intro\n\nMore content here.')!
 
-// Create and scan atlas
-println('\n1. Creating Atlas and scanning...')
-mut a := atlas.new(name: 'my_docs')!
+// Create and scan doctree
+println('\n1. Creating DocTree and scanning...')
+mut a := doctree.new(name: 'my_docs')!
 a.scan(path: test_dir)!
 
 println(' Found ${a.collections.len} collection(s)')
@@ -60,7 +60,7 @@ println(' ✓ Export complete')
 
 // Use AtlasClient to access exported content
 println('\n4. Using AtlasClient to read exported content...')
-mut client := atlas_client.new(export_dir: export_dir)!
+mut client := doctree_client.new(export_dir: export_dir)!
 
 // List collections
 collections := client.list_collections()!
@@ -1,19 +1,19 @@
 module herocmds
 
 import incubaid.herolib.ui.console
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree
 import incubaid.herolib.core.playcmds
 import incubaid.herolib.develop.gittools
 import incubaid.herolib.web.docusaurus
 import os
 import cli { Command, Flag }
 
-pub fn cmd_atlas(mut cmdroot Command) Command {
+pub fn cmd_doctree(mut cmdroot Command) Command {
 mut cmd_run := Command{
-name: 'atlas'
-description: 'Scan and export atlas collections.'
+name: 'doctree'
+description: 'Scan and export doctree collections.'
 required_args: 0
-execute: cmd_atlas_execute
+execute: cmd_doctree_execute
 }
 
 cmd_run.add_flag(Flag{
@@ -29,7 +29,7 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
 required: false
 name: 'url'
 abbrev: 'u'
-description: 'Git URL where atlas source is.'
+description: 'Git URL where doctree source is.'
 })
 
 cmd_run.add_flag(Flag{
@@ -37,7 +37,7 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
 required: false
 name: 'path'
 abbrev: 'p'
-description: 'Path where atlas collections are located.'
+description: 'Path where doctree collections are located.'
 })
 
 cmd_run.add_flag(Flag{
@@ -45,7 +45,7 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
 required: false
 name: 'name'
 abbrev: 'n'
-description: 'Atlas instance name (default: "default").'
+description: 'DocTree instance name (default: "default").'
 })
 
 cmd_run.add_flag(Flag{
@@ -112,7 +112,7 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
 return cmdroot
 }
 
-fn cmd_atlas_execute(cmd Command) ! {
+fn cmd_doctree_execute(cmd Command) ! {
 // ---------- FLAGS ----------
 mut reset := cmd.flags.get_bool('reset') or { false }
 mut update := cmd.flags.get_bool('update') or { false }
@@ -138,27 +138,27 @@ fn cmd_atlas_execute(cmd Command) ! {
 path = os.getwd()
 }
 
-atlas_path := gittools.path(
+doctree_path := gittools.path(
 git_url: url
 path: path
 git_reset: reset
 git_pull: update
 )!
 
-console.print_header('Running Atlas for: ${atlas_path.path}')
+console.print_header('Running DocTree for: ${doctree_path.path}')
 
 // Run HeroScript if exists
 playcmds.run(
-heroscript_path: atlas_path.path
+heroscript_path: doctree_path.path
 reset: reset
 emptycheck: false
 )!
 
-// Create or get atlas instance
-mut a := if atlas.exists(name) {
-atlas.get(name)!
+// Create or get doctree instance
+mut a := if doctree.exists(name) {
+doctree.get(name)!
 } else {
-atlas.new(name: name)!
+doctree.new(name: name)!
 }
 
 // Default behavior: scan and export if no flags specified
@@ -170,13 +170,13 @@ fn cmd_atlas_execute(cmd Command) ! {
 // Execute operations
 if scan {
 console.print_header('Scanning collections...')
-a.scan(path: atlas_path.path)!
+a.scan(path: doctree_path.path)!
 console.print_green('✓ Scan complete: ${a.collections.len} collection(s) found')
 }
 
 if export {
 if destination == '' {
-destination = '${atlas_path.path}/output'
+destination = '${doctree_path.path}/output'
 }
 
 console.print_header('Exporting collections to: ${destination}')
@@ -203,14 +203,14 @@ fn cmd_atlas_execute(cmd Command) ! {
 // Run dev server if -dev flag is set
 if dev {
 console.print_header('Starting development server...')
-console.print_item('Atlas export directory: ${destination}')
-console.print_item('Looking for docusaurus configuration in: ${atlas_path.path}')
+console.print_item('DocTree export directory: ${destination}')
+console.print_item('Looking for docusaurus configuration in: ${doctree_path.path}')
 
-// Run the docusaurus dev server using the exported atlas content
-// This will look for a .heroscript file in the atlas_path that configures docusaurus
-// with use_atlas:true and atlas_export_dir pointing to the destination
+// Run the docusaurus dev server using the exported doctree content
+// This will look for a .heroscript file in the doctree_path that configures docusaurus
+// with use_doctree:true and doctree_export_dir pointing to the destination
 playcmds.run(
-heroscript_path: atlas_path.path
+heroscript_path: doctree_path.path
 reset: reset
 )!
 
@@ -1,7 +1,7 @@
 module playcmds
 
 import incubaid.herolib.core.playbook { PlayBook }
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree
 import incubaid.herolib.biz.bizmodel
 import incubaid.herolib.threefold.incatokens
 import incubaid.herolib.web.site
@@ -71,7 +71,7 @@ pub fn run(args_ PlayArgs) ! {
 site.play(mut plbook)!
 
 incatokens.play(mut plbook)!
-atlas.play(mut plbook)!
+doctree.play(mut plbook)!
 docusaurus.play(mut plbook)!
 hetznermanager.play(mut plbook)!
 hetznermanager.play2(mut plbook)!
@@ -1,61 +0,0 @@
-module atlas
-
-import incubaid.herolib.core.texttools
-import incubaid.herolib.core.pathlib
-import incubaid.herolib.ui.console
-import incubaid.herolib.data.paramsparser
-
-__global (
-atlases shared map[string]&Atlas
-)
-
-@[params]
-pub struct AtlasNewArgs {
-pub mut:
-name string = 'default'
-}
-
-// Create a new Atlas
-pub fn new(args AtlasNewArgs) !&Atlas {
-mut name := texttools.name_fix(args.name)
-
-mut a := &Atlas{
-name: name
-}
-
-set(a)
-return a
-}
-
-// Get Atlas from global map
-pub fn get(name string) !&Atlas {
-mut fixed_name := texttools.name_fix(name)
-rlock atlases {
-if fixed_name in atlases {
-return atlases[fixed_name] or { return error('Atlas ${name} not found') }
-}
-}
-return error("Atlas '${name}' not found")
-}
-
-// Check if Atlas exists
-pub fn exists(name string) bool {
-mut fixed_name := texttools.name_fix(name)
-rlock atlases {
-return fixed_name in atlases
-}
-}
-
-// List all Atlas names
-pub fn list() []string {
-rlock atlases {
-return atlases.keys()
-}
-}
-
-// Store Atlas in global map
-fn set(atlas &Atlas) {
-lock atlases {
-atlases[atlas.name] = atlas
-}
-}
@@ -1,102 +0,0 @@
-module atlas
-
-// Get a page from any collection using format "collection:page"
-pub fn (a Atlas) page_get(key string) !&Page {
-parts := key.split(':')
-if parts.len != 2 {
-return error('Invalid page key format. Use "collection:page" in page_get')
-}
-
-col := a.get_collection(parts[0])!
-return col.page_get(parts[1])!
-}
-
-// Get an image from any collection using format "collection:image"
-pub fn (a Atlas) image_get(key string) !&File {
-parts := key.split(':')
-if parts.len != 2 {
-return error('Invalid image key format. Use "collection:image" in image_get')
-}
-
-col := a.get_collection(parts[0])!
-return col.image_get(parts[1])!
-}
-
-// Get a file from any collection using format "collection:file"
-pub fn (a Atlas) file_get(key string) !&File {
-parts := key.split(':')
-if parts.len != 2 {
-return error('Invalid file key format. Use "collection:file" in file_get')
-}
-
-col := a.get_collection(parts[0])!
-return col.file_get(parts[1])!
-}
-
-// Get a file (can be image) from any collection using format "collection:file"
-pub fn (a Atlas) file_or_image_get(key string) !&File {
-parts := key.split(':')
-if parts.len != 2 {
-return error('Invalid file key format. Use "collection:file"')
-}
-col := a.get_collection(parts[0])!
-return col.file_or_image_get(parts[1])!
-}
-
-// Check if page exists
-pub fn (a Atlas) page_exists(key string) !bool {
-parts := key.split(':')
-if parts.len != 2 {
-return error("Invalid file key format. Use 'collection:file' in page_exists")
-}
-
-col := a.get_collection(parts[0]) or { return false }
-return col.page_exists(parts[1])
-}
-
-// Check if image exists
-pub fn (a Atlas) image_exists(key string) !bool {
-parts := key.split(':')
-if parts.len != 2 {
-return error("Invalid file key format. Use 'collection:file' in image_exists")
-}
-
-col := a.get_collection(parts[0]) or { return false }
-return col.image_exists(parts[1])
-}
-
-// Check if file exists
-pub fn (a Atlas) file_exists(key string) !bool {
-parts := key.split(':')
-if parts.len != 2 {
-return error("Invalid file key format. Use 'collection:file' in file_exists")
-}
-
-col := a.get_collection(parts[0]) or { return false }
-return col.file_exists(parts[1])
-}
-
-pub fn (a Atlas) file_or_image_exists(key string) !bool {
-parts := key.split(':')
-if parts.len != 2 {
-return error("Invalid file key format. Use 'collection:file' in file_or_image_exists")
-}
-col := a.get_collection(parts[0]) or { return false }
-return col.file_or_image_exists(parts[1])
-}
-
-// List all pages in Atlas
-pub fn (a Atlas) list_pages() map[string][]string {
-mut result := map[string][]string{}
-
-for col_name, col in a.collections {
-mut page_names := []string{}
-for page_name, _ in col.pages {
-page_names << page_name
-}
-page_names.sort()
-result[col_name] = page_names
-}
-
-return result
-}
@@ -466,7 +466,7 @@ pub fn generate_random_workspace_name() string {
 'script',
 'ocean',
 'phoenix',
-'atlas',
+'doctree',
 'quest',
 'shield',
 'dragon',
@@ -1,6 +1,6 @@
 # AtlasClient
 
-A simple API for accessing document collections exported by the `atlas` module.
+A simple API for accessing document collections exported by the `doctree` module.
 
 ## What It Does
 
@@ -15,10 +15,10 @@ AtlasClient provides methods to:
 ## Quick Start
 
 ```v
-import incubaid.herolib.web.atlas_client
+import incubaid.herolib.web.doctree_client
 
-// Create client, exports will be in $/hero/var/atlas_export by default
-mut client := atlas_client.new()!
+// Create client, exports will be in $/hero/var/doctree_export by default
+mut client := doctree_client.new()!
 
 // List collections
 collections := client.list_collections()!
@@ -34,7 +34,7 @@ if client.has_errors('my_collection')! {
 
 ## Export Structure
 
-Atlas exports to this structure:
+DocTree exports to this structure:
 
 ```txt
 export_dir/
@@ -87,9 +87,9 @@ Names are normalized using `name_fix()`:
 
 ## Example
 
-See `examples/data/atlas_client/basic_usage.vsh` for a complete working example.
+See `examples/data/doctree_client/basic_usage.vsh` for a complete working example.
 
 ## See Also
 
-- `lib/data/atlas/` - Atlas module for exporting collections
+- `lib/data/doctree/` - DocTree module for exporting collections
 - `lib/web/doctreeclient/` - Alternative client for doctree collections
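Building on the Quick Start above, a slightly fuller usage sketch; the calls are taken from the client code in this commit, but treat the snippet as illustrative rather than canonical:

```v
import incubaid.herolib.web.doctree_client

// Point the client at an existing DocTree export.
mut client := doctree_client.new(export_dir: '/tmp/doctree_export')!

// Walk the exported collections and print how many pages each one has,
// using the per-collection metadata stored under meta/.
for name in client.list_collections()! {
	meta := client.get_collection_metadata(name)!
	println('collection ${name}: ${meta.pages.len} page(s)')
}

// list_markdown() (added in this commit) renders the same overview as markdown.
println(client.list_markdown()!)
```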
@@ -7,12 +7,12 @@ import os
 import json
 import incubaid.herolib.core.redisclient
 
-// AtlasClient provides access to Atlas-exported documentation collections
+// AtlasClient provides access to DocTree-exported documentation collections
 // It reads from both the exported directory structure and Redis metadata
 pub struct AtlasClient {
 pub mut:
 redis &redisclient.Redis
-export_dir string // Path to the atlas export directory (contains content/ and meta/)
+export_dir string // Path to the doctree export directory (contains content/ and meta/)
 }
 
 // get_page_path returns the path for a page in a collection
@@ -41,8 +41,8 @@ pub fn (mut c AtlasClient) get_page_path(collection_name string, page_name strin
 // get_file_path returns the path for a file in a collection
 // Files are stored in {export_dir}/content/{collection}/{filename}
 pub fn (mut c AtlasClient) get_file_path(collection_name_ string, file_name_ string) !string {
-collection_name := texttools.name_fix_no_ext(collection_name_)
-file_name := texttools.name_fix_keepext(file_name_)
+collection_name := texttools.name_fix(collection_name_)
+file_name := texttools.name_fix(file_name_)
 
 // Check if export directory exists
 if !os.exists(c.export_dir) {
@@ -64,9 +64,9 @@ pub fn (mut c AtlasClient) get_file_path(collection_name_ string, file_name_ str
 // Images are stored in {export_dir}/content/{collection}/{imagename}
 pub fn (mut c AtlasClient) get_image_path(collection_name_ string, image_name_ string) !string {
 // Apply name normalization
-collection_name := texttools.name_fix_no_ext(collection_name_)
+collection_name := texttools.name_fix(collection_name_)
 // Images keep their original names with extensions
-image_name := texttools.name_fix_keepext(image_name_)
+image_name := texttools.name_fix(image_name_)
 
 // Check if export directory exists
 if !os.exists(c.export_dir) {
@@ -199,38 +199,11 @@ pub fn (mut c AtlasClient) list_pages_map() !map[string][]string {
 return result
 }
 
-// list_markdown returns the collections and their pages in markdown format.
-pub fn (mut c AtlasClient) list_markdown() !string {
-mut markdown_output := ''
-pages_map := c.list_pages_map()!
-
-if pages_map.len == 0 {
-return 'No collections or pages found in this atlas export.'
-}
-
-mut sorted_collections := pages_map.keys()
-sorted_collections.sort()
-
-for col_name in sorted_collections {
-page_names := pages_map[col_name]
-markdown_output += '## ${col_name}\n'
-if page_names.len == 0 {
-markdown_output += ' * No pages in this collection.\n'
-} else {
-for page_name in page_names {
-markdown_output += ' * ${page_name}\n'
-}
-}
-markdown_output += '\n' // Add a newline for spacing between collections
-}
-return markdown_output
-}
-
 // get_collection_metadata reads and parses the metadata JSON file for a collection
 // Metadata is stored in {export_dir}/meta/{collection}.json
 pub fn (mut c AtlasClient) get_collection_metadata(collection_name string) !CollectionMetadata {
 // Apply name normalization
-fixed_collection_name := texttools.name_fix_no_ext(collection_name)
+fixed_collection_name := texttools.name_fix(collection_name)
 
 meta_path := os.join_path(c.export_dir, 'meta', '${fixed_collection_name}.json')
 
@@ -259,7 +232,14 @@ pub fn (mut c AtlasClient) has_errors(collection_name string) bool {
 return errors.len > 0
 }
 
+pub fn (mut c AtlasClient) copy_collection(collection_name string, destination_path string) ! {
+// TODO: list over all pages, links & files and copy them to destination
+}
+
+// will copy all pages linked from a page to a destination directory as well as the page itself
 pub fn (mut c AtlasClient) copy_pages(collection_name string, page_name string, destination_path string) ! {
+// TODO: copy page itself
+
 // Get page links from metadata
 links := c.get_page_links(collection_name, page_name)!
 
@@ -277,12 +257,11 @@ pub fn (mut c AtlasClient) copy_pages(collection_name string, page_name string,
 // Get image path and copy
 img_path := c.get_page_path(link.target_collection_name, link.target_item_name)!
 mut src := pathlib.get_file(path: img_path)!
-src.copy(dest: '${img_dest.path}/${src.name_fix_keepext()}')!
-console.print_debug(' ********. Copied page: ${src.path} to ${img_dest.path}/${src.name_fix_keepext()}')
+src.copy(dest: '${img_dest.path}/${src.name_fix_no_ext()}')!
+console.print_debug(' ********. Copied page: ${src.path} to ${img_dest.path}/${src.name_fix_no_ext()}')
 }
 }
 
-
 pub fn (mut c AtlasClient) copy_images(collection_name string, page_name string, destination_path string) ! {
 // Get page links from metadata
 links := c.get_page_links(collection_name, page_name)!
@@ -301,8 +280,8 @@ pub fn (mut c AtlasClient) copy_images(collection_name string, page_name string,
 // Get image path and copy
 img_path := c.get_image_path(link.target_collection_name, link.target_item_name)!
 mut src := pathlib.get_file(path: img_path)!
-src.copy(dest: '${img_dest.path}/${src.name_fix_keepext()}')!
-// console.print_debug('Copied image: ${src.path} to ${img_dest.path}/${src.name_fix_keepext()}')
+src.copy(dest: '${img_dest.path}/${src.name_fix_no_ext()}')!
+// console.print_debug('Copied image: ${src.path} to ${img_dest.path}/${src.name_fix()}')
 }
 }
 
@@ -328,7 +307,7 @@ pub fn (mut c AtlasClient) copy_files(collection_name string, page_name string,
 // Get file path and copy
 file_path := c.get_file_path(link.target_collection_name, link.target_item_name)!
 mut src := pathlib.get_file(path: file_path)!
-// src.copy(dest: '${files_dest.path}/${src.name_fix_keepext()}')!
-console.print_debug('Copied file: ${src.path} to ${files_dest.path}/${src.name_fix_keepext()}')
+// src.copy(dest: '${files_dest.path}/${src.name_fix_no_ext()}')!
+console.print_debug('Copied file: ${src.path} to ${files_dest.path}/${src.name_fix_no_ext()}')
 }
 }
@@ -41,7 +41,7 @@ fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page
 
 // Get collection metadata
 metadata := c.get_collection_metadata(collection_name)!
-fixed_page_name := texttools.name_fix_no_ext(page_name)
+fixed_page_name := texttools.name_fix(page_name)
 
 // Find the page in metadata
 if fixed_page_name !in metadata.pages {
@@ -56,7 +56,7 @@ fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page
 
 // Recursively traverse only page-to-page links
 for link in page_meta.links {
-// Only recursively process links to other pages within the atlas
+// Only recursively process links to other pages within the doctree
 // Skip external links (http, https, mailto, etc.)
 // Skip file and image links (these don't have "contained" links)
 if link.file_type != .page || link.status == .external {
@@ -5,7 +5,7 @@ import incubaid.herolib.core.texttools
 
 // Helper function to create a test export directory structure
 fn setup_test_export() string {
-test_dir := os.join_path(os.temp_dir(), 'atlas_client_test_${os.getpid()}')
+test_dir := os.join_path(os.temp_dir(), 'doctree_client_test_${os.getpid()}')
 
 // Clean up if exists
 if os.exists(test_dir) {
@@ -5,7 +5,7 @@ import incubaid.herolib.core.base
 @[params]
 pub struct AtlasClientArgs {
 pub:
-export_dir string @[required] // Path to atlas export directory
+export_dir string @[required] // Path to doctree export directory
 }
 
 // Create a new AtlasClient instance
lib/web/doctree/client/markdown.v (new file, 28 lines)
@@ -0,0 +1,28 @@
+module client
+
+// list_markdown returns the collections and their pages in markdown format.
+pub fn (mut c AtlasClient) list_markdown() !string {
+mut markdown_output := ''
+pages_map := c.list_pages_map()!
+
+if pages_map.len == 0 {
+return 'No collections or pages found in this doctree export.'
+}
+
+mut sorted_collections := pages_map.keys()
+sorted_collections.sort()
+
+for col_name in sorted_collections {
+page_names := pages_map[col_name]
+markdown_output += '## ${col_name}\n'
+if page_names.len == 0 {
+markdown_output += ' * No pages in this collection.\n'
+} else {
+for page_name in page_names {
+markdown_output += ' * ${page_name}\n'
+}
+}
+markdown_output += '\n' // Add a newline for spacing between collections
+}
+return markdown_output
+}
@@ -1,6 +1,6 @@
 module client
 
-// AtlasClient provides access to Atlas-exported documentation collections
+// AtlasClient provides access to DocTree-exported documentation collections
 // It reads from both the exported directory structure and Redis metadata
 
 // List of recognized image file extensions
@@ -1,7 +1,7 @@
-module atlas
+module core
 
 import incubaid.herolib.core.pathlib
-import incubaid.herolib.core.texttools
+import incubaid.herolib.web.doctree as doctreetools
 import incubaid.herolib.develop.gittools
 import incubaid.herolib.data.paramsparser { Params }
 import incubaid.herolib.ui.console
@@ -21,7 +21,7 @@ pub mut:
 path string // absolute path
 pages map[string]&Page
 files map[string]&File
-atlas &Atlas @[skip; str: skip]
+doctree &DocTree @[skip; str: skip]
 errors []CollectionError
 error_cache map[string]bool
 git_url string
@@ -70,7 +70,7 @@ fn (mut c Collection) add_page(mut path pathlib.Path) ! {
 
 // Add an image to the collection
 fn (mut c Collection) add_file(mut p pathlib.Path) ! {
-name := p.name_fix_keepext() // keep extension
+name := p.name_fix_no_ext() // keep extension
 if name in c.files {
 return error('File ${name} already exists in collection ${c.name}')
 }
@@ -95,7 +95,7 @@ fn (mut c Collection) add_file(mut p pathlib.Path) ! {
 
 // Get a page by name
 pub fn (c Collection) page_get(name_ string) !&Page {
-name := texttools.name_fix_no_ext(name_)
+name := doctreetools.name_fix(name_)
 return c.pages[name] or { return PageNotFound{
 collection: c.name
 page: name
@@ -104,7 +104,7 @@ pub fn (c Collection) page_get(name_ string) !&Page {
 
 // Get an image by name
 pub fn (c Collection) image_get(name_ string) !&File {
-name := texttools.name_fix_keepext(name_)
+name := doctreetools.name_fix(name_)
 mut img := c.files[name] or { return FileNotFound{
 collection: c.name
 file: name
@@ -117,7 +117,7 @@ pub fn (c Collection) image_get(name_ string) !&File {
 
 // Get a file by name
 pub fn (c Collection) file_get(name_ string) !&File {
-name := texttools.name_fix_keepext(name_)
+name := doctreetools.name_fix(name_)
 mut f := c.files[name] or { return FileNotFound{
 collection: c.name
 file: name
@@ -129,7 +129,7 @@ pub fn (c Collection) file_get(name_ string) !&File {
 }
 
 pub fn (c Collection) file_or_image_get(name_ string) !&File {
-name := texttools.name_fix_keepext(name_)
+name := doctreetools.name_fix(name_)
 mut f := c.files[name] or { return FileNotFound{
 collection: c.name
 file: name
@@ -139,26 +139,26 @@ pub fn (c Collection) file_or_image_get(name_ string) !&File {
 
 // Check if page exists
 pub fn (c Collection) page_exists(name_ string) !bool {
-name := texttools.name_fix_no_ext(name_)
+name := doctreetools.name_fix(name_)
 return name in c.pages
 }
 
 // Check if image exists
 pub fn (c Collection) image_exists(name_ string) !bool {
-name := texttools.name_fix_keepext(name_)
+name := doctreetools.name_fix(name_)
 f := c.files[name] or { return false }
 return f.ftype == .image
 }
 
 // Check if file exists
 pub fn (c Collection) file_exists(name_ string) !bool {
-name := texttools.name_fix_keepext(name_)
+name := doctreetools.name_fix(name_)
 f := c.files[name] or { return false }
 return f.ftype == .file
 }
 
 pub fn (c Collection) file_or_image_exists(name_ string) !bool {
-name := texttools.name_fix_keepext(name_)
+name := doctreetools.name_fix(name_)
 _ := c.files[name] or { return false }
 return true
 }
@@ -280,8 +280,8 @@ pub fn (c Collection) can_read(session Session) bool {
 }
 
 // Get user's groups
-mut atlas := c.atlas
-groups := atlas.groups_get(session)
+mut doctree := c.doctree
+groups := doctree.groups_get(session)
 group_names := groups.map(it.name)
 
 // Check if any of user's groups are in read ACL
@@ -302,8 +302,8 @@ pub fn (c Collection) can_write(session Session) bool {
 }
 
 // Get user's groups
-mut atlas := c.atlas
-groups := atlas.groups_get(session)
+mut doctree := c.doctree
+groups := doctree.groups_get(session)
 group_names := groups.map(it.name)
 
 // Check if any of user's groups are in write ACL
@@ -412,7 +412,7 @@ pub fn (mut c Collection) scan_groups() ! {
 mut visited := map[string]bool{}
 mut group := parse_group_file(filename, c.path()!.path, mut visited)!
 
-c.atlas.group_add(mut group)!
+c.doctree.group_add(mut group)!
 }
 }
 }
@@ -1,4 +1,4 @@
-module atlas
+module core
 
 import crypto.md5
 import incubaid.herolib.ui.console
@@ -1,12 +1,12 @@
|
|||||||
module atlas
|
module core
|
||||||
|
|
||||||
import incubaid.herolib.core.texttools
|
import incubaid.herolib.web.doctree
|
||||||
import incubaid.herolib.core.pathlib
|
import incubaid.herolib.core.pathlib
|
||||||
import incubaid.herolib.ui.console
|
import incubaid.herolib.ui.console
|
||||||
import incubaid.herolib.data.paramsparser
|
import incubaid.herolib.data.paramsparser
|
||||||
|
|
||||||
@[heap]
|
@[heap]
|
||||||
pub struct Atlas {
|
pub struct DocTree {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string
|
||||||
collections map[string]&Collection
|
collections map[string]&Collection
|
||||||
@@ -14,7 +14,7 @@ pub mut:
 }

 // Create a new collection
-fn (mut self Atlas) add_collection(mut path pathlib.Path) !Collection {
+fn (mut self DocTree) add_collection(mut path pathlib.Path) !Collection {
 mut name := path.name_fix_no_ext()
 mut filepath := path.file_get('.collection')!
 content := filepath.read()!
@@ -24,18 +24,17 @@ fn (mut self Atlas) add_collection(mut path pathlib.Path) !Collection {
 name = params.get('name')!
 }
 }
-
-name = texttools.name_fix(name)
-console.print_item("Adding collection '${name}' to Atlas '${self.name}' at path '${path.path}'")
+name = doctree.name_fix(name)
+console.print_item("Adding collection '${name}' to DocTree '${self.name}' at path '${path.path}'")

 if name in self.collections {
-return error('Collection ${name} already exists in Atlas ${self.name}')
+return error('Collection ${name} already exists in DocTree ${self.name}')
 }

 mut c := Collection{
 name: name
 path: path.path // absolute path
-atlas: &self // Set atlas reference
+doctree: &self // Set doctree reference
 error_cache: map[string]bool{}
 }

@@ -47,38 +46,38 @@ fn (mut self Atlas) add_collection(mut path pathlib.Path) !Collection {
 }

 // Get a collection by name
-pub fn (a Atlas) get_collection(name string) !&Collection {
+pub fn (a DocTree) get_collection(name string) !&Collection {
 return a.collections[name] or {
 return CollectionNotFound{
 name: name
-msg: 'Collection not found in Atlas ${a.name}'
+msg: 'Collection not found in DocTree ${a.name}'
 }
 }
 }

 // Validate all links in all collections
-pub fn (mut a Atlas) init_post() ! {
+pub fn (mut a DocTree) init_post() ! {
 for _, mut col in a.collections {
 col.init_post()!
 }
 }

 // Validate all links in all collections
-pub fn (mut a Atlas) validate_links() ! {
+pub fn (mut a DocTree) validate_links() ! {
 for _, mut col in a.collections {
 col.validate_links()!
 }
 }

 // Fix all links in all collections (rewrite source files)
-pub fn (mut a Atlas) fix_links() ! {
+pub fn (mut a DocTree) fix_links() ! {
 for _, mut col in a.collections {
 col.fix_links()!
 }
 }

-// Add a group to the atlas
-pub fn (mut a Atlas) group_add(mut group Group) ! {
+// Add a group to the doctree
+pub fn (mut a DocTree) group_add(mut group Group) ! {
 if group.name in a.groups {
 return error('Group ${group.name} already exists')
 }
@@ -86,13 +85,13 @@ pub fn (mut a Atlas) group_add(mut group Group) ! {
 }

 // Get a group by name
-pub fn (a Atlas) group_get(name string) !&Group {
-name_lower := texttools.name_fix(name)
+pub fn (a DocTree) group_get(name string) !&Group {
+name_lower := doctree.name_fix(name)
 return a.groups[name_lower] or { return error('Group ${name} not found') }
 }

 // Get all groups matching a session's email
-pub fn (a Atlas) groups_get(session Session) []&Group {
+pub fn (a DocTree) groups_get(session Session) []&Group {
 mut matching := []&Group{}

 email_lower := session.email.to_lower()
@@ -117,7 +116,7 @@ pub mut:
 ignore []string // list of directory names to ignore
 }

-pub fn (mut a Atlas) scan(args ScanArgs) ! {
+pub fn (mut a DocTree) scan(args ScanArgs) ! {
 mut path := pathlib.get_dir(path: args.path)!
 mut ignore := args.ignore.clone()
 ignore = ignore.map(it.to_lower())
@@ -125,7 +124,7 @@ pub fn (mut a Atlas) scan(args ScanArgs) ! {
 }

 // Scan a directory for collections
-fn (mut a Atlas) scan_(mut dir pathlib.Path, ignore_ []string) ! {
+fn (mut a DocTree) scan_(mut dir pathlib.Path, ignore_ []string) ! {
 console.print_item('Scanning directory: ${dir.path}')
 if !dir.is_dir() {
 return error('Path is not a directory: ${dir.path}')
@@ -1,4 +1,4 @@
-module atlas
+module core

 pub struct CollectionNotFound {
 Error
@@ -1,4 +1,4 @@
-module atlas
+module core

 import incubaid.herolib.core.pathlib
 import incubaid.herolib.core.base
@@ -14,7 +14,7 @@ pub mut:
 }

 // Export all collections
-pub fn (mut a Atlas) export(args ExportArgs) ! {
+pub fn (mut a DocTree) export(args ExportArgs) ! {
 mut dest := pathlib.get_dir(path: args.destination, create: true)!

 if args.reset {
@@ -134,7 +134,7 @@ pub fn (mut c Collection) export(args CollectionExportArgs) ! {
 if args.redis {
 mut context := base.context()!
 mut redis := context.redis()!
-redis.hset('atlas:${c.name}', page.name, page.path)!
+redis.hset('doctree:${c.name}', page.name, page.path)!
 }
 }

lib/web/doctree/core/factory.v (new file, 61 lines)
@@ -0,0 +1,61 @@
+module core
+
+import incubaid.herolib.web.doctree as doctreetools
+import incubaid.herolib.core.pathlib
+import incubaid.herolib.ui.console
+import incubaid.herolib.data.paramsparser
+
+__global (
+doctrees shared map[string]&DocTree
+)
+
+@[params]
+pub struct AtlasNewArgs {
+pub mut:
+name string = 'default'
+}
+
+// Create a new DocTree
+pub fn new(args AtlasNewArgs) !&DocTree {
+mut name := doctreetools.name_fix(args.name)
+
+mut a := &DocTree{
+name: name
+}
+
+set(a)
+return a
+}
+
+// Get DocTree from global map
+pub fn get(name string) !&DocTree {
+mut fixed_name := doctreetools.name_fix(name)
+rlock doctrees {
+if fixed_name in doctrees {
+return doctrees[fixed_name] or { return error('DocTree ${name} not found') }
+}
+}
+return error("DocTree '${name}' not found")
+}
+
+// Check if DocTree exists
+pub fn exists(name string) bool {
+mut fixed_name := doctreetools.name_fix(name)
+rlock doctrees {
+return fixed_name in doctrees
+}
+}
+
+// List all DocTree names
+pub fn list() []string {
+rlock doctrees {
+return doctrees.keys()
+}
+}
+
+// Store DocTree in global map
+fn set(doctree &DocTree) {
+lock doctrees {
+doctrees[doctree.name] = doctree
+}
+}
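
Editor's note (not part of the commit): a minimal sketch of how the factory in `factory.v` is meant to be used from inside the `core` module; the instance name `docs` is illustrative.

```v
module core

fn example_registry() ! {
	// new() normalizes the name and stores the instance in the shared `doctrees` map.
	dt := new(name: 'docs')!
	println(dt.name)

	// Later callers retrieve the same instance by name.
	if exists('docs') {
		same := get('docs')!
		println(same.name)
	}
	println(list()) // names of all registered DocTrees
}
```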
@@ -1,4 +1,4 @@
-module atlas
+module core

 import incubaid.herolib.core.pathlib
 import os
lib/web/doctree/core/getters.v (new file, 86 lines)
@@ -0,0 +1,86 @@
+module core
+
+import incubaid.herolib.web.doctree
+
+// Get a page from any collection using format "collection:page"
+pub fn (a DocTree) page_get(key string) !&Page {
+parts := key.split(':')
+if parts.len != 2 {
+return error('Invalid page key format. Use "collection:page" in page_get')
+}
+
+col := a.get_collection(parts[0])!
+return col.page_get(parts[1])!
+}
+
+// Get an image from any collection using format "collection:image"
+pub fn (a DocTree) image_get(key string) !&File {
+parts := key.split(':')
+if parts.len != 2 {
+return error('Invalid image key format. Use "collection:image" in image_get')
+}
+
+col := a.get_collection(parts[0])!
+return col.image_get(parts[1])!
+}
+
+// Get a file from any collection using format "collection:file"
+pub fn (a DocTree) file_get(key string) !&File {
+parts := key.split(':')
+if parts.len != 2 {
+return error('Invalid file key format. Use "collection:file" in file_get')
+}
+
+col := a.get_collection(parts[0])!
+return col.file_get(parts[1])!
+}
+
+// Get a file (can be image) from any collection using format "collection:file"
+pub fn (a DocTree) file_or_image_get(key string) !&File {
+c, n := doctree.key_parse(key)!
+col := a.get_collection(c)!
+return col.file_or_image_get(n)!
+}
+
+// Check if page exists
+pub fn (a DocTree) page_exists(key string) !bool {
+c, n := doctree.key_parse(key)!
+col := a.get_collection(c) or { return false }
+return col.page_exists(n)
+}
+
+// Check if image exists
+pub fn (a DocTree) image_exists(key string) !bool {
+c, n := doctree.key_parse(key)!
+col := a.get_collection(c) or { return false }
+return col.image_exists(n)
+}
+
+// Check if file exists
+pub fn (a DocTree) file_exists(key string) !bool {
+c, n := doctree.key_parse(key)!
+col := a.get_collection(c) or { return false }
+return col.file_exists(n)
+}
+
+pub fn (a DocTree) file_or_image_exists(key string) !bool {
+c, n := doctree.key_parse(key)!
+col := a.get_collection(c) or { return false }
+return col.file_or_image_exists(n)
+}
+
+// List all pages in DocTree
+pub fn (a DocTree) list_pages() map[string][]string {
+mut result := map[string][]string{}
+
+for col_name, col in a.collections {
+mut page_names := []string{}
+for page_name, _ in col.pages {
+page_names << page_name
+}
+page_names.sort()
+result[col_name] = page_names
+}
+
+return result
+}
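
Editor's note (not part of the commit): the getters above all take `'<collection>:<item>'` keys. A same-module sketch, using a hypothetical `guides:intro` page:

```v
module core

fn example_lookups(dt DocTree) ! {
	if dt.page_exists('guides:intro')! {
		page := dt.page_get('guides:intro')!
		println(page) // &Page for the 'intro' page of the 'guides' collection
	}
	println(dt.list_pages()) // collection name -> sorted page names
}
```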
@@ -1,6 +1,6 @@
-module atlas
+module core

-import incubaid.herolib.core.texttools
+import incubaid.herolib.web.doctree
 import incubaid.herolib.core.pathlib
 import os

@@ -20,7 +20,7 @@ pub mut:

 // Create a new Group
 pub fn new_group(args GroupNewArgs) !Group {
-mut name := texttools.name_fix(args.name)
+mut name := doctree.name_fix(args.name)
 mut patterns := args.patterns.map(it.to_lower())

 return Group{
@@ -72,7 +72,7 @@ fn parse_group_file(filename string, base_path string, mut visited map[string]bo
 visited[filename] = true

 mut group := Group{
-name: texttools.name_fix(filename)
+name: doctree.name_fix(filename)
 patterns: []string{}
 }

@@ -1,4 +1,4 @@
-in atlas/
+in doctree/

 check format of groups
 see content/groups
@@ -7,9 +7,9 @@ now the groups end with .group

 check how the include works, so we can include another group in the group as defined, only works in same folder

-in the scan function in atlas, now make scan_groups function, find groups, only do this for collection as named groups
-do not add collection groups to atlas, this is a system collection
+in the scan function in doctree, now make scan_groups function, find groups, only do this for collection as named groups
+do not add collection groups to doctree, this is a system collection

-make the groups and add them to atlas
+make the groups and add them to doctree

 give clear instructions for coding agent how to write the code
@@ -1,6 +1,6 @@
-module atlas
+module core

-import incubaid.herolib.core.texttools
+import incubaid.herolib.web.doctree
 import incubaid.herolib.ui.console

 pub enum LinkFileType {
@@ -43,7 +43,7 @@ pub fn (mut self Link) target_page() !&Page {
 if self.status == .external {
 return error('External links do not have a target page')
 }
-return self.page.collection.atlas.page_get(self.key())
+return self.page.collection.doctree.page_get(self.key())
 }

 // Get the target file this link points to
@@ -51,7 +51,7 @@ pub fn (mut self Link) target_file() !&File {
 if self.status == .external {
 return error('External links do not have a target file')
 }
-return self.page.collection.atlas.file_or_image_get(self.key())
+return self.page.collection.doctree.file_or_image_get(self.key())
 }

 // Find all markdown links in content
@@ -163,10 +163,10 @@ fn (mut p Page) parse_link_target(mut link Link) ! {
 if target.contains(':') {
 parts := target.split(':')
 if parts.len >= 2 {
-link.target_collection_name = texttools.name_fix(parts[0])
+link.target_collection_name = doctree.name_fix(parts[0])
 // For file links, use name without extension; for page links, normalize normally
 if link.file_type == .file {
-link.target_item_name = texttools.name_fix_no_ext(parts[1])
+link.target_item_name = doctree.name_fix(parts[1])
 } else {
 link.target_item_name = normalize_page_name(parts[1])
 }
@@ -174,7 +174,7 @@ fn (mut p Page) parse_link_target(mut link Link) ! {
 } else {
 // For file links, use name without extension; for page links, normalize normally
 if link.file_type == .file {
-link.target_item_name = texttools.name_fix_no_ext(target).trim_space()
+link.target_item_name = doctree.name_fix(target).trim_space()
 } else {
 link.target_item_name = normalize_page_name(target).trim_space()
 }
@@ -189,11 +189,11 @@ fn (mut p Page) parse_link_target(mut link Link) ! {
 mut error_prefix := 'Broken link'

 if link.file_type == .file || link.file_type == .image {
-target_exists = p.collection.atlas.file_or_image_exists(link.key())!
+target_exists = p.collection.doctree.file_or_image_exists(link.key())!
 error_category = .invalid_file_reference
 error_prefix = if link.file_type == .file { 'Broken file link' } else { 'Broken image link' }
 } else {
-target_exists = p.collection.atlas.page_exists(link.key())!
+target_exists = p.collection.doctree.page_exists(link.key())!
 }

 // console.print_debug('Link target exists: ${target_exists} for key=${link.key()}')
@@ -307,5 +307,5 @@ fn normalize_page_name(name string) string {
 if clean.ends_with('.md') {
 clean = clean[0..clean.len - 3]
 }
-return texttools.name_fix(clean)
+return doctree.name_fix(clean)
 }
@@ -1,7 +1,7 @@
-module atlas
+module core

 import incubaid.herolib.core.pathlib
-import incubaid.herolib.core.texttools
+import incubaid.herolib.web.doctree as doctreetools

 @[heap]
 pub struct Page {
@@ -49,7 +49,7 @@ pub fn (mut p Page) content(args ReadContentArgs) !string {

 // Recursively process includes
 fn (mut p Page) process_includes(content string, mut visited map[string]bool) !string {
-mut atlas := p.collection.atlas
+mut doctree := p.collection.doctree
 // Prevent circular includes
 page_key := p.key()
 if page_key in visited {
@@ -80,34 +80,16 @@ fn (mut p Page) process_includes(content string, mut visited map[string]bool) !s
 mut target_page := ''

 if include_ref.contains(':') {
-parts := include_ref.split(':')
-if parts.len == 2 {
-target_collection = texttools.name_fix(parts[0])
-target_page = texttools.name_fix(parts[1])
-} else {
-p.collection.error(
-category: .include_syntax_error
-page_key: page_key
-message: 'Invalid include format: `${include_ref}`'
-show_console: false
-)
-processed_lines << '<!-- Invalid include format: ${include_ref} -->'
-continue
-}
+target_collection, target_page = doctreetools.key_parse(include_ref)!
 } else {
-target_page = texttools.name_fix(include_ref)
-}
-
-// Remove .md extension if present
-if target_page.ends_with('.md') {
-target_page = target_page[0..target_page.len - 3]
+target_page = doctreetools.name_fix(include_ref)
 }

 // Build page key
 page_ref := '${target_collection}:${target_page}'

-// Get the referenced page from atlas
-mut include_page := atlas.page_get(page_ref) or {
+// Get the referenced page from doctree
+mut include_page := doctree.page_get(page_ref) or {
 p.collection.error(
 category: .missing_include
 page_key: page_key
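
Editor's note (not part of the commit): the include handling above now delegates parsing of `collection:page` references to `doctreetools.key_parse` instead of splitting on `:` by hand. A small same-module sketch; the exact normalization of the page part is assumed:

```v
module core

import incubaid.herolib.web.doctree as doctreetools

fn example_include_ref() ! {
	// Hypothetical include reference, as it would appear after '!!include'.
	collection, page := doctreetools.key_parse('guides:Intro.md')!
	println('${collection}:${page}') // assumed to yield a normalized 'guides:intro'-style key
}
```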
@@ -1,36 +1,36 @@
-module atlas
+module core

 import incubaid.herolib.core.playbook { PlayBook }
 import incubaid.herolib.develop.gittools
 import incubaid.herolib.ui.console
 import os

-// Play function to process HeroScript actions for Atlas
+// Play function to process HeroScript actions for DocTree
 pub fn play(mut plbook PlayBook) ! {
-if !plbook.exists(filter: 'atlas.') {
+if !plbook.exists(filter: 'doctree.') {
 return
 }

-// Track which atlases we've processed in this playbook
-mut processed_atlases := map[string]bool{}
+// Track which doctrees we've processed in this playbook
+mut processed_doctreees := map[string]bool{}

 mut name := ''

 // Process scan actions - scan directories for collections
-mut scan_actions := plbook.find(filter: 'atlas.scan')!
+mut scan_actions := plbook.find(filter: 'doctree.scan')!
 for mut action in scan_actions {
 mut p := action.params
 name = p.get_default('name', 'main')!
 ignore := p.get_list_default('ignore', [])!
-console.print_item("Scanning Atlas '${name}' with ignore patterns: ${ignore}")
-// Get or create atlas from global map
-mut atlas_instance := if exists(name) {
+console.print_item("Scanning DocTree '${name}' with ignore patterns: ${ignore}")
+// Get or create doctree from global map
+mut doctree_instance := if exists(name) {
 get(name)!
 } else {
-console.print_debug('Atlas not found, creating a new one')
+console.print_debug('DocTree not found, creating a new one')
 new(name: name)!
 }
-processed_atlases[name] = true
+processed_doctreees[name] = true

 mut path := p.get_default('path', '')!

@@ -45,38 +45,38 @@ pub fn play(mut plbook PlayBook) ! {
 )!.path
 }
 if path == '' {
-return error('Either "path" or "git_url" must be provided for atlas.scan action.')
+return error('Either "path" or "git_url" must be provided for doctree.scan action.')
 }
-atlas_instance.scan(path: path, ignore: ignore)!
+doctree_instance.scan(path: path, ignore: ignore)!
 action.done = true

-// No need to call set() again - atlas is already in global map from new()
+// No need to call set() again - doctree is already in global map from new()
 // and we're modifying it by reference
 }

-// Run init_post on all processed atlases
-for atlas_name, _ in processed_atlases {
-mut atlas_instance_post := get(atlas_name)!
-atlas_instance_post.init_post()!
+// Run init_post on all processed doctrees
+for doctree_name, _ in processed_doctreees {
+mut doctree_instance_post := get(doctree_name)!
+doctree_instance_post.init_post()!
 }

 // Process export actions - export collections to destination
-mut export_actions := plbook.find(filter: 'atlas.export')!
+mut export_actions := plbook.find(filter: 'doctree.export')!

 // Process explicit export actions
 for mut action in export_actions {
 mut p := action.params
 name = p.get_default('name', 'main')!
-destination := p.get_default('destination', '${os.home_dir()}/hero/var/atlas_export')!
+destination := p.get_default('destination', '${os.home_dir()}/hero/var/doctree_export')!
 reset := p.get_default_true('reset')
 include := p.get_default_true('include')
 redis := p.get_default_true('redis')

-mut atlas_instance := get(name) or {
-return error("Atlas '${name}' not found. Use !!atlas.scan first.")
+mut doctree_instance := get(name) or {
+return error("DocTree '${name}' not found. Use !!doctree.scan first.")
 }

-atlas_instance.export(
+doctree_instance.export(
 destination: destination
 reset: reset
 include: include
@@ -1,10 +1,10 @@
-module atlas
+module core

 import incubaid.herolib.core.pathlib
 import os
 import json

-const test_base = '/tmp/atlas_test'
+const test_base = '/tmp/doctree_test'

 fn testsuite_begin() {
 os.rmdir_all(test_base) or {}
@@ -15,9 +15,9 @@ fn testsuite_end() {
 os.rmdir_all(test_base) or {}
 }

-fn test_create_atlas() {
-mut a := new(name: 'test_atlas')!
-assert a.name == 'test_atlas'
+fn test_create_doctree() {
+mut a := new(name: 'test_doctree')!
+assert a.name == 'test_doctree'
 assert a.collections.len == 0
 }

@@ -363,12 +363,12 @@ fn test_load_from_directory() {

 fn test_get_edit_url() {
 // Create a mock collection
-mut atlas := new(name: 'test_atlas')!
+mut doctree := new(name: 'test_doctree')!
 col_path := '${test_base}/git_test'
 os.mkdir_all(col_path)!
 mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
 cfile.write('name:git_test_col')!
-mut col := atlas.add_collection(mut pathlib.get_dir(path: col_path)!)!
+mut col := doctree.add_collection(mut pathlib.get_dir(path: col_path)!)!
 col.git_url = 'https://github.com/test/repo.git' // Assuming git_url is a field on Collection
 // Create a mock page
 mut page_path := pathlib.get_file(path: '${col_path}/test_page.md', create: true)!
@@ -1,4 +1,4 @@
-# Atlas Module
+# DocTree Module

 A lightweight document collection manager for V, inspired by doctree but simplified.

@@ -18,7 +18,7 @@ put in .hero file and execute with hero or but shebang line on top of .hero scri

 **Scan Parameters:**

-- `name` (optional, default: 'main') - Atlas instance name
+- `name` (optional, default: 'main') - DocTree instance name
 - `path` (required when git_url not provided) - Directory path to scan
 - `git_url` (alternative to path) - Git repository URL to clone/checkout
 - `git_root` (optional when using git_url, default: ~/code) - Base directory for cloning
@@ -31,9 +31,9 @@ put in .hero file and execute with hero or but shebang line on top of .hero scri
 ```heroscript
 #!/usr/bin/env hero

-!!atlas.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"
+!!doctree.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"

-!!atlas.export
+!!doctree.export

 ```

@@ -42,10 +42,10 @@ put this in .hero file
 ## usage in herolib

 ```v
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree

-// Create a new Atlas
-mut a := atlas.new(name: 'my_docs')!
+// Create a new DocTree
+mut a := doctree.new(name: 'my_docs')!

 // Scan a directory for collections
 a.scan(path: '/path/to/docs')!
@@ -94,7 +94,7 @@ file := a.file_get('guides:diagram')!
 ### Scanning for Collections

 ```v
-mut a := atlas.new()!
+mut a := doctree.new()!
 a.scan(path: './docs')!
 ```

@@ -191,7 +191,7 @@ for _, col in a.collections {

 ### Include Processing

-Atlas supports simple include processing using `!!include` actions:
+DocTree supports simple include processing using `!!include` actions:

 ```v
 // Export with includes processed (default)
@@ -241,11 +241,11 @@ content := page.content()!

 ## Git Integration

-Atlas automatically detects the git repository URL for each collection and stores it for reference. This allows users to easily navigate to the source for editing.
+DocTree automatically detects the git repository URL for each collection and stores it for reference. This allows users to easily navigate to the source for editing.

 ### Automatic Detection

-When scanning collections, Atlas walks up the directory tree to find the `.git` directory and captures:
+When scanning collections, DocTree walks up the directory tree to find the `.git` directory and captures:
 - **git_url**: The remote origin URL
 - **git_branch**: The current branch

@@ -254,7 +254,7 @@ When scanning collections, Atlas walks up the directory tree to find the `.git`
 You can scan collections directly from a git repository:

 ```heroscript
-!!atlas.scan
+!!doctree.scan
 name: 'my_docs'
 git_url: 'https://github.com/myorg/docs.git'
 git_root: '~/code' // optional, defaults to ~/code
@@ -265,7 +265,7 @@ The repository will be automatically cloned if it doesn't exist locally.
 ### Accessing Edit URLs

 ```v
-mut page := atlas.page_get('guides:intro')!
+mut page := doctree.page_get('guides:intro')!
 edit_url := page.get_edit_url()!
 println('Edit at: ${edit_url}')
 // Output: Edit at: https://github.com/myorg/docs/edit/main/guides.md
@@ -282,7 +282,7 @@ Collection guides source: https://github.com/myorg/docs.git (branch: main)
 This allows published documentation to link back to the source repository for contributions.
 ## Links

-Atlas supports standard Markdown links with several formats for referencing pages within collections.
+DocTree supports standard Markdown links with several formats for referencing pages within collections.

 ### Link Formats

@@ -313,10 +313,10 @@ Link using a path - **only the filename is used** for matching:

 #### Validation

-Check all links in your Atlas:
+Check all links in your DocTree:

 ```v
-mut a := atlas.new()!
+mut a := doctree.new()!
 a.scan(path: './docs')!

 // Validate all links
@@ -335,7 +335,7 @@ for _, col in a.collections {
 Automatically rewrite links with correct relative paths:

 ```v
-mut a := atlas.new()!
+mut a := doctree.new()!
 a.scan(path: './docs')!

 // Fix all links in place
@@ -384,7 +384,7 @@ After fix (assuming pages are in subdirectories):

 ### Export Directory Structure

-When you export an Atlas, the directory structure is organized as:
+When you export a DocTree, the directory structure is organized as:

 $$\text{export\_dir}/
 \begin{cases}
@@ -409,17 +409,17 @@ $$\text{export\_dir}/

 ## Redis Integration

-Atlas uses Redis to store metadata about collections, pages, images, and files for fast lookups and caching.
+DocTree uses Redis to store metadata about collections, pages, images, and files for fast lookups and caching.

 ### Redis Data Structure

-When `redis: true` is set during export, Atlas stores:
+When `redis: true` is set during export, DocTree stores:

-1. **Collection Paths** - Hash: `atlas:path`
+1. **Collection Paths** - Hash: `doctree:path`
 - Key: collection name
 - Value: exported collection directory path

-2. **Collection Contents** - Hash: `atlas:<collection_name>`
+2. **Collection Contents** - Hash: `doctree:<collection_name>`
 - Pages: `page_name` → `page_name.md`
 - Images: `image_name.ext` → `img/image_name.ext`
 - Files: `file_name.ext` → `files/file_name.ext`
@@ -427,11 +427,11 @@ When `redis: true` is set during export, Atlas stores:
 ### Redis Usage Examples

 ```v
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree
 import incubaid.herolib.core.base

 // Export with Redis metadata (default)
-mut a := atlas.new(name: 'docs')!
+mut a := doctree.new(name: 'docs')!
 a.scan(path: './docs')!
 a.export(
 destination: './output'
@@ -443,15 +443,15 @@ mut context := base.context()!
 mut redis := context.redis()!

 // Get collection path
-col_path := redis.hget('atlas:path', 'guides')!
+col_path := redis.hget('doctree:path', 'guides')!
 println('Guides collection exported to: ${col_path}')

 // Get page location
-page_path := redis.hget('atlas:guides', 'introduction')!
+page_path := redis.hget('doctree:guides', 'introduction')!
 println('Introduction page: ${page_path}') // Output: introduction.md

 // Get image location
-img_path := redis.hget('atlas:guides', 'logo.png')!
+img_path := redis.hget('doctree:guides', 'logo.png')!
 println('Logo image: ${img_path}') // Output: img/logo.png
 ```

@@ -468,9 +468,9 @@ println('Logo image: ${img_path}') // Output: img/logo.png
 Save collection metadata to JSON files for archival or cross-tool compatibility:

 ```v
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree

-mut a := atlas.new(name: 'my_docs')!
+mut a := doctree.new(name: 'my_docs')!
 a.scan(path: './docs')!

 // Save all collections to a specified directory
@@ -497,32 +497,32 @@ save_path/

 ## HeroScript Integration

-Atlas integrates with HeroScript, allowing you to define Atlas operations in `.vsh` or playbook files.
+DocTree integrates with HeroScript, allowing you to define DocTree operations in `.vsh` or playbook files.

 ### Using in V Scripts

-Create a `.vsh` script to process Atlas operations:
+Create a `.vsh` script to process DocTree operations:

 ```v
 #!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

 import incubaid.herolib.core.playbook
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree

 // Define your HeroScript content
 heroscript := "
-!!atlas.scan path: './docs'
+!!doctree.scan path: './docs'

-!!atlas.export destination: './output' include: true
+!!doctree.export destination: './output' include: true
 "

 // Create playbook from text
 mut plbook := playbook.new(text: heroscript)!

-// Execute atlas actions
-atlas.play(mut plbook)!
+// Execute doctree actions
+doctree.play(mut plbook)!

-println('Atlas processing complete!')
+println('DocTree processing complete!')
 ```

 ### Using in Playbook Files
@@ -530,11 +530,11 @@ println('Atlas processing complete!')
 Create a `docs.play` file:

 ```heroscript
-!!atlas.scan
+!!doctree.scan
 name: 'main'
 path: '~/code/docs'

-!!atlas.export
+!!doctree.export
 destination: '~/code/output'
 reset: true
 include: true
@@ -565,11 +565,11 @@ playcmds.run(mut plbook)!
 Errors are automatically collected and reported:

 ```heroscript
-!!atlas.scan
+!!doctree.scan
 path: './docs'

 # Errors will be printed during export
-!!atlas.export
+!!doctree.export
 destination: './output'
 ```

@@ -583,13 +583,13 @@ Collection guides - Errors (2)

 ### Auto-Export Behavior

-If you use `!!atlas.scan` **without** an explicit `!!atlas.export`, Atlas will automatically export to the default location (current directory).
+If you use `!!doctree.scan` **without** an explicit `!!doctree.export`, DocTree will automatically export to the default location (current directory).

 To disable auto-export, include an explicit (empty) export action or simply don't include any scan actions.

 ### Best Practices

-1. **Always validate before export**: Use `!!atlas.validate` to catch broken links early
+1. **Always validate before export**: Use `!!doctree.validate` to catch broken links early
 2. **Use named instances**: When working with multiple documentation sets, use the `name` parameter
 3. **Enable Redis for production**: Use `redis: true` for web deployments to enable fast lookups
 4. **Process includes during export**: Keep `include: true` to embed referenced content in exported files
@@ -599,7 +599,7 @@ The following features are planned but not yet available:

 - [ ] Load collections from `.collection.json` files
 - [ ] Python API for reading collections
-- [ ] `atlas.validate` playbook action
-- [ ] `atlas.fix_links` playbook action
+- [ ] `doctree.validate` playbook action
+- [ ] `doctree.fix_links` playbook action
 - [ ] Auto-save on collection modifications
 - [ ] Collection version control
@@ -1,10 +1,10 @@
-module atlas
+module core

 import incubaid.herolib.core.pathlib
 import os
 import json

-const test_base = '/tmp/atlas_test'
+const test_base = '/tmp/doctree_test'

 // Test recursive export with chained cross-collection links
 // Setup: Collection A links to B, Collection B links to C
@@ -37,7 +37,7 @@ fn test_export_recursive_links() {
 mut page_c := pathlib.get_file(path: '${col_c_path}/page_c.md', create: true)!
 page_c.write('# Page C\\n\\nThis is the final page in the chain.')!

-// Create Atlas and add all collections
+// Create DocTree and add all collections
 mut a := new()!
 a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
 a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
@@ -155,7 +155,7 @@ fn test_export_recursive_with_images() {
 // Create image in collection B
 os.write_file('${col_b_path}/img/b_image.jpg', 'fake jpg data')!

-// Create Atlas
+// Create DocTree
 mut a := new()!
 a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
 a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
@@ -1,9 +1,9 @@
-module atlas
+module core

 import incubaid.herolib.core.pathlib
 import os

-const test_dir = '/tmp/atlas_save_test'
+const test_dir = '/tmp/doctree_save_test'

 fn testsuite_begin() {
 os.rmdir_all(test_dir) or {}
@@ -28,17 +28,17 @@ fn test_save_and_load_basic() {
 mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
 page2.write('# Guide\n\nMore content here.')!

-// Create and scan atlas
+// Create and scan doctree
 mut a := new(name: 'my_docs')!
 a.scan(path: test_dir)!

 assert a.collections.len == 1

 // Save all collections
-// a.save(destination_meta: '/tmp/atlas_meta')!
+// a.save(destination_meta: '/tmp/doctree_meta')!
 // assert os.exists('${col_path}/.collection.json')

-// // Load in a new atlas
+// // Load in a new doctree
 // mut a2 := new(name: 'loaded_docs')!
 // a2.load_from_directory(test_dir)!

@@ -73,7 +73,7 @@ fn test_save_and_load_with_includes() {
 mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
 page2.write('# Guide\n\n!!include docs:intro\n\nMore content here.')!

-// Create and scan atlas
+// Create and scan doctree
 mut a := new(name: 'my_docs')!
 a.scan(path: '${test_dir}/docs_include')!

@@ -84,7 +84,7 @@ fn test_save_and_load_with_includes() {
 assert !col.has_errors()

 // // Save
-// a.save(destination_meta: '/tmp/atlas_meta')!
+// a.save(destination_meta: '/tmp/doctree_meta')!

 // // Load
 // mut a2 := new(name: 'loaded')!
@@ -106,7 +106,7 @@ fn test_save_and_load_with_errors() {
 mut page1 := pathlib.get_file(path: '${col_path}/broken.md', create: true)!
 page1.write('[Broken link](nonexistent)')!

-// Create and scan atlas
+// Create and scan doctree
 mut a := new(name: 'my_docs')!
 a.scan(path: '${test_dir}/docs_errors')!

@@ -118,7 +118,7 @@ fn test_save_and_load_with_errors() {
 initial_error_count := col.errors.len

 // // Save with errors
-// a.save(destination_meta: '/tmp/atlas_meta')!
+// a.save(destination_meta: '/tmp/doctree_meta')!

 // // Load
 // mut a2 := new(name: 'loaded')!
@@ -156,7 +156,7 @@ fn test_save_and_load_multiple_collections() {

 assert a.collections.len == 2

-// a.save(destination_meta: '/tmp/atlas_meta')!
+// a.save(destination_meta: '/tmp/doctree_meta')!

 // // Load from directory
 // mut a2 := new(name: 'loaded')!
@@ -191,7 +191,7 @@ fn test_save_and_load_with_images() {
 assert col.image_exists('test.png')!

 // // Save
-// a.save(destination_meta: '/tmp/atlas_meta')!
+// a.save(destination_meta: '/tmp/doctree_meta')!

 // // Load
 // mut a2 := new(name: 'loaded')!
@@ -1,4 +1,4 @@
-module site
+module meta

 import incubaid.herolib.core.texttools

lib/web/doctree/meta/model_page.v (new file, 39 lines)
@@ -0,0 +1,39 @@
+module meta
+
+import incubaid.herolib.data.doctree.client as doctree_client
+import incubaid.herolib.data.markdown.tools as markdowntools
+
+
+// Page represents a single documentation page
+pub struct Page {
+pub mut:
+id string // Unique identifier: "collection:page_name"
+title string // Display title (optional, extracted from markdown if empty)
+description string // Brief description for metadata
+questions []Question
+}
+
+pub struct Question {
+pub mut:
+question string
+answer string
+}
+
+pub fn (mut p Page) content(client doctree_client.AtlasClient) !string {
+mut c := client.get_page_content(p.id)!
+
+if p.title =="" {
+p.title = markdowntools.extract_title(c)
+}
+//TODO in future should do AI
+if p.description =="" {
+p.description = p.title
+}
+return c
+}
+
+
+
+
+
+
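
Editor's note (not part of the commit): a minimal same-module sketch of how `Page.content` is meant to be driven; the client value is assumed to be obtained elsewhere via `incubaid.herolib.data.doctree.client`, as imported above.

```v
module meta

import incubaid.herolib.data.doctree.client as doctree_client

// Sketch only: fetch the markdown for a page and let content() back-fill
// title/description when they are empty.
fn example_page_content(mut p Page, client doctree_client.AtlasClient) !string {
	return p.content(client)!
}
```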
lib/web/doctree/meta/model_sidebar.v (new file, 36 lines)
@@ -0,0 +1,36 @@
+module meta
+
+import json
+
+// ============================================================================
+// Sidebar Navigation Models (Domain Types)
+// ============================================================================
+
+pub struct SideBar {
+pub mut:
+my_sidebar []NavItem
+}
+
+pub type NavItem = NavDoc | NavCat | NavLink
+
+pub struct NavDoc {
+pub:
+id string
+label string
+hide_title bool
+}
+
+pub struct NavCat {
+pub mut:
+label string
+collapsible bool = true
+collapsed bool
+items []NavItem
+}
+
+pub struct NavLink {
+pub:
+label string
+href string
+description string
+}
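
Editor's note (not part of the commit): a same-module sketch of how the sidebar types compose; the ids, labels and URL are illustrative.

```v
module meta

fn example_sidebar() SideBar {
	intro := NavDoc{
		id: 'guides:intro'
		label: 'Introduction'
	}
	source := NavLink{
		label: 'Source'
		href: 'https://github.com/myorg/docs'
	}
	// A collapsible category holding the external link.
	guides := NavCat{
		label: 'Guides'
		items: [NavItem(source)]
	}
	return SideBar{
		my_sidebar: [NavItem(intro), NavItem(guides)]
	}
}
```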
@@ -1,9 +1,9 @@
-module site
+module meta

 @[heap]
 pub struct Site {
 pub mut:
 pages map[string]Page // key: "collection:page_name"
 nav SideBar // Navigation sidebar configuration
 siteconfig SiteConfig // Full site configuration
 }
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 // Combined config structure
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 import incubaid.herolib.core.playbook { PlayBook }
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 import incubaid.herolib.core.playbook { PlayBook }
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 import incubaid.herolib.core.playbook { PlayBook }
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 import incubaid.herolib.core.playbook { PlayBook }
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 import incubaid.herolib.core.playbook { PlayBook }
@@ -1,4 +1,4 @@
-module site
+module meta

 import os
 import incubaid.herolib.core.playbook { PlayBook }
@@ -6,18 +6,6 @@ import incubaid.herolib.core.texttools
 import time
 import incubaid.herolib.ui.console

-// ============================================================
-// Helper function: normalize name while preserving .md extension handling
-// ============================================================
-fn normalize_page_name(name string) string {
-mut result := name
-// Remove .md extension if present for processing
-if result.ends_with('.md') {
-result = result[0..result.len - 3]
-}
-// Apply name fixing
-return texttools.name_fix(result)
-}

 // ============================================================
 // Internal structure for tracking category information
@@ -60,8 +48,8 @@ fn play_pages(mut plbook PlayBook, mut website Site) ! {

 category_name_fixed := texttools.name_fix(category_name)

-// Get label (derive from name if not specified)
-mut label := p.get_default('label', texttools.name_fix_snake_to_pascal(category_name_fixed))!
+// label is empty when not specified
+mut label := p.get_default('label', "")!
 mut position := p.get_int_default('position', next_category_position)!

 // Auto-increment position if using default
@@ -139,14 +127,11 @@ fn play_pages(mut plbook PlayBook, mut website Site) ! {
 website.pages[page_id] = page

 // Create navigation item with human-readable label
-nav_label := if page_title.len > 0 {
-page_title
-} else {
-texttools.title_case(page_name)
-}
+// nav_label := page_title.len
 nav_doc := NavDoc{
-id: page_id
-label: nav_label
+id: page.id
+label: page.title
+hide_title: page.hide_title
 }

 // Add to appropriate category or root
@@ -1,4 +1,4 @@
|
|||||||
module site
|
module meta
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import incubaid.herolib.core.playbook { PlayBook }
|
import incubaid.herolib.core.playbook { PlayBook }
|
||||||
@@ -511,10 +511,10 @@ The modern ebook structure uses `.hero` files for configuration and `.heroscript

```
my_ebook/
-├── scan.hero               # !!atlas.scan - collection scanning
+├── scan.hero               # !!doctree.scan - collection scanning
├── config.hero             # !!site.config - site configuration
├── menus.hero              # !!site.navbar and !!site.footer
-├── include.hero            # !!docusaurus.define and !!atlas.export
+├── include.hero            # !!docusaurus.define and !!doctree.export
├── 1_intro.heroscript      # Page definitions (categories + pages)
├── 2_concepts.heroscript   # More page definitions
└── 3_advanced.heroscript   # Additional pages
@@ -530,7 +530,7 @@ Use numeric prefixes on `.heroscript` files to control page/category ordering in
### Example scan.hero

```heroscript
-!!atlas.scan path:"../../collections/my_collection"
+!!doctree.scan path:"../../collections/my_collection"
```

### Example include.hero
@@ -542,7 +542,7 @@ Use numeric prefixes on `.heroscript` files to control page/category ordering in
// Or define directly
!!docusaurus.define name:'my_ebook'

-!!atlas.export include:true
+!!doctree.export include:true
```

### Running an Ebook
@@ -1,4 +1,4 @@
-module site
+module meta

import incubaid.herolib.core.playbook
import incubaid.herolib.ui.console
2  lib/web/doctree/meta/utils.v  Normal file
@@ -0,0 +1,2 @@
+module meta
+
48  lib/web/doctree/utils.v  Normal file
@@ -0,0 +1,48 @@
+module doctree
+
+import incubaid.herolib.core.texttools
+
+// returns collection and file name from "collection:file" format
+// works for file, image, page keys
+pub fn key_parse(key string) !(string, string) {
+	parts := key.split(':')
+	if parts.len != 2 {
+		return error('Invalid key format. Use "collection:file"')
+	}
+	col := texttools.name_fix(parts[0])
+	file := texttools.name_fix(parts[1])
+	return col, file
+}
+
+// ============================================================
+// Helper function: normalize name while preserving .md extension handling
+// ============================================================
+pub fn name_fix(name string) string {
+	mut result := name
+	// Remove .md extension if present for processing
+	if result.ends_with('.md') {
+		result = result[0..result.len - 3]
+	}
+	// Apply name fixing
+	result = strip_numeric_prefix(result)
+	return texttools.name_fix(result)
+}
+
+// Strip numeric prefix from filename (e.g., "03_linux_installation" -> "linux_installation")
+// Docusaurus automatically strips these prefixes from URLs
+fn strip_numeric_prefix(name string) string {
+	// Match pattern: digits followed by underscore at the start
+	if name.len > 2 && name[0].is_digit() {
+		for i := 1; i < name.len; i++ {
+			if name[i] == `_` {
+				// Found the underscore, return everything after it
+				return name[i + 1..]
+			}
+			if !name[i].is_digit() {
+				// Not a numeric prefix pattern, return as-is
+				return name
+			}
+		}
+	}
+	return name
}
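A minimal usage sketch for the two helpers above. The import path is an assumption derived from the file location `lib/web/doctree/utils.v`, and the key and file names are illustrative only.

```v
import incubaid.herolib.web.doctree

fn demo() ! {
	// split a "collection:file" key into its normalized parts
	col, file := doctree.key_parse('mycollection:intro.md')!
	println('${col} -> ${file}')

	// numeric prefix and .md extension are stripped before texttools.name_fix is applied
	println(doctree.name_fix('03_linux_installation.md'))
}
```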
@@ -25,10 +25,10 @@ The recommended structure for an ebook follows this pattern:

```
my_ebook/
-├── scan.hero               # Atlas collection scanning
+├── scan.hero               # DocTree collection scanning
├── config.hero             # Site configuration
├── menus.hero              # Navbar and footer configuration
-├── include.hero            # Docusaurus define and atlas export
+├── include.hero            # Docusaurus define and doctree export
├── 1_intro.heroscript      # Page definitions (numbered for ordering)
├── 2_concepts.heroscript   # More page definitions
└── 3_advanced.heroscript   # Additional pages
@@ -42,10 +42,10 @@ Defines which collections to scan for content:

```heroscript
// Scan local collections
-!!atlas.scan path:"../../collections/my_collection"
+!!doctree.scan path:"../../collections/my_collection"

// Scan remote collections from git
-!!atlas.scan git_url:"https://git.example.com/org/repo/src/branch/main/collections/docs"
+!!doctree.scan git_url:"https://git.example.com/org/repo/src/branch/main/collections/docs"
```

#### `config.hero` - Site Configuration
@@ -113,7 +113,7 @@ Links to shared configuration or defines docusaurus directly:
// Option 2: Define directly
!!docusaurus.define name:'my_ebook'

-!!atlas.export include:true
+!!doctree.export include:true
```

#### Page Definition Files (`*.heroscript`)
@@ -145,7 +145,7 @@ Define pages and categories:

## Collections

-Collections are directories containing markdown files. They're scanned by Atlas and referenced in page definitions.
+Collections are directories containing markdown files. They're scanned by DocTree and referenced in page definitions.

```
collections/
@@ -189,16 +189,16 @@ The older approach using `!!docusaurus.add` is still supported but not recommend

## HeroScript Actions Reference

-### `!!atlas.scan`
+### `!!doctree.scan`

Scans a directory for markdown collections:

- `path` (string): Local path to scan
- `git_url` (string): Git URL to clone and scan
-- `name` (string): Atlas instance name (default: `main`)
+- `name` (string): DocTree instance name (default: `main`)
- `ignore` (list): Directory names to skip

-### `!!atlas.export`
+### `!!doctree.export`

Exports scanned collections:

@@ -215,7 +215,7 @@ Configures the Docusaurus build environment:
- `reset` (bool): Clean build directory before starting
- `template_update` (bool): Update Docusaurus template
- `install` (bool): Run `bun install`
-- `atlas_dir` (string): Atlas export directory
+- `doctree_dir` (string): DocTree export directory

### `!!site.config`

@@ -254,4 +254,4 @@ Defines a sidebar category:
## See Also

- `lib/web/site` - Generic site configuration module
-- `lib/data/atlas` - Atlas collection management
+- `lib/data/doctree` - DocTree collection management
@@ -17,7 +17,7 @@ pub mut:
	reset           bool
	template_update bool
	coderoot        string
-	atlas_dir       string
+	doctree_dir     string
}

@[params]
@@ -29,7 +29,7 @@ pub mut:
	reset           bool
	template_update bool
	coderoot        string
-	atlas_dir       string
+	doctree_dir     string
}

// return the last know config
@@ -38,8 +38,8 @@ pub fn config() !DocusaurusConfig {
		docusaurus_config << DocusaurusConfigParams{}
	}
	mut args := docusaurus_config[0] or { panic('bug in docusaurus config') }
-	if args.atlas_dir == '' {
-		return error('atlas_dir is not set')
+	if args.doctree_dir == '' {
+		return error('doctree_dir is not set')
	}
	if args.path_build == '' {
		args.path_build = '${os.home_dir()}/hero/var/docusaurus/build'
@@ -58,7 +58,7 @@ pub fn config() !DocusaurusConfig {
		install:         args.install
		reset:           args.reset
		template_update: args.template_update
-		atlas_dir:       args.atlas_dir
+		doctree_dir:     args.doctree_dir
	}
	if c.install {
		install(c)!
@@ -73,7 +73,7 @@ pub mut:
	port          int  = 3000
	open          bool = true // whether to open the browser automatically
	watch_changes bool // whether to watch for changes in docs and rebuild automatically
-	skip_generate bool // whether to skip generation (useful when docs are pre-generated, e.g., from atlas)
+	skip_generate bool // whether to skip generation (useful when docs are pre-generated, e.g., from doctree)
}

pub fn (mut s DocSite) open(args DevArgs) ! {
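A short sketch of starting the dev server with these options. It assumes `DevArgs` is a `@[params]` struct (the usual herolib convention) so the fields can be passed as named arguments, and that a configured `DocSite` is already available; the values shown are illustrative.

```v
module docusaurus

// Serve pre-generated docs locally; skip_generate assumes the doctree export already ran.
fn serve_example(mut docsite DocSite) ! {
	docsite.open(port: 3000, open: false, watch_changes: true, skip_generate: true)!
}
```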
@@ -37,7 +37,7 @@ pub fn (mut docsite DocSite) generate() ! {
	mut sidebar_file := pathlib.get_file(path: '${cfg_path}/sidebar.json', create: true)!
	sidebar_file.write(docsite.config.sidebar_json_txt)!

-	docsite.link_docs()!
+	docsite.generate_docs()!

	docsite.import()!
}
@@ -1,14 +1,14 @@
module docusaurus

import incubaid.herolib.core.pathlib
-import incubaid.herolib.data.atlas.client as atlas_client
+import incubaid.herolib.data.doctree.client as doctree_client
import incubaid.herolib.data.markdown.tools as markdowntools
import incubaid.herolib.ui.console
import incubaid.herolib.web.site
import os

// ============================================================================
-// Doc Linking - Generate Docusaurus docs from Atlas collections
+// Doc Linking - Generate Docusaurus docs from DocTree collections
// ============================================================================

// get_first_doc_from_sidebar recursively finds the first doc ID in the sidebar.
@@ -35,16 +35,16 @@ fn get_first_doc_from_sidebar(items []site.NavItem) string {
	return ''
}

-// link_docs generates markdown files from site page definitions.
-// Pages are fetched from Atlas collections and written with frontmatter.
-pub fn (mut docsite DocSite) link_docs() ! {
+// generate_docs generates markdown files from site page definitions.
+// Pages are fetched from DocTree collections and written with frontmatter.
+pub fn (mut docsite DocSite) generate_docs() ! {
	c := config()!
	docs_path := '${c.path_build.path}/docs'

	reset_docs_dir(docs_path)!
-	console.print_header('Linking docs to ${docs_path}')
+	console.print_header('Write doc: ${docs_path}')

-	mut client := atlas_client.new(export_dir: c.atlas_dir)!
+	mut client := doctree_client.new(export_dir: c.doctree_dir)!
	mut errors := []string{}

	// Determine if we need to set a docs landing page (when url_home ends with "/")
@@ -72,7 +72,7 @@ fn reset_docs_dir(docs_path string) ! {
	os.mkdir_all(docs_path)!
}

-fn report_errors(mut client atlas_client.AtlasClient, errors []string) ! {
+fn report_errors(mut client doctree_client.AtlasClient, errors []string) ! {
	available := client.list_markdown() or { 'Could not list available pages' }
	console.print_stderr('Available pages:\n${available}')
	return error('Errors during doc generation:\n${errors.join('\n\n')}')
@@ -82,7 +82,7 @@ fn report_errors(mut client atlas_client.AtlasClient, errors []string) ! {
// Page Processing
// ============================================================================

-fn process_page(mut client atlas_client.AtlasClient, docs_path string, page site.Page, first_doc_page string, mut errors []string) {
+fn process_page(mut client doctree_client.AtlasClient, docs_path string, page site.Page, first_doc_page string, mut errors []string) {
	collection, page_name := parse_page_src(page.src) or {
		errors << err.msg()
		return
@@ -122,7 +122,7 @@ fn write_page(docs_path string, page_name string, page site.Page, content string
	file.write(final_content)!
}

-fn copy_page_assets(mut client atlas_client.AtlasClient, docs_path string, collection string, page_name string) {
+fn copy_page_assets(mut client doctree_client.AtlasClient, docs_path string, collection string, page_name string) {
	client.copy_images(collection, page_name, docs_path) or {}
	client.copy_files(collection, page_name, docs_path) or {}
}
@@ -132,12 +132,21 @@ fn copy_page_assets(mut client atlas_client.AtlasClient, docs_path string, colle
// ============================================================================

fn build_frontmatter(page site.Page, content string, is_landing_page bool) string {
	title := get_title(page, content)
	description := get_description(page, title)

	mut lines := ['---']
-	lines << "title: '${escape_yaml(title)}'"
-	lines << "description: '${escape_yaml(description)}'"
+	lines << "title: '${title}'"
+	lines << "description: '${description}'"

+	// if page.id.contains('tfhowto_tools'){
+	// println('extracted title: ${title}')
+	// println('page.src: ${lines}')
+	// $dbg;
+	// }

	// Add slug: / for the docs landing page so /docs/ works directly
	if is_landing_page {
@@ -154,25 +163,3 @@ fn build_frontmatter(page site.Page, content string, is_landing_page bool) strin
	lines << '---'
	return lines.join('\n')
}
-
-fn get_title(page site.Page, content string) string {
-	if page.title.len > 0 {
-		return page.title
-	}
-	extracted := markdowntools.extract_title(content)
-	if extracted.len > 0 {
-		return extracted
-	}
-	return page.src.split(':').last()
-}
-
-fn get_description(page site.Page, title string) string {
-	if page.description.len > 0 {
-		return page.description
-	}
-	return title
-}
-
-fn escape_yaml(s string) string {
-	return s.replace("'", "''")
-}
@@ -1,7 +1,7 @@
module docusaurus

import incubaid.herolib.core.pathlib
-// import incubaid.herolib.data.atlas.client as atlas_client
+// import incubaid.herolib.data.doctree.client as doctree_client
// import incubaid.herolib.web.site { Page, Section, Site }
// import incubaid.herolib.data.markdown.tools as markdowntools
// import incubaid.herolib.ui.console
@@ -24,7 +24,7 @@ import incubaid.herolib.core.pathlib
// docs_path := '${c.path_build.path}/docs'

// // Create the appropriate client based on configuration
-// mut client_instance := atlas_client.new(export_dir: c.atlas_dir)!
+// mut client_instance := doctree_client.new(export_dir: c.doctree_dir)!
// mut client := IDocClient(client_instance)

// mut gen := SiteGenerator{
@@ -378,8 +378,8 @@ import incubaid.herolib.core.pathlib
// }
// }

-// // STEP 5: Fix bare page references (from atlas self-contained exports)
-// // Atlas exports convert cross-collection links to simple relative links like "token_system2.md"
+// // STEP 5: Fix bare page references (from doctree self-contained exports)
+// // DocTree exports convert cross-collection links to simple relative links like "token_system2.md"
// // We need to transform these to proper relative paths based on Docusaurus structure
// for page_name, target_dir in page_to_path {
// // Match links in the format ](page_name) or ](page_name.md)
60  lib/web/docusaurus/dsite_to_sidebar_json.v  Normal file
@@ -0,0 +1,60 @@
+module doc
+import incubaid.herolib.web.site
+
+// this is the logic to create docusaurus sidebar.json from site.NavItems
+
+struct SidebarItem {
+	typ         string @[json: 'type']
+	id          string @[omitempty]
+	label       string
+	href        string @[omitempty]
+	description string @[omitempty]
+	collapsible bool   @[json: 'collapsible'; omitempty]
+	collapsed   bool   @[json: 'collapsed'; omitempty]
+	items       []SidebarItem @[omitempty]
+}
+
+// ============================================================================
+// JSON Serialization
+// ============================================================================
+
+pub fn sidebar_to_json(sb site.SideBar) !string {
+	items := sb.my_sidebar.map(to_sidebar_item(it))
+	return json.encode_pretty(items)
+}
+
+fn to_sidebar_item(item site.NavItem) SidebarItem {
+	return match item {
+		NavDoc { from_doc(item) }
+		NavLink { from_link(item) }
+		NavCat { from_category(item) }
+	}
+}
+
+fn from_doc(doc site.NavDoc) SidebarItem {
+	return SidebarItem{
+		typ:   'doc'
+		id:    doc.id
+		label: doc.label
+	}
+}
+
+fn from_link(link site.NavLink) SidebarItem {
+	return SidebarItem{
+		typ:         'link'
+		label:       link.label
+		href:        link.href
+		description: link.description
+	}
+}
+
+fn from_category(cat site.NavCat) SidebarItem {
+	return SidebarItem{
+		typ:         'category'
+		label:       cat.label
+		collapsible: cat.collapsible
+		collapsed:   cat.collapsed
+		items:       cat.items.map(to_sidebar_item(it))
+	}
+}
+
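A small sketch of how `sidebar_to_json` might be driven. It mirrors the imports and types used by the file above and assumes it is called from within the same module; the sidebar content is illustrative only.

```v
module doc

import incubaid.herolib.web.site

// Serialize a one-entry sidebar to the JSON array Docusaurus expects in sidebar.json.
fn example_sidebar_json() !string {
	sb := site.SideBar{
		my_sidebar: [site.NavItem(site.NavDoc{ id: 'intro', label: 'Introduction' })]
	}
	return sidebar_to_json(sb)!
}
```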
@@ -75,7 +75,7 @@ After running the test:

If links don't resolve:

-1. Check that the collection is registered in the atlas
+1. Check that the collection is registered in the doctree
2. Verify page names match (no typos)
3. Run with debug flag (`-d`) to see detailed output
4. Check `~/hero/var/docusaurus/build/docs/` for generated files
@@ -10,7 +10,7 @@ If links appear broken, check:

1. The collection name is correct
2. The page name matches the markdown filename (without `.md`)
-3. The collection is properly registered in the atlas
+3. The collection is properly registered in the doctree

### Page Not Found

@@ -29,11 +29,11 @@ Ensure the page is defined in your heroscript:

## Error Messages

| Error | Solution |
-|-------|----------|
+| ------------------------ | ---------------------------- |
| "Page not found" | Check page name spelling |
-| "Collection not found" | Verify atlas configuration |
+| "Collection not found" | Verify doctree configuration |
| "Link resolution failed" | Check link syntax |

## Navigation

@@ -1,4 +1,4 @@
!!docusaurus.define name:'test_site'

-!!atlas.export include:true
+!!doctree.export include:true

@@ -1,2 +1,2 @@
-!!atlas.scan path:"../../collections/test_collection"
+!!doctree.scan path:"../../collections/test_collection"

@@ -1,7 +1,7 @@
module docusaurus

import incubaid.herolib.core.playbook { PlayBook }
-import incubaid.herolib.data.atlas
+import incubaid.herolib.data.doctree
import incubaid.herolib.ui.console
import os

@@ -24,7 +24,7 @@ fn process_define(mut plbook PlayBook) !&DocSite {
	mut action := plbook.ensure_once(filter: 'docusaurus.define')!
	p := action.params

-	atlas_dir := p.get_default('atlas_dir', '${os.home_dir()}/hero/var/atlas_export')!
+	doctree_dir := p.get_default('doctree_dir', '${os.home_dir()}/hero/var/doctree_export')!

	config_set(
		path_build: p.get_default('path_build', '')!
@@ -32,13 +32,13 @@ fn process_define(mut plbook PlayBook) !&DocSite {
		reset:           p.get_default_false('reset')
		template_update: p.get_default_false('template_update')
		install:         p.get_default_false('install')
-		atlas_dir:       atlas_dir
+		doctree_dir:     doctree_dir
	)!

	site_name := p.get('name') or { return error('docusaurus.define: "name" is required') }
-	atlas_name := p.get_default('atlas', 'main')!
+	doctree_name := p.get_default('doctree', 'main')!

-	export_atlas(atlas_name, atlas_dir)!
+	export_doctree(doctree_name, doctree_dir)!
	dsite_define(site_name)!
	action.done = true

@@ -77,11 +77,11 @@ fn process_dev(mut plbook PlayBook, mut dsite DocSite) ! {
	action.done = true
}

-fn export_atlas(name string, dir string) ! {
-	if !atlas.exists(name) {
+fn export_doctree(name string, dir string) ! {
+	if !doctree.exists(name) {
		return
	}
-	console.print_debug('Auto-exporting Atlas "${name}" to ${dir}')
-	mut a := atlas.get(name)!
+	console.print_debug('Auto-exporting DocTree "${name}" to ${dir}')
+	mut a := doctree.get(name)!
	a.export(destination: dir, reset: true, include: true, redis: false)!
}
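For reference, a sketch of the export step that `export_doctree` above performs, assuming a doctree instance named 'main' was created earlier by a `!!doctree.scan` action; the directory argument is supplied by the caller.

```v
import incubaid.herolib.data.doctree

fn export_main(dir string) ! {
	if !doctree.exists('main') {
		return
	}
	mut a := doctree.get('main')!
	// writes the self-contained export that the Docusaurus generator reads from doctree_dir
	a.export(destination: dir, reset: true, include: true, redis: false)!
}
```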
@@ -1,536 +0,0 @@
-# AI Instructions for Site Module HeroScript
-
-This document provides comprehensive instructions for AI agents working with the Site module's HeroScript format.
-
-## HeroScript Format Overview
-
-HeroScript is a declarative configuration language with the following characteristics:
-
-### Basic Syntax
-
-```heroscript
-!!actor.action
-    param1: "value1"
-    param2: "value2"
-    multiline_param: "
-      This is a multiline value.
-      It can span multiple lines.
-    "
-    arg1 arg2 // Arguments without keys
-```
-
-**Key Rules:**
-1. Actions start with `!!` followed by `actor.action` format
-2. Parameters are indented and use `key: "value"` or `key: value` format
-3. Values with spaces must be quoted
-4. Multiline values are supported with quotes
-5. Arguments without keys are space-separated
-6. Comments start with `//`
-
-## Site Module Actions
-
-### 1. Site Configuration (`!!site.config`)
-
-**Purpose:** Define the main site configuration including title, description, and metadata.
-
-**Required Parameters:**
-- `name`: Site identifier (will be normalized to snake_case)
-
-**Optional Parameters:**
-- `title`: Site title (default: "Documentation Site")
-- `description`: Site description
-- `tagline`: Site tagline
-- `favicon`: Path to favicon (default: "img/favicon.png")
-- `image`: Default site image (default: "img/tf_graph.png")
-- `copyright`: Copyright text
-- `url`: Main site URL
-- `base_url`: Base URL path (default: "/")
-- `url_home`: Home page path
-
-**Example:**
-```heroscript
-!!site.config
-    name: "my_documentation"
-    title: "My Documentation Site"
-    description: "Comprehensive technical documentation"
-    tagline: "Learn everything you need"
-    url: "https://docs.example.com"
-    base_url: "/"
-```
-
-**AI Guidelines:**
-- Always include `name` parameter
-- Use descriptive titles and descriptions
-- Ensure URLs are properly formatted with protocol
-
-### 2. Metadata Configuration (`!!site.config_meta`)
-
-**Purpose:** Override specific metadata for SEO purposes.
-
-**Optional Parameters:**
-- `title`: SEO-specific title (overrides site.config title for meta tags)
-- `image`: SEO-specific image (overrides site.config image for og:image)
-- `description`: SEO-specific description
-
-**Example:**
-```heroscript
-!!site.config_meta
-    title: "My Docs - Complete Guide"
-    image: "img/social-preview.png"
-    description: "The ultimate guide to using our platform"
-```
-
-**AI Guidelines:**
-- Use only when SEO metadata needs to differ from main config
-- Keep titles concise for social media sharing
-- Use high-quality images for social previews
-
-### 3. Navigation Bar (`!!site.navbar` or `!!site.menu`)
-
-**Purpose:** Configure the main navigation bar.
-
-**Optional Parameters:**
-- `title`: Navigation title (defaults to site.config title)
-- `logo_alt`: Logo alt text
-- `logo_src`: Logo image path
-- `logo_src_dark`: Dark mode logo path
-
-**Example:**
-```heroscript
-!!site.navbar
-    title: "My Site"
-    logo_alt: "My Site Logo"
-    logo_src: "img/logo.svg"
-    logo_src_dark: "img/logo-dark.svg"
-```
-
-**AI Guidelines:**
-- Use `!!site.navbar` for modern syntax (preferred)
-- `!!site.menu` is supported for backward compatibility
-- Provide both light and dark logos when possible
-
-### 4. Navigation Items (`!!site.navbar_item` or `!!site.menu_item`)
-
-**Purpose:** Add items to the navigation bar.
-
-**Required Parameters (one of):**
-- `to`: Internal link path
-- `href`: External URL
-
-**Optional Parameters:**
-- `label`: Display text (required in practice)
-- `position`: "left" or "right" (default: "right")
-
-**Example:**
-```heroscript
-!!site.navbar_item
-    label: "Documentation"
-    to: "docs/intro"
-    position: "left"
-
-!!site.navbar_item
-    label: "GitHub"
-    href: "https://github.com/myorg/repo"
-    position: "right"
-```
-
-**AI Guidelines:**
-- Use `to` for internal navigation
-- Use `href` for external links
-- Position important items on the left, secondary items on the right
-
-### 5. Footer Configuration (`!!site.footer`)
-
-**Purpose:** Configure footer styling.
-
-**Optional Parameters:**
-- `style`: "dark" or "light" (default: "dark")
-
-**Example:**
-```heroscript
-!!site.footer
-    style: "dark"
-```
-
-### 6. Footer Items (`!!site.footer_item`)
-
-**Purpose:** Add links to the footer, grouped by title.
-
-**Required Parameters:**
-- `title`: Group title (items with same title are grouped together)
-- `label`: Link text
-
-**Required Parameters (one of):**
-- `to`: Internal link path
-- `href`: External URL
-
-**Example:**
-```heroscript
-!!site.footer_item
-    title: "Docs"
-    label: "Introduction"
-    to: "intro"
-
-!!site.footer_item
-    title: "Docs"
-    label: "API Reference"
-    to: "api"
-
-!!site.footer_item
-    title: "Community"
-    label: "Discord"
-    href: "https://discord.gg/example"
-```
-
-**AI Guidelines:**
-- Group related links under the same title
-- Use consistent title names across related items
-- Provide both internal and external links as appropriate
-
-### 7. Page Categories (`!!site.page_category`)
-
-**Purpose:** Create a section/category to organize pages.
-
-**Required Parameters:**
-- `name`: Category identifier (snake_case)
-
-**Optional Parameters:**
-- `label`: Display name (auto-generated from name if not provided)
-- `position`: Manual sort order (auto-incremented if not specified)
-- `path`: URL path segment (defaults to normalized label)
-
-**Example:**
-```heroscript
-!!site.page_category
-    name: "getting_started"
-    label: "Getting Started"
-    position: 100
-
-!!site.page_category
-    name: "advanced_topics"
-    label: "Advanced Topics"
-```
-
-**AI Guidelines:**
-- Use descriptive snake_case names
-- Let label be auto-generated when possible (name_fix converts to Title Case)
-- Categories persist for all subsequent pages until a new category is declared
-- Position values should leave gaps (100, 200, 300) for future insertions
-
-### 8. Pages (`!!site.page`)
-
-**Purpose:** Define individual pages in the site.
-
-**Required Parameters:**
-- `src`: Source reference as `collection:page_name` (required for first page in a collection)
-
-**Optional Parameters:**
-- `name`: Page identifier (extracted from src if not provided)
-- `title`: Page title (extracted from markdown if not provided)
-- `description`: Page description for metadata
-- `slug`: Custom URL slug
-- `position`: Manual sort order (auto-incremented if not specified)
-- `draft`: Mark as draft (default: false)
-- `hide_title`: Hide title in rendering (default: false)
-- `path`: Custom path (defaults to current category name)
-- `category`: Override current category
-- `title_nr`: Title numbering level
-
-**Example:**
-```heroscript
-!!site.page src: "docs:introduction"
-    description: "Introduction to the platform"
-    slug: "/"
-
-!!site.page src: "quickstart"
-    description: "Get started in 5 minutes"
-
-!!site.page src: "installation"
-    title: "Installation Guide"
-    description: "How to install and configure"
-    position: 10
-```
-
-**AI Guidelines:**
-- **Collection Persistence:** Specify collection once (e.g., `docs:introduction`), then subsequent pages only need page name (e.g., `quickstart`)
-- **Category Persistence:** Pages belong to the most recently declared category
-- **Title Extraction:** Prefer extracting titles from markdown files
-- **Position Management:** Use automatic positioning unless specific order is required
-- **Description Required:** Always provide descriptions for SEO
-- **Slug Usage:** Use slug for special pages like homepage (`slug: "/"`)
-
-### 9. Import External Content (`!!site.import`)
-
-**Purpose:** Import content from external sources.
-
-**Optional Parameters:**
-- `name`: Import identifier
-- `url`: Git URL or HTTP URL
-- `path`: Local file system path
-- `dest`: Destination path in site
-- `replace`: Comma-separated key:value pairs for variable replacement
-- `visible`: Whether imported content is visible (default: true)
-
-**Example:**
-```heroscript
-!!site.import
-    url: "https://github.com/example/docs"
-    dest: "external"
-    replace: "VERSION:1.0.0,PROJECT:MyProject"
-    visible: true
-```
-
-**AI Guidelines:**
-- Use for shared documentation across multiple sites
-- Replace variables using `${VARIABLE}` syntax in source content
-- Set `visible: false` for imported templates or partials
-
-### 10. Publish Destinations (`!!site.publish` and `!!site.publish_dev`)
-
-**Purpose:** Define where to publish the built site.
-
-**Optional Parameters:**
-- `path`: File system path or URL
-- `ssh_name`: SSH connection name for remote deployment
-
-**Example:**
-```heroscript
-!!site.publish
-    path: "/var/www/html/docs"
-    ssh_name: "production_server"
-
-!!site.publish_dev
-    path: "/tmp/docs-preview"
-```
-
-**AI Guidelines:**
-- Use `!!site.publish` for production deployments
-- Use `!!site.publish_dev` for development/preview deployments
-- Can specify multiple destinations
-
-## File Organization Best Practices
-
-### Naming Convention
-
-Use numeric prefixes to control execution order:
-
-```
-0_config.heroscript      # Site configuration
-1_navigation.heroscript  # Menu and footer
-2_intro.heroscript       # Introduction pages
-3_guides.heroscript      # User guides
-4_reference.heroscript   # API reference
-```
-
-**AI Guidelines:**
-- Always use numeric prefixes (0_, 1_, 2_, etc.)
-- Leave gaps in numbering (0, 10, 20) for future insertions
-- Group related configurations in the same file
-- Process order matters: config → navigation → pages
-
-### Execution Order Rules
-
-1. **Configuration First:** `!!site.config` must be processed before other actions
-2. **Categories Before Pages:** Declare `!!site.page_category` before pages in that category
-3. **Collection Persistence:** First page in a collection must specify `collection:page_name`
-4. **Category Persistence:** Pages inherit the most recent category declaration
-
-## Common Patterns
-
-### Pattern 1: Simple Documentation Site
-
-```heroscript
-!!site.config
-    name: "simple_docs"
-    title: "Simple Documentation"
-
-!!site.navbar
-    title: "Simple Docs"
-
-!!site.page src: "docs:index"
-    description: "Welcome page"
-    slug: "/"
-
-!!site.page src: "getting-started"
-    description: "Getting started guide"
-
-!!site.page src: "api"
-    description: "API reference"
-```
-
-### Pattern 2: Multi-Section Documentation
-
-```heroscript
-!!site.config
-    name: "multi_section_docs"
-    title: "Complete Documentation"
-
-!!site.page_category
-    name: "introduction"
-    label: "Introduction"
-
-!!site.page src: "docs:welcome"
-    description: "Welcome to our documentation"
-
-!!site.page src: "overview"
-    description: "Platform overview"
-
-!!site.page_category
-    name: "tutorials"
-    label: "Tutorials"
-
-!!site.page src: "tutorial_basics"
-    description: "Basic tutorial"
-
-!!site.page src: "tutorial_advanced"
-    description: "Advanced tutorial"
-```
-
-### Pattern 3: Complex Site with External Links
-
-```heroscript
-!!site.config
-    name: "complex_site"
-    title: "Complex Documentation Site"
-    url: "https://docs.example.com"
-
-!!site.navbar
-    title: "My Platform"
-    logo_src: "img/logo.svg"
-
-!!site.navbar_item
-    label: "Docs"
-    to: "docs/intro"
-    position: "left"
-
-!!site.navbar_item
-    label: "API"
-    to: "api"
-    position: "left"
-
-!!site.navbar_item
-    label: "GitHub"
-    href: "https://github.com/example/repo"
-    position: "right"
-
-!!site.footer
-    style: "dark"
-
-!!site.footer_item
-    title: "Documentation"
-    label: "Getting Started"
-    to: "docs/intro"
-
-!!site.footer_item
-    title: "Community"
-    label: "Discord"
-    href: "https://discord.gg/example"
-
-!!site.page_category
-    name: "getting_started"
-
-!!site.page src: "docs:introduction"
-    description: "Introduction to the platform"
-    slug: "/"
-
-!!site.page src: "installation"
-    description: "Installation guide"
-```
-
-## Error Prevention
-
-### Common Mistakes to Avoid
-
-1. **Missing Collection on First Page:**
-```heroscript
-# WRONG - no collection specified
-!!site.page src: "introduction"
-
-# CORRECT
-!!site.page src: "docs:introduction"
-```
-
-2. **Category Without Name:**
-```heroscript
-# WRONG - missing name
-!!site.page_category
-    label: "Getting Started"
-
-# CORRECT
-!!site.page_category
-    name: "getting_started"
-    label: "Getting Started"
-```
-
-3. **Missing Description:**
-```heroscript
-# WRONG - no description
-!!site.page src: "docs:intro"
-
-# CORRECT
-!!site.page src: "docs:intro"
-    description: "Introduction to the platform"
-```
-
-4. **Incorrect File Ordering:**
-```
-# WRONG - pages before config
-pages.heroscript
-config.heroscript
-
-# CORRECT - config first
-0_config.heroscript
-1_pages.heroscript
-```
-
-## Validation Checklist
-
-When generating HeroScript for the Site module, verify:
-
-- [ ] `!!site.config` includes `name` parameter
-- [ ] All pages have `description` parameter
-- [ ] First page in each collection specifies `collection:page_name`
-- [ ] Categories are declared before their pages
-- [ ] Files use numeric prefixes for ordering
-- [ ] Navigation items have either `to` or `href`
-- [ ] Footer items are grouped by `title`
-- [ ] External URLs include protocol (https://)
-- [ ] Paths don't have trailing slashes unless intentional
-- [ ] Draft pages are marked with `draft: true`
-
-## Integration with V Code
-
-When working with the Site module in V code:
-
-```v
-import incubaid.herolib.web.site
-import incubaid.herolib.core.playbook
-
-// Process HeroScript files
-mut plbook := playbook.new(path: '/path/to/heroscripts')!
-site.play(mut plbook)!
-
-// Access configured site
-mut mysite := site.get(name: 'my_site')!
-
-// Iterate through pages
-for page in mysite.pages {
-    println('Page: ${page.name} - ${page.description}')
-}
-
-// Iterate through sections
-for section in mysite.sections {
-    println('Section: ${section.label}')
-}
-```
-
-## Summary
-
-The Site module's HeroScript format provides a declarative way to configure websites with:
-- Clear separation of concerns (config, navigation, content)
-- Automatic ordering and organization
-- Collection and category persistence for reduced repetition
-- Flexible metadata and SEO configuration
-- Support for both internal and external content
-
-Always follow the execution order rules, use numeric file prefixes, and provide complete metadata for best results.
@@ -1,12 +0,0 @@
-module site
-
-// Page represents a single documentation page
-pub struct Page {
-pub mut:
-	id          string // Unique identifier: "collection:page_name"
-	title       string // Display title (optional, extracted from markdown if empty)
-	description string // Brief description for metadata
-	draft       bool   // Mark as draft (hidden from navigation)
-	hide_title  bool   // Hide the title when rendering
-	src         string // Source reference (same as id in this format)
-}
@@ -1,104 +0,0 @@
-module site
-
-import json
-
-// ============================================================================
-// Sidebar Navigation Models (Domain Types)
-// ============================================================================
-
-pub struct SideBar {
-pub mut:
-	my_sidebar []NavItem
-}
-
-pub type NavItem = NavDoc | NavCat | NavLink
-
-pub struct NavDoc {
-pub:
-	id    string
-	label string
-}
-
-pub struct NavCat {
-pub mut:
-	label       string
-	collapsible bool = true
-	collapsed   bool
-	items       []NavItem
-}
-
-pub struct NavLink {
-pub:
-	label       string
-	href        string
-	description string
-}
-
-// ============================================================================
-// JSON Serialization Struct (unified to avoid sum type _type field)
-// ============================================================================
-
-struct SidebarItem {
-	typ         string @[json: 'type']
-	id          string @[omitempty]
-	label       string
-	href        string @[omitempty]
-	description string @[omitempty]
-	collapsible bool   @[json: 'collapsible'; omitempty]
-	collapsed   bool   @[json: 'collapsed'; omitempty]
-	items       []SidebarItem @[omitempty]
-}
-
-// ============================================================================
-// JSON Serialization
-// ============================================================================
-
-pub fn (sb SideBar) sidebar_to_json() !string {
-	items := sb.my_sidebar.map(to_sidebar_item(it))
-	return json.encode_pretty(items)
-}
-
-fn to_sidebar_item(item NavItem) SidebarItem {
-	return match item {
-		NavDoc { from_doc(item) }
-		NavLink { from_link(item) }
-		NavCat { from_category(item) }
-	}
-}
-
-fn from_doc(doc NavDoc) SidebarItem {
-	return SidebarItem{
-		typ:   'doc'
-		id:    extract_page_id(doc.id)
-		label: doc.label
-	}
-}
-
-fn from_link(link NavLink) SidebarItem {
-	return SidebarItem{
-		typ:         'link'
-		label:       link.label
-		href:        link.href
-		description: link.description
-	}
-}
-
-fn from_category(cat NavCat) SidebarItem {
-	return SidebarItem{
-		typ:         'category'
-		label:       cat.label
-		collapsible: cat.collapsible
-		collapsed:   cat.collapsed
-		items:       cat.items.map(to_sidebar_item(it))
-	}
-}
-
-// extract_page_id extracts the page name from a "collection:page_name" format.
-// If the id doesn't contain a colon, returns the id as-is.
-pub fn extract_page_id(id string) string {
-	parts := id.split(':')
-	if parts.len == 2 {
-		return parts[1]
-	}
-	return id
-}