This commit is contained in:
2025-12-01 19:00:31 +01:00
parent 55966be158
commit c22e9ae8ce
22 changed files with 231 additions and 279 deletions

View File

@@ -1,10 +1,10 @@
# AtlasClient
# DocTreeClient
A simple API for accessing document collections exported by the `doctree` module.
## What It Does
AtlasClient provides methods to:
DocTreeClient provides methods to:
- List collections, pages, files, and images
- Check if resources exist

View File

@@ -7,9 +7,9 @@ import os
import json
import incubaid.herolib.core.redisclient
// AtlasClient provides access to DocTree-exported documentation collections
// DocTreeClient provides access to exported documentation collections
// It reads from both the exported directory structure and Redis metadata
pub struct AtlasClient {
pub struct DocTreeClient {
pub mut:
redis &redisclient.Redis
export_dir string // Path to the doctree export directory (contains content/ and meta/)
@@ -17,7 +17,7 @@ pub mut:
// get_page_path returns the path for a page in a collection
// Pages are stored in {export_dir}/content/{collection}/{page}.md
pub fn (mut c AtlasClient) get_page_path(collection_name string, page_name string) !string {
pub fn (mut c DocTreeClient) get_page_path(collection_name string, page_name string) !string {
// Apply name normalization
fixed_collection_name := texttools.name_fix(collection_name)
fixed_page_name := texttools.name_fix(page_name)
@@ -40,7 +40,7 @@ pub fn (mut c AtlasClient) get_page_path(collection_name string, page_name strin
// get_file_path returns the path for a file in a collection
// Files are stored in {export_dir}/content/{collection}/{filename}
pub fn (mut c AtlasClient) get_file_path(collection_name_ string, file_name_ string) !string {
pub fn (mut c DocTreeClient) get_file_path(collection_name_ string, file_name_ string) !string {
collection_name := texttools.name_fix(collection_name_)
file_name := texttools.name_fix(file_name_)
@@ -62,7 +62,7 @@ pub fn (mut c AtlasClient) get_file_path(collection_name_ string, file_name_ str
// get_image_path returns the path for an image in a collection
// Images are stored in {export_dir}/content/{collection}/{imagename}
pub fn (mut c AtlasClient) get_image_path(collection_name_ string, image_name_ string) !string {
pub fn (mut c DocTreeClient) get_image_path(collection_name_ string, image_name_ string) !string {
// Apply name normalization
collection_name := texttools.name_fix(collection_name_)
// Images keep their original names with extensions
@@ -85,28 +85,28 @@ pub fn (mut c AtlasClient) get_image_path(collection_name_ string, image_name_ s
}
// page_exists checks if a page exists in a collection
pub fn (mut c AtlasClient) page_exists(collection_name string, page_name string) bool {
pub fn (mut c DocTreeClient) page_exists(collection_name string, page_name string) bool {
// Try to get the page path - if it succeeds, the page exists
_ := c.get_page_path(collection_name, page_name) or { return false }
return true
}
// file_exists checks if a file exists in a collection
pub fn (mut c AtlasClient) file_exists(collection_name string, file_name string) bool {
pub fn (mut c DocTreeClient) file_exists(collection_name string, file_name string) bool {
// Try to get the file path - if it succeeds, the file exists
_ := c.get_file_path(collection_name, file_name) or { return false }
return true
}
// image_exists checks if an image exists in a collection
pub fn (mut c AtlasClient) image_exists(collection_name string, image_name string) bool {
pub fn (mut c DocTreeClient) image_exists(collection_name string, image_name string) bool {
// Try to get the image path - if it succeeds, the image exists
_ := c.get_image_path(collection_name, image_name) or { return false }
return true
}
// get_page_content returns the content of a page in a collection
pub fn (mut c AtlasClient) get_page_content(collection_name string, page_name string) !string {
pub fn (mut c DocTreeClient) get_page_content(collection_name string, page_name string) !string {
// Get the path for the page
page_path := c.get_page_path(collection_name, page_name)!
@@ -124,7 +124,7 @@ pub fn (mut c AtlasClient) get_page_content(collection_name string, page_name st
// list_collections returns a list of all collection names
// Collections are directories in {export_dir}/content/
pub fn (mut c AtlasClient) list_collections() ![]string {
pub fn (mut c DocTreeClient) list_collections() ![]string {
content_dir := os.join_path(c.export_dir, 'content')
// Check if content directory exists
@@ -148,7 +148,7 @@ pub fn (mut c AtlasClient) list_collections() ![]string {
// list_pages returns a list of all page names in a collection
// Uses metadata to get the authoritative list of pages that belong to this collection
pub fn (mut c AtlasClient) list_pages(collection_name string) ![]string {
pub fn (mut c DocTreeClient) list_pages(collection_name string) ![]string {
// Get metadata which contains the authoritative list of pages
metadata := c.get_collection_metadata(collection_name)!
@@ -162,7 +162,7 @@ pub fn (mut c AtlasClient) list_pages(collection_name string) ![]string {
}
// list_files returns a list of all file names in a collection (excluding pages and images)
pub fn (mut c AtlasClient) list_files(collection_name string) ![]string {
pub fn (mut c DocTreeClient) list_files(collection_name string) ![]string {
metadata := c.get_collection_metadata(collection_name)!
mut file_names := []string{}
for file_name, file_meta in metadata.files {
@@ -174,7 +174,7 @@ pub fn (mut c AtlasClient) list_files(collection_name string) ![]string {
}
// list_images returns a list of all image names in a collection
pub fn (mut c AtlasClient) list_images(collection_name string) ![]string {
pub fn (mut c DocTreeClient) list_images(collection_name string) ![]string {
metadata := c.get_collection_metadata(collection_name)!
mut images := []string{}
for file_name, file_meta in metadata.files {
@@ -187,7 +187,7 @@ pub fn (mut c AtlasClient) list_images(collection_name string) ![]string {
// list_pages_map returns a map of collection names to a list of page names within that collection.
// The structure is map[collectionname][]pagename.
pub fn (mut c AtlasClient) list_pages_map() !map[string][]string {
pub fn (mut c DocTreeClient) list_pages_map() !map[string][]string {
mut result := map[string][]string{}
collections := c.list_collections()!
@@ -201,7 +201,7 @@ pub fn (mut c AtlasClient) list_pages_map() !map[string][]string {
// get_collection_metadata reads and parses the metadata JSON file for a collection
// Metadata is stored in {export_dir}/meta/{collection}.json
pub fn (mut c AtlasClient) get_collection_metadata(collection_name string) !CollectionMetadata {
pub fn (mut c DocTreeClient) get_collection_metadata(collection_name string) !CollectionMetadata {
// Apply name normalization
fixed_collection_name := texttools.name_fix(collection_name)
@@ -221,23 +221,23 @@ pub fn (mut c AtlasClient) get_collection_metadata(collection_name string) !Coll
}
// get_collection_errors returns the errors for a collection from metadata
pub fn (mut c AtlasClient) get_collection_errors(collection_name string) ![]ErrorMetadata {
pub fn (mut c DocTreeClient) get_collection_errors(collection_name string) ![]ErrorMetadata {
metadata := c.get_collection_metadata(collection_name)!
return metadata.errors
}
// has_errors checks if a collection has any errors
pub fn (mut c AtlasClient) has_errors(collection_name string) bool {
pub fn (mut c DocTreeClient) has_errors(collection_name string) bool {
errors := c.get_collection_errors(collection_name) or { return false }
return errors.len > 0
}
pub fn (mut c AtlasClient) copy_collection(collection_name string, destination_path string) ! {
pub fn (mut c DocTreeClient) copy_collection(collection_name string, destination_path string) ! {
// TODO: list over all pages, links & files and copy them to destination
}
// will copy all pages linked from a page to a destination directory as well as the page itself
pub fn (mut c AtlasClient) copy_pages(collection_name string, page_name string, destination_path string) ! {
pub fn (mut c DocTreeClient) copy_pages(collection_name string, page_name string, destination_path string) ! {
// TODO: copy page itself
// Get page links from metadata
@@ -262,7 +262,7 @@ pub fn (mut c AtlasClient) copy_pages(collection_name string, page_name string,
}
}
pub fn (mut c AtlasClient) copy_images(collection_name string, page_name string, destination_path string) ! {
pub fn (mut c DocTreeClient) copy_images(collection_name string, page_name string, destination_path string) ! {
// Get page links from metadata
links := c.get_page_links(collection_name, page_name)!
@@ -288,7 +288,7 @@ pub fn (mut c AtlasClient) copy_images(collection_name string, page_name string,
// copy_files copies all non-image files from a page to a destination directory
// Files are placed in {destination}/files/ subdirectory
// Only copies files referenced in the page (via links)
pub fn (mut c AtlasClient) copy_files(collection_name string, page_name string, destination_path string) ! {
pub fn (mut c DocTreeClient) copy_files(collection_name string, page_name string, destination_path string) ! {
// Get page links from metadata
links := c.get_page_links(collection_name, page_name)!

View File

@@ -10,17 +10,16 @@ import incubaid.herolib.core.redisclient
// get_page_links returns all links found in a page and pages linked to it (recursive)
// This includes transitive links through page-to-page references
// External links, files, and images do not recurse further
pub fn (mut c AtlasClient) get_page_links(collection_name string, page_name string) ![]LinkMetadata {
pub fn (mut c DocTreeClient) get_page_links(collection_name string, page_name string) ![]LinkMetadata {
mut visited := map[string]bool{}
mut all_links := []LinkMetadata{}
c.collect_page_links_recursive(collection_name, page_name, mut visited, mut all_links)!
return all_links
}
// collect_page_links_recursive is the internal recursive implementation
// It traverses all linked pages and collects all links found
//
//
// Thread safety: Each call to get_page_links gets its own visited map
// Circular references are prevented by tracking visited pages
//
@@ -28,17 +27,17 @@ pub fn (mut c AtlasClient) get_page_links(collection_name string, page_name stri
// - .page links: Recursively traverse to get links from the target page
// - .file and .image links: Included in results but not recursively expanded
// - .external links: Included in results but not recursively expanded
fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page_name string, mut visited map[string]bool, mut all_links []LinkMetadata) ! {
fn (mut c DocTreeClient) collect_page_links_recursive(collection_name string, page_name string, mut visited map[string]bool, mut all_links []LinkMetadata) ! {
// Create unique key for cycle detection
page_key := '${collection_name}:${page_name}'
// Prevent infinite loops on circular page references
// Example: Page A -> Page B -> Page A
if page_key in visited {
return
}
visited[page_key] = true
// Get collection metadata
metadata := c.get_collection_metadata(collection_name)!
fixed_page_name := texttools.name_fix(page_name)
@@ -47,13 +46,13 @@ fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page
if fixed_page_name !in metadata.pages {
return error('page_not_found: Page "${page_name}" not found in collection metadata, for collection: "${collection_name}"')
}
page_meta := metadata.pages[fixed_page_name]
// Add all direct links from this page to the result
// This includes: pages, files, images, and external links
all_links << page_meta.links
// Recursively traverse only page-to-page links
for link in page_meta.links {
// Only recursively process links to other pages within the doctree
@@ -62,9 +61,10 @@ fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page
if link.file_type != .page || link.status == .external {
continue
}
// Recursively collect links from the target page
c.collect_page_links_recursive(link.target_collection_name, link.target_item_name, mut visited, mut all_links) or {
c.collect_page_links_recursive(link.target_collection_name, link.target_item_name, mut
visited, mut all_links) or {
// If we encounter an error (e.g., target page doesn't exist in metadata),
// we continue processing other links rather than failing completely
// This provides graceful degradation for broken link references
@@ -75,45 +75,45 @@ fn (mut c AtlasClient) collect_page_links_recursive(collection_name string, page
// get_image_links returns all image links found in a page and related pages (recursive)
// This is a convenience function that filters get_page_links to only image links
pub fn (mut c AtlasClient) get_image_links(collection_name string, page_name string) ![]LinkMetadata {
pub fn (mut c DocTreeClient) get_image_links(collection_name string, page_name string) ![]LinkMetadata {
all_links := c.get_page_links(collection_name, page_name)!
mut image_links := []LinkMetadata{}
for link in all_links {
if link.file_type == .image {
image_links << link
}
}
return image_links
}
// get_file_links returns all file links (non-image) found in a page and related pages (recursive)
// This is a convenience function that filters get_page_links to only file links
pub fn (mut c AtlasClient) get_file_links(collection_name string, page_name string) ![]LinkMetadata {
pub fn (mut c DocTreeClient) get_file_links(collection_name string, page_name string) ![]LinkMetadata {
all_links := c.get_page_links(collection_name, page_name)!
mut file_links := []LinkMetadata{}
for link in all_links {
if link.file_type == .file {
file_links << link
}
}
return file_links
}
// get_page_link_targets returns all page-to-page link targets found in a page and related pages
// This is a convenience function that filters get_page_links to only page links
pub fn (mut c AtlasClient) get_page_link_targets(collection_name string, page_name string) ![]LinkMetadata {
pub fn (mut c DocTreeClient) get_page_link_targets(collection_name string, page_name string) ![]LinkMetadata {
all_links := c.get_page_links(collection_name, page_name)!
mut page_links := []LinkMetadata{}
for link in all_links {
if link.file_type == .page && link.status != .external {
page_links << link
}
}
return page_links
}
}

View File

@@ -3,18 +3,18 @@ module client
import incubaid.herolib.core.base
@[params]
pub struct AtlasClientArgs {
pub struct DocTreeClientArgs {
pub:
export_dir string @[required] // Path to doctree export directory
}
// Create a new AtlasClient instance
// Create a new DocTreeClient instance
// The export_dir should point to the directory containing content/ and meta/ subdirectories
pub fn new(args AtlasClientArgs) !&AtlasClient {
pub fn new(args DocTreeClientArgs) !&DocTreeClient {
mut context := base.context()!
mut redis := context.redis()!
return &AtlasClient{
return &DocTreeClient{
redis: redis
export_dir: args.export_dir
}

View File

@@ -1,7 +1,7 @@
module client
// list_markdown returns the collections and their pages in markdown format.
pub fn (mut c AtlasClient) list_markdown() !string {
pub fn (mut c DocTreeClient) list_markdown() !string {
mut markdown_output := ''
pages_map := c.list_pages_map()!

View File

@@ -1,6 +1,6 @@
module client
// AtlasClient provides access to DocTree-exported documentation collections
// DocTreeClient provides access to DocTree-exported documentation collections
// It reads from both the exported directory structure and Redis metadata
// List of recognized image file extensions

View File

@@ -41,7 +41,7 @@ fn (mut c Collection) init_pre() ! {
}
fn (mut c Collection) init_post() ! {
c.validate_links()!
c.find_links()!
c.init_git_info()!
}
@@ -247,31 +247,6 @@ pub fn (c Collection) print_errors() {
}
}
// Validate all links in collection
pub fn (mut c Collection) validate_links() ! {
for _, mut page in c.pages {
content := page.content(include: true)!
page.links = page.find_links(content)! // will walk over links see if errors and add errors
}
}
// Fix all links in collection (rewrite files)
pub fn (mut c Collection) fix_links() ! {
for _, mut page in c.pages {
// Read original content
content := page.content()!
// Fix links
fixed_content := page.content_with_fixed_links()!
// Write back if changed
if fixed_content != content {
mut p := page.path()!
p.write(fixed_content)!
}
}
}
// Check if session can read this collection
pub fn (c Collection) can_read(session Session) bool {
// If no ACL set, everyone can read
@@ -315,104 +290,3 @@ pub fn (c Collection) can_write(session Session) bool {
return false
}
// Detect git repository URL for a collection
fn (mut c Collection) init_git_info() ! {
mut current_path := c.path()!
// Walk up directory tree to find .git
mut git_repo := current_path.parent_find('.git') or {
// No git repo found
return
}
if git_repo.path == '' {
panic('Unexpected empty git repo path')
}
mut gs := gittools.new()!
mut p := c.path()!
mut location := gs.gitlocation_from_path(p.path)!
r := os.execute_opt('cd ${p.path} && git branch --show-current')!
location.branch_or_tag = r.output.trim_space()
c.git_url = location.web_url()!
}
////////////SCANNING FUNCTIONS ?//////////////////////////////////////////////////////
fn (mut c Collection) scan(mut dir pathlib.Path) ! {
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
// Skip hidden files/dirs
if entry.name().starts_with('.') || entry.name().starts_with('_') {
continue
}
if entry.is_dir() {
// Recursively scan subdirectories
mut mutable_entry := entry
c.scan(mut mutable_entry)!
continue
}
// Process files based on extension
match entry.extension_lower() {
'md' {
mut mutable_entry := entry
c.add_page(mut mutable_entry)!
}
else {
mut mutable_entry := entry
c.add_file(mut mutable_entry)!
}
}
}
}
// Scan for ACL files
fn (mut c Collection) scan_acl() ! {
// Look for read.acl in collection directory
read_acl_path := '${c.path()!.path}/read.acl'
if os.exists(read_acl_path) {
content := os.read_file(read_acl_path)!
// Split by newlines and normalize
c.acl_read = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
// Look for write.acl in collection directory
write_acl_path := '${c.path()!.path}/write.acl'
if os.exists(write_acl_path) {
content := os.read_file(write_acl_path)!
// Split by newlines and normalize
c.acl_write = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
}
// scan_groups scans the collection's directory for .group files and loads them into memory.
pub fn (mut c Collection) scan_groups() ! {
if c.name != 'groups' {
return error('scan_groups only works on "groups" collection')
}
mut p := c.path()!
mut entries := p.list(recursive: false)!
for mut entry in entries.paths {
if entry.extension_lower() == 'group' {
filename := entry.name_fix_no_ext()
mut visited := map[string]bool{}
mut group := parse_group_file(filename, c.path()!.path, mut visited)!
c.doctree.group_add(mut group)!
}
}
}

View File

@@ -0,0 +1,54 @@
module core
import incubaid.herolib.develop.gittools
import os
// Validate all links in collection
fn (mut c Collection) find_links() ! {
for _, mut page in c.pages {
content := page.content(include: true)!
page.links = page.find_links(content)! // will walk over links see if errors and add errors
}
}
// Fix all links in collection (rewrite files)
fn (mut c Collection) fix_links() ! {
for _, mut page in c.pages {
// Read original content
content := page.content()!
// Fix links
fixed_content := page.content_with_fixed_links()!
// Write back if changed
if fixed_content != content {
mut p := page.path()!
p.write(fixed_content)!
}
}
}
// Detect git repository URL for a collection
fn (mut c Collection) init_git_info() ! {
mut current_path := c.path()!
// Walk up directory tree to find .git
mut git_repo := current_path.parent_find('.git') or {
// No git repo found
return
}
if git_repo.path == '' {
panic('Unexpected empty git repo path')
}
mut gs := gittools.new()!
mut p := c.path()!
mut location := gs.gitlocation_from_path(p.path)!
r := os.execute_opt('cd ${p.path} && git branch --show-current')!
location.branch_or_tag = r.output.trim_space()
c.git_url = location.web_url()!
}

View File

@@ -0,0 +1,84 @@
module core
import incubaid.herolib.core.pathlib
import incubaid.herolib.web.doctree as doctreetools
import incubaid.herolib.develop.gittools
import incubaid.herolib.data.paramsparser
import incubaid.herolib.ui.console
import os
////////////SCANNING FUNCTIONS ?//////////////////////////////////////////////////////
fn (mut c Collection) scan(mut dir pathlib.Path) ! {
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
// Skip hidden files/dirs
if entry.name().starts_with('.') || entry.name().starts_with('_') {
continue
}
if entry.is_dir() {
// Recursively scan subdirectories
mut mutable_entry := entry
c.scan(mut mutable_entry)!
continue
}
// Process files based on extension
match entry.extension_lower() {
'md' {
mut mutable_entry := entry
c.add_page(mut mutable_entry)!
}
else {
mut mutable_entry := entry
c.add_file(mut mutable_entry)!
}
}
}
}
// Scan for ACL files
fn (mut c Collection) scan_acl() ! {
// Look for read.acl in collection directory
read_acl_path := '${c.path()!.path}/read.acl'
if os.exists(read_acl_path) {
content := os.read_file(read_acl_path)!
// Split by newlines and normalize
c.acl_read = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
// Look for write.acl in collection directory
write_acl_path := '${c.path()!.path}/write.acl'
if os.exists(write_acl_path) {
content := os.read_file(write_acl_path)!
// Split by newlines and normalize
c.acl_write = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
}
// scan_groups scans the collection's directory for .group files and loads them into memory.
pub fn (mut c Collection) scan_groups() ! {
if c.name != 'groups' {
return error('scan_groups only works on "groups" collection')
}
mut p := c.path()!
mut entries := p.list(recursive: false)!
for mut entry in entries.paths {
if entry.extension_lower() == 'group' {
filename := entry.name_fix_no_ext()
mut visited := map[string]bool{}
mut group := parse_group_file(filename, c.path()!.path, mut visited)!
c.doctree.group_add(mut group)!
}
}
}

View File

@@ -62,20 +62,6 @@ pub fn (mut a DocTree) init_post() ! {
}
}
// Validate all links in all collections
pub fn (mut a DocTree) validate_links() ! {
for _, mut col in a.collections {
col.validate_links()!
}
}
// Fix all links in all collections (rewrite source files)
pub fn (mut a DocTree) fix_links() ! {
for _, mut col in a.collections {
col.fix_links()!
}
}
// Add a group to the doctree
pub fn (mut a DocTree) group_add(mut group Group) ! {
if group.name in a.groups {

View File

@@ -13,7 +13,7 @@ pub mut:
redis bool = true
}
// Export all collections
// Export all collections and do all processing steps
pub fn (mut a DocTree) export(args ExportArgs) ! {
mut dest := pathlib.get_dir(path: args.destination, create: true)!
@@ -21,8 +21,12 @@ pub fn (mut a DocTree) export(args ExportArgs) ! {
dest.empty()!
}
// Validate links before export to populate page.links
a.validate_links()!
// first make sure we have all links identified, in the pages itself
// and make sure we know the git info
for _, mut col in a.collections {
col.find_links()!
col.init_git_info()!
}
for _, mut col in a.collections {
col.export(
@@ -32,6 +36,10 @@ pub fn (mut a DocTree) export(args ExportArgs) ! {
redis: args.redis
)!
}
for _, mut col in a.collections {
col.fix_links()!
}
}
@[params]

View File

@@ -10,13 +10,13 @@ __global (
)
@[params]
pub struct AtlasNewArgs {
pub struct DocTreeNewArgs {
pub mut:
name string = 'default'
}
// Create a new DocTree
pub fn new(args AtlasNewArgs) !&DocTree {
pub fn new(args DocTreeNewArgs) !&DocTree {
mut name := doctreetools.name_fix(args.name)
mut a := &DocTree{

View File

@@ -1,7 +1,6 @@
module core
import incubaid.herolib.web.doctree
import incubaid.herolib.ui.console
import incubaid.herolib.web.doctree as doctreetools
pub enum LinkFileType {
page // Default: link to another page
@@ -161,23 +160,10 @@ fn (mut p Page) parse_link_target(mut link Link) ! {
// Format: $collection:$pagename or $collection:$pagename.md
if target.contains(':') {
parts := target.split(':')
if parts.len >= 2 {
link.target_collection_name = doctree.name_fix(parts[0])
// For file links, use name without extension; for page links, normalize normally
if link.file_type == .file {
link.target_item_name = doctree.name_fix(parts[1])
} else {
link.target_item_name = normalize_page_name(parts[1])
}
}
link.target_collection_name, link.target_item_name = doctreetools.key_parse(target)!
} else {
// For file links, use name without extension; for page links, normalize normally
if link.file_type == .file {
link.target_item_name = doctree.name_fix(target).trim_space()
} else {
link.target_item_name = normalize_page_name(target).trim_space()
}
link.target_item_name = doctreetools.name_fix(target)
link.target_collection_name = p.collection.name
}
@@ -298,14 +284,3 @@ fn (mut p Page) filesystem_link_path(mut link Link) !string {
return target_path.path_relative(source_path.path)!
}
/////////////TOOLS//////////////////////////////////
// Normalize page name (remove .md, apply name_fix)
fn normalize_page_name(name string) string {
mut clean := name
if clean.ends_with('.md') {
clean = clean[0..clean.len - 3]
}
return doctree.name_fix(clean)
}

View File

@@ -1,4 +0,0 @@
- first find all pages
- then for each page find all links

View File

@@ -158,7 +158,7 @@ fn test_find_links() {
assert links.len >= 2
}
fn test_validate_links() {
fn test_find_links() {
// Setup
col_path := '${test_base}/link_test'
os.mkdir_all(col_path)!
@@ -177,12 +177,11 @@ fn test_validate_links() {
mut a := new()!
a.add_collection(mut pathlib.get_dir(path: col_path)!)!
// Validate
a.validate_links()!
// Should have no errors
col := a.get_collection('test_col')!
assert col.errors.len == 0
a.export(destination: '${test_base}/export_links')!
}
fn test_validate_broken_links() {
@@ -201,7 +200,7 @@ fn test_validate_broken_links() {
a.add_collection(mut pathlib.get_dir(path: col_path)!)!
// Validate
a.validate_links()!
a.export(destination: '${test_base}/validate_broken_links')!
// Should have error
col := a.get_collection('test_col')!
@@ -294,14 +293,10 @@ fn test_cross_collection_links() {
a.add_collection(mut pathlib.get_dir(path: col1_path)!)!
a.add_collection(mut pathlib.get_dir(path: col2_path)!)!
// Validate - should pass
a.validate_links()!
col1 := a.get_collection('col1')!
assert col1.errors.len == 0
// Fix links - cross-collection links should NOT be rewritten
a.fix_links()!
a.export(destination: '${test_base}/export_cross')!
fixed := page1.read()!
assert fixed.contains('[Link to col2](col2:page2)') // Unchanged

View File

@@ -42,7 +42,7 @@ put this in .hero file
## usage in herolib
```v
import incubaid.herolib.data.doctree
import incubaid.herolib.web.doctree
// Create a new DocTree
mut a := doctree.new(name: 'my_docs')!
@@ -320,7 +320,7 @@ mut a := doctree.new()!
a.scan(path: './docs')!
// Validate all links
a.validate_links()!
a.find_links()!
// Check for errors
for _, col in a.collections {
@@ -427,7 +427,7 @@ When `redis: true` is set during export, DocTree stores:
### Redis Usage Examples
```v
import incubaid.herolib.data.doctree
import incubaid.herolib.web.doctree
import incubaid.herolib.core.base
// Export with Redis metadata (default)
@@ -468,7 +468,7 @@ println('Logo image: ${img_path}') // Output: img/logo.png
Save collection metadata to JSON files for archival or cross-tool compatibility:
```v
import incubaid.herolib.data.doctree
import incubaid.herolib.web.doctree
mut a := doctree.new(name: 'my_docs')!
a.scan(path: './docs')!
@@ -507,7 +507,7 @@ Create a `.vsh` script to process DocTree operations:
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import incubaid.herolib.core.playbook
import incubaid.herolib.data.doctree
import incubaid.herolib.web.doctree
// Define your HeroScript content
heroscript := "

View File

@@ -43,9 +43,6 @@ fn test_export_recursive_links() {
a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
a.add_collection(mut pathlib.get_dir(path: col_c_path)!)!
// Validate links before export to populate page.links
a.validate_links()!
// Export
export_path := '${test_base}/export_recursive'
a.export(destination: export_path)!
@@ -160,8 +157,6 @@ fn test_export_recursive_with_images() {
a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
// Validate and export
a.validate_links()!
export_path := '${test_base}/export_recursive_img'
a.export(destination: export_path)!

View File

@@ -77,9 +77,6 @@ fn test_save_and_load_with_includes() {
mut a := new(name: 'my_docs')!
a.scan(path: '${test_dir}/docs_include')!
// Validate links (should find the include)
a.validate_links()!
col := a.get_collection('docs')!
assert !col.has_errors()
@@ -110,9 +107,6 @@ fn test_save_and_load_with_errors() {
mut a := new(name: 'my_docs')!
a.scan(path: '${test_dir}/docs_errors')!
// Validate - will generate errors
a.validate_links()!
col := a.get_collection('docs')!
assert col.has_errors()
initial_error_count := col.errors.len

View File

@@ -1,39 +1,32 @@
module meta
import incubaid.herolib.data.doctree.client as doctree_client
import incubaid.herolib.web.doctree.client as doctree_client
import incubaid.herolib.data.markdown.tools as markdowntools
// Page represents a single documentation page
pub struct Page {
pub mut:
id string // Unique identifier: "collection:page_name"
title string // Display title (optional, extracted from markdown if empty)
description string // Brief description for metadata
questions []Question
questions []Question
}
pub struct Question {
pub mut:
question string
answer string
question string
answer string
}
pub fn (mut p Page) content(client doctree_client.AtlasClient) !string {
pub fn (mut p Page) content(client doctree_client.DocTreeClient) !string {
mut c := client.get_page_content(p.id)!
if p.title =="" {
if p.title == '' {
p.title = markdowntools.extract_title(c)
}
//TODO in future should do AI
if p.description =="" {
// TODO in future should do AI
if p.description == '' {
p.description = p.title
}
}
return c
}

View File

@@ -1,7 +1,7 @@
module docusaurus
import incubaid.herolib.core.pathlib
import incubaid.herolib.data.doctree.client as doctree_client
import incubaid.herolib.web.doctree.client as doctree_client
import incubaid.herolib.data.markdown.tools as markdowntools
import incubaid.herolib.ui.console
import incubaid.herolib.web.site
@@ -72,7 +72,7 @@ fn reset_docs_dir(docs_path string) ! {
os.mkdir_all(docs_path)!
}
fn report_errors(mut client doctree_client.AtlasClient, errors []string) ! {
fn report_errors(mut client doctree_client.DocTreeClient, errors []string) ! {
available := client.list_markdown() or { 'Could not list available pages' }
console.print_stderr('Available pages:\n${available}')
return error('Errors during doc generation:\n${errors.join('\n\n')}')
@@ -82,7 +82,7 @@ fn report_errors(mut client doctree_client.AtlasClient, errors []string) ! {
// Page Processing
// ============================================================================
fn process_page(mut client doctree_client.AtlasClient, docs_path string, page site.Page, first_doc_page string, mut errors []string) {
fn process_page(mut client doctree_client.DocTreeClient, docs_path string, page site.Page, first_doc_page string, mut errors []string) {
collection, page_name := parse_page_src(page.src) or {
errors << err.msg()
return
@@ -122,7 +122,7 @@ fn write_page(docs_path string, page_name string, page site.Page, content string
file.write(final_content)!
}
fn copy_page_assets(mut client doctree_client.AtlasClient, docs_path string, collection string, page_name string) {
fn copy_page_assets(mut client doctree_client.DocTreeClient, docs_path string, collection string, page_name string) {
client.copy_images(collection, page_name, docs_path) or {}
client.copy_files(collection, page_name, docs_path) or {}
}
@@ -132,7 +132,6 @@ fn copy_page_assets(mut client doctree_client.AtlasClient, docs_path string, col
// ============================================================================
fn build_frontmatter(page site.Page, content string, is_landing_page bool) string {
title := get_title(page, content)
description := get_description(page, title)
@@ -147,7 +146,6 @@ fn build_frontmatter(page site.Page, content string, is_landing_page bool) strin
// $dbg;
// }
// Add slug: / for the docs landing page so /docs/ works directly
if is_landing_page {
lines << 'slug: /'

View File

@@ -1,7 +1,7 @@
module docusaurus
import incubaid.herolib.core.pathlib
// import incubaid.herolib.data.doctree.client as doctree_client
// import incubaid.herolib.web.doctree.client as doctree_client
// import incubaid.herolib.web.site { Page, Section, Site }
// import incubaid.herolib.data.markdown.tools as markdowntools
// import incubaid.herolib.ui.console

View File

@@ -1,7 +1,7 @@
module docusaurus
import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.data.doctree
import incubaid.herolib.web.doctree
import incubaid.herolib.ui.console
import os
@@ -32,7 +32,7 @@ fn process_define(mut plbook PlayBook) !&DocSite {
reset: p.get_default_false('reset')
template_update: p.get_default_false('template_update')
install: p.get_default_false('install')
doctree_dir: doctree_dir
doctree_dir: doctree_dir
)!
site_name := p.get('name') or { return error('docusaurus.define: "name" is required') }