From 04e1e2375ffe962604661f1c4a5aa8d051b8b8c3 Mon Sep 17 00:00:00 2001
From: Mahmoud-Emad
Date: Wed, 5 Nov 2025 15:25:50 +0200
Subject: [PATCH] refactor: Remove docusaurus dev server and path_meta flag

- Remove 'dev' flag from run command
- Remove 'path_meta' flag from run command
- Remove docusaurus integration from playcmds
- Add `validate_links` and `fix_links` to Atlas
- Refactor page link processing for clarity and export mode
---
 lib/core/herocmds/atlas.v  |  53 +-------------
 lib/data/atlas/atlas.v     |  14 ++++
 lib/data/atlas/export.v    |  12 ++--
 lib/data/atlas/factory.v   |  16 +++--
 lib/data/atlas/link.v      | 139 ++++++++++++++++++++-----------------
 lib/data/atlas/play.v      |  29 ++++----
 lib/web/docusaurus/dsite.v |   7 +-
 7 files changed, 128 insertions(+), 142 deletions(-)
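[Editor's note, illustrative only, not part of the patch: the new Atlas-level helpers added in lib/data/atlas/atlas.v can be driven roughly as sketched below. The sketch uses only calls that appear in this patch (atlas.new, Atlas.scan, Atlas.init_post, Atlas.validate_links, Atlas.fix_links); the './docs' path and atlas name are placeholders.]

    import incubaid.herolib.data.atlas

    fn main() {
        // create a named Atlas and scan a directory tree for collections
        mut a := atlas.new(name: 'docs') or { panic(err) }
        a.scan(path: './docs') or { panic(err) }
        a.init_post() or { panic(err) } // play.v runs this after scanning

        // validate_links checks the links of every collection;
        // fix_links rewrites the source files with corrected links
        a.validate_links() or { panic(err) }
        a.fix_links() or { panic(err) }
    }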
diff --git a/lib/core/herocmds/atlas.v b/lib/core/herocmds/atlas.v
index 427c348d..6dfd5181 100644
--- a/lib/core/herocmds/atlas.v
+++ b/lib/core/herocmds/atlas.v
@@ -4,7 +4,6 @@
 import incubaid.herolib.ui.console
 import incubaid.herolib.data.atlas
 import incubaid.herolib.core.playcmds
 import incubaid.herolib.develop.gittools
-import incubaid.herolib.web.docusaurus
 import os
 import cli { Command, Flag }
@@ -40,14 +39,6 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
         description: 'Path where atlas collections are located.'
     })
 
-    cmd_run.add_flag(Flag{
-        flag: .string
-        required: false
-        name: 'path_meta'
-        abbrev: 'pm'
-        description: 'Path where collection.json... will be saved too.'
-    })
-
     cmd_run.add_flag(Flag{
         flag: .string
         required: false
@@ -61,6 +52,7 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
         required: false
         name: 'destination'
         description: 'Export destination path.'
+        abbrev: 'd'
     })
 
     cmd_run.add_flag(Flag{
@@ -71,14 +63,6 @@ pub fn cmd_atlas(mut cmdroot Command) Command {
         required: false
         name: 'scan'
         abbrev: 's'
         description: 'Scan directories for collections.'
     })
 
-    cmd_run.add_flag(Flag{
-        flag: .bool
-        required: false
-        name: 'dev'
-        abbrev: 'd'
-        description: 'Run development server after export.'
-    })
-
     cmd_run.add_flag(Flag{
         flag: .bool
         required: false
@@ -118,7 +102,6 @@ fn cmd_atlas_execute(cmd Command) ! {
     mut update := cmd.flags.get_bool('update') or { false }
     mut scan := cmd.flags.get_bool('scan') or { false }
     mut export := cmd.flags.get_bool('export') or { false }
-    mut dev := cmd.flags.get_bool('dev') or { false }
 
     // Include and redis default to true unless explicitly disabled
     mut no_include := cmd.flags.get_bool('no-include') or { false }
@@ -128,9 +111,9 @@ fn cmd_atlas_execute(cmd Command) ! {
 
     // ---------- PATH LOGIC ----------
     mut path := cmd.flags.get_string('path') or { '' }
-    mut path_meta := cmd.flags.get_string('path_meta') or { '' }
     mut url := cmd.flags.get_string('url') or { '' }
     mut name := cmd.flags.get_string('name') or { 'default' }
+    mut destination := cmd.flags.get_string('destination') or { '' }
 
     if path == '' && url == '' {
@@ -166,12 +149,6 @@ fn cmd_atlas_execute(cmd Command) ! {
         export = true
     }
 
-    // If dev server is requested, ensure we scan and export first
-    if dev {
-        scan = true
-        export = true
-    }
-
     // Execute operations
     if scan {
         console.print_header('Scanning collections...')
@@ -205,30 +182,4 @@ fn cmd_atlas_execute(cmd Command) ! {
             }
         }
     }
-
-    // Run development server if requested - always run even if there were export errors
-    if dev {
-        if destination == '' {
-            return error('Cannot run dev server: no destination specified.
-            Use -destination flag.')
-        }
-
-        console.print_header('Starting Docusaurus development server')
-        console.print_item('Atlas content exported to: ${destination}')
-
-        // Get the docusaurus site that was configured via heroscript
-        // The heroscript should have been processed by playcmds.run() above
-        mut dsite := docusaurus.dsite_get('') or {
-            console.print_item('Warning: No Docusaurus site configured')
-            console.print_item('Make sure your atlas source directory contains a heroscript file with Docusaurus configuration')
-            return error('Cannot start dev server: ${err}')
-        }
-
-        // Run the docusaurus dev server - this will block until Ctrl+C
-        dsite.dev(
-            host: 'localhost'
-            port: 3000
-            open: true
-            watch_changes: false
-        )!
-    }
 }
diff --git a/lib/data/atlas/atlas.v b/lib/data/atlas/atlas.v
index fa3f41a4..a1ce54b6 100644
--- a/lib/data/atlas/atlas.v
+++ b/lib/data/atlas/atlas.v
@@ -63,6 +63,20 @@ pub fn (mut a Atlas) init_post() ! {
     }
 }
 
+// Validate all links in all collections
+pub fn (mut a Atlas) validate_links() ! {
+    for _, mut col in a.collections {
+        col.validate_links()!
+    }
+}
+
+// Fix all links in all collections (rewrite source files)
+pub fn (mut a Atlas) fix_links() ! {
+    for _, mut col in a.collections {
+        col.fix_links()!
+    }
+}
+
 // Add a group to the atlas
 pub fn (mut a Atlas) group_add(mut group Group) ! {
     if group.name in a.groups {
diff --git a/lib/data/atlas/export.v b/lib/data/atlas/export.v
index f9125395..ea0017c3 100644
--- a/lib/data/atlas/export.v
+++ b/lib/data/atlas/export.v
@@ -68,13 +68,15 @@ pub fn (mut c Collection) export(args CollectionExportArgs) ! {
     json_file.write(meta)!
 
     for _, mut page in c.pages {
-        content := page.content(include: args.include)!
-
-        // NEW: Process cross-collection links
-        processed_content := page.process_links(mut col_dir)!
+        // Get content with includes processed and links transformed for export
+        content := page.content_with_fixed_links(
+            include: args.include
+            cross_collection: true
+            export_mode: true
+        )!
 
         mut dest_file := pathlib.get_file(path: '${col_dir.path}/${page.name}.md', create: true)!
-        dest_file.write(processed_content)!
+        dest_file.write(content)!
 
         // Redis operations...
         if args.redis {
diff --git a/lib/data/atlas/factory.v b/lib/data/atlas/factory.v
index bd1731f6..187046d0 100644
--- a/lib/data/atlas/factory.v
+++ b/lib/data/atlas/factory.v
@@ -19,19 +19,20 @@ pub mut:
 }
 
 pub fn new(args AtlasNewArgs) !&Atlas {
     mut name := texttools.name_fix(args.name)
-    mut a := Atlas{
+    mut a := &Atlas{
         name: name
     }
     set(a)
-    return &a
+    return a
 }
 
 // Get Atlas from global map
 pub fn get(name string) !&Atlas {
+    mut fixed_name := texttools.name_fix(name)
     rlock atlases {
-        if name in atlases {
-            return atlases[name] or { return error('Atlas ${name} not found') }
+        if fixed_name in atlases {
+            return atlases[fixed_name] or { return error('Atlas ${name} not found') }
         }
     }
     return error("Atlas '${name}' not found")
@@ -39,8 +40,9 @@ pub fn get(name string) !&Atlas {
 
 // Check if Atlas exists
 pub fn exists(name string) bool {
+    mut fixed_name := texttools.name_fix(name)
     rlock atlases {
-        return name in atlases
+        return fixed_name in atlases
     }
 }
 
@@ -52,8 +54,8 @@ pub fn list() []string {
 }
 
 // Store Atlas in global map
-fn set(atlas Atlas) {
+fn set(atlas &Atlas) {
     lock atlases {
-        atlases[atlas.name] = &atlas
+        atlases[atlas.name] = atlas
     }
 }
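[Editor's note, illustrative only, not part of the patch: with the factory.v change above, get() and exists() now run the requested name through texttools.name_fix before the lookup, matching the normalized key that new() stores. A minimal sketch of the intended effect, assumed rather than taken from a test in this patch:]

    import incubaid.herolib.data.atlas

    fn main() {
        a := atlas.new(name: 'My Docs') or { panic(err) } // stored under the name_fix'd key
        println(a.name)
        assert atlas.exists('My Docs') // the lookup is normalized the same way, so this matches
        b := atlas.get('My Docs') or { panic(err) }
        assert b.name == a.name
    }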
diff --git a/lib/data/atlas/link.v b/lib/data/atlas/link.v
index 04e9d52c..8637c69c 100644
--- a/lib/data/atlas/link.v
+++ b/lib/data/atlas/link.v
@@ -1,8 +1,6 @@
 module atlas
 
 import incubaid.herolib.core.texttools
-import incubaid.herolib.core.pathlib
-import os
 
 // Link represents a markdown link found in content
 pub struct Link {
@@ -10,7 +8,8 @@ pub mut:
     src string // Source content where link was found (what to replace)
     text string // Link text [text]
     target string // Original link target (the source text)
-    line int // Line number where link was found
+    line int // Line number where link was found (1-based)
+    pos int // Character position in line where link starts (0-based)
     target_collection_name string
     target_item_name string
     status LinkStatus
@@ -103,11 +102,15 @@ fn (mut p Page) find_links(content string) ![]Link {
             is_image_link = false // means it's a file link, not an image link
         }
 
+        // Store position - use image_open if it's an image, otherwise open_bracket
+        link_start_pos := if is_image_link { image_open } else { open_bracket }
+
         mut link := Link{
             src: line[open_bracket..close_paren + 1]
             text: text
             target: target.trim_space()
             line: line_idx + 1
+            pos: link_start_pos
             is_file_link: is_file_link
             is_image_link: is_image_link
             page: &p
@@ -185,88 +188,96 @@ fn (mut p Page) parse_link_target(mut link Link) {
 
 ////////////////FIX PAGES FOR THE LINKS///////////////////////
 
+@[params]
+pub struct FixLinksArgs {
+    include bool // Process includes before fixing links
+    cross_collection bool // Process cross-collection links (for export)
+    export_mode bool // Use export-style simple paths instead of filesystem paths
+}
+
 // Fix links in page content - rewrites links with proper relative paths
-fn (mut p Page) content_with_fixed_links() !string {
-    mut content := p.content(include: false)!
-    if p.links.len == 0 {
-        return content
+fn (mut p Page) content_with_fixed_links(args FixLinksArgs) !string {
+    mut content := p.content(include: args.include)!
+
+    // Get links - either re-find them (if includes processed) or use cached
+    mut links := if args.include {
+        p.find_links(content)! // Re-find links in processed content
+    } else {
+        p.links // Use cached links from validation
     }
-    // Process links in reverse order to maintain positions
-    for mut link in p.links.reverse() {
-        // if page not existing no point in fixing
+
+    // Filter and transform links
+    for mut link in links {
+        // Skip invalid links
        if link.status != .found {
            continue
        }
-        // if not local then no point in fixing
-        if !link.is_local_in_collection() {
+
+        // Skip cross-collection links unless enabled
+        if !args.cross_collection && !link.is_local_in_collection() {
            continue
        }
-        // Get target page
-        mut target_page := link.target_page()!
-        mut target_path := target_page.path()!
-        relative_path := target_path.path_relative(p.path()!.path)!
+        // Calculate new link path
+        new_link := p.calculate_link_path(mut link, args) or { continue }
 
-        new_link := '[${link.text}](${relative_path})'
+        // Build the complete link markdown
+        prefix := if link.is_file_link { '!' } else { '' }
+        new_link_md := '${prefix}[${link.text}](${new_link})'
 
        // Replace in content
-        content = content.replace(link.src, new_link)
+        content = content.replace(link.src, new_link_md)
    }
    return content
 }
 
-// process_cross_collection_links handles exporting cross-collection references
-// It:
-// 1. Finds all cross-collection links (collection:page format)
-// 2. Copies the target page to the export directory
-// 3. Renames the link to avoid conflicts (collectionname_pagename.md)
-// 4. Rewrites the link in the content
-fn (mut p Page) process_links(mut export_dir pathlib.Path) !string {
-    mut c := p.content(include: true)!
+// calculate_link_path returns the relative path for a link
+fn (mut p Page) calculate_link_path(mut link Link, args FixLinksArgs) !string {
+    if args.export_mode {
+        // Export mode: simple flat structure
+        return p.export_link_path(mut link)!
+    }
+    // Fix mode: filesystem paths
+    return p.filesystem_link_path(mut link)!
+}
 
-    mut links := p.find_links(c)!
+// export_link_path calculates path for export (flat structure: collection/file.md)
+fn (mut p Page) export_link_path(mut link Link) !string {
+    mut target_collection := ''
+    mut target_filename := ''
 
-    // Process links in reverse order to maintain string positions
-    for mut link in links.reverse() {
-        if link.status != .found {
-            continue
-        }
-        mut exported_filename := ''
-        if link.is_file_link {
-            mut target_file := link.target_file()!
-            mut target_path := target_file.path()!
-            // Copy target page with renamed filename
-            exported_filename = 'files/${target_file.collection.name}_${target_file.name}'
-            os.mkdir_all('${export_dir.path}/files')!
-            os.cp(target_path.path, '${export_dir.path}/${exported_filename}')!
-        } else {
-            mut target_page := link.target_page()!
-            mut target_path := target_page.path()!
-
-            // Copy target page with renamed filename
-            exported_filename = '${target_page.collection.name}_${target_page.name}.md'
-            page_content := target_page.content(include: true)!
-
-            mut exported_file := pathlib.get_file(
-                path: '${export_dir.path}/${exported_filename}'
-                create: true
-            )!
-            exported_file.write(page_content)!
-        }
-
-        mut pre := ''
-        if link.is_file_link {
-            pre = '!'
-        }
-
-        // Update link in source content
-        new_link := '${pre}[${link.text}](${exported_filename})'
-        c = c.replace(link.src, new_link)
+    if link.is_file_link {
+        mut tf := link.target_file()!
+        target_collection = tf.collection.name
+        target_filename = tf.name
+    } else {
+        mut tp := link.target_page()!
+        target_collection = tp.collection.name
+        target_filename = '${tp.name}.md'
    }
-    return c
+
+    // Same collection: just filename, different collection: ../collection/filename
+    return if link.is_local_in_collection() {
+        target_filename
+    } else {
+        '../${target_collection}/${target_filename}'
+    }
+}
+
+// filesystem_link_path calculates path using actual filesystem paths
+fn (mut p Page) filesystem_link_path(mut link Link) !string {
+    source_path := p.path()!
+
+    mut target_path := if link.is_file_link {
+        mut tf := link.target_file()!
+        tf.path()!
+    } else {
+        mut tp := link.target_page()!
+        tp.path()!
+    }
+
+    return target_path.path_relative(source_path.path)!
 }
 
 /////////////TOOLS//////////////////////////////////
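[Editor's note, not part of the patch: the fragment below is the call export.v now makes for every page (shown earlier in this patch), with comments describing the resulting link shapes. Those comments are the editor's reading of export_link_path above, not captured output.]

    // per page, during Collection.export:
    content := page.content_with_fixed_links(
        include: args.include
        cross_collection: true
        export_mode: true
    )!
    // in export mode a resolved link to a page in the same collection is rewritten as
    //   [text](other_page.md)
    // and a link into another collection as
    //   [text](../other_collection/other_page.md)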
diff --git a/lib/data/atlas/play.v b/lib/data/atlas/play.v
index d925ccee..798afb66 100644
--- a/lib/data/atlas/play.v
+++ b/lib/data/atlas/play.v
@@ -10,7 +10,8 @@ pub fn play(mut plbook PlayBook) ! {
         return
     }
 
-    mut atlases := map[string]&Atlas{}
+    // Track which atlases we've processed in this playbook
+    mut processed_atlases := map[string]bool{}
 
     mut name := ''
 
@@ -20,14 +21,15 @@ pub fn play(mut plbook PlayBook) ! {
         mut p := action.params
         name = p.get_default('name', 'main')!
         ignore := p.get_list_default('ignore', [])!
-        console.print_item("Scanning Atlas '${name}' with ignore patterns: ${ignore}\n${p}")
-        // Get or create atlas
-        mut atlas_instance := atlases[name] or {
+        console.print_item("Scanning Atlas '${name}' with ignore patterns: ${ignore}")
+        // Get or create atlas from global map
+        mut atlas_instance := if exists(name) {
+            get(name)!
+        } else {
            console.print_debug('Atlas not found, creating a new one')
-            mut new_atlas := new(name: name)!
-            atlases[name] = new_atlas
-            new_atlas
+            new(name: name)!
        }
+        processed_atlases[name] = true
 
         mut path := p.get_default('path', '')!
@@ -47,15 +49,16 @@ pub fn play(mut plbook PlayBook) ! {
         atlas_instance.scan(path: path, ignore: ignore)!
         action.done = true
-        set(atlas_instance)
+        // No need to call set() again - atlas is already in global map from new()
+        // and we're modifying it by reference
     }
 
-    mut atlas_instance_post := atlases[name] or {
-        return error("Atlas '${name}' not found. Use !!atlas.scan first.")
+    // Run init_post on all processed atlases
+    for atlas_name, _ in processed_atlases {
+        mut atlas_instance_post := get(atlas_name)!
+        atlas_instance_post.init_post()!
     }
 
-    atlas_instance_post.init_post()!
-
     // Process export actions - export collections to destination
     mut export_actions := plbook.find(filter: 'atlas.export')!
@@ -68,7 +71,7 @@ pub fn play(mut plbook PlayBook) ! {
         include := p.get_default_true('include')
         redis := p.get_default_true('redis')
 
-        mut atlas_instance := atlases[name] or {
+        mut atlas_instance := get(name) or {
            return error("Atlas '${name}' not found. Use !!atlas.scan first.")
        }
 
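[Editor's note, illustrative only, not part of the patch: the dsite.v change below adds a skip_generate option to DevArgs so a caller whose docs are already generated (for example an atlas export) can start the dev server without regenerating. A minimal sketch of such a caller, using only calls that appear in this patch:]

    import incubaid.herolib.web.docusaurus

    fn main() {
        // assumes a Docusaurus site was already configured and its docs generated elsewhere
        mut dsite := docusaurus.dsite_get('') or { panic(err) }
        dsite.dev(
            host: 'localhost'
            port: 3000
            skip_generate: true // serve the existing build without calling generate()
        ) or { panic(err) }
    }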
diff --git a/lib/web/docusaurus/dsite.v b/lib/web/docusaurus/dsite.v
index 027067a3..80f48226 100644
--- a/lib/web/docusaurus/dsite.v
+++ b/lib/web/docusaurus/dsite.v
@@ -73,6 +73,7 @@ pub mut:
     port int = 3000
     open bool = true // whether to open the browser automatically
     watch_changes bool = false // whether to watch for changes in docs and rebuild automatically
+    skip_generate bool = false // whether to skip generation (useful when docs are pre-generated, e.g., from atlas)
 }
 
 pub fn (mut s DocSite) open(args DevArgs) ! {
@@ -82,9 +83,11 @@ pub fn (mut s DocSite) open(args DevArgs) ! {
 }
 
 pub fn (mut s DocSite) dev(args DevArgs) ! {
-    s.generate()!
+    if !args.skip_generate {
+        s.generate()!
+    }
     osal.exec(
-        cmd: '
+        cmd: '
         cd ${s.path_build.path}
         bun run start -p ${args.port} -h ${args.host}
         '