Merge branch 'development' into development_heroprompt_v2
* development: ... feat: Update site page source references feat: Add announcement bar configuration ... Update the pages ... ... ... ... ... ... ... ... ... fix: Improve Docusaurus link generation logic
This commit is contained in:
@@ -91,6 +91,7 @@ fn do() ! {
|
||||
herocmds.cmd_docusaurus(mut cmd)
|
||||
herocmds.cmd_web(mut cmd)
|
||||
herocmds.cmd_sshagent(mut cmd)
|
||||
herocmds.cmd_atlas(mut cmd)
|
||||
|
||||
cmd.setup()
|
||||
cmd.parse(os.args)
|
||||
|
||||
498
examples/data/atlas/atlas_loader.py
Normal file
498
examples/data/atlas/atlas_loader.py
Normal file
@@ -0,0 +1,498 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Atlas Collection Loader for Python
|
||||
|
||||
Load Atlas collections from .collection.json files created by the V Atlas module.
|
||||
This allows Python applications to access Atlas data without running V code.
|
||||
"""
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class FileType(Enum):
    """File type enumeration"""
    # Generic (non-image) file asset.
    FILE = "file"
    # Image asset; File.is_image() keys off this member.
    IMAGE = "image"
|
||||
|
||||
|
||||
class CollectionErrorCategory(Enum):
    """Error category enumeration matching V implementation"""
    # Values mirror the category strings the V module writes into the
    # .collection.json error records (CollectionError.category).
    CIRCULAR_INCLUDE = "circular_include"
    MISSING_INCLUDE = "missing_include"
    INCLUDE_SYNTAX_ERROR = "include_syntax_error"
    INVALID_PAGE_REFERENCE = "invalid_page_reference"
    FILE_NOT_FOUND = "file_not_found"
    INVALID_COLLECTION = "invalid_collection"
    GENERAL_ERROR = "general_error"
|
||||
|
||||
|
||||
@dataclass
class CollectionError:
    """Collection error matching V CollectionError struct."""

    category: str        # machine-readable category (see CollectionErrorCategory)
    page_key: str = ""   # 'collection:page' key the error refers to, if any
    message: str = ""    # human-readable detail
    file: str = ""       # file path the error refers to, if any

    @classmethod
    def from_dict(cls, data: dict) -> 'CollectionError':
        """Build an instance from a parsed JSON dict; missing keys become ''."""
        return cls(
            category=data.get('category', ''),
            page_key=data.get('page_key', ''),
            message=data.get('message', ''),
            file=data.get('file', ''),
        )

    def __str__(self) -> str:
        """Render as ``[category][ location]: message``.

        The location part is the page key when present, otherwise the
        file path, otherwise omitted.
        """
        if self.page_key:
            where = f" [{self.page_key}]"
        elif self.file:
            where = f" [{self.file}]"
        else:
            where = ""
        return f"[{self.category}]{where}: {self.message}"
|
||||
|
||||
|
||||
@dataclass
class File:
    """File metadata matching V File struct."""

    name: str   # base name, without extension
    ext: str    # extension, without the leading dot
    path: str   # location on disk
    ftype: str  # raw type string ("file" or "image")

    @classmethod
    def from_dict(cls, data: dict) -> 'File':
        """Build an instance from a parsed JSON dict (KeyError if a field is missing)."""
        return cls(data['name'], data['ext'], data['path'], data['ftype'])

    @property
    def file_type(self) -> 'FileType':
        """The raw `ftype` string as a FileType member (ValueError if unknown)."""
        return FileType(self.ftype)

    @property
    def file_name(self) -> str:
        """Full filename with extension."""
        return self.name + "." + self.ext

    def is_image(self) -> bool:
        """True when this entry is an image."""
        return FileType(self.ftype) == FileType.IMAGE

    def read(self) -> bytes:
        """Return the raw on-disk content as bytes."""
        return Path(self.path).read_bytes()
|
||||
|
||||
|
||||
@dataclass
class Page:
    """Page metadata matching V Page struct."""

    name: str             # page name without extension
    path: str             # markdown file location on disk
    collection_name: str  # owning collection

    @classmethod
    def from_dict(cls, data: dict) -> 'Page':
        """Build an instance from a parsed JSON dict (KeyError if a field is missing)."""
        return cls(**{k: data[k] for k in ('name', 'path', 'collection_name')})

    def key(self) -> str:
        """Return the page key in the 'collection:page' format."""
        return f"{self.collection_name}:{self.name}"

    def read_content(self) -> str:
        """Read the page's markdown content from disk as UTF-8 text."""
        return Path(self.path).read_text(encoding='utf-8')
|
||||
|
||||
|
||||
@dataclass
class Collection:
    """Collection matching V Collection struct.

    Aggregates the pages, images and other files of one documentation
    collection, plus any errors recorded while scanning/validating it.
    Field annotations use string forward references so the class is
    usable regardless of definition order.
    """
    name: str
    path: str
    # Keyed by item name (without extension), mirroring V's maps.
    pages: Dict[str, 'Page'] = field(default_factory=dict)
    images: Dict[str, 'File'] = field(default_factory=dict)
    files: Dict[str, 'File'] = field(default_factory=dict)
    errors: List['CollectionError'] = field(default_factory=list)

    def page_get(self, name: str) -> Optional['Page']:
        """Get a page by name, or None if absent."""
        return self.pages.get(name)

    def page_exists(self, name: str) -> bool:
        """Check if page exists."""
        return name in self.pages

    def image_get(self, name: str) -> Optional['File']:
        """Get an image by name, or None if absent."""
        return self.images.get(name)

    def image_exists(self, name: str) -> bool:
        """Check if image exists."""
        return name in self.images

    def file_get(self, name: str) -> Optional['File']:
        """Get a file by name, or None if absent."""
        return self.files.get(name)

    def file_exists(self, name: str) -> bool:
        """Check if file exists."""
        return name in self.files

    def has_errors(self) -> bool:
        """Check if collection has errors."""
        return len(self.errors) > 0

    def error_summary(self) -> Dict[str, int]:
        """Get error count by category."""
        summary: Dict[str, int] = {}
        for err in self.errors:
            summary[err.category] = summary.get(err.category, 0) + 1
        return summary

    def print_errors(self):
        """Print all errors to console."""
        if not self.has_errors():
            print(f"Collection {self.name}: No errors")
            return

        print(f"\nCollection {self.name} - Errors ({len(self.errors)})")
        print("=" * 60)
        for err in self.errors:
            print(f" {err}")

    @classmethod
    def from_json(cls, json_path: Path) -> 'Collection':
        """
        Load collection from .collection.json file

        Args:
            json_path: Path to .collection.json file

        Returns:
            Collection instance

        Raises:
            KeyError: if the mandatory 'name'/'path' fields are missing.
            json.JSONDecodeError: if the file is not valid JSON.
        """
        with open(json_path, 'r', encoding='utf-8') as f:
            data = json.load(f)

        # V serializes pages/images/files as maps and errors as a list;
        # rebuild each entry with the matching from_dict constructor.
        return cls(
            name=data['name'],
            path=data['path'],
            pages={n: Page.from_dict(p) for n, p in data.get('pages', {}).items()},
            images={n: File.from_dict(d) for n, d in data.get('images', {}).items()},
            files={n: File.from_dict(d) for n, d in data.get('files', {}).items()},
            errors=[CollectionError.from_dict(e) for e in data.get('errors', [])],
        )
|
||||
|
||||
|
||||
@dataclass
class Atlas:
    """Atlas matching V Atlas struct: a named set of collections.

    Cross-collection items are addressed with 'collection:item' keys;
    malformed keys and unknown collections resolve to None rather than
    raising.
    """
    name: str = "default"
    collections: Dict[str, 'Collection'] = field(default_factory=dict)

    def add_collection(self, collection: 'Collection'):
        """Add a collection to the atlas (replaces any same-named one)."""
        self.collections[collection.name] = collection

    def get_collection(self, name: str) -> Optional['Collection']:
        """Get a collection by name, or None."""
        return self.collections.get(name)

    def collection_exists(self, name: str) -> bool:
        """Check if collection exists."""
        return name in self.collections

    def _resolve(self, key: str):
        """Split a 'collection:item' key and look up the collection.

        Returns (collection, item_name), or (None, None) when the key has
        no ':' or the collection is unknown. Shared by the page/image/file
        accessors below.
        """
        parts = key.split(':', 1)
        if len(parts) != 2:
            return None, None
        col = self.get_collection(parts[0])
        if col is None:
            return None, None
        return col, parts[1]

    def page_get(self, key: str) -> Optional['Page']:
        """
        Get a page using format 'collection:page'

        Args:
            key: Page key in format 'collection:page'

        Returns:
            Page or None if not found
        """
        col, item = self._resolve(key)
        return col.page_get(item) if col else None

    def page_exists(self, key: str) -> bool:
        """Check if page exists using format 'collection:page'"""
        return self.page_get(key) is not None

    def image_get(self, key: str) -> Optional['File']:
        """Get an image using format 'collection:image'"""
        col, item = self._resolve(key)
        return col.image_get(item) if col else None

    def image_exists(self, key: str) -> bool:
        """Check if image exists using format 'collection:image'"""
        return self.image_get(key) is not None

    def file_get(self, key: str) -> Optional['File']:
        """Get a file using format 'collection:file'"""
        col, item = self._resolve(key)
        return col.file_get(item) if col else None

    def file_exists(self, key: str) -> bool:
        """Check if file exists using format 'collection:file'.

        Added for symmetry with page_exists/image_exists.
        """
        return self.file_get(key) is not None

    def list_collections(self) -> List[str]:
        """List all collection names"""
        return sorted(self.collections)

    def list_pages(self) -> Dict[str, List[str]]:
        """List all pages grouped by collection"""
        return {cn: sorted(col.pages) for cn, col in self.collections.items()}

    def has_errors(self) -> bool:
        """Check if any collection has errors"""
        return any(col.has_errors() for col in self.collections.values())

    def print_all_errors(self):
        """Print errors from all collections"""
        for col in self.collections.values():
            if col.has_errors():
                col.print_errors()

    @classmethod
    def load_collection(cls, path: str, name: str = "default") -> 'Atlas':
        """
        Load a single collection from a path.

        Args:
            path: Path to the collection directory containing .collection.json
            name: Name for the atlas instance

        Returns:
            Atlas with the loaded collection

        Raises:
            FileNotFoundError: if the directory has no .collection.json

        Example:
            atlas = Atlas.load_collection('/path/to/my_collection')
            col = atlas.get_collection('my_collection')
        """
        atlas = cls(name=name)
        collection_path = Path(path) / '.collection.json'

        if not collection_path.exists():
            raise FileNotFoundError(
                f"No .collection.json found at {path}\n"
                f"Make sure to run collection.save() in V first"
            )

        collection = Collection.from_json(collection_path)
        atlas.add_collection(collection)

        return atlas

    @classmethod
    def load_from_directory(cls, path: str, name: str = "default") -> 'Atlas':
        """
        Walk directory tree and load all collections.

        Args:
            path: Root path to scan for .collection.json files
            name: Name for the atlas instance

        Returns:
            Atlas with all found collections

        Raises:
            FileNotFoundError: if the root path does not exist.

        Example:
            atlas = Atlas.load_from_directory('/path/to/docs')
            print(f"Loaded {len(atlas.collections)} collections")
        """
        atlas = cls(name=name)
        root = Path(path)

        if not root.exists():
            raise FileNotFoundError(f"Path not found: {path}")

        # Walk the tree looking for .collection.json files; a broken file
        # is reported as a warning and skipped rather than aborting.
        for json_file in root.rglob('.collection.json'):
            try:
                collection = Collection.from_json(json_file)
                atlas.add_collection(collection)
            except Exception as e:
                print(f"Warning: Failed to load {json_file}: {e}")

        if len(atlas.collections) == 0:
            print(f"Warning: No collections found in {path}")

        return atlas
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Example Usage Functions
|
||||
# ============================================================================
|
||||
|
||||
def example_load_single_collection():
    """Example: Load a single collection.

    Requires /tmp/atlas_test/col1/.collection.json to exist (created by
    running collection.save() in V first); raises FileNotFoundError otherwise.
    """
    print("\n" + "="*60)
    print("Example 1: Load Single Collection")
    print("="*60)

    atlas = Atlas.load_collection(
        path='/tmp/atlas_test/col1',
        name='my_atlas'
    )

    # Get collection
    col = atlas.get_collection('col1')
    if col:
        print(f"\nLoaded collection: {col.name}")
        print(f" Path: {col.path}")
        print(f" Pages: {len(col.pages)}")
        print(f" Images: {len(col.images)}")
        print(f" Files: {len(col.files)}")

        # Print errors if any
        if col.has_errors():
            col.print_errors()
|
||||
|
||||
|
||||
def example_load_all_collections():
    """Example: Load all collections from a directory tree.

    Scans /tmp/atlas_test recursively for .collection.json files and
    prints a summary of each collection found.
    """
    print("\n" + "="*60)
    print("Example 2: Load All Collections")
    print("="*60)

    atlas = Atlas.load_from_directory(
        path='/tmp/atlas_test',
        name='docs_atlas'
    )

    print(f"\nLoaded {len(atlas.collections)} collections:")

    # List all collections
    for col_name in atlas.list_collections():
        col = atlas.get_collection(col_name)
        print(f"\n Collection: {col_name}")
        print(f" Path: {col.path}")
        print(f" Pages: {len(col.pages)}")
        print(f" Images: {len(col.images)}")
        print(f" Errors: {len(col.errors)}")
|
||||
|
||||
|
||||
def example_access_pages():
    """Example: Access pages and content.

    Looks up one page by its 'collection:page' key, reads its markdown
    content from disk, then lists every page grouped by collection.
    """
    print("\n" + "="*60)
    print("Example 3: Access Pages and Content")
    print("="*60)

    atlas = Atlas.load_from_directory('/tmp/atlas_test')

    # Get a specific page
    page = atlas.page_get('col1:page1')
    if page:
        print(f"\nPage: {page.name}")
        print(f" Key: {page.key()}")
        print(f" Path: {page.path}")

        # Read content
        content = page.read_content()
        print(f" Content length: {len(content)} chars")
        print(f" First 100 chars: {content[:100]}")

    # List all pages
    print("\nAll pages:")
    pages = atlas.list_pages()
    for col_name, page_names in pages.items():
        print(f"\n {col_name}:")
        for page_name in page_names:
            print(f" - {page_name}")
|
||||
|
||||
|
||||
def example_error_handling():
    """Example: Working with errors.

    Shows the atlas-wide error check/print helpers and the per-collection
    per-category error summary.
    """
    print("\n" + "="*60)
    print("Example 4: Error Handling")
    print("="*60)

    atlas = Atlas.load_from_directory('/tmp/atlas_test')

    # Check for errors across all collections
    if atlas.has_errors():
        print("\nFound errors in collections:")
        atlas.print_all_errors()
    else:
        print("\nNo errors found!")

    # Get error summary for a specific collection
    col = atlas.get_collection('col1')
    if col and col.has_errors():
        summary = col.error_summary()
        print(f"\nError summary for {col.name}:")
        for category, count in summary.items():
            print(f" {category}: {count}")
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Script entry point: prints an overview only. The example runs are
    # left commented out because they depend on /tmp/atlas_test fixtures
    # produced by the V Atlas module.
    print("Atlas Loader - Python Implementation")
    print("="*60)
    print("\nThis script demonstrates loading Atlas collections")
    print("from .collection.json files created by the V Atlas module.")

    # Uncomment to run examples:
    # example_load_single_collection()
    # example_load_all_collections()
    # example_access_pages()
    # example_error_handling()

    print("\nUncomment example functions in __main__ to see them in action.")
|
||||
83
examples/data/atlas/example_save_load.vsh
Executable file
83
examples/data/atlas/example_save_load.vsh
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.data.atlas
|
||||
import incubaid.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
// Example: Save and Load Atlas Collections
// End-to-end demo: build a small collection on disk, scan it, validate
// links, save it to .collection.json, then reload it in a fresh Atlas.

println('Atlas Save/Load Example')
println('============================================================')

// Setup test directory (best-effort cleanup of a previous run)
test_dir := '/tmp/atlas_example'
os.rmdir_all(test_dir) or {}
os.mkdir_all(test_dir)!

// Create a collection with some content
col_path := '${test_dir}/docs'
os.mkdir_all(col_path)!

// The .collection marker file gives the collection its name
mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
cfile.write('name:docs')!

mut page1 := pathlib.get_file(path: '${col_path}/intro.md', create: true)!
page1.write('# Introduction\n\nWelcome to the docs!')!

// guide.md pulls in intro via the !!include directive
mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
page2.write('# Guide\n\n!!include docs:intro\n\nMore content here.')!

// Create and scan atlas
println('\n1. Creating Atlas and scanning...')
mut a := atlas.new(name: 'my_docs')!
a.scan(path: test_dir)!

println(' Found ${a.collections.len} collection(s)')

// Validate links
println('\n2. Validating links...')
a.validate_links()!

col := a.get_collection('docs')!
if col.has_errors() {
	println(' Errors found:')
	col.print_errors()
} else {
	println(' No errors found!')
}

// Save all collections
println('\n3. Saving collections to .collection.json...')
a.save_all()!
println(' Saved to ${col_path}/.collection.json')

// Load in a new atlas
println('\n4. Loading collections in new Atlas...')
mut a2 := atlas.new(name: 'loaded_docs')!
a2.load_from_directory(test_dir)!

println(' Loaded ${a2.collections.len} collection(s)')

// Access loaded data
println('\n5. Accessing loaded data...')
loaded_col := a2.get_collection('docs')!
println(' Collection: ${loaded_col.name}')
println(' Pages: ${loaded_col.pages.len}')

for name, page in loaded_col.pages {
	println(' - ${name}: ${page.path.path}')
}

// Read page content
println('\n6. Reading page content...')
mut intro_page := loaded_col.page_get('intro')!
content := intro_page.read_content()!
println(' intro.md content:')
println(' ${content}')

println('\n✓ Example completed successfully!')
println('\nNow you can use the Python loader:')
println(' python3 lib/data/atlas/atlas_loader.py')

// Cleanup
os.rmdir_all(test_dir) or {}
|
||||
21
examples/data/atlas/heroscript_example.vsh
Normal file
21
examples/data/atlas/heroscript_example.vsh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.core.playbook
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// HeroScript demo: scan a testdata directory, validate it, then export
// (include processing on, redis metadata off).
heroscript := "
!!atlas.scan
path: '~/code/github/incubaid/herolib/lib/data/atlas/testdata'

!!atlas.validate

!!atlas.export
destination: '/tmp/atlas_export_test'
include: true
redis: false
"

// Parse the script into a playbook and let the atlas module execute it.
mut plbook := playbook.new(text: heroscript)!
atlas.play(mut plbook)!

println('✅ Atlas HeroScript processing complete!')
|
||||
182
lib/core/herocmds/atlas.v
Normal file
182
lib/core/herocmds/atlas.v
Normal file
@@ -0,0 +1,182 @@
|
||||
module herocmds
|
||||
|
||||
import incubaid.herolib.ui.console
|
||||
import incubaid.herolib.data.atlas
|
||||
import incubaid.herolib.core.playcmds
|
||||
import incubaid.herolib.develop.gittools
|
||||
import os
|
||||
import cli { Command, Flag }
|
||||
|
||||
// Register the `atlas` subcommand (scan/export of atlas collections)
// on the given root command and return the root.
pub fn cmd_atlas(mut cmdroot Command) Command {
	mut cmd_run := Command{
		name: 'atlas'
		description: 'Scan and export atlas collections.'
		required_args: 0
		execute: cmd_atlas_execute
	}

	// -r: clean state before running (also forwarded to git checkout)
	cmd_run.add_flag(Flag{
		flag: .bool
		required: false
		name: 'reset'
		abbrev: 'r'
		description: 'Reset and clean before operations.'
	})

	// -u: fetch the atlas source from a git URL instead of a local path
	cmd_run.add_flag(Flag{
		flag: .string
		required: false
		name: 'url'
		abbrev: 'u'
		description: 'Git URL where atlas source is.'
	})

	// -p: local path with the collections (falls back to cwd in execute)
	cmd_run.add_flag(Flag{
		flag: .string
		required: false
		name: 'path'
		abbrev: 'p'
		description: 'Path where atlas collections are located.'
	})

	cmd_run.add_flag(Flag{
		flag: .string
		required: false
		name: 'name'
		abbrev: 'n'
		description: 'Atlas instance name (default: "default").'
	})

	cmd_run.add_flag(Flag{
		flag: .string
		required: false
		name: 'destination'
		abbrev: 'd'
		description: 'Export destination path.'
	})

	// -s / -e select the operations; when neither is given the execute
	// handler enables both.
	cmd_run.add_flag(Flag{
		flag: .bool
		required: false
		name: 'scan'
		abbrev: 's'
		description: 'Scan directories for collections.'
	})

	cmd_run.add_flag(Flag{
		flag: .bool
		required: false
		name: 'export'
		abbrev: 'e'
		description: 'Export collections to destination.'
	})

	// Negative flags: include processing and redis metadata default to on.
	cmd_run.add_flag(Flag{
		flag: .bool
		required: false
		name: 'no-include'
		description: 'Skip processing !!include actions during export.'
	})

	cmd_run.add_flag(Flag{
		flag: .bool
		required: false
		name: 'no-redis'
		description: 'Skip storing metadata in Redis during export.'
	})

	cmd_run.add_flag(Flag{
		flag: .bool
		required: false
		name: 'update'
		description: 'Update environment and git pull before operations.'
	})

	cmdroot.add_command(cmd_run)
	return cmdroot
}
|
||||
|
||||
// Execute the `atlas` subcommand: resolve the source path (local or git),
// run any heroscript found there, then scan and/or export collections.
fn cmd_atlas_execute(cmd Command) ! {
	// ---------- FLAGS ----------
	mut reset := cmd.flags.get_bool('reset') or { false }
	mut update := cmd.flags.get_bool('update') or { false }
	mut scan := cmd.flags.get_bool('scan') or { false }
	mut export := cmd.flags.get_bool('export') or { false }

	// Include and redis default to true unless explicitly disabled
	mut no_include := cmd.flags.get_bool('no-include') or { false }
	mut no_redis := cmd.flags.get_bool('no-redis') or { false }
	mut include := !no_include
	mut redis := !no_redis

	// ---------- PATH LOGIC ----------
	mut path := cmd.flags.get_string('path') or { '' }
	mut url := cmd.flags.get_string('url') or { '' }
	mut name := cmd.flags.get_string('name') or { 'default' }
	mut destination := cmd.flags.get_string('destination') or { '' }

	// No explicit source given: operate on the current working directory
	if path == '' && url == '' {
		path = os.getwd()
	}

	// gittools.path resolves either a git url (clone/pull) or a plain path
	atlas_path := gittools.path(
		git_url: url
		path: path
		git_reset: reset
		git_pull: update
	)!

	console.print_header('Running Atlas for: ${atlas_path.path}')

	// Run HeroScript if exists
	playcmds.run(
		heroscript_path: atlas_path.path
		reset: false
	)!

	// Create or get atlas instance
	mut a := if atlas.atlas_exists(name) {
		atlas.atlas_get(name)!
	} else {
		atlas.new(name: name)!
	}

	// Default behavior: scan and export if no flags specified
	if !scan && !export {
		scan = true
		export = true
	}

	// Execute operations
	if scan {
		console.print_header('Scanning collections...')
		a.scan(path: atlas_path.path, save: true)!
		console.print_green('✓ Scan complete: ${a.collections.len} collection(s) found')
	}

	if export {
		// Default export target lives next to the source
		if destination == '' {
			destination = '${atlas_path.path}/output'
		}

		console.print_header('Exporting collections to: ${destination}')
		console.print_item('Include processing: ${include}')
		console.print_item('Redis metadata: ${redis}')

		a.export(
			destination: destination
			reset: reset
			include: include
			redis: redis
		)!

		console.print_green('✓ Export complete to ${destination}')

		// Print any errors encountered during export
		for _, col in a.collections {
			if col.has_errors() {
				col.print_errors()
			}
		}
	}
}
|
||||
120
lib/data/atlas/atlas.v
Normal file
120
lib/data/atlas/atlas.v
Normal file
@@ -0,0 +1,120 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.texttools
|
||||
import incubaid.herolib.core.pathlib
|
||||
|
||||
// Process-wide registry of Atlas instances, keyed by (normalized) name.
// Declared `shared`, so every access must go through lock/rlock.
__global (
	atlases shared map[string]&Atlas
)

// An Atlas is a named set of collections.
@[heap]
pub struct Atlas {
pub mut:
	name        string
	collections map[string]&Collection
}
|
||||
|
||||
@[params]
pub struct AtlasNewArgs {
pub mut:
	name string = 'default' // registry key; normalized via texttools.name_fix
}

// Create a new Atlas, register it in the global registry and return it.
// The registered entry and the returned reference are the SAME heap
// object, so later mutations through the returned handle are visible
// via atlas_get().
pub fn new(args AtlasNewArgs) !&Atlas {
	name := texttools.name_fix(args.name)

	// Allocate once on the heap; previously the struct was built on the
	// stack and atlas_set() stored the address of a by-value parameter
	// copy, so new() returned a different object than atlas_get() would
	// later resolve.
	mut a := &Atlas{
		name: name
	}

	atlas_set(a)
	return a
}

// Get Atlas from global map; error when the name is not registered.
pub fn atlas_get(name string) !&Atlas {
	rlock atlases {
		if name in atlases {
			return atlases[name] or { return error('Atlas ${name} not found') }
		}
	}
	return error("Atlas '${name}' not found")
}

// Check if Atlas exists in the global registry.
pub fn atlas_exists(name string) bool {
	rlock atlases {
		return name in atlases
	}
}

// List all registered Atlas names.
pub fn atlas_list() []string {
	rlock atlases {
		return atlases.keys()
	}
}

// Store Atlas in global map. Takes a reference so the stored entry
// aliases the caller's instance instead of a private copy.
fn atlas_set(a &Atlas) {
	lock atlases {
		atlases[a.name] = a
	}
}
|
||||
|
||||
@[params]
pub struct AddCollectionArgs {
pub mut:
	name string @[required]
	path string @[required]
}

// Add a collection to the Atlas.
// Normalizes the name, creates and scans the collection, then registers
// it; errors when a collection with that name already exists.
pub fn (mut a Atlas) add_collection(args AddCollectionArgs) ! {
	name := texttools.name_fix(args.name)

	if name in a.collections {
		return error('Collection ${name} already exists in Atlas ${a.name}')
	}

	mut col := a.new_collection(name: name, path: args.path)!
	col.scan()!

	// NOTE(review): takes the address of a local; relies on V promoting
	// it to the heap — confirm Collection is declared @[heap].
	a.collections[name] = &col
}
|
||||
|
||||
// Scan a path for collections.
// Walks the directory, validates and fixes links in everything found,
// and optionally persists each collection (args.save).
pub fn (mut a Atlas) scan(args ScanArgs) ! {
	mut path := pathlib.get_dir(path: args.path)!
	a.scan_directory(mut path)!
	a.validate_links()!
	a.fix_links()!
	if args.save {
		a.save()!
	}
}
|
||||
|
||||
// Get a collection by name.
// Returns a CollectionNotFound error when the name is not registered.
pub fn (a Atlas) get_collection(name string) !&Collection {
	return a.collections[name] or {
		return CollectionNotFound{
			name: name
			msg: 'Collection not found in Atlas ${a.name}'
		}
	}
}
|
||||
|
||||
// Validate all links in all collections (errors accumulate per collection).
pub fn (mut a Atlas) validate_links() ! {
	for _, mut col in a.collections {
		col.validate_links()!
	}
}
|
||||
|
||||
// Fix all links in all collections (rewrites pages in place).
pub fn (mut a Atlas) fix_links() ! {
	for _, mut col in a.collections {
		col.fix_links()!
	}
}
|
||||
207
lib/data/atlas/atlas_save_test.v
Normal file
207
lib/data/atlas/atlas_save_test.v
Normal file
@@ -0,0 +1,207 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
// Shared scratch directory for every test in this file.
const test_dir = '/tmp/atlas_save_test'

// Recreate the scratch directory before the suite runs.
fn testsuite_begin() {
	os.rmdir_all(test_dir) or {}
	os.mkdir_all(test_dir)!
}

// Remove the scratch directory after the suite.
fn testsuite_end() {
	os.rmdir_all(test_dir) or {}
}
|
||||
|
||||
// Round-trip: build a collection on disk, scan+save it, reload it in a
// fresh Atlas, and verify name, pages, and page content survive.
fn test_save_and_load_basic() {
	// Create a collection with some content
	col_path := '${test_dir}/docs'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:docs')!

	mut page1 := pathlib.get_file(path: '${col_path}/intro.md', create: true)!
	page1.write('# Introduction\n\nWelcome to the docs!')!

	mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
	page2.write('# Guide\n\nMore content here.')!

	// Create and scan atlas
	mut a := new(name: 'my_docs')!
	a.scan(path: test_dir)!

	assert a.collections.len == 1

	// Save all collections
	a.save()!
	assert os.exists('${col_path}/.collection.json')

	// Load in a new atlas
	mut a2 := new(name: 'loaded_docs')!
	a2.load_from_directory(test_dir)!

	assert a2.collections.len == 1

	// Access loaded data
	loaded_col := a2.get_collection('docs')!
	assert loaded_col.name == 'docs'
	assert loaded_col.pages.len == 2

	// Verify pages exist
	assert loaded_col.page_exists('intro')
	assert loaded_col.page_exists('guide')

	// Read page content
	mut intro_page := loaded_col.page_get('intro')!
	content := intro_page.read_content()!
	assert content.contains('# Introduction')
	assert content.contains('Welcome to the docs!')
}
|
||||
|
||||
// A collection whose page uses an !!include directive validates cleanly
// and still round-trips through save/load without errors.
fn test_save_and_load_with_includes() {
	col_path := '${test_dir}/docs_include'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:docs')!

	mut page1 := pathlib.get_file(path: '${col_path}/intro.md', create: true)!
	page1.write('# Introduction\n\nWelcome to the docs!')!

	mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
	page2.write('# Guide\n\n!!include docs:intro\n\nMore content here.')!

	// Create and scan atlas
	mut a := new(name: 'my_docs')!
	a.scan(path: '${test_dir}/docs_include')!

	// Validate links (should find the include)
	a.validate_links()!

	col := a.get_collection('docs')!
	assert !col.has_errors()

	// Save
	a.save()!

	// Load
	mut a2 := new(name: 'loaded')!
	a2.load_from_directory('${test_dir}/docs_include')!

	loaded_col := a2.get_collection('docs')!
	assert loaded_col.pages.len == 2
	assert !loaded_col.has_errors()
}
|
||||
|
||||
// A collection with a broken markdown link still round-trips: recorded
// errors are persisted to JSON and restored on load.
fn test_save_and_load_with_errors() {
	col_path := '${test_dir}/docs_errors'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:docs')!

	// Create page with broken link
	mut page1 := pathlib.get_file(path: '${col_path}/broken.md', create: true)!
	page1.write('[Broken link](nonexistent)')!

	// Create and scan atlas
	mut a := new(name: 'my_docs')!
	a.scan(path: '${test_dir}/docs_errors')!

	// Validate - will generate errors
	a.validate_links()!

	col := a.get_collection('docs')!
	assert col.has_errors()
	initial_error_count := col.errors.len

	// Save with errors
	a.save()!

	// Load
	mut a2 := new(name: 'loaded')!
	a2.load_from_directory('${test_dir}/docs_errors')!

	loaded_col := a2.get_collection('docs')!
	assert loaded_col.has_errors()
	assert loaded_col.errors.len == initial_error_count
	// error_cache is a Collection field not shown in this view — presumably
	// a dedup map rebuilt on load; verify against collection.v.
	assert loaded_col.error_cache.len == initial_error_count
}
|
||||
|
||||
// Verifies that multiple sibling collections under one directory are all
// saved and can be reloaded with their pages intact.
fn test_save_and_load_multiple_collections() {
	// Create multiple collections
	col1_path := '${test_dir}/multi/col1'
	col2_path := '${test_dir}/multi/col2'

	os.mkdir_all(col1_path)!
	os.mkdir_all(col2_path)!

	mut cfile1 := pathlib.get_file(path: '${col1_path}/.collection', create: true)!
	cfile1.write('name:col1')!

	mut cfile2 := pathlib.get_file(path: '${col2_path}/.collection', create: true)!
	cfile2.write('name:col2')!

	mut page1 := pathlib.get_file(path: '${col1_path}/page1.md', create: true)!
	page1.write('# Page 1')!

	mut page2 := pathlib.get_file(path: '${col2_path}/page2.md', create: true)!
	page2.write('# Page 2')!

	// Create and save
	mut a := new(name: 'multi')!
	a.scan(path: '${test_dir}/multi')!

	assert a.collections.len == 2

	a.save()!

	// Load from directory
	mut a2 := new(name: 'loaded')!
	a2.load_from_directory('${test_dir}/multi')!

	assert a2.collections.len == 2
	assert a2.get_collection('col1')!.page_exists('page1')
	assert a2.get_collection('col2')!.page_exists('page2')
}
|
||||
|
||||
// Verifies that images registered during scan survive a save/load
// round-trip and keep their name, extension and file type.
fn test_save_and_load_with_images() {
	col_path := '${test_dir}/docs_images'
	os.mkdir_all(col_path)!
	os.mkdir_all('${col_path}/img')!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:docs')!

	mut page := pathlib.get_file(path: '${col_path}/page.md', create: true)!
	page.write('# Page with image')!

	// Create a dummy image file
	mut img := pathlib.get_file(path: '${col_path}/img/test.png', create: true)!
	img.write('fake png data')!

	// Create and scan
	mut a := new(name: 'my_docs')!
	a.scan(path: '${test_dir}/docs_images')!

	col := a.get_collection('docs')!
	assert col.images.len == 1
	assert col.image_exists('test')

	// Save
	a.save()!

	// Load
	mut a2 := new(name: 'loaded')!
	a2.load_from_directory('${test_dir}/docs_images')!

	loaded_col := a2.get_collection('docs')!
	assert loaded_col.images.len == 1
	assert loaded_col.image_exists('test')

	img_file := loaded_col.image_get('test')!
	assert img_file.file_name() == 'test.png'
	assert img_file.is_image()
}
|
||||
449
lib/data/atlas/atlas_test.v
Normal file
449
lib/data/atlas/atlas_test.v
Normal file
@@ -0,0 +1,449 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
const test_base = '/tmp/atlas_test'
|
||||
|
||||
// Test fixture: start each run from an empty /tmp/atlas_test tree.
fn testsuite_begin() {
	os.rmdir_all(test_base) or {}
	os.mkdir_all(test_base)!
}
|
||||
|
||||
// Test fixture: best-effort cleanup of the temp tree after the suite.
fn testsuite_end() {
	os.rmdir_all(test_base) or {}
}
|
||||
|
||||
// Verifies that a fresh Atlas carries its name and no collections.
fn test_create_atlas() {
	mut a := new(name: 'test_atlas')!
	assert a.name == 'test_atlas'
	assert a.collections.len == 0
}
|
||||
|
||||
// Verifies add_collection registers a collection by name.
fn test_add_collection() {
	// Create test collection
	col_path := '${test_base}/col1'
	os.mkdir_all(col_path)!
	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:col1')!

	mut page := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page.write('# Page 1\n\nContent here.')!

	mut a := new(name: 'test')!
	a.add_collection(name: 'col1', path: col_path)!

	assert a.collections.len == 1
	assert 'col1' in a.collections
}
|
||||
|
||||
// Verifies scan discovers a nested .collection directory and its pages.
fn test_scan() {
	// Create test structure
	os.mkdir_all('${test_base}/docs/guides')!
	mut cfile := pathlib.get_file(path: '${test_base}/docs/guides/.collection', create: true)!
	cfile.write('name:guides')!

	mut page := pathlib.get_file(path: '${test_base}/docs/guides/intro.md', create: true)!
	page.write('# Introduction')!

	mut a := new()!
	a.scan(path: '${test_base}/docs')!

	assert a.collections.len == 1
	col := a.get_collection('guides')!
	assert col.page_exists('intro')
}
|
||||
|
||||
// Verifies export writes pages and the .collection marker to destination.
fn test_export() {
	// Setup
	col_path := '${test_base}/source/col1'
	export_path := '${test_base}/export'

	os.mkdir_all(col_path)!
	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:col1')!

	mut page := pathlib.get_file(path: '${col_path}/test.md', create: true)!
	page.write('# Test Page')!

	mut a := new()!
	a.add_collection(name: 'col1', path: col_path)!

	a.export(destination: export_path, redis: false)!

	assert os.exists('${export_path}/col1/test.md')
	assert os.exists('${export_path}/col1/.collection')
}
|
||||
|
||||
// Verifies that export with include: true inlines included page content
// and strips the !!include directive from the output.
fn test_export_with_includes() {
	// Setup: Create pages with includes
	col_path := '${test_base}/include_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col')!

	// Page 1: includes page 2
	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('# Page 1\n\n!!include test_col:page2\n\nEnd of page 1')!

	// Page 2: standalone content
	mut page2 := pathlib.get_file(path: '${col_path}/page2.md', create: true)!
	page2.write('## Page 2 Content\n\nThis is included.')!

	mut a := new()!
	a.add_collection(name: 'test_col', path: col_path)!

	export_path := '${test_base}/export_include'
	a.export(destination: export_path, include: true)!

	// Verify exported page1 has page2 content included
	exported := os.read_file('${export_path}/test_col/page1.md')!
	assert exported.contains('Page 2 Content')
	assert exported.contains('This is included')
	assert !exported.contains('!!include')
}
|
||||
|
||||
// Verifies that export with include: false leaves !!include directives as-is.
fn test_export_without_includes() {
	col_path := '${test_base}/no_include_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col2')!

	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('# Page 1\n\n!!include test_col2:page2\n\nEnd')!

	mut a := new()!
	a.add_collection(name: 'test_col2', path: col_path)!

	export_path := '${test_base}/export_no_include'
	a.export(destination: export_path, include: false)!

	// Verify exported page1 still has include action
	exported := os.read_file('${export_path}/test_col2/page1.md')!
	assert exported.contains('!!include')
}
|
||||
|
||||
// Verifies Collection.error deduplicates on (category, page_key) hash:
// identical reports collapse, a different page_key is a new error.
fn test_error_deduplication() {
	mut a := new(name: 'test')!
	mut col := a.new_collection(name: 'test', path: test_base)!

	// Report same error twice
	col.error(
		category: .missing_include
		page_key: 'test:page1'
		message: 'Test error'
	)

	col.error(
		category: .missing_include
		page_key: 'test:page1'
		message: 'Test error' // Same hash, should be deduplicated
	)

	assert col.errors.len == 1

	// Different page_key = different hash
	col.error(
		category: .missing_include
		page_key: 'test:page2'
		message: 'Test error'
	)

	assert col.errors.len == 2
}
|
||||
|
||||
// Verifies CollectionError.hash ignores the message: same category and
// page_key must hash identically regardless of message text.
fn test_error_hash() {
	err1 := CollectionError{
		category: .missing_include
		page_key: 'col:page1'
		message: 'Error message'
	}

	err2 := CollectionError{
		category: .missing_include
		page_key: 'col:page1'
		message: 'Different message' // Hash is same!
	}

	assert err1.hash() == err2.hash()
}
|
||||
|
||||
// Verifies find_links classifies markdown links: local page refs are kept
// (including collection:page and path-based forms), external URLs and
// anchors are not local.
fn test_find_links() {
	content := '
# Test Page

[Link 1](page1)
[Link 2](guides:intro)
[Link 3](/path/to/page2)
[External](https://example.com)
[Anchor](#section)
'

	links := find_links(content)

	// Should find 3 local links
	local_links := links.filter(it.is_local)
	assert local_links.len == 3

	// Check collection:page format
	link2 := local_links[1]
	assert link2.collection == 'guides'
	assert link2.page == 'intro'

	// Check path-based link (only filename used)
	link3 := local_links[2]
	assert link3.page == 'page2'
	assert link3.collection == ''
}
|
||||
|
||||
// Verifies validate_links reports nothing when all link targets exist.
fn test_validate_links() {
	// Setup
	col_path := '${test_base}/link_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col')!

	// Create page1 with valid link
	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('[Link to page2](page2)')!

	// Create page2 (target exists)
	mut page2 := pathlib.get_file(path: '${col_path}/page2.md', create: true)!
	page2.write('# Page 2')!

	mut a := new()!
	a.add_collection(name: 'test_col', path: col_path)!

	// Validate
	a.validate_links()!

	// Should have no errors
	col := a.get_collection('test_col')!
	assert col.errors.len == 0
}
|
||||
|
||||
// Verifies validate_links records an .invalid_page_reference error for a
// link whose target page does not exist.
fn test_validate_broken_links() {
	// Setup
	col_path := '${test_base}/broken_link_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col')!

	// Create page with broken link
	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('[Broken link](nonexistent)')!

	mut a := new()!
	a.add_collection(name: 'test_col', path: col_path)!

	// Validate
	a.validate_links()!

	// Should have error
	col := a.get_collection('test_col')!
	assert col.errors.len == 1
	assert col.errors[0].category == .invalid_page_reference
}
|
||||
|
||||
// Verifies Page.fix_links rewrites an extension-less local link to the
// target's .md filename.
fn test_fix_links() {
	// Setup - all pages in same directory for simpler test
	col_path := '${test_base}/fix_link_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col')!

	// Create pages in same directory
	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('[Link](page2)')!

	mut page2 := pathlib.get_file(path: '${col_path}/page2.md', create: true)!
	page2.write('# Page 2')!

	mut a := new()!
	a.add_collection(name: 'test_col', path: col_path)!

	// Get the page and test fix_links directly
	mut col := a.get_collection('test_col')!
	mut p := col.page_get('page1')!

	original := p.read_content()!
	println('Original: ${original}')

	fixed := p.fix_links(original)!
	println('Fixed: ${fixed}')

	// The fix_links should work on content
	assert fixed.contains('[Link](page2.md)')
}
|
||||
|
||||
// Verifies link-name normalization: extensions are stripped and only the
// filename of a path-based target is kept.
fn test_link_formats() {
	content := '
[Same collection](page1)
[With extension](page2.md)
[Collection ref](guides:intro)
[Path based](/some/path/page3)
[Relative path](../other/page4.md)
'

	links := find_links(content)
	local_links := links.filter(it.is_local)

	assert local_links.len == 5

	// Check normalization
	assert local_links[0].page == 'page1'
	assert local_links[1].page == 'page2'
	assert local_links[2].collection == 'guides'
	assert local_links[2].page == 'intro'
	assert local_links[3].page == 'page3' // Path ignored, only filename
	assert local_links[4].page == 'page4' // Path ignored, only filename
}
|
||||
|
||||
// Verifies that collection:page links across collections validate cleanly
// and are left untouched by fix_links.
fn test_cross_collection_links() {
	// Setup two collections
	col1_path := '${test_base}/col1_cross'
	col2_path := '${test_base}/col2_cross'

	os.mkdir_all(col1_path)!
	os.mkdir_all(col2_path)!

	mut cfile1 := pathlib.get_file(path: '${col1_path}/.collection', create: true)!
	cfile1.write('name:col1')!

	mut cfile2 := pathlib.get_file(path: '${col2_path}/.collection', create: true)!
	cfile2.write('name:col2')!

	// Page in col1 links to col2
	mut page1 := pathlib.get_file(path: '${col1_path}/page1.md', create: true)!
	page1.write('[Link to col2](col2:page2)')!

	// Page in col2
	mut page2 := pathlib.get_file(path: '${col2_path}/page2.md', create: true)!
	page2.write('# Page 2')!

	mut a := new()!
	a.add_collection(name: 'col1', path: col1_path)!
	a.add_collection(name: 'col2', path: col2_path)!

	// Validate - should pass
	a.validate_links()!

	col1 := a.get_collection('col1')!
	assert col1.errors.len == 0

	// Fix links - cross-collection links should NOT be rewritten
	a.fix_links()!

	fixed := page1.read()!
	assert fixed.contains('[Link to col2](col2:page2)') // Unchanged
}
|
||||
|
||||
// Verifies save() writes .collection.json and load_collection restores
// pages that can still read their content from disk.
fn test_save_and_load() {
	// Setup
	col_path := '${test_base}/save_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col')!

	mut page := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page.write('# Page 1\n\nContent here.')!

	// Create and save
	mut a := new(name: 'test')!
	a.add_collection(name: 'test_col', path: col_path)!
	a.save()!

	assert os.exists('${col_path}/.collection.json')

	// Load in new atlas
	mut a2 := new(name: 'loaded')!
	a2.load_collection(col_path)!

	assert a2.collections.len == 1
	col := a2.get_collection('test_col')!
	assert col.pages.len == 1
	assert col.page_exists('page1')

	// Verify page can read content
	mut page_loaded := col.page_get('page1')!
	content := page_loaded.read_content()!
	assert content.contains('# Page 1')
}
|
||||
|
||||
// Verifies Collection.save persists errors and that load_collection
// rebuilds both the error list and the dedup cache.
fn test_save_with_errors() {
	col_path := '${test_base}/error_save_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:err_col')!

	mut a := new(name: 'test')!
	mut col := a.new_collection(name: 'err_col', path: col_path)!

	// Add some errors
	col.error(
		category: .missing_include
		page_key: 'err_col:page1'
		message: 'Test error 1'
	)

	col.error(
		category: .invalid_page_reference
		page_key: 'err_col:page2'
		message: 'Test error 2'
	)

	a.collections['err_col'] = &col

	// Save
	col.save()!

	// Load
	mut a2 := new(name: 'loaded')!
	loaded_col := a2.load_collection(col_path)!

	// Verify errors persisted
	assert loaded_col.errors.len == 2
	assert loaded_col.error_cache.len == 2
}
|
||||
|
||||
// Verifies load_from_directory picks up every saved collection under a root.
fn test_load_from_directory() {
	// Setup multiple collections
	col1_path := '${test_base}/load_dir/col1'
	col2_path := '${test_base}/load_dir/col2'

	os.mkdir_all(col1_path)!
	os.mkdir_all(col2_path)!

	mut cfile1 := pathlib.get_file(path: '${col1_path}/.collection', create: true)!
	cfile1.write('name:col1')!

	mut cfile2 := pathlib.get_file(path: '${col2_path}/.collection', create: true)!
	cfile2.write('name:col2')!

	mut page1 := pathlib.get_file(path: '${col1_path}/page1.md', create: true)!
	page1.write('# Page 1')!

	mut page2 := pathlib.get_file(path: '${col2_path}/page2.md', create: true)!
	page2.write('# Page 2')!

	// Create and save
	mut a := new(name: 'test')!
	a.add_collection(name: 'col1', path: col1_path)!
	a.add_collection(name: 'col2', path: col2_path)!
	a.save()!

	// Load from directory
	mut a2 := new(name: 'loaded')!
	a2.load_from_directory('${test_base}/load_dir')!

	assert a2.collections.len == 2
	assert a2.get_collection('col1')!.page_exists('page1')
	assert a2.get_collection('col2')!.page_exists('page2')
}
|
||||
320
lib/data/atlas/collection.v
Normal file
320
lib/data/atlas/collection.v
Normal file
@@ -0,0 +1,320 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
import incubaid.herolib.core.texttools
|
||||
import incubaid.herolib.core.base
|
||||
import incubaid.herolib.ui.console
|
||||
import os
|
||||
|
||||
// A named set of markdown pages, images and other files rooted at one
// directory, owned by a parent Atlas.
@[heap]
pub struct Collection {
pub mut:
	name        string       @[required] // normalized collection name
	path        pathlib.Path @[required] // root directory of the collection
	pages       map[string]&Page // pages keyed by normalized name (no extension)
	images      map[string]&File // image files keyed by normalized name
	files       map[string]&File // non-image files keyed by normalized name
	atlas       &Atlas @[skip; str: skip] // Reference to parent atlas for include resolution
	errors      []CollectionError // errors reported during scan/validation
	error_cache map[string]bool // Track error hashes to avoid duplicates
}
|
||||
|
||||
// Arguments for Atlas.new_collection.
@[params]
pub struct CollectionNewArgs {
pub mut:
	name string @[required] // collection name; normalized via texttools.name_fix
	path string @[required] // directory that holds the collection content
}
|
||||
|
||||
// Create a new collection
// Normalizes the requested name, resolves the directory and wires the
// collection back to its parent atlas (needed for include resolution).
fn (mut self Atlas) new_collection(args CollectionNewArgs) !Collection {
	return Collection{
		name:        texttools.name_fix(args.name)
		path:        pathlib.get_dir(path: args.path)!
		atlas:       &self // Set atlas reference
		error_cache: map[string]bool{}
	}
}
|
||||
|
||||
// Add a page to the collection
// The page is keyed by its normalized filename (extension stripped);
// duplicates within one collection are rejected.
fn (mut c Collection) add_page(mut p pathlib.Path) ! {
	name := p.name_fix_no_ext()
	if name in c.pages {
		return error('Page ${name} already exists in collection ${c.name}')
	}
	page := new_page(
		name:            name
		path:            p
		collection_name: c.name
		collection:      &c
	)!
	c.pages[name] = &page
}
|
||||
|
||||
// Add an image to the collection
// Keyed by normalized filename without extension; duplicates are rejected.
fn (mut c Collection) add_image(mut p pathlib.Path) ! {
	name := p.name_fix_no_ext()
	if name in c.images {
		return error('Image ${name} already exists in collection ${c.name}')
	}
	mut image := new_file(path: p)!
	c.images[name] = &image
}
|
||||
|
||||
// Add a file to the collection
// Keyed by normalized filename without extension; duplicates are rejected.
fn (mut c Collection) add_file(mut p pathlib.Path) ! {
	name := p.name_fix_no_ext()
	if name in c.files {
		return error('File ${name} already exists in collection ${c.name}')
	}
	mut entry := new_file(path: p)!
	c.files[name] = &entry
}
|
||||
|
||||
// Get a page by name
// Returns PageNotFound when no page with that name is registered.
pub fn (c Collection) page_get(name string) !&Page {
	return c.pages[name] or {
		return PageNotFound{
			collection: c.name
			page:       name
		}
	}
}
|
||||
|
||||
// Get an image by name
// Returns FileNotFound when no image with that name is registered.
pub fn (c Collection) image_get(name string) !&File {
	return c.images[name] or {
		return FileNotFound{
			collection: c.name
			file:       name
		}
	}
}
|
||||
|
||||
// Get a file by name
// Returns FileNotFound when no file with that name is registered.
pub fn (c Collection) file_get(name string) !&File {
	return c.files[name] or {
		return FileNotFound{
			collection: c.name
			file:       name
		}
	}
}
|
||||
|
||||
// Check if page exists
// True when a page with this normalized name is registered.
pub fn (c Collection) page_exists(name string) bool {
	return name in c.pages
}
|
||||
|
||||
// Check if image exists
// True when an image with this normalized name is registered.
pub fn (c Collection) image_exists(name string) bool {
	return name in c.images
}
|
||||
|
||||
// Check if file exists
// True when a file with this normalized name is registered.
pub fn (c Collection) file_exists(name string) bool {
	return name in c.files
}
|
||||
|
||||
// Arguments for Collection.export.
@[params]
pub struct CollectionExportArgs {
pub mut:
	destination pathlib.Path @[required] // parent directory that receives the exported collection
	reset       bool = true // empty the target collection directory first
	include     bool = true // process includes during export
	redis       bool = true // register exported entries in Redis
}
|
||||
|
||||
// Export a single collection to args.destination/<name>.
// Writes the .collection marker, all pages (with includes processed when
// args.include is set), images under img/ and other files under files/.
// When args.redis is set, each exported entry is registered in the
// 'atlas:<name>' Redis hash and the collection path in 'atlas:path'.
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
	// Create collection directory
	mut col_dir := pathlib.get_dir(
		path:   '${args.destination.path}/${c.name}'
		create: true
	)!

	if args.reset {
		col_dir.empty()!
	}

	// Write .collection file
	mut cfile := pathlib.get_file(
		path:   '${col_dir.path}/.collection'
		create: true
	)!
	cfile.write("name:${c.name} src:'${c.path.path}'")!

	// Collect the redis entries while exporting, so the context/redis
	// connection is acquired once instead of once per exported item
	// (the original re-ran base.context()!/context.redis()! inside every
	// loop iteration).
	mut redis_entries := map[string]string{}

	// Export pages (process includes if requested)
	for _, mut page in c.pages {
		content := page.content(include: args.include)!
		mut dest_file := pathlib.get_file(
			path:   '${col_dir.path}/${page.name}.md'
			create: true
		)!
		dest_file.write(content)!
		redis_entries[page.name] = '${page.name}.md'
	}

	// Export images
	if c.images.len > 0 {
		img_dir := pathlib.get_dir(
			path:   '${col_dir.path}/img'
			create: true
		)!
		for _, mut img in c.images {
			img.path.copy(dest: '${img_dir.path}/${img.file_name()}')!
			redis_entries[img.file_name()] = 'img/${img.file_name()}'
		}
	}

	// Export files
	if c.files.len > 0 {
		files_dir := pathlib.get_dir(
			path:   '${col_dir.path}/files'
			create: true
		)!
		for _, mut file in c.files {
			file.path.copy(dest: '${files_dir.path}/${file.file_name()}')!
			redis_entries[file.file_name()] = 'files/${file.file_name()}'
		}
	}

	// Store exported entries and collection metadata in Redis
	if args.redis {
		mut context := base.context()!
		mut redis := context.redis()!
		for key, value in redis_entries {
			redis.hset('atlas:${c.name}', key, value)!
		}
		redis.hset('atlas:path', c.name, col_dir.path)!
	}
}
|
||||
|
||||
// Arguments for Collection.error.
@[params]
pub struct CollectionErrorArgs {
pub mut:
	category     CollectionErrorCategory @[required]
	message      string                  @[required]
	page_key     string // "collection:page" when the error concerns a page
	file         string // specific file path, if relevant
	show_console bool // Show error in console immediately
	log_error    bool = true // Log to errors array (default: true)
}
|
||||
|
||||
// Report an error, avoiding duplicates based on hash
// The hash covers category + location (page_key or file), so identical
// problems reported repeatedly are recorded only once.
pub fn (mut c Collection) error(args CollectionErrorArgs) {
	err := CollectionError{
		category: args.category
		page_key: args.page_key
		message:  args.message
		file:     args.file
	}

	dedup_key := err.hash()
	if dedup_key in c.error_cache {
		// Already reported — skip duplicate.
		return
	}
	c.error_cache[dedup_key] = true

	if args.log_error {
		c.errors << err
	}
	if args.show_console {
		console.print_stderr('[${c.name}] ${err.str()}')
	}
}
|
||||
|
||||
// Get all errors
// Returns the recorded (already deduplicated) errors.
pub fn (c Collection) get_errors() []CollectionError {
	return c.errors
}
|
||||
|
||||
// Check if collection has errors
pub fn (c Collection) has_errors() bool {
	return c.errors.len > 0
}
|
||||
|
||||
// Clear all errors
// Resets both the error list and the dedup cache so the same problems
// can be reported again.
pub fn (mut c Collection) clear_errors() {
	c.errors = []CollectionError{}
	c.error_cache = map[string]bool{}
}
|
||||
|
||||
// Get error summary by category
// Returns a count of recorded errors per category.
pub fn (c Collection) error_summary() map[CollectionErrorCategory]int {
	mut counts := map[CollectionErrorCategory]int{}
	for e in c.errors {
		counts[e.category]++
	}
	return counts
}
|
||||
|
||||
// Print all errors to console
// Prints a green "no errors" line when the collection is clean, otherwise
// a header followed by one line per recorded error.
pub fn (c Collection) print_errors() {
	if c.errors.len > 0 {
		console.print_header('Collection ${c.name} - Errors (${c.errors.len})')
		for err in c.errors {
			console.print_stderr(' ${err.str()}')
		}
		return
	}
	console.print_green('Collection ${c.name}: No errors')
}
|
||||
|
||||
// Validate all links in collection
// Delegates to each page; problems are recorded via Collection.error.
pub fn (mut c Collection) validate_links() ! {
	for _, mut pg in c.pages {
		pg.validate_links()!
	}
}
|
||||
|
||||
// Fix all links in collection (rewrite files)
// Each page's content is rewritten on disk only when fixing actually
// changed something.
pub fn (mut c Collection) fix_links() ! {
	for _, mut pg in c.pages {
		original := pg.read_content()!
		updated := pg.fix_links(original)!
		if updated == original {
			continue // nothing to rewrite
		}
		pg.path.write(updated)!
	}
}
|
||||
61
lib/data/atlas/collection_error.v
Normal file
61
lib/data/atlas/collection_error.v
Normal file
@@ -0,0 +1,61 @@
|
||||
module atlas
|
||||
|
||||
import crypto.md5
|
||||
import incubaid.herolib.ui.console
|
||||
|
||||
// Categories of problems that can be recorded against a collection.
pub enum CollectionErrorCategory {
	circular_include // include chain loops back on itself
	missing_include // include target does not exist
	include_syntax_error // malformed include directive
	invalid_page_reference // link points at an unknown page
	file_not_found // referenced file is missing
	invalid_collection // referenced collection is missing
	general_error // anything uncategorized
}
|
||||
|
||||
// A single recorded problem within a collection.
pub struct CollectionError {
pub mut:
	category CollectionErrorCategory
	page_key string // Format: "collection:page" or just collection name
	message  string
	file     string // Optional: specific file path if relevant
}
|
||||
|
||||
// Generate MD5 hash for error deduplication
// Hash is based on category + page_key (or file if page_key is empty).
// The message is deliberately excluded, so the same problem reported with
// different wording still deduplicates.
pub fn (e CollectionError) hash() string {
	location := if e.page_key != '' { e.page_key } else { e.file }
	mut input := '${e.category}'
	if location != '' {
		input += ':${location}'
	}
	return md5.hexhash(input)
}
|
||||
|
||||
// Get human-readable error message
// Format: "[category] [location]: message", where the location is the
// page_key when set, otherwise the file path, otherwise omitted.
pub fn (e CollectionError) str() string {
	where := if e.page_key != '' { e.page_key } else { e.file }
	location := if where == '' { '' } else { ' [${where}]' }
	return '[${e.category}]${location}: ${e.message}'
}
|
||||
|
||||
// Get category as string
// Human-readable label for each error category.
pub fn (e CollectionError) category_str() string {
	return match e.category {
		.circular_include { 'Circular Include' }
		.missing_include { 'Missing Include' }
		.include_syntax_error { 'Include Syntax Error' }
		.invalid_page_reference { 'Invalid Page Reference' }
		.file_not_found { 'File Not Found' }
		.invalid_collection { 'Invalid Collection' }
		.general_error { 'General Error' }
	}
}
|
||||
34
lib/data/atlas/error.v
Normal file
34
lib/data/atlas/error.v
Normal file
@@ -0,0 +1,34 @@
|
||||
module atlas
|
||||
|
||||
// Returned when an atlas lookup references an unknown collection.
pub struct CollectionNotFound {
	Error
pub:
	name string // name of the collection that was requested
	// NOTE(review): a field named 'msg' next to the msg() method below looks
	// like a name clash — verify this compiles as intended in V.
	msg string
}
|
||||
|
||||
// Human-readable message for the Error interface.
pub fn (err CollectionNotFound) msg() string {
	return 'Collection ${err.name} not found: ${err.msg}'
}
|
||||
|
||||
// Returned when a page lookup misses within a known collection.
pub struct PageNotFound {
	Error
pub:
	collection string // collection that was searched
	page       string // page name that was requested
}
|
||||
|
||||
// Human-readable message for the Error interface.
pub fn (err PageNotFound) msg() string {
	return 'Page ${err.page} not found in collection ${err.collection}'
}
|
||||
|
||||
// Returned when a file/image lookup misses within a known collection.
pub struct FileNotFound {
	Error
pub:
	collection string // collection that was searched
	file       string // file name that was requested
}
|
||||
|
||||
// Human-readable message for the Error interface.
pub fn (err FileNotFound) msg() string {
	return 'File ${err.file} not found in collection ${err.collection}'
}
|
||||
38
lib/data/atlas/export.v
Normal file
38
lib/data/atlas/export.v
Normal file
@@ -0,0 +1,38 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
|
||||
// Arguments for Atlas.export; forwarded to each collection's export.
@[params]
pub struct ExportArgs {
pub mut:
	destination string // root directory receiving all exported collections
	reset       bool = true // empty the destination first
	include     bool = true // process includes during export
	redis       bool = true // register exported entries in Redis
}
|
||||
|
||||
// Export all collections
// Ensures the destination exists (optionally emptying it), validates all
// links first, then exports each collection and prints any errors it
// accumulated along the way.
pub fn (mut a Atlas) export(args ExportArgs) ! {
	mut dest := pathlib.get_dir(path: args.destination, create: true)!

	if args.reset {
		dest.empty()!
	}

	// Validate links before export
	a.validate_links()!

	for _, mut col in a.collections {
		col.export(
			destination: dest
			reset: args.reset
			include: args.include
			redis: args.redis
		)!

		// Print errors for this collection if any
		if col.has_errors() {
			col.print_errors()
		}
	}
}
|
||||
51
lib/data/atlas/file.v
Normal file
51
lib/data/atlas/file.v
Normal file
@@ -0,0 +1,51 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
|
||||
// Kind of asset tracked by a collection.
pub enum FileType {
	file // any non-image file
	image // image file, exported under img/
}
|
||||
|
||||
// An asset (image or generic file) tracked by a collection.
pub struct File {
pub mut:
	name  string // name without extension
	ext   string // file extension
	path  pathlib.Path // full path to file
	ftype FileType // file or image
}
|
||||
|
||||
// Arguments for new_file.
@[params]
pub struct NewFileArgs {
pub:
	path pathlib.Path @[required] // path of the asset on disk
}
|
||||
|
||||
// Construct a File for the given path and derive its name, extension
// and type from the path.
pub fn new_file(args NewFileArgs) !File {
	mut file := File{
		path: args.path
	}
	file.init()!
	return file
}
|
||||
|
||||
// Derive the file type, normalized name and lowercase extension from the path.
fn (mut f File) init() ! {
	f.ftype = if f.path.is_image() { FileType.image } else { FileType.file }
	f.name = f.path.name_fix_no_ext()
	f.ext = f.path.extension_lower()
}
|
||||
|
||||
// Full filename including the extension.
// Returns just the name when the file has no extension — the original
// unconditionally produced '${name}.${ext}', yielding a trailing dot
// (e.g. 'README.') for extensionless files.
pub fn (f File) file_name() string {
	if f.ext == '' {
		return f.name
	}
	return '${f.name}.${f.ext}'
}
|
||||
|
||||
// True when this asset was classified as an image during init.
pub fn (f File) is_image() bool {
	return f.ftype == .image
}
|
||||
83
lib/data/atlas/getters.v
Normal file
83
lib/data/atlas/getters.v
Normal file
@@ -0,0 +1,83 @@
|
||||
module atlas
|
||||
|
||||
// Get a page from any collection using format "collection:page"
// Errors on a malformed key, an unknown collection or an unknown page.
pub fn (a Atlas) page_get(key string) !&Page {
	parts := key.split(':')
	if parts.len != 2 {
		return error('Invalid page key format. Use "collection:page"')
	}
	collection := a.get_collection(parts[0])!
	return collection.page_get(parts[1])!
}
|
||||
|
||||
// Get an image from any collection using format "collection:image"
|
||||
// image_get resolves an image reference of the form "collection:image".
// Errors mirror page_get: bad key format, unknown collection, or
// missing image.
pub fn (a Atlas) image_get(key string) !&File {
	segments := key.split(':')
	if segments.len != 2 {
		return error('Invalid image key format. Use "collection:image"')
	}

	collection := a.get_collection(segments[0])!
	return collection.image_get(segments[1])!
}
|
||||
|
||||
// Get a file from any collection using format "collection:file"
|
||||
// file_get resolves a file reference of the form "collection:file".
// Errors mirror page_get: bad key format, unknown collection, or
// missing file.
pub fn (a Atlas) file_get(key string) !&File {
	segments := key.split(':')
	if segments.len != 2 {
		return error('Invalid file key format. Use "collection:file"')
	}

	collection := a.get_collection(segments[0])!
	return collection.file_get(segments[1])!
}
|
||||
|
||||
// Check if page exists
|
||||
// page_exists reports whether "collection:page" resolves to a known page.
// Malformed keys and unknown collections yield false instead of an error.
pub fn (a Atlas) page_exists(key string) bool {
	segments := key.split(':')
	if segments.len != 2 {
		return false
	}

	target := a.get_collection(segments[0]) or { return false }
	return target.page_exists(segments[1])
}
|
||||
|
||||
// Check if image exists
|
||||
// image_exists reports whether "collection:image" resolves to a known image.
// Malformed keys and unknown collections yield false instead of an error.
pub fn (a Atlas) image_exists(key string) bool {
	segments := key.split(':')
	if segments.len != 2 {
		return false
	}

	target := a.get_collection(segments[0]) or { return false }
	return target.image_exists(segments[1])
}
|
||||
|
||||
// Check if file exists
|
||||
// file_exists reports whether "collection:file" resolves to a known file.
// Malformed keys and unknown collections yield false instead of an error.
pub fn (a Atlas) file_exists(key string) bool {
	segments := key.split(':')
	if segments.len != 2 {
		return false
	}

	target := a.get_collection(segments[0]) or { return false }
	return target.file_exists(segments[1])
}
|
||||
|
||||
// List all pages in Atlas
|
||||
// list_pages returns every page name in the atlas, grouped by collection
// name. Page names within each collection are sorted alphabetically.
pub fn (a Atlas) list_pages() map[string][]string {
	mut out := map[string][]string{}

	for col_name, col in a.collections {
		mut names := col.pages.keys()
		names.sort()
		out[col_name] = names
	}

	return out
}
|
||||
228
lib/data/atlas/link.v
Normal file
228
lib/data/atlas/link.v
Normal file
@@ -0,0 +1,228 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.texttools
|
||||
import incubaid.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
// Link represents a markdown link found in content
|
||||
// Link is one markdown link `[text](target)` found by find_links, together
// with its position and the resolved collection/page it points at.
pub struct Link {
pub mut:
	text       string // Link text [text]
	target     string // Original link target, whitespace-trimmed
	line       int    // Line number (1-based)
	col_start  int    // Column start position (0-based index of the '[')
	col_end    int    // Column end position (index just past the ')')
	collection string // Target collection (if specified, normalized via name_fix)
	page       string // Target page name (normalized)
	is_local   bool   // Whether link points to local page (not http/mailto/ftp/anchor)
	valid      bool   // Whether link target exists
}
|
||||
|
||||
// Find all markdown links in content
|
||||
// Find all markdown links in content.
//
// Scans each line for `[text](target)` pairs with simple left-to-right
// bracket matching (no support for nested brackets or links spanning
// lines) and returns one Link per match. Line numbers are 1-based,
// columns are 0-based. Each link's collection/page fields are filled in
// by parse_link_target.
pub fn find_links(content string) []Link {
	mut links := []Link{}
	lines := content.split_into_lines()

	for line_idx, line in lines {
		mut pos := 0
		for {
			// Find next [
			open_bracket := line.index_after('[', pos) or { break }

			// Find matching ]
			close_bracket := line.index_after(']', open_bracket) or { break }

			// Check for ( — a bare `[text]` with no following paren is not a
			// link; resume scanning just past the closing bracket.
			if close_bracket + 1 >= line.len || line[close_bracket + 1] != `(` {
				pos = close_bracket + 1
				continue
			}

			// Find matching )
			open_paren := close_bracket + 1
			close_paren := line.index_after(')', open_paren) or { break }

			// Extract link components
			text := line[open_bracket + 1..close_bracket]
			target := line[open_paren + 1..close_paren]

			mut link := Link{
				text:      text
				target:    target.trim_space()
				line:      line_idx + 1
				col_start: open_bracket
				col_end:   close_paren + 1
			}

			parse_link_target(mut link)
			links << link

			pos = close_paren + 1
		}
	}

	return links
}
|
||||
|
||||
// Parse link target to extract collection and page
|
||||
// parse_link_target fills in the collection/page fields of a link.
//
// External protocols (http, https, mailto, ftp) and pure anchors are left
// non-local. "collection:page" targets set both fields; any other target is
// reduced to its filename (path components are ignored) and normalized.
fn parse_link_target(mut link Link) {
	t := link.target

	// Pure in-page anchors are not local page links.
	if t.starts_with('#') {
		return
	}

	// External links are skipped entirely.
	for prefix in ['http://', 'https://', 'mailto:', 'ftp://'] {
		if t.starts_with(prefix) {
			return
		}
	}

	link.is_local = true

	// Format: $collection:$pagename or $collection:$pagename.md
	if t.contains(':') {
		segments := t.split(':')
		if segments.len >= 2 {
			link.collection = texttools.name_fix(segments[0])
			link.page = normalize_page_name(segments[1])
		}
		return
	}

	// For all other formats, only the filename matters — path components
	// are dropped. Handles: $page, path/to/$page, /path/to/$page.md
	link.page = normalize_page_name(os.base(t))
}
|
||||
|
||||
// Normalize page name (remove .md, apply name_fix)
|
||||
// normalize_page_name strips a trailing `.md` (once) and applies the
// standard name normalization.
fn normalize_page_name(name string) string {
	trimmed := if name.ends_with('.md') { name[..name.len - 3] } else { name }
	return texttools.name_fix(trimmed)
}
|
||||
|
||||
// Validate links in page
|
||||
// Validate links in page.
//
// Reads the raw page content, extracts every markdown link, and records an
// `.invalid_page_reference` error on the parent collection for each local
// link whose target page does not exist in the atlas. Non-local links
// (external URLs, anchors) are ignored. Errors are collected, not printed.
pub fn (mut p Page) validate_links() ! {
	content := p.read_content()!
	links := find_links(content)

	for link in links {
		if !link.is_local {
			continue
		}

		// Determine target collection: an omitted collection means
		// "same collection as this page".
		mut target_collection := link.collection
		if target_collection == '' {
			target_collection = p.collection_name
		}

		// Check if page exists
		page_key := '${target_collection}:${link.page}'
		if !p.collection.atlas.page_exists(page_key) {
			p.collection.error(
				category:     .invalid_page_reference
				page_key:     p.key()
				message:      'Broken link to `${page_key}` at line ${link.line}: [${link.text}](${link.target})'
				show_console: false
			)
		}
	}
}
|
||||
|
||||
// Fix links in page content - rewrites links with proper relative paths
|
||||
// Fix links in page content - rewrites links with proper relative paths.
//
// Only local links that resolve within this page's own collection are
// rewritten; cross-collection, external, and unresolvable links are left
// untouched. Returns the rewritten content — the file on disk is not
// modified here.
pub fn (mut p Page) fix_links(content string) !string {
	links := find_links(content)
	if links.len == 0 {
		return content
	}

	mut result := content

	// Process links in reverse order to maintain positions
	for link in links.reverse() {
		if !link.is_local || link.page == '' {
			continue
		}

		// Determine target collection
		mut target_collection := link.collection
		if target_collection == '' {
			target_collection = p.collection_name
		}

		// Only fix links within same collection
		if target_collection != p.collection_name {
			continue
		}

		// Get target page
		page_key := '${target_collection}:${link.page}'
		mut target_page := p.collection.atlas.page_get(page_key) or {
			// Skip if page doesn't exist - error already reported in validate
			continue
		}

		// Calculate relative path
		relative_path := calculate_relative_path(mut p.path, mut target_page.path)

		// Build replacement
		old_link := '[${link.text}](${link.target})'
		new_link := '[${link.text}](${relative_path})'

		// Replace in content. NOTE(review): string-based replace rewrites
		// every occurrence of this exact `[text](target)` pair, not just the
		// one at the recorded position.
		result = result.replace(old_link, new_link)
	}

	return result
}
|
||||
|
||||
// Calculate relative path from source file to target file with .md extension
|
||||
// calculate_relative_path returns the relative path (always ending in
// `.md`) that reaches the `to` file from the directory containing `from`.
fn calculate_relative_path(mut from pathlib.Path, mut to pathlib.Path) string {
	source_dir := from.path_dir()
	target_dir := to.path_dir()
	target_name := to.name_fix_no_ext()

	// Same directory: the bare filename is enough.
	if source_dir == target_dir {
		return '${target_name}.md'
	}

	// Split both directories into non-empty components.
	source_parts := source_dir.split(os.path_separator).filter(it != '')
	target_parts := target_dir.split(os.path_separator).filter(it != '')

	// Length of the shared leading directory prefix.
	mut shared := 0
	for shared < source_parts.len && shared < target_parts.len
		&& source_parts[shared] == target_parts[shared] {
		shared++
	}

	mut segments := []string{}

	// Climb out of the source directory ...
	for _ in 0 .. source_parts.len - shared {
		segments << '..'
	}

	// ... then descend into the target directory.
	for idx in shared .. target_parts.len {
		segments << target_parts[idx]
	}

	// Finally the target filename with .md extension.
	segments << '${target_name}.md'
	return segments.join('/')
}
|
||||
141
lib/data/atlas/page.v
Normal file
141
lib/data/atlas/page.v
Normal file
@@ -0,0 +1,141 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
import incubaid.herolib.core.texttools
|
||||
|
||||
// Page is a single markdown document inside a collection.
@[heap]
pub struct Page {
pub mut:
	name            string       // page name (no extension)
	path            pathlib.Path // path of the .md file on disk
	collection_name string       // name of the owning collection
	collection      &Collection @[skip; str: skip] // Reference to parent collection
}
|
||||
|
||||
// NewPageArgs are the parameters accepted by new_page.
@[params]
pub struct NewPageArgs {
pub:
	name            string       @[required]
	path            pathlib.Path @[required]
	collection_name string       @[required]
	collection      &Collection  @[required]
}
|
||||
|
||||
// new_page builds a Page value from the given arguments.
pub fn new_page(args NewPageArgs) !Page {
	page := Page{
		name:            args.name
		path:            args.path
		collection_name: args.collection_name
		collection:      args.collection
	}
	return page
}
|
||||
|
||||
// Read content without processing includes
|
||||
// read_content returns the raw page text without processing includes.
pub fn (mut p Page) read_content() !string {
	raw := p.path.read()!
	return raw
}
|
||||
|
||||
// Read content with includes processed (default behavior)
|
||||
// ReadContentArgs controls how Page.content reads a page.
@[params]
pub struct ReadContentArgs {
pub mut:
	include bool = true // process `!!include` directives (on by default)
}
|
||||
|
||||
// content reads the page text, expanding `!!include` directives unless
// args.include is false.
pub fn (mut p Page) content(args ReadContentArgs) !string {
	raw := p.path.read()!

	// Without include processing the raw text is returned as-is.
	if !args.include {
		return raw
	}

	mut seen := map[string]bool{}
	return p.process_includes(raw, mut seen)!
}
|
||||
|
||||
// Recursively process includes
|
||||
// process_includes recursively replaces `!!include` directives with the
// content of the referenced pages.
//
// `visited` is the stack of pages currently being expanded: re-entering a
// key that is still present means a genuine circular include. The key is
// removed again before returning, so the SAME page may legitimately be
// included more than once from different (non-cyclic) places — the
// previous implementation never popped the key and falsely reported such
// diamond includes as circular, replacing them with empty content.
fn (mut p Page) process_includes(content string, mut visited map[string]bool) !string {
	mut atlas := p.collection.atlas
	// Prevent circular includes
	page_key := p.key()
	if page_key in visited {
		p.collection.error(
			category:     .circular_include
			page_key:     page_key
			message:      'Circular include detected for page `${page_key}`'
			show_console: false // Don't show immediately, collect for later
		)
		return ''
	}
	visited[page_key] = true
	defer {
		// Pop this page off the inclusion stack: only cycles through the
		// *active* chain are circular, not repeated includes of a page.
		visited.delete(page_key)
	}

	mut processed_lines := []string{}

	for line in content.split_into_lines() {
		trimmed := line.trim_space()

		// Check for include action: !!include collection:page or !!include page
		if trimmed.starts_with('!!include') {
			// Parse the include reference
			include_ref := trimmed.trim_string_left('!!include').trim_space()

			// Determine collection and page name; default to this page's
			// own collection when none is given.
			mut target_collection := p.collection_name
			mut target_page := ''

			if include_ref.contains(':') {
				parts := include_ref.split(':')
				if parts.len == 2 {
					target_collection = texttools.name_fix(parts[0])
					target_page = texttools.name_fix(parts[1])
				} else {
					p.collection.error(
						category:     .include_syntax_error
						page_key:     page_key
						message:      'Invalid include format: `${include_ref}`'
						show_console: false
					)
					processed_lines << '<!-- Invalid include format: ${include_ref} -->'
					continue
				}
			} else {
				target_page = texttools.name_fix(include_ref)
			}

			// Remove .md extension if present
			if target_page.ends_with('.md') {
				target_page = target_page[0..target_page.len - 3]
			}

			// Build page key
			page_ref := '${target_collection}:${target_page}'

			// Get the referenced page from atlas
			mut include_page := atlas.page_get(page_ref) or {
				p.collection.error(
					category:     .missing_include
					page_key:     page_key
					message:      'Included page `${page_ref}` not found'
					show_console: false
				)
				processed_lines << '<!-- Include not found: ${page_ref} -->'
				continue
			}

			// Recursively process the included page
			include_content := include_page.process_includes(include_page.read_content()!, mut
				visited)!

			processed_lines << include_content
		} else {
			processed_lines << line
		}
	}

	return processed_lines.join_lines()
}
|
||||
|
||||
// key returns the page's atlas-wide identifier, "collection:page".
pub fn (p Page) key() string {
	return p.collection_name + ':' + p.name
}
|
||||
56
lib/data/atlas/play.v
Normal file
56
lib/data/atlas/play.v
Normal file
@@ -0,0 +1,56 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.playbook { PlayBook }
|
||||
|
||||
// Play function to process HeroScript actions for Atlas
|
||||
// Play function to process HeroScript actions for Atlas.
//
// Handles, in order:
//   !!atlas.scan   name:<atlas> path:<dir>   — scan a directory tree and
//                                              register the atlas globally
//   !!atlas.export name:<atlas> destination:<dir> [reset] [include] [redis]
//
// Export requires the named atlas to have been created by a prior scan
// action in the same playbook; otherwise an error is returned.
// Returns immediately when the playbook contains no `atlas.` actions.
pub fn play(mut plbook PlayBook) ! {
	if !plbook.exists(filter: 'atlas.') {
		return
	}

	// Atlas instances created by scan actions in this run, keyed by name.
	mut atlases := map[string]&Atlas{}

	// Process scan actions - scan directories for collections
	mut scan_actions := plbook.find(filter: 'atlas.scan')!
	for mut action in scan_actions {
		mut p := action.params
		name := p.get_default('name', 'main')!

		// Get or create atlas
		mut atlas_instance := atlases[name] or {
			mut new_atlas := new(name: name)!
			atlases[name] = new_atlas
			new_atlas
		}

		path := p.get('path')!
		atlas_instance.scan(path: path, save: true)!
		action.done = true
		atlas_set(atlas_instance)
	}

	// Process export actions - export collections to destination
	mut export_actions := plbook.find(filter: 'atlas.export')!

	// Process explicit export actions
	for mut action in export_actions {
		mut p := action.params
		name := p.get_default('name', 'main')!
		destination := p.get('destination')!
		// reset/include/redis all default to true when omitted.
		reset := p.get_default_true('reset')
		include := p.get_default_true('include')
		redis := p.get_default_true('redis')

		mut atlas_instance := atlases[name] or {
			return error("Atlas '${name}' not found. Use !!atlas.scan or !!atlas.load first.")
		}

		atlas_instance.export(
			destination: destination
			reset: reset
			include: include
			redis: redis
		)!
		action.done = true
	}
}
|
||||
910
lib/data/atlas/readme.md
Normal file
910
lib/data/atlas/readme.md
Normal file
@@ -0,0 +1,910 @@
|
||||
# Atlas Module
|
||||
|
||||
A lightweight document collection manager for V, inspired by doctree but simplified.
|
||||
|
||||
## Features
|
||||
|
||||
- **Simple Collection Scanning**: Automatically find collections marked with `.collection` files
|
||||
- **Include Processing**: Process `!!include` actions to embed content from other pages
|
||||
- **Easy Export**: Copy files to destination with organized structure
|
||||
- **Optional Redis**: Store metadata in Redis for quick lookups and caching
|
||||
- **Type-Safe Access**: Get pages, images, and files with error handling
|
||||
- **Error Tracking**: Built-in error collection and reporting with deduplication
|
||||
|
||||
## Quick Start
|
||||
|
||||
```v
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// Create a new Atlas
|
||||
mut a := atlas.new(name: 'my_docs')!
|
||||
|
||||
// Scan a directory for collections
|
||||
a.scan(path: '/path/to/docs')!
|
||||
|
||||
// Export to destination
|
||||
a.export(destination: '/path/to/output')!
|
||||
```
|
||||
|
||||
## Collections
|
||||
|
||||
Collections are directories marked with a `.collection` file.
|
||||
|
||||
### .collection File Format
|
||||
|
||||
```
|
||||
name:my_collection
|
||||
```
|
||||
|
||||
## Core Concepts
|
||||
|
||||
### Collections
|
||||
|
||||
A collection is a directory containing:
|
||||
- A `.collection` file (marks the directory as a collection)
|
||||
- Markdown pages (`.md` files)
|
||||
- Images (`.png`, `.jpg`, `.jpeg`, `.gif`, `.svg`)
|
||||
- Other files
|
||||
|
||||
### Page Keys
|
||||
|
||||
Pages, images, and files are referenced using the format: `collection:name`
|
||||
|
||||
```v
|
||||
// Get a page
|
||||
page := a.page_get('guides:introduction')!
|
||||
|
||||
// Get an image
|
||||
img := a.image_get('guides:logo')!
|
||||
|
||||
// Get a file
|
||||
file := a.file_get('guides:diagram')!
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Scanning for Collections
|
||||
|
||||
```v
|
||||
mut a := atlas.new()!
|
||||
a.scan(path: './docs')!
|
||||
```
|
||||
|
||||
### Adding a Specific Collection
|
||||
|
||||
```v
|
||||
a.add_collection(name: 'guides', path: './docs/guides')!
|
||||
```
|
||||
|
||||
### Getting Pages
|
||||
|
||||
```v
|
||||
// Get a page
|
||||
page := a.page_get('guides:introduction')!
|
||||
content := page.content()!
|
||||
|
||||
// Check if page exists
|
||||
if a.page_exists('guides:setup') {
|
||||
println('Setup guide found')
|
||||
}
|
||||
```
|
||||
|
||||
### Getting Images and Files
|
||||
|
||||
```v
|
||||
// Get an image
|
||||
img := a.image_get('guides:logo')!
|
||||
println('Image path: ${img.path.path}')
|
||||
println('Image type: ${img.ftype}') // .image
|
||||
|
||||
// Get a file
|
||||
file := a.file_get('guides:diagram')!
|
||||
println('File name: ${file.file_name()}')
|
||||
|
||||
// Check existence
|
||||
if a.image_exists('guides:screenshot') {
|
||||
println('Screenshot found')
|
||||
}
|
||||
```
|
||||
|
||||
### Listing All Pages
|
||||
|
||||
```v
|
||||
pages_map := a.list_pages()
|
||||
for col_name, page_names in pages_map {
|
||||
println('Collection: ${col_name}')
|
||||
for page_name in page_names {
|
||||
println(' - ${page_name}')
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Exporting
|
||||
|
||||
```v
|
||||
// Full export with all features
|
||||
a.export(
|
||||
destination: './output'
|
||||
reset: true // Clear destination before export
|
||||
include: true // Process !!include actions
|
||||
redis: true // Store metadata in Redis
|
||||
)!
|
||||
|
||||
// Export without Redis
|
||||
a.export(
|
||||
destination: './output'
|
||||
redis: false
|
||||
)!
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
```v
|
||||
// Export and check for errors
|
||||
a.export(destination: './output')!
|
||||
|
||||
// Errors are automatically printed during export
|
||||
// You can also access them programmatically
|
||||
for _, col in a.collections {
|
||||
if col.has_errors() {
|
||||
errors := col.get_errors()
|
||||
for err in errors {
|
||||
println('Error: ${err.str()}')
|
||||
}
|
||||
|
||||
// Get error summary by category
|
||||
summary := col.error_summary()
|
||||
for category, count in summary {
|
||||
println('${category}: ${count} errors')
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Include Processing
|
||||
|
||||
Atlas supports simple include processing using `!!include` actions:
|
||||
|
||||
```v
|
||||
// Export with includes processed (default)
|
||||
a.export(
|
||||
destination: './output'
|
||||
include: true // default
|
||||
)!
|
||||
|
||||
// Export without processing includes
|
||||
a.export(
|
||||
destination: './output'
|
||||
include: false
|
||||
)!
|
||||
```
|
||||
|
||||
#### Include Syntax
|
||||
|
||||
In your markdown files:
|
||||
|
||||
```md
|
||||
# My Page
|
||||
|
||||
!!include collection:page_name
|
||||
|
||||
More content here
|
||||
```
|
||||
|
||||
Or within the same collection:
|
||||
|
||||
```md
|
||||
!!include page_name
|
||||
```
|
||||
|
||||
The `!!include` action will be replaced with the content of the referenced page during export.
|
||||
|
||||
#### Reading Pages with Includes
|
||||
|
||||
```v
|
||||
// Read with includes processed (default)
|
||||
mut page := a.page_get('col:mypage')!
|
||||
content := page.content(include: true)!
|
||||
|
||||
// Read raw content without processing includes
content := page.content(include: false)!
|
||||
```
|
||||
|
||||
## Links
|
||||
|
||||
Atlas supports standard Markdown links with several formats for referencing pages within collections.
|
||||
|
||||
### Link Formats
|
||||
|
||||
#### 1. Explicit Collection Reference
|
||||
Link to a page in a specific collection:
|
||||
```md
|
||||
[Click here](guides:introduction)
|
||||
[Click here](guides:introduction.md)
|
||||
```
|
||||
|
||||
#### 2. Same Collection Reference
|
||||
Link to a page in the same collection (collection name omitted):
|
||||
```md
|
||||
[Click here](introduction)
|
||||
```
|
||||
|
||||
#### 3. Path-Based Reference
|
||||
Link using a path - **only the filename is used** for matching:
|
||||
```md
|
||||
[Click here](some/path/introduction)
|
||||
[Click here](/absolute/path/introduction)
|
||||
[Click here](path/to/introduction.md)
|
||||
```
|
||||
|
||||
**Important:** Paths are ignored during link resolution. Only the page name (filename) is used to find the target page within the same collection.
|
||||
|
||||
### Link Processing
|
||||
|
||||
#### Validation
|
||||
|
||||
Check all links in your Atlas:
|
||||
|
||||
```v
|
||||
mut a := atlas.new()!
|
||||
a.scan(path: './docs')!
|
||||
|
||||
// Validate all links
|
||||
a.validate_links()!
|
||||
|
||||
// Check for errors
|
||||
for _, col in a.collections {
|
||||
if col.has_errors() {
|
||||
col.print_errors()
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Fixing Links
|
||||
|
||||
Automatically rewrite links with correct relative paths:
|
||||
|
||||
```v
|
||||
mut a := atlas.new()!
|
||||
a.scan(path: './docs')!
|
||||
|
||||
// Fix all links in place
|
||||
a.fix_links()!
|
||||
|
||||
// Or fix links in a specific collection
|
||||
mut col := a.get_collection('guides')!
|
||||
col.fix_links()!
|
||||
```
|
||||
|
||||
**What `fix_links()` does:**
|
||||
- Finds all local page links
|
||||
- Calculates correct relative paths
|
||||
- Rewrites links as `[text](relative/path/pagename.md)`
|
||||
- Only fixes links within the same collection
|
||||
- Preserves `!!include` actions unchanged
|
||||
- Writes changes back to files
|
||||
|
||||
#### Example
|
||||
|
||||
Before fix:
|
||||
```md
|
||||
# My Page
|
||||
|
||||
[Introduction](introduction)
|
||||
[Setup](/some/old/path/setup)
|
||||
[Guide](guides:advanced)
|
||||
```
|
||||
|
||||
After fix (assuming pages are in subdirectories):
|
||||
```md
|
||||
# My Page
|
||||
|
||||
[Introduction](../intro/introduction.md)
|
||||
[Setup](setup.md)
|
||||
[Guide](guides:advanced) <!-- Cross-collection link unchanged -->
|
||||
```
|
||||
|
||||
### Link Rules
|
||||
|
||||
1. **Name Normalization**: All page names are normalized using `name_fix()` (lowercase, underscores, etc.)
|
||||
2. **Same Collection Only**: `fix_links()` only rewrites links within the same collection
|
||||
3. **Cross-Collection Links**: Links with explicit collection references (e.g., `guides:page`) are validated but not rewritten
|
||||
4. **External Links**: HTTP(S), mailto, and anchor links are ignored
|
||||
5. **Error Reporting**: Broken links are reported with file, line number, and link details
|
||||
|
||||
### Export with Link Validation
|
||||
|
||||
Links are automatically validated during export:
|
||||
|
||||
```v
|
||||
a.export(
|
||||
destination: './output'
|
||||
include: true
|
||||
)!
|
||||
|
||||
// Errors are printed for each collection automatically
|
||||
```
|
||||
|
||||
## Redis Integration
|
||||
|
||||
Atlas uses Redis to store metadata about collections, pages, images, and files for fast lookups and caching.
|
||||
|
||||
### Redis Data Structure
|
||||
|
||||
When `redis: true` is set during export, Atlas stores:
|
||||
|
||||
1. **Collection Paths** - Hash: `atlas:path`
|
||||
- Key: collection name
|
||||
- Value: exported collection directory path
|
||||
|
||||
2. **Collection Contents** - Hash: `atlas:<collection_name>`
|
||||
- Pages: `page_name` → `page_name.md`
|
||||
- Images: `image_name.ext` → `img/image_name.ext`
|
||||
- Files: `file_name.ext` → `files/file_name.ext`
|
||||
|
||||
### Redis Usage Examples
|
||||
|
||||
```v
|
||||
import incubaid.herolib.data.atlas
|
||||
import incubaid.herolib.core.base
|
||||
|
||||
// Export with Redis metadata (default)
|
||||
mut a := atlas.new(name: 'docs')!
|
||||
a.scan(path: './docs')!
|
||||
a.export(
|
||||
destination: './output'
|
||||
redis: true // Store metadata in Redis
|
||||
)!
|
||||
|
||||
// Later, retrieve metadata from Redis
|
||||
mut context := base.context()!
|
||||
mut redis := context.redis()!
|
||||
|
||||
// Get collection path
|
||||
col_path := redis.hget('atlas:path', 'guides')!
|
||||
println('Guides collection exported to: ${col_path}')
|
||||
|
||||
// Get page location
|
||||
page_path := redis.hget('atlas:guides', 'introduction')!
|
||||
println('Introduction page: ${page_path}') // Output: introduction.md
|
||||
|
||||
// Get image location
|
||||
img_path := redis.hget('atlas:guides', 'logo.png')!
|
||||
println('Logo image: ${img_path}') // Output: img/logo.png
|
||||
```
|
||||
|
||||
|
||||
## Atlas Save/Load Functionality
|
||||
|
||||
This document describes the save/load functionality for Atlas collections, which allows you to persist collection metadata to JSON files and load them in both V and Python.
|
||||
|
||||
## Overview
|
||||
|
||||
The Atlas module now supports:
|
||||
- **Saving collections** to `.collection.json` files
|
||||
- **Loading collections** from `.collection.json` files in V
|
||||
- **Loading collections** from `.collection.json` files in Python
|
||||
|
||||
This enables:
|
||||
1. Persistence of collection metadata (pages, images, files, errors)
|
||||
2. Cross-language access to Atlas data
|
||||
3. Faster loading without re-scanning directories
|
||||
|
||||
## V Implementation
|
||||
|
||||
### Saving Collections
|
||||
|
||||
```v
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// Create and scan atlas
|
||||
mut a := atlas.new(name: 'my_docs')!
|
||||
a.scan(path: './docs')!
|
||||
|
||||
// Save all collections (creates .collection.json in each collection dir)
|
||||
a.save_all()!
|
||||
|
||||
// Or save a single collection
|
||||
col := a.get_collection('guides')!
|
||||
col.save()!
|
||||
```
|
||||
|
||||
### Loading Collections
|
||||
|
||||
```v
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// Load single collection
|
||||
mut a := atlas.new(name: 'loaded')!
|
||||
mut col := a.load_collection('/path/to/collection')!
|
||||
|
||||
println('Pages: ${col.pages.len}')
|
||||
|
||||
// Load all collections from directory tree
|
||||
mut a2 := atlas.new(name: 'all_docs')!
|
||||
a2.load_from_directory('./docs')!
|
||||
|
||||
println('Loaded ${a2.collections.len} collections')
|
||||
```
|
||||
|
||||
### What Gets Saved
|
||||
|
||||
The `.collection.json` file contains:
|
||||
- Collection name and path
|
||||
- All pages (name, path, collection_name)
|
||||
- All images (name, ext, path, ftype)
|
||||
- All files (name, ext, path, ftype)
|
||||
- All errors (category, page_key, message, file)
|
||||
|
||||
**Note:** Circular references (`atlas` and `collection` pointers) are automatically skipped using the `[skip]` attribute and reconstructed during load.
|
||||
|
||||
## Python Implementation
|
||||
|
||||
### Installation
|
||||
|
||||
The Python loader is a standalone script with no external dependencies (uses only Python stdlib):
|
||||
|
||||
```bash
|
||||
# No installation needed - just use the script
|
||||
python3 lib/data/atlas/atlas_loader.py
|
||||
```
|
||||
|
||||
### Loading Collections
|
||||
|
||||
```python
|
||||
from atlas_loader import Atlas
|
||||
|
||||
# Load single collection
|
||||
atlas = Atlas.load_collection('/path/to/collection')
|
||||
|
||||
# Or load all collections from directory tree
|
||||
atlas = Atlas.load_from_directory('/path/to/docs')
|
||||
|
||||
# Access collections
|
||||
col = atlas.get_collection('guides')
|
||||
print(f"Pages: {len(col.pages)}")
|
||||
|
||||
# Access pages
|
||||
page = atlas.page_get('guides:intro')
|
||||
if page:
|
||||
content = page.content()
|
||||
print(content)
|
||||
|
||||
# Check for errors
|
||||
if atlas.has_errors():
|
||||
atlas.print_all_errors()
|
||||
```
|
||||
|
||||
### Python API
|
||||
|
||||
#### Atlas Class
|
||||
|
||||
- `Atlas.load_collection(path, name='default')` - Load single collection
|
||||
- `Atlas.load_from_directory(path, name='default')` - Load all collections from directory tree
|
||||
- `atlas.get_collection(name)` - Get collection by name
|
||||
- `atlas.page_get(key)` - Get page using 'collection:page' format
|
||||
- `atlas.image_get(key)` - Get image using 'collection:image' format
|
||||
- `atlas.file_get(key)` - Get file using 'collection:file' format
|
||||
- `atlas.list_collections()` - List all collection names
|
||||
- `atlas.list_pages()` - List all pages grouped by collection
|
||||
- `atlas.has_errors()` - Check if any collection has errors
|
||||
- `atlas.print_all_errors()` - Print errors from all collections
|
||||
|
||||
#### Collection Class
|
||||
|
||||
- `collection.page_get(name)` - Get page by name
|
||||
- `collection.image_get(name)` - Get image by name
|
||||
- `collection.file_get(name)` - Get file by name
|
||||
- `collection.has_errors()` - Check if collection has errors
|
||||
- `collection.error_summary()` - Get error count by category
|
||||
- `collection.print_errors()` - Print all errors
|
||||
|
||||
#### Page Class
|
||||
|
||||
- `page.key()` - Get page key in format 'collection:page'
|
||||
- `page.content()` - Read page content from file
|
||||
|
||||
#### File Class
|
||||
|
||||
- `file.file_name` - Get full filename with extension
|
||||
- `file.is_image()` - Check if file is an image
|
||||
- `file.read()` - Read file content as bytes
|
||||
|
||||
## Workflow
|
||||
|
||||
### 1. V: Create and Save
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// Create atlas and scan
|
||||
mut a := atlas.new(name: 'my_docs')!
|
||||
a.scan(path: './docs')!
|
||||
|
||||
// Validate
|
||||
a.validate_links()!
|
||||
|
||||
// Save all collections (creates .collection.json in each collection dir)
|
||||
a.save_all()!
|
||||
|
||||
println('Saved ${a.collections.len} collections')
|
||||
```
|
||||
|
||||
### 2. V: Load and Use
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// Load single collection
|
||||
mut a := atlas.new(name: 'loaded')!
|
||||
mut col := a.load_collection('/path/to/collection')!
|
||||
|
||||
println('Pages: ${col.pages.len}')
|
||||
|
||||
// Load all from directory
|
||||
mut a2 := atlas.new(name: 'all_docs')!
|
||||
a2.load_from_directory('./docs')!
|
||||
|
||||
println('Loaded ${a2.collections.len} collections')
|
||||
```
|
||||
|
||||
### 3. Python: Load and Use
|
||||
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from atlas_loader import Atlas
|
||||
|
||||
# Load single collection
|
||||
atlas = Atlas.load_collection('/path/to/collection')
|
||||
|
||||
# Or load all collections
|
||||
atlas = Atlas.load_from_directory('/path/to/docs')
|
||||
|
||||
# Access pages
|
||||
page = atlas.page_get('guides:intro')
|
||||
if page:
|
||||
content = page.content()
|
||||
print(content)
|
||||
|
||||
# Check errors
|
||||
if atlas.has_errors():
|
||||
atlas.print_all_errors()
|
||||
```
|
||||
|
||||
## File Structure
|
||||
|
||||
After saving, each collection directory will contain:
|
||||
|
||||
```
|
||||
collection_dir/
|
||||
├── .collection # Original collection config
|
||||
├── .collection.json # Saved collection metadata (NEW)
|
||||
├── page1.md
|
||||
├── page2.md
|
||||
└── img/
|
||||
└── image1.png
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
Errors are preserved during save/load:
|
||||
|
||||
```v
|
||||
// V: Errors are saved
|
||||
mut a := atlas.new()!
|
||||
a.scan(path: './docs')!
|
||||
a.validate_links()! // May generate errors
|
||||
a.save_all()! // Errors are saved to .collection.json
|
||||
|
||||
// V: Errors are loaded
|
||||
mut a2 := atlas.new()!
|
||||
a2.load_from_directory('./docs')!
|
||||
col := a2.get_collection('guides')!
|
||||
if col.has_errors() {
|
||||
col.print_errors()
|
||||
}
|
||||
```
|
||||
|
||||
```python
|
||||
# Python: Access errors
|
||||
atlas = Atlas.load_from_directory('./docs')
|
||||
|
||||
if atlas.has_errors():
|
||||
atlas.print_all_errors()
|
||||
|
||||
# Get error summary
|
||||
col = atlas.get_collection('guides')
|
||||
if col.has_errors():
|
||||
summary = col.error_summary()
|
||||
for category, count in summary.items():
|
||||
print(f"{category}: {count}")
|
||||
```
|
||||
|
||||
|
||||
|
||||
## HeroScript Integration
|
||||
|
||||
Atlas integrates with HeroScript, allowing you to define Atlas operations in `.vsh` or playbook files.
|
||||
|
||||
### Available Actions
|
||||
|
||||
#### 1. `atlas.scan` - Scan Directory for Collections
|
||||
|
||||
Scan a directory tree to find and load collections marked with `.collection` files.
|
||||
|
||||
```heroscript
|
||||
!!atlas.scan
|
||||
name: 'main'
|
||||
path: './docs'
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `name` (optional, default: 'main') - Atlas instance name
|
||||
- `path` (required) - Directory path to scan
|
||||
|
||||
#### 2. `atlas.load` - Load from Saved Collections
|
||||
|
||||
Load collections from `.collection.json` files (previously saved with `atlas.save`).
|
||||
|
||||
```heroscript
|
||||
!!atlas.load
|
||||
name: 'main'
|
||||
path: './docs'
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `name` (optional, default: 'main') - Atlas instance name
|
||||
- `path` (required) - Directory path containing `.collection.json` files
|
||||
|
||||
#### 3. `atlas.validate` - Validate All Links
|
||||
|
||||
Validate all markdown links in all collections.
|
||||
|
||||
```heroscript
|
||||
!!atlas.validate
|
||||
name: 'main'
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `name` (optional, default: 'main') - Atlas instance name
|
||||
|
||||
#### 4. `atlas.fix_links` - Fix All Links
|
||||
|
||||
Automatically rewrite all local links with correct relative paths.
|
||||
|
||||
```heroscript
|
||||
!!atlas.fix_links
|
||||
name: 'main'
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `name` (optional, default: 'main') - Atlas instance name
|
||||
|
||||
#### 5. `atlas.save` - Save Collections
|
||||
|
||||
Save all collections to `.collection.json` files in their respective directories.
|
||||
|
||||
```heroscript
|
||||
!!atlas.save
|
||||
name: 'main'
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `name` (optional, default: 'main') - Atlas instance name
|
||||
|
||||
#### 6. `atlas.export` - Export Collections
|
||||
|
||||
Export collections to a destination directory.
|
||||
|
||||
```heroscript
|
||||
!!atlas.export
|
||||
name: 'main'
|
||||
destination: './output'
|
||||
reset: true
|
||||
include: true
|
||||
redis: true
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `name` (optional, default: 'main') - Atlas instance name
|
||||
- `destination` (required) - Export destination path
|
||||
- `reset` (optional, default: true) - Clear destination before export
|
||||
- `include` (optional, default: true) - Process `!!include` actions
|
||||
- `redis` (optional, default: true) - Store metadata in Redis
|
||||
|
||||
### Complete Workflow Examples
|
||||
|
||||
#### Example 1: Scan, Validate, and Export
|
||||
|
||||
```heroscript
|
||||
# Scan for collections
|
||||
!!atlas.scan
|
||||
path: '~/docs/myproject'
|
||||
|
||||
# Validate all links
|
||||
!!atlas.validate
|
||||
|
||||
# Export to output directory
|
||||
!!atlas.export
|
||||
destination: '~/docs/output'
|
||||
include: true
|
||||
```
|
||||
|
||||
#### Example 2: Load, Fix Links, and Export
|
||||
|
||||
```heroscript
|
||||
# Load from saved collections
|
||||
!!atlas.load
|
||||
path: '~/docs/myproject'
|
||||
|
||||
# Fix all broken links
|
||||
!!atlas.fix_links
|
||||
|
||||
# Save updated collections
|
||||
!!atlas.save
|
||||
|
||||
# Export
|
||||
!!atlas.export
|
||||
destination: '~/docs/output'
|
||||
```
|
||||
|
||||
#### Example 3: Multiple Atlas Instances
|
||||
|
||||
```heroscript
|
||||
# Main documentation
|
||||
!!atlas.scan
|
||||
name: 'docs'
|
||||
path: '~/docs'
|
||||
|
||||
# API reference
|
||||
!!atlas.scan
|
||||
name: 'api'
|
||||
path: '~/api-docs'
|
||||
|
||||
# Export docs
|
||||
!!atlas.export
|
||||
name: 'docs'
|
||||
destination: '~/output/docs'
|
||||
|
||||
# Export API
|
||||
!!atlas.export
|
||||
name: 'api'
|
||||
destination: '~/output/api'
|
||||
```
|
||||
|
||||
#### Example 4: Development Workflow
|
||||
|
||||
```heroscript
|
||||
# Scan collections
|
||||
!!atlas.scan
|
||||
path: './docs'
|
||||
|
||||
# Validate links (errors will be reported)
|
||||
!!atlas.validate
|
||||
|
||||
# Fix links automatically
|
||||
!!atlas.fix_links
|
||||
|
||||
# Save updated collections
|
||||
!!atlas.save
|
||||
|
||||
# Export final version
|
||||
!!atlas.export
|
||||
destination: './public'
|
||||
include: true
|
||||
redis: true
|
||||
```
|
||||
|
||||
### Using in V Scripts
|
||||
|
||||
Create a `.vsh` script to process Atlas operations:
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.core.playbook
|
||||
import incubaid.herolib.data.atlas
|
||||
|
||||
// Define your HeroScript content
|
||||
heroscript := "
|
||||
!!atlas.scan
|
||||
path: './docs'
|
||||
|
||||
!!atlas.validate
|
||||
|
||||
!!atlas.export
|
||||
destination: './output'
|
||||
include: true
|
||||
"
|
||||
|
||||
// Create playbook from text
|
||||
mut plbook := playbook.new(text: heroscript)!
|
||||
|
||||
// Execute atlas actions
|
||||
atlas.play(mut plbook)!
|
||||
|
||||
println('Atlas processing complete!')
|
||||
```
|
||||
|
||||
### Using in Playbook Files
|
||||
|
||||
Create a `docs.play` file:
|
||||
|
||||
```heroscript
|
||||
!!atlas.scan
|
||||
name: 'main'
|
||||
path: '~/code/docs'
|
||||
|
||||
!!atlas.validate
|
||||
|
||||
!!atlas.fix_links
|
||||
|
||||
!!atlas.save
|
||||
|
||||
!!atlas.export
|
||||
destination: '~/code/output'
|
||||
reset: true
|
||||
include: true
|
||||
redis: true
|
||||
```
|
||||
|
||||
Execute it:
|
||||
|
||||
```bash
|
||||
vrun process_docs.vsh
|
||||
```
|
||||
|
||||
Where `process_docs.vsh` contains:
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import incubaid.herolib.core.playbook
|
||||
import incubaid.herolib.core.playcmds
|
||||
|
||||
// Load and execute playbook
|
||||
mut plbook := playbook.new(path: './docs.play')!
|
||||
playcmds.run(mut plbook)!
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
Errors are automatically collected and reported:
|
||||
|
||||
```heroscript
|
||||
!!atlas.scan
|
||||
path: './docs'
|
||||
|
||||
!!atlas.validate
|
||||
|
||||
# Errors will be printed during export
|
||||
!!atlas.export
|
||||
destination: './output'
|
||||
```
|
||||
|
||||
Errors are shown in the console:
|
||||
|
||||
```
|
||||
Collection guides - Errors (2)
|
||||
[invalid_page_reference] [guides:intro]: Broken link to `guides:setup` at line 5
|
||||
[missing_include] [guides:advanced]: Included page `guides:examples` not found
|
||||
```
|
||||
|
||||
### Auto-Export Behavior
|
||||
|
||||
If you use `!!atlas.scan` or `!!atlas.load` **without** an explicit `!!atlas.export`, Atlas will automatically export to the default location (current directory).
|
||||
|
||||
To prevent the automatic export, add an explicit `!!atlas.export` action (it replaces the default export); auto-export only triggers when scan/load actions are present without one.
|
||||
|
||||
### Best Practices
|
||||
|
||||
1. **Always validate before export**: Use `!!atlas.validate` to catch broken links early
|
||||
2. **Save after fixing**: Use `!!atlas.save` after `!!atlas.fix_links` to persist changes
|
||||
3. **Use named instances**: When working with multiple documentation sets, use the `name` parameter
|
||||
4. **Enable Redis for production**: Use `redis: true` for web deployments to enable fast lookups
|
||||
5. **Process includes during export**: Keep `include: true` to embed referenced content in exported files
|
||||
76
lib/data/atlas/save.v
Normal file
76
lib/data/atlas/save.v
Normal file
@@ -0,0 +1,76 @@
|
||||
module atlas
|
||||
|
||||
import json
|
||||
import incubaid.herolib.core.pathlib
|
||||
|
||||
// Save collection to .collection.json in the collection directory
// Fields marked with @[skip] (e.g. circular back-references such as
// collection -> atlas) are omitted by json.encode, so the file holds only
// serializable collection metadata; they are restored by load_collection.
pub fn (c Collection) save() ! {
	// json.encode automatically skips fields marked with [skip]
	json_str := json.encode(c)

	// Write next to the collection's `.collection` marker file,
	// creating `.collection.json` if it does not exist yet.
	mut json_file := pathlib.get_file(
		path:   '${c.path.path}/.collection.json'
		create: true
	)!

	json_file.write(json_str)!
}
|
||||
|
||||
// Persist every collection in this atlas to its own directory
// (each collection writes a .collection.json file; see Collection.save).
pub fn (a Atlas) save() ! {
	for _, collection in a.collections {
		collection.save()!
	}
}
|
||||
|
||||
// Load collection from .collection.json file
// Reconstructs the in-memory object graph: json.encode skipped the circular
// back-references (collection -> atlas, page -> collection), so they are
// re-established here after decoding. The collection is registered on the
// atlas under its name and a reference to it is returned.
pub fn (mut a Atlas) load_collection(path string) !&Collection {
	mut json_file := pathlib.get_file(path: '${path}/.collection.json')!
	json_str := json_file.read()!

	mut col := json.decode(Collection, json_str)!

	// Fix circular references that were skipped during encode
	col.atlas = &a

	// Rebuild error cache from errors
	// (maps error hash -> seen; presumably used to deduplicate reported
	// errors — confirm against CollectionError.hash usage)
	col.error_cache = map[string]bool{}
	for err in col.errors {
		col.error_cache[err.hash()] = true
	}

	// Fix page references to collection
	// (write the updated page back into the map explicitly)
	for name, mut page in col.pages {
		page.collection = &col
		col.pages[name] = page
	}

	a.collections[col.name] = &col
	return &col
}
|
||||
|
||||
// Walk a directory tree and load every saved collection
// (.collection.json files) found within it into this atlas.
pub fn (mut a Atlas) load_from_directory(path string) ! {
	mut root := pathlib.get_dir(path: path)!
	a.scan_and_load(mut root)!
}
|
||||
|
||||
// Scan directory for .collection.json files and load them
// Recursion stops at the first directory that contains a saved collection:
// collections are not searched for inside other collections.
fn (mut a Atlas) scan_and_load(mut dir pathlib.Path) ! {
	// Check if this directory has .collection.json
	if dir.file_exists('.collection.json') {
		a.load_collection(dir.path)!
		return
	}

	// Scan subdirectories
	mut entries := dir.list(recursive: false)!
	for mut entry in entries.paths {
		// Skip plain files and hidden/underscore-prefixed directories
		if !entry.is_dir() || should_skip_dir(entry) {
			continue
		}

		mut mutable_entry := entry
		a.scan_and_load(mut mutable_entry)!
	}
}
|
||||
104
lib/data/atlas/scan.v
Normal file
104
lib/data/atlas/scan.v
Normal file
@@ -0,0 +1,104 @@
|
||||
module atlas
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
import incubaid.herolib.data.paramsparser
|
||||
import incubaid.herolib.core.texttools
|
||||
import os
|
||||
|
||||
// Parameters for scanning a directory tree for collections.
@[params]
pub struct ScanArgs {
pub mut:
	path string @[required] // root directory to scan
	save bool = true // save atlas after scan
}
|
||||
|
||||
// Scan a directory for collections
// A directory containing a `.collection` marker file is registered as a
// collection and is NOT descended into further (no nested collections);
// otherwise its subdirectories are scanned recursively.
fn (mut a Atlas) scan_directory(mut dir pathlib.Path) ! {
	if !dir.is_dir() {
		return error('Path is not a directory: ${dir.path}')
	}

	// Check if this directory is a collection
	if is_collection_dir(dir) {
		collection_name := get_collection_name(mut dir)!
		a.add_collection(path: dir.path, name: collection_name)!
		return
	}

	// Scan subdirectories
	mut entries := dir.list(recursive: false)!
	for mut entry in entries.paths {
		// Skip plain files and hidden/underscore-prefixed directories
		if !entry.is_dir() || should_skip_dir(entry) {
			continue
		}

		mut mutable_entry := entry
		a.scan_directory(mut mutable_entry)!
	}
}
|
||||
|
||||
// Check if directory is a collection
// A directory is a collection root when it contains a `.collection` marker
// file (which may optionally carry a `name:` parameter; see
// get_collection_name).
fn is_collection_dir(path pathlib.Path) bool {
	return path.file_exists('.collection')
}
|
||||
|
||||
// Get collection name from .collection file
// Defaults to the directory name; if the `.collection` file is non-empty
// and contains a `name:` parameter, that value is used instead. The result
// is normalized via texttools.name_fix.
fn get_collection_name(mut path pathlib.Path) !string {
	mut collection_name := path.name()
	mut filepath := path.file_get('.collection')!

	content := filepath.read()!
	if content.trim_space() != '' {
		mut params := paramsparser.parse(content)!
		if params.exists('name') {
			collection_name = params.get('name')!
		}
	}

	return texttools.name_fix(collection_name)
}
|
||||
|
||||
// Directories whose name begins with '.' or '_' are excluded from scanning.
fn should_skip_dir(entry pathlib.Path) bool {
	return entry.name().starts_with('.') || entry.name().starts_with('_')
}
|
||||
|
||||
// Scan collection directory for files
// Entry point: recursively walks the collection's own directory tree,
// registering markdown pages, images, and other files (see scan_path).
fn (mut c Collection) scan() ! {
	c.scan_path(mut c.path)!
}
|
||||
|
||||
// Recursively walk `dir`, classifying each entry by extension:
// .md files become pages; common image extensions become images;
// everything else is registered as a generic file. Hidden and
// underscore-prefixed entries (files AND directories) are skipped.
fn (mut c Collection) scan_path(mut dir pathlib.Path) ! {
	mut entries := dir.list(recursive: false)!

	for mut entry in entries.paths {
		// Skip hidden files/dirs
		if entry.name().starts_with('.') || entry.name().starts_with('_') {
			continue
		}

		if entry.is_dir() {
			// Recursively scan subdirectories
			mut mutable_entry := entry
			c.scan_path(mut mutable_entry)!
			continue
		}

		// Process files based on extension
		match entry.extension_lower() {
			'md' {
				mut mutable_entry := entry
				c.add_page(mut mutable_entry)!
			}
			'png', 'jpg', 'jpeg', 'gif', 'svg' {
				mut mutable_entry := entry
				c.add_image(mut mutable_entry)!
			}
			else {
				mut mutable_entry := entry
				c.add_file(mut mutable_entry)!
			}
		}
	}
}
|
||||
@@ -6,9 +6,10 @@ import incubaid.herolib.web.site
|
||||
|
||||
pub struct Configuration {
|
||||
pub mut:
|
||||
main Main
|
||||
navbar Navbar
|
||||
footer Footer
|
||||
main Main
|
||||
navbar Navbar
|
||||
footer Footer
|
||||
announcement AnnouncementBar
|
||||
}
|
||||
|
||||
pub struct Main {
|
||||
@@ -75,6 +76,15 @@ pub mut:
|
||||
to string @[omitempty]
|
||||
}
|
||||
|
||||
pub struct AnnouncementBar {
|
||||
pub mut:
|
||||
id string @[json: 'id']
|
||||
content string @[json: 'content']
|
||||
background_color string @[json: 'backgroundColor']
|
||||
text_color string @[json: 'textColor']
|
||||
is_closeable bool @[json: 'isCloseable']
|
||||
}
|
||||
|
||||
// ... (struct definitions remain the same) ...
|
||||
|
||||
// This function is now a pure transformer: site.SiteConfig -> docusaurus.Configuration
|
||||
@@ -107,7 +117,7 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
|
||||
}
|
||||
|
||||
cfg := Configuration{
|
||||
main: Main{
|
||||
main: Main{
|
||||
title: site_cfg.title
|
||||
tagline: site_cfg.tagline
|
||||
favicon: site_cfg.favicon
|
||||
@@ -137,7 +147,7 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
|
||||
copyright: site_cfg.copyright
|
||||
name: site_cfg.name
|
||||
}
|
||||
navbar: Navbar{
|
||||
navbar: Navbar{
|
||||
title: site_cfg.menu.title
|
||||
logo: Logo{
|
||||
alt: site_cfg.menu.logo_alt
|
||||
@@ -146,10 +156,17 @@ fn new_configuration(site_cfg site.SiteConfig) !Configuration {
|
||||
}
|
||||
items: nav_items
|
||||
}
|
||||
footer: Footer{
|
||||
footer: Footer{
|
||||
style: site_cfg.footer.style
|
||||
links: footer_links
|
||||
}
|
||||
announcement: AnnouncementBar{
|
||||
id: site_cfg.announcement.id
|
||||
content: site_cfg.announcement.content
|
||||
background_color: site_cfg.announcement.background_color
|
||||
text_color: site_cfg.announcement.text_color
|
||||
is_closeable: site_cfg.announcement.is_closeable
|
||||
}
|
||||
}
|
||||
return config_fix(cfg)!
|
||||
}
|
||||
|
||||
@@ -31,6 +31,9 @@ pub fn (mut docsite DocSite) generate() ! {
|
||||
mut footer_file := pathlib.get_file(path: '${cfg_path}/footer.json', create: true)!
|
||||
footer_file.write(json.encode_pretty(docsite.config.footer))!
|
||||
|
||||
mut announcement_file := pathlib.get_file(path: '${cfg_path}/announcement.json', create: true)!
|
||||
announcement_file.write(json.encode_pretty(docsite.config.announcement))!
|
||||
|
||||
docsite.generate_docs()!
|
||||
|
||||
docsite.import()!
|
||||
|
||||
@@ -86,14 +86,18 @@ fn (mut generator SiteGenerator) page_generate(args_ Page) ! {
|
||||
args.title = page_name
|
||||
}
|
||||
}
|
||||
content << "title: '${args.title}'"
|
||||
// Escape single quotes in YAML by doubling them
|
||||
escaped_title := args.title.replace("'", "''")
|
||||
content << "title: '${escaped_title}'"
|
||||
|
||||
if args.description.len > 0 {
|
||||
content << "description: '${args.description}'"
|
||||
escaped_description := args.description.replace("'", "''")
|
||||
content << "description: '${escaped_description}'"
|
||||
}
|
||||
|
||||
if args.slug.len > 0 {
|
||||
content << "slug: '${args.slug}'"
|
||||
escaped_slug := args.slug.replace("'", "''")
|
||||
content << "slug: '${escaped_slug}'"
|
||||
}
|
||||
|
||||
if args.hide_title {
|
||||
@@ -118,7 +122,7 @@ fn (mut generator SiteGenerator) page_generate(args_ Page) ! {
|
||||
}
|
||||
|
||||
// Fix links to account for nested categories
|
||||
page_content = generator.fix_links(page_content)
|
||||
page_content = generator.fix_links(page_content, args.path)
|
||||
|
||||
c += '\n${page_content}\n'
|
||||
|
||||
@@ -145,13 +149,25 @@ fn (mut generator SiteGenerator) page_generate(args_ Page) ! {
|
||||
fn (mut generator SiteGenerator) section_generate(args_ Section) ! {
|
||||
mut args := args_
|
||||
|
||||
mut c := '{
|
||||
mut c := ''
|
||||
if args.description.len > 0 {
|
||||
c = '{
|
||||
"label": "${args.label}",
|
||||
"position": ${args.position},
|
||||
"link": {
|
||||
"type": "generated-index",
|
||||
"description": "${args.description}"
|
||||
}
|
||||
}'
|
||||
} else {
|
||||
c = '{
|
||||
"label": "${args.label}",
|
||||
"position": ${args.position},
|
||||
"link": {
|
||||
"type": "generated-index"
|
||||
}
|
||||
}'
|
||||
}
|
||||
|
||||
mut category_path := '${generator.path.path}/${args.path}/_category_.json'
|
||||
mut catfile := pathlib.get_file(path: category_path, create: true)!
|
||||
@@ -159,46 +175,200 @@ fn (mut generator SiteGenerator) section_generate(args_ Section) ! {
|
||||
catfile.write(c)!
|
||||
}
|
||||
|
||||
// Fix links to account for nested categories in Docusaurus
|
||||
// Doctree exports links as ../collection/page.md but Docusaurus may have nested paths
|
||||
fn (generator SiteGenerator) fix_links(content string) string {
|
||||
// Strip numeric prefix from filename (e.g., "03_linux_installation" -> "linux_installation")
// Docusaurus automatically strips these prefixes from URLs
fn strip_numeric_prefix(name string) string {
	// Match pattern: digits followed by underscore at the start
	if name.len > 2 && name[0].is_digit() {
		for i := 1; i < name.len; i++ {
			if name[i] == `_` {
				// Found the underscore, return everything after it
				return name[i + 1..]
			}
			if !name[i].is_digit() {
				// Not a numeric prefix pattern, return as-is
				return name
			}
		}
	}
	// All-digit name, or too short to hold a prefix: nothing to strip
	return name
}
|
||||
|
||||
// Calculate relative path from current directory to target directory
// current_dir: directory of the current page (e.g., '' for root, 'tokens' for tokens/, 'farming/advanced' for nested)
// target_dir: directory of the target page
// page_name: name of the target page
// Returns: relative path (e.g., './page', '../dir/page', '../../page')
fn calculate_relative_path(current_dir string, target_dir string, page_name string) string {
	// Both at root level
	if current_dir == '' && target_dir == '' {
		return './${page_name}'
	}

	// Current at root, target in subdirectory
	if current_dir == '' && target_dir != '' {
		return './${target_dir}/${page_name}'
	}

	// Current in subdirectory, target at root
	if current_dir != '' && target_dir == '' {
		// Count directory levels to go up
		levels := current_dir.split('/').len
		up := '../'.repeat(levels)
		return '${up}${page_name}'
	}

	// Both in subdirectories
	current_parts := current_dir.split('/')
	target_parts := target_dir.split('/')

	// Find common prefix
	mut common_len := 0
	for i := 0; i < current_parts.len && i < target_parts.len; i++ {
		if current_parts[i] == target_parts[i] {
			common_len++
		} else {
			break
		}
	}

	// Calculate how many levels to go up
	up_levels := current_parts.len - common_len
	mut path_parts := []string{}

	// Add ../ for each level up
	for _ in 0 .. up_levels {
		path_parts << '..'
	}

	// Add remaining target path parts
	for i in common_len .. target_parts.len {
		path_parts << target_parts[i]
	}

	// Add page name
	path_parts << page_name

	// NOTE(review): in the shared-prefix case with up_levels == 0 the
	// result has no leading './' (e.g. 'sub/page') — confirm Docusaurus
	// resolves such links relative to the current page as intended.
	return path_parts.join('/')
}
|
||||
|
||||
// Fix links to account for nested categories and Docusaurus URL conventions
|
||||
fn (generator SiteGenerator) fix_links(content string, current_page_path string) string {
|
||||
mut result := content
|
||||
|
||||
// Build a map of collection name to actual directory path
|
||||
mut collection_paths := map[string]string{}
|
||||
// Extract current page's directory path
|
||||
mut current_dir := current_page_path.trim('/')
|
||||
if current_dir.contains('/') && !current_dir.ends_with('/') {
|
||||
last_part := current_dir.all_after_last('/')
|
||||
if last_part.contains('.') {
|
||||
current_dir = current_dir.all_before_last('/')
|
||||
}
|
||||
}
|
||||
// If path is just a filename or empty, current_dir should be empty (root level)
|
||||
if !current_dir.contains('/') && current_dir.contains('.') {
|
||||
current_dir = ''
|
||||
}
|
||||
|
||||
// Build maps for link fixing
|
||||
mut collection_paths := map[string]string{} // collection -> directory path (for nested collections)
|
||||
mut page_to_path := map[string]string{} // page_name -> full directory path in Docusaurus
|
||||
mut collection_page_map := map[string]string{} // "collection:page" -> directory path
|
||||
|
||||
for page in generator.site.pages {
|
||||
parts := page.src.split(':')
|
||||
if parts.len != 2 {
|
||||
continue
|
||||
}
|
||||
collection := parts[0]
|
||||
page_name := parts[1]
|
||||
|
||||
// Extract directory path from page.path
|
||||
// page.path can be like "appendix/internet_today/" or "appendix/internet_today/page.md"
|
||||
mut dir_path := page.path.trim('/')
|
||||
|
||||
// If path ends with a filename, remove it to get just the directory
|
||||
if dir_path.contains('/') && !dir_path.ends_with('/') {
|
||||
// Check if last part looks like a filename (has extension or is a page name)
|
||||
last_part := dir_path.all_after_last('/')
|
||||
if last_part.contains('.') || last_part == parts[1] {
|
||||
if last_part.contains('.') || last_part == page_name {
|
||||
dir_path = dir_path.all_before_last('/')
|
||||
}
|
||||
}
|
||||
|
||||
// If the directory path is different from collection name, store the mapping
|
||||
// This handles nested categories like appendix/internet_today
|
||||
// Store collection -> directory mapping for nested collections
|
||||
if dir_path != collection && dir_path != '' {
|
||||
collection_paths[collection] = dir_path
|
||||
}
|
||||
|
||||
// Store page_name -> directory path for fixing same-collection links
|
||||
// Strip numeric prefix from page_name for the map key
|
||||
clean_page_name := strip_numeric_prefix(page_name)
|
||||
page_to_path[clean_page_name] = dir_path
|
||||
|
||||
// Store collection:page -> directory path for fixing collection:page format links
|
||||
collection_page_map['${collection}:${clean_page_name}'] = dir_path
|
||||
}
|
||||
|
||||
// Replace ../collection/ with ../actual/nested/path/ for nested collections
|
||||
// STEP 1: Strip numeric prefixes from all page references in links FIRST
|
||||
mut lines := result.split('\n')
|
||||
for i, line in lines {
|
||||
if !line.contains('](') {
|
||||
continue
|
||||
}
|
||||
|
||||
mut new_line := line
|
||||
parts := line.split('](')
|
||||
if parts.len < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
for j := 1; j < parts.len; j++ {
|
||||
close_idx := parts[j].index(')') or { continue }
|
||||
link_url := parts[j][..close_idx]
|
||||
|
||||
mut new_url := link_url
|
||||
if link_url.contains('/') {
|
||||
path_part := link_url.all_before_last('/')
|
||||
file_part := link_url.all_after_last('/')
|
||||
new_file := strip_numeric_prefix(file_part)
|
||||
if new_file != file_part {
|
||||
new_url = '${path_part}/${new_file}'
|
||||
}
|
||||
} else {
|
||||
new_url = strip_numeric_prefix(link_url)
|
||||
}
|
||||
|
||||
if new_url != link_url {
|
||||
new_line = new_line.replace('](${link_url})', '](${new_url})')
|
||||
}
|
||||
}
|
||||
lines[i] = new_line
|
||||
}
|
||||
result = lines.join('\n')
|
||||
|
||||
// STEP 2: Replace ../collection/ with ../actual/nested/path/ for cross-collection links
|
||||
for collection, actual_path in collection_paths {
|
||||
result = result.replace('../${collection}/', '../${actual_path}/')
|
||||
}
|
||||
|
||||
// Remove .md extensions from all links (Docusaurus doesn't use them in URLs)
|
||||
// STEP 3: Fix same-collection links: ./page -> correct path based on Docusaurus structure
|
||||
for page_name, target_dir in page_to_path {
|
||||
old_link := './${page_name}'
|
||||
if result.contains(old_link) {
|
||||
new_link := calculate_relative_path(current_dir, target_dir, page_name)
|
||||
result = result.replace(old_link, new_link)
|
||||
}
|
||||
}
|
||||
|
||||
// STEP 4: Convert collection:page format to proper relative paths
|
||||
// Calculate relative path from current page to target page
|
||||
for collection_page, target_dir in collection_page_map {
|
||||
old_pattern := collection_page
|
||||
if result.contains(old_pattern) {
|
||||
// Extract just the page name from "collection:page"
|
||||
page_name := collection_page.all_after(':')
|
||||
new_link := calculate_relative_path(current_dir, target_dir, page_name)
|
||||
result = result.replace(old_pattern, new_link)
|
||||
}
|
||||
}
|
||||
|
||||
// STEP 5: Remove .md extensions from all links (Docusaurus doesn't use them in URLs)
|
||||
result = result.replace('.md)', ')')
|
||||
|
||||
return result
|
||||
|
||||
@@ -10,8 +10,9 @@ pub mut:
|
||||
|
||||
pub struct Section {
|
||||
pub mut:
|
||||
name string
|
||||
position int
|
||||
path string
|
||||
label string
|
||||
name string
|
||||
position int
|
||||
path string
|
||||
label string
|
||||
description string
|
||||
}
|
||||
|
||||
@@ -27,6 +27,18 @@ pub mut:
|
||||
|
||||
build_dest []BuildDest // Production build destinations (from !!site.build_dest)
|
||||
build_dest_dev []BuildDest // Development build destinations (from !!site.build_dest_dev)
|
||||
|
||||
announcement AnnouncementBar // Announcement bar configuration (from !!site.announcement)
|
||||
}
|
||||
|
||||
// Announcement bar config structure
// The @[json] attributes emit camelCase keys, matching the field names
// Docusaurus expects for its announcement bar theme configuration.
pub struct AnnouncementBar {
pub mut:
	id string @[json: 'id'] // identifier used by Docusaurus to remember dismissal
	content string @[json: 'content'] // bar text/HTML content
	background_color string @[json: 'backgroundColor']
	text_color string @[json: 'textColor']
	is_closeable bool @[json: 'isCloseable'] // whether the user can dismiss the bar
}
|
||||
|
||||
// Footer config structures
|
||||
@@ -73,7 +85,7 @@ pub mut:
|
||||
ssh_name string
|
||||
}
|
||||
|
||||
//is to import one docusaurus site into another, can be used to e.g. import static parts from one location into the build one we are building
|
||||
// is to import one docusaurus site into another, can be used to e.g. import static parts from one location into the build one we are building
|
||||
pub struct ImportItem {
|
||||
pub mut:
|
||||
name string // will normally be empty
|
||||
|
||||
@@ -50,6 +50,7 @@ pub fn play(mut plbook PlayBook) ! {
|
||||
play_import(mut plbook, mut config)!
|
||||
play_menu(mut plbook, mut config)!
|
||||
play_footer(mut plbook, mut config)!
|
||||
play_announcement(mut plbook, mut config)!
|
||||
play_publish(mut plbook, mut config)!
|
||||
play_publish_dev(mut plbook, mut config)!
|
||||
play_pages(mut plbook, mut website)!
|
||||
@@ -178,6 +179,25 @@ fn play_footer(mut plbook PlayBook, mut config SiteConfig) ! {
|
||||
}
|
||||
}
|
||||
|
||||
// Read the (single) !!site.announcement action from the playbook into
// config.announcement, applying Docusaurus-style defaults for any
// parameter not supplied. Extra announcement actions are ignored.
fn play_announcement(mut plbook PlayBook, mut config SiteConfig) ! {
	mut announcement_actions := plbook.find(filter: 'site.announcement')!
	if announcement_actions.len > 0 {
		// Only process the first announcement action
		mut action := announcement_actions[0]
		mut p := action.params

		config.announcement = AnnouncementBar{
			id: p.get_default('id', 'announcement')!
			content: p.get_default('content', '')!
			background_color: p.get_default('background_color', '#20232a')!
			text_color: p.get_default('text_color', '#fff')!
			is_closeable: p.get_default_true('is_closeable')
		}

		action.done = true // Mark the action as done
	}
}
|
||||
|
||||
fn play_publish(mut plbook PlayBook, mut config SiteConfig) ! {
|
||||
mut build_dest_actions := plbook.find(filter: 'site.publish')!
|
||||
for mut action in build_dest_actions {
|
||||
|
||||
@@ -45,6 +45,7 @@ fn play_pages(mut plbook PlayBook, mut site Site) ! {
|
||||
}
|
||||
section.label = p.get_default('label', texttools.name_fix_snake_to_pascal(section.name))!
|
||||
section.path = p.get_default('path', texttools.name_fix(section.label))!
|
||||
section.description = p.get_default('description', '')!
|
||||
|
||||
site.sections << section
|
||||
action.done = true // Mark the action as done
|
||||
@@ -119,6 +120,9 @@ fn play_pages(mut plbook PlayBook, mut site Site) ! {
|
||||
mypage.slug = p.get_default('slug', '')!
|
||||
mypage.draft = p.get_default_false('draft')
|
||||
mypage.hide_title = p.get_default_false('hide_title')
|
||||
if mypage.title.len > 0 {
|
||||
mypage.hide_title = true
|
||||
}
|
||||
mypage.title_nr = p.get_int_default('title_nr', 0)!
|
||||
|
||||
site.pages << mypage
|
||||
|
||||
@@ -5,6 +5,7 @@ The Site module provides a structured way to define website configurations, navi
|
||||
## Purpose
|
||||
|
||||
The Site module allows you to:
|
||||
|
||||
- Define website structure and configuration in a declarative way using HeroScript
|
||||
- Organize pages into sections/categories
|
||||
- Configure navigation menus and footers
|
||||
@@ -101,7 +102,7 @@ println(mysite)
|
||||
When you don't need categories, pages are added sequentially. The collection only needs to be specified once, then it's reused for subsequent pages.
|
||||
|
||||
```heroscript
|
||||
!!site.page src: "tech:introduction"
|
||||
!!site.page src: "mycelium_tech:introduction"
|
||||
description: "Introduction to ThreeFold Technology"
|
||||
slug: "/"
|
||||
|
||||
@@ -119,6 +120,7 @@ When you don't need categories, pages are added sequentially. The collection onl
|
||||
```
|
||||
|
||||
**Key Points:**
|
||||
|
||||
- First page specifies collection as `tech:introduction` (collection:page_name format)
|
||||
- Subsequent pages only need the page name (e.g., `vision`) - the `tech` collection is reused
|
||||
- If `title` is not specified, it will be extracted from the markdown file itself
|
||||
@@ -145,6 +147,7 @@ Categories (sections) help organize pages into logical groups with their own nav
|
||||
```
|
||||
|
||||
**Key Points:**
|
||||
|
||||
- `!!site.page_category` creates a new section/category
|
||||
- `name` is the internal identifier (snake_case)
|
||||
- `label` is the display name (automatically derived from `name` if not specified)
|
||||
@@ -160,7 +163,7 @@ Categories (sections) help organize pages into logical groups with their own nav
|
||||
label: "System Components"
|
||||
position: 100
|
||||
|
||||
!!site.page src: "tech:mycelium"
|
||||
!!site.page src: "mycelium_tech:mycelium"
|
||||
title: "Mycelium Network"
|
||||
description: "Peer-to-peer overlay network"
|
||||
slug: "mycelium-network"
|
||||
@@ -175,6 +178,7 @@ Categories (sections) help organize pages into logical groups with their own nav
|
||||
```
|
||||
|
||||
**Available Page Parameters:**
|
||||
|
||||
- `src`: Source reference as `collection:page_name` (required for first page in collection)
|
||||
- `title`: Page title (optional, extracted from markdown if not provided)
|
||||
- `description`: Page description for metadata
|
||||
@@ -321,4 +325,4 @@ pub mut:
|
||||
|
||||
See `examples/web/site/site_example.vsh` for a complete working example.
|
||||
|
||||
For a real-world example, check: https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech
|
||||
For a real-world example, check: <https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech>
|
||||
|
||||
Reference in New Issue
Block a user