@@ -73,7 +73,7 @@ pub mut:
}

// Add a collection to the Atlas
pub fn (mut a Atlas) add_collection(args AddCollectionArgs) ! {
pub fn (mut a Atlas) add_collection(args AddCollectionArgs) !&Collection {
    name := texttools.name_fix(args.name)
    console.print_item('Known collections: ${a.collections.keys()}')
    console.print_item("Adding collection '${name}' to Atlas '${a.name}' at path '${args.path}'")
@@ -85,6 +85,7 @@ pub fn (mut a Atlas) add_collection(args AddCollectionArgs) ! {
    col.scan()!

    a.collections[name] = &col
    return &col
}

// Scan a path for collections

@@ -375,3 +375,22 @@ pub fn (c Collection) can_write(session Session) bool {
    return false
}

// scan_groups scans the collection's directory for .group files and loads them into memory.
pub fn (mut c Collection) scan_groups() ! {
    if c.name != 'groups' {
        return error('scan_groups only works on "groups" collection')
    }

    mut entries := c.path.list(recursive: false)!

    for mut entry in entries.paths {
        if entry.extension_lower() == 'group' {
            filename := entry.name_fix_no_ext()
            mut visited := map[string]bool{}
            mut group := parse_group_file(filename, c.path.path, mut visited)!

            c.atlas.group_add(mut group)!
        }
    }
}

@@ -1,62 +1,104 @@
module atlas

import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import os

@[heap]
pub struct Group {
pub mut:
    name     string   // normalized to lowercase
    patterns []string // email patterns, normalized to lowercase
}

@[params]
pub struct GroupNewArgs {
pub mut:
    name     string   @[required]
    patterns []string @[required]
}

// Create a new Group
pub fn new_group(args GroupNewArgs) !Group {
    mut name := texttools.name_fix(args.name)
    mut patterns := args.patterns.map(it.to_lower())

    return Group{
        name:     name
        patterns: patterns
    }
}

// Check if email matches any pattern in this group
pub fn (g Group) matches(email string) bool {
    email_lower := email.to_lower()

    for pattern in g.patterns {
        if matches_pattern(email_lower, pattern) {
            return true
        }
    }
    return false
}

// Helper: match email against wildcard pattern
// '*@domain.com' matches 'user@domain.com'
// 'exact@email.com' matches only 'exact@email.com'
fn matches_pattern(email string, pattern string) bool {
    if pattern == '*' {
        return true
    }

    if !pattern.contains('*') {
        return email == pattern
    }

    // Handle wildcard patterns like '*@domain.com'
    if pattern.starts_with('*') {
        suffix := pattern[1..] // Remove the '*'
        return email.ends_with(suffix)
    }

    // Could add more complex patterns here if needed
    return false
}
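
// Illustrative examples of matches_pattern behavior, derived from the logic above:
//
//   matches_pattern('user@domain.com', '*@domain.com')    // true  - wildcard suffix match
//   matches_pattern('user@other.com', '*@domain.com')     // false
//   matches_pattern('exact@email.com', 'exact@email.com') // true  - exact match
//   matches_pattern('anyone@anywhere.io', '*')            // true  - match-all pattern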

// parse_group_file parses a single .group file, resolving includes recursively.
fn parse_group_file(filename string, base_path string, mut visited map[string]bool) !Group {
    if filename in visited {
        return error('Circular include detected: ${filename}')
    }

    visited[filename] = true

    mut group := Group{
        name:     texttools.name_fix(filename)
        patterns: []string{}
    }

    mut file_path := pathlib.get_file(path: '${base_path}/${filename}.group')!
    content := file_path.read()!

    for line_orig in content.split_into_lines() {
        line := line_orig.trim_space()
        if line.len == 0 || line.starts_with('//') {
            continue
        }

        if line.starts_with('include:') {
            mut included_name := line.trim_string_left('include:').trim_space()
            included_name = included_name.replace('.group', '') // Remove .group if present
            include_path := '${base_path}/${included_name}.group'
            if !os.exists(include_path) {
                return error('Included group file not found: ${included_name}.group')
            }
            included_group := parse_group_file(included_name, base_path, mut visited)!

            group.patterns << included_group.patterns
        } else {
            group.patterns << line.to_lower()
        }
    }

    return group
}
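
// A hypothetical admins.group file illustrating the format parse_group_file accepts
// (file and group names here are examples only):
//
//   // full-line comments and blank lines are ignored
//   *@example.com
//   alice@partner.org
//   include: operators
//
// The include line pulls in all patterns from operators.group, which must live in the
// same folder; includes resolve recursively, and the visited map rejects circular chains.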

lib/data/atlas/instruction.md (new file, 15 lines)
@@ -0,0 +1,15 @@
in atlas/

check the format of groups
see content/groups

the group files now end with .group

check how the include works: a group can include another group as defined, but only within the same folder

in the scan function in atlas, add a scan_groups function that finds the groups; only do this for the collection named groups
do not add the groups collection to the atlas, it is a system collection

make the groups and add them to atlas

give clear instructions for the coding agent on how to write the code

@@ -416,266 +416,59 @@ println('Logo image: ${img_path}') // Output: img/logo.png
```

## Atlas Save/Load Functionality

This document describes the save/load functionality for Atlas collections, which allows you to persist collection metadata to JSON files and load them in both V and Python.

## Overview

The Atlas module now supports:

- **Saving collections** to `.collection.json` files
- **Loading collections** from `.collection.json` files in V
- **Loading collections** from `.collection.json` files in Python

## Saving Collections (Beta)

This enables:

1. Persistence of collection metadata (pages, images, files, errors)
2. Cross-language access to Atlas data
3. Faster loading without re-scanning directories

**Status:** Basic save functionality is implemented. Load functionality is work-in-progress.

## V Implementation

### Saving to JSON

### Saving Collections

Save collection metadata to JSON files for archival or cross-tool compatibility:

```v
import incubaid.herolib.data.atlas

// Create and scan atlas
mut a := atlas.new(name: 'my_docs')!
a.scan(path: './docs')!

// Save all collections (creates .collection.json in each collection dir)
a.save_all()!

// Or save a single collection
col := a.get_collection('guides')!
col.save()!
```

### Loading Collections

```v
import incubaid.herolib.data.atlas

// Load single collection
mut a := atlas.new(name: 'loaded')!
mut col := a.load_collection('/path/to/collection')!

println('Pages: ${col.pages.len}')

// Load all collections from directory tree
mut a2 := atlas.new(name: 'all_docs')!
a2.load_from_directory('./docs')!

println('Loaded ${a2.collections.len} collections')

// Save all collections to a specified directory
// Creates: ${save_path}/${collection_name}.json
a.save('./metadata')!
```

### What Gets Saved

The `.collection.json` file contains:

- Collection name and path
- All pages (name, path, collection_name)
- All images (name, ext, path, ftype)
- All files (name, ext, path, ftype)

Each `.json` file contains:

- Collection metadata (name, path, git URL, git branch)
- All pages (with paths and collection references)
- All images and files (with paths and types)
- All errors (category, page_key, message, file)

**Note:** Circular references (`atlas` and `collection` pointers) are automatically skipped using the `[skip]` attribute and reconstructed during load.
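
To make the shape concrete, here is a minimal, hypothetical sketch of a saved file using only the fields listed above (actual field names and nesting are determined by the V structs and may differ):

```json
{
  "name": "guides",
  "path": "/docs/guides",
  "pages": [
    { "name": "intro", "path": "/docs/guides/intro.md", "collection_name": "guides" }
  ],
  "images": [
    { "name": "logo", "ext": "png", "path": "/docs/guides/img/logo.png", "ftype": "image" }
  ],
  "files": [],
  "errors": []
}
```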

## Python Implementation

### Installation

The Python loader is a standalone script with no external dependencies (uses only Python stdlib):

```bash
# No installation needed - just use the script
python3 lib/data/atlas/atlas_loader.py
```

### Loading Collections

```python
from atlas_loader import Atlas

# Load single collection
atlas = Atlas.load_collection('/path/to/collection')

# Or load all collections from directory tree
atlas = Atlas.load_from_directory('/path/to/docs')

# Access collections
col = atlas.get_collection('guides')
print(f"Pages: {len(col.pages)}")

# Access pages
page = atlas.page_get('guides:intro')
if page:
    content = page.content()
    print(content)

# Check for errors
if atlas.has_errors():
    atlas.print_all_errors()
```

### Python API

#### Atlas Class

- `Atlas.load_collection(path, name='default')` - Load single collection
- `Atlas.load_from_directory(path, name='default')` - Load all collections from directory tree
- `atlas.get_collection(name)` - Get collection by name
- `atlas.page_get(key)` - Get page using 'collection:page' format
- `atlas.image_get(key)` - Get image using 'collection:image' format
- `atlas.file_get(key)` - Get file using 'collection:file' format
- `atlas.list_collections()` - List all collection names
- `atlas.list_pages()` - List all pages grouped by collection
- `atlas.has_errors()` - Check if any collection has errors
- `atlas.print_all_errors()` - Print errors from all collections

#### Collection Class

- `collection.page_get(name)` - Get page by name
- `collection.image_get(name)` - Get image by name
- `collection.file_get(name)` - Get file by name
- `collection.has_errors()` - Check if collection has errors
- `collection.error_summary()` - Get error count by category
- `collection.print_errors()` - Print all errors

#### Page Class

- `page.key()` - Get page key in format 'collection:page'
- `page.content()` - Read page content from file

#### File Class

- `file.file_name` - Get full filename with extension
- `file.is_image()` - Check if file is an image
- `file.read()` - Read file content as bytes
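
A short sketch of how these calls combine (the collection and file names here are hypothetical):

```python
from atlas_loader import Atlas

atlas = Atlas.load_from_directory('/path/to/docs')
col = atlas.get_collection('guides')

# Look up a file by name and read it only if it is an image
f = col.file_get('diagram')
if f and f.is_image():
    data = f.read()
    print(f"{f.file_name}: {len(data)} bytes")
```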

## Workflow

### 1. V: Create and Save

```v
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.data.atlas

// Create atlas and scan
mut a := atlas.new(name: 'my_docs')!
a.scan(path: './docs')!

// Validate
a.validate_links()!

// Save all collections (creates .collection.json in each collection dir)
a.save_all()!

println('Saved ${a.collections.len} collections')
```

### 2. V: Load and Use

```v
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.data.atlas

// Load single collection
mut a := atlas.new(name: 'loaded')!
mut col := a.load_collection('/path/to/collection')!

println('Pages: ${col.pages.len}')

// Load all from directory
mut a2 := atlas.new(name: 'all_docs')!
a2.load_from_directory('./docs')!

println('Loaded ${a2.collections.len} collections')
```

### 3. Python: Load and Use

```python
#!/usr/bin/env python3

from atlas_loader import Atlas

# Load single collection
atlas = Atlas.load_collection('/path/to/collection')

# Or load all collections
atlas = Atlas.load_from_directory('/path/to/docs')

# Access pages
page = atlas.page_get('guides:intro')
if page:
    content = page.content()
    print(content)

# Check errors
if atlas.has_errors():
    atlas.print_all_errors()
```

## File Structure

After saving, each collection directory will contain:

### Storage Location

```
collection_dir/
├── .collection          # Original collection config
├── .collection.json     # Saved collection metadata (NEW)
├── page1.md
├── page2.md
└── img/
    └── image1.png
```

## Error Handling

Errors are preserved during save/load:

```v
// V: Errors are saved
mut a := atlas.new()!
a.scan(path: './docs')!
a.validate_links()! // May generate errors
a.save_all()! // Errors are saved to .collection.json

// V: Errors are loaded
mut a2 := atlas.new()!
a2.load_from_directory('./docs')!
col := a2.get_collection('guides')!
if col.has_errors() {
    col.print_errors()
}
```

```python
# Python: Access errors
atlas = Atlas.load_from_directory('./docs')

if atlas.has_errors():
    atlas.print_all_errors()

# Get error summary
col = atlas.get_collection('guides')
if col.has_errors():
    summary = col.error_summary()
    for category, count in summary.items():
        print(f"{category}: {count}")
```

```
save_path/
├── collection1.json
├── collection2.json
└── collection3.json
```

**Note:** Not in the collection directories themselves - saved to a separate location you specify.

### Limitations

- Load-from-JSON functionality is not yet implemented
- Python loader is planned but not yet available
- Currently, collections must be rescanned from source files

## HeroScript Integration

Atlas integrates with HeroScript, allowing you to define Atlas operations in `.vsh` or playbook files.

### Available Actions

#### 1. `atlas.scan` - Scan Directory for Collections
#### `atlas.scan` - Scan Directory for Collections

Scan a directory tree to find and load collections marked with `.collection` files.

@@ -683,163 +476,31 @@ Scan a directory tree to find and load collections marked with `.collection` fil
!!atlas.scan
    name: 'main'
    path: './docs'
    git_url: 'https://github.com/org/repo.git' # optional
    git_root: '~/code' # optional, default: ~/code
    meta_path: './metadata' # optional, saves metadata here
    ignore: ['private', 'draft'] # optional, directories to skip
```

**Parameters:**

- `name` (optional, default: 'main') - Atlas instance name
- `path` (required) - Directory path to scan
- `path` (required when git_url not provided) - Directory path to scan
- `git_url` (alternative to path) - Git repository URL to clone/checkout
- `git_root` (optional when using git_url, default: ~/code) - Base directory for cloning
- `meta_path` (optional) - Directory to save collection metadata JSON
- `ignore` (optional) - List of directory names to skip during scan

#### 2. `atlas.load` - Load from Saved Collections

Load collections from `.collection.json` files (previously saved with `atlas.save`).

### Real Workflow Example: Scan and Export

```heroscript
!!atlas.load
    name: 'main'
    path: './docs'
```

**Parameters:**

- `name` (optional, default: 'main') - Atlas instance name
- `path` (required) - Directory path containing `.collection.json` files

#### 3. `atlas.validate` - Validate All Links

Validate all markdown links in all collections.

```heroscript
!!atlas.validate
    name: 'main'
```

**Parameters:**

- `name` (optional, default: 'main') - Atlas instance name

#### 4. `atlas.fix_links` - Fix All Links

Automatically rewrite all local links with correct relative paths.

```heroscript
!!atlas.fix_links
    name: 'main'
```

**Parameters:**

- `name` (optional, default: 'main') - Atlas instance name

#### 5. `atlas.save` - Save Collections

Save all collections to `.collection.json` files in their respective directories.

```heroscript
!!atlas.save
    name: 'main'
```

**Parameters:**

- `name` (optional, default: 'main') - Atlas instance name

#### 6. `atlas.export` - Export Collections

Export collections to a destination directory.

```heroscript
!!atlas.export
    name: 'main'
    destination: './output'
    reset: true
    include: true
    redis: true
```

**Parameters:**

- `name` (optional, default: 'main') - Atlas instance name
- `destination` (required) - Export destination path
- `reset` (optional, default: true) - Clear destination before export
- `include` (optional, default: true) - Process `!!include` actions
- `redis` (optional, default: true) - Store metadata in Redis

### Complete Workflow Examples

#### Example 1: Scan, Validate, and Export

```heroscript
# Scan for collections
!!atlas.scan
    path: '~/docs/myproject'
    meta_path: '~/docs/metadata'

# Validate all links
!!atlas.validate

# Export to output directory
!!atlas.export
    destination: '~/docs/output'
    include: true
```

#### Example 2: Load, Fix Links, and Export

```heroscript
# Load from saved collections
!!atlas.load
    path: '~/docs/myproject'

# Fix all broken links
!!atlas.fix_links

# Save updated collections
!!atlas.save

# Export
!!atlas.export
    destination: '~/docs/output'
```

#### Example 3: Multiple Atlas Instances

```heroscript
# Main documentation
!!atlas.scan
    name: 'docs'
    path: '~/docs'

# API reference
!!atlas.scan
    name: 'api'
    path: '~/api-docs'

# Export docs
!!atlas.export
    name: 'docs'
    destination: '~/output/docs'

# Export API
!!atlas.export
    name: 'api'
    destination: '~/output/api'
```

#### Example 4: Development Workflow

```heroscript
# Scan collections
!!atlas.scan
    path: './docs'

# Validate links (errors will be reported)
!!atlas.validate

# Fix links automatically
!!atlas.fix_links

# Save updated collections
!!atlas.save

# Export final version
!!atlas.export
    destination: './public'
    include: true
    redis: true
    redis: false
```

### Using in V Scripts

@@ -857,8 +518,6 @@ heroscript := "
!!atlas.scan
    path: './docs'

!!atlas.validate

!!atlas.export
    destination: './output'
    include: true
@@ -882,12 +541,6 @@ Create a `docs.play` file:
    name: 'main'
    path: '~/code/docs'

!!atlas.validate

!!atlas.fix_links

!!atlas.save

!!atlas.export
    destination: '~/code/output'
    reset: true
@@ -922,8 +575,6 @@ Errors are automatically collected and reported:
!!atlas.scan
    path: './docs'

!!atlas.validate

# Errors will be printed during export
!!atlas.export
    destination: './output'
@@ -939,14 +590,23 @@ Collection guides - Errors (2)

### Auto-Export Behavior

If you use `!!atlas.scan` or `!!atlas.load` **without** an explicit `!!atlas.export`, Atlas will automatically export to the default location (current directory).
If you use `!!atlas.scan` **without** an explicit `!!atlas.export`, Atlas will automatically export to the default location (current directory).

To disable auto-export, include an explicit (empty) export action or simply don't include any scan/load actions.
To disable auto-export, include an explicit (empty) export action or simply don't include any scan actions.
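
As an illustration, a playbook that only scans will still produce an export in the default location:

```heroscript
# No explicit !!atlas.export: Atlas auto-exports after the scan
!!atlas.scan
    path: './docs'
```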

### Best Practices

1. **Always validate before export**: Use `!!atlas.validate` to catch broken links early
2. **Save after fixing**: Use `!!atlas.save` after `!!atlas.fix_links` to persist changes
3. **Use named instances**: When working with multiple documentation sets, use the `name` parameter
4. **Enable Redis for production**: Use `redis: true` for web deployments to enable fast lookups
5. **Process includes during export**: Keep `include: true` to embed referenced content in exported files
2. **Use named instances**: When working with multiple documentation sets, use the `name` parameter
3. **Enable Redis for production**: Use `redis: true` for web deployments to enable fast lookups
4. **Process includes during export**: Keep `include: true` to embed referenced content in exported files

## Roadmap - Not Yet Implemented

The following features are planned but not yet available:

- [ ] Load collections from `.collection.json` files
- [ ] Python API for reading collections
- [ ] `atlas.validate` playbook action
- [ ] `atlas.fix_links` playbook action
- [ ] Auto-save on collection modifications
- [ ] Collection version control

@@ -22,7 +22,10 @@ fn (mut a Atlas) scan_directory(mut dir pathlib.Path, ignore_ []string) ! {
    if collection_name.to_lower() in ignore {
        return
    }
    a.add_collection(path: dir.path, name: collection_name)!
    mut col := a.add_collection(path: dir.path, name: collection_name)!
    if collection_name == 'groups' {
        col.scan_groups()!
    }
    return
}