...
@@ -73,4 +73,54 @@ fn test_export() {
	assert os.exists('${export_path}/col1/test.md')
	assert os.exists('${export_path}/col1/.collection')
}

fn test_export_with_includes() {
	// Setup: Create pages with includes
	col_path := '${test_base}/include_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col')!

	// Page 1: includes page 2
	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('# Page 1\n\n!!include page:\'test_col:page2\'\n\nEnd of page 1')!

	// Page 2: standalone content
	mut page2 := pathlib.get_file(path: '${col_path}/page2.md', create: true)!
	page2.write('## Page 2 Content\n\nThis is included.')!

	mut a := new()!
	a.add_collection(name: 'test_col', path: col_path)!

	export_path := '${test_base}/export_include'
	a.export(destination: export_path, include: true)!

	// Verify exported page1 has page2 content included
	exported := os.read_file('${export_path}/test_col/page1.md')!
	assert exported.contains('Page 2 Content')
	assert exported.contains('This is included')
	assert !exported.contains('!!include')
}

fn test_export_without_includes() {
	col_path := '${test_base}/no_include_test'
	os.mkdir_all(col_path)!

	mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
	cfile.write('name:test_col2')!

	mut page1 := pathlib.get_file(path: '${col_path}/page1.md', create: true)!
	page1.write('# Page 1\n\n!!include page:\'test_col2:page2\'\n\nEnd')!

	mut a := new()!
	a.add_collection(name: 'test_col2', path: col_path)!

	export_path := '${test_base}/export_no_include'
	a.export(destination: export_path, include: false)!

	// Verify exported page1 still has include action
	exported := os.read_file('${export_path}/test_col2/page1.md')!
	assert exported.contains('!!include')
}
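Assuming these tests live alongside the atlas module (the directory below is a guess based on the new file path further down in this commit), V's built-in test runner picks them up with:

```sh
v test lib/data/atlas/
```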
@@ -2,6 +2,8 @@ module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.texttools
import incubaid.herolib.core.base
import os

@[heap]
pub struct Collection {
@@ -11,7 +13,8 @@ pub mut:
	pages map[string]&Page
	images map[string]&File
	files map[string]&File
-	atlas &Atlas @[skip]
+	atlas &Atlas @[skip; str: skip] // Reference to parent atlas for include resolution
	errors []CollectionError
}

@[params]
@@ -29,7 +32,7 @@ fn (mut self Atlas) new_collection(args CollectionNewArgs) !Collection {
	mut col := Collection{
		name: name
		path: path
-		atlas: &self
+		atlas: &self // Set atlas reference
	}

	return col
@@ -47,6 +50,7 @@ fn (mut c Collection) add_page(mut p pathlib.Path) ! {
		name: name
		path: p
		collection_name: c.name
		collection: &c
	)!

	c.pages[name] = &p_new
@@ -114,3 +118,93 @@ pub fn (c Collection) image_exists(name string) bool {
pub fn (c Collection) file_exists(name string) bool {
	return name in c.files
}

@[params]
pub struct CollectionExportArgs {
pub mut:
	destination pathlib.Path @[required]
	reset bool = true
	include bool = true // process includes during export
	redis bool = true
}

// Export a single collection
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
	// Create collection directory
	mut col_dir := pathlib.get_dir(
		path: '${args.destination.path}/${c.name}'
		create: true
	)!

	if args.reset {
		col_dir.empty()!
	}

	// Write .collection file
	mut cfile := pathlib.get_file(
		path: '${col_dir.path}/.collection'
		create: true
	)!
	cfile.write("name:${c.name} src:'${c.path.path}'")!

	// Export pages (process includes if requested)
	for _, mut page in c.pages {
		content := page.content(include: args.include)!
		mut dest_file := pathlib.get_file(
			path: '${col_dir.path}/${page.name}.md'
			create: true
		)!
		dest_file.write(content)!

		if args.redis {
			mut context := base.context()!
			mut redis := context.redis()!
			redis.hset('atlas:${c.name}', page.name, '${page.name}.md')!
		}
	}

	// Export images
	if c.images.len > 0 {
		img_dir := pathlib.get_dir(
			path: '${col_dir.path}/img'
			create: true
		)!

		for _, mut img in c.images {
			dest_path := '${img_dir.path}/${img.file_name()}'
			img.path.copy(dest: dest_path)!

			if args.redis {
				mut context := base.context()!
				mut redis := context.redis()!
				redis.hset('atlas:${c.name}', img.file_name(), 'img/${img.file_name()}')!
			}
		}
	}

	// Export files
	if c.files.len > 0 {
		files_dir := pathlib.get_dir(
			path: '${col_dir.path}/files'
			create: true
		)!

		for _, mut file in c.files {
			dest_path := '${files_dir.path}/${file.file_name()}'
			file.path.copy(dest: dest_path)!

			if args.redis {
				mut context := base.context()!
				mut redis := context.redis()!
				redis.hset('atlas:${c.name}', file.file_name(), 'files/${file.file_name()}')!
			}
		}
	}

	// Store collection metadata in Redis
	if args.redis {
		mut context := base.context()!
		mut redis := context.redis()!
		redis.hset('atlas:path', c.name, col_dir.path)!
	}
}
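`Collection.export` is now driven by `CollectionExportArgs` with an `include` flag. A minimal sketch of calling it directly, mirroring the loop in `Atlas.export` further down (the source and destination paths and the collection name `docs` are placeholders, not part of this commit):

```v
import incubaid.herolib.core.pathlib

fn example_export() ! {
	mut a := new()!
	a.add_collection(name: 'docs', path: '/tmp/atlas_src')! // placeholder source path
	mut dest := pathlib.get_dir(path: '/tmp/atlas_export', create: true)! // placeholder destination
	for _, mut col in a.collections {
		// Resolve !!include actions while exporting; skip Redis metadata for this sketch.
		col.export(destination: dest, include: true, redis: false)!
	}
}
```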
lib/data/atlas/collection_error.v (new file, 22 lines)
@@ -0,0 +1,22 @@
module atlas

pub enum CollectionErrorCategory {
	missing_include
	include_syntax_error
	circular_include
	page_not_found
	file_not_found
	collection_not_found
	other
}

pub struct CollectionError {
pub:
	page_key string // "collection:page_name" if applicable
	message string
	category CollectionErrorCategory
}

pub fn (e CollectionError) markdown() string {
	return 'ERROR [${e.category.str()}]: ${e.message}' + (if e.page_key != '' { ' (Page: `${e.page_key}`)' } else { '' })
}
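For illustration, this is how an error record renders through `markdown()` (the page key and message below are made up):

```v
fn example_error_report() {
	err := CollectionError{
		page_key: 'docs:intro'
		message: 'Included page `docs:missing` not found.'
		category: .missing_include
	}
	// Prints: ERROR [missing_include]: Included page `docs:missing` not found. (Page: `docs:intro`)
	println(err.markdown())
}
```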
@@ -1,118 +1,30 @@
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.base
import os

@[params]
pub struct ExportArgs {
pub mut:
-	destination string
-	reset bool = true
-	redis bool = true
+	destination string
+	reset bool = true
+	include bool = true // process includes during export
+	redis bool = true
}

// Export all collections
pub fn (mut a Atlas) export(args ExportArgs) ! {
-	mut dest := pathlib.get_dir(path: args.destination, create: true)!
+	mut dest := pathlib.get_dir(path: args.destination, create: true)!

-	if args.reset {
-		dest.empty()!
-	}
+	if args.reset {
+		dest.empty()!
+	}

-	for _, mut col in a.collections {
-		col.export(
-			destination: dest
-			reset: args.reset
-			redis: args.redis
-		)!
-	}
-}
-
-@[params]
-pub struct CollectionExportArgs {
-pub mut:
-	destination pathlib.Path @[required]
-	reset bool = true
-	redis bool = true
-}
-
-// Export a single collection
-pub fn (mut c Collection) export(args CollectionExportArgs) ! {
-	// Create collection directory
-	col_dir := pathlib.get_dir(
-		path: '${args.destination.path}/${c.name}'
-		create: true
-	)!
-
-	// Write .collection file
-	mut cfile := pathlib.get_file(
-		path: '${col_dir.path}/.collection'
-		create: true
-	)!
-	cfile.write("name:${c.name} src:'${c.path.path}'")!
-
-	// Export pages
-	export_pages(c.name, c.pages.values(), col_dir, args.redis)!
-
-	// Export images
-	export_files(c.name, c.images.values(), col_dir, 'img', args.redis)!
-
-	// Export files
-	export_files(c.name, c.files.values(), col_dir, 'files', args.redis)!
-
-	// Store collection metadata in Redis if enabled
-	if args.redis {
-		mut context := base.context()!
-		mut redis := context.redis()!
-		redis.hset('atlas:path', c.name, col_dir.path)!
-	}
-}
-
-// Export pages to destination
-fn export_pages(col_name string, pages []&Page, dest pathlib.Path, redis bool) ! {
-	mut context := base.context()!
-	mut redis_client := context.redis()!
-
-	for mut page in pages {
-		// Simple copy of markdown content
-		content := page.read_content()!
-
-		mut dest_file := pathlib.get_file(
-			path: '${dest.path}/${page.name}.md'
-			create: true
-		)!
-		dest_file.write(content)!
-
-		if redis {
-			redis_client.hset('atlas:${col_name}', page.name, '${page.name}.md')!
-		}
-	}
-}
-
-// Export files/images to destination
-fn export_files(col_name string, files []&File, dest pathlib.Path, subdir string, redis bool) ! {
-	if files.len == 0 {
-		return
-	}
-
-	mut context := base.context()!
-	mut redis_client := context.redis()!
-
-	// Create subdirectory
-	files_dir := pathlib.get_dir(
-		path: '${dest.path}/${subdir}'
-		create: true
-	)!
-
-	for mut file in files {
-		dest_path := '${files_dir.path}/${file.file_name()}'
-
-		// Copy file
-		file.path.copy(dest: dest_path)!
-
-		if redis {
-			redis_client.hset('atlas:${col_name}', file.file_name(), '${subdir}/${file.file_name()}')!
-		}
-	}
-}
+	for _, mut col in a.collections {
+		col.export(
+			destination: dest
+			reset: args.reset
+			include: args.include
+			redis: args.redis
+		)!
+	}
+}
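The Redis layout written during export is unchanged: `atlas:path` maps collection names to export directories and `atlas:<collection>` maps page/file names to relative paths. A minimal read-back sketch follows; it assumes the client returned by `context.redis()` exposes an `hget` counterpart to the `hset` calls used above, which this commit does not show, and the collection/page names are placeholders:

```v
import incubaid.herolib.core.base

fn example_lookup() ! {
	mut context := base.context()!
	mut redis := context.redis()!
	// Assumption: hget(key, field) exists alongside the hset(key, field, value) used during export.
	export_dir := redis.hget('atlas:path', 'docs')! // placeholder collection name
	page_rel := redis.hget('atlas:docs', 'intro')! // placeholder page name
	println('${export_dir}/${page_rel}')
}
```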
@@ -1,35 +1,140 @@
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.texttools
import incubaid.herolib.data.atlas.collection_error { CollectionError, CollectionErrorCategory }

@[heap]
pub struct Page {
pub mut:
-	name string // name without extension
-	path pathlib.Path // full path to markdown file
-	collection_name string // parent collection name
+	name string
+	path pathlib.Path
+	collection_name string
+	collection &Collection @[skip; str: skip] // Reference to parent collection
}

@[params]
pub struct NewPageArgs {
pub:
-	name string @[required]
-	path pathlib.Path @[required]
-	collection_name string @[required]
+	name string @[required]
+	path pathlib.Path @[required]
+	collection_name string @[required]
+	collection &Collection @[required]
}

pub fn new_page(args NewPageArgs) !Page {
-	return Page{
-		name: args.name
-		path: args.path
-		collection_name: args.collection_name
-	}
+	return Page{
+		name: args.name
+		path: args.path
+		collection_name: args.collection_name
+		collection: args.collection
+	}
}

-// Simple content reading (no processing)
+// Read content without processing includes
pub fn (mut p Page) read_content() !string {
-	return p.path.read()!
+	return p.path.read()!
}

// Read content with includes processed (default behavior)
@[params]
pub struct ReadContentArgs {
pub mut:
	include bool = true
}

pub fn (mut p Page) content(args ReadContentArgs) !string {
	mut content := p.path.read()!

	if args.include {
		mut v := map[string]bool{}
		return p.process_includes(content, mut v)!
	}
	return content
}

// Recursively process includes
fn (mut p Page) process_includes(content string, mut visited map[string]bool) !string {
	mut atlas := p.collection.atlas
	// Prevent circular includes
	page_key := p.key()
	if page_key in visited {
		p.collection.errors << CollectionError{
			page_key: page_key
			message: 'Circular include detected for page `${page_key}`.'
			category: .circular_include
		}
		return '' // Return empty string for circular includes
	}
	visited[page_key] = true

	mut result := content
	mut lines := result.split_into_lines()
	mut processed_lines := []string{}

	for line in lines {
		trimmed := line.trim_space()

		// Check for include action: !!include collection:page or !!include page
		if trimmed.starts_with('!!include') {
			// Parse the include reference
			include_ref := trimmed.trim_string_left('!!include').trim_space()

			// Determine collection and page name
			mut target_collection := p.collection_name
			mut target_page := ''

			if include_ref.contains(':') {
				parts := include_ref.split(':')
				if parts.len == 2 {
					target_collection = texttools.name_fix(parts[0])
					target_page = texttools.name_fix(parts[1])
				} else {
					p.collection.errors << CollectionError{
						page_key: page_key
						message: 'Invalid include format: `${include_ref}`.'
						category: .include_syntax_error
					}
					processed_lines << '<!-- Invalid include format: ${include_ref} -->'
					continue
				}
			} else {
				target_page = texttools.name_fix(include_ref)
			}

			// Remove .md extension if present
			if target_page.ends_with('.md') {
				target_page = target_page[0..target_page.len - 3]
			}

			// Build page key
			page_ref := '${target_collection}:${target_page}'

			// Get the referenced page from atlas
			mut include_page := atlas.page_get(page_ref) or {
				p.collection.errors << CollectionError{
					page_key: page_key
					message: 'Included page `${page_ref}` not found.'
					category: .missing_include
				}
				// If page not found, keep original line as comment
				processed_lines << '<!-- Include not found: ${page_ref} -->'
				continue
			}

			// Recursively process the included page
			include_content := include_page.process_includes(include_page.read_content()!, mut
				visited)!

			processed_lines << include_content
		} else {
			processed_lines << line
		}
	}

	return processed_lines.join_lines()
}

pub fn (p Page) key() string {
-	return '${p.collection_name}:${p.name}'
-}
+	return '${p.collection_name}:${p.name}'
+}
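Put together, the new Page API separates raw reads from include resolution. A minimal sketch using the collection and page names from the tests above (the source path is a placeholder):

```v
fn example_page_read() ! {
	mut a := new()!
	a.add_collection(name: 'test_col', path: '/tmp/include_test')! // placeholder path
	mut page := a.page_get('test_col:page1')!

	raw := page.read_content()! // '!!include' line left untouched
	resolved := page.content(include: true)! // '!!include' line replaced by the referenced page's content
	assert raw != resolved
}
```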
@@ -74,34 +74,50 @@ a.export(
)!
```

-## Redis Structure
+### Include Processing

-When `redis: true` in export:
+Atlas supports simple include processing using `!!include` actions:

-```
-atlas:path -> hash of collection names to export paths
-atlas:my_collection -> hash of file names to relative paths
+```v
+// Export with includes processed (default)
+a.export(
+    destination: './output'
+    include: true // default
+)!
+
+// Export without processing includes
+a.export(
+    destination: './output'
+    include: false
+)!
```

-## Key Differences from Doctree
+#### Include Syntax

-- **No Processing**: Files are copied as-is
-- **No Includes**: No `!!wiki.include` processing
-- **No Definitions**: No `!!wiki.def` processing
-- **No Link Resolution**: Markdown links are not modified
-- **Simpler Structure**: Flat module organization
-- **Faster**: No parsing overhead
+In your markdown files:

-## When to Use
-
-Use **Atlas** when you need:
-- Simple document organization
-- Fast file copying without processing
-- Basic metadata tracking
-- Minimal overhead
-
-Use **Doctree** when you need:
-- Markdown processing and transformations
-- Include/definition resolution
-- Link rewriting
-- Complex document workflows
+```md
+# My Page
+
+!!include page:'collection:page_name'
+
+More content here
+```
+
+The `!!include` action will be replaced with the content of the referenced page during export.
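Concretely, with `page2.md` containing the `## Page 2 Content` section, an exported `page1.md` goes from this source (names follow the tests in this commit; illustrative only):

```md
# Page 1

!!include page:'test_col:page2'

End of page 1
```

to output along these lines:

```md
# Page 1

## Page 2 Content

This is included.

End of page 1
```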
+#### Reading Pages with Includes
+
+```v
+// Read with includes processed (the atlas reference is taken from the page's collection)
+mut page := a.page_get('col:mypage')!
+content := page.content(include: true)!
+
+// Read raw content without processing includes
+content := page.read_content()!
+```
+
+#### Circular Include Detection
+
+Atlas automatically detects circular includes: the offending include is skipped and a `circular_include` error is recorded on the collection.
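As an illustration (file and collection names are hypothetical), two pages that include each other do not recurse forever: the repeated visit is caught through the visited map, a `circular_include` CollectionError is recorded, and an empty string is substituted for the second include.

```md
<!-- a.md -->
# A
!!include page:'docs:b'

<!-- b.md -->
# B
!!include page:'docs:a'
```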