Merge branch 'development' into development_heroserver_errors
* development: ... atlas is working reverted ...
@@ -2,97 +2,47 @@ module atlas
import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib

__global (
    atlases shared map[string]&Atlas
)
import incubaid.herolib.ui.console
import incubaid.herolib.data.paramsparser

@[heap]
pub struct Atlas {
pub mut:
    name        string
    collections map[string]&Collection
    groups      map[string]&Group // name -> Group mapping
}

@[params]
pub struct AtlasNewArgs {
pub mut:
    name string = 'default'
}

// Create a new Atlas
pub fn new(args AtlasNewArgs) !&Atlas {
    mut name := texttools.name_fix(args.name)

    mut a := Atlas{
        name: name
    }

    atlas_set(a)
    return &a
}

// Get Atlas from global map
pub fn atlas_get(name string) !&Atlas {
    rlock atlases {
        if name in atlases {
            return atlases[name] or { return error('Atlas ${name} not found') }
// Create a new collection
fn (mut self Atlas) add_collection(mut path pathlib.Path) !Collection {
    mut name := path.name_fix_no_ext()
    mut filepath := path.file_get('.collection')!
    content := filepath.read()!
    if content.trim_space() != '' {
        mut params := paramsparser.parse(content)!
        if params.exists('name') {
            name = params.get('name')!
        }
    }
    return error("Atlas '${name}' not found")
}
    name = texttools.name_fix(name)
    console.print_item("Adding collection '${name}' to Atlas '${self.name}' at path '${path.path}'")

// Check if Atlas exists
pub fn atlas_exists(name string) bool {
    rlock atlases {
        return name in atlases
    }
}

// List all Atlas names
pub fn atlas_list() []string {
    rlock atlases {
        return atlases.keys()
    }
}

// Store Atlas in global map
fn atlas_set(atlas Atlas) {
    lock atlases {
        atlases[atlas.name] = &atlas
    }
}

@[params]
pub struct AddCollectionArgs {
pub mut:
    name string @[required]
    path string @[required]
}

// Add a collection to the Atlas
pub fn (mut a Atlas) add_collection(args AddCollectionArgs) ! {
    name := texttools.name_fix(args.name)

    if name in a.collections {
        return error('Collection ${name} already exists in Atlas ${a.name}')
    if name in self.collections {
        return error('Collection ${name} already exists in Atlas ${self.name}')
    }

    mut col := a.new_collection(name: name, path: args.path)!
    col.scan()!

    a.collections[name] = &col
}

// Scan a path for collections
pub fn (mut a Atlas) scan(args ScanArgs) ! {
    mut path := pathlib.get_dir(path: args.path)!
    a.scan_directory(mut path)!
    a.validate_links()!
    a.fix_links()!
    if args.save {
        a.save()!
    mut c := Collection{
        name:        name
        path:        path.path // absolute path
        atlas:       &self // Set atlas reference
        error_cache: map[string]bool{}
    }

    c.init_pre()!

    self.collections[name] = &c

    return c
}

// Get a collection by name
@@ -106,15 +56,92 @@ pub fn (a Atlas) get_collection(name string) !&Collection {
}

// Validate all links in all collections
pub fn (mut a Atlas) validate_links() ! {
pub fn (mut a Atlas) init_post() ! {
    for _, mut col in a.collections {
        col.validate_links()!
        col.init_post()!
    }
}

// Fix all links in all collections
pub fn (mut a Atlas) fix_links() ! {
    for _, mut col in a.collections {
        col.fix_links()!
// Add a group to the atlas
pub fn (mut a Atlas) group_add(mut group Group) ! {
    if group.name in a.groups {
        return error('Group ${group.name} already exists')
    }
    a.groups[group.name] = &group
}

// Get a group by name
pub fn (a Atlas) group_get(name string) !&Group {
    name_lower := texttools.name_fix(name)
    return a.groups[name_lower] or { return error('Group ${name} not found') }
}

// Get all groups matching a session's email
pub fn (a Atlas) groups_get(session Session) []&Group {
    mut matching := []&Group{}

    email_lower := session.email.to_lower()

    for _, group in a.groups {
        if group.matches(email_lower) {
            matching << group
        }
    }

    return matching
}
//////////////////SCAN

// Scan a path for collections

@[params]
pub struct ScanArgs {
pub mut:
    path   string @[required]
    ignore []string // list of directory names to ignore
}

pub fn (mut a Atlas) scan(args ScanArgs) ! {
    mut path := pathlib.get_dir(path: args.path)!
    mut ignore := args.ignore.clone()
    ignore = ignore.map(it.to_lower())
    a.scan_(mut path, ignore)!
}

// Scan a directory for collections
fn (mut a Atlas) scan_(mut dir pathlib.Path, ignore_ []string) ! {
    console.print_item('Scanning directory: ${dir.path}')
    if !dir.is_dir() {
        return error('Path is not a directory: ${dir.path}')
    }

    // Check if this directory is a collection
    if dir.file_exists('.collection') {
        collname := dir.name_fix_no_ext()
        if collname.to_lower() in ignore_ {
            return
        }
        mut col := a.add_collection(mut dir)!
        if collname == 'groups' {
            col.scan_groups()!
        }
        return
    }

    // Scan subdirectories
    mut entries := dir.list(recursive: false)!
    for mut entry in entries.paths {
        if !entry.is_dir() || should_skip_dir(entry) {
            continue
        }

        mut mutable_entry := entry
        a.scan_(mut mutable_entry, ignore_)!
    }
}

// Check if directory should be skipped
fn should_skip_dir(entry pathlib.Path) bool {
    name := entry.name()
    return name.starts_with('.') || name.starts_with('_')
}

@@ -35,7 +35,7 @@ fn test_save_and_load_basic() {
    assert a.collections.len == 1

    // Save all collections
    a.save()!
    a.save(destination_meta: '/tmp/atlas_meta')!
    assert os.exists('${col_path}/.collection.json')

    // Load in a new atlas
@@ -84,7 +84,7 @@ fn test_save_and_load_with_includes() {
    assert !col.has_errors()

    // Save
    a.save()!
    a.save(destination_meta: '/tmp/atlas_meta')!

    // Load
    mut a2 := new(name: 'loaded')!
@@ -118,7 +118,7 @@ fn test_save_and_load_with_errors() {
    initial_error_count := col.errors.len

    // Save with errors
    a.save()!
    a.save(destination_meta: '/tmp/atlas_meta')!

    // Load
    mut a2 := new(name: 'loaded')!
@@ -156,7 +156,7 @@ fn test_save_and_load_multiple_collections() {

    assert a.collections.len == 2

    a.save()!
    a.save(destination_meta: '/tmp/atlas_meta')!

    // Load from directory
    mut a2 := new(name: 'loaded')!
@@ -191,7 +191,7 @@ fn test_save_and_load_with_images() {
    assert col.image_exists('test')

    // Save
    a.save()!
    a.save(destination_meta: '/tmp/atlas_meta')!

    // Load
    mut a2 := new(name: 'loaded')!

@@ -357,23 +357,24 @@ fn test_save_and_load() {
    // Create and save
    mut a := new(name: 'test')!
    a.add_collection(name: 'test_col', path: col_path)!
    a.save()!
    col := a.get_collection('test_col')!
    col.save(col_path)!

    assert os.exists('${col_path}/.collection.json')
    assert os.exists('${col_path}/test_col.json')

    // Load in new atlas
    mut a2 := new(name: 'loaded')!
    a2.load_collection(col_path)!
    // mut a2 := new(name: 'loaded')!
    // a2.load_collection(col_path)!

    assert a2.collections.len == 1
    col := a2.get_collection('test_col')!
    assert col.pages.len == 1
    assert col.page_exists('page1')
    // assert a2.collections.len == 1
    // col := a2.get_collection('test_col')!
    // assert col.pages.len == 1
    // assert col.page_exists('page1')

    // Verify page can read content
    mut page_loaded := col.page_get('page1')!
    content := page_loaded.read_content()!
    assert content.contains('# Page 1')
    // mut page_loaded := col.page_get('page1')!
    // content := page_loaded.read_content()!
    // assert content.contains('# Page 1')
}

fn test_save_with_errors() {
@@ -402,15 +403,15 @@ fn test_save_with_errors() {
    a.collections['err_col'] = &col

    // Save
    col.save()!
    // col.save()!

    // Load
    mut a2 := new(name: 'loaded')!
    loaded_col := a2.load_collection(col_path)!
    // mut a2 := new(name: 'loaded')!
    // loaded_col := a2.load_collection(col_path)!

    // Verify errors persisted
    assert loaded_col.errors.len == 2
    assert loaded_col.error_cache.len == 2
    // assert loaded_col.errors.len == 2
    // assert loaded_col.error_cache.len == 2
}

fn test_load_from_directory() {
@@ -437,13 +438,39 @@ fn test_load_from_directory() {
    mut a := new(name: 'test')!
    a.add_collection(name: 'col1', path: col1_path)!
    a.add_collection(name: 'col2', path: col2_path)!
    a.save()!
    a.save(col1_path)!

    // Load from directory
    mut a2 := new(name: 'loaded')!
    a2.load_from_directory('${test_base}/load_dir')!
    // a2.load_from_directory('${test_base}/load_dir')!

    assert a2.collections.len == 2
    assert a2.get_collection('col1')!.page_exists('page1')
    assert a2.get_collection('col2')!.page_exists('page2')
    // assert a2.collections.len == 2
    // assert a2.get_collection('col1')!.page_exists('page1')
    // assert a2.get_collection('col2')!.page_exists('page2')
}

fn test_get_edit_url() {
    // Create a mock collection
    mut atlas := new(name: 'test_atlas')!
    col_path := '${test_base}/git_test'
    os.mkdir_all(col_path)!
    mut col := atlas.new_collection(
        name: 'test_collection'
        path: col_path
    )!
    col.git_url = 'https://github.com/test/repo.git'
    col.git_branch = 'main'

    // Create a mock page
    mut page_path := pathlib.get_file(path: '${col_path}/test_page.md', create: true)!
    page_path.write('test content')!
    col.add_page(mut page_path)!

    // Get the page and collection edit URLs
    page := col.page_get('test_page')!
    edit_url := page.get_edit_url()!

    // Assert the URLs are correct
    assert edit_url == 'https://github.com/test/repo/edit/main/test_page.md'
}
@@ -1,86 +1,92 @@
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.texttools
import incubaid.herolib.core.base
// import incubaid.herolib.core.texttools
import incubaid.herolib.develop.gittools
import incubaid.herolib.data.paramsparser { Params }
import incubaid.herolib.ui.console
import os

pub struct Session {
pub mut:
    user   string // username
    email  string // user's email (lowercase internally)
    params Params // additional context from request/webserver
}

@[heap]
pub struct Collection {
pub mut:
    name        string       @[required]
    path        pathlib.Path @[required]
    pages       map[string]&Page
    images      map[string]&File
    files       map[string]&File
    atlas       &Atlas @[skip; str: skip] // Reference to parent atlas for include resolution
    name        string
    path        string // absolute path
    pages       map[string]&Page
    files       map[string]&File
    atlas       &Atlas @[skip; str: skip]
    errors      []CollectionError
    error_cache map[string]bool // Track error hashes to avoid duplicates
    error_cache map[string]bool
    git_url     string
    acl_read    []string // Group names allowed to read (lowercase)
    acl_write   []string // Group names allowed to write (lowercase)
}

@[params]
pub struct CollectionNewArgs {
pub mut:
    name string @[required]
    path string @[required]
// Resolve the collection's absolute path
pub fn (mut c Collection) path() !pathlib.Path {
    return pathlib.get_dir(path: c.path, create: false)!
}

// Create a new collection
fn (mut self Atlas) new_collection(args CollectionNewArgs) !Collection {
    mut name := texttools.name_fix(args.name)
    mut path := pathlib.get_dir(path: args.path)!

    mut col := Collection{
        name:        name
        path:        path
        atlas:       &self // Set atlas reference
        error_cache: map[string]bool{}
    }

    return col
fn (mut c Collection) init_pre() ! {
    mut p := c.path()!
    c.scan(mut p)!
    c.scan_acl()!
}

fn (mut c Collection) init_post() ! {
    c.validate_links()!
    c.init_git_info()!
}

////////////////////////////////////////////////////////////////////////////////////////////////////////

// Add a page to the collection
fn (mut c Collection) add_page(mut p pathlib.Path) ! {
    name := p.name_fix_no_ext()

fn (mut c Collection) add_page(mut path pathlib.Path) ! {
    name := path.name_fix_no_ext()
    if name in c.pages {
        return error('Page ${name} already exists in collection ${c.name}')
    }
    relativepath := path.path_relative(c.path()!.path)!

    p_new := new_page(
    mut p_new := Page{
        name:            name
        path:            p
        path:            relativepath
        collection_name: c.name
        collection:      &c
    )!
    }

    c.pages[name] = &p_new
}

// Add an image to the collection
fn (mut c Collection) add_image(mut p pathlib.Path) ! {
    name := p.name_fix_no_ext()

    if name in c.images {
        return error('Image ${name} already exists in collection ${c.name}')
    }

    mut img := new_file(path: p)!
    c.images[name] = &img
}

// Add a file to the collection
fn (mut c Collection) add_file(mut p pathlib.Path) ! {
    name := p.name_fix_no_ext()

    name := p.name_fix_keepext()
    if name in c.files {
        return error('File ${name} already exists in collection ${c.name}')
        return error('Page ${name} already exists in collection ${c.name}')
    }
    relativepath := p.path_relative(c.path()!.path)!

    mut file_new := File{
        name:       name
        ext:        p.extension_lower()
        path:       relativepath // relative path of file in the collection
        collection: &c
    }

    mut file := new_file(path: p)!
    c.files[name] = &file
    if p.is_image() {
        file_new.ftype = .image
    } else {
        file_new.ftype = .file
    }
    c.files[name] = &file_new
}

// Get a page by name
@@ -93,20 +99,37 @@ pub fn (c Collection) page_get(name string) !&Page {

// Get an image by name
pub fn (c Collection) image_get(name string) !&File {
    return c.images[name] or { return FileNotFound{
    mut img := c.files[name] or { return FileNotFound{
        collection: c.name
        file:       name
    } }
    if img.ftype != .image {
        return error('File `${name}` in collection ${c.name} is not an image')
    }
    return img
}

// Get a file by name
pub fn (c Collection) file_get(name string) !&File {
    return c.files[name] or { return FileNotFound{
    mut f := c.files[name] or { return FileNotFound{
        collection: c.name
        file:       name
    } }
    if f.ftype != .file {
        return error('File `${name}` in collection ${c.name} is not a file')
    }
    return f
}

pub fn (c Collection) file_or_image_get(name string) !&File {
    mut f := c.files[name] or { return FileNotFound{
        collection: c.name
        file:       name
    } }
    return f
}

// Check if page exists
pub fn (c Collection) page_exists(name string) bool {
    return name in c.pages
@@ -114,103 +137,23 @@ pub fn (c Collection) page_exists(name string) bool {

// Check if image exists
pub fn (c Collection) image_exists(name string) bool {
    return name in c.images
    f := c.files[name] or { return false }
    return f.ftype == .image
}

// Check if file exists
pub fn (c Collection) file_exists(name string) bool {
    return name in c.files
    f := c.files[name] or { return false }
    return f.ftype == .file
}

@[params]
pub struct CollectionExportArgs {
pub mut:
    destination pathlib.Path @[required]
    reset       bool = true
    include     bool = true // process includes during export
    redis       bool = true
pub fn (c Collection) file_or_image_exists(name string) bool {
    f := c.files[name] or { return false }
    return true
}

// Export a single collection
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
    // Create collection directory
    mut col_dir := pathlib.get_dir(
        path:   '${args.destination.path}/${c.name}'
        create: true
    )!

    if args.reset {
        col_dir.empty()!
    }

    // Write .collection file
    mut cfile := pathlib.get_file(
        path:   '${col_dir.path}/.collection'
        create: true
    )!
    cfile.write("name:${c.name} src:'${c.path.path}'")!

    // Export pages (process includes if requested)
    for _, mut page in c.pages {
        content := page.content(include: args.include)!
        mut dest_file := pathlib.get_file(
            path:   '${col_dir.path}/${page.name}.md'
            create: true
        )!
        dest_file.write(content)!

        if args.redis {
            mut context := base.context()!
            mut redis := context.redis()!
            redis.hset('atlas:${c.name}', page.name, '${page.name}.md')!
        }
    }

    // Export images
    if c.images.len > 0 {
        img_dir := pathlib.get_dir(
            path:   '${col_dir.path}/img'
            create: true
        )!

        for _, mut img in c.images {
            dest_path := '${img_dir.path}/${img.file_name()}'
            img.path.copy(dest: dest_path)!

            if args.redis {
                mut context := base.context()!
                mut redis := context.redis()!
                redis.hset('atlas:${c.name}', img.file_name(), 'img/${img.file_name()}')!
            }
        }
    }

    // Export files
    if c.files.len > 0 {
        files_dir := pathlib.get_dir(
            path:   '${col_dir.path}/files'
            create: true
        )!

        for _, mut file in c.files {
            dest_path := '${files_dir.path}/${file.file_name()}'
            file.path.copy(dest: dest_path)!

            if args.redis {
                mut context := base.context()!
                mut redis := context.redis()!
                redis.hset('atlas:${c.name}', file.file_name(), 'files/${file.file_name()}')!
            }
        }
    }

    // Store collection metadata in Redis
    if args.redis {
        mut context := base.context()!
        mut redis := context.redis()!
        redis.hset('atlas:path', c.name, col_dir.path)!
    }
}

@[params]
pub struct CollectionErrorArgs {
@@ -232,23 +175,23 @@ pub fn (mut c Collection) error(args CollectionErrorArgs) {
        message: args.message
        file:    args.file
    }

    // Calculate hash for deduplication
    hash := err.hash()

    // Check if this error was already reported
    if hash in c.error_cache {
        return // Skip duplicate
        return
    }

    // Mark this error as reported
    c.error_cache[hash] = true

    // Log to errors array if requested
    if args.log_error {
        c.errors << err
    }

    // Show in console if requested
    if args.show_console {
        console.print_stderr('[${c.name}] ${err.str()}')
@@ -274,11 +217,11 @@ pub fn (mut c Collection) clear_errors() {
// Get error summary by category
pub fn (c Collection) error_summary() map[CollectionErrorCategory]int {
    mut summary := map[CollectionErrorCategory]int{}

    for err in c.errors {
        summary[err.category] = summary[err.category] + 1
    }

    return summary
}

@@ -288,9 +231,9 @@ pub fn (c Collection) print_errors() {
        console.print_green('Collection ${c.name}: No errors')
        return
    }

    console.print_header('Collection ${c.name} - Errors (${c.errors.len})')

    for err in c.errors {
        console.print_stderr('  ${err.str()}')
    }
@@ -299,7 +242,8 @@ pub fn (c Collection) print_errors() {
// Validate all links in collection
pub fn (mut c Collection) validate_links() ! {
    for _, mut page in c.pages {
        page.validate_links()!
        content := page.content(include: true)!
        page.links = page.find_links(content)! // will walk over links, see if errors, and add errors
    }
}

@@ -307,14 +251,160 @@ pub fn (mut c Collection) validate_links() ! {
pub fn (mut c Collection) fix_links() ! {
    for _, mut page in c.pages {
        // Read original content
        content := page.read_content()!

        content := page.content()!

        // Fix links
        fixed_content := page.fix_links(content)!

        fixed_content := page.content_with_fixed_links()!

        // Write back if changed
        if fixed_content != content {
            page.path.write(fixed_content)!
            mut p := page.path()!
            p.write(fixed_content)!
        }
    }
}

// Check if session can read this collection
pub fn (c Collection) can_read(session Session) bool {
    // If no ACL set, everyone can read
    if c.acl_read.len == 0 {
        return true
    }

    // Get user's groups
    mut atlas := c.atlas
    groups := atlas.groups_get(session)
    group_names := groups.map(it.name)

    // Check if any of user's groups are in read ACL
    for acl_group in c.acl_read {
        if acl_group in group_names {
            return true
        }
    }

    return false
}

// Check if session can write this collection
pub fn (c Collection) can_write(session Session) bool {
    // If no ACL set, no one can write
    if c.acl_write.len == 0 {
        return false
    }

    // Get user's groups
    mut atlas := c.atlas
    groups := atlas.groups_get(session)
    group_names := groups.map(it.name)

    // Check if any of user's groups are in write ACL
    for acl_group in c.acl_write {
        if acl_group in group_names {
            return true
        }
    }

    return false
}
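To make the ACL flow concrete, a minimal sketch (collection name and session values are invented, and `atlas_instance` is assumed to come from `new()`/`get()`):

```v
// Illustrative only: read falls open when acl_read is empty,
// write falls closed when acl_write is empty.
session := Session{
    user:  'alice'
    email: 'Alice@Example.com' // matching is case-insensitive via groups_get
}
col := atlas_instance.get_collection('docs')!
println(col.can_read(session))
println(col.can_write(session))
```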
// Detect git repository URL for a collection
fn (mut c Collection) init_git_info() ! {
    mut current_path := c.path()!

    // Walk up directory tree to find .git
    mut git_repo := current_path.parent_find('.git') or {
        // No git repo found
        return
    }

    if git_repo.path == '' {
        panic('Unexpected empty git repo path')
    }

    mut gs := gittools.new()!
    mut p := c.path()!
    mut location := gs.gitlocation_from_path(p.path)!

    r := os.execute_opt('cd ${p.path} && git branch --show-current')!

    location.branch_or_tag = r.output.trim_space()

    c.git_url = location.web_url()!
}

////////////SCANNING FUNCTIONS ?//////////////////////////////////

fn (mut c Collection) scan(mut dir pathlib.Path) ! {
    mut entries := dir.list(recursive: false)!

    for mut entry in entries.paths {
        // Skip hidden files/dirs
        if entry.name().starts_with('.') || entry.name().starts_with('_') {
            continue
        }

        if entry.is_dir() {
            // Recursively scan subdirectories
            mut mutable_entry := entry
            c.scan(mut mutable_entry)!
            continue
        }

        // Process files based on extension
        match entry.extension_lower() {
            'md' {
                mut mutable_entry := entry
                c.add_page(mut mutable_entry)!
            }
            else {
                mut mutable_entry := entry
                c.add_file(mut mutable_entry)!
            }
        }
    }
}

// Scan for ACL files
fn (mut c Collection) scan_acl() ! {
    // Look for read.acl in collection directory
    read_acl_path := '${c.path()!.path}/read.acl'
    if os.exists(read_acl_path) {
        content := os.read_file(read_acl_path)!
        // Split by newlines and normalize
        c.acl_read = content.split('\n')
            .map(it.trim_space())
            .filter(it.len > 0)
            .map(it.to_lower())
    }

    // Look for write.acl in collection directory
    write_acl_path := '${c.path()!.path}/write.acl'
    if os.exists(write_acl_path) {
        content := os.read_file(write_acl_path)!
        // Split by newlines and normalize
        c.acl_write = content.split('\n')
            .map(it.trim_space())
            .filter(it.len > 0)
            .map(it.to_lower())
    }
}

// scan_groups scans the collection's directory for .group files and loads them into memory.
pub fn (mut c Collection) scan_groups() ! {
    if c.name != 'groups' {
        return error('scan_groups only works on "groups" collection')
    }
    mut p := c.path()!
    mut entries := p.list(recursive: false)!

    for mut entry in entries.paths {
        if entry.extension_lower() == 'group' {
            filename := entry.name_fix_no_ext()
            mut visited := map[string]bool{}
            mut group := parse_group_file(filename, c.path()!.path, mut visited)!

            c.atlas.group_add(mut group)!
        }
    }
}

@@ -8,9 +8,11 @@ pub enum CollectionErrorCategory {
    missing_include
    include_syntax_error
    invalid_page_reference
    invalid_file_reference
    file_not_found
    invalid_collection
    general_error
    acl_denied // NEW: Access denied by ACL
}

pub struct CollectionError {
@@ -25,13 +27,13 @@ pub mut:
// Hash is based on category + page_key (or file if page_key is empty)
pub fn (e CollectionError) hash() string {
    mut hash_input := '${e.category}'

    if e.page_key != '' {
        hash_input += ':${e.page_key}'
    } else if e.file != '' {
        hash_input += ':${e.file}'
    }

    return md5.hexhash(hash_input)
}

@@ -43,7 +45,7 @@ pub fn (e CollectionError) str() string {
    } else if e.file != '' {
        location = ' [${e.file}]'
    }

    return '[${e.category}]${location}: ${e.message}'
}

@@ -54,8 +56,10 @@ pub fn (e CollectionError) category_str() string {
        .missing_include { 'Missing Include' }
        .include_syntax_error { 'Include Syntax Error' }
        .invalid_page_reference { 'Invalid Page Reference' }
        .invalid_file_reference { 'Invalid File Reference' }
        .file_not_found { 'File Not Found' }
        .invalid_collection { 'Invalid Collection' }
        .general_error { 'General Error' }
        .acl_denied { 'ACL Access Denied' }
    }
}
}

@@ -1,38 +1,108 @@
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.base
import json

@[params]
pub struct ExportArgs {
pub mut:
    destination string
    reset       bool = true
    include     bool = true // process includes during export
    redis       bool = true
    destination      string @[required]
    destination_meta string // NEW: where to save collection metadata
    reset            bool = true
    include          bool = true
    redis            bool = true
}

// Export all collections
pub fn (mut a Atlas) export(args ExportArgs) ! {
    mut dest := pathlib.get_dir(path: args.destination, create: true)!
    mut dest := pathlib.get_dir(path: args.destination, create: true)!

    if args.reset {
        dest.empty()!
    }
    if args.reset {
        dest.empty()!
    }

    // Validate links before export
    a.validate_links()!
    // Validate links before export
    // a.validate_links()!

    for _, mut col in a.collections {
        col.export(
            destination: dest
            reset:       args.reset
            include:     args.include
            redis:       args.redis
        )!

        // Print errors for this collection if any
        if col.has_errors() {
            col.print_errors()
        }
    }
}
    for _, mut col in a.collections {
        col.export(
            destination: dest
            reset:       args.reset
            include:     args.include
            redis:       args.redis
        )!
    }
}

@[params]
pub struct CollectionExportArgs {
pub mut:
    destination pathlib.Path @[required]
    reset       bool = true
    include     bool = true // process includes during export
    redis       bool = true
}

// Export a single collection
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
    // Create collection directory
    mut col_dir := pathlib.get_dir(
        path:   '${args.destination.path}/content/${c.name}'
        create: true
    )!
    mut dir_meta := pathlib.get_dir(
        path:   '${args.destination.path}/meta/'
        create: true
    )!

    if c.has_errors() {
        c.print_errors()
    }

    meta := json.encode_pretty(c)
    mut json_file := pathlib.get_file(
        path:   '${dir_meta.path}/${c.name}.json'
        create: true
    )!
    json_file.write(meta)!

    for _, mut page in c.pages {
        content := page.content(include: args.include)!

        // NEW: Process cross-collection links
        processed_content := page.process_links(mut col_dir)!

        mut dest_file := pathlib.get_file(path: '${col_dir.path}/${page.name}.md', create: true)!
        dest_file.write(processed_content)!

        // Redis operations...
        if args.redis {
            mut context := base.context()!
            mut redis := context.redis()!
            redis.hset('atlas:${c.name}', page.name, page.path)!
        }
    }

    // // Export files
    // if c.files.len > 0 {
    //     files_dir := pathlib.get_dir(
    //         path:   '${col_dir.path}/files'
    //         create: true
    //     )!

    //     for _, mut file in c.files {
    //         dest_path := '${files_dir.path}/${file.file_name()}'
    //         mut p2 := file.path()!
    //         p2.copy(dest: col_dir.path)!

    //         if args.redis {
    //             mut context := base.context()!
    //             mut redis := context.redis()!
    //             redis.hset('atlas:${c.name}', file.file_name(), file.path()!.path)!
    //         }
    //     }
    // }
}

lib/data/atlas/factory.v (new file, 59 lines)
@@ -0,0 +1,59 @@
module atlas

import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import incubaid.herolib.ui.console
import incubaid.herolib.data.paramsparser

__global (
    atlases shared map[string]&Atlas
)

@[params]
pub struct AtlasNewArgs {
pub mut:
    name string = 'default'
}

// Create a new Atlas
pub fn new(args AtlasNewArgs) !&Atlas {
    mut name := texttools.name_fix(args.name)

    mut a := Atlas{
        name: name
    }

    set(a)
    return &a
}

// Get Atlas from global map
pub fn get(name string) !&Atlas {
    rlock atlases {
        if name in atlases {
            return atlases[name] or { return error('Atlas ${name} not found') }
        }
    }
    return error("Atlas '${name}' not found")
}

// Check if Atlas exists
pub fn exists(name string) bool {
    rlock atlases {
        return name in atlases
    }
}

// List all Atlas names
pub fn list() []string {
    rlock atlases {
        return atlases.keys()
    }
}

// Store Atlas in global map
fn set(atlas Atlas) {
    lock atlases {
        atlases[atlas.name] = &atlas
    }
}
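A minimal usage sketch of this new factory API (the atlas name is invented):

```v
a := new(name: 'docs')! // creates the atlas and registers it in the global map
println(a.name)
assert exists('docs')
println(get('docs')!.name) // fetch the same instance later
println(list()) // e.g. ['docs']
```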
@@ -3,49 +3,29 @@ module atlas
import incubaid.herolib.core.pathlib

pub enum FileType {
    file
    image
    file
    image
}

pub struct File {
pub mut:
    name  string       // name without extension
    ext   string       // file extension
    path  pathlib.Path // full path to file
    ftype FileType     // file or image
    name       string   // name without extension
    ext        string   // file extension
    path       string   // relative path of file in the collection
    ftype      FileType // file or image
    collection &Collection @[skip; str: skip] // Reference to parent collection
}

@[params]
pub struct NewFileArgs {
pub:
    path pathlib.Path @[required]
}

pub fn new_file(args NewFileArgs) !File {
    mut f := File{
        path: args.path
    }
    f.init()!
    return f
}

fn (mut f File) init() ! {
    // Determine file type
    if f.path.is_image() {
        f.ftype = .image
    } else {
        f.ftype = .file
    }

    // Extract name and extension
    f.name = f.path.name_fix_no_ext()
    f.ext = f.path.extension_lower()
// Resolve the file's absolute path from its collection-relative path
pub fn (mut f File) path() !pathlib.Path {
    mut mypath := '${f.collection.path()!.path}/${f.path}'
    return pathlib.get_file(path: mypath, create: false)!
}

pub fn (f File) file_name() string {
    return '${f.name}.${f.ext}'
    return '${f.name}.${f.ext}'
}

pub fn (f File) is_image() bool {
    return f.ftype == .image
}
    return f.ftype == .image
}

@@ -33,6 +33,16 @@ pub fn (a Atlas) file_get(key string) !&File {
    return col.file_get(parts[1])!
}

// Get a file (can be image) from any collection using format "collection:file"
pub fn (a Atlas) file_or_image_get(key string) !&File {
    parts := key.split(':')
    if parts.len != 2 {
        return error('Invalid file key format. Use "collection:file"')
    }
    col := a.get_collection(parts[0])!
    return col.file_or_image_get(parts[1])!
}

// Check if page exists
pub fn (a Atlas) page_exists(key string) bool {
    parts := key.split(':')
@@ -66,6 +76,16 @@ pub fn (a Atlas) file_exists(key string) bool {
    return col.file_exists(parts[1])
}

pub fn (a Atlas) file_or_image_exists(key string) bool {
    parts := key.split(':')
    if parts.len != 2 {
        return false
    }
    col := a.get_collection(parts[0]) or { return false }
    return col.file_or_image_exists(parts[1])
}

// List all pages in Atlas
pub fn (a Atlas) list_pages() map[string][]string {
    mut result := map[string][]string{}

lib/data/atlas/group.v (new file, 104 lines)
@@ -0,0 +1,104 @@
module atlas

import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import os

@[heap]
pub struct Group {
pub mut:
    name     string   // normalized to lowercase
    patterns []string // email patterns, normalized to lowercase
}

@[params]
pub struct GroupNewArgs {
pub mut:
    name     string   @[required]
    patterns []string @[required]
}

// Create a new Group
pub fn new_group(args GroupNewArgs) !Group {
    mut name := texttools.name_fix(args.name)
    mut patterns := args.patterns.map(it.to_lower())

    return Group{
        name:     name
        patterns: patterns
    }
}

// Check if email matches any pattern in this group
pub fn (g Group) matches(email string) bool {
    email_lower := email.to_lower()

    for pattern in g.patterns {
        if matches_pattern(email_lower, pattern) {
            return true
        }
    }
    return false
}

// Helper: match email against wildcard pattern
// '*@domain.com' matches 'user@domain.com'
// 'exact@email.com' matches only 'exact@email.com'
fn matches_pattern(email string, pattern string) bool {
    if pattern == '*' {
        return true
    }

    if !pattern.contains('*') {
        return email == pattern
    }

    // Handle wildcard patterns like '*@domain.com'
    if pattern.starts_with('*') {
        suffix := pattern[1..] // Remove the '*'
        return email.ends_with(suffix)
    }

    // Could add more complex patterns here if needed
    return false
}
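A quick sketch of how these matchers behave (addresses invented):

```v
g := new_group(name: 'team', patterns: ['*@incubaid.com', 'admin@example.com'])!
assert g.matches('USER@incubaid.com') // wildcard suffix, case-insensitive
assert g.matches('admin@example.com') // exact match
assert !g.matches('someone@else.com')
```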
// parse_group_file parses a single .group file, resolving includes recursively.
fn parse_group_file(filename string, base_path string, mut visited map[string]bool) !Group {
    if filename in visited {
        return error('Circular include detected: ${filename}')
    }

    visited[filename] = true

    mut group := Group{
        name:     texttools.name_fix(filename)
        patterns: []string{}
    }

    mut file_path := pathlib.get_file(path: '${base_path}/${filename}.group')!
    content := file_path.read()!

    for line_orig in content.split_into_lines() {
        line := line_orig.trim_space()
        if line.len == 0 || line.starts_with('//') {
            continue
        }

        if line.starts_with('include:') {
            mut included_name := line.trim_string_left('include:').trim_space()
            included_name = included_name.replace('.group', '') // Remove .group if present
            include_path := '${base_path}/${included_name}.group'
            if !os.exists(include_path) {
                return error('Included group file not found: ${included_name}.group')
            }
            included_group := parse_group_file(included_name, base_path, mut visited)!

            group.patterns << included_group.patterns
        } else {
            group.patterns << line.to_lower()
        }
    }

    return group
}

lib/data/atlas/instruction.md (new file, 15 lines)
@@ -0,0 +1,15 @@
in atlas/

check the format of groups
see content/groups

the groups now end with .group

check how the include works, so we can include another group in a group as defined; this only works in the same folder

in the scan function in atlas, now make a scan_groups function to find groups; only do this for the collection named groups
do not add the groups collection to the atlas, this is a system collection

make the groups and add them to the atlas

give clear instructions for the coding agent on how to write the code
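To make that format concrete, a hypothetical pair of .group files (names and addresses invented) that would satisfy parse_group_file above: one pattern per line, // for comments, and include: to pull in another group from the same folder.

```
// content/groups/admins.group
admin@example.com
*@incubaid.com

// content/groups/editors.group
include:admins
alice@example.com
```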
@@ -7,96 +7,272 @@ import os
// Link represents a markdown link found in content
pub struct Link {
pub mut:
    text       string // Link text [text]
    target     string // Original link target
    line       int    // Line number
    col_start  int    // Column start position
    col_end    int    // Column end position
    collection string // Target collection (if specified)
    page       string // Target page name (normalized)
    is_local   bool   // Whether link points to local page
    valid      bool   // Whether link target exists
    src                    string // Source content where link was found (what to replace)
    text                   string // Link text [text]
    target                 string // Original link target (the source text)
    line                   int    // Line number where link was found
    target_collection_name string
    target_item_name       string
    status                 LinkStatus
    is_file_link           bool // is the link pointing to a file
    is_image_link          bool // is the link pointing to an image
    page                   &Page @[skip; str: skip] // Reference to page where this link is found
}

pub enum LinkStatus {
    init
    external
    found
    not_found
    anchor
    error
}

fn (mut self Link) key() string {
    return '${self.target_collection_name}:${self.target_item_name}'
}

// is the link in the same collection as the page containing the link
fn (mut self Link) is_local_in_collection() bool {
    return self.target_collection_name == self.page.collection.name
}

// is the link pointing to an external resource e.g. http, git, mailto, ftp
pub fn (mut self Link) is_external() bool {
    return self.status == .external
}

pub fn (mut self Link) target_page() !&Page {
    if self.status == .external {
        return error('External links do not have a target page')
    }
    return self.page.collection.atlas.page_get(self.key())
}

pub fn (mut self Link) target_file() !&File {
    if self.status == .external {
        return error('External links do not have a target file')
    }
    return self.page.collection.atlas.file_or_image_get(self.key())
}

// Find all markdown links in content
pub fn find_links(content string) []Link {
fn (mut p Page) find_links(content string) ![]Link {
    mut links := []Link{}
    lines := content.split_into_lines()

    mut lines := content.split_into_lines()

    for line_idx, line in lines {
        // println('Processing line ${line_idx + 1}: ${line}')
        mut pos := 0
        for {
            mut image_open := line.index_after('!', pos) or { -1 }

            // Find next [
            open_bracket := line.index_after('[', pos) or { break }

            // Find matching ]
            close_bracket := line.index_after(']', open_bracket) or { break }

            // Check for (
            if close_bracket + 1 >= line.len || line[close_bracket + 1] != `(` {
                pos = close_bracket + 1
                // println('no ( after ]: skipping, ${line}')
                continue
            }

            if image_open + 1 != open_bracket {
                image_open = -1
            }

            // Find matching )
            open_paren := close_bracket + 1
            close_paren := line.index_after(')', open_paren) or { break }

            // Extract link components
            text := line[open_bracket + 1..close_bracket]
            target := line[open_paren + 1..close_paren]

            mut link := Link{
                text:      text
                target:    target.trim_space()
                line:      line_idx + 1
                col_start: open_bracket
                col_end:   close_paren + 1

            mut is_image_link := (image_open != -1)

            mut is_file_link := false

            // if there is no '.' in the target it is a page link (in other words, binaries without an extension are not supported)
            if target.contains('.') && !target.trim_space().to_lower().ends_with('.md') {
                is_file_link = true
                is_image_link = false // means it's a file link, not an image link
            }

            parse_link_target(mut link)

            mut link := Link{
                src:           line[open_bracket..close_paren + 1]
                text:          text
                target:        target.trim_space()
                line:          line_idx + 1
                is_file_link:  is_file_link
                is_image_link: is_image_link
                page:          &p
            }

            p.parse_link_target(mut link)
            if link.status == .external {
                link.is_file_link = false
                link.is_image_link = false
            }
            println(link)
            links << link

            pos = close_paren + 1
        }
    }

    return links
}
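For reference, a sketch of the link shapes find_links and parse_link_target distinguish (targets invented), following the rules above:

```
[Intro](intro)              <- page link, resolved within the page's own collection
[Spec](other_col:spec.md)   <- cross-collection page link (collection:page)
![Logo](logo.png)           <- image link (leading '!')
[Report](report.xlsx)       <- file link (has a '.' and does not end in .md)
[Site](https://example.com) <- external, status becomes .external
```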
// Parse link target to extract collection and page
fn parse_link_target(mut link Link) {
    target := link.target

fn (mut p Page) parse_link_target(mut link Link) {
    mut target := link.target.to_lower().trim_space()

    // Skip external links
    if target.starts_with('http://') || target.starts_with('https://')
    if target.starts_with('http://') || target.starts_with('https://')
        || target.starts_with('mailto:') || target.starts_with('ftp://') {
        link.status = .external
        return
    }

    // Skip anchors
    if target.starts_with('#') {
        link.status = .anchor
        return
    }

    link.is_local = true

    if target.contains('/') {
        parts9 := target.split('/')
        if parts9.len >= 2 { // guard must match the [1] index used below
            target = parts9[1]
        }
    }

    // Format: $collection:$pagename or $collection:$pagename.md
    if target.contains(':') {
        parts := target.split(':')
        if parts.len >= 2 {
            link.collection = texttools.name_fix(parts[0])
            link.page = normalize_page_name(parts[1])
            link.target_collection_name = texttools.name_fix(parts[0])
            link.target_item_name = normalize_page_name(parts[1])
        }
        return
    } else {
        link.target_item_name = normalize_page_name(target).trim_space()
        link.target_collection_name = p.collection.name
    }

    if link.is_file_link == false && !p.collection.atlas.page_exists(link.key()) {
        p.collection.error(
            category:     .invalid_page_reference
            page_key:     p.key()
            message:      'Broken link to `${link.key()}` at line ${link.line}: `${link.src}`'
            show_console: true
        )
        link.status = .not_found
    } else if link.is_file_link && !p.collection.atlas.file_or_image_exists(link.key()) {
        p.collection.error(
            category:     .invalid_file_reference
            page_key:     p.key()
            message:      'Broken file link to `${link.key()}` at line ${link.line}: `${link.src}`'
            show_console: true
        )
        link.status = .not_found
    } else {
        link.status = .found
    }

    // For all other formats, extract filename from path (ignore path components)
    // Handles: $page, path/to/$page, /path/to/$page, /path/to/$page.md
    filename := os.base(target)
    link.page = normalize_page_name(filename)
}

////////////////FIX PAGES FOR THE LINKS///////////////////////

// Fix links in page content - rewrites links with proper relative paths
fn (mut p Page) content_with_fixed_links() !string {
    mut content := p.content(include: false)!
    if p.links.len == 0 {
        return content
    }

    // Process links in reverse order to maintain positions
    for mut link in p.links.reverse() {
        // if the page does not exist there is no point in fixing
        if link.status != .found {
            continue
        }
        // if not local then no point in fixing
        if !link.is_local_in_collection() {
            continue
        }
        // Get target page
        mut target_page := link.target_page()!
        mut target_path := target_page.path()!

        relative_path := target_path.path_relative(p.path()!.path)!

        new_link := '[${link.text}](${relative_path})'

        // Replace in content
        content = content.replace(link.src, new_link)
    }

    return content
}

// process_cross_collection_links handles exporting cross-collection references
// It:
// 1. Finds all cross-collection links (collection:page format)
// 2. Copies the target page to the export directory
// 3. Renames the link to avoid conflicts (collectionname_pagename.md)
// 4. Rewrites the link in the content
fn (mut p Page) process_links(mut export_dir pathlib.Path) !string {
    mut c := p.content(include: true)!

    mut links := p.find_links(c)!

    // Process links in reverse order to maintain string positions
    for mut link in links.reverse() {
        println(link)
        if link.status != .found {
            continue
        }
        mut exported_filename := ''
        if link.is_file_link {
            mut target_file := link.target_file()!
            mut target_path := target_file.path()!
            // Copy target file with renamed filename
            exported_filename = 'files/${target_file.collection.name}_${target_file.name}'
            os.mkdir_all('${export_dir.path}/files')!
            os.cp(target_path.path, '${export_dir.path}/${exported_filename}')!
        } else {
            mut target_page := link.target_page()!
            mut target_path := target_page.path()!

            // Copy target page with renamed filename
            exported_filename = '${target_page.collection.name}_${target_page.name}.md'
            page_content := target_page.content(include: true)!

            mut exported_file := pathlib.get_file(
                path:   '${export_dir.path}/${exported_filename}'
                create: true
            )!
            exported_file.write(page_content)!
        }

        mut pre := ''
        if link.is_file_link {
            pre = '!'
        }

        // Update link in source content
        new_link := '${pre}[${link.text}](${exported_filename})'
        c = c.replace(link.src, new_link)
    }

    return c
}

/////////////TOOLS//////////////////////////////////

// Normalize page name (remove .md, apply name_fix)
fn normalize_page_name(name string) string {
    mut clean := name
@@ -105,124 +281,3 @@ fn normalize_page_name(name string) string {
    }
    return texttools.name_fix(clean)
}

// Validate links in page
pub fn (mut p Page) validate_links() ! {
    content := p.read_content()!
    links := find_links(content)

    for link in links {
        if !link.is_local {
            continue
        }

        // Determine target collection
        mut target_collection := link.collection
        if target_collection == '' {
            target_collection = p.collection_name
        }

        // Check if page exists
        page_key := '${target_collection}:${link.page}'
        if !p.collection.atlas.page_exists(page_key) {
            p.collection.error(
                category:     .invalid_page_reference
                page_key:     p.key()
                message:      'Broken link to `${page_key}` at line ${link.line}: [${link.text}](${link.target})'
                show_console: false
            )
        }
    }
}

// Fix links in page content - rewrites links with proper relative paths
pub fn (mut p Page) fix_links(content string) !string {
    links := find_links(content)
    if links.len == 0 {
        return content
    }

    mut result := content

    // Process links in reverse order to maintain positions
    for link in links.reverse() {
        if !link.is_local || link.page == '' {
            continue
        }

        // Determine target collection
        mut target_collection := link.collection
        if target_collection == '' {
            target_collection = p.collection_name
        }

        // Only fix links within same collection
        if target_collection != p.collection_name {
            continue
        }

        // Get target page
        page_key := '${target_collection}:${link.page}'
        mut target_page := p.collection.atlas.page_get(page_key) or {
            // Skip if page doesn't exist - error already reported in validate
            continue
        }

        // Calculate relative path
        relative_path := calculate_relative_path(mut p.path, mut target_page.path)

        // Build replacement
        old_link := '[${link.text}](${link.target})'
        new_link := '[${link.text}](${relative_path})'

        // Replace in content
        result = result.replace(old_link, new_link)
    }

    return result
}

// Calculate relative path from source file to target file with .md extension
fn calculate_relative_path(mut from pathlib.Path, mut to pathlib.Path) string {
    from_dir := from.path_dir()
    to_dir := to.path_dir()
    to_name := to.name_fix_no_ext()

    // If in same directory, just return filename with .md
    if from_dir == to_dir {
        return '${to_name}.md'
    }

    // Split paths into parts
    from_parts := from_dir.split(os.path_separator).filter(it != '')
    to_parts := to_dir.split(os.path_separator).filter(it != '')

    // Find common base
    mut common_len := 0
    for i := 0; i < from_parts.len && i < to_parts.len; i++ {
        if from_parts[i] == to_parts[i] {
            common_len = i + 1
        } else {
            break
        }
    }

    // Build relative path
    mut rel_parts := []string{}

    // Add ../ for each directory we need to go up
    up_count := from_parts.len - common_len
    for _ in 0 .. up_count {
        rel_parts << '..'
    }

    // Add path down to target
    for i := common_len; i < to_parts.len; i++ {
        rel_parts << to_parts[i]
    }

    // Add filename with .md extension
    rel_parts << '${to_name}.md'

    return rel_parts.join('/')
}
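A worked example of the relative-path computation above (paths invented): it walks up from the source directory to the common base, then back down to the target.

```v
// Illustrative only, assuming these files exist:
mut from := pathlib.get_file(path: '/tmp/col/guide/intro.md', create: true)!
mut to := pathlib.get_file(path: '/tmp/col/api/errors.md', create: true)!
// one level up from 'guide', then down into 'api'
assert calculate_relative_path(mut from, mut to) == '../api/errors.md'
```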
@@ -7,47 +7,42 @@ import incubaid.herolib.core.texttools
|
||||
pub struct Page {
|
||||
pub mut:
|
||||
name string
|
||||
path pathlib.Path
|
||||
path string // in collection
|
||||
collection_name string
|
||||
collection &Collection @[skip; str: skip] // Reference to parent collection
|
||||
links []Link
|
||||
// macros []Macro
|
||||
collection &Collection @[skip; str: skip] // Reference to parent collection
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct NewPageArgs {
|
||||
pub:
|
||||
name string @[required]
|
||||
path pathlib.Path @[required]
|
||||
collection_name string @[required]
|
||||
collection &Collection @[required]
|
||||
}
|
||||
|
||||
pub fn new_page(args NewPageArgs) !Page {
|
||||
return Page{
|
||||
name: args.name
|
||||
path: args.path
|
||||
collection_name: args.collection_name
|
||||
collection: args.collection
|
||||
}
|
||||
name string @[required]
|
||||
path string @[required]
|
||||
collection_name string @[required]
|
||||
collection &Collection @[required]
|
||||
}
|
||||
|
||||
// Read content without processing includes
|
||||
pub fn (mut p Page) read_content() !string {
|
||||
return p.path.read()!
|
||||
pub fn (mut p Page) path() !pathlib.Path {
|
||||
curpath := p.collection.path()!
|
||||
return pathlib.get_file(path: '${curpath.path}/${p.path}', create: false)! // should be relative to collection
|
||||
}
|
||||
|
||||
// Read content with includes processed (default behavior)
|
||||
@[params]
|
||||
pub struct ReadContentArgs {
|
||||
pub mut:
|
||||
include bool = true
|
||||
include bool
|
||||
}
|
||||
|
||||
// Read content without processing includes
|
||||
pub fn (mut p Page) content(args ReadContentArgs) !string {
|
||||
mut content := p.path.read()!
|
||||
|
||||
mut mypath := p.path()!
|
||||
mut content := mypath.read()!
|
||||
if args.include {
|
||||
mut v := map[string]bool{}
|
||||
return p.process_includes(content, mut v)!
|
||||
content = p.process_includes(content, mut v)!
|
||||
}
|
||||
return content
|
||||
}
@@ -124,7 +119,7 @@ fn (mut p Page) process_includes(content string, mut visited map[string]bool) !s
	}

	// Recursively process the included page
	include_content := include_page.process_includes(include_page.content()!, mut
		visited)!

	processed_lines << include_content
@@ -1,6 +1,8 @@
module atlas

import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.develop.gittools
import incubaid.herolib.ui.console

// Play function to process HeroScript actions for Atlas
pub fn play(mut plbook PlayBook) ! {
@@ -10,39 +12,67 @@ pub fn play(mut plbook PlayBook) ! {

	mut atlases := map[string]&Atlas{}

	mut name := ''

	// Process scan actions - scan directories for collections
	mut scan_actions := plbook.find(filter: 'atlas.scan')!
	for mut action in scan_actions {
		mut p := action.params
		name = p.get_default('name', 'main')!
		ignore := p.get_list_default('ignore', [])!
		console.print_item("Scanning Atlas '${name}' with ignore patterns: ${ignore}\n${p}")
		// Get or create atlas
		mut atlas_instance := atlases[name] or {
			console.print_debug('Atlas not found, creating a new one')
			mut new_atlas := new(name: name)!
			atlases[name] = new_atlas
			new_atlas
		}

		mut path := p.get_default('path', '')!

		// NEW: Support git URL as source
		mut git_url := p.get_default('git_url', '')!
		mut git_pull := p.get_default_false('git_pull')
		if git_url != '' {
			// Clone or get the repository using gittools
			path = gittools.path(
				git_pull: git_pull
				git_url:  git_url
			)!.path
		}
		if path == '' {
			return error('Either "path" or "git_url" must be provided for atlas.scan action.')
		}
		atlas_instance.scan(path: path, ignore: ignore)!
		action.done = true

		set(atlas_instance)
	}

	mut atlas_instance_post := atlases[name] or {
		return error("Atlas '${name}' not found. Use !!atlas.scan first.")
	}

	atlas_instance_post.init_post()!

	println(atlas_instance_post)

	// Process export actions - export collections to destination
	mut export_actions := plbook.find(filter: 'atlas.export')!

	// Process explicit export actions
	for mut action in export_actions {
		mut p := action.params
		name = p.get_default('name', 'main')!
		destination := p.get('destination')!
		reset := p.get_default_true('reset')
		include := p.get_default_true('include')
		redis := p.get_default_true('redis')

		mut atlas_instance := atlases[name] or {
			return error("Atlas '${name}' not found. Use !!atlas.scan first.")
		}

		atlas_instance.export(
lib/data/atlas/process.md (new file)
@@ -0,0 +1,4 @@

- first find all pages
- then for each page find all links
@@ -210,6 +210,48 @@ content := page.content(include: true)!
content := page.content()!
```

## Git Integration

Atlas automatically detects the git repository URL for each collection and stores it for reference. This allows users to easily navigate to the source for editing.

### Automatic Detection

When scanning collections, Atlas walks up the directory tree to find the `.git` directory and captures (a sketch of this walk-up follows the list):
- **git_url**: The remote origin URL
- **git_branch**: The current branch
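A minimal sketch of such a walk-up, using only the V standard library. `find_git_info` is a hypothetical helper for illustration, not the module's actual function; the git CLI calls and the error handling are assumptions:

```v
import os

struct GitInfo {
	url    string
	branch string
}

// Walk up from a collection directory until a .git directory is found.
fn find_git_info(start string) ?GitInfo {
	mut dir := os.real_path(start)
	for {
		if os.is_dir(os.join_path(dir, '.git')) {
			// Read remote URL and branch via the git CLI; failures are not handled here.
			url := os.execute('git -C ${dir} remote get-url origin').output.trim_space()
			branch := os.execute('git -C ${dir} rev-parse --abbrev-ref HEAD').output.trim_space()
			return GitInfo{
				url:    url
				branch: branch
			}
		}
		parent := os.dir(dir)
		if parent == dir {
			break // reached the filesystem root without finding .git
		}
		dir = parent
	}
	return none
}
```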

### Scanning from Git URL

You can scan collections directly from a git repository:

```heroscript
!!atlas.scan
    name: 'my_docs'
    git_url: 'https://github.com/myorg/docs.git'
    git_root: '~/code' // optional, defaults to ~/code
```

The repository will be automatically cloned if it doesn't exist locally.
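The same resolution is what the play code shown earlier performs with gittools; a minimal sketch of that call (the URL is a placeholder):

```v
import incubaid.herolib.develop.gittools

fn main() {
	// Clone the repository if it is missing, optionally pull, and return its local checkout.
	repo := gittools.path(
		git_url:  'https://github.com/myorg/docs.git'
		git_pull: false
	) or { panic(err) }
	println(repo.path)
}
```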

### Accessing Edit URLs

```v
mut page := atlas.page_get('guides:intro')!
edit_url := page.get_edit_url()!
println('Edit at: ${edit_url}')
// Output: Edit at: https://github.com/myorg/docs/edit/main/guides.md
```

### Export with Source Information

When exporting, the git URL is displayed:

```
Collection guides source: https://github.com/myorg/docs.git (branch: main)
```

This allows published documentation to link back to the source repository for contributions.

## Links

Atlas supports standard Markdown links with several formats for referencing pages within collections.
@@ -374,266 +416,59 @@ println('Logo image: ${img_path}') // Output: img/logo.png
```

## Saving Collections (Beta)

Saving collections to JSON enables:
1. Persistence of collection metadata (pages, images, files, errors)
2. Cross-language access to Atlas data
3. Faster loading without re-scanning directories

**Status:** Basic save functionality is implemented. Load functionality is work-in-progress.

### Saving Collections

Save collection metadata to JSON files for archival or cross-tool compatibility:

```v
import incubaid.herolib.data.atlas

// Create and scan atlas
mut a := atlas.new(name: 'my_docs')!
a.scan(path: './docs')!

// Save all collections to a specified directory
// Creates: ${save_path}/${collection_name}.json
a.save('./metadata')!
```

### What Gets Saved

Each `.json` file contains:
- Collection metadata (name, path, git URL, git branch)
- All pages (with paths and collection references)
- All images and files (with paths and types)
- All errors (category, page_key, message, file)

**Note:** Circular references (`atlas` and `collection` pointers) are automatically skipped using the `[skip]` attribute and reconstructed during load.
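A minimal illustration of that skip behavior in V's json module, using simplified stand-in structs rather than the module's real definitions:

```v
import json

struct Parent {
	name string
}

struct Child {
	name   string
	parent &Parent @[skip; str: skip] // back-pointer to parent; excluded from JSON
}

fn main() {
	p := Parent{
		name: 'guides'
	}
	c := Child{
		name:   'intro'
		parent: &p
	}
	println(json.encode(c)) // prints {"name":"intro"} - the back-pointer is skipped
}
```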

### Storage Location

```
save_path/
├── collection1.json
├── collection2.json
└── collection3.json
```

**Note:** The JSON files are not written into the collection directories themselves - they are saved to a separate location you specify.

### Limitations

- Load-from-JSON functionality is not yet implemented
- Python loader is planned but not yet available
- Currently, collections must be rescanned from source files

## HeroScript Integration

Atlas integrates with HeroScript, allowing you to define Atlas operations in `.vsh` or playbook files.

### Available Actions

#### `atlas.scan` - Scan Directory for Collections

Scan a directory tree to find and load collections marked with `.collection` files.
@@ -641,163 +476,31 @@ Scan a directory tree to find and load collections marked with `.collection` fil
```heroscript
!!atlas.scan
    name: 'main'
    path: './docs'
    git_url: 'https://github.com/org/repo.git' # optional
    git_root: '~/code' # optional, default: ~/code
    meta_path: './metadata' # optional, saves metadata here
    ignore: ['private', 'draft'] # optional, directories to skip
```

**Parameters:**
- `name` (optional, default: 'main') - Atlas instance name
- `path` (required when git_url is not provided) - Directory path to scan
- `git_url` (alternative to path) - Git repository URL to clone/checkout
- `git_root` (optional when using git_url, default: ~/code) - Base directory for cloning
- `meta_path` (optional) - Directory to save collection metadata JSON
- `ignore` (optional) - List of directory names to skip during scan

### Real Workflow Example: Scan and Export

```heroscript
# Scan collections
!!atlas.scan
    path: './docs'

# Export final version
!!atlas.export
    destination: './public'
    include: true
    redis: false
```
### Using in V Scripts

@@ -815,8 +518,6 @@ heroscript := "
!!atlas.scan
    path: './docs'

!!atlas.export
    destination: './output'
    include: true
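A minimal runner sketch for such a script. `atlas.play` is taken from this module's diff above; the `playbook.new(text: ...)` signature is assumed from herolib conventions, so treat it as an assumption rather than the definitive API:

```v
import incubaid.herolib.core.playbook
import incubaid.herolib.data.atlas

fn main() {
	heroscript := "
!!atlas.scan
    path: './docs'

!!atlas.export
    destination: './output'
    include: true
"
	// Parse the HeroScript text into a playbook, then let atlas process its actions.
	mut plbook := playbook.new(text: heroscript) or { panic(err) }
	atlas.play(mut plbook) or { panic(err) }
}
```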
@@ -840,12 +541,6 @@ Create a `docs.play` file:
    name: 'main'
    path: '~/code/docs'

!!atlas.export
    destination: '~/code/output'
    reset: true
@@ -880,8 +575,6 @@ Errors are automatically collected and reported:
!!atlas.scan
    path: './docs'

# Errors will be printed during export
!!atlas.export
    destination: './output'
@@ -897,14 +590,23 @@ Collection guides - Errors (2)

### Auto-Export Behavior

If you use `!!atlas.scan` **without** an explicit `!!atlas.export`, Atlas will automatically export to the default location (current directory).

To disable auto-export, include an explicit (empty) export action or simply don't include any scan actions.

### Best Practices

1. **Always validate before export**: Use `!!atlas.validate` to catch broken links early
2. **Use named instances**: When working with multiple documentation sets, use the `name` parameter
3. **Enable Redis for production**: Use `redis: true` for web deployments to enable fast lookups
4. **Process includes during export**: Keep `include: true` to embed referenced content in exported files

## Roadmap - Not Yet Implemented

The following features are planned but not yet available:

- [ ] Load collections from `.collection.json` files
- [ ] Python API for reading collections
- [ ] `atlas.validate` playbook action
- [ ] `atlas.fix_links` playbook action
- [ ] Auto-save on collection modifications
- [ ] Collection version control
@@ -1,76 +0,0 @@
module atlas

import json
import incubaid.herolib.core.pathlib

// Save collection to .collection.json in the collection directory
pub fn (c Collection) save() ! {
	// json.encode automatically skips fields marked with [skip]
	json_str := json.encode(c)

	mut json_file := pathlib.get_file(
		path:   '${c.path.path}/.collection.json'
		create: true
	)!

	json_file.write(json_str)!
}

// Save all collections in atlas to their respective directories
pub fn (a Atlas) save() ! {
	for _, col in a.collections {
		col.save()!
	}
}

// Load collection from .collection.json file
pub fn (mut a Atlas) load_collection(path string) !&Collection {
	mut json_file := pathlib.get_file(path: '${path}/.collection.json')!
	json_str := json_file.read()!

	mut col := json.decode(Collection, json_str)!

	// Fix circular references that were skipped during encode
	col.atlas = &a

	// Rebuild error cache from errors
	col.error_cache = map[string]bool{}
	for err in col.errors {
		col.error_cache[err.hash()] = true
	}

	// Fix page references to collection
	for name, mut page in col.pages {
		page.collection = &col
		col.pages[name] = page
	}

	a.collections[col.name] = &col
	return &col
}

// Load all collections from a directory tree
pub fn (mut a Atlas) load_from_directory(path string) ! {
	mut dir := pathlib.get_dir(path: path)!
	a.scan_and_load(mut dir)!
}

// Scan directory for .collection.json files and load them
fn (mut a Atlas) scan_and_load(mut dir pathlib.Path) ! {
	// Check if this directory has .collection.json
	if dir.file_exists('.collection.json') {
		a.load_collection(dir.path)!
		return
	}

	// Scan subdirectories
	mut entries := dir.list(recursive: false)!
	for mut entry in entries.paths {
		if !entry.is_dir() || should_skip_dir(entry) {
			continue
		}

		mut mutable_entry := entry
		a.scan_and_load(mut mutable_entry)!
	}
}
@@ -1,104 +0,0 @@
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.data.paramsparser
import incubaid.herolib.core.texttools
import os

@[params]
pub struct ScanArgs {
pub mut:
	path string @[required]
	save bool = true // save atlas after scan
}

// Scan a directory for collections
fn (mut a Atlas) scan_directory(mut dir pathlib.Path) ! {
	if !dir.is_dir() {
		return error('Path is not a directory: ${dir.path}')
	}

	// Check if this directory is a collection
	if is_collection_dir(dir) {
		collection_name := get_collection_name(mut dir)!
		a.add_collection(path: dir.path, name: collection_name)!
		return
	}

	// Scan subdirectories
	mut entries := dir.list(recursive: false)!
	for mut entry in entries.paths {
		if !entry.is_dir() || should_skip_dir(entry) {
			continue
		}

		mut mutable_entry := entry
		a.scan_directory(mut mutable_entry)!
	}
}

// Check if directory is a collection
fn is_collection_dir(path pathlib.Path) bool {
	return path.file_exists('.collection')
}

// Get collection name from .collection file
fn get_collection_name(mut path pathlib.Path) !string {
	mut collection_name := path.name()
	mut filepath := path.file_get('.collection')!

	content := filepath.read()!
	if content.trim_space() != '' {
		mut params := paramsparser.parse(content)!
		if params.exists('name') {
			collection_name = params.get('name')!
		}
	}

	return texttools.name_fix(collection_name)
}

// Check if directory should be skipped
fn should_skip_dir(entry pathlib.Path) bool {
	name := entry.name()
	return name.starts_with('.') || name.starts_with('_')
}

// Scan collection directory for files
fn (mut c Collection) scan() ! {
	c.scan_path(mut c.path)!
}

fn (mut c Collection) scan_path(mut dir pathlib.Path) ! {
	mut entries := dir.list(recursive: false)!

	for mut entry in entries.paths {
		// Skip hidden files/dirs
		if entry.name().starts_with('.') || entry.name().starts_with('_') {
			continue
		}

		if entry.is_dir() {
			// Recursively scan subdirectories
			mut mutable_entry := entry
			c.scan_path(mut mutable_entry)!
			continue
		}

		// Process files based on extension
		match entry.extension_lower() {
			'md' {
				mut mutable_entry := entry
				c.add_page(mut mutable_entry)!
			}
			'png', 'jpg', 'jpeg', 'gif', 'svg' {
				mut mutable_entry := entry
				c.add_image(mut mutable_entry)!
			}
			else {
				mut mutable_entry := entry
				c.add_file(mut mutable_entry)!
			}
		}
	}
}
@@ -78,11 +78,12 @@ pub fn parse_doc(mut doc elements.Doc) ! {
	}

	if mut llast is elements.Frontmatter || mut llast is elements.Frontmatter2 {
		// DON'T PROCESS FRONTMATTER, it has issues with --- which is added by AI often
		// if trimmed_line == '---' || trimmed_line == '+++' {
		// 	parser.next_start_lf()!
		// 	parser.frontmatter = true
		// 	continue
		// }
		llast.content += '${line}\n'
		parser.next()
		continue
@@ -110,17 +111,18 @@ pub fn parse_doc(mut doc elements.Doc) ! {
		continue
	}

	// DON'T PROCESS FRONTMATTER, it has issues with --- which is added by AI often
	// if line.starts_with('+++') && parser.frontmatter == false {
	// 	mut e := doc.frontmatter_new(mut &doc, '')
	// 	parser.next()
	// 	continue
	// }

	// if line.starts_with('---') && parser.frontmatter == false {
	// 	mut e := doc.frontmatter2_new(mut &doc, '')
	// 	parser.next()
	// 	continue
	// }

	// process headers (# is 35)
	if line.len > 0 && line[0] == 35 {
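For context, a plain-text illustration of the failure case the two hunks above work around (hypothetical input):

```
Some intro paragraph.

---

More text. The --- above is a thematic break that AI-generated markdown
often inserts mid-document; treating it as TOML/YAML frontmatter
corrupted the parse, so frontmatter detection is disabled here.
```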
@@ -12,7 +12,8 @@ fn (mut p Parser) parse_paragraph() ?&MarkdownElement {

	// Read the first line
	for p.pos < p.text.len && p.text[p.pos] != `\n` {
		mut currentchar := p.text[p.pos]
		content += currentchar.ascii_str()
		p.pos++
		p.column++
	}
@@ -36,9 +37,10 @@ fn (mut p Parser) parse_paragraph() ?&MarkdownElement {
	}

	// Check if the line starts with a block element
	mut currentchar := p.text[p.pos]
	if currentchar == `#` || currentchar == `>`
		|| (currentchar == `-` && p.peek(1) == `-` && p.peek(2) == `-`)
		|| (currentchar == `\`` && p.peek(1) == `\`` && p.peek(2) == `\``)
		|| p.is_list_start() || p.is_table_start() || p.is_footnote_definition() {
		break
	}