commit b85ac9adc9
parent 79f2752b30
2025-10-26 18:14:32 +04:00
14 changed files with 598 additions and 855 deletions

View File

@@ -6,8 +6,4 @@
!!atlas.scan
git_url: 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/authentic_web'
!!atlas.export
destination: '/tmp/atlas_export_test'
destination_meta: '/tmp/atlas_export_meta'
include: true
redis: true
!!atlas.export destination: '/tmp/atlas_export'

View File

@@ -3,10 +3,7 @@ module atlas
import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import incubaid.herolib.ui.console
__global (
atlases shared map[string]&Atlas
)
import incubaid.herolib.data.paramsparser
@[heap]
pub struct Atlas {
@@ -16,92 +13,36 @@ pub mut:
groups map[string]&Group // name -> Group mapping
}
@[params]
pub struct AtlasNewArgs {
pub mut:
name string = 'default'
}
// Create a new Atlas
pub fn new(args AtlasNewArgs) !&Atlas {
mut name := texttools.name_fix(args.name)
mut a := Atlas{
name: name
}
atlas_set(a)
return &a
}
// Get Atlas from global map
pub fn atlas_get(name string) !&Atlas {
rlock atlases {
if name in atlases {
return atlases[name] or { return error('Atlas ${name} not found') }
// Create a new collection
fn (mut self Atlas) add_collection(mut path pathlib.Path) !Collection {
mut name := path.name_fix_no_ext()
mut filepath := path.file_get('.collection')!
content := filepath.read()!
if content.trim_space() != '' {
mut params := paramsparser.parse(content)!
if params.exists('name') {
name = params.get('name')!
}
}
return error("Atlas '${name}' not found")
}
name = texttools.name_fix(name)
console.print_item("Adding collection '${name}' to Atlas '${self.name}' at path '${path.path}'")
// Check if Atlas exists
pub fn atlas_exists(name string) bool {
rlock atlases {
return name in atlases
if name in self.collections {
return error('Collection ${name} already exists in Atlas ${self.name}')
}
}
// List all Atlas names
pub fn atlas_list() []string {
rlock atlases {
return atlases.keys()
mut c := Collection{
name: name
path: path.path // absolute path
atlas: &self // Set atlas reference
error_cache: map[string]bool{}
}
}
// Store Atlas in global map
fn atlas_set(atlas Atlas) {
lock atlases {
atlases[atlas.name] = &atlas
}
}
c.init()!
@[params]
pub struct AddCollectionArgs {
pub mut:
name string @[required]
path string @[required]
}
self.collections[name] = &c
// Add a collection to the Atlas
pub fn (mut a Atlas) add_collection(args AddCollectionArgs) !&Collection {
name := texttools.name_fix(args.name)
console.print_item('Known collections: ${a.collections.keys()}')
console.print_item("Adding collection '${name}' to Atlas '${a.name}' at path '${args.path}'")
if name in a.collections {
return error('Collection ${name} already exists in Atlas ${a.name}')
}
mut col := a.new_collection(name: name, path: args.path)!
col.scan()!
a.collections[name] = &col
return &col
}
// Scan a path for collections
@[params]
pub struct ScanArgs {
pub mut:
path string @[required]
meta_path string // where collection json files will be stored
ignore []string // list of directory names to ignore
}
pub fn (mut a Atlas) scan(args ScanArgs) ! {
mut path := pathlib.get_dir(path: args.path)!
a.scan_directory(mut path, args.ignore)!
a.validate_links()!
a.fix_links()!
return c
}
// Get a collection by name
@@ -156,3 +97,64 @@ pub fn (a Atlas) groups_get(session Session) []&Group {
return matching
}
pub fn (mut a Atlas) validate() ! {
a.validate_links()!
a.fix_links()!
}
//////////////////SCAN
// Scan a path for collections
@[params]
pub struct ScanArgs {
pub mut:
path string @[required]
ignore []string // list of directory names to ignore
}
fn (mut a Atlas) scan(args ScanArgs) ! {
mut path := pathlib.get_dir(path: args.path)!
mut ignore := args.ignore.clone()
ignore = ignore.map(it.to_lower())
a.scan_(mut path, ignore)!
}
// Scan a directory for collections
fn (mut a Atlas) scan_(mut dir pathlib.Path, ignore_ []string) ! {
console.print_item('Scanning directory: ${dir.path}')
if !dir.is_dir() {
return error('Path is not a directory: ${dir.path}')
}
// Check if this directory is a collection
if dir.file_exists('.collection') {
collname := dir.name_fix_no_ext()
if collname.to_lower() in ignore_ {
return
}
mut col := a.add_collection(mut dir)!
if collname == 'groups' {
col.scan_groups()!
}
return
}
// Scan subdirectories
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
if !entry.is_dir() || should_skip_dir(entry) {
continue
}
mut mutable_entry := entry
a.scan_(mut mutable_entry, ignore_)!
}
}
// Check if directory should be skipped
fn should_skip_dir(entry pathlib.Path) bool {
name := entry.name()
return name.starts_with('.') || name.starts_with('_')
}
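
Taken together, the reworked scan path keeps the old flow: create an Atlas, scan a directory tree for `.collection` markers, then validate and fix links before export. A minimal in-module sketch of that flow (the docs path and ignore list below are illustrative, not from this commit):

    mut a := new(name: 'docs')!
    a.scan(path: '/tmp/docs_src', ignore: ['drafts'])!  // walks dirs, adds one Collection per .collection file
    a.validate()!                                       // validate_links + fix_links
    a.export(destination: '/tmp/atlas_export')!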

View File

@@ -2,7 +2,7 @@ module atlas
import incubaid.herolib.core.pathlib
import incubaid.herolib.core.texttools
import incubaid.herolib.core.base
import incubaid.herolib.develop.gittools
import incubaid.herolib.data.paramsparser { Params }
import incubaid.herolib.ui.console
import os
@@ -17,83 +17,70 @@ pub mut:
@[heap]
pub struct Collection {
pub mut:
name string @[required]
path pathlib.Path @[required]
pages map[string]&Page
images map[string]&File
files map[string]&File
atlas &Atlas @[skip; str: skip]
errors []CollectionError
error_cache map[string]bool
git_url string // NEW: URL to the git repository for editing
git_branch string // NEW: Git branch for this collection
git_edit_url string @[skip]
acl_read []string // Group names allowed to read (lowercase)
acl_write []string // Group names allowed to write (lowercase)
name string
path string // absolute path
pages map[string]&Page
files map[string]&File
atlas &Atlas @[skip; str: skip]
errors []CollectionError
error_cache map[string]bool
git_url string
acl_read []string // Group names allowed to read (lowercase)
acl_write []string // Group names allowed to write (lowercase)
}
@[params]
pub struct CollectionNewArgs {
pub mut:
name string @[required]
path string @[required]
// Return the collection directory as a pathlib.Path
pub fn (mut c Collection) path() !pathlib.Path {
return pathlib.get_dir(path: c.path, create: false)!
}
// Create a new collection
fn (mut self Atlas) new_collection(args CollectionNewArgs) !Collection {
mut name := texttools.name_fix(args.name)
mut path := pathlib.get_dir(path: args.path)!
mut col := Collection{
name: name
path: path
atlas: &self // Set atlas reference
error_cache: map[string]bool{}
}
return col
fn (mut c Collection) init() ! {
mut p := c.path()!
c.scan(mut p)!
c.scan_acl()!
}
////////////////////////////////////////////////////////////////////////////////////////////////////////
// Add a page to the collection
fn (mut c Collection) add_page(mut p pathlib.Path) ! {
name := p.name_fix_no_ext()
fn (mut c Collection) add_page(mut path pathlib.Path) ! {
name := path.name_fix_no_ext()
if name in c.pages {
return error('Page ${name} already exists in collection ${c.name}')
}
relativepath := path.path_relative(c.path()!.path)!
p_new := new_page(
mut p_new := Page{
name: name
path: p
path: relativepath
collection_name: c.name
collection: &c
)!
}
c.pages[name] = &p_new
}
// Add an image to the collection
fn (mut c Collection) add_image(mut p pathlib.Path) ! {
name := p.name_fix_no_ext()
if name in c.images {
return error('Image ${name} already exists in collection ${c.name}')
}
mut img := new_file(path: p)!
c.images[name] = &img
}
// Add a file to the collection
fn (mut c Collection) add_file(mut p pathlib.Path) ! {
name := p.name_fix_no_ext()
if name in c.files {
return error('File ${name} already exists in collection ${c.name}')
return error('Page ${name} already exists in collection ${c.name}')
}
relativepath := p.path_relative(c.path()!.path)!
mut file_new := File{
name: name
ext: p.extension_lower()
path: relativepath // relative path of file in the collection
collection: &c
}
mut file := new_file(path: p)!
c.files[name] = &file
if p.is_image() {
file_new.ftype = .image
} else {
file_new.ftype = .file
}
c.files[name] = &file_new
}
// Get a page by name
@@ -106,18 +93,26 @@ pub fn (c Collection) page_get(name string) !&Page {
// Get an image by name
pub fn (c Collection) image_get(name string) !&File {
return c.images[name] or { return FileNotFound{
mut img := c.files[name] or { return FileNotFound{
collection: c.name
file: name
} }
if img.ftype != .image {
return error('File `${name}` in collection ${c.name} is not an image')
}
return img
}
// Get a file by name
pub fn (c Collection) file_get(name string) !&File {
return c.files[name] or { return FileNotFound{
mut f := c.files[name] or { return FileNotFound{
collection: c.name
file: name
} }
if f.ftype != .file {
return error('File `${name}` in collection ${c.name} is not a file')
}
return f
}
// Check if page exists
@@ -127,101 +122,14 @@ pub fn (c Collection) page_exists(name string) bool {
// Check if image exists
pub fn (c Collection) image_exists(name string) bool {
return name in c.images
f := c.files[name] or { return false }
return f.ftype == .image
}
// Check if file exists
pub fn (c Collection) file_exists(name string) bool {
return name in c.files
}
@[params]
pub struct CollectionExportArgs {
pub mut:
destination pathlib.Path @[required]
reset bool = true
include bool = true // process includes during export
redis bool = true
}
// Export a single collection
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
// Create collection directory
mut col_dir := pathlib.get_dir(
path: '${args.destination.path}/${c.name}'
create: true
)!
if args.reset {
col_dir.empty()!
}
// Write .collection file
mut cfile := pathlib.get_file(path: '${col_dir.path}/.collection', create: true)!
cfile.write("name:${c.name} src:'${c.path.path}'")!
// Export pages with cross-collection link handling
for _, mut page in c.pages {
content := page.content(include: args.include)!
// NEW: Process cross-collection links
processed_content := process_cross_collection_links(content, c, mut col_dir, c.atlas)!
mut dest_file := pathlib.get_file(path: '${col_dir.path}/${page.name}.md', create: true)!
dest_file.write(processed_content)!
// Redis operations...
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:${c.name}', page.name, '${page.name}.md')!
}
}
// Export images
if c.images.len > 0 {
img_dir := pathlib.get_dir(
path: '${col_dir.path}/img'
create: true
)!
for _, mut img in c.images {
dest_path := '${img_dir.path}/${img.file_name()}'
img.path.copy(dest: dest_path)!
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:${c.name}', img.file_name(), 'img/${img.file_name()}')!
}
}
}
// Export files
if c.files.len > 0 {
files_dir := pathlib.get_dir(
path: '${col_dir.path}/files'
create: true
)!
for _, mut file in c.files {
dest_path := '${files_dir.path}/${file.file_name()}'
file.path.copy(dest: dest_path)!
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:${c.name}', file.file_name(), 'files/${file.file_name()}')!
}
}
}
// Store collection metadata in Redis
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:path', c.name, col_dir.path)!
}
f := c.files[name] or { return false }
return f.ftype == .file
}
@[params]
@@ -311,7 +219,8 @@ pub fn (c Collection) print_errors() {
// Validate all links in collection
pub fn (mut c Collection) validate_links() ! {
for _, mut page in c.pages {
page.validate_links()!
content := page.content(include: true)!
page.find_links(content)! // will walk over links see if errors and add errors
}
}
@@ -319,14 +228,15 @@ pub fn (mut c Collection) validate_links() ! {
pub fn (mut c Collection) fix_links() ! {
for _, mut page in c.pages {
// Read original content
content := page.read_content()!
content := page.content()!
// Fix links
fixed_content := page.fix_links(content)!
fixed_content := page.content_with_fixed_links()!
// Write back if changed
if fixed_content != content {
page.path.write(fixed_content)!
mut p := page.path()!
p.write(fixed_content)!
}
}
}
@@ -375,19 +285,96 @@ pub fn (c Collection) can_write(session Session) bool {
return false
}
// Detect git repository URL for a collection
fn (mut c Collection) init_git_info() ! {
mut current_path := c.path()!
// Walk up directory tree to find .git
mut git_repo := current_path.parent_find('.git') or {
// No git repo found
return
}
if git_repo.path == '' {
panic('Unexpected empty git repo path')
}
mut gs := gittools.new()!
mut p := c.path()!
mut location := gs.gitlocation_from_path(p.path)!
c.git_url = location.web_url()!
}
////////////SCANNING FUNCTIONS ?//////////////////////////////////////////////////////
fn (mut c Collection) scan(mut dir pathlib.Path) ! {
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
// Skip hidden files/dirs
if entry.name().starts_with('.') || entry.name().starts_with('_') {
continue
}
if entry.is_dir() {
// Recursively scan subdirectories
mut mutable_entry := entry
c.scan(mut mutable_entry)!
continue
}
// Process files based on extension
match entry.extension_lower() {
'md' {
mut mutable_entry := entry
c.add_page(mut mutable_entry)!
}
else {
mut mutable_entry := entry
c.add_file(mut mutable_entry)!
}
}
}
}
// Scan for ACL files
fn (mut c Collection) scan_acl() ! {
// Look for read.acl in collection directory
read_acl_path := '${c.path()!.path}/read.acl'
if os.exists(read_acl_path) {
content := os.read_file(read_acl_path)!
// Split by newlines and normalize
c.acl_read = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
// Look for write.acl in collection directory
write_acl_path := '${c.path()!.path}/write.acl'
if os.exists(write_acl_path) {
content := os.read_file(write_acl_path)!
// Split by newlines and normalize
c.acl_write = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
}
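
The ACL files are plain text with one group name per line; scan_acl trims each line, drops empties, and lowercases the rest. A hedged example of a read.acl sitting next to the .collection file (group names invented):

    editors
    Publishing-Team
    ADMINS

which loads as acl_read = ['editors', 'publishing-team', 'admins'].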
// scan_groups scans the collection's directory for .group files and loads them into memory.
pub fn (mut c Collection) scan_groups() ! {
if c.name != 'groups' {
return error('scan_groups only works on "groups" collection')
}
mut entries := c.path.list(recursive: false)!
mut p := c.path()!
mut entries := p.list(recursive: false)!
for mut entry in entries.paths {
if entry.extension_lower() == 'group' {
filename := entry.name_fix_no_ext()
mut visited := map[string]bool{}
mut group := parse_group_file(filename, c.path.path, mut visited)!
mut group := parse_group_file(filename, c.path()!.path, mut visited)!
c.atlas.group_add(mut group)!
}

View File

@@ -1,7 +1,8 @@
module atlas
import incubaid.herolib.core.pathlib
import incubaid.herolib.develop.gittools
import incubaid.herolib.core.base
import json
@[params]
pub struct ExportArgs {
@@ -13,43 +14,10 @@ pub mut:
redis bool = true
}
// Generate edit URL for a page in the repository
pub fn (p Page) get_edit_url() !string {
col := p.collection
if col.git_url == '' {
return error('No git URL available for collection ${col.name}')
}
mut gs := gittools.new()!
mut location := gs.gitlocation_from_url(col.git_url)!
location.branch_or_tag = col.git_branch
location.path = p.path.name()
// Determine the provider and build appropriate edit URL
provider := location.provider
mut url_base := 'https://${provider}.com/${location.account}/${location.name}'
if provider.contains('gitea') || provider.contains('git.') {
return '${url_base}/src/branch/${location.branch_or_tag}/${location.path}'
}
if provider == 'github' {
return '${url_base}/edit/${location.branch_or_tag}/${location.path}'
}
if provider == 'gitlab' {
return '${url_base}/-/edit/${location.branch_or_tag}/${location.path}'
}
// Fallback for unknown providers
return '${url_base}/edit/${location.branch_or_tag}/${location.path}'
}
// Export all collections
pub fn (mut a Atlas) export(args ExportArgs) ! {
mut dest := pathlib.get_dir(path: args.destination, create: true)!
// NEW: Save metadata if destination_meta is provided
if args.destination_meta.len > 0 {
a.save(args.destination_meta)!
}
if args.reset {
dest.empty()!
}
@@ -64,14 +32,100 @@ pub fn (mut a Atlas) export(args ExportArgs) ! {
include: args.include
redis: args.redis
)!
}
}
// Print collection info including git URL
if col.has_errors() {
col.print_errors()
@[params]
pub struct CollectionExportArgs {
pub mut:
destination pathlib.Path @[required]
reset bool = true
include bool = true // process includes during export
redis bool = true
}
// Export a single collection
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
// Create collection directory
mut col_dir := pathlib.get_dir(
path: '${args.destination.path}/content/${c.name}'
create: true
)!
mut col_dir_meta := pathlib.get_dir(
path: '${args.destination.path}/meta/${c.name}'
create: true
)!
if args.reset {
col_dir.empty()!
col_dir_meta.empty()!
}
c.init_git_info()!
if c.has_errors() {
c.print_errors()
}
for _, mut page in c.pages {
content := page.content(include: args.include)!
// NEW: Process cross-collection links
processed_content := process_cross_collection_links(content, c, mut col_dir, c.atlas)!
mut dest_file := pathlib.get_file(path: '${col_dir.path}/${page.name}.md', create: true)!
dest_file.write(processed_content)!
// Redis operations...
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:${c.name}', page.name, page.path)!
}
if col.git_url != '' {
println('Collection ${col.name} source: ${col.git_url} (branch: ${col.git_branch})')
meta := json.encode_pretty(page)
mut json_file := pathlib.get_file(
path: '${col_dir_meta.path}/${page.name}.json'
create: true
)!
json_file.write(meta)!
}
// Export images
if c.images.len > 0 {
img_dir := pathlib.get_dir(
path: '${col_dir.path}/img'
create: true
)!
for _, mut img in c.images {
dest_path := '${img_dir.path}/${img.file_name()}'
img.path.copy(dest: dest_path)!
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:${c.name}', img.file_name(), img.path.path)!
}
}
}
// Export files
if c.files.len > 0 {
files_dir := pathlib.get_dir(
path: '${col_dir.path}/files'
create: true
)!
for _, mut file in c.files {
dest_path := '${files_dir.path}/${file.file_name()}'
file.path.copy(dest: dest_path)!
if args.redis {
mut context := base.context()!
mut redis := context.redis()!
redis.hset('atlas:${c.name}', file.file_name(), file.path.path)!
}
}
}
}

lib/data/atlas/factory.v (new file, 59 lines)
View File

@@ -0,0 +1,59 @@
module atlas
import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import incubaid.herolib.ui.console
import incubaid.herolib.data.paramsparser
__global (
atlases shared map[string]&Atlas
)
@[params]
pub struct AtlasNewArgs {
pub mut:
name string = 'default'
}
// Create a new Atlas
pub fn new(args AtlasNewArgs) !&Atlas {
mut name := texttools.name_fix(args.name)
mut a := Atlas{
name: name
}
set(a)
return &a
}
// Get Atlas from global map
pub fn get(name string) !&Atlas {
rlock atlases {
if name in atlases {
return atlases[name] or { return error('Atlas ${name} not found') }
}
}
return error("Atlas '${name}' not found")
}
// Check if Atlas exists
pub fn exists(name string) bool {
rlock atlases {
return name in atlases
}
}
// List all Atlas names
pub fn list() []string {
rlock atlases {
return atlases.keys()
}
}
// Store Atlas in global map
fn set(atlas Atlas) {
lock atlases {
atlases[atlas.name] = &atlas
}
}
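
The new factory exposes the global registry through plain module-level functions (new, get, exists, list) instead of the old atlas_-prefixed names. A small usage sketch, assuming the module is imported as incubaid.herolib.data.atlas:

    import incubaid.herolib.data.atlas

    mut a := atlas.new(name: 'docs')!  // registers 'docs' in the shared atlases map
    assert atlas.exists('docs')
    mut same := atlas.get('docs')!     // look it up again by name
    println(atlas.list())              // ['docs']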

View File

@@ -3,49 +3,29 @@ module atlas
import incubaid.herolib.core.pathlib
pub enum FileType {
file
image
file
image
}
pub struct File {
pub mut:
name string // name without extension
ext string // file extension
path pathlib.Path // full path to file
ftype FileType // file or image
name string // name without extension
ext string // file extension
path string // relative path of file in the collection
ftype FileType // file or image
collection &Collection @[skip; str: skip] // Reference to parent collection
}
@[params]
pub struct NewFileArgs {
pub:
path pathlib.Path @[required]
}
pub fn new_file(args NewFileArgs) !File {
mut f := File{
path: args.path
}
f.init()!
return f
}
fn (mut f File) init() ! {
// Determine file type
if f.path.is_image() {
f.ftype = .image
} else {
f.ftype = .file
}
// Extract name and extension
f.name = f.path.name_fix_no_ext()
f.ext = f.path.extension_lower()
// Return the file's absolute path as a pathlib.Path (f.path is stored relative to the collection)
pub fn (mut f File) path() !pathlib.Path {
// resolve the relative path against the collection directory, mirroring Page.path()
colpath := f.collection.path()!
return pathlib.get_file(path: '${colpath.path}/${f.path}', create: false)!
}
pub fn (f File) file_name() string {
return '${f.name}.${f.ext}'
return '${f.name}.${f.ext}'
}
pub fn (f File) is_image() bool {
return f.ftype == .image
}
return f.ftype == .image
}

View File

@@ -2,101 +2,213 @@ module atlas
import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import os
// Link represents a markdown link found in content
pub struct Link {
pub mut:
text string // Link text [text]
target string // Original link target
line int // Line number
col_start int // Column start position
col_end int // Column end position
collection string // Target collection (if specified)
page string // Target page name (normalized)
is_local bool // Whether link points to local page
valid bool // Whether link target exists
src string // Source content where link was found (what to replace)
text string // Link text [text]
target string // Original link target (the source text)
line int // Line number where link was found
target_collection_name string
target_page_name string
status LinkStatus
page &Page @[skip; str: skip] // Reference to page where this link is found
}
pub enum LinkStatus {
init
external
page_found
page_not_found
anchor
error
}
fn (mut self Link) key() string {
return '${self.target_collection_name}:${self.target_page_name}'
}
// is the link in the same collection as the page containing the link
fn (mut self Link) is_local_in_collection() bool {
return self.target_collection_name == self.page.collection.name
}
// is the link pointing to an external resource e.g. http, git, mailto, ftp
pub fn (mut self Link) is_external() bool {
return self.status == .external
}
pub fn (mut self Link) target_page() !&Page {
if self.status == .external {
return error('External links do not have a target page')
}
return self.page.collection.atlas.page_get(self.key())
}
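
Link identity throughout the module is the collection:page key built by key(). A few illustrative mappings (collection and page names invented):

    // '[intro](tech:intro.md)'      -> target_collection_name 'tech', target_page_name 'intro', key() == 'tech:intro'
    // '[intro](intro.md)'           -> collection of the page containing the link, target_page_name 'intro'
    // '[site](https://example.com)' -> status == .external, no target page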
// Find all markdown links in content
pub fn find_links(content string) []Link {
fn (mut p Page) find_links(content string) ![]Link {
mut links := []Link{}
lines := content.split_into_lines()
mut lines := content.split_into_lines()
for line_idx, line in lines {
mut pos := 0
for {
// Find next [
open_bracket := line.index_after('[', pos) or { break }
// Find matching ]
close_bracket := line.index_after(']', open_bracket) or { break }
// Check for (
if close_bracket + 1 >= line.len || line[close_bracket + 1] != `(` {
pos = close_bracket + 1
continue
}
// Find matching )
open_paren := close_bracket + 1
close_paren := line.index_after(')', open_paren) or { break }
// Extract link components
text := line[open_bracket + 1..close_bracket]
target := line[open_paren + 1..close_paren]
mut link := Link{
text: text
target: target.trim_space()
line: line_idx + 1
col_start: open_bracket
col_end: close_paren + 1
src: line[open_bracket..close_paren + 1]
text: text
target: target.trim_space()
line: line_idx + 1
page: &p
}
parse_link_target(mut link)
p.parse_link_target(mut link)
links << link
pos = close_paren + 1
}
}
return links
}
// Parse link target to extract collection and page
fn parse_link_target(mut link Link) {
fn (mut p Page) parse_link_target(mut link Link) {
target := link.target
// Skip external links
if target.starts_with('http://') || target.starts_with('https://')
if target.starts_with('http://') || target.starts_with('https://')
|| target.starts_with('mailto:') || target.starts_with('ftp://') {
link.status = .external
return
}
// Skip anchors
if target.starts_with('#') {
link.status = .anchor
return
}
link.is_local = true
// Format: $collection:$pagename or $collection:$pagename.md
if target.contains(':') {
parts := target.split(':')
if parts.len >= 2 {
link.collection = texttools.name_fix(parts[0])
link.page = normalize_page_name(parts[1])
link.target_collection_name = texttools.name_fix(parts[0])
link.target_page_name = normalize_page_name(parts[1])
}
return
} else {
link.target_page_name = normalize_page_name(target).trim_space()
link.target_collection_name = p.collection.name
}
if !p.collection.atlas.page_exists(link.key()) {
p.collection.error(
category: .invalid_page_reference
page_key: p.key()
message: 'Broken link to `${link.key()}` at line ${link.line}: `${link.src}`'
show_console: false
)
link.status = .page_not_found
} else {
link.status = .page_found
}
// For all other formats, extract filename from path (ignore path components)
// Handles: $page, path/to/$page, /path/to/$page, /path/to/$page.md
filename := os.base(target)
link.page = normalize_page_name(filename)
}
////////////////FIX PAGES FOR THE LINKS///////////////////////
// Fix links in page content - rewrites links with proper relative paths
fn (mut p Page) content_with_fixed_links() !string {
mut content := p.content(include: false)!
if p.links.len == 0 {
return content
}
// Process links in reverse order to maintain positions
for mut link in p.links.reverse() {
// if the target page does not exist there is no point in fixing the link
if link.status != .page_found {
continue
}
// if not local then no point in fixing
if !link.is_local_in_collection() {
continue
}
// Get target page
mut target_page := link.target_page()!
mut target_path := target_page.path()!
relative_path := target_path.path_relative(p.path()!.path)!
new_link := '[${link.text}](${relative_path})'
// Replace in content
content = content.replace(link.src, new_link)
}
return content
}
// process_cross_collection_links handles exporting cross-collection references
// It:
// 1. Finds all cross-collection links (collection:page format)
// 2. Copies the target page to the export directory
// 3. Renames the link to avoid conflicts (collectionname_pagename.md)
// 4. Rewrites the link in the content
fn (mut p Page) process_cross_collection_links(mut export_dir pathlib.Path) !string {
mut c := p.content(include: true)!
mut links := p.find_links(c)!
// Process links in reverse order to maintain string positions
for mut link in links.reverse() {
if link.status != .page_found {
continue
}
mut target_page := link.target_page()!
mut target_path := target_page.path()!
// Copy target page with renamed filename
exported_filename := '${target_page.collection.name}_${target_page.name}.md'
page_content := target_page.content(include: true)!
mut exported_file := pathlib.get_file(
path: '${export_dir.path}/${exported_filename}'
create: true
)!
exported_file.write(page_content)!
// Update link in source content
new_link := '[${link.text}](${exported_filename})'
c = c.replace(link.src, new_link)
panic('need to do for files too')
}
return c
}
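
Concretely, exporting a page that links across collections both copies the target page and rewrites the reference; same-collection links are handled by content_with_fixed_links. A hedged before/after (collection names invented), with file and image targets still unhandled per the panic above:

    // page in collection 'docs', before export:  [specs](tech:specs)
    // after export:                              [specs](tech_specs.md)   (tech_specs.md is written into the export dir)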
/////////////TOOLS//////////////////////////////////
// Normalize page name (remove .md, apply name_fix)
fn normalize_page_name(name string) string {
mut clean := name
@@ -105,222 +217,3 @@ fn normalize_page_name(name string) string {
}
return texttools.name_fix(clean)
}
// Validate links in page
pub fn (mut p Page) validate_links() ! {
content := p.read_content()!
links := find_links(content)
for link in links {
if !link.is_local {
continue
}
// Determine target collection
mut target_collection := link.collection
if target_collection == '' {
target_collection = p.collection_name
}
// Check if page exists
page_key := '${target_collection}:${link.page}'
if !p.collection.atlas.page_exists(page_key) {
p.collection.error(
category: .invalid_page_reference
page_key: p.key()
message: 'Broken link to `${page_key}` at line ${link.line}: [${link.text}](${link.target})'
show_console: false
)
}
}
}
// Fix links in page content - rewrites links with proper relative paths
pub fn (mut p Page) fix_links(content string) !string {
links := find_links(content)
if links.len == 0 {
return content
}
mut result := content
// Process links in reverse order to maintain positions
for link in links.reverse() {
if !link.is_local || link.page == '' {
continue
}
// Determine target collection
mut target_collection := link.collection
if target_collection == '' {
target_collection = p.collection_name
}
// Only fix links within same collection
if target_collection != p.collection_name {
continue
}
// Get target page
page_key := '${target_collection}:${link.page}'
mut target_page := p.collection.atlas.page_get(page_key) or {
// Skip if page doesn't exist - error already reported in validate
continue
}
// Calculate relative path
relative_path := calculate_relative_path(mut p.path, mut target_page.path)
// Build replacement
old_link := '[${link.text}](${link.target})'
new_link := '[${link.text}](${relative_path})'
// Replace in content
result = result.replace(old_link, new_link)
}
return result
}
// Calculate relative path from source file to target file with .md extension
fn calculate_relative_path(mut from pathlib.Path, mut to pathlib.Path) string {
from_dir := from.path_dir()
to_dir := to.path_dir()
to_name := to.name_fix_no_ext()
// If in same directory, just return filename with .md
if from_dir == to_dir {
return '${to_name}.md'
}
// Split paths into parts
from_parts := from_dir.split(os.path_separator).filter(it != '')
to_parts := to_dir.split(os.path_separator).filter(it != '')
// Find common base
mut common_len := 0
for i := 0; i < from_parts.len && i < to_parts.len; i++ {
if from_parts[i] == to_parts[i] {
common_len = i + 1
} else {
break
}
}
// Build relative path
mut rel_parts := []string{}
// Add ../ for each directory we need to go up
up_count := from_parts.len - common_len
for _ in 0..up_count {
rel_parts << '..'
}
// Add path down to target
for i := common_len; i < to_parts.len; i++ {
rel_parts << to_parts[i]
}
// Add filename with .md extension
rel_parts << '${to_name}.md'
return rel_parts.join('/')
}
// process_cross_collection_links handles exporting cross-collection references
// It:
// 1. Finds all cross-collection links (collection:page format)
// 2. Copies the target page to the export directory
// 3. Renames the link to avoid conflicts (collectionname_pagename.md)
// 4. Rewrites the link in the content
pub fn process_cross_collection_links(
content string,
source_col Collection,
mut export_dir pathlib.Path,
atlas &Atlas
) !string {
mut result := content
links := find_links(content)
// Process links in reverse order to maintain string positions
for link in links.reverse() {
if !link.is_local || link.page == '' {
continue
}
// Determine target collection
mut target_collection := link.collection
if target_collection == '' {
target_collection = source_col.name
}
// Skip same-collection links (already handled by fix_links)
if target_collection == source_col.name {
continue
}
// Get the target page
page_key := '${target_collection}:${link.page}'
mut target_page := atlas.page_get(page_key) or {
// Link target doesn't exist, leave as-is
continue
}
// Copy target page with renamed filename
exported_filename := '${target_collection}_${target_page.name}.md'
page_content := target_page.content(include: true)!
mut exported_file := pathlib.get_file(
path: '${export_dir.path}/${exported_filename}'
create: true
)!
exported_file.write(page_content)!
// Update link in source content
old_link := '[${link.text}](${link.target})'
new_link := '[${link.text}](${exported_filename})'
result = result.replace(old_link, new_link)
}
return result
}
// process_cross_collection_images handles exporting images from other collections
// Similar to process_cross_collection_links but for images
pub fn process_cross_collection_images(
content string,
source_col Collection,
mut export_dir pathlib.Path,
atlas &Atlas
) !string {
// Extract image references: ![alt](collection:image.png)
// Copy images to img/ directory with renamed filename
// Update references in content
// Pattern: ![alt](collection:filename.ext)
// Update to: ![alt](img/collection_filename.ext)
mut result := content
// Find image markdown syntax: ![alt](path)
lines := result.split_into_lines()
mut processed_lines := []string{}
for line in lines {
mut processed_line := line
// Find image references - look for ![...](...) with cross-collection prefix
// This is a simplified approach; full regex would be better
if line.contains('![') && line.contains(']:') {
// Extract and process cross-collection image references
// For each reference like [imagename](othercol:image.png)
// Copy from othercol to img/ as othercol_image.png
// Update link to img/othercol_image.png
// TODO: Implement image extraction and copying
}
processed_lines << processed_line
}
return processed_lines.join_lines()
}

View File

@@ -7,47 +7,42 @@ import incubaid.herolib.core.texttools
pub struct Page {
pub mut:
name string
path pathlib.Path
path string // in collection
collection_name string
collection &Collection @[skip; str: skip] // Reference to parent collection
links []Link
// macros []Macro
collection &Collection @[skip; str: skip] // Reference to parent collection
}
@[params]
pub struct NewPageArgs {
pub:
name string @[required]
path pathlib.Path @[required]
collection_name string @[required]
collection &Collection @[required]
}
pub fn new_page(args NewPageArgs) !Page {
return Page{
name: args.name
path: args.path
collection_name: args.collection_name
collection: args.collection
}
name string @[required]
path string @[required]
collection_name string @[required]
collection &Collection @[required]
}
// Read content without processing includes
pub fn (mut p Page) read_content() !string {
return p.path.read()!
pub fn (mut p Page) path() !pathlib.Path {
curpath := p.collection.path()!
return pathlib.get_file(path: '${curpath.path}/${p.path}', create: false)! // should be relative to collection
}
// Read content with includes processed (default behavior)
@[params]
pub struct ReadContentArgs {
pub mut:
include bool = true
include bool
}
// Read page content; process includes only when args.include is set
pub fn (mut p Page) content(args ReadContentArgs) !string {
mut content := p.path.read()!
mut mypath := p.path()!
mut content := mypath.read()!
if args.include {
mut v := map[string]bool{}
return p.process_includes(content, mut v)!
content = p.process_includes(content, mut v)!
}
return content
}
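
Note the default of ReadContentArgs.include flipped from true to false, so content() now returns the raw page unless includes are requested explicitly. A short sketch, assuming a page instance p:

    raw := p.content()!                // raw markdown, includes left untouched (new default)
    full := p.content(include: true)!  // includes expanded via process_includes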
@@ -124,7 +119,7 @@ fn (mut p Page) process_includes(content string, mut visited map[string]bool) !s
}
// Recursively process the included page
include_content := include_page.process_includes(include_page.read_content()!, mut
include_content := include_page.process_includes(include_page.content()!, mut
visited)!
processed_lines << include_content

View File

@@ -31,11 +31,13 @@ pub fn play(mut plbook PlayBook) ! {
// NEW: Support git URL as source
mut git_url := p.get_default('git_url', '')!
mut git_pull := p.get_default_false('git_pull')
if git_url != '' {
// Clone or get the repository using gittools
mut gs := gittools.new(coderoot: p.get_default('git_root', '~/code')!)!
mut repo := gs.get_repo(url: git_url)!
path = repo.path()
path = gittools.path(
git_pull: git_pull
git_url: git_url
)!.path
}
if path == '' {
return error('Either "path" or "git_url" must be provided for atlas.scan action.')
@@ -54,7 +56,6 @@ pub fn play(mut plbook PlayBook) ! {
mut p := action.params
name := p.get_default('name', 'main')!
destination := p.get('destination')!
destination_meta := p.get_default('destination_meta', '')! // NEW
reset := p.get_default_true('reset')
include := p.get_default_true('include')
redis := p.get_default_true('redis')
@@ -64,11 +65,10 @@ pub fn play(mut plbook PlayBook) ! {
}
atlas_instance.export(
destination: destination
destination_meta: destination_meta // NEW
reset: reset
include: include
redis: redis
destination: destination
reset: reset
include: include
redis: redis
)!
action.done = true
}

View File

@@ -0,0 +1,4 @@
- first find all pages
- then for each page find all links
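
The note above matches the two-pass shape of the new code: scanning registers every page first, and only once the full page set is known are links resolved, so cross-collection references can be checked. A rough in-module sketch, assuming an Atlas a that has already been scanned:

    // pass 1 happens during a.scan(...): every .md file becomes a Page in its Collection
    // pass 2: with all pages known, walk them and resolve/fix links
    a.validate()!  // per collection: validate_links() then fix_links()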

View File

@@ -1,85 +0,0 @@
module atlas
import json
import incubaid.herolib.core.pathlib
// Save collection to .collection.json in the collection directory
pub fn (c Collection) save(path string) ! {
// json.encode automatically skips fields marked with [skip]
json_str := c.encode_json()
mut json_file := pathlib.get_file(
path: '${path}/${c.name}.json'
create: true
)!
json_file.write(json_str)!
}
// encode_json is a custom JSON encoder for the Collection struct.
// It converts the struct to a map, adds the git_edit_url, and then encodes it to a JSON string.
pub fn (c Collection) encode_json() string {
// First, encode the struct to a JSON string, then decode it back into a map.
// This is a common way to convert a struct to a map in V when you need to add dynamic fields.
json_str := json.encode(c)
mut data := json.decode(map[string]string, json_str) or { return '{}' }
data['git_edit_url'] = json.encode(c.git_edit_url)
return json.encode_pretty(data)
}
// Save all collections in atlas to their respective directories
pub fn (a Atlas) save(path string) ! {
for _, col in a.collections {
col.save(path)!
}
}
// // Load collection from .collection.json file
// pub fn (mut a Atlas) load_meta(path string) !&Collection {
// mut json_file := pathlib.get_file(path: '${path}/.collection.json')!
// json_str := json_file.read()!
// mut col := json.decode(Collection, json_str)!
// // Fix circular references that were skipped during encode
// col.atlas = &a
// // Rebuild error cache from errors
// col.error_cache = map[string]bool{}
// for err in col.errors {
// col.error_cache[err.hash()] = true
// }
// // Fix page references to collection
// for name, mut page in col.pages {
// page.collection = &col
// col.pages[name] = page
// }
// a.collections[col.name] = &col
// return &col
// }
// Load all collections from a directory tree
pub fn (mut a Atlas) load_from_directory(path string) ! {
mut dir := pathlib.get_dir(path: path)!
a.scan_and_load(mut dir)!
}
// Scan directory for .collection.json files and load them
fn (mut a Atlas) scan_and_load(mut dir pathlib.Path) ! {
// Check if this directory has .collection.json
// if dir.file_exists('.collection.json') {
// a.load_collection(dir.path)!
// return
// }
// Scan subdirectories
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
if !entry.is_dir() || should_skip_dir(entry) {
continue
}
mut mutable_entry := entry
a.scan_and_load(mut mutable_entry)!
}
}

View File

@@ -1,164 +0,0 @@
module atlas
import incubaid.herolib.core.pathlib
import incubaid.herolib.core.texttools
import incubaid.herolib.ui.console
// import incubaid.herolib.core.base
// import incubaid.herolib.develop.gittools
import incubaid.herolib.data.paramsparser
import os
// Scan a directory for collections
fn (mut a Atlas) scan_directory(mut dir pathlib.Path, ignore_ []string) ! {
console.print_item('Scanning directory: ${dir.path}')
if !dir.is_dir() {
return error('Path is not a directory: ${dir.path}')
}
mut ignore := ignore_.clone()
ignore = ignore.map(it.to_lower())
// Check if this directory is a collection
if is_collection_dir(dir) {
collection_name := get_collection_name(mut dir)!
if collection_name.to_lower() in ignore {
return
}
mut col := a.add_collection(path: dir.path, name: collection_name)!
if collection_name == 'groups' {
col.scan_groups()!
}
return
}
// Scan subdirectories
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
if !entry.is_dir() || should_skip_dir(entry) {
continue
}
mut mutable_entry := entry
a.scan_directory(mut mutable_entry, ignore)!
}
}
// Detect git repository URL for a collection
fn (mut c Collection) detect_git_url() ! {
mut current_path := c.path
// Walk up directory tree to find .git
mut git_repo := current_path.parent_find('.git') or {
// No git repo found
return
}
if git_repo.path == '' {
return
}
// Get git origin URL
origin_url := os.execute('cd ${git_repo.path} && git config --get remote.origin.url')
if origin_url.exit_code == 0 {
c.git_url = origin_url.output.trim_space()
}
// Get current branch
branch_result := os.execute('cd ${git_repo.path} && git branch --show-current')
if branch_result.exit_code == 0 {
c.git_branch = branch_result.output.trim_space()
}
}
// Check if directory is a collection
fn is_collection_dir(path pathlib.Path) bool {
return path.file_exists('.collection')
}
// Get collection name from .collection file
fn get_collection_name(mut path pathlib.Path) !string {
mut collection_name := path.name()
mut filepath := path.file_get('.collection')!
content := filepath.read()!
if content.trim_space() != '' {
mut params := paramsparser.parse(content)!
if params.exists('name') {
collection_name = params.get('name')!
}
}
return texttools.name_fix(collection_name)
}
// Check if directory should be skipped
fn should_skip_dir(entry pathlib.Path) bool {
name := entry.name()
return name.starts_with('.') || name.starts_with('_')
}
// Scan collection directory for files
fn (mut c Collection) scan() ! {
c.scan_path(mut c.path)!
c.scan_acl()! // NEW: scan ACL files
c.detect_git_url() or {
console.print_debug('Could not detect git URL for collection ${c.name}: ${err}')
}
}
fn (mut c Collection) scan_path(mut dir pathlib.Path) ! {
mut entries := dir.list(recursive: false)!
for mut entry in entries.paths {
// Skip hidden files/dirs
if entry.name().starts_with('.') || entry.name().starts_with('_') {
continue
}
if entry.is_dir() {
// Recursively scan subdirectories
mut mutable_entry := entry
c.scan_path(mut mutable_entry)!
continue
}
// Process files based on extension
match entry.extension_lower() {
'md' {
mut mutable_entry := entry
c.add_page(mut mutable_entry)!
}
'png', 'jpg', 'jpeg', 'gif', 'svg' {
mut mutable_entry := entry
c.add_image(mut mutable_entry)!
}
else {
mut mutable_entry := entry
c.add_file(mut mutable_entry)!
}
}
}
}
// Scan for ACL files
fn (mut c Collection) scan_acl() ! {
// Look for read.acl in collection directory
read_acl_path := '${c.path.path}/read.acl'
if os.exists(read_acl_path) {
content := os.read_file(read_acl_path)!
// Split by newlines and normalize
c.acl_read = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
// Look for write.acl in collection directory
write_acl_path := '${c.path.path}/write.acl'
if os.exists(write_acl_path) {
content := os.read_file(write_acl_path)!
// Split by newlines and normalize
c.acl_write = content.split('\n')
.map(it.trim_space())
.filter(it.len > 0)
.map(it.to_lower())
}
}

View File

@@ -91,7 +91,7 @@ import incubaid.herolib.develop.gittools
// git_pull bool
// currentdir bool // can use currentdir, if true, will use current directory as base path if not giturl or path specified
mydocs_path:=gittools.path(
pull:true,
git_pull:true,
git_url:'https://git.threefold.info/tfgrid/info_docs_depin/src/branch/main/docs'
)!

View File

@@ -145,3 +145,25 @@ fn normalize_url(url string) string {
}
return url.replace(':', '/').replace('//', '/').trim('/')
}
pub fn (self GitLocation) web_url() !string {
println(self)
mut provider := self.provider
if provider == 'github' {
provider = 'github.com'
}
mut url_base := 'https://${provider}/${self.account}/${self.name}'
mut url := '${url_base}/src/branch/${self.branch_or_tag}/${self.path}'
// if provider.contains('gitea') || provider.contains('git.') {
// url = '${url_base}/src/branch/${self.branch_or_tag}/${self.path}'
// }
// if provider == 'github' {
// return '${url_base}/edit/${self.branch_or_tag}/${self.path}'
// }
// if provider == 'gitlab' {
// return '${url_base}/-/edit/${self.branch_or_tag}/${self.path}'
// }
println(url)
$dbg;
return url
}
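
For a Gitea-style host this yields the same src/branch URL shape used by the heroscript at the top of this commit. A hedged illustration (field values invented to match that URL):

    // provider 'git.ourworld.tf', account 'tfgrid', name 'docs_tfgrid4',
    // branch_or_tag 'main', path 'collections/authentic_web'
    // web_url() -> 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/authentic_web'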