@@ -1,95 +0,0 @@
# Doctree Module

The primary goal of this module is to transform structured document collections into a format suitable for various outputs. It handles the complexities of finding collections, loading their content, processing includes, definitions, and macros, and exporting the final result while managing assets like images and files.

## Key Concepts

* **Tree:** The central component (`doctree.Tree`) that holds one or more `Collection` instances. It orchestrates the scanning, processing, and exporting of all contained collections.
* **Collection:** A directory that is marked as a collection by the presence of a `.collection` file. A collection groups related documents (pages, images, files) and can have its own configuration defined within the `.collection` file.
* **.collection file:** A file placed in a directory to designate it as a collection. This file can optionally contain parameters (in the `paramsparser` format), such as a custom name for the collection; see the example below.
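
For illustration, a minimal `.collection` file could look like this. An empty file is enough to mark the directory; the optional `name` parameter sets the custom collection name mentioned above:

```
name:my_collection
```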

## How it Works (Workflow)

The typical workflow involves creating a `Tree`, scanning for collections, and then exporting the processed content:

1. **Create Tree:** Initialize a `doctree.Tree` instance using `doctree.new()`.
2. **Scan:** Use the `tree.scan()` method, providing a path to a directory or a Git repository URL. The scanner recursively looks for directories containing a `.collection` file.
3. **Load Content:** For each identified collection, the module loads its content, including markdown pages, images, and other files.
4. **Process Content:** The loaded content is processed. This includes handling definitions, includes (content pulled in from other files), and macros (dynamic content generation or transformation).
5. **Generate Output Paths:** The module determines the final paths for all processed files and assets in the destination directory.
6. **Export:** The `tree.export()` method writes the processed content and assets to the specified destination directory, maintaining the desired structure.

## Usage (For Developers)

Here's a basic example of how to use the `doctree` module in your V project:

```v
import incubaid.herolib.data.doctree

// 1. Create a new Tree instance
mut tree := doctree.new(name: 'my_documentation')!

// 2. Scan a directory containing your collections
// Replace './docs' with the actual path to your document collections
tree.scan(path: './docs')!

// Scanning from a URL is also supported; related parameters are
// git_url (string), git_reset (bool) and git_pull (bool)
tree.scan(git_url: 'https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections')!

// 3. Export the processed content to a destination directory
// Replace './output' with your desired output path
// With redis:true the metadata is also written to redis
tree.export(destination: './output', redis: true)!

println('Documentation successfully exported to ./output')
```

## Structure of a Collection

A collection is a directory containing a `.collection` file. Inside a collection directory, you would typically organize your content like this:

```
my_collection/
├── .collection
├── page1.md
├── page2.md
├── images/
│   ├── image1.png
│   └── image2.jpg
└── files/
    ├── document.pdf
    └── data.csv
```

Markdown files (`.md`) are treated as pages. Page, image, and file names are normalized on load (e.g. a file named `page:hamada.md` becomes the page `page_hamada`).

## Use via HeroScript (Play)

```heroscript
!!doctree.collection name:"my_local_docs" path:"./docs"

!!doctree.collection name:"tfgrid_docs"
    git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections"
    git_reset:true
    git_pull:true

// destination is optional; if not specified it defaults to ${os.home_dir()}/hero/var/doctree/main
!!doctree.export name:"my_local_docs" destination:"/tmp/1" exclude_errors:0 reset:1
```
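
These `!!doctree.*` actions are handled by the module's `play` entry point (shown at the end of this commit). Here is a minimal sketch of driving it from V; the `playbook.new(text: ...)` constructor is an assumption about the playbook API and may need adapting:

```v
import incubaid.herolib.core.playbook
import incubaid.herolib.data.doctree

// assumption: playbook.new parses heroscript text into a PlayBook
mut plbook := playbook.new(text: '!!doctree.collection name:"my_local_docs" path:"./docs"')!
// doctree.play only acts when the playbook contains doctree.* actions
doctree.play(mut plbook)!
```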

## Redis Structure

When exporting with the `redis:true` argument (the default), the following hashes can be found in Redis:

```bash
# redis hsets:
doctree:$collectionname $pagename $rel_path_in_collection
doctree:$collectionname $filename.$ext $rel_path_in_collection
doctree:path $collectionname $collectionpath_on_disk
```
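
For example, after an export with `redis:true` the mappings can be inspected with `redis-cli` (collection, page, and image names below are illustrative):

```bash
# relative path of a page inside its exported collection
redis-cli hget doctree:my_local_docs page1      # -> page1.md

# images and files are keyed by their full file name
redis-cli hget doctree:my_local_docs image1.png # -> img/image1.png

# path of the exported collection on disk
redis-cli hget doctree:path my_local_docs
```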
@@ -1,48 +0,0 @@
module collection

import incubaid.herolib.core.pathlib { Path }
import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.core.texttools

@[heap]
pub struct Collection {
pub mut:
	name          string @[required]
	path          Path   @[required]
	fail_on_error bool
	heal          bool = true
	pages         map[string]&data.Page
	files         map[string]&data.File
	images        map[string]&data.File
	errors        []CollectionError
}

@[params]
pub struct CollectionNewArgs {
pub mut:
	name          string @[required]
	path          string @[required]
	heal          bool = true // healing fixes images and removes stale links; it requires load
	load          bool = true
	fail_on_error bool
}

// get a new collection
pub fn new(args_ CollectionNewArgs) !Collection {
	mut args := args_
	args.name = texttools.name_fix(args.name)

	mut pp := pathlib.get_dir(path: args.path)! // will raise error if path doesn't exist
	mut collection := Collection{
		name:          args.name
		path:          pp
		heal:          args.heal
		fail_on_error: args.fail_on_error
	}

	if args.load {
		collection.scan() or { return error('Error scanning collection ${args.name}:\n${err}') }
	}

	return collection
}
@@ -1,29 +0,0 @@
module data

import incubaid.herolib.core.pathlib { Path }

pub enum PageErrorCat {
	unknown
	file_not_found
	image_not_found
	page_not_found
	def
}

pub struct PageMultiError {
	Error
pub mut:
	errs []PageError
}

pub fn (err PageMultiError) msg() string {
	return 'Failed to process page with one or multiple errors: ${err.errs}'
}

pub struct PageError {
	Error
pub mut:
	path Path
	msg  string
	cat  PageErrorCat
}
@@ -1,102 +0,0 @@
module data

import incubaid.herolib.core.pathlib
import os

pub enum FileStatus {
	unknown
	ok
	error
}

pub enum FileType {
	file
	image
}

@[heap]
pub struct File {
pub mut:
	collection_path pathlib.Path
	name            string // name after name_fix was applied
	ext             string
	path            pathlib.Path
	pathrel         string
	state           FileStatus
	pages_linked    []&Page // pointers to pages which use this file
	ftype           FileType
	collection_name string
}

@[params]
pub struct NewFileArgs {
pub:
	name            string // name after name_fix was applied
	collection_path pathlib.Path
	pathrel         string
	path            pathlib.Path
	collection_name string @[required]
}

pub fn new_file(args NewFileArgs) !File {
	mut f := File{
		name:            args.name
		path:            args.path
		collection_path: args.collection_path
		pathrel:         args.pathrel
		collection_name: args.collection_name
	}

	f.init()!

	return f
}

pub fn (file File) file_name() string {
	return '${file.name}.${file.ext}'
}

// parses file name, extension and relative path
pub fn (mut file File) init() ! {
	if file.path.is_image() {
		file.ftype = .image
	}

	file.name = file.path.name_fix_no_ext()
	file.ext = file.path.path.all_after_last('.').to_lower()

	path_rel := file.path.path_relative(file.collection_path.path) or {
		return error('cannot get relative path.\n${err}')
	}

	file.pathrel = path_rel.trim('/')
}

fn (mut file File) delete() ! {
	file.path.delete()!
}

// TODO: what if this is moved to another collection, or outside the scope of the tree?
fn (mut file File) mv(dest string) ! {
	mut destination := pathlib.get_dir(path: dest)! // will fail if dir doesn't exist

	os.mv(file.path.path, destination.path) or {
		return error('could not move ${file.path.path} to ${destination.path} .\n${err}\n${file}')
	}

	// recompute the relative path in relation to the collection
	file.pathrel = destination.path_relative(file.collection_path.path)!
	file.path = destination
}

fn (mut file File) exists() !bool {
	return file.path.exists()
}

pub fn (file_ File) copy(dest string) ! {
	mut file := file_
	mut dest2 := pathlib.get(dest)
	file.path.copy(dest: dest2.path, rsync: false) or {
		return error('Could not copy file: ${file.path.path} to ${dest} .\n${err}\n${file}')
	}
}
@@ -1,171 +0,0 @@
module data

import incubaid.herolib.core.pathlib
import incubaid.herolib.data.markdown.elements { Action, Doc, Element, Frontmatter2 }
import incubaid.herolib.data.markdown

pub enum PageStatus {
	unknown
	ok
	error
}

@[heap]
pub struct Page {
mut:
	doc           &Doc @[str: skip]
	element_cache map[int]Element
	changed       bool
pub mut:
	name            string // name after name_fix was applied
	alias           string // a proper name, e.g. for a def
	path            pathlib.Path
	collection_name string
}

@[params]
pub struct NewPageArgs {
pub:
	name            string       @[required]
	path            pathlib.Path @[required]
	collection_name string       @[required]
}

pub fn new_page(args NewPageArgs) !Page {
	if args.collection_name == '' {
		return error('page collection name must not be empty')
	}

	if args.name == '' {
		return error('page name must not be empty')
	}
	mut doc := markdown.new(path: args.path.path, collection_name: args.collection_name) or {
		return error('failed to parse doc for path ${args.path.path}\n${err}')
	}
	children := doc.children_recursive()
	mut element_cache := map[int]Element{}
	for child in children {
		element_cache[child.id] = child
	}
	mut new_page := Page{
		element_cache:   element_cache
		name:            args.name
		path:            args.path
		collection_name: args.collection_name
		doc:             &doc
	}
	return new_page
}

// return doc, reparse if needed
pub fn (mut page Page) doc() !&Doc {
	if page.changed {
		content := page.doc.markdown()!
		page.reparse_doc(content)!
	}

	return page.doc
}

// return doc, reparse if needed
fn (page Page) doc_immute() !&Doc {
	if page.changed {
		content := page.doc.markdown()!
		doc := markdown.new(content: content, collection_name: page.collection_name)!
		return &doc
	}
	return page.doc
}

// reparse doc markdown and assign new doc to page
fn (mut page Page) reparse_doc(content string) ! {
	doc := markdown.new(content: content, collection_name: page.collection_name)!
	page.element_cache = map[int]Element{}
	for child in doc.children_recursive() {
		page.element_cache[child.id] = child
	}

	page.doc = &doc
	page.changed = false
}

pub fn (page Page) key() string {
	return '${page.collection_name}:${page.name}'
}

pub fn (page Page) get_linked_pages() ![]string {
	doc := page.doc_immute()!
	return doc.linked_pages
}

pub fn (page Page) get_markdown() !string {
	mut doc := page.doc_immute()!
	mut result := ''
	for element in doc.children {
		if element is Frontmatter2 {
			continue
		}
		result += element.markdown()!
	}
	return result
}

pub fn (mut page Page) set_content(content string) ! {
	page.reparse_doc(content)!
}

fn (mut page Page) get_element(element_id int) !Element {
	return page.element_cache[element_id] or {
		return error('no element found with id ${element_id}')
	}
}

// TODO: this should not be allowed (it gives any caller access to modify page content)
pub fn (mut page Page) get_all_actions() ![]&Action {
	mut actions := []&Action{}
	mut doc := page.doc()!
	for element in doc.children_recursive() {
		if element is Action {
			actions << element
		}
	}

	return actions
}

pub fn (page Page) get_include_actions() ![]Action {
	mut actions := []Action{}
	// TODO: check if below is necessary
	// mut doc := page.doc_immute()!
	for element in page.doc.children_recursive() {
		if element is Action {
			if element.action.actor == 'wiki' && element.action.name == 'include' {
				actions << *element
			}
		}
	}
	return actions
}

pub fn (mut page Page) set_action_element_to_processed(element_id int) ! {
	mut element := page.element_cache[element_id] or {
		return error('page ${page.path} doc has no element with id ${element_id}')
	}

	if mut element is Action {
		element.action_processed = true
		page.changed = true
		return
	}

	return error('element with id ${element_id} is not an action')
}

pub fn (mut page Page) set_element_content_no_reparse(element_id int, content string) ! {
	mut element := page.element_cache[element_id] or {
		return error('page ${page.path} doc has no element with id ${element_id}')
	}

	element.content = content
	page.changed = true
}
@@ -1,49 +0,0 @@
module data

import incubaid.herolib.core.texttools
import incubaid.herolib.data.markdown.elements

// returns !!wiki.def actions
pub fn (mut page Page) get_def_actions() ![]elements.Action {
	mut doc := page.doc()!
	mut def_actions := doc.actionpointers(actor: 'wiki', name: 'def')
	mut ret := []elements.Action{}
	for def in def_actions {
		ret << *def
	}

	return ret
}

// returns page aliases, and removes the processed action's content
pub fn (mut page Page) process_def_action(element_id int) ![]string {
	mut action_element := page.get_element(element_id)!

	mut doc := page.doc()!
	if mut action_element is elements.Action {
		mut aliases := map[string]bool{}
		def_action := action_element.action
		page.alias = def_action.params.get_default('name', '')!
		if page.alias == '' {
			page.alias = doc.header_name()!
		}

		action_element.action_processed = true
		action_element.content = ''
		page.changed = true
		for alias in def_action.params.get_list('alias')! {
			mut processed_alias := alias
			if processed_alias.to_lower().ends_with('.md') {
				// remove the .md at the end
				processed_alias = processed_alias[0..processed_alias.len - 3]
			}

			processed_alias = texttools.name_fix(processed_alias).replace('_', '')
			aliases[processed_alias] = true
		}

		return aliases.keys()
	}

	return error('element with id ${element_id} is not an action')
}
@@ -1,40 +0,0 @@
module data

import incubaid.herolib.core.pathlib

fn test_get_def_actions() {
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	page1_content := "!!wiki.def alias:'tf-dev,cloud-dev,threefold-dev' name:'about us'"
	page1_path.write(page1_content)!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!
	def_actions := page1.get_def_actions()!

	assert def_actions.len == 1

	action := def_actions[0].action
	assert action.params.get('name')! == 'about us'
	mut aliases := action.params.get_list('alias')!
	aliases.sort()
	assert ['cloud-dev', 'tf-dev', 'threefold-dev'] == aliases
}

fn test_process_def_action() {
	// create page with def action
	// get actions
	// process def action
	// processed page should have action removed and alias set
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	page1_content := "!!wiki.def alias:'tf-dev,cloud-dev,threefold-dev' name:'about us'"
	page1_path.write(page1_content)!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!
	def_actions := page1.get_def_actions()!

	assert def_actions.len == 1

	mut aliases := page1.process_def_action(def_actions[0].id)!
	assert page1.get_markdown()! == ''
	assert page1.alias == 'about us'

	aliases.sort()
	assert ['clouddev', 'tfdev', 'threefolddev'] == aliases
}
@@ -1,34 +0,0 @@
module data

// returns the names of all def pointers on the page (elements like *DEF)
pub fn (mut page Page) get_def_names() ![]string {
	mut defnames := map[string]bool{}
	mut doc := page.doc()!
	for defitem in doc.defpointers() {
		defname := defitem.nameshort
		defnames[defname] = true
	}

	return defnames.keys()
}

// removes the def content, and generates a link to the page
pub fn (mut page Page) set_def_links(def_data map[string][]string) ! {
	mut doc := page.doc()!
	for mut defitem in doc.defpointers() {
		defname := defitem.nameshort

		v := def_data[defname] or { continue }
		if v.len != 2 {
			return error('invalid def data length: expected 2, found ${v.len}')
		}

		defitem.pagekey = v[0]
		defitem.pagename = v[1]

		defitem.process_link()!
	}

	doc.process()!
	page.changed = true
}
@@ -1,23 +0,0 @@
module data

import incubaid.herolib.core.pathlib
import rand

fn test_process_def_pointers() {
	// create a page with def pointers to two different pages
	// set def links on page
	// processed page should have links to the other two pages
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	alias1, alias2 := rand.string(5).to_upper(), rand.string(5).to_upper()
	page1_content := '*${alias1}\n*${alias2}'
	page1_path.write(page1_content)!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!

	mut defs := map[string][]string{}
	defs['${alias1.to_lower()}'] = ['col2:page2', 'page2 alias']
	defs['${alias2.to_lower()}'] = ['col3:page3', 'my page3 alias']

	page1.set_def_links(defs)!

	assert page1.get_markdown()! == '[page2 alias](col2:page2.md)\n[my page3 alias](col3:page3.md)'
}
@@ -1,59 +0,0 @@
module data

import incubaid.herolib.core.texttools
import incubaid.herolib.data.markdown.elements
import incubaid.herolib.data.doctree.pointer

// Note: doc should not get reparsed after invoking this method
pub fn (page Page) process_links(paths map[string]string) ![]string {
	mut not_found := map[string]bool{}
	mut doc := page.doc_immute()!
	for mut element in doc.children_recursive() {
		if mut element is elements.Link {
			if element.cat == .html || (element.cat == .anchor && element.url == '') {
				// external link or same-page anchor, nothing to process
				// maybe in the future check if it exists
				continue
			}
			mut name := texttools.name_fix_keepext(element.filename)
			mut site := texttools.name_fix(element.site)
			if site == '' {
				site = page.collection_name
			}
			pointerstr := '${site}:${name}'

			ptr := pointer.pointer_new(text: pointerstr, collection: page.collection_name)!
			mut path := paths[ptr.str()] or {
				not_found[ptr.str()] = true
				continue
			}

			if ptr.cat == .page && ptr.str() !in doc.linked_pages {
				doc.linked_pages << ptr.str()
			}

			if ptr.collection == page.collection_name {
				// same directory
				path = './' + path.all_after_first('/')
			} else {
				path = '../${path}'
			}

			if ptr.cat == .image && element.extra.trim_space() != '' {
				path += ' ${element.extra.trim_space()}'
			}

			mut out := '[${element.description}](${path})'
			if ptr.cat == .image {
				out = '!${out}'
			}

			element.content = out
			element.processed = false
			element.state = .linkprocessed
			element.process()!
		}
	}

	return not_found.keys()
}
@@ -1,20 +0,0 @@
module data

import incubaid.herolib.core.pathlib

fn test_process_link() {
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	page1_content := '[some page description](col1:page1.md)\n'
	page1_path.write(page1_content)!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!

	paths := {
		'col1:page1.md': 'col1/page1.md'
		'col2:img.png':  'col2/img/img.png'
	}

	notfound := page1.process_links(paths)!
	assert notfound.len == 0

	assert page1.get_markdown()! == '[some page description](./page1.md)\n'
}
@@ -1,24 +0,0 @@
module data

import incubaid.herolib.core.playmacros
import incubaid.herolib.data.markdown.elements { Action }

pub fn (mut page Page) process_macros() ! {
	mut mydoc := page.doc()!
	for mut element in mydoc.children_recursive() {
		if mut element is Action {
			if element.action.actiontype == .macro {
				content := playmacros.play_macro(element.action)!
				page.changed = true
				if content.len > 0 {
					element.content = content
				}
			}
		}
	}

	if page.changed {
		// reparse and process again, since a macro may expand into new macros
		page.reparse_doc(page.doc.markdown()!)!
		page.process_macros()!
	}
}
@@ -1,68 +0,0 @@
module collection

import incubaid.herolib.core.pathlib { Path }
import incubaid.herolib.core.base
import incubaid.herolib.ui.console

pub enum CollectionErrorCat {
	unknown
	image_double
	file_double
	file_not_found
	image_not_found
	page_double
	page_not_found
	sidebar
	circular_import
	def
	summary
	include
}

pub struct CollectionError {
	Error
pub mut:
	path Path
	msg  string
	cat  CollectionErrorCat
}

pub fn (e CollectionError) msg() string {
	return 'collection error:\n\tPath: ${e.path.path}\n\tError message: ${e.msg}\n\tCategory: ${e.cat}'
}

pub fn (mut collection Collection) error(args CollectionError) ! {
	if collection.fail_on_error {
		return args
	}

	collection.errors << args
	console.print_stderr(args.msg)
}

pub struct ObjNotFound {
	Error
pub:
	name       string
	collection string
	info       string
}

pub fn (err ObjNotFound) msg() string {
	return 'Could not find object with name ${err.name} in collection ${err.collection}: ${err.info}'
}

// write errors.md in the collection, this allows us to see what the errors are
pub fn (collection Collection) errors_report(col_name string, dest_ string) ! {
	// console.print_debug("====== errors report: ${dest_} : ${collection.errors.len}\n${collection.errors}")
	mut context := base.context()!
	mut redis := context.redis()!
	mut dest := pathlib.get_file(path: dest_, create: true)!
	if collection.errors.len == 0 {
		dest.delete()!
		return
	}
	c := $tmpl('template/errors.md')
	dest.write(c)!
	redis.hset('doctree:${col_name}', 'errors', 'errors.md')!
}
@@ -1,149 +0,0 @@
module collection

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.base
import incubaid.herolib.core.texttools.regext
import os
import incubaid.herolib.data.doctree.pointer
import incubaid.herolib.data.doctree.collection.data

@[params]
pub struct CollectionExportArgs {
pub mut:
	destination    pathlib.Path @[required]
	file_paths     map[string]string
	reset          bool = true
	keep_structure bool // whether the structure of the src collection will be preserved or not
	exclude_errors bool // whether the error report should be excluded from the export
	replacer       ?regext.ReplaceInstructions
	redis          bool = true
}

pub fn (mut c Collection) export(args CollectionExportArgs) ! {
	dir_src := pathlib.get_dir(path: args.destination.path + '/' + c.name, create: true)!

	mut cfile := pathlib.get_file(path: dir_src.path + '/.collection', create: true)! // will auto save it
	cfile.write("name:${c.name} src:'${c.path.path}'")!

	mut context := base.context()!
	mut redis := context.redis()!
	redis.hset('doctree:path', '${c.name}', dir_src.path)!

	c.errors << export_pages(c.name, c.path.path, c.pages.values(),
		dir_src:        dir_src
		file_paths:     args.file_paths
		keep_structure: args.keep_structure
		replacer:       args.replacer
		redis:          args.redis
	)!

	c.export_files(c.name, dir_src, args.reset)!
	c.export_images(c.name, dir_src, args.reset)!
	c.export_linked_pages(c.name, dir_src)!

	if !args.exclude_errors {
		c.errors_report(c.name, '${dir_src.path}/errors.md')!
	}
}

@[params]
pub struct ExportPagesArgs {
pub mut:
	dir_src        pathlib.Path
	file_paths     map[string]string
	keep_structure bool // whether the structure of the src collection will be preserved or not
	replacer       ?regext.ReplaceInstructions
	redis          bool = true
}

// creates the page file, processes page links, then writes the page
fn export_pages(col_name string, col_path string, pages []&data.Page, args ExportPagesArgs) ![]CollectionError {
	mut errors := []CollectionError{}

	mut context := base.context()!
	mut redis := context.redis()!

	for page in pages {
		dest := if args.keep_structure {
			relpath := page.path.path.trim_string_left(col_path)
			'${args.dir_src.path}/${relpath}'
		} else {
			'${args.dir_src.path}/${page.name}.md'
		}

		not_found := page.process_links(args.file_paths)!

		for pointer_str in not_found {
			ptr := pointer.pointer_new(text: pointer_str)!
			cat := match ptr.cat {
				.page {
					CollectionErrorCat.page_not_found
				}
				.image {
					CollectionErrorCat.image_not_found
				}
				else {
					CollectionErrorCat.file_not_found
				}
			}
			errors << CollectionError{
				path: page.path
				msg:  '${ptr.cat} ${ptr.str()} not found'
				cat:  cat
			}
		}

		mut dest_path := pathlib.get_file(path: dest, create: true)!
		mut markdown := page.get_markdown()!
		if mut replacer := args.replacer {
			markdown = replacer.replace(text: markdown)!
		}
		dest_path.write(markdown)!
		redis.hset('doctree:${col_name}', page.name, '${page.name}.md')!
	}
	return errors
}

fn (c Collection) export_files(col_name string, dir_src pathlib.Path, reset bool) ! {
	mut context := base.context()!
	mut redis := context.redis()!
	for _, file in c.files {
		mut d := '${dir_src.path}/img/${file.name}.${file.ext}'
		if reset || !os.exists(d) {
			file.copy(d)!
		}
		redis.hset('doctree:${col_name}', '${file.name}.${file.ext}', 'img/${file.name}.${file.ext}')!
	}
}

fn (c Collection) export_images(col_name string, dir_src pathlib.Path, reset bool) ! {
	mut context := base.context()!
	mut redis := context.redis()!
	for _, file in c.images {
		mut d := '${dir_src.path}/img/${file.name}.${file.ext}'
		redis.hset('doctree:${col_name}', '${file.name}.${file.ext}', 'img/${file.name}.${file.ext}')!
		if reset || !os.exists(d) {
			file.copy(d)!
		}
	}
}

fn (c Collection) export_linked_pages(col_name string, dir_src pathlib.Path) ! {
	mut context := base.context()!
	mut redis := context.redis()!
	collection_linked_pages := c.get_collection_linked_pages()!
	mut linked_pages_file := pathlib.get_file(path: dir_src.path + '/.linkedpages', create: true)!
	redis.hset('doctree:${col_name}', 'linkedpages', '${linked_pages_file.name()}.md')!
	linked_pages_file.write(collection_linked_pages.join_lines())!
}

fn (c Collection) get_collection_linked_pages() ![]string {
	mut linked_pages_set := map[string]bool{}
	for _, page in c.pages {
		for linked_page in page.get_linked_pages()! {
			linked_pages_set[linked_page] = true
		}
	}

	return linked_pages_set.keys()
}
@@ -1,56 +0,0 @@
module collection

import incubaid.herolib.core.pathlib
import os

const test_dir = '${os.dir(@FILE)}/testdata/export_test'
const tree_dir = '${test_dir}/mytree'
const export_dir = '${test_dir}/export'
const export_expected_dir = '${test_dir}/export_expected'

fn testsuite_begin() {
	pathlib.get_dir(
		path:  export_dir
		empty: true
	)!
}

fn testsuite_end() {
	pathlib.get_dir(
		path:  export_dir
		empty: true
	)!
}

fn test_export() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('${tree_dir}/dir1')
	}
	col.scan()!

	path_dest := pathlib.get_dir(path: '${export_dir}/src', create: true)!
	col.export(
		destination: path_dest
		file_paths:  {
			'col2:file3.md': 'col2/file3.md'
		}
	)!

	col1_path := '${export_dir}/src/col1'
	expected_col1_path := '${export_expected_dir}/src/col1'
	assert os.read_file('${col1_path}/.collection')! == "name:col1 src:'${tree_dir}/dir1'"
	assert os.read_file('${col1_path}/.linkedpages')! == os.read_file('${expected_col1_path}/.linkedpages')!
	assert os.read_file('${col1_path}/errors.md')! == '# Errors


## page_not_found

path: ${tree_dir}/dir1/dir2/file1.md

msg: page col3:file5.md not found

'
	assert os.read_file('${col1_path}/file1.md')! == os.read_file('${expected_col1_path}/file1.md')!
	assert os.read_file('${col1_path}/file2.md')! == os.read_file('${expected_col1_path}/file2.md')!
}
@@ -1,45 +0,0 @@
module collection

import incubaid.herolib.data.doctree.collection.data

// gets page with specified name from collection
pub fn (collection Collection) page_get(name string) !&data.Page {
	return collection.pages[name] or {
		return ObjNotFound{
			collection: collection.name
			name:       name
		}
	}
}

pub fn (collection Collection) page_exists(name string) bool {
	return name in collection.pages
}

// gets image with specified name from collection
pub fn (collection Collection) get_image(name string) !&data.File {
	return collection.images[name] or {
		return ObjNotFound{
			collection: collection.name
			name:       name
		}
	}
}

pub fn (collection Collection) image_exists(name string) bool {
	return name in collection.images
}

// gets file with specified name from collection
pub fn (collection Collection) get_file(name string) !&data.File {
	return collection.files[name] or {
		return ObjNotFound{
			collection: collection.name
			name:       name
		}
	}
}

pub fn (collection Collection) file_exists(name string) bool {
	return name in collection.files
}
@@ -1,250 +0,0 @@
module collection

import incubaid.herolib.conversiontools.imagemagick
import incubaid.herolib.core.pathlib { Path }
import incubaid.herolib.data.doctree.pointer
import incubaid.herolib.data.doctree.collection.data

// walk over one specific collection, find all files and pages
pub fn (mut collection Collection) scan() ! {
	collection.scan_directory(mut collection.path)!
}

// path is the full path
fn (mut collection Collection) scan_directory(mut p Path) ! {
	mut entry_list := p.list(recursive: false)!
	for mut entry in entry_list.paths {
		if collection.should_skip_entry(mut entry) {
			continue
		}

		if !entry.exists() {
			collection.error(
				path: entry
				msg:  'Entry ${entry.name()} does not exist'
				cat:  .unknown
			)!
			continue
		}

		if mut entry.is_link() {
			link_real_path := entry.realpath() // this is with the symlink resolved
			collection_abs_path := collection.path.absolute()
			if entry.extension_lower() == 'md' {
				// means we are linking pages, which should not be done
				collection.error(
					path: entry
					msg:  'Markdown files (${entry.path}) must not be linked'
					cat:  .unknown
				) or { return error('Failed to report collection error for ${entry.path}:\n${err}') }
				continue
			}

			if !link_real_path.starts_with(collection_abs_path) {
				// the target is outside the collection, so we need to copy
				entry.unlink()! // will transform the link into the file or dir it points to
			} else {
				// TODO: why do we need this?
				entry.relink()! // will check that the link is on the file with the shortest path
			}
		}

		if entry.is_dir() {
			collection.scan_directory(mut entry) or {
				return error('Failed to scan directory ${entry.path}:\n${err}')
			}
			continue
		}

		if entry.extension_lower() == '' {
			continue
		}

		match entry.extension_lower() {
			'md' {
				collection.add_page(mut entry) or {
					return error('Failed to add page ${entry.path}:\n${err}')
				}
			}
			else {
				collection.file_image_remember(mut entry) or {
					return error('Failed to remember file ${entry.path}:\n${err}')
				}
			}
		}
	}
}

fn (mut c Collection) should_skip_entry(mut entry Path) bool {
	entry_name := entry.name()

	// entries that start with . or _ are ignored
	if entry_name.starts_with('.') || entry_name.starts_with('_') {
		return true
	}

	// TODO: why do we skip all these???

	if entry.cat == .linkfile {
		// means we link to a file which is in the folder, so it can be loaded later, nothing to do here
		return true
	}

	if entry.is_dir() && entry_name.starts_with('gallery_') {
		return true
	}

	if entry_name.to_lower() == 'defs.md' {
		return true
	}

	if entry_name.contains('.test') {
		return true
	}

	if entry.path.starts_with('sidebar') {
		return true
	}

	return false
}

// remember the file, so we know if we have duplicates
// also fixes the name
fn (mut collection Collection) file_image_remember(mut p Path) ! {
	if collection.heal {
		p.path_normalize()!
	}
	mut ptr := pointer.pointer_new(
		collection: collection.name
		text:       p.name()
	)!

	if ptr.is_file_video_html() {
		collection.add_file(mut p)!
		return
	}

	if ptr.is_image() {
		if collection.heal && imagemagick.installed() {
			mut image := imagemagick.image_new(mut p)

			imagemagick.downsize(path: p.path)!
			// after downsizing, the path may have changed; need to set it on the file
			if p.path != image.path.path {
				p.path = image.path.path
				p.check()
			}
		}

		// TODO: what are we trying to do?
		if !collection.image_exists(ptr.name) {
			collection.add_image(mut p)!
		}

		mut image_file := collection.get_image(ptr.name)!
		mut image_file_path := image_file.path.path
		if p.path.len <= image_file_path.len {
			// nothing to be done, because the already existing file is shortest or equal
			return
		}
		// the file double is the one that already existed; need to change the path and can delete the original
		// TODO: this is clearly a bug
		image_file.path = image_file.path
		image_file.init()!
		if collection.heal {
			p.delete()!
		}

		return
	}

	return error('unsupported file type: ${ptr.extension}')
}

// add a page to the collection, specify existing path
// the page will be parsed as markdown
pub fn (mut collection Collection) add_page(mut p Path) ! {
	if collection.heal {
		p.path_normalize() or { return error('Failed to normalize path ${p.path}\n${err}') }
	}

	mut ptr := pointer.pointer_new(
		collection: collection.name
		text:       p.name()
	) or { return error('Failed to get pointer for ${p.name()}\n${err}') }

	// in case heal is true, pointer_new can normalize the path
	if collection.page_exists(ptr.name) {
		collection.error(
			path: p
			msg:  "Can't add ${p.path}: a page named ${ptr.name} already exists in the collection"
			cat:  .page_double
		) or { return error('Failed to report collection error for ${p.name()}\n${err}') }
		return
	}

	new_page := data.new_page(
		name:            ptr.name
		path:            p
		collection_name: collection.name
	) or { return error('Failed to create new page for ${ptr.name}\n${err}') }

	collection.pages[ptr.name] = &new_page
}

// add a file to the collection, specify existing path
pub fn (mut collection Collection) add_file(mut p Path) ! {
	if collection.heal {
		p.path_normalize()!
	}
	mut ptr := pointer.pointer_new(
		collection: collection.name
		text:       p.name()
	)!

	// in case heal is true, pointer_new can normalize the path
	if collection.file_exists(ptr.name) {
		collection.error(
			path: p
			msg:  "Can't add ${p.path}: a file named ${ptr.name} already exists in the collection"
			cat:  .file_double
		)!
		return
	}

	mut new_file := data.new_file(
		path:            p
		collection_path: collection.path
		collection_name: collection.name
	)!
	collection.files[ptr.name] = &new_file
}

// add an image to the collection, specify existing path
pub fn (mut collection Collection) add_image(mut p Path) ! {
	if collection.heal {
		p.path_normalize()!
	}
	mut ptr := pointer.pointer_new(
		collection: collection.name
		text:       p.name()
	)!

	// in case heal is true, pointer_new can normalize the path
	if collection.image_exists(ptr.name) {
		collection.error(
			path: p
			msg:  "Can't add ${p.path}: an image named ${ptr.name} already exists in the collection"
			cat:  .image_double
		)!
		return
	}

	mut image_file := &data.File{
		path:            p
		collection_path: collection.path
	}
	image_file.init()!
	collection.images[ptr.name] = image_file
}
@@ -1,121 +0,0 @@
module collection

import incubaid.herolib.core.pathlib

fn test_add_page_success() {
	/*
	create collection
	add page
	check page in collection
	*/

	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut page1_path := pathlib.get_file(path: '/tmp/col1/page1.md', create: true)!
	col.add_page(mut page1_path)!
	assert col.page_exists('page1')

	mut page2_path := pathlib.get_file(path: '/tmp/col1/page:hamada.md', create: true)!
	col.add_page(mut page2_path)!
	assert col.page_exists('page_hamada')
}

fn test_add_page_already_exists() {
	/*
	create collection
	add page with path /tmp/col1/page1.md
	add page with path /tmp/col1/dir/page1.md
	second add should fail, and the error is reported to collection errors
	*/

	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut page1_path := pathlib.get_file(path: '/tmp/col1/page1.md', create: true)!
	col.add_page(mut page1_path)!
	assert col.page_exists('page1')

	mut page2_path := pathlib.get_file(path: '/tmp/col1/dir1/page1.md', create: true)!
	col.add_page(mut page2_path)!

	assert col.errors.len == 1
	assert col.errors[0].msg == "Can't add /tmp/col1/dir1/page1.md: a page named page1 already exists in the collection"
}

fn test_add_image_success() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut image1_path := pathlib.get_file(path: '/tmp/col1/image.png', create: true)!
	col.add_image(mut image1_path)!
	assert col.image_exists('image')

	mut image2_path := pathlib.get_file(path: '/tmp/col1/image:2.jpg', create: true)!
	col.add_image(mut image2_path)!
	assert col.image_exists('image_2')
}

fn test_add_file_success() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut file1_path := pathlib.get_file(path: '/tmp/col1/file1.html', create: true)!
	col.add_file(mut file1_path)!
	assert col.file_exists('file1')

	mut file2_path := pathlib.get_file(path: '/tmp/col1/file:2.mp4', create: true)!
	col.add_file(mut file2_path)!
	assert col.file_exists('file_2')
}

fn test_file_image_remember() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut file1_path := pathlib.get_file(path: '/tmp/col1/image.png', create: true)!
	col.file_image_remember(mut file1_path)!
	assert col.image_exists('image')

	mut file2_path := pathlib.get_file(path: '/tmp/col1/file.html', create: true)!
	col.file_image_remember(mut file2_path)!
	assert col.file_exists('file')

	mut file3_path := pathlib.get_file(path: '/tmp/col1/file2.unknownext', create: true)!
	col.file_image_remember(mut file3_path)!
	assert col.file_exists('file2')
}

fn test_scan_directory() {
	mut file := pathlib.get_file(path: '/tmp/mytree/dir1/.collection', create: true)!
	file.write('name:col1')!
	file = pathlib.get_file(path: '/tmp/mytree/dir1/file1.md', create: true)!
	file = pathlib.get_file(path: '/tmp/mytree/dir1/file2.html', create: true)!
	file = pathlib.get_file(path: '/tmp/mytree/dir1/file3.png', create: true)!
	file = pathlib.get_file(path: '/tmp/mytree/dir1/dir2/file4.md', create: true)!
	file = pathlib.get_file(path: '/tmp/mytree/dir1/.shouldbeskipped', create: true)!
	file = pathlib.get_file(path: '/tmp/mytree/dir1/_shouldbeskipped', create: true)!

	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/mytree/dir1')
	}

	col.scan()!
	assert col.page_exists('file1')
	assert col.file_exists('file2')
	assert col.image_exists('file3')
	assert col.page_exists('file4')
	assert !col.file_exists('.shouldbeskipped')
	assert !col.file_exists('_shouldbeskipped')
}
@@ -1,11 +0,0 @@
# Errors

@for error in collection.errors

## @error.cat

path: @error.path.path

msg: @error.msg

@end
@@ -1 +0,0 @@
export_test/export
@@ -1 +0,0 @@
name:col1 src:'/Users/timurgordon/code/github/incubaid/herolib/lib/data/doctree/collection/testdata/export_test/mytree/dir1'
@@ -1 +0,0 @@
col2:file3.md
@@ -1,9 +0,0 @@
# Errors


## page_not_found

path: /Users/timurgordon/code/github/incubaid/herolib/herolib/data/doctree/collection/testdata/export_test/mytree/dir1/dir2/file1.md

msg: page col3:file5.md not found

@@ -1 +0,0 @@
[not existent page](col3:file5.md)
@@ -1 +0,0 @@
[some page](../col2/file3.md)
@@ -1 +0,0 @@
name:col1
@@ -1 +0,0 @@
[not existent page](col3:file5.md)
@@ -1 +0,0 @@
[some page](col2:file3.md)
@@ -1,45 +0,0 @@
module doctree

import incubaid.herolib.data.doctree.pointer

pub struct ObjNotFound {
	Error
pub:
	name       string
	collection string
	info       string
}

pub fn (err ObjNotFound) msg() string {
	return 'Could not find object with name ${err.name} in collection ${err.collection}.\n${err.info}'
}

pub struct CollectionNotFound {
	Error
pub:
	pointer pointer.Pointer
	msg     string
}

pub fn (err CollectionNotFound) msg() string {
	if err.msg.len > 0 {
		return err.msg
	}
	return 'Cannot find collection ${err.pointer} in tree.'
}

// custom error for when no object, or too many objects, are found
pub struct NoOrTooManyObjFound {
	Error
pub:
	tree    &Tree
	pointer pointer.Pointer
	nr      int
}

pub fn (err NoOrTooManyObjFound) msg() string {
	if err.nr > 0 {
		return 'Too many obj found for ${err.tree.name}. Pointer: ${err.pointer}'
	}
	return 'No obj found for ${err.tree.name}. Pointer: ${err.pointer}'
}
@@ -1,100 +0,0 @@
module doctree

import incubaid.herolib.core.pathlib
import incubaid.herolib.data.doctree.collection { Collection }
import incubaid.herolib.ui.console
import incubaid.herolib.core.texttools.regext
import os

@[params]
pub struct TreeExportArgs {
pub mut:
	destination    string
	reset          bool = true
	keep_structure bool // whether the structure of the src collection will be preserved or not
	exclude_errors bool // whether the error report should be excluded from the export
	toreplace      string
	concurrent     bool = true
	redis          bool = true
}

// export all collections to the chosen directory .
// all names will be in name_fixed mode .
// all images in img/
pub fn (mut tree Tree) export(args_ TreeExportArgs) ! {
	mut args := args_
	if args.toreplace.len > 0 {
		mut ri := regext.regex_instructions_new()
		ri.add_from_text(args.toreplace)!
		tree.replacer = ri
	}

	if args.destination.len == 0 {
		args.destination = '${os.home_dir()}/hero/var/doctree/main'
	}
	console.print_header('export tree: name:${tree.name} to ${args.destination}')

	mut dest_path := pathlib.get_dir(path: args.destination, create: true)!
	if args.reset {
		dest_path.empty()!
	}

	tree.process_defs()!
	tree.process_includes()!
	tree.process_actions_and_macros()! // process other actions and macros

	file_paths := tree.generate_paths()!

	console.print_green('exporting collections')

	if args.concurrent {
		mut ths := []thread !{}
		for _, mut col in tree.collections {
			ths << spawn fn (mut col Collection, dest_path pathlib.Path, file_paths map[string]string, args TreeExportArgs) ! {
				col.export(
					destination:    dest_path
					file_paths:     file_paths
					reset:          args.reset
					keep_structure: args.keep_structure
					exclude_errors: args.exclude_errors
					redis:          args.redis
					// TODO: replacer: tree.replacer
				)!
			}(mut col, dest_path, file_paths, args)
		}
		for th in ths {
			th.wait() or { panic(err) }
		}
	} else {
		for _, mut col in tree.collections {
			col.export(
				destination:    dest_path
				file_paths:     file_paths
				reset:          args.reset
				keep_structure: args.keep_structure
				exclude_errors: args.exclude_errors
				replacer:       tree.replacer
				redis:          args.redis
			)!
		}
	}
}

fn (mut t Tree) generate_paths() !map[string]string {
	mut paths := map[string]string{}
	for _, col in t.collections {
		for _, page in col.pages {
			paths['${col.name}:${page.name}.md'] = '${col.name}/${page.name}.md'
		}

		for _, image in col.images {
			paths['${col.name}:${image.file_name()}'] = '${col.name}/img/${image.file_name()}'
		}

		for _, file in col.files {
			paths['${col.name}:${file.file_name()}'] = '${col.name}/img/${file.file_name()}'
		}
	}

	return paths
}
@@ -1,91 +0,0 @@
module doctree

import incubaid.herolib.core.pathlib
import os

const test_dir = '${os.dir(@FILE)}/testdata/export_test'
const tree_dir = '${test_dir}/mytree'
const export_dir = '${test_dir}/export'
const export_expected_dir = '${test_dir}/export_expected'

fn testsuite_begin() {
	pathlib.get_dir(
		path:  export_dir
		empty: true
	)!
}

fn testsuite_end() {
	pathlib.get_dir(
		path:  export_dir
		empty: true
	)!
}

fn test_export() {
	/*
	tree_root/
		dir1/
			.collection
			dir2/
				file1.md
			file2.md
			image.png
		dir3/
			.collection
			file3.md

	export:
	export_dest/
		src/
			col1/
				.collection
				.linkedpages
				errors.md
				img/
					image.png
				file1.md
				file2.md
			col2/
				.collection
				.linkedpages
				file3.md

		.edit/

	test:
	- create tree
	- add files/pages and collections to tree
	- export tree
	- ensure tree structure is valid
	*/

	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: '${tree_dir}/dir1', name: 'col1')!
	tree.add_collection(path: '${tree_dir}/dir3', name: 'col2')!

	tree.export(destination: '${export_dir}')!

	col1_path := '${export_dir}/col1'
	expected_col1_path := '${export_expected_dir}/col1'
	assert os.read_file('${col1_path}/.collection')! == "name:col1 src:'${tree_dir}/dir1'"
	assert os.read_file('${col1_path}/.linkedpages')! == os.read_file('${expected_col1_path}/.linkedpages')!
	assert os.read_file('${col1_path}/errors.md')! == '# Errors


## page_not_found

path: ${tree_dir}/dir1/dir2/file1.md

msg: page col3:file5.md not found

'
	assert os.read_file('${col1_path}/file1.md')! == os.read_file('${expected_col1_path}/file1.md')!
	assert os.read_file('${col1_path}/file2.md')! == os.read_file('${expected_col1_path}/file2.md')!

	col2_path := '${export_dir}/col2'
	expected_col2_path := '${export_expected_dir}/col2'
	assert os.read_file('${col2_path}/.linkedpages')! == ''
	assert os.read_file('${col2_path}/.collection')! == "name:col2 src:'${tree_dir}/dir3'"
	assert os.read_file('${col2_path}/file3.md')! == ''
}
@@ -1,72 +0,0 @@
module doctree

import incubaid.herolib.data.doctree.collection
import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.data.doctree.pointer

pub fn (tree Tree) get_collection(name string) !&collection.Collection {
	col := tree.collections[name] or { return error('collection ${name} not found') }

	return col
}

pub fn (tree Tree) get_collection_with_pointer(p pointer.Pointer) !&collection.Collection {
	return tree.get_collection(p.collection) or {
		return CollectionNotFound{
			pointer: p
			msg:     '${err}'
		}
	}
}

// get the page from pointer string: $tree:$collection:$name or
// $collection:$name or $name
pub fn (tree Tree) page_get(pointerstr string) !&data.Page {
	p := pointer.pointer_new(text: pointerstr)!
	return tree.get_page_with_pointer(p)!
}

fn (tree Tree) get_page_with_pointer(p pointer.Pointer) !&data.Page {
	col := tree.get_collection_with_pointer(p)!
	new_page := col.page_get(p.name)!

	return new_page
}

// get the image from pointer string: $tree:$collection:$name or
// $collection:$name or $name
pub fn (tree Tree) get_image(pointerstr string) !&data.File {
	p := pointer.pointer_new(text: pointerstr)!
	col := tree.get_collection_with_pointer(p)!
	image := col.get_image(p.name)!

	return image
}

// get the file from pointer string: $tree:$collection:$name or
// $collection:$name or $name
pub fn (tree Tree) get_file(pointerstr string) !&data.File {
	p := pointer.pointer_new(text: pointerstr)!
	col := tree.get_collection_with_pointer(p)!
	new_file := col.get_file(p.name)!

	return new_file
}

pub fn (tree Tree) page_exists(pointerstr string) bool {
	p := pointer.pointer_new(text: pointerstr) or { return false }
	col := tree.get_collection_with_pointer(p) or { return false }
	return col.page_exists(p.name)
}

pub fn (tree Tree) image_exists(pointerstr string) bool {
	p := pointer.pointer_new(text: pointerstr) or { return false }
	col := tree.get_collection_with_pointer(p) or { return false }
	return col.image_exists(p.name)
}

pub fn (tree Tree) file_exists(pointerstr string) bool {
	p := pointer.pointer_new(text: pointerstr) or { return false }
	col := tree.get_collection_with_pointer(p) or { return false }
	return col.file_exists(p.name)
}
@@ -1,35 +0,0 @@
module doctree

import incubaid.herolib.core.pathlib

fn test_page_get() {
	mut file1_path := pathlib.get_file(path: '/tmp/mytree/dir1/file2.md', create: true)!
	file1_path.write('[some page](col2:file3.md)')!
	mut file2_path := pathlib.get_file(path: '/tmp/mytree/dir1/image.png', create: true)!
	mut file3_path := pathlib.get_file(path: '/tmp/mytree/dir1/dir2/file1.md', create: true)!
	file3_path.write('[not existent page](col3:file5.md)')!
	mut file4_path := pathlib.get_file(path: '/tmp/mytree/dir1/.collection', create: true)!
	file4_path.write('name:col1')!

	mut file5_path := pathlib.get_file(path: '/tmp/mytree/dir3/.collection', create: true)!
	file5_path.write('name:col2')!
	mut file6_path := pathlib.get_file(path: '/tmp/mytree/dir3/file3.md', create: true)!

	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: file1_path.parent()!.path, name: 'col1')!
	tree.add_collection(path: file6_path.parent()!.path, name: 'col2')!

	mut page := tree.page_get('col1:file2.md')!
	assert page.name == 'file2'

	mut image := tree.get_image('col1:image.png')!
	assert image.file_name() == 'image.png'

	// this page pointer is faulty and must fail
	apple_ptr_faulty0 := 'col3:file1.md'
	if _ := tree.page_get(apple_ptr_faulty0) {
		assert false, 'this should fail: faulty pointer ${apple_ptr_faulty0}'
	}
}
@@ -1,65 +0,0 @@
module doctree

import incubaid.herolib.ui.console

// list_pages returns a map of collection names to the list of page names
// within each collection. The structure is map[collectionname][]pagename.
pub fn (mut t Tree) list_pages() map[string][]string {
	mut result := map[string][]string{}
	mut sorted_collections := t.collections.values()
	sorted_collections.sort(a.name < b.name)

	for _, col in sorted_collections {
		mut page_names := []string{}
		mut sorted_pages := col.pages.values()
		sorted_pages.sort(a.name < b.name)
		for _, page in sorted_pages {
			page_names << page.name
		}
		result[col.name] = page_names
	}
	return result
}

// list_markdown returns the collections and their pages in markdown format.
pub fn (mut t Tree) list_markdown() string {
	mut markdown_output := ''
	pages_map := t.list_pages()

	if pages_map.len == 0 {
		return 'No collections or pages found in this doctree.'
	}

	for col_name, page_names in pages_map {
		markdown_output += '## ${col_name}\n'
		if page_names.len == 0 {
			markdown_output += ' * No pages in this collection.\n'
		} else {
			for page_name in page_names {
				markdown_output += ' * ${page_name}\n'
			}
		}
		markdown_output += '\n' // add a newline for spacing between collections
	}
	return markdown_output
}

// print_pages prints the collections and their pages in a nice, easy-to-read format.
pub fn (mut t Tree) print_pages() {
	pages_map := t.list_pages()
	console.print_header('Doctree: ${t.name}')
	if pages_map.len == 0 {
		console.print_green('No collections or pages found in this doctree.')
		return
	}
	for col_name, page_names in pages_map {
		console.print_green('Collection: ${col_name}')
		if page_names.len == 0 {
			console.print_green('  No pages in this collection.')
		} else {
			for page_name in page_names {
				console.print_item('  ${page_name}')
			}
		}
	}
}
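A sketch of how these listing helpers fit together, with hypothetical collection and page names; the map shape follows directly from `list_pages` above:

```v
mut tree := doctree.new(name: 'example')!
tree.scan(path: './docs')!

pages := tree.list_pages()
// e.g. {'guides': ['install', 'intro'], 'reference': ['api']}

println(tree.list_markdown())
// ## guides
//  * install
//  * intro
// ...

tree.print_pages() // same data, printed via the console helpers
```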
@@ -1,63 +0,0 @@
module doctree

import incubaid.herolib.core.playbook { PlayBook }
// import incubaid.herolib.ui.console

pub fn play(mut plbook PlayBook) ! {
	if !plbook.exists(filter: 'doctree.') {
		return
	}

	mut doctrees := map[string]&Tree{}

	mut collection_actions := plbook.find(filter: 'doctree.scan')!
	for mut action in collection_actions {
		mut p := action.params
		name := p.get_default('name', 'main')!
		mut doctree := doctrees[name] or {
			mut newdtr := new(name: name)!
			doctrees[name] = newdtr
			newdtr
		}
		path := p.get_default('path', '')!
		git_url := p.get_default('git_url', '')!
		git_reset := p.get_default_false('git_reset')
		git_pull := p.get_default_false('git_pull')
		doctree.scan(path: path, git_url: git_url, git_reset: git_reset, git_pull: git_pull)!
		action.done = true
		tree_set(doctree)
	}

	mut export_actions := plbook.find(filter: 'doctree.export')!
	if export_actions.len == 0 && collection_actions.len > 0 {
		// only auto-export if we have collections to export
		name0 := 'main'
		mut doctree0 := doctrees[name0] or { panic("can't find doctree with name ${name0}") }
		doctree0.export()!
	}
	if export_actions.len > 0 && collection_actions.len == 0 {
		println(plbook)
		return error('No collections configured, use !!doctree.scan ..., otherwise cannot export')
	}

	for mut action in export_actions {
		mut p := action.params
		name := p.get_default('name', 'main')!
		destination := p.get('destination')!
		reset := p.get_default_false('reset')
		exclude_errors := p.get_default_true('exclude_errors')
		mut doctree := doctrees[name] or { return error("can't find doctree with name ${name}") }
		doctree.export(
			destination: destination
			reset: reset
			exclude_errors: exclude_errors
		)!
		action.done = true
	}

	// println(tree_list())
	// println(tree_get("main")!)
	// panic("sd")
}
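A heroscript sketch of the actions this play function consumes, using only the parameter names read above (`name`, `path`, `git_url`, `git_reset`, `git_pull` for scan; `name`, `destination`, `reset`, `exclude_errors` for export); the paths are hypothetical:

```js
!!doctree.scan name:'main' path:'./docs'

!!doctree.export name:'main' destination:'./output' reset:true exclude_errors:true
```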
@@ -1,106 +0,0 @@
module pointer

import incubaid.herolib.core.texttools

pub enum PointerCat {
	page
	image
	video
	file
	html
}

// links to a page, image or file
pub struct Pointer {
pub mut:
	collection string // is the key of a collection
	name       string // is name without extension, all namefixed (lowercase...)
	cat        PointerCat
	extension  string // e.g. jpg
}

@[params]
pub struct NewPointerArgs {
pub:
	// pointer string (e.g. col:page.md)
	text string
	// used if text does not have collection information
	collection string
}

// will return a clean pointer to a page, image or file
// ```
// input is e.g. mycollection:filename.jpg
// or filename.jpg
// or mypage.md
// ```
pub fn pointer_new(args NewPointerArgs) !Pointer {
	mut txt := args.text.trim_space().replace('\\', '/').replace('//', '/')

	// take colon parts out
	split_colons := txt.split(':')
	if split_colons.len > 2 {
		return error("pointer can only have 1 ':' inside. ${txt}")
	}

	mut collection_name := args.collection
	mut file_name := ''
	if split_colons.len == 2 {
		collection_name = texttools.name_fix_keepext(split_colons[0].all_after_last('/'))
		file_name = texttools.name_fix_keepext(split_colons[1].all_after_last('/'))
	}

	if collection_name == '' {
		return error('provided args do not have collection information: ${args}')
	}

	if split_colons.len == 1 {
		file_name = texttools.name_fix_keepext(split_colons[0].all_after_last('/'))
	}

	split_file_name := file_name.split('.')
	file_name_no_extension := split_file_name[0]
	mut extension := 'md'
	if split_file_name.len > 1 {
		extension = split_file_name[1]
	}

	mut file_cat := PointerCat.page
	match extension {
		'md' {
			file_cat = .page
		}
		'jpg', 'jpeg', 'svg', 'gif', 'png' {
			file_cat = .image
		}
		'html' {
			file_cat = .html
		}
		'mp4', 'mov' {
			file_cat = .video
		}
		else {
			file_cat = .file
		}
	}

	return Pointer{
		name: file_name_no_extension
		collection: collection_name
		extension: extension
		cat: file_cat
	}
}

pub fn (p Pointer) is_image() bool {
	return p.cat == .image
}

pub fn (p Pointer) is_file_video_html() bool {
	return p.cat == .file || p.cat == .video || p.cat == .html
}

pub fn (p Pointer) str() string {
	return '${p.collection}:${p.name}.${p.extension}'
}
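A sketch of how `pointer_new` behaves, derived from the parsing logic above; the inputs are already lower-case, so the assumption here is that `texttools.name_fix_keepext` leaves them untouched:

```v
import incubaid.herolib.data.doctree.pointer

p := pointer.pointer_new(text: 'mycollection:diagram.jpg')!
assert p.collection == 'mycollection'
assert p.name == 'diagram' // extension is split off the name
assert p.is_image() // jpg maps to PointerCat.image
assert p.str() == 'mycollection:diagram.jpg'

// without a colon in the text, the fallback collection is used
p2 := pointer.pointer_new(text: 'readme.md', collection: 'docs')!
assert p2.cat == .page // md maps to PointerCat.page
```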
@@ -1,139 +0,0 @@
module pointer

import incubaid.herolib.ui.console

// import incubaid.herolib.core.pathlib
// import incubaid.herolib.core.texttools

// fn test_pointerpath() {
// 	p1 := pointerpath_new(path: '/tmp/A file.md') or { panic(err) }
// 	console.print_debug(p1)
// 	p1_compare := PointerPath{
// 		pointer: Pointer{
// 			collection: ''
// 			name: 'a_file'
// 			cat: .page
// 			extension: 'md'
// 			error: ''
// 			state: .unknown
// 		}
// 		path: pathlib.Path{
// 			path: '/tmp/A file.md'
// 			cat: .unknown
// 			exist: .no
// 		}
// 	}
// 	assert p1 == p1_compare

// 	p2 := pointerpath_new(path: '/tmp/ss/A__file.jpeg') or { panic(err) }
// 	p2_compare := PointerPath{
// 		pointer: Pointer{
// 			collection: ''
// 			name: 'a_file'
// 			cat: .image
// 			extension: 'jpeg'
// 			error: ''
// 			state: .unknown
// 		}
// 		path: pathlib.Path{
// 			path: '/tmp/A__file.jpeg'
// 			cat: .unknown
// 			exist: .no
// 		}
// 	}

// 	// assert p2==p2_compare
// }

fn test_pointer() {
	// p := pointer_new('Page__.md') or { panic(err) }
	// console.print_debug(p)
	// p_compare := Pointer{
	// 	collection: ''
	// 	name: 'page'
	// 	cat: .page
	// 	extension: 'md'
	// 	error: ''
	// 	state: .unknown
	// }
	// assert p == p_compare
}

// fn test_pointer2() {
// 	p := pointer_new('collectionAAA:Page__.md') or { panic(err) }
// 	console.print_debug(p)
// 	p_compare := Pointer{
// 		name: 'page'
// 		cat: .page
// 		extension: 'md'
// 		collection: 'collectionaaa'
// 		error: ''
// 		state: .unknown
// 	}
// 	assert p == p_compare
// }

// fn test_pointer3() {
// 	p := pointer_new('MY_Book:collection_AAA:Page__.md') or { panic(err) }
// 	console.print_debug(p)
// 	p_compare := Pointer{
// 		name: 'page'
// 		cat: .page
// 		extension: 'md'
// 		collection: 'collection_aaa'
// 		book: 'my_book'
// 		error: ''
// 		state: .unknown
// 	}
// 	assert p == p_compare
// }

// fn test_pointer4() {
// 	p := pointer_new('MY_Book:collection_AAA:aImage__.jpg') or { panic(err) }
// 	console.print_debug(p)
// 	p_compare := Pointer{
// 		name: 'aimage'
// 		cat: .image
// 		extension: 'jpg'
// 		collection: 'collection_aaa'
// 		book: 'my_book'
// 		error: ''
// 		state: .unknown
// 	}
// 	assert p == p_compare
// }

// fn test_pointer5() {
// 	p := pointer_new('MY_Book::aImage__.jpg') or { panic(err) }
// 	console.print_debug(p)
// 	p_compare := Pointer{
// 		name: 'aimage'
// 		cat: .image
// 		extension: 'jpg'
// 		collection: ''
// 		book: 'my_book'
// 		error: ''
// 		state: .unknown
// 	}
// 	assert p == p_compare
// }

// fn test_pointer6() {
// 	p := pointer_new('MY_Book::aImage__.jpg') or { panic(err) }
// 	assert p.str() == 'my_book::aimage.jpg'

// 	p2 := pointer_new('ddd:aImage__.jpg') or { panic(err) }
// 	assert p2.str() == 'ddd:aimage.jpg'

// 	p3 := pointer_new('aImage__.jpg') or { panic(err) }
// 	assert p3.str() == 'aimage.jpg'

// 	i := 40
// 	p4 := pointer_new('collectionAAA:Page__${i}.md') or { panic(err) }
// 	assert p4.str() == 'collectionaaa:page_40.md'
// }

// fn test_pointer7() {
// 	r := texttools.name_fix_keepext('page_40.md')
// 	assert r == 'page_40.md'
// }
@@ -1,83 +0,0 @@
module doctree

import incubaid.herolib.data.doctree.collection { CollectionError }
import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.ui.console

// process definitions (!!wiki.def actions, elements.Def elements)
// this must be done before processing includes.
pub fn (mut tree Tree) process_defs() ! {
	console.print_green('Processing tree defs')

	for _, mut col in tree.collections {
		for _, mut page in col.pages {
			mut p := page
			mut c := col
			tree.process_page_def_actions(mut p, mut c)!
		}
	}

	for _, mut col in tree.collections {
		for _, mut page in col.pages {
			mut p := page
			errors := tree.replace_page_defs_with_links(mut p)!
			// report accrued errors when replacing defs with links
			for err in errors {
				col.error(err)!
			}
		}
	}
}

fn (mut tree Tree) process_page_def_actions(mut p data.Page, mut c collection.Collection) ! {
	def_actions := p.get_def_actions()!
	if def_actions.len > 1 {
		c.error(
			path: p.path
			msg: 'a page can have at most one def action'
			cat: .def
		)!
	}

	if def_actions.len == 0 {
		return
	}

	aliases := p.process_def_action(def_actions[0].id)!
	for alias in aliases {
		if alias in tree.defs {
			c.error(
				path: p.path
				msg: 'alias ${alias} is already used'
				cat: .def
			)!
			continue
		}

		tree.defs[alias] = p
	}
}

fn (mut tree Tree) replace_page_defs_with_links(mut p data.Page) ![]CollectionError {
	defs := p.get_def_names()!

	mut def_data := map[string][]string{}
	mut errors := []CollectionError{}
	for def in defs {
		if referenced_page := tree.defs[def] {
			def_data[def] = [referenced_page.key(), referenced_page.alias]
		} else {
			// accrue errors that occur
			errors << CollectionError{
				path: p.path
				msg: 'def ${def} is not defined'
				cat: .def
			}
			continue
		}
	}

	p.set_def_links(def_data)!
	// return accrued collection errors for the collection to handle
	return errors
}
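The def mechanism is easiest to see in the test data further below: one page declares the definition through a `!!wiki.def` action, and other pages reference it with `*ALIAS` markers that this pass replaces with links (the test asserts each marker becomes `[about us](col1:page1.md)`):

```md
<!-- col1/page1.md: declares the def and its aliases -->
!!wiki.def alias:'tf-dev,cloud-dev,threefold-dev' name:'about us'

<!-- col2/page2.md: each marker resolves to the defining page -->
*TFDEV
*CLOUDDEV
*THREEFOLDDEV
```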
@@ -1,26 +0,0 @@
module doctree

import os
import incubaid.herolib.core.pathlib
import incubaid.herolib.data.doctree.collection.data

const test_dir = '${os.dir(@FILE)}/testdata/process_defs_test'

fn test_process_defs() {
	/*
	1- use files with def actions and elements from testdata
	2- create tree
	3- invoke process_defs
	4- check pages markdown
	*/
	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: '${test_dir}/col1', name: 'col1')!
	tree.add_collection(path: '${test_dir}/col2', name: 'col2')!
	tree.process_defs()!

	mut page1 := tree.page_get('col1:page1.md')!
	assert page1.get_markdown()! == ''

	mut page2 := tree.page_get('col2:page2.md')!
	assert page2.get_markdown()! == '[about us](col1:page1.md)\n[about us](col1:page1.md)\n[about us](col1:page1.md)'
}
@@ -1,154 +0,0 @@
module doctree

// import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.data.doctree.pointer
import incubaid.herolib.data.doctree.collection { CollectionError }
import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.core.playbook
import incubaid.herolib.ui.console

pub fn (mut tree Tree) process_includes() ! {
	console.print_green('Processing page includes')
	graph := tree.generate_pages_graph()!

	// Kahn's algorithm: process pages in topological order of their include dependencies
	mut indegree := map[string]int{}
	for _, c in tree.collections {
		for _, p in c.pages {
			indegree[p.key()] = 0
		}
	}

	for _, children in graph {
		for child in children.keys() {
			indegree[child] += 1
		}
	}

	mut queue := []string{}
	for key, degree in indegree {
		if degree == 0 {
			queue << key
		}
	}

	for queue.len > 0 {
		front := queue[0]
		queue = queue[1..].clone()

		mut page := tree.page_get(front)!
		mut col := tree.get_collection(page.collection_name)!

		// process page
		for element in page.get_include_actions()! {
			page_pointer := get_include_page_pointer(col.name, element.action) or { continue }

			mut include_page := tree.get_page_with_pointer(page_pointer) or { continue }

			page.set_element_content_no_reparse(element.id, include_page.get_markdown()!)!
			page.set_action_element_to_processed(element.id)!
		}

		// update indegree
		for child in graph[page.key()].keys() {
			indegree[child] -= 1
			if indegree[child] == 0 {
				queue << child
			}
		}
	}

	// any page whose indegree never reached zero is part of an include cycle
	for key, degree in indegree {
		if degree == 0 {
			continue
		}

		mut page := tree.page_get(key)!
		mut col := tree.get_collection(page.collection_name)!
		col.error(
			path: page.path
			msg: 'page ${key} is in an include cycle'
			cat: .circular_import
		)!
	}
}

fn get_include_page_pointer(collection_name string, a playbook.Action) !pointer.Pointer {
	mut page_pointer_str := a.params.get('page')!

	// handle includes
	mut page_pointer := pointer.pointer_new(collection: collection_name, text: page_pointer_str)!
	if page_pointer.collection == '' {
		page_pointer.collection = collection_name
	}

	return page_pointer
}

fn (mut tree Tree) generate_pages_graph() !map[string]map[string]bool {
	mut graph := map[string]map[string]bool{}
	mut ths := []thread !map[string]map[string]bool{}
	for _, mut col in tree.collections {
		ths << spawn fn (mut tree Tree, col &collection.Collection) !map[string]map[string]bool {
			return tree.collection_page_graph(col)!
		}(mut tree, col)
	}
	for th in ths {
		col_graph := th.wait()!
		for k, v in col_graph {
			graph[k] = v.clone()
		}
	}
	return graph
}

fn (mut tree Tree) collection_page_graph(col &collection.Collection) !map[string]map[string]bool {
	mut graph := map[string]map[string]bool{}
	for _, page in col.pages {
		resp := tree.generate_page_graph(page, col.name)!
		for k, v in resp.graph {
			graph[k] = v.clone()
		}
	}

	return graph
}

pub struct GraphResponse {
pub:
	graph  map[string]map[string]bool
	errors []CollectionError
}

fn (tree Tree) generate_page_graph(current_page &data.Page, col_name string) !GraphResponse {
	mut graph := map[string]map[string]bool{}
	mut errors := []CollectionError{}

	include_action_elements := current_page.get_include_actions()!
	for element in include_action_elements {
		page_pointer := get_include_page_pointer(col_name, element.action) or {
			errors << CollectionError{
				path: current_page.path
				msg: 'failed to get page pointer for include ${element.action.heroscript()}: ${err}'
				cat: .include
			}
			continue
		}

		include_page := tree.get_page_with_pointer(page_pointer) or {
			// TODO
			// col.error(
			// 	path: current_page.path
			// 	msg: 'failed to get page for include ${element.action.heroscript()}: ${err.msg()}'
			// 	cat: .include
			// )!
			continue
		}

		// edge: the included page must be processed before the page that includes it
		graph[include_page.key()][current_page.key()] = true
	}
	return GraphResponse{
		graph: graph
		errors: errors
	}
}
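The include actions this pass consumes are ordinary `!!wiki.include` actions in a page's markdown, as in the test data further below; when the pointer carries no collection prefix, `get_include_page_pointer` falls back to the including page's own collection:

```js
!!wiki.include page:'col2:page2.md'
```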
@@ -1,56 +0,0 @@
module doctree

import os
import incubaid.herolib.core.pathlib

const test_dir = '${os.dir(@FILE)}/testdata/process_includes_test'

fn test_process_includes() {
	/*
	1- use 3 pages in testdata:
	   - page1 includes page2
	   - page2 includes page3
	2- create tree
	3- invoke process_includes
	4- check pages markdown
	*/
	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: '${test_dir}/col1', name: 'col1')!
	tree.add_collection(path: '${test_dir}/col2', name: 'col2')!
	tree.process_includes()!

	mut page1 := tree.page_get('col1:page1.md')!
	mut page2 := tree.page_get('col2:page2.md')!
	mut page3 := tree.page_get('col2:page3.md')!

	assert page1.get_markdown()! == 'page3 content'
	assert page2.get_markdown()! == 'page3 content'
	assert page3.get_markdown()! == 'page3 content'
}

fn test_generate_pages_graph() {
	/*
	1- use 3 pages in testdata:
	   - page1 includes page2
	   - page2 includes page3
	2- create tree
	3- invoke generate_pages_graph
	4- check graph
	*/
	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: '${test_dir}/col1', name: 'col1')!
	tree.add_collection(path: '${test_dir}/col2', name: 'col2')!
	mut page1 := tree.page_get('col1:page1.md')!
	mut page2 := tree.page_get('col2:page2.md')!
	mut page3 := tree.page_get('col2:page3.md')!

	graph := tree.generate_pages_graph()!
	assert graph == {
		'${page3.key()}': {
			'${page2.key()}': true
		}
		'${page2.key()}': {
			'${page1.key()}': true
		}
	}
}
@@ -1,54 +0,0 @@
module doctree

import incubaid.herolib.data.doctree.collection { Collection }
import incubaid.herolib.data.markdown.elements
import incubaid.herolib.ui.console
import incubaid.herolib.core.playbook
import incubaid.herolib.core.playmacros

@[params]
pub struct MacroGetArgs {
pub mut:
	actor string
	name  string
}

// adds all action elements to a playbook, calls playmacros.play_actions on that playbook,
// which processes the macros, then reprocesses every page with the actions' new content
pub fn (mut tree Tree) process_actions_and_macros() ! {
	console.print_green('Processing actions and macros')

	// first process the generic actions, which can be executed as is
	mut plbook := playbook.new()!
	for element_action in tree.get_actions()! {
		plbook.actions << &element_action.action
	}

	playmacros.play_actions(mut plbook)!

	// now get the specific actions which need to return content
	mut ths := []thread !{}
	for _, mut col in tree.collections {
		ths << spawn fn (mut col Collection) ! {
			for _, mut page in col.pages {
				page.process_macros()! // calls play_macro in playmacros...
			}
		}(mut col)
	}

	for th in ths {
		th.wait()!
	}
}

fn (mut tree Tree) get_actions(args_ MacroGetArgs) ![]&elements.Action {
	// console.print_green('get actions for tree: name:${tree.name}')
	mut res := []&elements.Action{}
	for _, mut collection in tree.collections {
		// console.print_green('export collection: name:${name}')
		for _, mut page in collection.pages {
			res << page.get_all_actions()!
		}
	}
	return res
}
@@ -1,236 +0,0 @@
module doctree

import incubaid.herolib.core.pathlib { Path }
import incubaid.herolib.data.paramsparser
import incubaid.herolib.data.doctree.collection { Collection }
import incubaid.herolib.develop.gittools
import os
import incubaid.herolib.core.texttools
import incubaid.herolib.ui.console

@[params]
pub struct TreeScannerArgs {
pub mut:
	path      string
	heal      bool = true // healing means we fix images
	git_url   string
	git_reset bool
	git_root  string
	git_pull  bool
	load      bool = true // means we automatically load the added collections
}

// walk over the directory, find dirs with .book or .collection inside, and add them to the tree.
// a path will not be added unless .collection is in the path of a collection dir, or .book in a book
// ```
// path string
// heal bool // healing means we fix images; if selected, will automatically load and remove stale links
// git_url string
// git_reset bool
// git_root string
// git_pull bool
// ```
pub fn (mut tree Tree) scan(args TreeScannerArgs) ! {
	mut path := gittools.path(
		path: args.path
		git_url: args.git_url
		git_reset: args.git_reset
		git_root: args.git_root
		git_pull: args.git_pull
	)!

	console.print_item('doctree.scan: ${path.path}')

	if !path.is_dir() {
		return error('path is not a directory')
	}
	if path.file_exists('.site') {
		move_site_to_collection(mut path)!
	}

	if is_collection_dir(path) {
		collection_name := get_collection_name(mut path)!

		tree.add_collection(
			path: path.path
			name: collection_name
			heal: args.heal
			load: true
			fail_on_error: tree.fail_on_error
		)!

		return
	}

	mut entries := path.list(recursive: false) or {
		return error('cannot list: ${path.path} \n${err}')
	}

	for mut entry in entries.paths {
		if !entry.is_dir() || is_ignored_dir(entry)! {
			continue
		}

		tree.scan(path: entry.path, heal: args.heal, load: args.load) or {
			return error('failed to scan ${entry.path}: ${err}')
		}
	}
}

pub fn (mut tree Tree) scan_concurrent(args_ TreeScannerArgs) ! {
	mut args := args_
	if args.git_url.len > 0 {
		mut gs := gittools.new(coderoot: args.git_root)!
		mut repo := gs.get_repo(
			url: args.git_url
			pull: args.git_pull
			reset: args.git_reset
		)!
		args.path = repo.get_path_of_url(args.git_url)!
	}

	if args.path.len == 0 {
		return error('Path needs to be provided.')
	}

	path := pathlib.get_dir(path: args.path)!
	mut collection_paths := scan_helper(path)!
	mut threads := []thread !Collection{}
	for mut col_path in collection_paths {
		mut col_name := get_collection_name(mut col_path)!
		col_name = texttools.name_fix(col_name)

		if col_name in tree.collections {
			if tree.fail_on_error {
				return error('Collection with name ${col_name} already exists')
			}
			// TODO: handle error
			continue
		}

		threads << spawn fn (args CollectionNewArgs) !Collection {
			mut args_ := collection.CollectionNewArgs{
				name: args.name
				path: args.path
				heal: args.heal
				load: args.load
				fail_on_error: args.fail_on_error
			}
			return collection.new(args_)!
		}(
			name: col_name
			path: col_path.path
			heal: args.heal
			fail_on_error: tree.fail_on_error
		)
	}

	for _, t in threads {
		new_collection := t.wait() or { return error('Error executing thread: ${err}') }
		tree.collections[new_collection.name] = &new_collection
	}
}

// internal function that recursively returns
// the paths of collections in a given path
fn scan_helper(path_ Path) ![]Path {
	mut path := path_
	if !path.is_dir() {
		return error('path is not a directory')
	}

	if path.file_exists('.site') {
		move_site_to_collection(mut path)!
	}

	if is_collection_dir(path) {
		return [path]
	}

	mut entries := path.list(recursive: false) or {
		return error('cannot list: ${path.path} \n${err}')
	}

	mut paths := []Path{}
	for mut entry in entries.paths {
		if !entry.is_dir() || is_ignored_dir(entry)! {
			continue
		}

		paths << scan_helper(entry) or { return error('failed to scan ${entry.path}: ${err}') }
	}
	return paths
}

@[params]
pub struct CollectionNewArgs {
mut:
	name          string @[required]
	path          string @[required]
	heal          bool = true // healing means we fix images; if selected, will automatically load and remove stale links
	load          bool = true
	fail_on_error bool
}

// add a new collection to the tree
pub fn (mut tree Tree) add_collection(args_ CollectionNewArgs) ! {
	mut args := args_
	args.name = texttools.name_fix(args.name)

	if args.name in tree.collections {
		if args.fail_on_error {
			return error('Collection with name ${args.name} already exists')
		}
		return
	}

	mut pp := pathlib.get_dir(path: args.path)! // will raise an error if the path doesn't exist
	mut new_collection := collection.new(
		name: args.name
		path: pp.path
		heal: args.heal
		fail_on_error: args.fail_on_error
	)!

	tree.collections[new_collection.name] = &new_collection
}

// returns true if the directory should be ignored while scanning
fn is_ignored_dir(path_ Path) !bool {
	mut path := path_
	if !path.is_dir() {
		return error('path is not a directory')
	}
	name := path.name()
	return name.starts_with('.') || name.starts_with('_') || name == 'img'
}

// gets the collection name from the .collection file
// if there is no name param, uses the directory name
fn get_collection_name(mut path Path) !string {
	mut collection_name := path.name()
	mut filepath := path.file_get('.collection')!

	// now we found a collection we need to add
	content := filepath.read()!
	if content.trim_space() != '' {
		// means there are params in there
		mut params_ := paramsparser.parse(content)!
		if params_.exists('name') {
			collection_name = params_.get('name')!
		}
	}

	return collection_name
}

fn is_collection_dir(path Path) bool {
	return path.file_exists('.collection')
}

// moves the .site file to a .collection file
fn move_site_to_collection(mut path Path) ! {
	collectionfilepath1 := path.extend_file('.site')!
	collectionfilepath2 := path.extend_file('.collection')!
	os.mv(collectionfilepath1.path, collectionfilepath2.path)!
}
lib/data/doctree/testdata/.gitignore vendored
@@ -1 +0,0 @@
export_test/export
@@ -1 +0,0 @@
actions
@@ -1,7 +0,0 @@
# actions 2

```js
!!payment3.add account:something description:'TF Wallet for TFT'
    name:'TF Wallet' //comment for name
    blockchain:stellar //holochain maybe?
```
@@ -1,15 +0,0 @@
# web3gw_proxy server functionality

- [stellar](./stellar.md)

```js
!!payment.add account:something description:'TF Wallet for TFT' person:fatayera preferred:false
    name:'TF Wallet' //comment for name
    blockchain:stellar //holochain maybe?
```

!!payment.add2
    name:'TF Wallet' //comment for name
    blockchain:stellar
@@ -1 +0,0 @@
name:col1 src:'/Users/timurgordon/code/github/incubaid/herolib/herolib/data/doctree/testdata/export_test/mytree/dir1'
@@ -1 +0,0 @@
col2:file3.md
@@ -1,9 +0,0 @@
# Errors

## page_not_found

path: /Users/timurgordon/code/github/incubaid/herolib/herolib/data/doctree/testdata/export_test/mytree/dir1/dir2/file1.md

msg: page col3:file5.md not found
@@ -1 +0,0 @@
[not existent page](col3:file5.md)
@@ -1 +0,0 @@
[some page](../col2/file3.md)
@@ -1 +0,0 @@
name:col2 src:'/Users/timurgordon/code/github/incubaid/herolib/herolib/data/doctree/testdata/export_test/mytree/dir3'
@@ -1 +0,0 @@
name:col1
@@ -1 +0,0 @@
[not existent page](col3:file5.md)
@@ -1 +0,0 @@
[some page](col2:file3.md)
@@ -1 +0,0 @@
name:col2
@@ -1 +0,0 @@
!!wiki.def alias:'tf-dev,cloud-dev,threefold-dev' name:'about us'
@@ -1,3 +0,0 @@
*TFDEV
*CLOUDDEV
*THREEFOLDDEV
@@ -1 +0,0 @@
!!wiki.include page:'col2:page2.md'
@@ -1 +0,0 @@
!!wiki.include page:'col2:page3.md'
@@ -1 +0,0 @@
page3 content
lib/data/doctree/testdata/rpc/.collection vendored
@@ -1 +0,0 @@
name:rpc
lib/data/doctree/testdata/rpc/eth.md vendored
@@ -1,130 +0,0 @@
# Eth

TODO

## Remote Procedure Calls

In this section you'll find the json rpc requests and responses of all the remote procedure calls. The params fields can contain text formatted as <MODEL_*>. These represent json objects that are defined further down the document in the section [Models](#models).

### Load

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "eth.Load",
    "params": {
        "url": string,
        "secret": string
    },
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": "",
    "id": "<GUID>"
}
```

### Balance

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "eth.Balance",
    "params": "<address>",
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": i64,
    "id": "<GUID>"
}
```

### Height

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "eth.Height",
    "params": "",
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": u64,
    "id": "<GUID>"
}
```

### Transfer

The transaction id is returned.

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "eth.transfer",
    "params": {
        "destination": string,
        "amount": u64
    },
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": string,
    "id": "<GUID>"
}
```

### EthTftSpendingAllowance

**Request**

```json
{
    "jsonrpc": "2.0",
    "method": "eth.EthTftSpendingAllowance",
    "params": "",
    "id": "<GUID>"
}
```

**Response**

```json
{
    "jsonrpc": "2.0",
    "result": string,
    "id": "<GUID>"
}
```
lib/data/doctree/testdata/rpc/rpc.md vendored
@@ -1,12 +0,0 @@
# RPC methods

You can find OpenRPC descriptions of RPC methods in the playground pages below:
- [All clients](playground/?schemaUrl=../openrpc/openrpc.json)
- [Bitcoin](playground/?schemaUrl=../openrpc/btc/openrpc.json)
- [Ethereum](playground/?schemaUrl=../openrpc/eth/openrpc.json)
- [Explorer](playground/?schemaUrl=../openrpc/explorer/openrpc.json)
- [IPFS](playground/?schemaUrl=../openrpc/ipfs/openrpc.json)
- [Nostr](playground/?schemaUrl=../openrpc/nostr/openrpc.json)
- [Stellar](playground/?schemaUrl=../openrpc/stellar/openrpc.json)
- [TFChain](playground/?schemaUrl=../openrpc/tfchain/openrpc.json)
- [TFGrid](playground/?schemaUrl=../openrpc/tfgrid/openrpc.json)
lib/data/doctree/testdata/rpc/stellar.md vendored
@@ -1,342 +0,0 @@
# Stellar

## Creating an account

Json RPC 2.0 request:

- network: the network you want to create the account on (public or testnet)

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.CreateAccount",
    "params":[
        "public"
    ],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- seed: the seed of the account that was generated

```json
{
    "jsonrpc":"2.0",
    "result":"seed_will_be_here",
    "id":"id_send_in_request"
}
```

## Loading your key

Json RPC 2.0 request:

- network: the network you want to connect to (public or testnet)
- secret: the secret of your stellar account

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.Load",
    "params":[{
        "network":"public",
        "secret":"SA33FBB67CPIMHWTZYVR489Q6UKHFUPLKTLPG9BKAVG89I2J3SZNMW21"
    }],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response will be empty:

```json
{
    "jsonrpc":"2.0",
    "id":"id_send_in_request"
}
```

## Asking your public address

Json RPC 2.0 request (no parameters):

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.Address",
    "params":[],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- address: the public address of the loaded account

```json
{
    "jsonrpc":"2.0",
    "result":"public_address_will_be_here",
    "id":"id_send_in_request"
}
```

## Transfer tokens from one account to another

Json RPC 2.0 request:

- amount: the amount of tft to transfer (string)
- destination: the public address that should receive the tokens
- memo: the memo to add to the transaction

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.Transfer",
    "params":[{
        "amount": "1520.0",
        "destination": "some_public_stellar_address",
        "memo": "your_memo_comes_here"
    }],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- hash: the hash of the transaction that was executed

```json
{
    "jsonrpc":"2.0",
    "result":"hash_will_be_here",
    "id":"id_send_in_request"
}
```

## Swap tokens from one asset to the other

Json RPC 2.0 request:

- amount: the amount of tokens to swap (string)
- source_asset: the source asset to swap (should be tft or xlm)
- destination_asset: the asset to swap to (should be tft or xlm)

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.Swap",
    "params":[{
        "amount": "5.0",
        "source_asset": "tft",
        "destination_asset": "xlm"
    }],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- hash: the hash of the transaction that was executed

```json
{
    "jsonrpc":"2.0",
    "result":"hash_will_be_here",
    "id":"id_send_in_request"
}
```

## Get the balance of an account

Json RPC 2.0 request:

- address: the public address of an account to get the balance from (leave empty for your own account)

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.Balance",
    "params":[
        "you_can_pass_public_address_here"
    ],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- balance: the balance of the account (string)

```json
{
    "jsonrpc":"2.0",
    "result":"balance_will_be_here",
    "id":"id_send_in_request"
}
```

## Bridge stellar tft to ethereum

Json RPC 2.0 request:

- amount: the amount of tft to bridge (string)
- destination: the ethereum public address that should receive the tokens

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.BridgeToEth",
    "params":[{
        "amount": "298.0",
        "destination": "eth_public_address_here"
    }],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- hash: the hash of the transaction that was executed

```json
{
    "jsonrpc":"2.0",
    "result":"hash_will_be_here",
    "id":"id_send_in_request"
}
```

## Bridge stellar tft to tfchain

Json RPC 2.0 request:

- amount: the amount of tft on stellar to bridge to tfchain
- twin_id: the twin id that should receive the tokens

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.BridgeToTfchain",
    "params":[{
        "amount": "21.0",
        "twin_id": 122
    }],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- hash: the hash of the transaction that was executed

```json
{
    "jsonrpc":"2.0",
    "result":"hash_will_be_here",
    "id":"id_send_in_request"
}
```

## Waiting for a transaction on the Ethereum bridge

Json RPC 2.0 request:

- memo: the memo to look for in the transactions

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.AwaitTransactionOnEthBridge",
    "params":[
        "provide_the_memo_here"
    ],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response: empty result

```json
{
    "jsonrpc":"2.0",
    "id":"id_send_in_request"
}
```

## Listing transactions

Json RPC 2.0 request:

- account: a public stellar address to get the transactions for (leave empty for your own account)
- limit: how many transactions you want to get (default 10)
- include_failed: include the failed transactions in the result (default is false)
- cursor: where to start listing the transactions from (default is the top)
- ascending: whether to sort the transactions in ascending order (default is false, so in descending order)

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.Transactions",
    "params":[{
        "account": "some_account_here_or_leave_empty",
        "limit": 12,
        "include_failed": false,
        "cursor": "leave_empty_for_top",
        "ascending": false
    }],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- a list of transactions (see [here](https://github.com/stellar/go/blob/01c7aa30745a56d7ffcc75bb8ededd38ba582a58/protocols/horizon/main.go#L484) for the definition of a transaction)

```json
{
    "jsonrpc":"2.0",
    "result":[
        {
            "id": "some_id",
            // many more attributes
        }
    ],
    "id":"id_send_in_request"
}
```

## Showing the data related to an account

Json RPC 2.0 request:

- address: the stellar public address to get the account data for (leave empty for your own account)

```json
{
    "jsonrpc":"2.0",
    "method":"stellar.AccountData",
    "params":[
        "account_or_leave_empty_for_your_account"
    ],
    "id":"a_unique_id_here"
}
```

Json RPC 2.0 response:

- account data (see [here](https://github.com/stellar/go/blob/01c7aa30745a56d7ffcc75bb8ededd38ba582a58/protocols/horizon/main.go#L33) for the definition of account data)

```json
{
    "jsonrpc":"2.0",
    "result": {
        "id": "some_id",
        // many more attributes
    },
    "id":"id_send_in_request"
}
```
lib/data/doctree/testdata/rpc/tfchain.md vendored
@@ -1,251 +0,0 @@
# TFChain

TODO: intro

## Remote Procedure Calls

### Load

**Request**
```
{
    "jsonrpc": "2.0",
    "method": "tfchain.Load",
    "params": {
        "passphrase": string,
        "network": string
    },
    "id": "<GUID>"
}
```
**Response**
```
{
    "jsonrpc": "2.0",
    "result": "",
    "id": "<GUID>"
}
```

### Transfer

**Request**
```
{
    "jsonrpc": "2.0",
    "method": "tfchain.Transfer",
    "params": {
        "destination": string,
        "memo": string,
        "amount": u64
    },
    "id": "<GUID>"
}
```
**Response**
```
{
    "jsonrpc": "2.0",
    "result": "",
    "id": "<GUID>"
}
```

### Balance

**Request**
```
{
    "jsonrpc": "2.0",
    "method": "tfchain.Balance",
    "params": "<address>",
    "id": "<GUID>"
}
```
**Response**
```
{
    "jsonrpc": "2.0",
    "result": i64,
    "id": "<GUID>"
}
```

### GetTwin

**Request**
```
{
    "jsonrpc": "2.0",
    "method": "tfchain.TwinGet",
    "params": <id>,
    "id": "<GUID>"
}
```
**Response**
```
{
    "jsonrpc": "2.0",
    "result": <MODEL_TWIN>,
    "id": "<GUID>"
}
```

### GetNode

**Request**
```
{
    "jsonrpc": "2.0",
    "method": "tfchain.NodeGet",
    "params": <id>,
    "id": "<GUID>"
}
```
**Response**
```
{
    "jsonrpc": "2.0",
    "result": <MODEL_NODE>,
    "id": "<GUID>"
}
```

### GetFarm

**Request**
```
{
    "jsonrpc": "2.0",
    "method": "tfchain.FarmGet",
    "params": <id>,
    "id": "<GUID>"
}
```
**Response**
```
{
    "jsonrpc": "2.0",
    "result": <MODEL_FARM>,
    "id": "<GUID>"
}
```

## Models

### MODEL_TWIN
```
{
    "id": u32,
    "account": string,
    "relay": string,
    "entities": [MODEL_ENTITYPROOF],
    "pk": string
}
```

### MODEL_ENTITYPROOF
```
{
    "entityid": u32,
    "signature": string
}
```

### MODEL_NODE
```
{
    "id": u32,
    "farmid": u32,
    "twinid": u32,
    "resources": <MODEL_RESOURCES>,
    "location": <MODEL_LOCATION>,
    "public_config": {
        "ip": <MODEL_IP>,
        "ip6": <MODEL_IP>,
        "domain": string
    },
    "created": u64,
    "farmingpolicy": u32,
    "interfaces": [MODEL_INTERFACE],
    "certification": string,
    "secureboot": bool,
    "virtualized": bool,
    "serial": string,
    "connectionprice": u32
}
```

### MODEL_RESOURCES
```
{
    "hru": u64,
    "sru": u64,
    "cru": u64,
    "mru": u64
}
```

### MODEL_LOCATION
```
{
    "city": string,
    "country": string,
    "latitude": string,
    "longitude": string
}
```

### MODEL_IP
```
{
    "ip": string,
    "gw": string
}
```

### MODEL_INTERFACE
```
{
    "name": string,
    "mac": string,
    "ips": [string]
}
```

### MODEL_FARM
```
{
    "id": u32,
    "name": string,
    "twinid": u32,
    "pricingpolicyid": u32,
    "certificationtype": string,
    "publicips": [MODEL_PUBLICIP],
    "dedicated": bool,
    "farmingpolicylimit": <MODEL_FARMINGPOLICYLIMIT>
}
```

### MODEL_PUBLICIP
```
{
    "ip": string,
    "gateway": string,
    "contractid": u64
}
```

### MODEL_FARMINGPOLICYLIMIT
```
{
    "farmingpolicyid": u32,
    "cu": u64,
    "su": u64,
    "end": u64,
    "nodecount": u32,
    "nodecertification": bool
}
```
lib/data/doctree/testdata/rpc/tfgrid.md vendored
@@ -1,651 +0,0 @@
|
||||
|
||||
# TFgrid
|
||||
TFgrid is one of the clients that web3 proxy opens up. Below you can find the remote procedure calls it can handle. We use the json rpc 2.0 protocol. All possible json rpc request are shown below with the corresponding response that the web3 proxy will send back.
|
||||
|
||||
## Remote Procedure Calls
|
||||
In this section you'll find the json rpc requests and responses of all the remote procedure calls. The fields params can contain text formated as <MODEL_*>. These represent json objects that are defined further down the document in section [Models](#models).
|
||||
|
||||
### Login
|
||||
This rpc is used to login. It requires you to pass your menmonic and the network you want to deploy on.
|
||||
|
||||
****Request****
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.Load",
|
||||
"params": [
|
||||
"<menomonic>",
|
||||
"<network>"
|
||||
],
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": "",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### Gateway Name Deploy
|
||||
This rpc allows you to deploy a gateway name. It requires you to pass the information required for a gateway name. Upon success it will return you that same information extended with some extra useful data.
|
||||
|
||||
****Request****
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.gateway.name.deploy",
|
||||
"params": [<MODEL_GATEWAYNAME>],
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_GATEWAYNAMERESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### GatewayNameDelete
|
||||
This rpc allows you to delete a deployed gateway name. You should send the name in the params field. The operation succeeded if you receive a valid json rpc 2.0 result.
|
||||
|
||||
****Request****
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.GatewayNameDelete",
|
||||
"params": ["<name>"],
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": "",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### GatewayNameGet
|
||||
You can always ask for information on a gateway name via the rpc shown below. Just set the name in the params field of the json rpc 2.0 request. The response will contain the requested information.
|
||||
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.GatewayNameGet",
|
||||
"params": "<name>",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_GATEWAYNAMERESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### GatewayFQDNDeploy
|
||||
If you wish for a fully qualified domain name you should use the rpc shown below. It requires the data shown in [this model](#model_gatewayfqdn) and returns that same data augmented with [some extra fields](#model_gatewayfqdnresult).
|
||||
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.GatewayFQDNDeploy",
|
||||
"params": <MODEL_GATEWAYFQDN>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_GATEWAYFQDNRESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### GatewayFQDNDelete
|
||||
You can delete your requested fully qualified domain name with the rpc shown below. Just fill in the name in the json rpc request.
|
||||
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.GatewayFQDNDelete",
|
||||
"params": "<name>",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": "",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### GatewayFQDNGet
|
||||
Once created you can always retrieve the [data](#model_gatewayfqdnresult) related to your fully qualified domain name via the rpc method *tfgrid.GatewayFQDNget*.
|
||||
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.GatewayFQDNGet",
|
||||
"params": "<name>",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_GATEWAYFQDNRESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### K8sDeploy
|
||||
|
||||
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.K8sDeploy",
|
||||
"params": <MODEL_K8SCLUSTER>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_K8SCLUSTERRESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### K8sDelete
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.K8sDelete",
|
||||
"params": string,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": "",
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### K8sGet
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.K8sGet",
|
||||
"params": string,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_K8SCLUSTERRESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### K8sGet
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.K8sAddnode",
|
||||
"params": {
|
||||
"name": string,
|
||||
"node": <MODEL_K8SNODE>
|
||||
},
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_K8SCLUSTERRESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### K8sRemoveNode
|
||||
**Request**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "tfgrid.K8sRemovenode",
|
||||
"params": {
|
||||
"name": string,
|
||||
"nodename": string
|
||||
},
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
**Response**
|
||||
```
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"result": <MODEL_K8SCLUSTERRESULT>,
|
||||
"id": "<GUID>"
|
||||
}
|
||||
```
|
||||
|
||||
### MachinesDeploy

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.MachinesDeploy",
    "params": <MODEL_MACHINES>,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_MACHINESRESULT>,
    "id": "<GUID>"
}
```

### MachinesDelete

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.MachinesDelete",
    "params": string,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": "",
    "id": "<GUID>"
}
```

### MachinesGet

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.MachinesGet",
    "params": string,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_MACHINESRESULT>,
    "id": "<GUID>"
}
```

### MachineAdd

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.MachinesAdd",
    "params": {
        "project_name": string,
        "machine": <MODEL_MACHINE>
    },
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_MACHINESRESULT>,
    "id": "<GUID>"
}
```

### MachineRemove

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.MachinesRemove",
    "params": {
        "machine_name": string,
        "project_name": string
    },
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_MACHINESRESULT>,
    "id": "<GUID>"
}
```

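For instance, removing a hypothetical machine `vm1` from a project `myproject`:

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.MachinesRemove",
    "params": {
        "machine_name": "vm1",
        "project_name": "myproject"
    },
    "id": "<GUID>"
}
```
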
### DeploymentDeploy

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.DeploymentCreate",
    "params": <MODEL_DEPLOYMENT>,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_DEPLOYMENTRESULT>,
    "id": "<GUID>"
}
```

### DeploymentUpdate

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.DeploymentUpdate",
    "params": <MODEL_DEPLOYMENT>,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_DEPLOYMENTRESULT>,
    "id": "<GUID>"
}
```

### DeploymentCancel

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.DeploymentCancel",
    "params": i64,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": "",
    "id": "<GUID>"
}
```

### DeploymentGet

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.DeploymentGet",
    "params": i64,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_DEPLOYMENT>,
    "id": "<GUID>"
}
```

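Both *DeploymentCancel* and *DeploymentGet* take the contract id (an i64) as their only parameter; for example, fetching the deployment under a hypothetical contract id 123456:

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.DeploymentGet",
    "params": 123456,
    "id": "<GUID>"
}
```
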
### ZDBDeploy

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.ZdbDeploy",
    "params": <MODEL_ZDB>,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_ZDBRESULT>,
    "id": "<GUID>"
}
```

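A sketch of a deploy request for a small namespace, with hypothetical values throughout (0-db is commonly run in `user` or `seq` mode; check the zdb documentation for the mode you need):

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.ZdbDeploy",
    "params": {
        "node_id": 11,
        "name": "mydb",
        "password": "secret",
        "public": false,
        "size": 10,
        "description": "backup namespace",
        "mode": "seq"
    },
    "id": "<GUID>"
}
```
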
### ZDBDelete

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.ZdbDelete",
    "params": string,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": "",
    "id": "<GUID>"
}
```

### ZDBGet

**Request**

```
{
    "jsonrpc": "2.0",
    "method": "tfgrid.ZdbGet",
    "params": string,
    "id": "<GUID>"
}
```

**Response**

```
{
    "jsonrpc": "2.0",
    "result": <MODEL_ZDBRESULT>,
    "id": "<GUID>"
}
```

## Models

### MODEL_CREDENTIALS

```
{
    "mnemonics": string,
    "network": string
}
```

### MODEL_GATEWAYNAME

```
{
    "nodeid": u32,
    "name": string,
    "backends": [string],
    "tlspassthrough": bool,
    "description": string
}
```

### MODEL_GATEWAYNAMERESULT

```
{
    "nodeid": u32,
    "name": string,
    "backends": [string],
    "tlspassthrough": bool,
    "description": string,
    "fqdn": string,
    "namecontractid": u64,
    "contractid": u64
}
```

### MODEL_GATEWAYFQDN

```
{
    "nodeid": u32,
    "backends": [string],
    "fqdn": string,
    "name": string,
    "tlspassthrough": bool,
    "description": string
}
```

### MODEL_GATEWAYFQDNRESULT

```
{
    "nodeid": u32,
    "backends": [string],
    "fqdn": string,
    "name": string,
    "tlspassthrough": bool,
    "description": string,
    "contractid": u64
}
```

### MODEL_K8SCLUSTER

```
{
    "name": string,
    "master": MODEL_K8SNODE,
    "workers": [MODEL_K8SNODE],
    "token": string,
    "ssh_key": string
}
```

### MODEL_K8SCLUSTERRESULT

```
{
    "name": string,
    "master": MODEL_K8SNODE,
    "workers": [MODEL_K8SNODE],
    "token": string,
    "ssh_key": string,
    "node_deployment_id": map[u32]u64
}
```

### MODEL_K8SNODE

```
{
    "name": string,
    "nodeid": string,
    "public_ip": bool,
    "public_ip6": bool,
    "planetary": bool,
    "flist": string,
    "cpu": u32,
    "memory": u32, // in MB
    "disk_size": u32 // in GB, mounted at /mydisk
}
```

### MODEL_K8SNODERESULT

```
{
    "name": string,
    "nodeid": string,
    "public_ip": bool,
    "public_ip6": bool,
    "planetary": bool,
    "flist": string,
    "cpu": u32,
    "memory": u32, // in MB
    "disk_size": u32, // in GB, mounted at /mydisk
    "computed_ip4": string,
    "computed_ip6": string,
    "wg_ip": string,
    "planetary_ip": string
}
```

### MODEL_DEPLOYMENT

```
{
    "version": int,
    "twin_id": u32,
    "contract_id": u64,
    "expiration": i64,
    "metadata": string,
    "description": string,
    "workloads": [MODEL_WORKLOAD],
    "signature_requirement": SignatureRequirement
}
```

### MODEL_ZDB

```
{
    "node_id": u32,
    "name": string,
    "password": string,
    "public": bool,
    "size": u32, // in GB
    "description": string,
    "mode": string
}
```

### MODEL_ZDBRESULT

```
{
    "node_id": u32,
    "name": string,
    "password": string,
    "public": bool,
    "size": u32, // in GB
    "description": string,
    "mode": string,
    "namespace": string,
    "port": u32,
    "ips": [string]
}
```

@@ -1 +0,0 @@
name:fruits
@@ -1,9 +0,0 @@
# Apple

An apple a day keeps the doctor away!

## Fun Fact

The apple can be the same color as the following:
- Red as [strawberry](berries/strawberry.md)
- Green as [broccoli](vegetables/tomato.md)
Binary file not shown.
Before Width: | Height: | Size: 123 KiB |
@@ -1,5 +0,0 @@
# Strawberry

Strawberries are red

![strawberry](img/strawberry.png)
@@ -1,3 +0,0 @@
# Fruits

- [Apple](./apple.md)
@@ -1 +0,0 @@
name:test_vegetables
@@ -1,3 +0,0 @@
# Broccoli

Broccoli looks like a small tree.
@@ -1,3 +0,0 @@
# Vegetables

- [Tomato](./tomato.md)
@@ -1,3 +0,0 @@
# Tomato

Tomato tomato
@@ -1,87 +0,0 @@
module doctree

import incubaid.herolib.core.texttools
import incubaid.herolib.data.doctree.collection
import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.core.texttools.regext

__global (
	doctrees shared map[string]&Tree
)

pub enum TreeState {
	init
	ok
	error
}

@[heap]
pub struct Tree {
pub:
	name          string
	fail_on_error bool
pub mut:
	collections map[string]&collection.Collection
	defs        map[string]&data.Page
	state       TreeState
	// context context.Context
	cid      string = '000'
	replacer ?regext.ReplaceInstructions
}

// the unique key to remember a tree;
// it is unique per circle (based on cid)
pub fn (tree Tree) key() string {
	return '${tree.cid}__${tree.name}'
}

@[params]
pub struct TreeArgsGet {
pub mut:
	name          string = 'default'
	fail_on_error bool
}

// new creates a new tree and stores it in the global map
pub fn new(args_ TreeArgsGet) !&Tree {
	mut args := args_
	args.name = texttools.name_fix(args.name)
	mut t := Tree{
		name:          args.name
		fail_on_error: args.fail_on_error
	}
	tree_set(t)
	return &t
}

// tree_get gets a tree from the global map
pub fn tree_get(name string) !&Tree {
	rlock doctrees {
		if name in doctrees {
			return doctrees[name] or { return error('Doctree ${name} not found') }
		}
	}
	return error("can't get doctree:'${name}'")
}

pub fn tree_exist(name string) bool {
	rlock doctrees {
		if name in doctrees {
			return true
		}
	}
	return false
}

pub fn tree_list() []string {
	rlock doctrees {
		return doctrees.keys()
	}
}

// tree_set stores a tree in the global map
pub fn tree_set(tree Tree) {
	lock doctrees {
		doctrees[tree.name] = &tree
	}
}
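As a minimal usage sketch of the registry above (the tree name is hypothetical; only the functions defined in this file are called):

```v
mut t := new(name: 'docs')! // creates a Tree and registers it via tree_set
assert tree_exist('docs')
mut same := tree_get('docs')! // fetches the same instance from the global map
println(tree_list()) // e.g. ['docs']
```
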
@@ -1,79 +0,0 @@
module doctree

import incubaid.herolib.core.pathlib
import incubaid.herolib.data.doctree.collection.data
import incubaid.herolib.data.doctree.collection
import os

const collections_path = os.dir(@FILE) + '/testdata/tree_test'
const tree_name = 'tree_test_tree'

fn test_write_tree() {
	write_dir1 := pathlib.get_dir(path: '/tmp/tree_write1', empty: true)!
	write_dir2 := pathlib.get_dir(path: '/tmp/tree_write2', empty: true)!
	write_dir3 := pathlib.get_dir(path: '/tmp/tree_write3', empty: true)!

	// read tree1
	mut tree1 := new(name: tree_name)!
	tree1.scan(path: collections_path)!
	tree1.export(destination: write_dir1.path)!

	// create tree2 from the written tree
	mut tree2 := new(name: tree_name)!
	tree2.scan(path: write_dir1.path)!
	tree2.export(destination: write_dir2.path)!

	// write tree2 another time to compare the output of the two
	mut tree3 := new(name: tree_name)!
	tree3.scan(path: write_dir2.path)!
	tree3.export(destination: write_dir3.path)!

	// assert the first tree matches the third one
	assert tree1.collections.len == tree3.collections.len
	for k, mut col1 in tree1.collections {
		mut col3 := tree3.collections[k] or { panic('collection ${k} is not in tree copy') }
		match_collections(mut *col1, mut *col3)!
	}

	// assert the second tree matches the third one
	assert tree2.collections.len == tree3.collections.len
	for k, mut col2 in tree2.collections {
		mut col3 := tree3.collections[k] or { panic('collection ${k} is not in tree copy') }
		match_collections(mut *col2, mut *col3)!
	}
}

fn match_files(mut files1 map[string]&data.File, mut files2 map[string]&data.File) ! {
	assert files1.len == files2.len
	for name, mut file1 in files1 {
		mut file2 := files2[name] or { return error("${name} doesn't exist in both collections") }
		file1_cont := file1.path.read()!
		file2_cont := file2.path.read()!
		if file1_cont != file2_cont {
			return error('${name} content mismatch')
		}
	}
}

fn match_pages(mut pages1 map[string]&data.Page, mut pages2 map[string]&data.Page) ! {
	// error pages are generated on export, so they are not part of the original
	if 'errors' in pages1.keys() {
		pages1.delete('errors')
	}
	if 'errors' in pages2.keys() {
		pages2.delete('errors')
	}
	if pages1.len != pages2.len {
		return error('number of pages does not correspond in both collections')
	}
	for name, mut page1 in pages1 {
		mut page2 := pages2[name] or { return error("${name} doesn't exist in both collections") }
		assert page1.get_markdown()!.trim_space() == page2.get_markdown()!.trim_space()
	}
}

fn match_collections(mut col1 collection.Collection, mut col2 collection.Collection) ! {
	match_files(mut col1.files, mut col2.files)!
	match_files(mut col1.images, mut col2.images)!
	match_pages(mut col1.pages, mut col2.pages)!
}