wip: fix doctree example
This commit is contained in:
@@ -19,7 +19,7 @@ for project in 'projectinca, legal, why, web4,tfgrid3'.split(',').map(it.trim_sp
|
||||
}
|
||||
|
||||
tree.export(
|
||||
dest: '/tmp/test'
|
||||
destination: '/tmp/test'
|
||||
reset: true
|
||||
keep_structure: true
|
||||
exclude_errors: false
|
||||
|
||||
1
lib/biz/biz.v
Normal file
1
lib/biz/biz.v
Normal file
@@ -0,0 +1 @@
|
||||
module biz
|
||||
39
lib/biz/bizmodel/factory.v
Normal file
39
lib/biz/bizmodel/factory.v
Normal file
@@ -0,0 +1,39 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.biz.spreadsheet
|
||||
|
||||
__global (
|
||||
bizmodels shared map[string]&BizModel
|
||||
)
|
||||
|
||||
pub fn get(name string) !&BizModel {
|
||||
rlock bizmodels {
|
||||
if name in bizmodels {
|
||||
return bizmodels[name] or { panic('bug') }
|
||||
}
|
||||
}
|
||||
return error("cann't find biz model:'${name}' in global bizmodels")
|
||||
}
|
||||
|
||||
// get bizmodel from global
|
||||
pub fn getset(name string) !&BizModel {
|
||||
lock bizmodels {
|
||||
if name !in bizmodels {
|
||||
mut sh := spreadsheet.sheet_new(name: 'bizmodel_${name}')!
|
||||
mut bizmodel := BizModel{
|
||||
sheet: sh
|
||||
name: name
|
||||
// currencies: cs
|
||||
}
|
||||
bizmodels[bizmodel.name] = &bizmodel
|
||||
}
|
||||
return bizmodels[name] or { panic('bug') }
|
||||
}
|
||||
panic('bug')
|
||||
}
|
||||
|
||||
pub fn set(bizmodel BizModel) {
|
||||
lock bizmodels {
|
||||
bizmodels[bizmodel.name] = &bizmodel
|
||||
}
|
||||
}
|
||||
110
lib/biz/bizmodel/macros.v
Normal file
110
lib/biz/bizmodel/macros.v
Normal file
@@ -0,0 +1,110 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub fn playmacro(action playbook.Action) !string {
|
||||
p := action.params
|
||||
|
||||
bizname := action.params.get('bizname') or {
|
||||
return error("Can't find param:'bizname' for action: ${action.name}, please specify as bizname: ...")
|
||||
}
|
||||
|
||||
mut sim := get(bizname)!
|
||||
|
||||
if action.name == 'employee_wiki' {
|
||||
return employee_wiki(p, sim)!
|
||||
} else if action.name == 'employees_wiki' {
|
||||
return employees_wiki(p, sim)!
|
||||
} else if action.name == 'department_wiki' {
|
||||
return department_wiki(p, sim)!
|
||||
} else if action.name == 'revenues_wiki' {
|
||||
return revenues_wiki(p, mut sim)!
|
||||
}
|
||||
|
||||
return error("couldn't find macro '${action.name}' for bizmodel.")
|
||||
}
|
||||
|
||||
fn employee_wiki(p paramsparser.Params, sim BizModel) !string {
|
||||
console.print_green('playmacro employee_wiki')
|
||||
mut id := p.get_default('id', '')!
|
||||
if id !in sim.employees {
|
||||
id = p.get_default('name', '')!
|
||||
}
|
||||
|
||||
if id !in sim.employees {
|
||||
println(id)
|
||||
println(sim.employees)
|
||||
panic('sss')
|
||||
return error('employee with name \'${id}\' not found')
|
||||
}
|
||||
|
||||
employee := sim.employees[id] or { panic('bug') }
|
||||
|
||||
println(employee)
|
||||
|
||||
// OUTPUTS:
|
||||
// &bizmodel.Employee{
|
||||
// name: 'despiegk'
|
||||
// description: 'CTO'
|
||||
// department: 'engineering'
|
||||
// cost: '1:12000EUR,60:21258.73200000001'
|
||||
// cost_percent_revenue: 0.0
|
||||
// nrpeople: '1'
|
||||
// indexation: 0.1
|
||||
// cost_center: 'default_costcenter'
|
||||
// page: 'cto.md'
|
||||
// }
|
||||
|
||||
// if true{panic("s")}
|
||||
|
||||
// theme := 'light'
|
||||
// theme := 'dark' // Removed unused variable
|
||||
mut t := $tmpl('./templates/employee.md')
|
||||
return t
|
||||
}
|
||||
|
||||
fn employees_wiki(p paramsparser.Params, sim BizModel) !string {
|
||||
mut deps := []Department{}
|
||||
for _, dep in sim.departments {
|
||||
deps << dep
|
||||
}
|
||||
deps.sort(a.order < b.order)
|
||||
|
||||
mut employee_names := map[string]string{}
|
||||
for _, empl in sim.employees {
|
||||
employee_names[empl.name] = empl.name
|
||||
if empl.page.len > 0 {
|
||||
employee_names[empl.name] = '[${empl.name}](${empl.page})'
|
||||
}
|
||||
}
|
||||
mut t := $tmpl('./templates/departments.md')
|
||||
|
||||
return t
|
||||
}
|
||||
|
||||
fn department_wiki(p paramsparser.Params, sim BizModel) !string {
|
||||
return ''
|
||||
}
|
||||
|
||||
fn revenues_wiki(p paramsparser.Params, mut sim BizModel) !string {
|
||||
// mut revs := map[string]string{} // Removed unused variable
|
||||
|
||||
// for name,_ in sim.products{
|
||||
// myrow:=sim.sheet.row_get('${name}_rev_total') or { panic("bug in revenues_wiki macro") }
|
||||
// println(myrow)
|
||||
// }
|
||||
|
||||
// if true{
|
||||
// panic("s")
|
||||
// }
|
||||
|
||||
panic('fix template below')
|
||||
// mut t:=$tmpl('./templates/revenue_overview.md')
|
||||
|
||||
// title:'REVENUE FOR ${name1.to_lower().replace("_"," ")}'
|
||||
|
||||
// return t
|
||||
}
|
||||
56
lib/biz/bizmodel/model.v
Normal file
56
lib/biz/bizmodel/model.v
Normal file
@@ -0,0 +1,56 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.biz.spreadsheet
|
||||
|
||||
pub struct BizModel {
|
||||
pub mut:
|
||||
name string
|
||||
sheet &spreadsheet.Sheet
|
||||
employees map[string]&Employee
|
||||
departments map[string]&Department
|
||||
costcenters map[string]&Costcenter
|
||||
products map[string]&Product
|
||||
}
|
||||
|
||||
pub struct Employee {
|
||||
pub:
|
||||
name string
|
||||
description string
|
||||
title string
|
||||
department string
|
||||
cost string
|
||||
cost_percent_revenue f64
|
||||
nrpeople string
|
||||
indexation f64
|
||||
cost_center string
|
||||
page string
|
||||
fulltime_perc f64
|
||||
}
|
||||
|
||||
pub struct Department {
|
||||
pub:
|
||||
name string
|
||||
description string
|
||||
page string
|
||||
title string
|
||||
order int
|
||||
}
|
||||
|
||||
pub struct Costcenter {
|
||||
pub:
|
||||
name string
|
||||
description string
|
||||
department string
|
||||
}
|
||||
|
||||
pub struct Product {
|
||||
pub mut:
|
||||
name string
|
||||
title string
|
||||
description string
|
||||
order int
|
||||
has_revenue bool
|
||||
has_items bool
|
||||
has_oneoffs bool
|
||||
nr_months_recurring int
|
||||
}
|
||||
89
lib/biz/bizmodel/play.v
Normal file
89
lib/biz/bizmodel/play.v
Normal file
@@ -0,0 +1,89 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
// import freeflowuniverse.herolib.core.texttools
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.biz.spreadsheet
|
||||
|
||||
pub fn play(mut plbook PlayBook) ! {
|
||||
// first make sure we find a run action to know the name
|
||||
mut actions4 := plbook.actions_find(actor: 'bizmodel')!
|
||||
|
||||
if actions4.len == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
knownactions := ['revenue_define', 'employee_define', 'department_define', 'funding_define',
|
||||
'costcenter_define', 'cost_define']
|
||||
|
||||
for action in actions4 {
|
||||
// biz name needs to be specified in the the bizmodel hero actions
|
||||
bizname := action.params.get('bizname') or {
|
||||
return error("Can't find param: 'bizname' for ${action.actor}.${action.name} macro, is a requirement argument.")
|
||||
}
|
||||
mut sim := getset(bizname)!
|
||||
|
||||
if action.name !in knownactions {
|
||||
return error("Can't find macro with name: ${action.name} for macro's for bizmodel.")
|
||||
}
|
||||
|
||||
console.print_debug(action.name)
|
||||
match action.name {
|
||||
'revenue_define' {
|
||||
sim.revenue_action(action)!
|
||||
}
|
||||
'funding_define' {
|
||||
sim.funding_define_action(action)!
|
||||
}
|
||||
'costcenter_define' {
|
||||
sim.costcenter_define_action(action)!
|
||||
}
|
||||
else {}
|
||||
}
|
||||
}
|
||||
|
||||
console.print_debug('TOTALS for bizmodel play')
|
||||
// now we have processed the macro's, we can calculate the totals
|
||||
rlock bizmodels {
|
||||
for _, mut sim in bizmodels {
|
||||
// sim.hr_total()!
|
||||
sim.cost_total()!
|
||||
sim.revenue_total()!
|
||||
sim.funding_total()!
|
||||
}
|
||||
}
|
||||
|
||||
for action in actions4 {
|
||||
console.print_debug(action.name)
|
||||
// biz name needs to be specified in the the bizmodel hero actions
|
||||
bizname := action.params.get('bizname') or {
|
||||
return error("Can't find param: 'bizname' for bizmodel macro, is a requirement argument.")
|
||||
}
|
||||
|
||||
mut sim := get(bizname)!
|
||||
|
||||
if action.name !in knownactions {
|
||||
return error("Can't find macro with name: ${action.name} for macro's for bizmodel.")
|
||||
}
|
||||
|
||||
match action.name {
|
||||
'cost_define' {
|
||||
sim.cost_define_action(action)!
|
||||
}
|
||||
'department_define' {
|
||||
sim.department_define_action(action)!
|
||||
}
|
||||
'employee_define' {
|
||||
sim.employee_define_action(action)!
|
||||
}
|
||||
else {}
|
||||
}
|
||||
}
|
||||
|
||||
// mut sim:=get("test")!
|
||||
// //println(sim.sheet.rows.keys())
|
||||
// //println(spreadsheet.sheets_keys())
|
||||
// println(spreadsheet.sheet_get('bizmodel_test')!)
|
||||
// if true{panic("sss")}
|
||||
}
|
||||
85
lib/biz/bizmodel/play_cost.v
Normal file
85
lib/biz/bizmodel/play_cost.v
Normal file
@@ -0,0 +1,85 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { Action }
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
fn (mut m BizModel) cost_define_action(action Action) ! {
|
||||
mut name := action.params.get_default('name', '')!
|
||||
mut descr := action.params.get_default('descr', '')!
|
||||
if descr.len == 0 {
|
||||
descr = action.params.get('description')!
|
||||
}
|
||||
if name.len == 0 {
|
||||
// make name ourselves
|
||||
name = texttools.name_fix(descr) // TODO:limit len
|
||||
}
|
||||
mut cost := action.params.get_default('cost', '0.0')! // is extrapolated
|
||||
mut cost_one := action.params.get_default('cost_one', '')!
|
||||
|
||||
department := action.params.get_default('department', 'unknown department')!
|
||||
cost_percent_revenue := action.params.get_percentage_default('cost_percent_revenue',
|
||||
'0%')!
|
||||
|
||||
indexation := action.params.get_percentage_default('indexation', '0%')!
|
||||
|
||||
if indexation > 0 {
|
||||
if cost.contains(':') {
|
||||
return error('cannot specify cost growth and indexation, should be no : inside cost param.')
|
||||
}
|
||||
// TODO: need to be able to go from e.g. month 6 and still do indexation
|
||||
mut cost_ := cost.int()
|
||||
cost2 := cost_ * (1 + indexation) * (1 + indexation) * (1 + indexation) * (1 + indexation) * (
|
||||
1 + indexation) * (1 + indexation) // 6 years, maybe need to look at months
|
||||
cost = '0:${cost},59:${cost2}'
|
||||
// console.print_debug(cost)
|
||||
}
|
||||
|
||||
mut extrap := false
|
||||
if cost_one != '' {
|
||||
// if cost!=""{
|
||||
// return error("Cannot specify cost:'${cost}' and cost_one:'${cost_one}'.")
|
||||
// }
|
||||
extrap = false
|
||||
cost = cost_one
|
||||
} else {
|
||||
// if cost_one!=""{
|
||||
// return error("Cannot specify cost:'${cost}' and cost_one:'${cost_one}'.")
|
||||
// }
|
||||
extrap = true
|
||||
}
|
||||
|
||||
mut cost_row := m.sheet.row_new(
|
||||
name: 'cost_${name}'
|
||||
growth: cost
|
||||
tags: 'department:${department} ocost'
|
||||
descr: 'cost overhead for department ${department}'
|
||||
extrapolate: extrap
|
||||
)!
|
||||
cost_row.action(action: .reverse)!
|
||||
|
||||
if cost_percent_revenue > 0 {
|
||||
mut revtotal := m.sheet.row_get('revenue_total')!
|
||||
mut cost_min := revtotal.action(
|
||||
action: .multiply
|
||||
val: cost_percent_revenue
|
||||
name: 'tmp3'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
cost_min.action(action: .forwardavg)! // avg out forward looking for 12 months
|
||||
cost_min.action(action: .reverse)!
|
||||
cost_row.action(
|
||||
action: .min
|
||||
rows: [cost_min]
|
||||
)!
|
||||
m.sheet.row_delete('tmp3')
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut sim BizModel) cost_total() ! {
|
||||
sim.sheet.group2row(
|
||||
name: 'hr_cost_total'
|
||||
include: ['hrcost']
|
||||
tags: 'pl'
|
||||
descr: 'total cost for hr'
|
||||
)!
|
||||
}
|
||||
23
lib/biz/bizmodel/play_costcenter.v
Normal file
23
lib/biz/bizmodel/play_costcenter.v
Normal file
@@ -0,0 +1,23 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { Action }
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
fn (mut m BizModel) costcenter_define_action(action Action) ! {
|
||||
mut name := action.params.get_default('name', '')!
|
||||
mut descr := action.params.get_default('descr', '')!
|
||||
if descr.len == 0 {
|
||||
descr = action.params.get('description')!
|
||||
}
|
||||
mut department := action.params.get_default('department', '')!
|
||||
if name.len == 0 {
|
||||
// make name ourselves
|
||||
name = texttools.name_fix(descr) // TODO:limit len
|
||||
}
|
||||
mut cc := Costcenter{
|
||||
name: name
|
||||
description: descr
|
||||
department: department
|
||||
}
|
||||
m.costcenters[name] = &cc
|
||||
}
|
||||
41
lib/biz/bizmodel/play_funding.v
Normal file
41
lib/biz/bizmodel/play_funding.v
Normal file
@@ -0,0 +1,41 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { Action }
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
// populate the params for hr .
|
||||
// !!hr.funding_define .
|
||||
// - name, e.g. for a specific person .
|
||||
// - descr: description of the funding .
|
||||
// - investment is month:amount,month:amount, ... .
|
||||
// - type: loan or capital .
|
||||
fn (mut m BizModel) funding_define_action(action Action) ! {
|
||||
mut name := action.params.get_default('name', '')!
|
||||
mut descr := action.params.get_default('descr', '')!
|
||||
if descr.len == 0 {
|
||||
descr = action.params.get('description')!
|
||||
}
|
||||
if name.len == 0 {
|
||||
// make name ourselves
|
||||
name = texttools.name_fix(descr) // TODO:limit len
|
||||
}
|
||||
mut investment := action.params.get_default('investment', '0.0')!
|
||||
fundingtype := action.params.get_default('type', 'capital')!
|
||||
|
||||
m.sheet.row_new(
|
||||
name: 'funding_${name}'
|
||||
growth: investment
|
||||
tags: 'funding type:${fundingtype}'
|
||||
descr: descr
|
||||
extrapolate: false
|
||||
)!
|
||||
}
|
||||
|
||||
fn (mut sim BizModel) funding_total() ! {
|
||||
sim.sheet.group2row(
|
||||
name: 'funding_total'
|
||||
include: ['funding']
|
||||
tags: 'pl'
|
||||
descr: 'total funding'
|
||||
)!
|
||||
}
|
||||
148
lib/biz/bizmodel/play_hr.v
Normal file
148
lib/biz/bizmodel/play_hr.v
Normal file
@@ -0,0 +1,148 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { Action }
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
// import freeflowuniverse.herolib.data.paramsparser
|
||||
// import freeflowuniverse.herolib.core.pathlib
|
||||
// import rand
|
||||
|
||||
// populate the params for hr
|
||||
// !!hr.employee_define
|
||||
// descr:'Junior Engineer'
|
||||
// nrpeople:'1:5,60:30'
|
||||
// cost:'4000USD'
|
||||
// indexation:'5%'
|
||||
// department:'engineering'
|
||||
// cost_percent_revenue e.g. 4%, will make sure the cost will be at least 4% of revenue
|
||||
|
||||
fn (mut m BizModel) employee_define_action(action Action) ! {
|
||||
mut name := action.params.get_default('name', '')!
|
||||
mut descr := action.params.get_default('descr', '')!
|
||||
if descr.len == 0 {
|
||||
descr = action.params.get('description')!
|
||||
}
|
||||
if name.len == 0 {
|
||||
// make name ourselves
|
||||
name = texttools.name_fix(descr) // TODO:limit len
|
||||
}
|
||||
mut cost := action.params.get_default('cost', '0.0')!
|
||||
// mut cost_year := action.params.get_currencyfloat_default('cost_year', 0.0)!
|
||||
// if cost_year > 0 {
|
||||
// cost = cost_year / 12
|
||||
// }
|
||||
// mut cost_growth := action.params.get_default('cost_growth', '')!
|
||||
// growth := action.params.get_default('growth', '1:1')!
|
||||
department := action.params.get_default('department', '')!
|
||||
page := action.params.get_default('page', '')!
|
||||
|
||||
cost_percent_revenue := action.params.get_percentage_default('cost_percent_revenue',
|
||||
'0%')!
|
||||
nrpeople := action.params.get_default('nrpeople', '1')!
|
||||
|
||||
indexation := action.params.get_percentage_default('indexation', '0%')!
|
||||
|
||||
cost_center := action.params.get_default('costcenter', 'default_costcenter')!
|
||||
|
||||
// // cost per person
|
||||
// namecostperson := 'nr_${name}'
|
||||
// if cost_growth.len > 0 && cost > 0 {
|
||||
// return error('cannot specify cost and cost growth together, chose one please.')
|
||||
// }
|
||||
if indexation > 0 {
|
||||
if cost.contains(':') {
|
||||
return error('cannot specify cost growth and indexation, should be no : inside cost param.')
|
||||
}
|
||||
mut cost_ := cost.int()
|
||||
cost2 := cost_ * (1 + indexation) * (1 + indexation) * (1 + indexation) * (1 + indexation) * (
|
||||
1 + indexation) * (1 + indexation) // 6 years, maybe need to look at months
|
||||
cost = '1:${cost},60:${cost2}'
|
||||
}
|
||||
|
||||
mut costpeople_row := m.sheet.row_new(
|
||||
name: 'hr_cost_${name}'
|
||||
growth: cost
|
||||
tags: 'department:${department} hrcost'
|
||||
descr: 'Department ${department}'
|
||||
subgroup: 'HR cost per department.'
|
||||
)!
|
||||
costpeople_row.action(action: .reverse)!
|
||||
|
||||
// multiply with nr of people if any
|
||||
if nrpeople != '1' {
|
||||
mut nrpeople_row := m.sheet.row_new(
|
||||
name: 'nrpeople_${name}'
|
||||
growth: nrpeople
|
||||
tags: 'hrnr'
|
||||
descr: '# people for ${descr}'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
_ := costpeople_row.action(action: .multiply, rows: [nrpeople_row])!
|
||||
}
|
||||
if cost_percent_revenue > 0 {
|
||||
mut revtotal := m.sheet.row_get('revenue_total')!
|
||||
mut cost_min := revtotal.action(
|
||||
action: .multiply
|
||||
val: cost_percent_revenue
|
||||
name: 'tmp3'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
cost_min.action(action: .forwardavg)! // avg out forward looking for 12 months
|
||||
cost_min.action(action: .reverse)!
|
||||
costpeople_row.action(
|
||||
action: .min
|
||||
rows: [cost_min]
|
||||
)!
|
||||
m.sheet.row_delete('tmp3')
|
||||
}
|
||||
employee := Employee{
|
||||
name: name
|
||||
description: descr
|
||||
department: department
|
||||
cost: cost
|
||||
cost_percent_revenue: cost_percent_revenue
|
||||
nrpeople: nrpeople
|
||||
indexation: indexation
|
||||
cost_center: cost_center
|
||||
page: page
|
||||
fulltime_perc: action.params.get_percentage_default('fulltime', '100%')!
|
||||
}
|
||||
|
||||
// println(employee)
|
||||
|
||||
// todo: use existing id gen
|
||||
|
||||
if name != '' {
|
||||
// sid = smartid.sid_new('')!
|
||||
// // TODO: this isn't necessary if sid_new works correctly
|
||||
// // but lets keep it in here for now until we test smartid
|
||||
// for (sid in m.employees) {
|
||||
// sid = smartid.sid_new('')!
|
||||
// }
|
||||
m.employees[name] = &employee
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut m BizModel) department_define_action(action Action) ! {
|
||||
mut name := action.params.get_default('name', '')!
|
||||
mut descr := action.params.get_default('descr', '')!
|
||||
if descr.len == 0 {
|
||||
descr = action.params.get_default('description', '')!
|
||||
}
|
||||
|
||||
department := Department{
|
||||
name: name
|
||||
description: descr
|
||||
title: action.params.get_default('title', '')!
|
||||
page: action.params.get_default('page', '')!
|
||||
}
|
||||
|
||||
// println(department)
|
||||
|
||||
if name != '' {
|
||||
m.departments[name] = &department
|
||||
}
|
||||
}
|
||||
|
||||
// fn (mut sim BizModel) hr_total() ! {
|
||||
|
||||
// }
|
||||
345
lib/biz/bizmodel/play_product_revenue.v
Normal file
345
lib/biz/bizmodel/play_product_revenue.v
Normal file
@@ -0,0 +1,345 @@
|
||||
module bizmodel
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { Action }
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
// - name, e.g. for a specific project
|
||||
// - descr, description of the revenue line item
|
||||
// - revenue_setup, revenue for 1 item '1000usd'
|
||||
// - revenue_setup_delay
|
||||
// - revenue_monthly, revenue per month for 1 item
|
||||
// - revenue_monthly_delay, how many months before monthly revenue starts
|
||||
// - maintenance_month_perc, how much percent of revenue_setup will come back over months
|
||||
// - cogs_setup, cost of good for 1 item at setup
|
||||
// - cogs_setup_delay, how many months before setup cogs starts, after sales
|
||||
// - cogs_setup_perc: what is percentage of the cogs (can change over time) for setup e.g. 0:50%
|
||||
|
||||
// - cogs_monthly, cost of goods for the monthly per 1 item
|
||||
// - cogs_monthly_delay, how many months before monthly cogs starts, after sales
|
||||
// - cogs_monthly_perc: what is percentage of the cogs (can change over time) for monthly e.g. 0:5%,12:10%
|
||||
|
||||
// - nr_sold: how many do we sell per month (is in growth format e.g. 10:100,20:200)
|
||||
// - nr_months_recurring: how many months is recurring, if 0 then no recurring
|
||||
//
|
||||
fn (mut m BizModel) revenue_action(action Action) ! {
|
||||
mut name := action.params.get_default('name', '')!
|
||||
mut descr := action.params.get_default('descr', '')!
|
||||
if descr.len == 0 {
|
||||
descr = action.params.get_default('description', '')!
|
||||
}
|
||||
if name.len == 0 {
|
||||
// make name ourselves
|
||||
name = texttools.name_fix(descr)
|
||||
}
|
||||
|
||||
name = texttools.name_fix(name)
|
||||
if name.len == 0 {
|
||||
return error('name and description is empty for ${action}')
|
||||
}
|
||||
name2 := name.replace('_', ' ').replace('-', ' ')
|
||||
descr = descr.replace('_', ' ').replace('-', ' ')
|
||||
|
||||
mut product := Product{
|
||||
name: name
|
||||
title: action.params.get_default('title', name)!
|
||||
description: descr
|
||||
}
|
||||
m.products[name] = &product
|
||||
|
||||
mut nr_months_recurring := action.params.get_int_default('nr_months_recurring', 60)!
|
||||
|
||||
if nr_months_recurring == 0 {
|
||||
nr_months_recurring = 1
|
||||
}
|
||||
|
||||
product.nr_months_recurring = nr_months_recurring
|
||||
|
||||
mut revenue := m.sheet.row_new(
|
||||
name: '${name}_revenue'
|
||||
growth: action.params.get_default('revenue', '0:0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'Revenue for ${name2}'
|
||||
extrapolate: false
|
||||
)!
|
||||
|
||||
mut revenue_setup := m.sheet.row_new(
|
||||
name: '${name}_revenue_setup'
|
||||
growth: action.params.get_default('revenue_setup', '0:0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'Setup Sales price for ${name2}'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
mut revenue_setup_delay := action.params.get_int_default('revenue_setup_delay', 0)!
|
||||
|
||||
mut revenue_monthly := m.sheet.row_new(
|
||||
name: '${name}_revenue_monthly'
|
||||
growth: action.params.get_default('revenue_monthly', '0:0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'Monthly Sales price for ${name2}'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
mut revenue_monthly_delay := action.params.get_int_default('revenue_monthly_delay',
|
||||
1)!
|
||||
|
||||
mut cogs := m.sheet.row_new(
|
||||
name: '${name}_cogs'
|
||||
growth: action.params.get_default('cogs', '0:0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'COGS for ${name2}'
|
||||
extrapolate: false
|
||||
)!
|
||||
|
||||
if revenue.max() > 0 || cogs.max() > 0 {
|
||||
product.has_oneoffs = true
|
||||
}
|
||||
|
||||
_ := m.sheet.row_new(
|
||||
name: '${name}_cogs_perc'
|
||||
growth: action.params.get_default('cogs_perc', '0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'COGS as percent of revenue for ${name2}'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
mut cogs_setup := m.sheet.row_new(
|
||||
name: '${name}_cogs_setup'
|
||||
growth: action.params.get_default('cogs_setup', '0:0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'COGS for ${name2} Setup'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
mut cogs_setup_delay := action.params.get_int_default('cogs_setup_delay', 1)!
|
||||
|
||||
mut cogs_setup_perc := m.sheet.row_new(
|
||||
name: '${name}_cogs_setup_perc'
|
||||
growth: action.params.get_default('cogs_setup_perc', '0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'COGS as percent of revenue for ${name2} Setup'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
mut cogs_monthly := m.sheet.row_new(
|
||||
name: '${name}_cogs_monthly'
|
||||
growth: action.params.get_default('cogs_monthly', '0:0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'Cost of Goods (COGS) for ${name2} Monthly'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
mut cogs_monthly_delay := action.params.get_int_default('cogs_monthly_delay', 1)!
|
||||
|
||||
mut cogs_monthly_perc := m.sheet.row_new(
|
||||
name: '${name}_cogs_monthly_perc'
|
||||
growth: action.params.get_default('cogs_monthly_perc', '0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'COGS as percent of revenue for ${name2} Monthly'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
// if true{
|
||||
// println(cogs_setup_perc)
|
||||
// println(cogs_monthly_perc)
|
||||
// panic("sdsd")
|
||||
// }
|
||||
|
||||
mut nr_sold := m.sheet.row_new(
|
||||
name: '${name}_nr_sold'
|
||||
growth: action.params.get_default('nr_sold', '0')!
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'nr of items sold/month for ${name2}'
|
||||
aggregatetype: .avg
|
||||
)!
|
||||
|
||||
if nr_sold.max() > 0 {
|
||||
product.has_items = true
|
||||
}
|
||||
|
||||
// CALCULATE THE TOTAL (multiply with nr sold)
|
||||
|
||||
mut revenue_setup_total := revenue_setup.action(
|
||||
name: '${name}_revenue_setup_total'
|
||||
descr: 'Setup sales for ${name2} total'
|
||||
action: .multiply
|
||||
rows: [nr_sold]
|
||||
delaymonths: revenue_setup_delay
|
||||
)!
|
||||
|
||||
mut revenue_monthly_total := revenue_monthly.action(
|
||||
name: '${name}_revenue_monthly_total'
|
||||
descr: 'Monthly sales for ${name2} total'
|
||||
action: .multiply
|
||||
rows: [nr_sold]
|
||||
delaymonths: revenue_monthly_delay
|
||||
)!
|
||||
|
||||
mut cogs_setup_total := cogs_setup.action(
|
||||
name: '${name}_cogs_setup_total'
|
||||
descr: 'Setup COGS for ${name2} total'
|
||||
action: .multiply
|
||||
rows: [nr_sold]
|
||||
delaymonths: cogs_setup_delay
|
||||
)!
|
||||
|
||||
mut cogs_monthly_total := cogs_monthly.action(
|
||||
name: '${name}_cogs_monthly_total'
|
||||
descr: 'Monthly COGS for ${name2} total'
|
||||
action: .multiply
|
||||
rows: [nr_sold]
|
||||
delaymonths: cogs_monthly_delay
|
||||
)!
|
||||
|
||||
// DEAL WITH RECURRING
|
||||
|
||||
if nr_months_recurring > 0 {
|
||||
revenue_monthly_total = revenue_monthly_total.recurring(
|
||||
name: '${name}_revenue_monthly_recurring'
|
||||
descr: 'Revenue monthly recurring for ${name2}'
|
||||
nrmonths: nr_months_recurring
|
||||
)!
|
||||
cogs_monthly_total = cogs_monthly_total.recurring(
|
||||
name: '${name}_cogs_monthly_recurring'
|
||||
descr: 'COGS recurring for ${name2}'
|
||||
nrmonths: nr_months_recurring
|
||||
)!
|
||||
|
||||
_ := nr_sold.recurring(
|
||||
name: '${name}_nr_sold_recurring'
|
||||
descr: 'Nr products active because of recurring for ${name2}'
|
||||
nrmonths: nr_months_recurring
|
||||
aggregatetype: .max
|
||||
)!
|
||||
// if true{
|
||||
// println(nr_sold_recurring)
|
||||
// panic('sd')
|
||||
// }
|
||||
}
|
||||
|
||||
// cogs as percentage of revenue
|
||||
mut cogs_setup_from_perc := cogs_setup_perc.action(
|
||||
action: .multiply
|
||||
rows: [revenue_setup_total]
|
||||
name: '${name}_cogs_setup_from_perc'
|
||||
)!
|
||||
mut cogs_monthly_from_perc := cogs_monthly_perc.action(
|
||||
action: .multiply
|
||||
rows: [revenue_monthly_total]
|
||||
name: '${name}_cogs_monthly_from_perc'
|
||||
)!
|
||||
|
||||
// if true{
|
||||
// println(revenue_setup_total)
|
||||
// println(cogs_setup_perc)
|
||||
// println(cogs_setup_from_perc)
|
||||
// println("montlhy")
|
||||
// println(revenue_monthly_total)
|
||||
// println(cogs_monthly_perc)
|
||||
// println(cogs_monthly_from_perc)
|
||||
// panic("sdsd")
|
||||
// }
|
||||
|
||||
// mut cogs_from_perc:=cogs_perc.action(action:.multiply,rows:[revenue],name:"cogs_from_perc")!
|
||||
|
||||
// DEAL WITH MAINTENANCE
|
||||
|
||||
// make sum of all past revenue (all one off revenue, needed to calculate maintenance)
|
||||
mut temp_past := revenue.recurring(
|
||||
nrmonths: nr_months_recurring
|
||||
name: 'temp_past'
|
||||
// delaymonths:4
|
||||
)!
|
||||
|
||||
mut maintenance_month_perc := action.params.get_percentage_default('maintenance_month_perc',
|
||||
'0%')!
|
||||
|
||||
mut maintenance_month := m.sheet.row_new(
|
||||
name: '${name}_maintenance_month'
|
||||
growth: '0:${maintenance_month_perc:.2f}'
|
||||
tags: 'rev name:${name}'
|
||||
descr: 'maintenance fee for ${name2}'
|
||||
)!
|
||||
|
||||
maintenance_month.action(action: .multiply, rows: [temp_past])!
|
||||
|
||||
// temp_past.delete()
|
||||
|
||||
// TOTALS
|
||||
|
||||
mut revenue_total := m.sheet.row_new(
|
||||
name: '${name}_revenue_total'
|
||||
growth: '0:0'
|
||||
tags: 'rev revtotal name:${name}'
|
||||
descr: 'Revenue total for ${name2}.'
|
||||
)!
|
||||
|
||||
mut cogs_total := m.sheet.row_new(
|
||||
name: '${name}_cogs_total'
|
||||
growth: '0:0'
|
||||
tags: 'rev cogstotal name:${name}'
|
||||
descr: 'COGS total for ${name2}.'
|
||||
)!
|
||||
|
||||
if revenue_total.max() > 0.0 || cogs_total.max() > 0.0 {
|
||||
product.has_revenue
|
||||
}
|
||||
|
||||
revenue_total = revenue_total.action(
|
||||
action: .add
|
||||
rows: [revenue, revenue_monthly_total, revenue_setup_total, maintenance_month]
|
||||
)!
|
||||
|
||||
if revenue_total.max() > 0 {
|
||||
product.has_revenue = true
|
||||
}
|
||||
|
||||
cogs_total = cogs_total.action(
|
||||
action: .add
|
||||
rows: [cogs, cogs_monthly_total, cogs_setup_total, cogs_setup_from_perc,
|
||||
cogs_monthly_from_perc]
|
||||
)!
|
||||
|
||||
// if true{
|
||||
// //println(m.sheet)
|
||||
// println(revenue_total)
|
||||
// println(cogs_total)
|
||||
// println(cogs)
|
||||
// println(cogs_monthly_total)
|
||||
// println(cogs_setup_total)
|
||||
// println(cogs_setup_from_perc)
|
||||
// println(cogs_monthly_from_perc)
|
||||
// panic("sdsd")
|
||||
|
||||
// }
|
||||
}
|
||||
|
||||
// revenue_total calculates and aggregates the total revenue and cost of goods sold (COGS) for the business model
|
||||
fn (mut sim BizModel) revenue_total() ! {
|
||||
// Create a new row in the sheet to represent the total revenue across all products
|
||||
sim.sheet.group2row(
|
||||
name: 'revenue_total'
|
||||
tags: ''
|
||||
descr: 'total revenue.'
|
||||
)!
|
||||
|
||||
// Create a new row in the sheet to represent the total COGS across all products
|
||||
sim.sheet.group2row(
|
||||
name: 'cogs_total'
|
||||
tags: ''
|
||||
descr: 'total cogs.'
|
||||
)!
|
||||
|
||||
// Note: The following commented-out code block seems to be for debugging or future implementation
|
||||
// It demonstrates how to create a smaller version of the sheet with specific filters
|
||||
// if true{
|
||||
// // name string
|
||||
// // namefilter []string // only include the exact names as specified for the rows
|
||||
// // includefilter []string // matches for the tags
|
||||
// // excludefilter []string // matches for the tags
|
||||
// // period_months int = 12
|
||||
// mut r:=sim.sheet.tosmaller(name:"tmp",includefilter:["cogstotal"],period_months:12)!
|
||||
// println(r)
|
||||
// panic("sdsd")
|
||||
// }
|
||||
}
|
||||
16
lib/biz/bizmodel/templates/departments.md
Normal file
16
lib/biz/bizmodel/templates/departments.md
Normal file
@@ -0,0 +1,16 @@
|
||||
|
||||
@for dept in deps
|
||||
|
||||
@if dept.title.len>0
|
||||
## @{dept.title}
|
||||
@else
|
||||
## @{dept.name}
|
||||
@end
|
||||
|
||||
| Name | Title | Nr People |
|
||||
|------|-------|-------|
|
||||
@for employee in sim.employees.values().filter(it.department == dept.name)
|
||||
| @{employee_names[employee.name]} | @{employee.title} | @{employee.nrpeople} |
|
||||
@end
|
||||
|
||||
@end
|
||||
28
lib/biz/bizmodel/templates/employee.md
Normal file
28
lib/biz/bizmodel/templates/employee.md
Normal file
@@ -0,0 +1,28 @@
|
||||
# @{employee.name}
|
||||
|
||||
|
||||
`@{employee.description}`
|
||||
|
||||
> department: `@{employee.department}`
|
||||
|
||||
**Cost To The Company:**
|
||||
|
||||
`@{employee.cost}`
|
||||
|
||||
|
||||
@if employee.cost_percent_revenue > 0.0
|
||||
|
||||
**Cost Percent Revenue:**
|
||||
|
||||
`@{employee.cost_percent_revenue}%`
|
||||
|
||||
@end
|
||||
|
||||
|
||||
@if employee.nrpeople.len > 1
|
||||
|
||||
**Number of People in this group**
|
||||
|
||||
`@{employee.nrpeople}`
|
||||
|
||||
@end
|
||||
21
lib/biz/bizmodel/templates/employee2.md
Normal file
21
lib/biz/bizmodel/templates/employee2.md
Normal file
@@ -0,0 +1,21 @@
|
||||
# Employee Wiki
|
||||
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@@picocss/pico@@2.0.6/css/pico.classless.min.css">
|
||||
|
||||
<form action="/update_employee" method="POST">
|
||||
|
||||
<label for="name">Name</label>
|
||||
<input type="text" id="name" name="name" value="John Doe" required>
|
||||
|
||||
<label for="description">Description</label>
|
||||
<textarea id="description" name="description" rows="3" required>Description of the employee</textarea>
|
||||
|
||||
<label for="department">Department</label>
|
||||
<input type="text" id="department" name="department" value="HR" required>
|
||||
|
||||
<label for="cost">Cost</label>
|
||||
<input type="number" id="cost" name="cost" value="10000" required>
|
||||
|
||||
<button type="submit" class="primary">Save Changes</button>
|
||||
|
||||
</form>
|
||||
54
lib/biz/bizmodel/templates/intro.md
Normal file
54
lib/biz/bizmodel/templates/intro.md
Normal file
@@ -0,0 +1,54 @@
|
||||
# This is our business model planner
|
||||
|
||||
## FUNDING
|
||||
|
||||
!!bizmodel.sheet_wiki includefilter:'funding'
|
||||
|
||||
## REVENUE vs COGS
|
||||
|
||||
!!bizmodel.sheet_wiki includefilter:rev
|
||||
|
||||
#### Revenue Lines
|
||||
|
||||
!!bizmodel.sheet_wiki title:'Revenue Total' includefilter:'revtotal'
|
||||
|
||||
#### COGS Lines
|
||||
|
||||
!!bizmodel.sheet_wiki title:'COGS' includefilter:'cogstotal'
|
||||
|
||||
## HR
|
||||
!!bizmodel.sheet_wiki title:'HR Teams' includefilter:'hrnr'
|
||||
|
||||
!!bizmodel.sheet_wiki title:'HR Costs' includefilter:'hrcost'
|
||||
|
||||
## Operational Costs
|
||||
|
||||
!!bizmodel.sheet_wiki title:'COSTS' includefilter:'ocost'
|
||||
|
||||
|
||||
## P&L Overview
|
||||
|
||||
<!-- period is in months, 3 means every quarter -->
|
||||
|
||||
!!bizmodel.sheet_wiki title:'P&L Overview' includefilter:'pl'
|
||||
|
||||
|
||||
!!bizmodel.graph_bar_row rowname:revenue_total unit:million title:'A Title' title_sub:'Sub'
|
||||
|
||||
Unit is in Million USD.
|
||||
|
||||
!!bizmodel.graph_bar_row rowname:revenue_total unit:million
|
||||
|
||||
!!bizmodel.graph_line_row rowname:revenue_total unit:million
|
||||
|
||||
!!bizmodel.graph_pie_row rowname:revenue_total unit:million size:'80%'
|
||||
|
||||
|
||||
## Some Details
|
||||
|
||||
> show how we can do per month
|
||||
|
||||
!!bizmodel.sheet_wiki includefilter:'pl' period_months:1
|
||||
|
||||
|
||||
|
||||
74
lib/biz/bizmodel/templates/revenue_overview.md
Normal file
74
lib/biz/bizmodel/templates/revenue_overview.md
Normal file
@@ -0,0 +1,74 @@
|
||||
|
||||
# Revenue Overview
|
||||
|
||||
@for name1,product in sim.products
|
||||
|
||||
@if product.has_revenue
|
||||
|
||||
## ${product.title}
|
||||
|
||||
${product.description}
|
||||
|
||||
#### parameters for the product
|
||||
|
||||
@if product.has_oneoffs
|
||||
|
||||
Product ${name1} has revenue events (one offs)
|
||||
|
||||
!!!spreadsheet.sheet_wiki
|
||||
namefilter:'${name1}_revenue,${name1}_cogs,${name1}_cogs_perc,${name1}_maintenance_month_perc' sheetname:'bizmodel_tf9
|
||||
|
||||
- COGS = Cost of Goods Sold (is our cost to deliver the product/service)
|
||||
- maintenance is fee we charge to the customer per month in relation to the revenue we charged e.g. 1% of a product which was sold for 1m EUR means we charge 1% of 1 m EUR per month.
|
||||
|
||||
@end //one offs
|
||||
|
||||
@if product.has_items
|
||||
|
||||
Product sold and its revenue/cost of goods
|
||||
|
||||
!!!spreadsheet.sheet_wiki
|
||||
namefilter:'${name1}_nr_sold,${name1}_revenue_setup,${name1}_revenue_monthly,${name1}_cogs_setup,${name1}_cogs_setup_perc,${name1}_cogs_monthly,${name1}_cogs_monthly_perc'
|
||||
sheetname:'bizmodel_tf9
|
||||
|
||||
- nr sold, is the nr sold per month of ${name1}
|
||||
- revenue setup is setup per item for ${name1}, this is the money we receive. Similar there is a revenue monthly.
|
||||
- cogs = Cost of Goods Sold (is our cost to deliver the product)
|
||||
- can we as a setup per item, or per month per item
|
||||
|
||||
@if product.nr_months_recurring>1
|
||||
|
||||
This product ${name1} is recurring, means customer pays per month ongoing, the period customer is paying for in months is: **${product.nr_months_recurring}**
|
||||
|
||||
@end //recurring
|
||||
|
||||
@end
|
||||
|
||||
#### the revenue/cogs calculated
|
||||
|
||||
|
||||
!!!spreadsheet.sheet_wiki
|
||||
namefilter:'${name1}_nr_sold_recurring'
|
||||
sheetname:'bizmodel_tf9
|
||||
|
||||
This results in following revenues and cogs:
|
||||
|
||||
!!!spreadsheet.sheet_wiki
|
||||
namefilter:'${name1}_revenue_setup_total,${name1}_revenue_monthly_total,${name1}_cogs_setup_total,${name1}_cogs_monthly_total,${name1}_cogs_setup_from_perc,${name1}_cogs_monthly_from_perc,${name1}_maintenance_month,
|
||||
${name1}_revenue_monthly_recurring,${name1}_cogs_monthly_recurring'
|
||||
sheetname:'bizmodel_tf9
|
||||
|
||||
resulting revenues:
|
||||
!!!spreadsheet.sheet_wiki
|
||||
namefilter:'${name1}_revenue_total,${name1}_cogs_total'
|
||||
sheetname:'bizmodel_tf9
|
||||
|
||||
|
||||
!!!spreadsheet.graph_line_row rowname:'${name1}_cogs_total' unit:million sheetname:'bizmodel_tf9'
|
||||
|
||||
!!!spreadsheet.graph_line_row rowname:'${name1}_revenue_total' unit:million sheetname:'bizmodel_tf9'
|
||||
|
||||
|
||||
@end //product has_revenue
|
||||
|
||||
@end //loop
|
||||
6
lib/biz/bizmodel/templates/rows_overview.md
Normal file
6
lib/biz/bizmodel/templates/rows_overview.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# Overview of the rows in the biz model sheet
|
||||
|
||||
!!bizmodel.wiki_row_overview
|
||||
|
||||
|
||||
|
||||
24
lib/biz/bizmodel/templates/summary.md
Normal file
24
lib/biz/bizmodel/templates/summary.md
Normal file
@@ -0,0 +1,24 @@
|
||||
- [bizmodel](bizmodel_example:bizmodel.md)
|
||||
- [Revenue](bizmodel_example:revenue.md)
|
||||
- [parameters](bizmodel_example:rows_overview.md)
|
||||
- [revenue_params](bizmodel_example:params/revenue_params.md)
|
||||
- [funding_params](bizmodel_example:params/funding_params.md)
|
||||
- [hr_params](bizmodel_example:params/hr_params.md)
|
||||
- [costs_params](bizmodel_example:params/costs_params.md)
|
||||
- [rows overview](bizmodel_example:rows_overview.md)
|
||||
- [manual](bizmodel_manual:configuration.md)
|
||||
- [widgets](bizmodel_manual:widgets.md)
|
||||
- [graph_bar_row](bizmodel_manual:graph_bar_row.md)
|
||||
- [sheet_tables](bizmodel_manual:sheet_tables.md)
|
||||
- [widget_args](bizmodel_manual:widget_args.md)
|
||||
- [params](bizmodel_manual:configuration.md)
|
||||
- [revenue params](bizmodel_manual:revenue_params.md)
|
||||
- [funding params](bizmodel_manual:funding_params.md)
|
||||
- [hr params](bizmodel_manual:hr_params.md)
|
||||
- [costs params](bizmodel_manual:costs_params.md)
|
||||
- [employees](bizmodel_example:employees.md)
|
||||
- [CTO](bizmodel_example:cto.md)
|
||||
- [concepts](bizmodel_manual:concepts.md)
|
||||
<!-- QUESTION: where is namefilter.md -->
|
||||
<!-- - [namefilter](bizmodel_manual:namefilter.md) -->
|
||||
|
||||
34
lib/biz/investortool/company.v
Normal file
34
lib/biz/investortool/company.v
Normal file
@@ -0,0 +1,34 @@
|
||||
module investortool
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
|
||||
@[heap]
|
||||
pub struct Company {
|
||||
pub mut:
|
||||
oid string
|
||||
short_code string
|
||||
name string
|
||||
current_nr_shares int
|
||||
current_share_value string
|
||||
description string
|
||||
admins []string
|
||||
comments []string
|
||||
}
|
||||
|
||||
fn play_company(mut investortool InvestorTool, mut plbook playbook.PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investortool.company_define')! {
|
||||
mut p := action.params
|
||||
mut company := Company{
|
||||
oid: p.get_default('oid', '')!
|
||||
short_code: p.get_default('short_code', '')!
|
||||
name: p.get_default('name', '')!
|
||||
current_nr_shares: p.get_int_default('current_nr_shares', 0)!
|
||||
current_share_value: p.get_default('current_share_value', '')!
|
||||
description: p.get_default('description', '')!
|
||||
admins: p.get_list_default('admins', [])!
|
||||
comments: p.get_list_default('comments', [])!
|
||||
}
|
||||
println(company)
|
||||
investortool.company_add(company)!
|
||||
}
|
||||
}
|
||||
64
lib/biz/investortool/employee.v
Normal file
64
lib/biz/investortool/employee.v
Normal file
@@ -0,0 +1,64 @@
|
||||
module investortool
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.data.ourtime
|
||||
import freeflowuniverse.herolib.data.currency
|
||||
|
||||
// TODO add currency and ourtime types
|
||||
@[heap]
|
||||
pub struct Employee {
|
||||
pub mut:
|
||||
oid string
|
||||
user_ref string
|
||||
company_ref string
|
||||
status string
|
||||
start_date ?ourtime.OurTime
|
||||
end_date ?ourtime.OurTime
|
||||
salary ?currency.Amount
|
||||
salary_low ?currency.Amount
|
||||
outstanding ?currency.Amount
|
||||
tft_grant f64
|
||||
reward_pool_points int
|
||||
salary_low_date ?ourtime.OurTime
|
||||
comments string
|
||||
}
|
||||
|
||||
fn play_employee(mut investortool InvestorTool, mut plbook playbook.PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investortool.employee_define')! {
|
||||
mut p := action.params
|
||||
mut employee := Employee{
|
||||
oid: p.get_default('oid', '')!
|
||||
user_ref: p.get_default('user_ref', '')!
|
||||
company_ref: p.get_default('company_ref', '')!
|
||||
status: p.get_default('status', '')!
|
||||
start_date: if p.exists('start_date') { p.get_time('start_date')! } else { none }
|
||||
end_date: if p.exists('end_date') { p.get_time('end_date')! } else { none }
|
||||
salary: if p.exists('salary') && p.get('salary')!.trim(' ').len > 0 {
|
||||
p.get_currencyamount('salary')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
salary_low: if p.exists('salary_low') && p.get('salary_low')!.trim(' ').len > 0 {
|
||||
p.get_currencyamount('salary_low')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
outstanding: if p.exists('outstanding')
|
||||
&& p.get('outstanding')!.trim(' ').len > 0 {
|
||||
p.get_currencyamount('outstanding')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
tft_grant: p.get_float_default('tft_grant', 0.0)!
|
||||
reward_pool_points: p.get_int_default('reward_pool_points', 0)!
|
||||
salary_low_date: if p.exists('salary_low_date') {
|
||||
p.get_time('salary_low_date')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
comments: p.get_default('comments', '')!
|
||||
}
|
||||
println(employee)
|
||||
investortool.employee_add(employee)!
|
||||
}
|
||||
}
|
||||
139
lib/biz/investortool/factory.v
Normal file
139
lib/biz/investortool/factory.v
Normal file
@@ -0,0 +1,139 @@
|
||||
module investortool
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
|
||||
// TODO: need to do a global
|
||||
__global (
|
||||
investortools shared map[string]&InvestorTool
|
||||
)
|
||||
|
||||
@[heap]
|
||||
pub struct InvestorTool {
|
||||
pub mut:
|
||||
companies map[string]&Company
|
||||
employees map[string]&Employee
|
||||
investments map[string]&InvestmentShares
|
||||
investors map[string]&Investor
|
||||
users map[string]&User
|
||||
}
|
||||
|
||||
// Factory methods
|
||||
pub fn new() &InvestorTool {
|
||||
return &InvestorTool{}
|
||||
}
|
||||
|
||||
pub fn get() !&InvestorTool {
|
||||
if 'default' in investortools {
|
||||
return investortools['default']
|
||||
}
|
||||
return error("can't find default investor tool")
|
||||
}
|
||||
|
||||
// Factory methods
|
||||
pub fn (mut it InvestorTool) user_new() &User {
|
||||
return &User{}
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) company_new() &Company {
|
||||
return &Company{}
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) employee_new() &Employee {
|
||||
return &Employee{}
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) investment_shares_new() &InvestmentShares {
|
||||
return &InvestmentShares{}
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) investor_new() &Investor {
|
||||
return &Investor{}
|
||||
}
|
||||
|
||||
// Add methods
|
||||
pub fn (mut it InvestorTool) user_add(user &User) ! {
|
||||
it.users[user.oid] = user
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) company_add(company &Company) ! {
|
||||
it.companies[company.oid] = company
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) employee_add(employee &Employee) ! {
|
||||
it.employees[employee.oid] = employee
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) investment_shares_add(investment &InvestmentShares) ! {
|
||||
it.investments[investment.oid] = investment
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) investor_add(investor &Investor) ! {
|
||||
it.investors[investor.oid] = investor
|
||||
}
|
||||
|
||||
pub fn play(mut plbook playbook.PlayBook) !&InvestorTool {
|
||||
mut it := new()
|
||||
play_company(mut it, mut plbook)!
|
||||
play_employee(mut it, mut plbook)!
|
||||
play_investmentshares(mut it, mut plbook)!
|
||||
play_investor(mut it, mut plbook)!
|
||||
play_user(mut it, mut plbook)!
|
||||
|
||||
investortools['default'] = it
|
||||
return it
|
||||
}
|
||||
|
||||
pub fn (mut it InvestorTool) check() ! {
|
||||
// TODO: walk over all objects check all relationships
|
||||
// TODO: make helpers on e.g. employee, ... to get the related ones
|
||||
|
||||
for _, cmp in it.companies {
|
||||
for admin in cmp.admins {
|
||||
if admin !in it.users {
|
||||
return error('admin ${admin} from company ${cmp.oid} is not found')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, emp in it.employees {
|
||||
if emp.user_ref !in it.users {
|
||||
return error('user ${emp.user_ref} from employee ${emp.oid} is not found')
|
||||
}
|
||||
|
||||
if emp.company_ref !in it.companies {
|
||||
return error('company ${emp.company_ref} from employee ${emp.oid} is not found')
|
||||
}
|
||||
}
|
||||
|
||||
for _, inv in it.investments {
|
||||
if inv.company_ref != '' && inv.company_ref !in it.companies {
|
||||
return error('company ${inv.company_ref} from investment ${inv.oid} is not found')
|
||||
}
|
||||
|
||||
if inv.investor_ref !in it.investors {
|
||||
return error('investor ${inv.investor_ref} from investment ${inv.oid} is not found')
|
||||
}
|
||||
}
|
||||
|
||||
for _, inv in it.investors {
|
||||
for user in inv.user_refs {
|
||||
if user !in it.users {
|
||||
return error('user ${user} from investor ${inv.oid} is not found')
|
||||
}
|
||||
}
|
||||
|
||||
for admin in inv.admins {
|
||||
if admin !in it.users {
|
||||
return error('admin ${admin} from investor ${inv.oid} is not found')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, user in it.users {
|
||||
for inv in user.investor_ids {
|
||||
if inv !in it.investors {
|
||||
return error('investor ${inv} from user ${user.oid} is not found')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
55
lib/biz/investortool/investment_share.v
Normal file
55
lib/biz/investortool/investment_share.v
Normal file
@@ -0,0 +1,55 @@
|
||||
module investortool
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.data.ourtime
|
||||
import freeflowuniverse.herolib.data.currency
|
||||
|
||||
@[heap]
|
||||
pub struct InvestmentShares {
|
||||
pub mut:
|
||||
oid string
|
||||
company_ref string
|
||||
investor_ref string
|
||||
nr_shares f64
|
||||
share_class string
|
||||
investment_value ?currency.Amount
|
||||
interest ?currency.Amount
|
||||
description string
|
||||
investment_date ?ourtime.OurTime
|
||||
type_ string
|
||||
comments []string
|
||||
}
|
||||
|
||||
fn play_investmentshares(mut investortool InvestorTool, mut plbook playbook.PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investortool.investment_shares_define')! {
|
||||
mut p := action.params
|
||||
mut investment_shares := InvestmentShares{
|
||||
oid: p.get_default('oid', '')!
|
||||
company_ref: p.get_default('company_ref', '')!.trim(' ')
|
||||
investor_ref: p.get_default('investor_ref', '')!
|
||||
nr_shares: p.get_float_default('nr_shares', 0)!
|
||||
share_class: p.get_default('share_class', '')!
|
||||
investment_value: if p.exists('investment_value')
|
||||
&& p.get('investment_value')!.trim(' ').len > 0 {
|
||||
p.get_currencyamount('investment_value')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
interest: if p.exists('interest') && p.get('interest')!.trim(' ').len > 0 {
|
||||
p.get_currencyamount('interest')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
description: p.get_default('description', '')!
|
||||
investment_date: if p.exists('investment_date') {
|
||||
p.get_time('investment_date')!
|
||||
} else {
|
||||
none
|
||||
}
|
||||
type_: p.get_default('type', '')!
|
||||
comments: p.get_list_default('comments', [])!
|
||||
}
|
||||
println(investment_shares)
|
||||
investortool.investment_shares_add(investment_shares)!
|
||||
}
|
||||
}
|
||||
33
lib/biz/investortool/investor.v
Normal file
33
lib/biz/investortool/investor.v
Normal file
@@ -0,0 +1,33 @@
|
||||
module investortool
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
|
||||
@[heap]
|
||||
pub struct Investor {
|
||||
pub mut:
|
||||
oid string
|
||||
name string
|
||||
code string
|
||||
description string
|
||||
user_refs []string
|
||||
admins []string
|
||||
comments []string
|
||||
}
|
||||
|
||||
fn play_investor(mut investortool InvestorTool, mut plbook playbook.PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investortool.investor_define')! {
|
||||
mut p := action.params
|
||||
mut investor := Investor{
|
||||
oid: p.get_default('oid', '')!
|
||||
name: p.get_default('name', '')!
|
||||
code: p.get_default('code', '')!
|
||||
description: p.get_default('description', '')!
|
||||
user_refs: p.get_list_default('user_refs', [])!
|
||||
admins: p.get_list_default('admins', [])!
|
||||
comments: p.get_list_default('comments', [])!
|
||||
}
|
||||
// println(investor)
|
||||
|
||||
investortool.investor_add(investor)!
|
||||
}
|
||||
}
|
||||
70
lib/biz/investortool/investortool.v
Normal file
70
lib/biz/investortool/investortool.v
Normal file
@@ -0,0 +1,70 @@
|
||||
module investortool
|
||||
|
||||
// struct Investor {
|
||||
// id string
|
||||
// name string
|
||||
// code string
|
||||
// description string
|
||||
// user_refs string
|
||||
// admins string
|
||||
// comments string
|
||||
// }
|
||||
|
||||
// struct InvestmentShares {
|
||||
// id string
|
||||
// company_ref string
|
||||
// investor_ref string
|
||||
// nr_shares f64
|
||||
// share_class string
|
||||
// investment_value string
|
||||
// interest string
|
||||
// description string
|
||||
// investment_date string
|
||||
// type string
|
||||
// comments string
|
||||
// }
|
||||
|
||||
// struct InvestorTool {
|
||||
// mut:
|
||||
// investors []Investor
|
||||
// investments []InvestmentShares
|
||||
// }
|
||||
|
||||
// fn new_investor_tool() InvestorTool {
|
||||
// return InvestorTool{
|
||||
// investors: []
|
||||
// investments: []
|
||||
// }
|
||||
// }
|
||||
|
||||
// fn (mut it InvestorTool) investor_define(params string) ! {
|
||||
// mut p := paramsparser.new(params)!
|
||||
// investor := Investor{
|
||||
// id: p.get_default('id', '')!
|
||||
// name: p.get_default('name', '')!
|
||||
// code: p.get_default('code', '')!
|
||||
// description: p.get_default('description', '')!
|
||||
// user_refs: p.get_default('user_refs', '')!
|
||||
// admins: p.get_default('admins', '')!
|
||||
// comments: p.get_default('comments', '')!
|
||||
// }
|
||||
// it.investors << investor
|
||||
// }
|
||||
|
||||
// fn (mut it InvestorTool) investment_shares_define(params string) ! {
|
||||
// mut p := paramsparser.new(params)!
|
||||
// investment := InvestmentShares{
|
||||
// id: p.get_default('id', '')!
|
||||
// company_ref: p.get_default('company_ref', '')!
|
||||
// investor_ref: p.get_default('investor_ref', '')!
|
||||
// nr_shares: p.get_f64('nr_shares')!
|
||||
// share_class: p.get_default('share_class', '')!
|
||||
// investment_value: p.get_default('investment_value', '')!
|
||||
// interest: p.get_default('interest', '')!
|
||||
// description: p.get_default('description', '')!
|
||||
// investment_date: p.get_default('investment_date', '')!
|
||||
// type: p.get_default('type', '')!
|
||||
// comments: p.get_default('comments', '')!
|
||||
// }
|
||||
// it.investments << investment
|
||||
// }
|
||||
30
lib/biz/investortool/investortool2.v
Normal file
30
lib/biz/investortool/investortool2.v
Normal file
@@ -0,0 +1,30 @@
|
||||
// struct User {
|
||||
// id string
|
||||
// usercode string
|
||||
// name string
|
||||
// investor_ids string
|
||||
// status string
|
||||
// info_links string
|
||||
// telnrs string
|
||||
// emails string
|
||||
// secret string
|
||||
// }
|
||||
|
||||
// struct InvestorTool {
|
||||
// mut:
|
||||
// users []User
|
||||
// // ... other fields like investors and investments
|
||||
// }
|
||||
|
||||
// fn (mut it InvestorTool) user_define(params string) ! {
|
||||
// mut p := paramsparser.new(params)!
|
||||
// user := User{
|
||||
// id: p.get_default('id', '')!
|
||||
// usercode: p.get_default('usercode', '')!
|
||||
// name: p.get_default('name', '')!
|
||||
// investor_ids: p.get_default('investor_ids', '')!
|
||||
// status: p.get_default('status', '')!
|
||||
// info_links: p.get_default('info_links', '')!
|
||||
// telnrs: p.get_default('telnrs', '')!
|
||||
// emails: p.get_default('emails', '')!
|
||||
// secret: p.get_
|
||||
31
lib/biz/investortool/investortool3.v
Normal file
31
lib/biz/investortool/investortool3.v
Normal file
@@ -0,0 +1,31 @@
|
||||
// struct Company {
|
||||
// id string
|
||||
// short_code string
|
||||
// name string
|
||||
// current_nr_shares int
|
||||
// current_share_value string
|
||||
// description string
|
||||
// admins string
|
||||
// comments string
|
||||
// }
|
||||
|
||||
// struct InvestorTool {
|
||||
// mut:
|
||||
// companies []Company
|
||||
// // ... other fields like users, investors, and investments
|
||||
// }
|
||||
|
||||
// fn (mut it InvestorTool) company_define(params string) ! {
|
||||
// mut p := paramsparser.new(params)!
|
||||
// company := Company{
|
||||
// id: p.get_default('id', '')!
|
||||
// short_code: p.get_default('short_code', '')!
|
||||
// name: p.get_default('name', '')!
|
||||
// current_nr_shares: p.get_int('current_nr_shares')!
|
||||
// current_share_value: p.get_default('current_share_value', '')!
|
||||
// description: p.get_default('description', '')!
|
||||
// admins: p.get_default('admins', '')!
|
||||
// comments: p.get_default('comments', '')!
|
||||
// }
|
||||
// it.companies << company
|
||||
// }
|
||||
4
lib/biz/investortool/readme.md
Normal file
4
lib/biz/investortool/readme.md
Normal file
@@ -0,0 +1,4 @@
|
||||
|
||||
|
||||
use by ThreeFold to work with our administration around Shares, ...
|
||||
|
||||
4
lib/biz/investortool/simulator/captable.v
Normal file
4
lib/biz/investortool/simulator/captable.v
Normal file
@@ -0,0 +1,4 @@
|
||||
module investorsimulator
|
||||
|
||||
pub struct CapTable {
|
||||
}
|
||||
53
lib/biz/investortool/simulator/play.v
Normal file
53
lib/biz/investortool/simulator/play.v
Normal file
@@ -0,0 +1,53 @@
|
||||
module investorsimulator
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||
import freeflowuniverse.herolib.biz.investortool
|
||||
|
||||
pub fn (mut s Simulator) play(mut plbook PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investorsimulator.user_view_add')! {
|
||||
/*
|
||||
!!!investorsimulator.user_view_add
|
||||
view: view1
|
||||
oid: abc
|
||||
*/
|
||||
mut p := action.params
|
||||
view := p.get_default('view', 'default')!
|
||||
user_oid := p.get('oid')!
|
||||
|
||||
user := if user_oid in s.it.users {
|
||||
s.it.users[user_oid]
|
||||
} else {
|
||||
return error('user with oid ${user_oid} is not found')
|
||||
}
|
||||
|
||||
mut v := if view in s.user_views {
|
||||
s.user_views[view]
|
||||
} else {
|
||||
s.user_views[view] = []
|
||||
s.user_views[view]
|
||||
}
|
||||
|
||||
v << user
|
||||
}
|
||||
|
||||
for mut action in plbook.find(filter: 'investorsimulator.investor_view_add')! {
|
||||
mut p := action.params
|
||||
view := p.get_default('view', 'default')!
|
||||
investor_oid := p.get('oid')!
|
||||
|
||||
investor := if investor_oid in s.it.investors {
|
||||
s.it.investors[investor_oid]
|
||||
} else {
|
||||
return error('investor with oid ${investor_oid} is not found')
|
||||
}
|
||||
|
||||
mut v := if view in s.investor_views {
|
||||
s.investor_views[view]
|
||||
} else {
|
||||
s.investor_views[view] = []
|
||||
s.investor_views[view]
|
||||
}
|
||||
|
||||
v << user
|
||||
}
|
||||
}
|
||||
50
lib/biz/investortool/simulator/simulator.v
Normal file
50
lib/biz/investortool/simulator/simulator.v
Normal file
@@ -0,0 +1,50 @@
|
||||
module investorsimulator
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||
import freeflowuniverse.herolib.biz.investortool
|
||||
|
||||
__global (
|
||||
simulators map[string]Simulator
|
||||
)
|
||||
|
||||
@[params]
|
||||
pub struct NewSimulatorArgs {
|
||||
pub mut:
|
||||
name string @[required]
|
||||
data_path string @[requried]
|
||||
}
|
||||
|
||||
pub struct Simulator {
|
||||
pub mut:
|
||||
name string
|
||||
it &investortool.InvestorTool
|
||||
user_views map[string][]&investortool.User
|
||||
investor_views map[string][]&investortool.Investor
|
||||
// captable_views map[string]CapTable
|
||||
}
|
||||
|
||||
pub fn new(args NewSimulatorArgs) !Simulator {
|
||||
mut plbook := playbook.new(path: args.data_path)!
|
||||
mut it := investortool.play(mut plbook)!
|
||||
|
||||
return Simulator{
|
||||
name: args.name
|
||||
it: it
|
||||
user_views: map[string][]&investortool.User{}
|
||||
investor_views: map[string][]&investortool.Investor{}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn play(mut plbook PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investorsimulator.run')! {
|
||||
name := action.params.get_default('name', 'default')!
|
||||
data_path := action.params.get('data_path')!
|
||||
mut sim := new(name, data_path)!
|
||||
|
||||
lock simulators {
|
||||
simulators[name] = sim
|
||||
}
|
||||
|
||||
sim.play(mut plbook)!
|
||||
}
|
||||
}
|
||||
7
lib/biz/investortool/simulator/templates/investor.md
Normal file
7
lib/biz/investortool/simulator/templates/investor.md
Normal file
@@ -0,0 +1,7 @@
|
||||
# Investors
|
||||
|
||||
@for investor in it.investors
|
||||
## @{investor.name}
|
||||
- Investor Code: @{investor.code}
|
||||
- Admins IDs: (x@{investor.admins})
|
||||
@end
|
||||
9
lib/biz/investortool/simulator/templates/user.md
Normal file
9
lib/biz/investortool/simulator/templates/user.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Users
|
||||
|
||||
@for user in it.users
|
||||
## @{user.name}
|
||||
- User Code: @{user.code}
|
||||
- Status: @{user.status}
|
||||
- Telephone Numbers: @{user.telnrs}
|
||||
- Emails: @{user.emails}
|
||||
@end
|
||||
37
lib/biz/investortool/user.v
Normal file
37
lib/biz/investortool/user.v
Normal file
@@ -0,0 +1,37 @@
|
||||
module investortool
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
|
||||
@[heap]
|
||||
pub struct User {
|
||||
pub mut:
|
||||
oid string
|
||||
usercode string
|
||||
name string
|
||||
investor_ids []string
|
||||
status string
|
||||
info_links []string
|
||||
telnrs []string
|
||||
emails []string
|
||||
secret string
|
||||
}
|
||||
|
||||
fn play_user(mut investortool InvestorTool, mut plbook playbook.PlayBook) ! {
|
||||
for mut action in plbook.find(filter: 'investortool.user_define')! {
|
||||
mut p := action.params
|
||||
mut user := User{
|
||||
oid: p.get_default('oid', '')!
|
||||
usercode: p.get_default('usercode', '')!
|
||||
name: p.get_default('name', '')!
|
||||
investor_ids: p.get_list_default('investor_ids', [])!
|
||||
status: p.get_default('status', '')!
|
||||
info_links: p.get_list_default('info_links', [])!
|
||||
telnrs: p.get_telnrs_default('telnrs', [])!
|
||||
emails: p.get_emails_default('emails', [])!
|
||||
secret: p.get_default('secret', '')!
|
||||
}
|
||||
// println(user)
|
||||
investortool.user_add(user)!
|
||||
// TODO: now we need to do some mapping to make sure telnr's and emails are normalized (no . in tel nr, no spaces ...)
|
||||
}
|
||||
}
|
||||
132
lib/biz/spreadsheet/calc_test.v
Normal file
132
lib/biz/spreadsheet/calc_test.v
Normal file
@@ -0,0 +1,132 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.data.currency
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// End-to-end test of row creation, extrapolation, recurring windows,
// row actions (add/substract/max/min/aggregate), delay, and the
// year/quarter aggregation + wiki export of a sheet.
fn test_sheets() {
	mut sh := sheet_new() or { panic(err) }

	// '5:100,55:1000' -> 100 at month 5 growing linearly to 1000 at month 55
	mut nrnodes := sh.row_new(
		name: 'nrnodes'
		growth: '5:100,55:1000'
		tags: 'cat:nodes color:yellow urgent'
	)!
	// mixed currencies in one growth string (EUR/AED) get converted to the sheet currency
	mut curtest := sh.row_new(name: 'curtest', growth: '1:100EUR,55:1000AED,56:0')!

	mut nrnodes2 := sh.row_new(
		name: 'nrnodes2'
		growth: '5:100,55:1000,60:500'
		tags: 'cat:nodes delay color:green'
	)!

	// single anchor point: every month gets the same value
	mut nrnodes3 := sh.row_new(
		name: 'nrnodes3'
		growth: '0:100'
	)!

	mut incrementalrow := sh.row_new(name: 'incrementalrow', growth: '0:0,60:59')!

	// extrapolate:false -> smartfill: only the mentioned months are set, rest become 0
	mut smartrow := sh.row_new(name: 'oem', growth: '10:1000USD,40:2000', extrapolate: false)!

	assert smartrow.cells[8].val == 0.0
	assert smartrow.cells[10].val == 1000.0
	assert smartrow.cells[40].val == 2000.0

	console.print_debug('${nrnodes}')

	console.print_debug('${incrementalrow}')

	// recurring: rolling window sum, optionally shifted by delaymonths
	mut toincrement := sh.row_new(name: 'incr2', growth: '0:0,60:59')!
	inc1row := toincrement.recurring(name: 'testrecurring1', delaymonths: 0)!
	inc2row := toincrement.recurring(name: 'testrecurring2', delaymonths: 3)!

	console.print_debug('${toincrement}')

	a1 := toincrement.look_forward_avg(50, 20)!
	a2 := toincrement.look_forward_avg(12, 12)!

	// console.print_debug(a1)
	// console.print_debug(a2)

	// if true{panic("sss")}

	console.print_debug(inc1row)
	console.print_debug(inc2row)

	assert inc1row.cells[4].val == 10.0
	assert inc2row.cells[7].val == 10.0

	// if true{panic("sds")}

	// SUM

	mut res := []Row{}

	// scalar actions: apply val to every cell
	res << nrnodes.action(name: 'sum', action: .add, val: 100)!
	assert res.last().cells[1].val == nrnodes.cells[1].val + 100.0
	assert res.last().cells[30].val == nrnodes.cells[30].val + 100.0

	res << nrnodes.action(name: 'minus', action: .substract, val: 100)!
	assert res.last().cells[1].val == nrnodes.cells[1].val - 100.0
	assert res.last().cells[30].val == nrnodes.cells[30].val - 100.0

	// row actions: combine cell-by-cell with other rows
	res << nrnodes.action(name: 'sum2', action: .add, rows: [incrementalrow])!
	assert res.last().cells[20].val == nrnodes.cells[20].val + 20.0

	res << nrnodes.action(name: 'minus2', action: .substract, rows: [incrementalrow])!
	assert res.last().cells[20].val == nrnodes.cells[20].val - 20.0

	// the same row can be applied more than once
	res << nrnodes.action(name: 'minus3', action: .substract, rows: [incrementalrow, incrementalrow])!
	assert res.last().cells[20].val == nrnodes.cells[20].val - 40.0

	res << nrnodes.action(name: 'max1', action: .max, rows: [incrementalrow])!
	assert res.last().cells[2].val == 2.0

	// max with scalar: per-cell floor at val
	res << nrnodes3.action(name: 'max2', action: .max, val: 3.0)!
	assert res.last().cells[20].val == 100.0

	res << nrnodes3.action(name: 'max3', action: .max, val: 300.0)!
	assert res.last().cells[20].val == 300.0

	res << nrnodes3.action(name: 'min1', action: .min, val: 1.0)!
	assert res.last().cells[20].val == 1.0

	// aggregate: running (cumulative) sum: 0+1+2+3 = 6 at month 3
	res << incrementalrow.action(name: 'aggr1', action: .aggregate, val: 1.0)!
	assert res.last().cells[3].val == 6.0

	console.print_debug(res.last())

	// delay shifts values 3 months to the right, zero-filling the front
	incrementalrow.delay(3)!
	assert incrementalrow.cells[6].val == 3

	// mut nrnodessum := nrnodes.add('nrnodessum', nrnodes2)!

	// aggregate monthly rows into year/quarter views, filtered by tag
	mut shyear := sh.toyear(name: 'shyear', includefilter: ['cat:nodes'])!
	mut shq := sh.toquarter(name: 'nrnodesq', includefilter: ['cat:nodes'])!

	console.print_debug(shyear)
	console.print_debug(shq)
	// r:=shq.json()!
	// console.print_debug(r)
	wiki := sh.wiki(description: 'is my description.')!
	console.print_debug(wiki)
	// panic('test1')
}
|
||||
|
||||
// Verifies that growth strings expressed in AED/EUR are converted into
// the sheet's base currency (USD) using the configured exchange rates.
fn test_curr() {
	mut sh := sheet_new(name: 'test2') or { panic(err) }

	// fixed rates so the assertions below are deterministic
	currency.set_default('AED', 0.25)!
	currency.set_default('EUR', 0.9)!

	mut pricetft := sh.row_new(name: 'something', growth: '0:100aed,55:1000eur')!

	// map of references needs `!` (was missing, the line below it already used it)
	console.print_debug(sh.rows['something']!.cells[0])
	assert sh.rows['something']!.cells[0].val == 25.0 // 100 AED * 0.25
	assert sh.rows['something']!.cells[60 - 1].val == 900.0 // 1000 EUR * 0.9

	// TODO: we need to create tests for it

	console.print_debug(sh)
	// removed stray `panic('test1')` debug residue — it made this test always fail
}
|
||||
35
lib/biz/spreadsheet/cell.v
Normal file
35
lib/biz/spreadsheet/cell.v
Normal file
@@ -0,0 +1,35 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.data.currency
|
||||
|
||||
// Cell is one value in a Row; its index in the row is the month number.
pub struct Cell {
pub mut:
	val   f64  // numeric value, stored in the sheet's base currency
	row   &Row @[skip; str: skip] // back-reference to the owning row (excluded from serialization/printing)
	empty bool = true // true until a value is set; extrapolation only overwrites empty cells
}
|
||||
|
||||
// set parses v as a currency amount (e.g. '100', '10usd', '30aed'),
// converts it to the sheet's base currency and stores the result.
// Marks the cell as non-empty. Errors when v cannot be parsed.
pub fn (mut c Cell) set(v string) ! {
	// means we insert a currency so need to do the exchange
	mut amount := currency.amount_get(v)!
	assert amount.currency.name != ''
	mut amount2 := amount.exchange(c.row.sheet.currency)! // do the exchange to the local currency
	c.val = amount2.val
	c.empty = false
}
|
||||
|
||||
// add increments the cell value by v (no currency conversion)
// and marks the cell as non-empty.
pub fn (mut c Cell) add(v f64) {
	c.empty = false
	c.val = c.val + v
}
|
||||
|
||||
// repr renders the cell for display: '-' when empty, otherwise the
// value formatted according to the row's representation type.
pub fn (mut c Cell) repr() string {
	return if c.empty {
		'-'
	} else {
		float_repr(c.val, c.row.reprtype)
	}
}

// str makes a Cell printable; delegates to repr.
pub fn (mut c Cell) str() string {
	return c.repr()
}
|
||||
116
lib/biz/spreadsheet/extrapolate.v
Normal file
116
lib/biz/spreadsheet/extrapolate.v
Normal file
@@ -0,0 +1,116 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// smartstring is something like 3:2,10:5 means end month 3 we start with 2, it grows to 5 on end month 10 .
|
||||
// the cells out of the mentioned ranges are not filled if they are already set .
|
||||
// the cells which are empty at start of row will become 0 .
|
||||
// the cells which are empty at the back will just be same value as the last one .
|
||||
// currencies can be used e.g. 3:10usd,20:30aed (so we can even mix) .
|
||||
// first cell is 1, the start is 0 (month 0) .
|
||||
// if the smartstr, is empty then will use existing values in the row to extra/intra polate, the empty values will be filled in
|
||||
// extrapolate fills the row from a smart growth string like '3:2,10:5'
// (value 2 at month 3, linearly interpolated up to 5 at month 10).
// Cells before the first anchor become 0, cells after the last anchor
// repeat the last value. Anchor values may carry currencies ('3:10usd').
// Errors on malformed parts or negative month indices.
pub fn (mut r Row) extrapolate(smartstr string) ! {
	// phase 1: put the anchor values in the row
	// console.print_debug("extrapolate: ${smartstr}")
	for mut part in smartstr.split(',') {
		part = part.trim_space()
		if part.contains(':') {
			splitted := part.split(':')
			if splitted.len != 2 {
				return error("smartextrapolate needs '3:2,10:5' as format, now ${smartstr} ")
			}
			mut x := splitted[0].int()
			if x < 0 {
				return error('Cannot do smartstr, because the X is out of scope.\n${smartstr}')
			}
			// clamp anchors past the sheet width onto the last column
			if x > r.sheet.nrcol - 1 {
				x = r.sheet.nrcol - 1
			}
			r.cells[x].set(splitted[1])!
		}
	}

	// phase 2: linear interpolation between consecutive non-empty cells
	mut xlast := 0 // remembers where there was last non empty value
	mut has_previous_value := false
	mut xlastval := 0.0 // the value at that position
	mut xlastwidth := 0 // need to know how fast to go up from the xlast to xnew
	mut xnewval := 0.0
	// console.print_debug(r)
	for x in 0 .. r.cells.len {
		// console.print_debug("$x empty:${r.cells[x].empty} xlastwidth:$xlastwidth xlastval:$xlastval xlast:$xlast")
		// leading empty cells (before the first anchor) are skipped here;
		// they get filled with 0 in phase 3
		if r.cells[x].empty && !has_previous_value {
			continue
		}
		has_previous_value = true
		if r.cells[x].empty == false && xlastwidth == 0 {
			// we get new value, just go to next
			xlast = x
			xlastval = r.cells[x].val
			xlastwidth = 0
			// console.print_debug(" lastval:$xlastval")
			continue // no need to do anything
		}
		// if we get here we get an empty after having a non empty before
		xlastwidth += 1
		if r.cells[x].empty == false {
			// now we find the next one not being empty so we need to do the interpolation
			xnewval = r.cells[x].val
			// now we need to walk over the inbetween and set the values
			yincr := (xnewval - xlastval) / xlastwidth
			mut yy := xlastval
			// console.print_debug(" yincr:$yincr")
			for xx in (xlast + 1) .. x {
				yy += yincr
				// NOTE(review): values are rounded to 2 decimals when interpolated
				r.cells[xx].set('${yy:.2f}')!
			}
			xlast = x
			xlastval = xnewval
			xlastwidth = 0
			xnewval = 0.0
		}
	}
	// console.print_debug("ROW1:$r")

	// phase 3: fill remaining empty cells by carrying the last value forward
	// (leading empties get 0.0 because xlastval restarts at 0)
	xlastval = 0.0
	for x in 0 .. r.cells.len {
		if r.cells[x].empty == false {
			xlastval = r.cells[x].val
			continue
		}
		r.cells[x].set('${xlastval:.2f}')!
	}

	// console.print_debug("ROW:$r")
	// if true{panic("s")}
}
|
||||
|
||||
// something like 3:2,10:5 means end month 3 we set 2, month 10 5
|
||||
// there i no interpolation, all other fields are set on 0
|
||||
// smartfill sets only the explicitly mentioned months from a string like
// '3:2,10:5' (value 2 at month 3, value 5 at month 10); there is NO
// interpolation — all other cells are set to 0. A part without ':'
// is stored in month 0. Errors on malformed parts or out-of-range months.
pub fn (mut r Row) smartfill(smartstr string) ! {
	// console.print_debug("smartfill: ${smartstr}")
	for mut part in smartstr.split(',') {
		part = part.trim_space()
		if part.contains(':') {
			splitted := part.split(':')
			if splitted.len != 2 {
				return error("smartextrapolate needs '3:2,10:5' as format, now ${smartstr} ")
			}
			x := splitted[0].int()
			if x < 0 {
				return error('Cannot do smartstr, because the X is out of scope.\n${smartstr}')
			}
			// fixed off-by-one: valid cell indices are 0 .. nrcol-1; the old
			// check (`x > r.sheet.nrcol`) let x == nrcol through and panicked
			// on the index below (extrapolate() clamps at nrcol-1 as well)
			if x > r.sheet.nrcol - 1 {
				return error('Cannot do smartstr, because the X is out of scope, needs to be 1+.\n${smartstr}')
			}
			r.cells[x].set(splitted[1])!
		} else {
			// bare value without a month: goes into the first column
			r.cells[0].set(part)!
		}
	}
	// everything not explicitly set becomes 0
	for x in 0 .. r.cells.len {
		if r.cells[x].empty {
			r.cells[x].set('0.0')!
		}
	}
}
|
||||
60
lib/biz/spreadsheet/factory.v
Normal file
60
lib/biz/spreadsheet/factory.v
Normal file
@@ -0,0 +1,60 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.data.currency
|
||||
|
||||
// global registry of sheets, shared across threads (guarded by rlock/lock)
__global (
	sheets shared map[string]&Sheet
)

@[params]
pub struct SheetNewArgs {
pub mut:
	name          string = 'main' // key under which the sheet is registered globally
	nrcol         int    = 60 // number of columns, i.e. months (60 = 5 years)
	visualize_cur bool   = true // if we want to show e.g. $44.4 in a cell or just 44.4
	curr          string = 'usd' // preferred currency to work with
}
|
||||
|
||||
// get a sheet
|
||||
// has y nr of rows, each row has a name
|
||||
// each row has X nr of columns which represent months
|
||||
// we can do manipulations with the rows, is very useful for e.g. business planning
|
||||
// params:
|
||||
// nrcol int = 60
|
||||
// visualize_cur bool //if we want to show e.g. $44.4 in a cell or just 44.4
|
||||
// sheet_new creates a Sheet, registers it in the global registry under
// args.name (replacing any previous sheet with that name) and returns it.
// Errors when args.curr is not a known currency.
pub fn sheet_new(args SheetNewArgs) !&Sheet {
	mut sh := Sheet{
		nrcol: args.nrcol
		params: SheetParams{
			visualize_cur: args.visualize_cur
		}
		// all cell values will be stored in this base currency
		currency: currency.get(args.curr)!
		name: args.name
	}
	sheet_set(&sh)
	return &sh
}
|
||||
|
||||
// get sheet from global
|
||||
// sheet_get returns the sheet registered under name in the global
// registry, or an error when no such sheet exists.
pub fn sheet_get(name string) !&Sheet {
	rlock sheets {
		if name in sheets {
			return sheets[name] or { return error('Sheet ${name} not found') }
		}
	}
	// fixed typo in the error message ("cann't" -> "can't")
	return error("can't find sheet:'${name}' in global sheets")
}
|
||||
|
||||
// remember sheet in global
|
||||
// sheet_set stores sh in the global registry keyed by its name,
// overwriting any sheet previously registered under that name.
pub fn sheet_set(sh &Sheet) {
	key := sh.name
	lock sheets {
		sheets[key] = sh
	}
}
|
||||
|
||||
// sheets_keys lists the names of all globally registered sheets.
pub fn sheets_keys() []string {
	mut names := []string{}
	rlock sheets {
		names = sheets.keys()
	}
	return names
}
|
||||
33
lib/biz/spreadsheet/number.v
Normal file
33
lib/biz/spreadsheet/number.v
Normal file
@@ -0,0 +1,33 @@
|
||||
module spreadsheet
|
||||
|
||||
import math
|
||||
|
||||
// ReprType selects how a row's values are rendered.
pub enum ReprType {
	number // will use k, m, ... suffixes for thousands/millions
	currency // same scaling; currency symbol handling happens at sheet level
}
|
||||
|
||||
// represent a
|
||||
// float_repr formats nr_ compactly for display: values above one million
// are scaled down with an 'm' suffix, above one thousand with 'k', and
// the number of decimals shrinks as the magnitude grows
// (>1000 -> 0 decimals, >100 -> 1, otherwise 2).
pub fn float_repr(nr_ f64, reprtype ReprType) string {
	// both current representation types use the same scaling; kept as a
	// guard so a future variant falls through to an empty string
	if reprtype != .number && reprtype != .currency {
		return ''
	}
	mut nr := nr_
	nr_abs := math.abs(nr)
	mut suffix := ''
	if nr_abs > 1000 * 1000 {
		nr = nr / 1000000
		suffix = 'm'
	} else if nr_abs > 1000 {
		nr = nr / 1000
		suffix = 'k'
	}
	if nr > 1000 {
		return '${nr:.0}${suffix}'
	}
	if nr > 100 {
		return '${nr:.1}${suffix}'
	}
	return '${nr:.2}${suffix}'
}
|
||||
116
lib/biz/spreadsheet/playmacro.v
Normal file
116
lib/biz/spreadsheet/playmacro.v
Normal file
@@ -0,0 +1,116 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.core.playbook { Action }
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// playmacro executes one spreadsheet wiki/chart macro from a heroscript
// action and returns the generated markdown/chart content.
// Requires a 'sheetname' param pointing at a globally registered sheet;
// the action name selects which renderer runs (see supported_actions).
pub fn playmacro(action Action) !string {
	console.print_green('playmacro for worksheet')

	sheet_name := action.params.get('sheetname') or {
		return error("can't find sheetname from spreadsheet macro's, define it as sheetname:... .")
	}
	mut sh := sheet_get(sheet_name) or {
		return error("Couldn't find sheetname: ${sheet_name} \nerror:\n${err}")
	}

	// sheet_name := action.params.get('sheetname') or {return error("can't find sheetname from sheet.chart macro.")}
	// mut sh:= sheet_get(sheet_name)!
	// console.print_debug(sh)

	supported_actions := ['sheet_wiki', 'graph_pie_row', 'graph_line_row', 'graph_bar_row',
		'graph_title_row', 'wiki_row_overview']

	if action.name !in supported_actions {
		return error("Couldn't find macro ${action.name} for spreadsheet:${sheet_name}.")
	}
	// parameters recognized below (all optional, defaults applied):
	// rowname string // if specified then its one name
	// namefilter []string // only include the exact names as secified for the rows
	// includefilter []string // to use with tags filter e.g. ['location:belgium_*'] //would match all words starting with belgium
	// excludefilter []string
	// period_type PeriodType // year, month, quarter
	// aggregate bool = true // if more than 1 row matches should we aggregate or not
	// aggregatetype RowAggregateType = .sum // important if used with include/exclude, because then we group
	// unit UnitType
	// title string
	// title_sub string
	// size string
	// rowname_show bool = true // show the name of the row
	// description string

	mut p := action.params

	rowname := p.get_default('rowname', '')!
	namefilter := p.get_list_default('namefilter', [])!
	includefilter := p.get_list_default('includefilter', [])!
	excludefilter := p.get_list_default('excludefilter', [])!
	size := p.get_default('size', '')!
	title_sub := p.get_default('title_sub', '')!
	title := p.get_default('title', '')!
	// unknown unit strings silently fall back to .normal
	unit := p.get_default('unit', 'normal')!
	unit_e := match unit {
		'thousand' { UnitType.thousand }
		'million' { UnitType.million }
		'billion' { UnitType.billion }
		else { UnitType.normal }
	}
	// period_type is validated twice (string list + enum sentinel) on purpose:
	// the second check guards against future match-arm drift
	period_type := p.get_default('period_type', 'year')!
	if period_type !in ['year', 'month', 'quarter'] {
		return error('period type needs to be in year,month,quarter')
	}
	period_type_e := match period_type {
		'year' { PeriodType.year }
		'month' { PeriodType.month }
		'quarter' { PeriodType.quarter }
		else { PeriodType.error }
	}
	if period_type_e == .error {
		return error('period type needs to be in year,month,quarter')
	}

	rowname_show := p.get_default_true('rowname_show')
	descr_show := p.get_default_true('descr_show')

	// bundle everything into the row-selection/rendering arguments
	args := RowGetArgs{
		rowname: rowname
		namefilter: namefilter
		includefilter: includefilter
		excludefilter: excludefilter
		period_type: period_type_e
		unit: unit_e
		title_sub: title_sub
		title: title
		size: size
		rowname_show: rowname_show
		descr_show: descr_show
	}

	mut content := ''

	// dispatch to the renderer matching the macro name
	match action.name {
		// which action is associated with wiki() method
		'sheet_wiki' {
			content = sh.wiki(args) or { panic(err) }
		}
		'graph_title_row' {
			content = sh.wiki_title_chart(args)
		}
		'graph_line_row' {
			content = sh.wiki_line_chart(args)!
		}
		'graph_bar_row' {
			content = sh.wiki_bar_chart(args)!
		}
		'graph_pie_row' {
			content = sh.wiki_pie_chart(args)!
		}
		'wiki_row_overview' {
			content = sh.wiki_row_overview(args)!
		}
		else {
			return error('unexpected action name ${action.name} for sheet macro.')
		}
	}

	content += '\n<BR>\n'
	return content
}
|
||||
63
lib/biz/spreadsheet/readme.md
Normal file
63
lib/biz/spreadsheet/readme.md
Normal file
@@ -0,0 +1,63 @@
|
||||
# Sheet
|
||||
|
||||
The idea is to have a module which allows us to make software representation of a spreadsheet.
|
||||
|
||||
The spreadsheet has a currency linked to it and also multi currency behavior, it also has powerful extra/intrapolation possibilities.
|
||||
|
||||
A sheet has following format
|
||||
|
||||
If we have 60 months representation (5 year), we have 60 columns
|
||||
|
||||
- rows, each row represent something e.g. salary for a person per month over 5 years
|
||||
- the rows can be grouped per tags
|
||||
- each row has 60 cols = cells, each cell has a value
|
||||
- each row has a name
|
||||
|
||||
A sheet can also be represented per year or per quarter, if per year then there would be 5 columns only.
|
||||
|
||||
There is also functionality to export a sheet to wiki (markdown) or html representation.
|
||||
|
||||
## offline
|
||||
|
||||
if you need to work offline e.g. for development do
|
||||
|
||||
```bash
|
||||
export OFFLINE=1
|
||||
```
|
||||
|
||||
## Macros
|
||||
|
||||
|
||||
|
||||
```js
|
||||
!!sheet.graph_pie_row sheetname:'tfgridsim_run1'
|
||||
rowname:'revenue_usd'
|
||||
period_type:quarter
|
||||
title:'a title'
|
||||
```
|
||||
|
||||
- supported_actions:
|
||||
- 'sheet_wiki'
|
||||
- 'graph_pie_row' = pie chart for 1 row
|
||||
- 'graph_line_row'
|
||||
- 'graph_bar_row'
|
||||
- 'graph_title_row'
|
||||
- 'wiki_row_overview'
|
||||
|
||||
|
||||
Properties to use in heroscript
|
||||
|
||||
- rowname string - if specified then its one name
|
||||
- namefilter []string - only include the exact names as specified for the rows
|
||||
- includefilter []string - to use with tags filter e.g. ['location:belgium_*'] //would match all words starting with belgium
|
||||
- excludefilter []string
|
||||
- period_type PeriodType - year, month, quarter
|
||||
- aggregate bool = true - if more than 1 row matches should we aggregate or not
|
||||
- aggregatetype RowAggregateType = .sum - important if used with include/exclude, because then we group
|
||||
- unit UnitType
|
||||
- title string
|
||||
- title_sub string
|
||||
- size string
|
||||
- rowname_show bool = true - show the name of the row
|
||||
- descr_show bool = false - show the description of the row; when enabled, rowname_show is disabled
|
||||
- description string
|
||||
184
lib/biz/spreadsheet/row.v
Normal file
184
lib/biz/spreadsheet/row.v
Normal file
@@ -0,0 +1,184 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// Row is one line of a Sheet: a named series of monthly values (cells).
@[heap]
pub struct Row {
pub mut:
	name          string // lowercase key under which the row is stored in the sheet
	alias         string // optional alternative display name
	description   string
	cells         []Cell // one cell per sheet column (month)
	sheet         &Sheet @[skip; str: skip] // owning sheet (excluded from serialization/printing)
	aggregatetype RowAggregateType // how months combine when going to quarter/year
	reprtype      ReprType // how to represent it
	tags          string // space-separated key:value tags used for filtering/grouping
	subgroup      string
}
|
||||
|
||||
// pub enum RowType{
|
||||
// cur
|
||||
// integer
|
||||
// float
|
||||
// }
|
||||
|
||||
// RowAggregateType defines how a row's monthly values are combined
// when the sheet is collapsed to quarters or years.
pub enum RowAggregateType {
	unknown // treated as .sum when a row is created
	sum
	avg
	max
	min
}
|
||||
|
||||
@[params]
pub struct RowNewParams {
pub mut:
	name          string // required; lowered before storing
	growth        string // smart growth string, e.g. '3:2,10:5' (see extrapolate/smartfill)
	aggregatetype RowAggregateType // defaults to .sum when left .unknown
	tags          string // e.g. 'cat:nodes color:yellow' for later filtering
	descr         string
	subgroup      string
	extrapolate   bool = true // true: interpolate between anchors; false: smartfill (zeros elsewhere)
}
||||
|
||||
// get a row with a certain name
|
||||
// you can use the smart extrapolate function to populate the row
|
||||
// params:
|
||||
// name string
|
||||
// growth string (this is input for the extrapolate function)
|
||||
// aggregatetype e.g. sum,avg,max,min is used to go from months to e.g. year or quarter
|
||||
// tags []string e.g. ["hr","hrdev"] attach a tag to a row, can be used later to group
|
||||
// smart exptrapolation is 3:2,10:5 means end month 3 we start with 2, it grows to 5 on end month 10
|
||||
// row_new creates a row in the sheet, allocates one empty cell per column
// and, when a growth string is given, populates the cells via
// extrapolate() (interpolating) or smartfill() (exact months only).
// The row name is lowered and must be non-empty; an existing row with
// the same name is overwritten in the sheet's map.
pub fn (mut s Sheet) row_new(args_ RowNewParams) !&Row {
	mut args := args_
	if args.aggregatetype == .unknown {
		args.aggregatetype = .sum
	}
	name := args.name.to_lower()
	if name.trim_space() == '' {
		return error('name cannot be empty')
	}
	mut r := Row{
		sheet: &s
		name: name
		aggregatetype: args.aggregatetype
		tags: args.tags
		description: args.descr
		subgroup: args.subgroup
	}
	s.rows[name] = &r
	// one empty cell per month, each pointing back at its row
	for _ in 0 .. s.nrcol {
		r.cells << Cell{
			row: &r
		}
	}
	assert r.cells.len == s.nrcol
	if args.growth.len > 0 {
		if args.extrapolate {
			// a bare value like '100' means 'constant from month 0'
			if !args.growth.contains(',') && !args.growth.contains(':') {
				args.growth = '0:${args.growth}'
			}
			r.extrapolate(args.growth)!
		} else {
			r.smartfill(args.growth)!
		}
	}
	return &r
}
|
||||
|
||||
// cell_get returns a reference to the cell at 0-based column colnr,
// or an error when colnr is out of range.
pub fn (mut r Row) cell_get(colnr int) !&Cell {
	// fixed off-by-one: valid indices are 0 .. len-1; the old check
	// (`colnr > r.cells.len`) let colnr == len through and panicked below
	if colnr >= r.cells.len {
		return error("Cannot find cell, the cell is out of bounds, the colnr:'${colnr}' is larger than nr of cells:'${r.cells.len}'")
	}
	return &r.cells[colnr]
}
|
||||
|
||||
// values_get returns the row's cell values as a plain float array,
// in column (month) order.
pub fn (mut r Row) values_get() []f64 {
	return r.cells.map(it.val)
}
|
||||
|
||||
// starting from cell look forward for nrcolls
|
||||
// make the average
|
||||
// look_forward_avg averages nrcols_ consecutive cell values starting at
// column colnr_. When the window would run past the end of the row it is
// shifted left so it still covers nrcols_ cells.
// Errors when colnr_ is out of range or nrcols_ is not at least 1.
pub fn (r Row) look_forward_avg(colnr_ int, nrcols_ int) !f64 {
	mut colnr := colnr_
	mut nrcols := nrcols_
	// fixed off-by-one: colnr == r.cells.len is already out of bounds
	if colnr >= r.cells.len {
		return error("Cannot find cell, the cell is out of bounds, the colnr:'${colnr}' is larger than nr of cells:'${r.cells.len}'")
	}
	// guard: a non-positive or oversized window previously produced NaN
	// (0/0) or a negative start index and a panic
	if nrcols < 1 {
		return error("nrcols needs to be at least 1, got '${nrcols}'")
	}
	if nrcols > r.cells.len {
		nrcols = r.cells.len
	}
	if colnr + nrcols > r.cells.len {
		// shift the window left so it stays inside the row
		colnr = r.cells.len - nrcols
	}
	mut v := 0.0
	for i in colnr .. colnr + nrcols {
		v += r.cells[i].val
	}
	avg := v / f64(nrcols)
	return avg
}
|
||||
|
||||
// min returns the smallest cell value of the row.
// NOTE(review): on a row where every value exceeds the sentinel
// (9999999999999.0) or with no cells, the sentinel itself is returned.
pub fn (r Row) min() f64 {
	mut lowest := 9999999999999.0
	for c in r.cells {
		// console.print_debug(c.val)
		if c.val < lowest {
			lowest = c.val
		}
	}
	return lowest
}

// max returns the largest cell value of the row.
// NOTE(review): starts from 0.0, so a row of all-negative values returns 0.
pub fn (r Row) max() f64 {
	mut highest := 0.0
	for c in r.cells {
		// console.print_debug(c.val)
		if c.val > highest {
			highest = c.val
		}
	}
	return highest
}
|
||||
|
||||
// apply the namefilter, include & exclude filter, if match return true
|
||||
// filter reports whether this row matches the selection in args:
// no filters at all matches everything; otherwise the row matches when
// its tags pass the include/exclude filter OR its name (case-insensitive)
// appears in namefilter (rowname is treated as an extra namefilter entry).
pub fn (row Row) filter(args_ RowGetArgs) !bool {
	mut args := args_

	// a single rowname is just a one-element name filter
	if args.rowname != '' && args.rowname !in args.namefilter {
		args.namefilter << args.rowname
	}

	// no criteria at all: everything matches
	if args.namefilter.len == 0 && args.includefilter.len == 0 && args.excludefilter.len == 0 {
		return true
	}

	mut matched := false
	if args.includefilter.len > 0 || args.excludefilter.len > 0 {
		// the row's tag string is parsed as key:value params and matched
		tagparams := paramsparser.parse(row.tags)!
		matched = tagparams.filter_match(
			include: args.includefilter
			exclude: args.excludefilter
		)!
	}
	// a name match can only turn the result true, never false
	if !matched {
		matched = args.namefilter.any(it.to_lower() == row.name.to_lower())
	}
	return matched
}
|
||||
|
||||
// delete removes this row from its owning sheet's row map.
pub fn (mut row Row) delete() {
	row.sheet.delete(row.name)
}
|
||||
159
lib/biz/spreadsheet/row_actions.v
Normal file
159
lib/biz/spreadsheet/row_actions.v
Normal file
@@ -0,0 +1,159 @@
|
||||
module spreadsheet
|
||||
|
||||
// RowAction enumerates the per-cell operations Row.action can apply,
// either against a scalar val or cell-by-cell against other rows.
pub enum RowAction {
	add // add rows
	substract
	divide
	multiply
	aggregate // running (cumulative) sum over the months
	difference // each month minus the previous month
	roundint // truncate each value to an integer
	max
	min
	reverse //+1 becomes -1
	forwardavg // try to find 12 forward looking cells and do avg where we are
}
|
||||
|
||||
@[params]
pub struct RowActionArgs {
pub mut:
	name          string // result row name; empty means replace the source row itself
	action        RowAction
	val           f64 // scalar operand; NOTE: only applied when > 0.0
	rows          []&Row // row operands, applied cell-by-cell in order
	tags          string
	descr         string
	subgroup      string
	aggregatetype RowAggregateType = .sum
	delaymonths   int // how many months should we delay the output
}
|
||||
|
||||
// add one row to the other
|
||||
//
|
||||
// '''
|
||||
// name string optional: if not used then row will be modified itself
|
||||
// action RowAction
|
||||
// val f64 optional: if we want to e.g. multiply every cell with same val
|
||||
// rows []Row optional: a row if we want to add each val of item of row, can be more than 1
|
||||
// tags string how to recognize a row (selection)
|
||||
// aggregatetype RowAggregateType is unknown, sum, avg, max, min
|
||||
// delaymonths int //how many months should we delay the output
|
||||
// descr string
|
||||
// subgroup string
|
||||
// '''
|
||||
// row action is
|
||||
// '''
|
||||
// add // add rows
|
||||
// substract
|
||||
// divide
|
||||
// multiply
|
||||
// aggregate
|
||||
// difference
|
||||
// roundint
|
||||
// max
|
||||
// min
|
||||
// reverse //+1 becomes -1
|
||||
// forwardavg // try to find 12 forward looking cells and do avg where we are
|
||||
// '''
|
||||
//
|
||||
// action applies args.action to this row and returns the resulting row.
// Operands come in two forms, applied in this order per cell:
//   - args.rows: cell-by-cell combination (add/substract/multiply/divide/max/min)
//   - args.val:  scalar combination (same plus aggregate/difference/roundint),
//     NOTE(review): only applied when val > 0.0 — confirm that zero/negative
//     scalars being ignored is intentional
// reverse and forwardavg run unconditionally. When args.name is empty the
// source row is deleted and replaced by the result. Errors when the action
// does not fit the operand kind supplied.
pub fn (mut r Row) action(args_ RowActionArgs) !&Row {
	mut args := args_
	if args.name == '' {
		// no target name: the result takes the place of this row
		args.name = r.name
		r.sheet.delete(r.name)
	}

	mut row_result := r.copy(
		name: args.name
		tags: args.tags
		descr: args.descr
		subgroup: args.subgroup
		aggregatetype: args.aggregatetype
	)!

	mut prevval := 0.0 // running total for .aggregate
	for x in 0 .. r.sheet.nrcol {
		row_result.cells[x].empty = false
		row_result.cells[x].val = r.cells[x].val
		if args.rows.len > 0 {
			for r2 in args.rows {
				if args.action == .add {
					row_result.cells[x].val = row_result.cells[x].val + r2.cells[x].val
				} else if args.action == .substract {
					row_result.cells[x].val = row_result.cells[x].val - r2.cells[x].val
				} else if args.action == .multiply {
					row_result.cells[x].val = row_result.cells[x].val * r2.cells[x].val
				} else if args.action == .divide {
					row_result.cells[x].val = row_result.cells[x].val / r2.cells[x].val
				} else if args.action == .max {
					if r2.cells[x].val > row_result.cells[x].val {
						row_result.cells[x].val = r2.cells[x].val
					}
				} else if args.action == .min {
					if r2.cells[x].val < row_result.cells[x].val {
						row_result.cells[x].val = r2.cells[x].val
					}
				} else {
					return error('Action wrongly specified for ${r} with\nargs:${args}')
				}
			}
		}
		if args.val > 0.0 {
			if args.action == .add {
				row_result.cells[x].val = row_result.cells[x].val + args.val
			} else if args.action == .substract {
				row_result.cells[x].val = row_result.cells[x].val - args.val
			} else if args.action == .multiply {
				row_result.cells[x].val = row_result.cells[x].val * args.val
			} else if args.action == .divide {
				row_result.cells[x].val = row_result.cells[x].val / args.val
			} else if args.action == .aggregate {
				row_result.cells[x].val = row_result.cells[x].val + prevval
				prevval = row_result.cells[x].val
			} else if args.action == .difference {
				// fixed: month 0 has no predecessor; previously r.cells[x - 1]
				// was read at x == 0 which is out of bounds. The first month
				// now keeps its own value.
				if x > 0 {
					row_result.cells[x].val = row_result.cells[x].val - r.cells[x - 1].val
				}
			} else if args.action == .roundint {
				row_result.cells[x].val = int(row_result.cells[x].val)
			} else if args.action == .max {
				if args.val > row_result.cells[x].val {
					row_result.cells[x].val = args.val
				}
			} else if args.action == .min {
				if args.val < row_result.cells[x].val {
					row_result.cells[x].val = args.val
				}
			} else {
				return error('Action wrongly specified for ${r} with\nargs:${args}')
			}
		}

		if args.action == .reverse {
			row_result.cells[x].val = -row_result.cells[x].val
		}
		if args.action == .forwardavg {
			// NOTE(review): enum doc says 12 forward-looking cells but the
			// window here is 6 — confirm which is intended
			a := row_result.look_forward_avg(x, 6)!
			row_result.cells[x].val = a
		}
	}
	if args.delaymonths > 0 {
		row_result.delay(args.delaymonths)!
	}
	return row_result
}
|
||||
|
||||
// pub fn (mut r Row) add(name string, r2 Row) !&Row {
|
||||
// return r.action(name:name, rows:[]r2, tags:r.tags)
|
||||
// }
|
||||
// delay shifts the row's values monthdelay columns to the right:
// the first monthdelay cells become 0 and the trailing values fall off.
pub fn (mut r Row) delay(monthdelay int) ! {
	// snapshot the current values before overwriting in place
	mut original := []f64{}
	for x in 0 .. r.sheet.nrcol {
		original << r.cells[x].val
	}
	for x in 0 .. r.sheet.nrcol {
		r.cells[x].val = if x < monthdelay {
			0.0
		} else {
			original[x - monthdelay]
		}
	}
}
|
||||
50
lib/biz/spreadsheet/row_copy.v
Normal file
50
lib/biz/spreadsheet/row_copy.v
Normal file
@@ -0,0 +1,50 @@
|
||||
module spreadsheet
|
||||
|
||||
import math
|
||||
|
||||
@[params]
pub struct RowCopyArgs {
pub mut:
	name          string // required: name of the new row
	tags          string // empty fields below fall back to the source row's values
	descr         string
	subgroup      string
	aggregatetype RowAggregateType = .sum
}
|
||||
|
||||
// copy creates a new row in the same sheet under args.name and copies all
// cell values into it. Empty metadata fields (tags/descr/subgroup) and an
// .unknown aggregatetype inherit the source row's values.
// Errors when args.name is empty.
pub fn (mut r Row) copy(args_ RowCopyArgs) !&Row {
	mut args := args_
	if args.name == '' {
		return error('name cannot be empty for copy, args:${args} \non ${r}')
	}
	if args.tags == '' {
		args.tags = r.tags
	}
	if args.descr == '' {
		args.descr = r.description
	}
	if args.subgroup == '' {
		args.subgroup = r.subgroup
	}
	if args.aggregatetype == .unknown {
		args.aggregatetype = r.aggregatetype
	}
	// cleanup: the original re-checked `args.name.len > 0` with an
	// unreachable else branch (the empty-name guard above already returned),
	// and went through a dereferenced local copy of the new row; the row
	// registered in the sheet is returned directly instead
	mut row_result := r.sheet.row_new(
		name: args.name
		aggregatetype: args.aggregatetype
		descr: args.descr
		subgroup: args.subgroup
		tags: args.tags
	)!
	for x in 0 .. r.sheet.nrcol {
		row_result.cells[x].empty = false
		row_result.cells[x].val = r.cells[x].val
	}
	return row_result
}
|
||||
47
lib/biz/spreadsheet/row_recurring.v
Normal file
47
lib/biz/spreadsheet/row_recurring.v
Normal file
@@ -0,0 +1,47 @@
|
||||
module spreadsheet
|
||||
|
||||
import math
|
||||
|
||||
@[params]
pub struct RowRecurringArgs {
	RowCopyArgs // embeds name/tags/descr/subgroup/aggregatetype for the result row
pub mut:
	nrmonths    int = 60 // size of the rolling window that is summed per month
	delaymonths int // how many months should we delay the output
}
|
||||
|
||||
// recurring builds a row where each month is the rolling sum of this
// row's values over the preceding args.nrmonths window — models e.g.
// recurring revenue where every month's new sales keep contributing.
// When args.name is empty the source row is deleted and replaced.
// Errors when nrmonths < 5.
pub fn (mut r Row) recurring(args_ RowRecurringArgs) !&Row {
	mut args := args_
	if args.name == '' {
		args.name = r.name
		r.sheet.delete(r.name)
	}

	if args.nrmonths < 5 {
		return error('nrmonths should be at least 5 for recurring, args:${args} \non ${r}')
	}

	mut row_result := r.copy(
		name: args.name
		tags: args.tags
		descr: args.descr
		subgroup: args.subgroup
		aggregatetype: args.aggregatetype
	)!

	for x in 0 .. r.sheet.nrcol {
		mut aggregated := 0.0
		startnr := math.max(0, x - args.nrmonths)

		// NOTE(review): the window startnr .. x inclusive spans nrmonths+1
		// cells once x >= nrmonths — confirm whether that extra month is intended
		for x2 in startnr .. x + 1 {
			// println("${startnr}-${x} ${x2}:${r.cells[x2].val}")
			aggregated += r.cells[x2].val // go back max nrmonths months and aggregate it all
		}
		row_result.cells[x].empty = false
		row_result.cells[x].val = aggregated
	}
	if args.delaymonths > 0 {
		row_result.delay(args.delaymonths)!
	}
	return row_result
}
|
||||
293
lib/biz/spreadsheet/sheet.v
Normal file
293
lib/biz/spreadsheet/sheet.v
Normal file
@@ -0,0 +1,293 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.data.currency
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// Sheet is an in-memory spreadsheet: named rows of nrcol monthly cells,
// all values stored in one base currency.
@[heap]
pub struct Sheet {
pub mut:
	name     string // key under which the sheet is registered globally
	rows     map[string]&Row // rows keyed by lowercase row name
	nrcol    int = 60 // number of columns (months); 60 = 5 years
	params   SheetParams
	currency currency.Currency = currency.get('USD')! // base currency all cell values are converted into
}

pub struct SheetParams {
pub mut:
	visualize_cur bool // if we want to show e.g. $44.4 in a cell or just 44.4
}
|
||||
|
||||
// find maximum length of a cell (as string representation) for a given colnr.
// 0 is the first column; headers are never counted.
pub fn (mut s Sheet) cells_width(colnr int) !int {
	mut widest := 0
	for _, mut current_row in s.rows {
		// rows shorter than colnr simply don't contribute
		if colnr >= current_row.cells.len {
			continue
		}
		mut cell := current_row.cell_get(colnr)!
		width := cell.repr().len
		if width > widest {
			widest = width
		}
	}
	return widest
}
|
||||
|
||||
// walk over all rows and return the widest of the name / alias fields.
pub fn (mut s Sheet) rows_names_width_max() int {
	mut widest := 0
	for _, mut current_row in s.rows {
		if current_row.name.len > widest {
			widest = current_row.name.len
		}
		if current_row.alias.len > widest {
			widest = current_row.alias.len
		}
	}
	return widest
}
|
||||
|
||||
// walk over all rows and return the widest description field.
pub fn (mut s Sheet) rows_description_width_max() int {
	mut widest := 0
	for _, mut current_row in s.rows {
		if current_row.description.len > widest {
			widest = current_row.description.len
		}
	}
	return widest
}
|
||||
|
||||
// arguments for Sheet.group2row
@[params]
pub struct Group2RowArgs {
pub mut:
	name          string // name of the resulting aggregate row (required)
	include       []string // to use with params filter e.g. ['location:belgium_*'] //would match all words starting with belgium
	exclude       []string // tag patterns which exclude a row from aggregation
	tags          string // tags to put on the resulting row
	descr         string // description of the resulting row
	subgroup      string
	aggregatetype RowAggregateType = .sum // how the resulting row aggregates when further condensed
}
|
||||
|
||||
// find all rows which have one of the tags
// aggregate (sum) them into one row
// returns a row with the result
// useful to e.g. make new row which makes sum of all salaries for e.g. dev and engineering tag
pub fn (mut s Sheet) group2row(args Group2RowArgs) !&Row {
	name := args.name
	if name == '' {
		return error('name cannot be empty')
	}
	// the output row starts with all-empty cells
	mut rowout := s.row_new(
		name: name
		tags: args.tags
		descr: args.descr
		subgroup: args.subgroup
		aggregatetype: args.aggregatetype
	)!
	for _, row in s.rows {
		// each row's tags are parsed as params and matched against include/exclude
		tagstofilter := paramsparser.parse(row.tags)!
		matched := tagstofilter.filter_match(include: args.include, exclude: args.exclude)!
		if matched {
			// sum the matched row cell-by-cell into the output row
			mut x := 0
			for cell in row.cells {
				rowout.cells[x].val += cell.val
				rowout.cells[x].empty = false
				x += 1
			}
		}
	}
	return rowout
}
|
||||
|
||||
// arguments for toyear/toquarter/tosmaller (period aggregation of a sheet)
@[params]
pub struct ToYearQuarterArgs {
pub mut:
	name          string // name of the resulting sheet (defaults to '<name>_year')
	namefilter    []string // only include the exact names as specified for the rows
	includefilter []string // matches for the tags
	excludefilter []string // matches for the tags
	period_months int = 12 // how many source columns are merged into one (12=year, 3=quarter)
}
|
||||
|
||||
// internal function used by toyear and toquarter.
// aggregates every args.period_months consecutive columns of each row (that
// passes the filters) into one column of a new sheet, honoring the row's
// aggregatetype (sum / avg / max / min).
pub fn (s Sheet) tosmaller(args_ ToYearQuarterArgs) !&Sheet {
	mut args := args_
	mut sheetname := args.name
	if sheetname == '' {
		sheetname = s.name + '_year'
	}
	// BUG FIX: the old check compared two integer divisions
	// (f64(nrcol_new) != s.nrcol / args.period_months) which is always equal,
	// so an uneven column count was never detected; use modulo instead.
	if args.period_months <= 0 || s.nrcol % args.period_months != 0 {
		// means we can't do it
		panic('is bug, can only be 4 or 12')
	}
	nrcol_new := s.nrcol / args.period_months
	mut sheet_out := sheet_new(
		name: sheetname
		nrcol: nrcol_new
		visualize_cur: s.params.visualize_cur
		curr: s.currency.name
	)!
	for _, row in s.rows {
		// skip rows not matching the name/tag filters
		ok := row.filter(
			rowname: args.name
			namefilter: args.namefilter
			includefilter: args.includefilter
			excludefilter: args.excludefilter
			period_type: .month
		)!
		if ok == false {
			continue
		}
		// means filter not specified or filtered
		mut rnew := sheet_out.row_new(
			name: row.name
			aggregatetype: row.aggregatetype
			tags: row.tags
			growth: '0:0.0'
			descr: row.description
		)!
		for x in 0 .. nrcol_new {
			// aggregate the period_months source cells belonging to target column x
			mut newval := 0.0
			for xsub in 0 .. args.period_months {
				xtot := x * args.period_months + xsub
				if row.aggregatetype == .sum || row.aggregatetype == .avg {
					newval += row.cells[xtot].val
				} else if row.aggregatetype == .max {
					if row.cells[xtot].val > newval {
						newval = row.cells[xtot].val
					}
				} else if row.aggregatetype == .min {
					// NOTE(review): starts from 0.0, so all-positive rows min to 0 — confirm intent
					if row.cells[xtot].val < newval {
						newval = row.cells[xtot].val
					}
				} else {
					panic('not implemented')
				}
			}
			if row.aggregatetype == .sum || row.aggregatetype == .max || row.aggregatetype == .min {
				rnew.cells[x].val = newval
			} else {
				// avg: divide the accumulated sum by the window size
				rnew.cells[x].val = newval / args.period_months
			}
		}
	}
	return sheet_out
}
|
||||
|
||||
// make a copy of the sheet and aggregate on year
// params
//  name string
//  namefilter []string: list of names of rows which will be included
//  includefilter / excludefilter: tag matches; at least one tag must match per row
pub fn (mut s Sheet) toyear(args ToYearQuarterArgs) !&Sheet {
	mut args2 := args
	args2.period_months = 12 // 12 months per target column
	return s.tosmaller(args2)
}

// make a copy of the sheet and aggregate on quarter
// params: see toyear
pub fn (mut s Sheet) toquarter(args ToYearQuarterArgs) !&Sheet {
	mut args2 := args
	args2.period_months = 3 // 3 months per target column
	return s.tosmaller(args2)
}
|
||||
|
||||
// return array with same amount of items as cols in the rows
//
// for year we return Y1, Y2, ...
// for quarter we return Q1, Q2, ...
// for months we return M1, M2, ...
pub fn (mut s Sheet) header() ![]string {
	// pick the prefix from the column count:
	// more than 40 columns -> months, more than 10 -> quarters, otherwise years
	prefix := if s.nrcol > 40 {
		'M'
	} else if s.nrcol > 10 {
		'Q'
	} else {
		'Y'
	}
	mut names := []string{cap: s.nrcol}
	for nr in 1 .. s.nrcol + 1 {
		names << '${prefix}${nr}'
	}
	return names
}
|
||||
|
||||
// serialize the sheet to json
// TODO: not done yet, always returns ''
pub fn (mut s Sheet) json() string {
	// return json.encode_pretty(s)
	return ''
}

// find row by name, report error if not found
pub fn (mut s Sheet) row_get(name string) !&Row {
	mut row := s.rows[name] or { return error('could not find row with name: ${name}') }
	return row
}

// return all cell values of the named row; error if the row does not exist
pub fn (mut s Sheet) values_get(name string) ![]f64 {
	mut r := s.row_get(name)!
	vs := r.values_get()
	return vs
}

// remove the named row; no-op when the row does not exist
pub fn (mut s Sheet) row_delete(name string) {
	if name in s.rows {
		s.rows.delete(name)
	}
}
|
||||
|
||||
// find a cell by row name and column number, report error if either is missing.
// BUG FIX: the old code did `mut c := r.cells[col] or {...}` and returned `&c`,
// a reference to a stack-local COPY of the cell, so mutations through the
// returned &Cell never reached the sheet; return a reference into the row instead.
pub fn (mut s Sheet) cell_get(row string, col int) !&Cell {
	mut r := s.row_get(row)!
	if col < 0 || col >= r.cells.len {
		return error('could not find cell from col:${col} for row name: ${row}')
	}
	return &r.cells[col]
}
|
||||
|
||||
// delete the named row; no-op when it does not exist.
// kept for backwards compatibility: identical behavior to row_delete,
// so it simply delegates instead of duplicating the logic.
pub fn (mut s Sheet) delete(name string) {
	s.row_delete(name)
}
|
||||
185
lib/biz/spreadsheet/sheet_getters.v
Normal file
185
lib/biz/spreadsheet/sheet_getters.v
Normal file
@@ -0,0 +1,185 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import math
|
||||
|
||||
// drop blank (whitespace-only) lines from txt; every kept line comes back
// newline-terminated.
fn remove_empty_line(txt string) string {
	kept := txt.split_into_lines().filter(it.trim_space() != '')
	mut result := ''
	for line in kept {
		result += '${line}\n'
	}
	return result
}
|
||||
|
||||
// arguments used to select and render rows (wiki tables, charts, data export)
@[params]
pub struct RowGetArgs {
pub mut:
	rowname       string // if specified then its one name
	namefilter    []string // only include the exact names as specified for the rows
	includefilter []string // to use with params filter e.g. ['location:belgium_*'] //would match all words starting with belgium
	excludefilter []string
	period_type   PeriodType // year, month, quarter
	aggregate     bool = true // if more than 1 row matches should we aggregate or not
	aggregatetype RowAggregateType = .sum // important if used with include/exclude, because then we group
	unit          UnitType // scale values by thousand/million/billion on output
	title         string
	title_sub     string
	size          string // e.g. pie chart radius
	rowname_show  bool = true // show the name of the row
	descr_show    bool
	description   string
}

// output scaling for values
pub enum UnitType {
	normal
	thousand
	million
	billion
}

// granularity of the columns
pub enum PeriodType {
	year
	month
	quarter
	error
}
|
||||
|
||||
// find rownames which match RowGetArgs (name + include/exclude tag filters)
pub fn (s Sheet) rownames_get(args RowGetArgs) ![]string {
	mut res := []string{}
	for _, row in s.rows {
		if row.filter(args)! {
			res << row.name
		}
	}
	return res
}

// get exactly one rowname matching the filters; error when 0 or more than 1 match
pub fn (s Sheet) rowname_get(args RowGetArgs) !string {
	r := s.rownames_get(args)!
	if r.len == 1 {
		return r[0]
	}
	if r.len == 0 {
		return error("Didn't find rows for ${s.name}.\n${args}")
	}
	return error('Found too many rows for ${s.name}.\n${args}')
}
|
||||
|
||||
// return e.g. ["'Y1'", " 'Y2'", ...] if year, is for header
// NOTE(review): items come from splitting the quoted string on ',' so all but
// the first keep a leading space and all keep quotes — confirm callers expect that.
pub fn (mut s Sheet) header_get_as_list(period_type PeriodType) ![]string {
	str := s.header_get_as_string(period_type)!
	return str.split(',')
}

// return the row data split on ',' (see data_get_as_string), is for charts
pub fn (mut s Sheet) data_get_as_list(args RowGetArgs) ![]string {
	str := s.data_get_as_string(args)!
	return str.split(',')
}

// return e.g. "'Y1', 'Y2', 'Y3', 'Y4', 'Y5', 'Y6'" if year, is for header
pub fn (mut s Sheet) header_get_as_string(period_type PeriodType) !string {
	err_pre := "Can't get header for sheet:${s.name}\n"
	// assumes the sheet holds monthly columns (12 per year)
	nryears := int(s.nrcol / 12)
	mut out := ''
	match period_type {
		.year {
			for i in 1 .. (nryears + 1) {
				out += "'Y${i}', "
			}
		}
		.quarter {
			for i in 1 .. (nryears * 4 + 1) {
				out += "'Q${i}', "
			}
		}
		.month {
			for i in 1 .. (12 * nryears + 1) {
				out += "'M${i}', "
			}
		}
		else {
			return error('${err_pre}Period type not well specified')
		}
	}
	// strip the trailing ", "
	out = out.trim_space().trim(',').trim_space()
	return out
}
|
||||
|
||||
// return the values of one row as a comma separated string, aggregated to the
// requested period (year/quarter) and scaled to the requested unit.
// args.rowname is required.
pub fn (mut s Sheet) data_get_as_string(args RowGetArgs) !string {
	if args.rowname == '' {
		return error('rowname needs to be specified')
	}
	nryears := 5 // NOTE(review): hardcoded, assumes 60-month sheets — confirm
	err_pre := "Can't get data for sheet:${s.name} row:${args.rowname}.\n"
	mut s2 := &s

	// BUG FIX: previously the results of toyear/toquarter were discarded and the
	// monthly sheet copy was used, so year/quarter requests read monthly data
	// (and the length checks below could never pass); keep the converted sheet.
	if args.period_type == .year {
		s2 = s.toyear(
			name: args.rowname
			namefilter: args.namefilter
			includefilter: args.includefilter
			excludefilter: args.excludefilter
		)!
	}
	if args.period_type == .quarter {
		s2 = s.toquarter(
			name: args.rowname
			namefilter: args.namefilter
			includefilter: args.includefilter
			excludefilter: args.excludefilter
		)!
	}
	mut out := ''

	mut vals := s2.values_get(args.rowname)!
	// sanity: the converted sheet must have the expected amount of columns
	if args.period_type == .year && vals.len != nryears {
		return error('${err_pre}Vals.len need to be 6, for year.\nhere:\n${vals}')
	}
	if args.period_type == .quarter && vals.len != nryears * 4 {
		return error('${err_pre}vals.len need to be 6*4, for quarter.\nhere:\n${vals}')
	}
	if args.period_type == .month && vals.len != nryears * 12 {
		return error('${err_pre}vals.len need to be 6*12, for month.\nhere:\n${vals}')
	}

	for mut val in vals {
		// scale to the requested unit before rounding to 1 significant decimal
		if args.unit == .thousand {
			val = val / 1000.0
		}
		if args.unit == .million {
			val = val / 1000000.0
		}
		if args.unit == .billion {
			val = val / 1000000000.0
		}
		out += ',${math.round_sig(val, 1)}'
	}
	return out.trim(',')
}
|
||||
|
||||
// use RowGetArgs to get a smaller (period-aggregated, filtered) version of the sheet
pub fn (mut s Sheet) filter(args RowGetArgs) !&Sheet {
	// translate the period type into a window size in months
	period_months := match args.period_type {
		.year { 12 }
		.month { 1 }
		.quarter { 3 }
		else { panic('bug') }
	}

	tga := ToYearQuarterArgs{
		namefilter: args.namefilter
		includefilter: args.includefilter
		excludefilter: args.excludefilter
		period_months: period_months
	}

	return s.tosmaller(tga)!
}
|
||||
17
lib/biz/spreadsheet/tools.v
Normal file
17
lib/biz/spreadsheet/tools.v
Normal file
@@ -0,0 +1,17 @@
|
||||
module spreadsheet
|
||||
|
||||
// convert an int array to f64 element-wise
pub fn array2float(list []int) []f64 {
	return list.map(f64(it))
}
|
||||
|
||||
// convert an f64 array to int element-wise (truncating toward zero)
pub fn array2int(list []f64) []int {
	return list.map(int(it))
}
|
||||
86
lib/biz/spreadsheet/wiki.v
Normal file
86
lib/biz/spreadsheet/wiki.v
Normal file
@@ -0,0 +1,86 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
// format a sheet properly in wiki (markdown table) format.
// the sheet is first filtered / period-aggregated according to args.
pub fn (mut s Sheet) wiki(args_ RowGetArgs) !string {
	mut args := args_

	// validate the period type early (panics on unsupported values)
	_ := match args.period_type {
		.year { 12 }
		.month { 1 }
		.quarter { 3 }
		else { panic('bug') }
	}

	mut sheet := s.filter(args)! // this will do the filtering and if needed make smaller

	mut out := ''
	// BUG FIX: the title was previously appended twice (once as a '## ' header,
	// then again as plain text by an identical second condition); emit it once.
	if args.title.len > 0 {
		out = '## ${args.title}\n\n'
	}

	// widest cell per column, so columns line up
	mut colmax := []int{}
	for x in 0 .. sheet.nrcol {
		colmaxval := sheet.cells_width(x)!
		colmax << colmaxval
	}

	header := sheet.header()!

	// get the width of name and optionally description
	mut names_width := sheet.rows_names_width_max()

	// build the table header and the '---' separator line
	mut header_wiki_items := []string{}
	mut header_wiki_items2 := []string{}
	if args.rowname_show && names_width > 0 {
		header_wiki_items << texttools.expand('|', names_width + 1, ' ')
		header_wiki_items2 << texttools.expand('|', names_width + 1, '-')
	}
	for x in 0 .. sheet.nrcol {
		colmaxval := colmax[x]
		headername := header[x]
		item := texttools.expand(headername, colmaxval, ' ')
		header_wiki_items << '|${item}'
		item2 := texttools.expand('', colmaxval, '-')
		header_wiki_items2 << '|${item2}'
	}
	header_wiki_items << '|'
	header_wiki_items2 << '|'
	header_wiki := header_wiki_items.join('')
	header_wiki2 := header_wiki_items2.join('')

	out += header_wiki + '\n'
	out += header_wiki2 + '\n'

	for _, mut row in sheet.rows {
		mut wiki_items := []string{}
		mut rowname := row.name
		if row.description.len > 0 {
			// prefer the description (and its column width) when available
			names_width = sheet.rows_description_width_max()
			rowname = row.description
		}
		if args.rowname_show && names_width > 0 {
			if names_width > 60 {
				names_width = 60 // cap so tables stay readable
			}
			wiki_items << texttools.expand('|${rowname}', names_width + 1, ' ')
		}
		for x in 0 .. sheet.nrcol {
			colmaxval := colmax[x]
			val := row.cells[x].str()
			item := texttools.expand(val, colmaxval, ' ')
			wiki_items << '|${item}'
		}
		wiki_items << '|'
		wiki2 := wiki_items.join('')
		out += wiki2 + '\n'
	}

	return out
}
|
||||
188
lib/biz/spreadsheet/wiki_charts.v
Normal file
188
lib/biz/spreadsheet/wiki_charts.v
Normal file
@@ -0,0 +1,188 @@
|
||||
module spreadsheet
|
||||
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// build the echarts 'title' option fragment for a chart.
// returns '' when no title is set so it can be interpolated unconditionally.
pub fn (mut s Sheet) wiki_title_chart(args RowGetArgs) string {
	if args.title.len > 0 {
		titletxt := "
		title: {
			text: '${args.title}',
			subtext: '${args.title_sub}',
			left: 'center'
		},
		"
		return titletxt
	}
	return ''
}
|
||||
|
||||
// render a markdown table listing the (filtered) rows of the sheet:
// one line per row with its name, description and tags.
pub fn (mut s_ Sheet) wiki_row_overview(args RowGetArgs) !string {
	// filter/aggregate first so only the selected rows are listed
	mut s := s_.filter(args)!

	rows_values := s.rows.values().map([it.name, it.description, it.tags])
	mut rows := []elements.Row{}
	for values in rows_values {
		rows << elements.Row{
			cells: values.map(&elements.Paragraph{
				content: it
			})
		}
	}
	header_items := ['Row Name', 'Description', 'Tags']
	table := elements.Table{
		header: header_items.map(&elements.Paragraph{
			content: it
		})
		// TODO: need to use the build in mechanism to filter rows
		rows: rows
		alignments: [.left, .left, .left]
	}
	return table.markdown()
}
|
||||
|
||||
// produce a nice looking stacked line chart, see
// https://echarts.apache.org/examples/en/editor.html?c=line-stack
// one series per row matching the filters in args.
pub fn (mut s Sheet) wiki_line_chart(args_ RowGetArgs) !string {
	mut args := args_

	rownames := s.rownames_get(args)!
	header := s.header_get_as_string(args.period_type)!
	mut series_lines := []string{}

	// one echarts series object per matching row
	for rowname in rownames {
		data := s.data_get_as_string(RowGetArgs{
			...args
			rowname: rowname
		})!
		series_lines << '{
			name: \'${rowname}\',
			type: \'line\',
			stack: \'Total\',
			data: [${data}]
		}'
	}

	// TODO: need to implement the multiple results which can come back from the args, can be more than 1

	// NOTE(review): legend data relies on V's []string str() producing
	// ['a', 'b'] which happens to be valid JS — confirm.
	template := "
	${s.wiki_title_chart(args)}
	tooltip: {
		trigger: 'axis'
	},
	legend: {
		data: ${rownames}
	},
	grid: {
		left: '3%',
		right: '4%',
		bottom: '3%',
		containLabel: true
	},
	toolbox: {
		feature: {
			saveAsImage: {}
		}
	},
	xAxis: {
		type: 'category',
		boundaryGap: false,
		data: [${header}]
	},
	yAxis: {
		type: 'value'
	},
	series: [${series_lines.join(',')}]
	"
	out := remove_empty_line('```echarts\n{${template}\n};\n```\n')
	return out
}
|
||||
|
||||
// produce a nice looking bar chart see
// https://echarts.apache.org/examples/en/index.html#chart-type-bar
// args must select exactly one row (see rowname_get).
pub fn (mut s Sheet) wiki_bar_chart(args_ RowGetArgs) !string {
	mut args := args_
	args.rowname = s.rowname_get(args)!
	header := s.header_get_as_string(args.period_type)!
	data := s.data_get_as_string(args)!
	bar1 := "
	${s.wiki_title_chart(args)}
	xAxis: {
		type: 'category',
		data: [${header}]
	},
	yAxis: {
		type: 'value'
	},
	series: [
		{
			data: [${data}],
			type: 'bar',
			showBackground: true,
			backgroundStyle: {
				color: 'rgba(180, 180, 180, 0.2)'
			}
		}
	]
	"
	out := remove_empty_line('```echarts\n{${bar1}\n};\n```\n')
	return out
}
|
||||
|
||||
// produce a nice looking pie chart see
// https://echarts.apache.org/examples/en/index.html#chart-type-pie
// args must select exactly one row; each column becomes one pie slice.
pub fn (mut s Sheet) wiki_pie_chart(args_ RowGetArgs) !string {
	mut args := args_
	args.rowname = s.rowname_get(args)!
	header := s.header_get_as_list(args.period_type)!
	data := s.data_get_as_list(args)!

	// optional radius for the pie, driven by args.size
	mut radius := ''
	if args.size.len > 0 {
		radius = "radius: '${args.size}',"
	}

	if header.len != data.len {
		return error('data and header lengths must match.\n${header}\n${data}')
	}

	// build the [{value, name}, ...] data list
	// (header items already carry their quotes from header_get_as_string)
	mut data_lines := []string{}
	for i, _ in data {
		data_lines << '{ value: ${data[i]}, name: ${header[i]}}'
	}
	data_str := '[${data_lines.join(',')}]'

	bar1 := "
	${s.wiki_title_chart(args)}
	tooltip: {
		trigger: 'item'
	},
	legend: {
		orient: 'vertical',
		left: 'left'
	},
	series: [
		{
			name: 'Access From',
			type: 'pie',
			${radius}
			data: ${data_str},
			emphasis: {
				itemStyle: {
					shadowBlur: 10,
					shadowOffsetX: 0,
					shadowColor: 'rgba(0, 0, 0, 0.5)'
				}
			}
		}
	]

	"
	out := remove_empty_line('```echarts\n{${bar1}\n};\n```\n')
	return out
}
|
||||
48
lib/data/doctree/collection/collection.v
Normal file
48
lib/data/doctree/collection/collection.v
Normal file
@@ -0,0 +1,48 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib { Path }
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
// Collection is a named directory of pages, files and images.
@[heap]
pub struct Collection {
pub mut:
	name          string @[required]
	path          Path   @[required]
	fail_on_error bool // when true, errors abort processing instead of being collected
	heal          bool = true // fix/clean content while scanning
	pages         map[string]&data.Page
	files         map[string]&data.File
	images        map[string]&data.File
	errors        []CollectionError // errors collected during scanning/processing
}

// arguments for collection.new
@[params]
pub struct CollectionNewArgs {
pub mut:
	name          string @[required]
	path          string @[required]
	heal          bool = true // healing means we fix images, if selected will automatically load, remove stale links
	load          bool = true // scan the directory immediately
	fail_on_error bool
}

// get a new collection
pub fn new(args_ CollectionNewArgs) !Collection {
	mut args := args_
	// normalize the name (lowercase, underscores, ...)
	args.name = texttools.name_fix(args.name)

	mut pp := pathlib.get_dir(path: args.path)! // will raise error if path doesn't exist
	mut collection := Collection{
		name: args.name
		path: pp
		heal: args.heal
		fail_on_error: args.fail_on_error
	}

	if args.load {
		collection.scan() or { return error('Error scanning collection ${args.name}:\n${err}') }
	}

	return collection
}
|
||||
29
lib/data/doctree/collection/data/error.v
Normal file
29
lib/data/doctree/collection/data/error.v
Normal file
@@ -0,0 +1,29 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib { Path }
|
||||
|
||||
// category of an error found while processing a page
pub enum PageErrorCat {
	unknown
	file_not_found
	image_not_found
	page_not_found
	def // error in a !!wiki.def action
}

// wraps multiple page errors into one error value
pub struct PageMultiError {
	Error
pub mut:
	errs []PageError
}

// implement the Error interface: summarize all collected page errors
pub fn (err PageMultiError) msg() string {
	return 'Failed in processing page with one or multiple errors: ${err.errs}'
}

// a single error found while processing a page
pub struct PageError {
	Error
pub mut:
	path Path // page the error occurred in
	msg  string
	cat  PageErrorCat
}
|
||||
102
lib/data/doctree/collection/data/file.v
Normal file
102
lib/data/doctree/collection/data/file.v
Normal file
@@ -0,0 +1,102 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
// processing state of a file
pub enum FileStatus {
	unknown
	ok
	error
}

// whether the file is a generic file or an image
pub enum FileType {
	file
	image
}

// File represents a (non-page) file or image inside a collection.
@[heap]
pub struct File {
pub mut:
	collection_path pathlib.Path // root path of the owning collection
	name            string // received a name fix
	ext             string // lowercase extension, without the dot
	path            pathlib.Path
	pathrel         string // path relative to collection_path, no leading/trailing '/'
	state           FileStatus
	pages_linked    []&Page // pointer to pages which use this file
	ftype           FileType
	collection_name string
}

// arguments for new_file
@[params]
pub struct NewFileArgs {
pub:
	name            string // received a name fix
	collection_path pathlib.Path
	pathrel         string
	path            pathlib.Path
	collection_name string @[required]
}
|
||||
|
||||
// create a File from args and initialize it (name, ext, relative path)
pub fn new_file(args NewFileArgs) !File {
	mut f := File{
		name: args.name
		path: args.path
		collection_path: args.collection_path
		pathrel: args.pathrel
		collection_name: args.collection_name
	}

	f.init()!

	return f
}

// full file name including extension, e.g. 'logo.png'
pub fn (file File) file_name() string {
	return '${file.name}.${file.ext}'
}
|
||||
|
||||
// parses file name, extension and relative path
pub fn (mut file File) init() ! {
	// detect images by extension so they land in the images map
	if file.path.is_image() {
		file.ftype = .image
	}

	file.name = file.path.name_fix_no_ext()
	file.ext = file.path.path.all_after_last('.').to_lower()

	path_rel := file.path.path_relative(file.collection_path.path) or {
		return error('cannot get relative path.\n${err}')
	}

	file.pathrel = path_rel.trim('/')
}

// remove the file from disk
fn (mut file File) delete() ! {
	file.path.delete()!
}

// move the file to dest (a directory that must already exist) and update
// path/pathrel accordingly.
// TODO: what if this is moved to another collection, or outside the scope of the tree?
fn (mut file File) mv(dest string) ! {
	mut destination := pathlib.get_dir(path: dest)! // will fail if dir doesn't exist

	os.mv(file.path.path, destination.path) or {
		return error('could not move ${file.path.path} to ${destination.path} .\n${err}\n${file}')
	}

	// need to get relative path in, in relation to collection
	file.pathrel = destination.path_relative(file.collection_path.path)!
	file.path = destination
}

// does the file exist on disk?
fn (mut file File) exists() !bool {
	return file.path.exists()
}

// copy the file to dest (no rsync)
pub fn (file_ File) copy(dest string) ! {
	mut file := file_
	mut dest2 := pathlib.get(dest)
	file.path.copy(dest: dest2.path, rsync: false) or {
		return error('Could not copy file: ${file.path.path} to ${dest} .\n${err}\n${file}')
	}
}
|
||||
164
lib/data/doctree/collection/data/page.v
Normal file
164
lib/data/doctree/collection/data/page.v
Normal file
@@ -0,0 +1,164 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements { Action, Doc, Element }
|
||||
import freeflowuniverse.herolib.data.markdownparser
|
||||
|
||||
// processing state of a page
pub enum PageStatus {
	unknown
	ok
	error
}

// Page is one markdown document inside a collection; it caches the parsed
// doc and all its elements by id.
@[heap]
pub struct Page {
mut:
	doc           &Doc @[str: skip] // parsed markdown tree (reparsed when changed is set)
	element_cache map[int]Element // element id -> element, rebuilt on every (re)parse
	changed       bool // content changed since last parse; doc() will reparse
pub mut:
	name            string // received a name fix
	alias           string // a proper name for e.g. def
	path            pathlib.Path
	collection_name string
}

// arguments for new_page
@[params]
pub struct NewPageArgs {
pub:
	name            string       @[required]
	path            pathlib.Path @[required]
	collection_name string       @[required]
}

// parse the markdown at args.path and build the page with its element cache
pub fn new_page(args NewPageArgs) !Page {
	if args.collection_name == '' {
		return error('page collection name must not be empty')
	}

	if args.name == '' {
		return error('page name must not be empty')
	}
	mut doc := markdownparser.new(path: args.path.path, collection_name: args.collection_name) or {
		return error('failed to parse doc for path ${args.path.path}\n${err}')
	}
	// index every element by id for fast lookup/mutation later
	children := doc.children_recursive()
	mut element_cache := map[int]Element{}
	for child in children {
		element_cache[child.id] = child
	}
	mut new_page := Page{
		element_cache: element_cache
		name: args.name
		path: args.path
		collection_name: args.collection_name
		doc: &doc
	}
	return new_page
}
|
||||
|
||||
// return doc, reparse if needed (when the page content was modified)
fn (mut page Page) doc() !&Doc {
	if page.changed {
		// serialize the modified tree and parse it again so ids/cache are consistent
		content := page.doc.markdown()!
		page.reparse_doc(content)!
	}

	return page.doc
}

// return doc without mutating the page; when the page changed, a fresh
// throw-away parse is returned instead of updating the cached one
fn (page Page) doc_immute() !&Doc {
	if page.changed {
		content := page.doc.markdown()!
		doc := markdownparser.new(content: content, collection_name: page.collection_name)!
		return &doc
	}
	return page.doc
}

// reparse doc markdown and assign new doc to page (rebuilds the element cache)
fn (mut page Page) reparse_doc(content string) ! {
	doc := markdownparser.new(content: content, collection_name: page.collection_name)!
	page.element_cache = map[int]Element{}
	for child in doc.children_recursive() {
		page.element_cache[child.id] = child
	}

	page.doc = &doc
	page.changed = false
}
|
||||
|
||||
// unique key of the page inside a tree: '<collection>:<page>'
pub fn (page Page) key() string {
	return '${page.collection_name}:${page.name}'
}

// names of pages this page links to (from the parsed doc)
pub fn (page Page) get_linked_pages() ![]string {
	doc := page.doc_immute()!
	return doc.linked_pages
}

// render the page back to markdown (uses a fresh parse if the page changed)
pub fn (page Page) get_markdown() !string {
	mut doc := page.doc_immute()!
	return doc.markdown()!
}

// replace the page content: reparses and rebuilds the element cache
pub fn (mut page Page) set_content(content string) ! {
	page.reparse_doc(content)!
}

// look up an element by id in the cache; error when unknown
fn (mut page Page) get_element(element_id int) !Element {
	return page.element_cache[element_id] or {
		return error('no element found with id ${element_id}')
	}
}
|
||||
|
||||
// TODO: this should not be allowed (giving access to modify page content to any caller)
|
||||
pub fn (mut page Page) get_all_actions() ![]&Action {
|
||||
mut actions := []&Action{}
|
||||
mut doc := page.doc()!
|
||||
for element in doc.children_recursive() {
|
||||
if element is Action {
|
||||
actions << element
|
||||
}
|
||||
}
|
||||
|
||||
return actions
|
||||
}
|
||||
|
||||
// Collects copies of all '!!wiki.include' actions present in the page doc.
pub fn (page Page) get_include_actions() ![]Action {
	mut found := []Action{}
	// TODO: check if below is necessary
	// mut doc := page.doc_immute()!
	for element in page.doc.children_recursive() {
		if element is Action {
			if element.action.actor == 'wiki' && element.action.name == 'include' {
				found << *element
			}
		}
	}
	return found
}
|
||||
|
||||
// Marks the Action element with the given id as processed and flags the
// page as changed; errors when the id is unknown or not an Action.
pub fn (mut page Page) set_action_element_to_processed(element_id int) ! {
	mut element := page.element_cache[element_id] or {
		return error('page ${page.path} doc has no element with id ${element_id}')
	}

	if mut element is Action {
		element.action_processed = true
		page.changed = true
		return
	}

	return error('element with id ${element_id} is not an action')
}
|
||||
|
||||
// Overwrites an element's raw content without reparsing the document.
// The page is flagged as changed so a later doc()/doc_immute() reparses.
pub fn (mut page Page) set_element_content_no_reparse(element_id int, content string) ! {
	mut element := page.element_cache[element_id] or {
		return error('page ${page.path} doc has no element with id ${element_id}')
	}

	element.content = content
	page.changed = true
}
|
||||
49
lib/data/doctree/collection/data/process_aliases.v
Normal file
49
lib/data/doctree/collection/data/process_aliases.v
Normal file
@@ -0,0 +1,49 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements
|
||||
|
||||
// returns !!wiki.def actions
pub fn (mut page Page) get_def_actions() ![]elements.Action {
	mut doc := page.doc()!
	mut result := []elements.Action{}
	for action_ptr in doc.actionpointers(actor: 'wiki', name: 'def') {
		result << *action_ptr
	}
	return result
}
|
||||
|
||||
// returns page aliases, and removes processed action's content
//
// Reads the '!!wiki.def' action identified by element_id, sets page.alias
// from the action's 'name' param (falling back to the doc header name),
// marks the action processed with empty content, and returns the
// normalized aliases (name_fix'ed, underscores removed, '.md' stripped).
pub fn (mut page Page) process_def_action(element_id int) ![]string {
	mut action_element := page.get_element(element_id)!

	mut doc := page.doc()!
	if mut action_element is elements.Action {
		mut aliases := map[string]bool{}
		def_action := action_element.action
		page.alias = def_action.params.get_default('name', '')!
		if page.alias == '' {
			page.alias = doc.header_name()!
		}

		action_element.action_processed = true
		action_element.content = ''
		page.changed = true
		for alias in def_action.params.get_list('alias')! {
			mut processed_alias := alias
			if processed_alias.to_lower().ends_with('.md') {
				// remove the .md at end
				// BUG FIX: was `processed_alias[0..page.collection_name.len - 3]`,
				// which truncates the alias by the length of an unrelated string;
				// strip the 3-char '.md' suffix of the alias itself.
				processed_alias = processed_alias[0..processed_alias.len - 3]
			}

			processed_alias = texttools.name_fix(processed_alias).replace('_', '')
			aliases[processed_alias] = true
		}

		return aliases.keys()
	}

	return error('element with id ${element_id} is not an action')
}
|
||||
40
lib/data/doctree/collection/data/process_aliases_test.v
Normal file
40
lib/data/doctree/collection/data/process_aliases_test.v
Normal file
@@ -0,0 +1,40 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
// A page containing a !!wiki.def action must expose it via get_def_actions.
fn test_get_def_actions() {
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	page1_path.write("!!wiki.def alias:'tf-dev,cloud-dev,threefold-dev' name:'about us'")!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!

	def_actions := page1.get_def_actions()!
	assert def_actions.len == 1

	action := def_actions[0].action
	assert action.params.get('name')! == 'about us'
	mut aliases := action.params.get_list('alias')!
	aliases.sort()
	assert ['cloud-dev', 'tf-dev', 'threefold-dev'] == aliases
}
|
||||
|
||||
// Processing a def action must empty the page content, set the alias
// from the 'name' param and return the normalized alias list.
fn test_process_def_action() {
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	page1_path.write("!!wiki.def alias:'tf-dev,cloud-dev,threefold-dev' name:'about us'")!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!

	def_actions := page1.get_def_actions()!
	assert def_actions.len == 1

	mut aliases := page1.process_def_action(def_actions[0].id)!
	assert page1.get_markdown()! == ''
	assert page1.alias == 'about us'

	aliases.sort()
	assert ['clouddev', 'tfdev', 'threefolddev'] == aliases
}
|
||||
34
lib/data/doctree/collection/data/process_def_pointers.v
Normal file
34
lib/data/doctree/collection/data/process_def_pointers.v
Normal file
@@ -0,0 +1,34 @@
|
||||
module data
|
||||
|
||||
// returns all page def elements (similar to *DEF)
pub fn (mut page Page) get_def_names() ![]string {
	mut names := map[string]bool{}
	mut doc := page.doc()!
	for defitem in doc.defpointers() {
		names[defitem.nameshort] = true
	}
	return names.keys()
}
|
||||
|
||||
// removes the def content, and generates a link to the page
//
// def_data maps a def short-name to a [pagekey, pagename] pair; unknown
// names are skipped, malformed entries are an error.
pub fn (mut page Page) set_def_links(def_data map[string][]string) ! {
	mut doc := page.doc()!
	for mut defitem in doc.defpointers() {
		entry := def_data[defitem.nameshort] or { continue }
		if entry.len != 2 {
			return error('invalid def data length: expected 2, found ${entry.len}')
		}

		defitem.pagekey = entry[0]
		defitem.pagename = entry[1]
		defitem.process_link()!
	}

	doc.process()!
	page.changed = true
}
|
||||
23
lib/data/doctree/collection/data/process_def_pointers_test.v
Normal file
23
lib/data/doctree/collection/data/process_def_pointers_test.v
Normal file
@@ -0,0 +1,23 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import rand
|
||||
|
||||
// A page with two *DEF pointers should, after set_def_links, render as two
// markdown links pointing at the mapped pages.
fn test_process_def_pointers() {
	// create a page with def pointers to two different pages
	// set def links on page.
	// processed page should have links to the other two pages
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	alias1, alias2 := rand.string(5).to_upper(), rand.string(5).to_upper()
	page1_content := '*${alias1}\n*${alias2}'
	page1_path.write(page1_content)!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!

	// def short-names are lowercase forms of the *DEF pointers
	mut defs := map[string][]string{}
	defs['${alias1.to_lower()}'] = ['col2:page2', 'page2 alias']
	defs['${alias2.to_lower()}'] = ['col3:page3', 'my page3 alias']

	page1.set_def_links(defs)!

	assert page1.get_markdown()! == '[page2 alias](col2:page2.md)\n[my page3 alias](col3:page3.md)'
}
|
||||
59
lib/data/doctree/collection/data/process_link.v
Normal file
59
lib/data/doctree/collection/data/process_link.v
Normal file
@@ -0,0 +1,59 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements
|
||||
import freeflowuniverse.herolib.data.doctree.pointer
|
||||
|
||||
// Note: doc should not get reparsed after invoking this method
//
// Rewrites every internal link/image in the page to a relative path taken
// from `paths` (pointer-string -> exported file path), records page links
// on the doc, and returns the pointer strings that could not be resolved.
pub fn (page Page) process_links(paths map[string]string) ![]string {
	mut missing := map[string]bool{}
	mut doc := page.doc_immute()!
	for mut element in doc.children_recursive() {
		if mut element is elements.Link {
			if element.cat == .html || (element.cat == .anchor && element.url == '') {
				// is external link or same page anchor, nothing to process
				// maybe in the future check if exists
				continue
			}
			mut name := texttools.name_fix_keepext(element.filename)
			mut site := texttools.name_fix(element.site)
			if site == '' {
				// no site given: the link targets the page's own collection
				site = page.collection_name
			}

			ptr := pointer.pointer_new(text: '${site}:${name}', collection: page.collection_name)!
			mut path := paths[ptr.str()] or {
				missing[ptr.str()] = true
				continue
			}

			if ptr.cat == .page && ptr.str() !in doc.linked_pages {
				doc.linked_pages << ptr.str()
			}

			// same collection -> relative within the directory, else go up one
			if ptr.collection == page.collection_name {
				path = './' + path.all_after_first('/')
			} else {
				path = '../${path}'
			}

			if ptr.cat == .image && element.extra.trim_space() != '' {
				path += ' ${element.extra.trim_space()}'
			}

			mut out := '[${element.description}](${path})'
			if ptr.cat == .image {
				out = '!${out}'
			}

			element.content = out
			element.processed = false
			element.state = .linkprocessed
			element.process()!
		}
	}

	return missing.keys()
}
|
||||
20
lib/data/doctree/collection/data/process_link_test.v
Normal file
20
lib/data/doctree/collection/data/process_link_test.v
Normal file
@@ -0,0 +1,20 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
// A same-collection page link must be rewritten to a './' relative path.
fn test_process_link() {
	mut page1_path := pathlib.get_file(path: '/tmp/page1', create: true)!
	page1_content := '[some page description](col1:page1.md)\n'
	page1_path.write(page1_content)!
	mut page1 := new_page(name: 'page1', path: page1_path, collection_name: 'col1')!

	// pointer-string -> exported path map, as export_pages would supply it
	paths := {
		'col1:page1.md': 'col1/page1.md'
		'col2:img.png':  'col2/img/img.png'
	}

	notfound := page1.process_links(paths)!
	assert notfound.len == 0

	assert page1.get_markdown()! == '[some page description](./page1.md)\n'
}
|
||||
24
lib/data/doctree/collection/data/process_macros.v
Normal file
24
lib/data/doctree/collection/data/process_macros.v
Normal file
@@ -0,0 +1,24 @@
|
||||
module data
|
||||
|
||||
import freeflowuniverse.herolib.core.playmacros
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements { Action }
|
||||
|
||||
// Expands every macro action in the page via playmacros, then — if any
// macro ran — reparses the resulting markdown and recurses so macros
// produced by macros are also expanded.
// NOTE(review): `page.changed` is set even when a macro returns empty
// content; recursion terminates only because reparse_doc clears the flag
// and an already-expanded macro is expected not to re-trigger — confirm.
pub fn (mut page Page) process_macros() ! {
	mut mydoc := page.doc()!
	for mut element in mydoc.children_recursive() {
		if mut element is Action {
			if element.action.actiontype == .macro {
				content := playmacros.play_macro(element.action)!
				page.changed = true
				if content.len > 0 {
					// replace the action element with the macro output
					element.content = content
				}
			}
		}
	}

	if page.changed {
		page.reparse_doc(page.doc.markdown()!)!
		page.process_macros()!
	}
}
|
||||
64
lib/data/doctree/collection/error.v
Normal file
64
lib/data/doctree/collection/error.v
Normal file
@@ -0,0 +1,64 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib { Path }
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// Categories of errors that can occur while scanning, processing or
// exporting a collection.
pub enum CollectionErrorCat {
	unknown
	image_double    // two images with the same normalized name
	file_double     // two files with the same normalized name
	file_not_found
	image_not_found
	page_double     // two pages with the same normalized name
	page_not_found
	sidebar
	circular_import
	def
	summary
	include
}
|
||||
|
||||
// Error recorded against a collection entry; embeds V's Error so it can
// also be returned directly when fail_on_error is set.
pub struct CollectionError {
	Error
pub mut:
	path Path               // entry the error relates to
	msg  string             // human-readable description
	cat  CollectionErrorCat // error category for reporting
}
|
||||
|
||||
// Multi-line, human-readable rendering of the collection error.
pub fn (e CollectionError) msg() string {
	return 'collection error:\n\tPath: ${e.path.path}\n\tError message: ${e.msg}\n\tCategory: ${e.cat}'
}
|
||||
|
||||
// Records a collection error. When fail_on_error is set the error is
// returned immediately (hard failure); otherwise it is appended to
// collection.errors and printed to stderr.
pub fn (mut collection Collection) error(args CollectionError) ! {
	if collection.fail_on_error {
		return args
	}

	collection.errors << args
	console.print_stderr(args.msg)
}
|
||||
|
||||
// Error returned when a page/file/image lookup by name fails.
pub struct ObjNotFound {
	Error
pub:
	name       string // object name that was requested
	collection string // collection that was searched
	info       string // optional extra context
}
|
||||
|
||||
// Human-readable rendering of the lookup failure.
pub fn (err ObjNotFound) msg() string {
	return 'Could not find object with name ${err.name} in collection ${err.collection}: ${err.info}'
}
|
||||
|
||||
// write errors.md in the collection, this allows us to see what the errors are
// When `errors` is empty the report file is deleted instead.
// NOTE(review): the errors.md template iterates `collection.errors`, not the
// `errors` parameter passed here — confirm the two are meant to coincide.
pub fn (collection Collection) errors_report(dest_ string, errors []CollectionError) ! {
	mut dest := pathlib.get_file(path: dest_, create: true)!
	if errors.len == 0 {
		dest.delete()!
		return
	}
	c := $tmpl('template/errors.md')
	dest.write(c)!
}
|
||||
129
lib/data/doctree/collection/export.v
Normal file
129
lib/data/doctree/collection/export.v
Normal file
@@ -0,0 +1,129 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.core.texttools.regext
|
||||
import os
|
||||
import freeflowuniverse.herolib.data.doctree.pointer
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
|
||||
// Arguments for Collection.export.
@[params]
pub struct CollectionExportArgs {
pub mut:
	destination    pathlib.Path @[required] // root directory the collection is exported under
	file_paths     map[string]string        // pointer-string -> exported path, used to rewrite links
	reset          bool = true              // overwrite already-exported files/images
	keep_structure bool // whether the structure of the src collection will be preserved or not
	exclude_errors bool // whether error reporting should be exported as well
	replacer       ?regext.ReplaceInstructions // optional text replacements applied to page markdown
}
|
||||
|
||||
// Exports the collection under <destination>/<name>: writes the
// .collection marker file, all pages (with links rewritten via
// args.file_paths), files, images, the linked-pages index and, unless
// excluded, an errors.md report combining scan and export errors.
pub fn (c Collection) export(args CollectionExportArgs) ! {
	dir_src := pathlib.get_dir(path: args.destination.path + '/' + c.name, create: true)!

	// marker recording the collection name and its source location
	mut cfile := pathlib.get_file(path: dir_src.path + '/.collection', create: true)! // will auto save it
	cfile.write("name:${c.name} src:'${c.path.path}'")!

	mut errors := c.errors.clone()
	errors << export_pages(c.path.path, c.pages.values(),
		dir_src:        dir_src
		file_paths:     args.file_paths
		keep_structure: args.keep_structure
		replacer:       args.replacer
	)!

	c.export_files(dir_src, args.reset)!
	c.export_images(dir_src, args.reset)!
	c.export_linked_pages(dir_src)!

	if !args.exclude_errors {
		c.errors_report('${dir_src.path}/errors.md', errors)!
	}
}
|
||||
|
||||
// Arguments for export_pages.
@[params]
pub struct ExportPagesArgs {
pub mut:
	dir_src        pathlib.Path      // destination collection directory
	file_paths     map[string]string // pointer-string -> exported path, used to rewrite links
	keep_structure bool // whether the structure of the src collection will be preserved or not
	replacer       ?regext.ReplaceInstructions // optional text replacements applied to page markdown
}
|
||||
|
||||
// creates page file, processes page links, then writes page
// Returns one CollectionError per link target that could not be resolved.
fn export_pages(col_path string, pages []&data.Page, args ExportPagesArgs) ![]CollectionError {
	mut errors := []CollectionError{}
	for page in pages {
		// destination path: mirror the source tree, or flatten to <dir>/<name>.md
		dest := if args.keep_structure {
			relpath := page.path.path.trim_string_left(col_path)
			'${args.dir_src.path}/${relpath}'
		} else {
			'${args.dir_src.path}/${page.name}.md'
		}

		not_found := page.process_links(args.file_paths)!

		// turn each unresolved pointer into a categorized collection error
		for pointer_str in not_found {
			ptr := pointer.pointer_new(text: pointer_str)!
			cat := match ptr.cat {
				.page { CollectionErrorCat.page_not_found }
				.image { CollectionErrorCat.image_not_found }
				else { CollectionErrorCat.file_not_found }
			}
			errors << CollectionError{
				path: page.path
				msg:  '${ptr.cat} ${ptr.str()} not found'
				cat:  cat
			}
		}

		mut dest_path := pathlib.get_file(path: dest, create: true)!
		mut markdown := page.get_markdown()!
		if mut replacer := args.replacer {
			markdown = replacer.replace(text: markdown)!
		}

		dest_path.write(markdown)!
	}
	return errors
}
|
||||
|
||||
// Copies the collection's files into <dir_src>/img/, skipping files that
// already exist unless reset is set.
// NOTE(review): non-image files land in the img/ directory too — confirm
// this is intended rather than a copy of export_images.
fn (c Collection) export_files(dir_src pathlib.Path, reset bool) ! {
	for _, file in c.files {
		mut d := '${dir_src.path}/img/${file.name}.${file.ext}'
		if reset || !os.exists(d) {
			file.copy(d)!
		}
	}
}
|
||||
|
||||
// Copies the collection's images into <dir_src>/img/, skipping images that
// already exist unless reset is set.
fn (c Collection) export_images(dir_src pathlib.Path, reset bool) ! {
	for _, file in c.images {
		mut d := '${dir_src.path}/img/${file.name}.${file.ext}'
		if reset || !os.exists(d) {
			file.copy(d)!
		}
	}
}
|
||||
|
||||
// Writes the '.linkedpages' index: one linked-page pointer per line.
fn (c Collection) export_linked_pages(dir_src pathlib.Path) ! {
	linked := c.get_collection_linked_pages()!
	mut index_file := pathlib.get_file(path: dir_src.path + '/.linkedpages', create: true)!
	index_file.write(linked.join_lines())!
}
|
||||
|
||||
// Deduplicated union of all pages linked from any page of the collection.
fn (c Collection) get_collection_linked_pages() ![]string {
	mut seen := map[string]bool{}
	for _, page in c.pages {
		for linked in page.get_linked_pages()! {
			seen[linked] = true
		}
	}
	return seen.keys()
}
|
||||
47
lib/data/doctree/collection/export_test.v
Normal file
47
lib/data/doctree/collection/export_test.v
Normal file
@@ -0,0 +1,47 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
// Fixture locations for the export test.
const test_dir = '${os.dir(@FILE)}/testdata/export_test'
const tree_dir = '${test_dir}/mytree'            // source tree scanned by the test
const export_dir = '${test_dir}/export'          // actual export output (gitignored)
const export_expected_dir = '${test_dir}/export_expected' // golden output
|
||||
|
||||
// Start each run with an empty export output directory.
fn testsuite_begin() {
	pathlib.get_dir(path: export_dir, empty: true)!
}
|
||||
|
||||
// Clean up the export output directory after the suite.
fn testsuite_end() {
	pathlib.get_dir(path: export_dir, empty: true)!
}
|
||||
|
||||
// Exports a scanned collection and compares each produced file against the
// golden copies under export_expected.
fn test_export() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('${tree_dir}/dir1')
	}
	col.scan()!

	path_dest := pathlib.get_dir(path: '${export_dir}/src', create: true)!
	col.export(
		destination: path_dest
		file_paths:  {
			'col2:file3.md': 'col2/file3.md'
		}
	)!

	col1_path := '${export_dir}/src/col1'
	expected_col1_path := '${export_expected_dir}/src/col1'
	assert os.read_file('${col1_path}/.collection')! == os.read_file('${expected_col1_path}/.collection')!
	assert os.read_file('${col1_path}/.linkedpages')! == os.read_file('${expected_col1_path}/.linkedpages')!
	assert os.read_file('${col1_path}/errors.md')! == os.read_file('${expected_col1_path}/errors.md')!
	assert os.read_file('${col1_path}/file1.md')! == os.read_file('${expected_col1_path}/file1.md')!
	assert os.read_file('${col1_path}/file2.md')! == os.read_file('${expected_col1_path}/file2.md')!
}
|
||||
45
lib/data/doctree/collection/getters.v
Normal file
45
lib/data/doctree/collection/getters.v
Normal file
@@ -0,0 +1,45 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
|
||||
// gets page with specified name from collection; ObjNotFound when absent
pub fn (collection Collection) page_get(name string) !&data.Page {
	return collection.pages[name] or {
		return ObjNotFound{
			collection: collection.name
			name: name
		}
	}
}
|
||||
|
||||
// true when a page with this (normalized) name exists in the collection
pub fn (collection Collection) page_exists(name string) bool {
	return name in collection.pages
}
|
||||
|
||||
// gets image with specified name from collection; ObjNotFound when absent
pub fn (collection Collection) get_image(name string) !&data.File {
	return collection.images[name] or {
		return ObjNotFound{
			collection: collection.name
			name: name
		}
	}
}
|
||||
|
||||
// true when an image with this (normalized) name exists in the collection
pub fn (collection Collection) image_exists(name string) bool {
	return name in collection.images
}
|
||||
|
||||
// gets file with specified name from collection; ObjNotFound when absent
pub fn (collection Collection) get_file(name string) !&data.File {
	return collection.files[name] or {
		return ObjNotFound{
			collection: collection.name
			name: name
		}
	}
}
|
||||
|
||||
// true when a file with this (normalized) name exists in the collection
pub fn (collection Collection) file_exists(name string) bool {
	return name in collection.files
}
|
||||
250
lib/data/doctree/collection/scan.v
Normal file
250
lib/data/doctree/collection/scan.v
Normal file
@@ -0,0 +1,250 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.conversiontools.imagemagick
|
||||
import freeflowuniverse.herolib.core.pathlib { Path }
|
||||
import freeflowuniverse.herolib.data.doctree.pointer
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
|
||||
// walk over one specific collection, find all files and pages
pub fn (mut collection Collection) scan() ! {
	collection.scan_directory(mut collection.path)!
}
|
||||
|
||||
// path is the full path
//
// Recursively walks a directory: resolves/normalizes symlinks, registers
// .md files as pages and everything else as files/images; entries matched
// by should_skip_entry are ignored. When heal is active this MUTATES the
// tree on disk (unlink/relink of symlinks).
fn (mut collection Collection) scan_directory(mut p Path) ! {
	mut entry_list := p.list(recursive: false)!
	for mut entry in entry_list.paths {
		if collection.should_skip_entry(mut entry) {
			continue
		}

		if !entry.exists() {
			collection.error(
				path: entry
				msg: 'Entry ${entry.name()} does not exists'
				cat: .unknown
			)!
			continue
		}

		if mut entry.is_link() {
			link_real_path := entry.realpath() // this is with the symlink resolved
			collection_abs_path := collection.path.absolute()
			if entry.extension_lower() == 'md' {
				// means we are linking pages,this should not be done, need or change
				collection.error(
					path: entry
					msg: 'Markdown files (${entry.path}) must not be linked'
					cat: .unknown
				) or { return error('Failed to collection error ${entry.path}:\n${err}') }
				continue
			}

			if !link_real_path.starts_with(collection_abs_path) {
				// means we are not in the collection so we need to copy
				entry.unlink()! // will transform link to become the file or dir it points too
			} else {
				// TODO: why do we need this?
				entry.relink()! // will check that the link is on the file with the shortest path
			}
		}

		if entry.is_dir() {
			collection.scan_directory(mut entry) or {
				return error('Failed to scan directory ${entry.path}:\n${err}')
			}
			continue
		}

		// extensionless entries are neither pages nor assets
		if entry.extension_lower() == '' {
			continue
		}

		match entry.extension_lower() {
			'md' {
				collection.add_page(mut entry) or {
					return error('Failed to add page ${entry.path}:\n${err}')
				}
			}
			else {
				collection.file_image_remember(mut entry) or {
					return error('Failed to remember image ${entry.path}:\n${err}')
				}
			}
		}
	}
}
|
||||
|
||||
// Decides whether a directory entry is excluded from scanning
// (hidden/underscore entries, link files, galleries, defs.md, test files,
// sidebar paths).
fn (mut c Collection) should_skip_entry(mut entry Path) bool {
	entry_name := entry.name()

	// entries that start with . or _ are ignored
	if entry_name.starts_with('.') || entry_name.starts_with('_') {
		return true
	}

	// TODO: why do we skip all these???

	if entry.cat == .linkfile {
		// means we link to a file which is in the folder, so can be loaded later, nothing to do here
		return true
	}

	if entry.is_dir() && entry_name.starts_with('gallery_') {
		return true
	}

	if entry_name.to_lower() == 'defs.md' {
		return true
	}

	if entry_name.contains('.test') {
		return true
	}

	// NOTE(review): this matches on the FULL path prefix, not the entry
	// name — confirm 'sidebar' is only expected at the path start
	if entry.path.starts_with('sidebar') {
		return true
	}

	return false
}
|
||||
|
||||
// remember the file, so we know if we have duplicates
// also fixes the name
//
// Files/videos/html go through add_file; images may be downsized when heal
// is on, then deduplicated by name keeping the shortest-path copy.
fn (mut collection Collection) file_image_remember(mut p Path) ! {
	if collection.heal {
		p.path_normalize()!
	}
	mut ptr := pointer.pointer_new(
		collection: collection.name
		text: p.name()
	)!

	if ptr.is_file_video_html() {
		collection.add_file(mut p)!
		return
	}

	if ptr.is_image() {
		if collection.heal && imagemagick.installed() {
			mut image := imagemagick.image_new(mut p)

			imagemagick.downsize(path: p.path)!
			// after downsize it could be the path has been changed, need to set it on the file
			if p.path != image.path.path {
				p.path = image.path.path
				p.check()
			}
		}

		// TODO: what are we trying to do?
		if !collection.image_exists(ptr.name) {
			collection.add_image(mut p)!
		}

		mut image_file := collection.get_image(ptr.name)!
		mut image_file_path := image_file.path.path
		if p.path.len <= image_file_path.len {
			// nothing to be done, because the already existing file is shortest or equal
			return
		}
		// file double is the one who already existed, need to change the path and can delete original
		// TODO: this is clearly a bug
		// NOTE(review): the line below is a self-assignment (no-op); from the
		// comment above it presumably should repoint image_file at some path —
		// confirm intended target before changing behavior.
		image_file.path = image_file.path
		image_file.init()!
		if collection.heal {
			p.delete()!
		}

		return
	}

	return error('unsupported file type: ${ptr.extension}')
}
|
||||
|
||||
// add a page to the collection, specify existing path
// the page will be parsed as markdown
// A duplicate page name is recorded as a .page_double collection error.
pub fn (mut collection Collection) add_page(mut p Path) ! {
	if collection.heal {
		p.path_normalize() or { return error('Failed to normalize path ${p.path}\n${err}') }
	}

	mut ptr := pointer.pointer_new(
		collection: collection.name
		text: p.name()
	) or { return error('Failed to get pointer for ${p.name()}\n${err}') }

	// in case heal is true pointer_new can normalize the path
	if collection.page_exists(ptr.name) {
		collection.error(
			path: p
			msg: 'Can\'t add ${p.path}: a page named ${ptr.name} already exists in the collection'
			cat: .page_double
		) or { return error('Failed to report collection error for ${p.name()}\n${err}') }
		return
	}

	new_page := data.new_page(
		name: ptr.name
		path: p
		collection_name: collection.name
	) or { return error('Failed to create new page for ${ptr.name}\n${err}') }

	collection.pages[ptr.name] = &new_page
}
|
||||
|
||||
// add a file to the collection, specify existing path
// A duplicate file name is recorded as a .file_double collection error.
pub fn (mut collection Collection) add_file(mut p Path) ! {
	if collection.heal {
		p.path_normalize()!
	}
	mut ptr := pointer.pointer_new(
		collection: collection.name
		text: p.name()
	)!

	// in case heal is true pointer_new can normalize the path
	if collection.file_exists(ptr.name) {
		collection.error(
			path: p
			msg: 'Can\'t add ${p.path}: a file named ${ptr.name} already exists in the collection'
			cat: .file_double
		)!
		return
	}

	mut new_file := data.new_file(
		path: p
		collection_path: collection.path
		collection_name: collection.name
	)!
	collection.files[ptr.name] = &new_file
}
|
||||
|
||||
// add a image to the collection, specify existing path
// A duplicate image name is recorded as a .image_double collection error.
pub fn (mut collection Collection) add_image(mut p Path) ! {
	if collection.heal {
		p.path_normalize()!
	}
	mut ptr := pointer.pointer_new(
		collection: collection.name
		text: p.name()
	)!

	// in case heal is true pointer_new can normalize the path
	if collection.image_exists(ptr.name) {
		collection.error(
			path: p
			msg: 'Can\'t add ${p.path}: a file named ${ptr.name} already exists in the collection'
			cat: .image_double
		)!
		return
	}

	// NOTE(review): unlike add_file this builds data.File directly and omits
	// collection_name — confirm that is intentional.
	mut image_file := &data.File{
		path: p
		collection_path: collection.path
	}
	image_file.init()!
	collection.images[ptr.name] = image_file
}
|
||||
121
lib/data/doctree/collection/scan_test.v
Normal file
121
lib/data/doctree/collection/scan_test.v
Normal file
@@ -0,0 +1,121 @@
|
||||
module collection
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
// Adding pages registers them under their normalized names
// (':' becomes '_').
fn test_add_page_success() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut page1_path := pathlib.get_file(path: '/tmp/col1/page1.md', create: true)!
	col.add_page(mut page1_path)!
	assert col.page_exists('page1')

	mut page2_path := pathlib.get_file(path: '/tmp/col1/page:hamada.md', create: true)!
	col.add_page(mut page2_path)!
	assert col.page_exists('page_hamada')
}
|
||||
|
||||
// Adding a second page with the same normalized name must not fail hard;
// it is reported as a collection error instead.
fn test_add_page_already_exists() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut page1_path := pathlib.get_file(path: '/tmp/col1/page1.md', create: true)!
	col.add_page(mut page1_path)!
	assert col.page_exists('page1')

	mut page2_path := pathlib.get_file(path: '/tmp/col1/dir1/page1.md', create: true)!
	col.add_page(mut page2_path)!

	assert col.errors.len == 1
	assert col.errors[0].msg == "Can't add /tmp/col1/dir1/page1.md: a page named page1 already exists in the collection"
}
|
||||
|
||||
// Images register under normalized names (':' becomes '_').
fn test_add_image_success() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut img1_path := pathlib.get_file(path: '/tmp/col1/image.png', create: true)!
	col.add_image(mut img1_path)!
	assert col.image_exists('image')

	mut img2_path := pathlib.get_file(path: '/tmp/col1/image:2.jpg', create: true)!
	col.add_image(mut img2_path)!
	assert col.image_exists('image_2')
}
|
||||
|
||||
// Files register under normalized names (':' becomes '_').
fn test_add_file_success() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut f1_path := pathlib.get_file(path: '/tmp/col1/file1.html', create: true)!
	col.add_file(mut f1_path)!
	assert col.file_exists('file1')

	mut f2_path := pathlib.get_file(path: '/tmp/col1/file:2.mp4', create: true)!
	col.add_file(mut f2_path)!
	assert col.file_exists('file_2')
}
|
||||
|
||||
// file_image_remember routes images to the image map and everything else
// (including unknown extensions) to the file map.
fn test_file_image_remember() {
	mut col := Collection{
		name: 'col1'
		path: pathlib.get('/tmp/col1')
	}

	mut file1_path := pathlib.get_file(path: '/tmp/col1/image.png', create: true)!
	col.file_image_remember(mut file1_path)!
	assert col.image_exists('image')

	mut file2_path := pathlib.get_file(path: '/tmp/col1/file.html', create: true)!
	col.file_image_remember(mut file2_path)!
	assert col.file_exists('file')

	mut file3_path := pathlib.get_file(path: '/tmp/col1/file2.unknownext', create: true)!
	col.file_image_remember(mut file3_path)!
	assert col.file_exists('file2')
}
|
||||
|
||||
fn test_scan_directory() {
|
||||
mut file := pathlib.get_file(path: '/tmp/mytree/dir1/.collection', create: true)!
|
||||
file.write('name:col1')!
|
||||
file = pathlib.get_file(path: '/tmp/mytree/dir1/file1.md', create: true)!
|
||||
file = pathlib.get_file(path: '/tmp/mytree/dir1/file2.html', create: true)!
|
||||
file = pathlib.get_file(path: '/tmp/mytree/dir1/file3.png', create: true)!
|
||||
file = pathlib.get_file(path: '/tmp/mytree/dir1/dir2/file4.md', create: true)!
|
||||
file = pathlib.get_file(path: '/tmp/mytree/dir1/.shouldbeskipped', create: true)!
|
||||
file = pathlib.get_file(path: '/tmp/mytree/dir1/_shouldbeskipped', create: true)!
|
||||
|
||||
mut col := Collection{
|
||||
name: 'col1'
|
||||
path: pathlib.get('/tmp/mytree/dir1')
|
||||
}
|
||||
|
||||
col.scan()!
|
||||
assert col.page_exists('file1')
|
||||
assert col.file_exists('file2')
|
||||
assert col.image_exists('file3')
|
||||
assert col.page_exists('file4')
|
||||
assert !col.file_exists('.shouldbeskipped')
|
||||
assert !col.file_exists('_shouldbeskipped')
|
||||
}
|
||||
11
lib/data/doctree/collection/template/errors.md
Normal file
11
lib/data/doctree/collection/template/errors.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# Errors
|
||||
|
||||
@for error in collection.errors
|
||||
|
||||
## @error.cat
|
||||
|
||||
path: @error.path.path
|
||||
|
||||
msg: @error.msg
|
||||
|
||||
@end
|
||||
1
lib/data/doctree/collection/testdata/.gitignore
vendored
Normal file
1
lib/data/doctree/collection/testdata/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export_test/export
|
||||
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/.collection
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/.collection
vendored
Normal file
@@ -0,0 +1 @@
|
||||
name:col1 src:'/Users/timurgordon/code/github/freeflowuniverse/crystallib/crystallib/data/doctree/collection/testdata/export_test/mytree/dir1'
|
||||
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/.linkedpages
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/.linkedpages
vendored
Normal file
@@ -0,0 +1 @@
|
||||
col2:file3.md
|
||||
9
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/errors.md
vendored
Normal file
9
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/errors.md
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
# Errors
|
||||
|
||||
|
||||
## page_not_found
|
||||
|
||||
path: /Users/timurgordon/code/github/freeflowuniverse/crystallib/crystallib/data/doctree/collection/testdata/export_test/mytree/dir1/dir2/file1.md
|
||||
|
||||
msg: page col3:file5.md not found
|
||||
|
||||
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/file1.md
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/file1.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[not existent page](col3:file5.md)
|
||||
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/file2.md
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/file2.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[some page](../col2/file3.md)
|
||||
0
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/img/image.png
vendored
Normal file
0
lib/data/doctree/collection/testdata/export_test/export_expected/src/col1/img/image.png
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/mytree/dir1/.collection
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/mytree/dir1/.collection
vendored
Normal file
@@ -0,0 +1 @@
|
||||
name:col1
|
||||
1
lib/data/doctree/collection/testdata/export_test/mytree/dir1/dir2/file1.md
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/mytree/dir1/dir2/file1.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[not existent page](col3:file5.md)
|
||||
1
lib/data/doctree/collection/testdata/export_test/mytree/dir1/file2.md
vendored
Normal file
1
lib/data/doctree/collection/testdata/export_test/mytree/dir1/file2.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[some page](col2:file3.md)
|
||||
0
lib/data/doctree/collection/testdata/export_test/mytree/dir1/image.png
vendored
Normal file
0
lib/data/doctree/collection/testdata/export_test/mytree/dir1/image.png
vendored
Normal file
45
lib/data/doctree/error.v
Normal file
45
lib/data/doctree/error.v
Normal file
@@ -0,0 +1,45 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.data.doctree.pointer
|
||||
|
||||
pub struct ObjNotFound {
|
||||
Error
|
||||
pub:
|
||||
name string
|
||||
collection string
|
||||
info string
|
||||
}
|
||||
|
||||
pub fn (err ObjNotFound) msg() string {
|
||||
return '"Could not find object with name ${err.name} in collection:${err.collection}.\n${err.info}'
|
||||
}
|
||||
|
||||
pub struct CollectionNotFound {
|
||||
Error
|
||||
pub:
|
||||
pointer pointer.Pointer
|
||||
msg string
|
||||
}
|
||||
|
||||
pub fn (err CollectionNotFound) msg() string {
|
||||
if err.msg.len > 0 {
|
||||
return err.msg
|
||||
}
|
||||
return '"Cannot find collection ${err.pointer} in tree.\n}'
|
||||
}
|
||||
|
||||
// the next is our custom error for objects not found
|
||||
pub struct NoOrTooManyObjFound {
|
||||
Error
|
||||
pub:
|
||||
tree &Tree
|
||||
pointer pointer.Pointer
|
||||
nr int
|
||||
}
|
||||
|
||||
pub fn (err NoOrTooManyObjFound) msg() string {
|
||||
if err.nr > 0 {
|
||||
return 'Too many obj found for ${err.tree.name}. Pointer: ${err.pointer}'
|
||||
}
|
||||
return 'No obj found for ${err.tree.name}. Pointer: ${err.pointer}'
|
||||
}
|
||||
92
lib/data/doctree/export.v
Normal file
92
lib/data/doctree/export.v
Normal file
@@ -0,0 +1,92 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.data.doctree.collection { Collection }
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.core.texttools.regext
|
||||
|
||||
@[params]
|
||||
pub struct TreeExportArgs {
|
||||
pub mut:
|
||||
destination string @[required]
|
||||
reset bool = true
|
||||
keep_structure bool // wether the structure of the src collection will be preserved or not
|
||||
exclude_errors bool // wether error reporting should be exported as well
|
||||
toreplace string
|
||||
concurrent bool = true
|
||||
}
|
||||
|
||||
// export all collections to chosen directory .
|
||||
// all names will be in name_fixed mode .
|
||||
// all images in img/
|
||||
pub fn (mut tree Tree) export(args TreeExportArgs) ! {
|
||||
console.print_header('export tree: name:${tree.name} to ${args.destination}')
|
||||
if args.toreplace.len > 0 {
|
||||
mut ri := regext.regex_instructions_new()
|
||||
ri.add_from_text(args.toreplace)!
|
||||
tree.replacer = ri
|
||||
}
|
||||
|
||||
mut dest_path := pathlib.get_dir(path: args.destination, create: true)!
|
||||
if args.reset {
|
||||
dest_path.empty()!
|
||||
}
|
||||
|
||||
tree.process_defs()!
|
||||
tree.process_includes()!
|
||||
tree.process_actions_and_macros()! // process other actions and macros
|
||||
|
||||
file_paths := tree.generate_paths()!
|
||||
|
||||
console.print_green('exporting collections')
|
||||
|
||||
if args.concurrent {
|
||||
mut ths := []thread !{}
|
||||
for _, col in tree.collections {
|
||||
ths << spawn fn (col Collection, dest_path pathlib.Path, file_paths map[string]string, args TreeExportArgs) ! {
|
||||
col.export(
|
||||
destination: dest_path
|
||||
file_paths: file_paths
|
||||
reset: args.reset
|
||||
keep_structure: args.keep_structure
|
||||
exclude_errors: args.exclude_errors
|
||||
// TODO: replacer: tree.replacer
|
||||
)!
|
||||
}(col, dest_path, file_paths, args)
|
||||
}
|
||||
for th in ths {
|
||||
th.wait() or { panic(err) }
|
||||
}
|
||||
} else {
|
||||
for _, mut col in tree.collections {
|
||||
col.export(
|
||||
destination: dest_path
|
||||
file_paths: file_paths
|
||||
reset: args.reset
|
||||
keep_structure: args.keep_structure
|
||||
exclude_errors: args.exclude_errors
|
||||
replacer: tree.replacer
|
||||
)!
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut t Tree) generate_paths() !map[string]string {
|
||||
mut paths := map[string]string{}
|
||||
for _, col in t.collections {
|
||||
for _, page in col.pages {
|
||||
paths['${col.name}:${page.name}.md'] = '${col.name}/${page.name}.md'
|
||||
}
|
||||
|
||||
for _, image in col.images {
|
||||
paths['${col.name}:${image.file_name()}'] = '${col.name}/img/${image.file_name()}'
|
||||
}
|
||||
|
||||
for _, file in col.files {
|
||||
paths['${col.name}:${file.file_name()}'] = '${col.name}/img/${file.file_name()}'
|
||||
}
|
||||
}
|
||||
|
||||
return paths
|
||||
}
|
||||
82
lib/data/doctree/export_test.v
Normal file
82
lib/data/doctree/export_test.v
Normal file
@@ -0,0 +1,82 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
const test_dir = '${os.dir(@FILE)}/testdata/export_test'
|
||||
const tree_dir = '${test_dir}/mytree'
|
||||
const export_dir = '${test_dir}/export'
|
||||
const export_expected_dir = '${test_dir}/export_expected'
|
||||
|
||||
fn testsuite_begin() {
|
||||
pathlib.get_dir(
|
||||
path: export_dir
|
||||
empty: true
|
||||
)!
|
||||
}
|
||||
|
||||
fn testsuite_end() {
|
||||
pathlib.get_dir(
|
||||
path: export_dir
|
||||
empty: true
|
||||
)!
|
||||
}
|
||||
|
||||
fn test_export() {
|
||||
/*
|
||||
tree_root/
|
||||
dir1/
|
||||
.collection
|
||||
dir2/
|
||||
file1.md
|
||||
file2.md
|
||||
image.png
|
||||
dir3/
|
||||
.collection
|
||||
file3.md
|
||||
|
||||
export:
|
||||
export_dest/
|
||||
src/
|
||||
col1/
|
||||
.collection
|
||||
.linkedpages
|
||||
errors.md
|
||||
img/
|
||||
image.png
|
||||
file1.md
|
||||
file2.md
|
||||
col2/
|
||||
.collection
|
||||
.linkedpages
|
||||
file3.md
|
||||
|
||||
.edit/
|
||||
|
||||
test:
|
||||
- create tree
|
||||
- add files/pages and collections to tree
|
||||
- export tree
|
||||
- ensure tree structure is valid
|
||||
*/
|
||||
|
||||
mut tree := new(name: 'mynewtree')!
|
||||
tree.add_collection(path: '${tree_dir}/dir1', name: 'col1')!
|
||||
tree.add_collection(path: '${tree_dir}/dir3', name: 'col2')!
|
||||
|
||||
tree.export(destination: '${export_dir}')!
|
||||
|
||||
col1_path := '${export_dir}/col1'
|
||||
expected_col1_path := '${export_expected_dir}/col1'
|
||||
assert os.read_file('${col1_path}/.collection')! == os.read_file('${expected_col1_path}/.collection')!
|
||||
assert os.read_file('${col1_path}/.linkedpages')! == os.read_file('${expected_col1_path}/.linkedpages')!
|
||||
assert os.read_file('${col1_path}/errors.md')! == os.read_file('${expected_col1_path}/errors.md')!
|
||||
assert os.read_file('${col1_path}/file1.md')! == os.read_file('${expected_col1_path}/file1.md')!
|
||||
assert os.read_file('${col1_path}/file2.md')! == os.read_file('${expected_col1_path}/file2.md')!
|
||||
|
||||
col2_path := '${export_dir}/col2'
|
||||
expected_col2_path := '${export_expected_dir}/col2'
|
||||
assert os.read_file('${col2_path}/.linkedpages')! == ''
|
||||
assert os.read_file('${col2_path}/.collection')! == os.read_file('${expected_col2_path}/.collection')!
|
||||
assert os.read_file('${col2_path}/file3.md')! == ''
|
||||
}
|
||||
72
lib/data/doctree/getters.v
Normal file
72
lib/data/doctree/getters.v
Normal file
@@ -0,0 +1,72 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.data.doctree.collection
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
import freeflowuniverse.herolib.data.doctree.pointer
|
||||
|
||||
pub fn (tree Tree) get_collection(name string) !&collection.Collection {
|
||||
col := tree.collections[name] or { return error('collection ${name} not found') }
|
||||
|
||||
return col
|
||||
}
|
||||
|
||||
pub fn (tree Tree) get_collection_with_pointer(p pointer.Pointer) !&collection.Collection {
|
||||
return tree.get_collection(p.collection) or {
|
||||
return CollectionNotFound{
|
||||
pointer: p
|
||||
msg: '${err}'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// get the page from pointer string: $tree:$collection:$name or
|
||||
// $collection:$name or $name
|
||||
pub fn (tree Tree) page_get(pointerstr string) !&data.Page {
|
||||
p := pointer.pointer_new(text: pointerstr)!
|
||||
return tree.get_page_with_pointer(p)!
|
||||
}
|
||||
|
||||
fn (tree Tree) get_page_with_pointer(p pointer.Pointer) !&data.Page {
|
||||
col := tree.get_collection_with_pointer(p)!
|
||||
new_page := col.page_get(p.name)!
|
||||
|
||||
return new_page
|
||||
}
|
||||
|
||||
// get the page from pointer string: $tree:$collection:$name or
|
||||
// $collection:$name or $name
|
||||
pub fn (tree Tree) get_image(pointerstr string) !&data.File {
|
||||
p := pointer.pointer_new(text: pointerstr)!
|
||||
col := tree.get_collection_with_pointer(p)!
|
||||
image := col.get_image(p.name)!
|
||||
|
||||
return image
|
||||
}
|
||||
|
||||
// get the file from pointer string: $tree:$collection:$name or
|
||||
// $collection:$name or $name
|
||||
pub fn (tree Tree) get_file(pointerstr string) !&data.File {
|
||||
p := pointer.pointer_new(text: pointerstr)!
|
||||
col := tree.get_collection_with_pointer(p)!
|
||||
new_file := col.get_file(p.name)!
|
||||
|
||||
return new_file
|
||||
}
|
||||
|
||||
pub fn (tree Tree) page_exists(pointerstr string) bool {
|
||||
p := pointer.pointer_new(text: pointerstr) or { return false }
|
||||
col := tree.get_collection_with_pointer(p) or { return false }
|
||||
return col.page_exists(p.name)
|
||||
}
|
||||
|
||||
pub fn (tree Tree) image_exists(pointerstr string) bool {
|
||||
p := pointer.pointer_new(text: pointerstr) or { return false }
|
||||
col := tree.get_collection_with_pointer(p) or { return false }
|
||||
return col.image_exists(p.name)
|
||||
}
|
||||
|
||||
pub fn (tree Tree) file_exists(pointerstr string) bool {
|
||||
p := pointer.pointer_new(text: pointerstr) or { return false }
|
||||
col := tree.get_collection_with_pointer(p) or { return false }
|
||||
return col.file_exists(p.name)
|
||||
}
|
||||
35
lib/data/doctree/getters_test.v
Normal file
35
lib/data/doctree/getters_test.v
Normal file
@@ -0,0 +1,35 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
fn test_page_get() {
|
||||
mut file1_path := pathlib.get_file(path: '/tmp/mytree/dir1/file2.md', create: true)!
|
||||
file1_path.write('[some page](col2:file3.md)')!
|
||||
mut file2_path := pathlib.get_file(path: '/tmp/mytree/dir1/image.png', create: true)!
|
||||
mut file3_path := pathlib.get_file(path: '/tmp/mytree/dir1/dir2/file1.md', create: true)!
|
||||
file3_path.write('[not existent page](col3:file5.md)')!
|
||||
mut file4_path := pathlib.get_file(path: '/tmp/mytree/dir1/.collection', create: true)!
|
||||
file4_path.write('name:col1')!
|
||||
|
||||
mut file5_path := pathlib.get_file(path: '/tmp/mytree/dir3/.collection', create: true)!
|
||||
file5_path.write('name:col2')!
|
||||
mut file6_path := pathlib.get_file(path: '/tmp/mytree/dir3/file3.md', create: true)!
|
||||
|
||||
mut tree := new(name: 'mynewtree')!
|
||||
tree.add_collection(path: file1_path.parent()!.path, name: 'col1')!
|
||||
tree.add_collection(path: file6_path.parent()!.path, name: 'col2')!
|
||||
|
||||
mut page := tree.page_get('col1:file2.md')!
|
||||
assert page.name == 'file2'
|
||||
|
||||
mut image := tree.get_image('col1:image.png')!
|
||||
assert image.file_name() == 'image.png'
|
||||
|
||||
// these page pointers are faulty
|
||||
|
||||
apple_ptr_faulty0 := 'col3:file1.md'
|
||||
if p := tree.page_get('col3:file1.md') {
|
||||
assert false, 'this should fail: faulty pointer ${apple_ptr_faulty0}'
|
||||
}
|
||||
}
|
||||
106
lib/data/doctree/pointer/pointer.v
Normal file
106
lib/data/doctree/pointer/pointer.v
Normal file
@@ -0,0 +1,106 @@
|
||||
module pointer
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
pub enum PointerCat {
|
||||
page
|
||||
image
|
||||
video
|
||||
file
|
||||
html
|
||||
}
|
||||
|
||||
// links to a page, image or file
|
||||
pub struct Pointer {
|
||||
pub mut:
|
||||
collection string // is the key of a collection
|
||||
name string // is name without extension, all namefixed (lowercase...)
|
||||
cat PointerCat
|
||||
extension string // e.g. jpg
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct NewPointerArgs {
|
||||
pub:
|
||||
// pointer string (e.g. col:page.md)
|
||||
text string
|
||||
// used if text does not have collection information
|
||||
collection string
|
||||
}
|
||||
|
||||
// will return a clean pointer to a page, image or file
|
||||
//```
|
||||
// input is e.g. mycollection:filename.jpg
|
||||
// or filename.jpg
|
||||
// or mypage.md
|
||||
//
|
||||
//```
|
||||
pub fn pointer_new(args NewPointerArgs) !Pointer {
|
||||
mut txt := args.text.trim_space().replace('\\', '/').replace('//', '/')
|
||||
|
||||
// take colon parts out
|
||||
split_colons := txt.split(':')
|
||||
if split_colons.len > 2 {
|
||||
return error("pointer can only have 1 ':' inside. ${txt}")
|
||||
}
|
||||
|
||||
mut collection_name := args.collection
|
||||
mut file_name := ''
|
||||
if split_colons.len == 2 {
|
||||
collection_name = texttools.name_fix_keepext(split_colons[0].all_after_last('/'))
|
||||
file_name = texttools.name_fix_keepext(split_colons[1].all_after_last('/'))
|
||||
}
|
||||
|
||||
if collection_name == '' {
|
||||
return error('provided args do not have collection information: ${args}')
|
||||
}
|
||||
|
||||
if split_colons.len == 1 {
|
||||
file_name = texttools.name_fix_keepext(split_colons[0].all_after_last('/'))
|
||||
}
|
||||
|
||||
split_file_name := file_name.split('.')
|
||||
file_name_no_extension := split_file_name[0]
|
||||
mut extension := 'md'
|
||||
if split_file_name.len > 1 {
|
||||
extension = split_file_name[1]
|
||||
}
|
||||
|
||||
mut file_cat := PointerCat.page
|
||||
match extension {
|
||||
'md' {
|
||||
file_cat = .page
|
||||
}
|
||||
'jpg', 'jpeg', 'svg', 'gif', 'png' {
|
||||
file_cat = .image
|
||||
}
|
||||
'html' {
|
||||
file_cat = .html
|
||||
}
|
||||
'mp4', 'mov' {
|
||||
file_cat = .video
|
||||
}
|
||||
else {
|
||||
file_cat = .file
|
||||
}
|
||||
}
|
||||
|
||||
return Pointer{
|
||||
name: file_name_no_extension
|
||||
collection: collection_name
|
||||
extension: extension
|
||||
cat: file_cat
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (p Pointer) is_image() bool {
|
||||
return p.cat == .image
|
||||
}
|
||||
|
||||
pub fn (p Pointer) is_file_video_html() bool {
|
||||
return p.cat == .file || p.cat == .video || p.cat == .html
|
||||
}
|
||||
|
||||
pub fn (p Pointer) str() string {
|
||||
return '${p.collection}:${p.name}.${p.extension}'
|
||||
}
|
||||
139
lib/data/doctree/pointer/pointer_test.v
Normal file
139
lib/data/doctree/pointer/pointer_test.v
Normal file
@@ -0,0 +1,139 @@
|
||||
module pointer
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// import freeflowuniverse.herolib.core.pathlib
|
||||
// import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
// fn test_pointerpath() {
|
||||
// p1 := pointerpath_new(path: '/tmp/A file.md') or { panic(err) }
|
||||
// console.print_debug(p1)
|
||||
// p1_compare := PointerPath{
|
||||
// pointer: Pointer{
|
||||
// collection: ''
|
||||
// name: 'a_file'
|
||||
// cat: .page
|
||||
// extension: 'md'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// path: pathlib.Path{
|
||||
// path: '/tmp/A file.md'
|
||||
// cat: .unknown
|
||||
// exist: .no
|
||||
// }
|
||||
// }
|
||||
// assert p1 == p1_compare
|
||||
|
||||
// p2 := pointerpath_new(path: '/tmp/ss/A__file.jpeg') or { panic(err) }
|
||||
// p2_compare := PointerPath{
|
||||
// pointer: Pointer{
|
||||
// collection: ''
|
||||
// name: 'a_file'
|
||||
// cat: .image
|
||||
// extension: 'jpeg'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// path: pathlib.Path{
|
||||
// path: '/tmp/A__file.jpeg'
|
||||
// cat: .unknown
|
||||
// exist: .no
|
||||
// }
|
||||
// }
|
||||
|
||||
// // assert p2==p2_compare
|
||||
// }
|
||||
|
||||
fn test_pointer() {
|
||||
// p := pointer_new('Page__.md') or { panic(err) }
|
||||
// console.print_debug(p)
|
||||
// p_compare := Pointer{
|
||||
// collection: ''
|
||||
// name: 'page'
|
||||
// cat: .page
|
||||
// extension: 'md'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// assert p == p_compare
|
||||
}
|
||||
|
||||
// fn test_pointer2() {
|
||||
// p := pointer_new('collectionAAA:Page__.md') or { panic(err) }
|
||||
// console.print_debug(p)
|
||||
// p_compare := Pointer{
|
||||
// name: 'page'
|
||||
// cat: .page
|
||||
// extension: 'md'
|
||||
// collection: 'collectionaaa'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// assert p == p_compare
|
||||
// }
|
||||
|
||||
// fn test_pointer3() {
|
||||
// p := pointer_new('MY_Book:collection_AAA:Page__.md') or { panic(err) }
|
||||
// console.print_debug(p)
|
||||
// p_compare := Pointer{
|
||||
// name: 'page'
|
||||
// cat: .page
|
||||
// extension: 'md'
|
||||
// collection: 'collection_aaa'
|
||||
// book: 'my_book'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// assert p == p_compare
|
||||
// }
|
||||
|
||||
// fn test_pointer4() {
|
||||
// p := pointer_new('MY_Book:collection_AAA:aImage__.jpg') or { panic(err) }
|
||||
// console.print_debug(p)
|
||||
// p_compare := Pointer{
|
||||
// name: 'aimage'
|
||||
// cat: .image
|
||||
// extension: 'jpg'
|
||||
// collection: 'collection_aaa'
|
||||
// book: 'my_book'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// assert p == p_compare
|
||||
// }
|
||||
|
||||
// fn test_pointer5() {
|
||||
// p := pointer_new('MY_Book::aImage__.jpg') or { panic(err) }
|
||||
// console.print_debug(p)
|
||||
// p_compare := Pointer{
|
||||
// name: 'aimage'
|
||||
// cat: .image
|
||||
// extension: 'jpg'
|
||||
// collection: ''
|
||||
// book: 'my_book'
|
||||
// error: ''
|
||||
// state: .unknown
|
||||
// }
|
||||
// assert p == p_compare
|
||||
// }
|
||||
|
||||
// fn test_pointer6() {
|
||||
// p := pointer_new('MY_Book::aImage__.jpg') or { panic(err) }
|
||||
// assert p.str() == 'my_book::aimage.jpg'
|
||||
|
||||
// p2 := pointer_new('ddd:aImage__.jpg') or { panic(err) }
|
||||
// assert p2.str() == 'ddd:aimage.jpg'
|
||||
|
||||
// p3 := pointer_new('aImage__.jpg') or { panic(err) }
|
||||
// assert p3.str() == 'aimage.jpg'
|
||||
|
||||
// i := 40
|
||||
// p4 := pointer_new('collectionAAA:Page__${i}.md') or { panic(err) }
|
||||
// assert p4.str() == 'collectionaaa:page_40.md'
|
||||
// }
|
||||
|
||||
// fn test_pointer7() {
|
||||
// r := texttools.name_fix_keepext('page_40.md')
|
||||
// assert r == 'page_40.md'
|
||||
// }
|
||||
83
lib/data/doctree/process_defs.v
Normal file
83
lib/data/doctree/process_defs.v
Normal file
@@ -0,0 +1,83 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.data.doctree.collection { CollectionError }
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// process definitions (!!wiki.def actions, elements.Def elements)
|
||||
// this must be done before processing includes.
|
||||
pub fn (mut tree Tree) process_defs() ! {
|
||||
console.print_green('Processing tree defs')
|
||||
|
||||
for _, mut col in tree.collections {
|
||||
for _, mut page in col.pages {
|
||||
mut p := page
|
||||
mut c := col
|
||||
tree.process_page_def_actions(mut p, mut c)!
|
||||
}
|
||||
}
|
||||
|
||||
for _, mut col in tree.collections {
|
||||
for _, mut page in mut col.pages {
|
||||
mut p := page
|
||||
errors := tree.replace_page_defs_with_links(mut p)!
|
||||
// report accrued errors when replacing defs with links
|
||||
for err in errors {
|
||||
col.error(err)!
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut tree Tree) process_page_def_actions(mut p data.Page, mut c collection.Collection) ! {
|
||||
def_actions := p.get_def_actions()!
|
||||
if def_actions.len > 1 {
|
||||
c.error(
|
||||
path: p.path
|
||||
msg: 'a page can have at most one def action'
|
||||
cat: .def
|
||||
)!
|
||||
}
|
||||
|
||||
if def_actions.len == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
aliases := p.process_def_action(def_actions[0].id)!
|
||||
for alias in aliases {
|
||||
if alias in tree.defs {
|
||||
c.error(
|
||||
path: p.path
|
||||
msg: 'alias ${alias} is already used'
|
||||
cat: .def
|
||||
)!
|
||||
continue
|
||||
}
|
||||
|
||||
tree.defs[alias] = p
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut tree Tree) replace_page_defs_with_links(mut p data.Page) ![]CollectionError {
|
||||
defs := p.get_def_names()!
|
||||
|
||||
mut def_data := map[string][]string{}
|
||||
mut errors := []CollectionError{}
|
||||
for def in defs {
|
||||
if referenced_page := tree.defs[def] {
|
||||
def_data[def] = [referenced_page.key(), referenced_page.alias]
|
||||
} else {
|
||||
// accrue errors that occur
|
||||
errors << CollectionError{
|
||||
path: p.path
|
||||
msg: 'def ${def} is not defined'
|
||||
cat: .def
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
p.set_def_links(def_data)!
|
||||
// return accrued collection errors for collection to handle
|
||||
return errors
|
||||
}
|
||||
26
lib/data/doctree/process_defs_test.v
Normal file
26
lib/data/doctree/process_defs_test.v
Normal file
@@ -0,0 +1,26 @@
|
||||
module doctree
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
|
||||
const test_dir = '${os.dir(@FILE)}/testdata/process_defs_test'
|
||||
|
||||
fn test_process_defs() {
|
||||
/*
|
||||
1- use files with def actions and elements from testdata
|
||||
2- create tree
|
||||
3- invoke process defs
|
||||
4- check pages markdown
|
||||
*/
|
||||
mut tree := new(name: 'mynewtree')!
|
||||
tree.add_collection(path: '${test_dir}/col1', name: 'col1')!
|
||||
tree.add_collection(path: '${test_dir}/col2', name: 'col2')!
|
||||
tree.process_defs()!
|
||||
|
||||
mut page1 := tree.page_get('col1:page1.md')!
|
||||
assert page1.get_markdown()! == ''
|
||||
|
||||
mut page2 := tree.page_get('col2:page2.md')!
|
||||
assert page2.get_markdown()! == '[about us](col1:page1.md)\n[about us](col1:page1.md)\n[about us](col1:page1.md)'
|
||||
}
|
||||
153
lib/data/doctree/process_includes.v
Normal file
153
lib/data/doctree/process_includes.v
Normal file
@@ -0,0 +1,153 @@
|
||||
module doctree
|
||||
|
||||
// import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
import freeflowuniverse.herolib.data.doctree.pointer
|
||||
import freeflowuniverse.herolib.data.doctree.collection { CollectionError }
|
||||
import freeflowuniverse.herolib.data.doctree.collection.data
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub fn (mut tree Tree) process_includes() ! {
|
||||
console.print_green('Processing page includes')
|
||||
graph := tree.generate_pages_graph()!
|
||||
|
||||
mut indegree := map[string]int{}
|
||||
for _, c in tree.collections {
|
||||
for _, p in c.pages {
|
||||
indegree[p.key()] = 0
|
||||
}
|
||||
}
|
||||
|
||||
for _, children in graph {
|
||||
for child in children.keys() {
|
||||
indegree[child] += 1
|
||||
}
|
||||
}
|
||||
|
||||
mut queue := []string{}
|
||||
for key, degree in indegree {
|
||||
if degree == 0 {
|
||||
queue << key
|
||||
}
|
||||
}
|
||||
|
||||
for queue.len > 0 {
|
||||
front := queue[0]
|
||||
queue = queue[1..]
|
||||
|
||||
mut page := tree.page_get(front)!
|
||||
mut col := tree.get_collection(page.collection_name)!
|
||||
|
||||
// process page
|
||||
for element in page.get_include_actions()! {
|
||||
page_pointer := get_include_page_pointer(col.name, element.action) or { continue }
|
||||
|
||||
mut include_page := tree.get_page_with_pointer(page_pointer) or { continue }
|
||||
|
||||
page.set_element_content_no_reparse(element.id, include_page.get_markdown()!)!
|
||||
}
|
||||
|
||||
// update indegree
|
||||
for child in graph[page.key()].keys() {
|
||||
indegree[child] -= 1
|
||||
if indegree[child] == 0 {
|
||||
queue << child
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for key, degree in indegree {
|
||||
if degree == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
mut page := tree.page_get(key)!
|
||||
mut col := tree.get_collection(page.collection_name)!
|
||||
col.error(
|
||||
path: page.path
|
||||
msg: 'page ${key} is in an include cycle'
|
||||
cat: .circular_import
|
||||
)!
|
||||
}
|
||||
}
|
||||
|
||||
fn get_include_page_pointer(collection_name string, a playbook.Action) !pointer.Pointer {
|
||||
mut page_pointer_str := a.params.get('page')!
|
||||
|
||||
// handle includes
|
||||
mut page_pointer := pointer.pointer_new(collection: collection_name, text: page_pointer_str)!
|
||||
if page_pointer.collection == '' {
|
||||
page_pointer.collection = collection_name
|
||||
}
|
||||
|
||||
return page_pointer
|
||||
}
|
||||
|
||||
fn (mut tree Tree) generate_pages_graph() !map[string]map[string]bool {
|
||||
mut graph := map[string]map[string]bool{}
|
||||
mut ths := []thread !map[string]map[string]bool{}
|
||||
for _, mut col in tree.collections {
|
||||
ths << spawn fn (mut tree Tree, col &collection.Collection) !map[string]map[string]bool {
|
||||
return tree.collection_page_graph(col)!
|
||||
}(mut tree, col)
|
||||
}
|
||||
for th in ths {
|
||||
col_graph := th.wait()!
|
||||
for k, v in col_graph {
|
||||
graph[k] = v.clone()
|
||||
}
|
||||
}
|
||||
return graph
|
||||
}
|
||||
|
||||
fn (mut tree Tree) collection_page_graph(col &collection.Collection) !map[string]map[string]bool {
|
||||
mut graph := map[string]map[string]bool{}
|
||||
_ := []thread !GraphResponse{}
|
||||
for _, page in col.pages {
|
||||
resp := tree.generate_page_graph(page, col.name)!
|
||||
for k, v in resp.graph {
|
||||
graph[k] = v.clone()
|
||||
}
|
||||
}
|
||||
|
||||
return graph
|
||||
}
|
||||
|
||||
pub struct GraphResponse {
|
||||
pub:
|
||||
graph map[string]map[string]bool
|
||||
errors []CollectionError
|
||||
}
|
||||
|
||||
fn (tree Tree) generate_page_graph(current_page &data.Page, col_name string) !GraphResponse {
|
||||
mut graph := map[string]map[string]bool{}
|
||||
mut errors := []CollectionError{}
|
||||
|
||||
include_action_elements := current_page.get_include_actions()!
|
||||
for element in include_action_elements {
|
||||
page_pointer := get_include_page_pointer(col_name, element.action) or {
|
||||
errors << CollectionError{
|
||||
path: current_page.path
|
||||
msg: 'failed to get page pointer for include ${element.action.heroscript()}: ${err}'
|
||||
cat: .include
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
include_page := tree.get_page_with_pointer(page_pointer) or {
|
||||
// TODO
|
||||
// col.error(
|
||||
// path: current_page.path
|
||||
// msg: 'failed to get page for include ${element.action.heroscript()}: ${err.msg()}'
|
||||
// cat: .include
|
||||
// )!
|
||||
continue
|
||||
}
|
||||
|
||||
graph[include_page.key()][current_page.key()] = true
|
||||
}
|
||||
return GraphResponse{
|
||||
graph: graph
|
||||
errors: errors
|
||||
}
|
||||
}
|
||||
56
lib/data/doctree/process_includes_test.v
Normal file
56
lib/data/doctree/process_includes_test.v
Normal file
@@ -0,0 +1,56 @@
|
||||
module doctree
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
const test_dir = '${os.dir(@FILE)}/testdata/process_includes_test'
|
||||
|
||||
// verifies that nested includes are resolved transitively:
// page1 includes page2, page2 includes page3, so after process_includes
// all three pages must render page3's markdown.
fn test_process_includes() {
	/*
	1- use 3 pages in testdata:
		- page1 includes page2
		- page2 includes page3
	2- create tree
	3- invoke process_includes
	4- check pages markdown
	*/
	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: '${test_dir}/col1', name: 'col1')!
	tree.add_collection(path: '${test_dir}/col2', name: 'col2')!
	tree.process_includes()!

	mut page1 := tree.page_get('col1:page1.md')!
	mut page2 := tree.page_get('col2:page2.md')!
	mut page3 := tree.page_get('col2:page3.md')!

	// every page in the include chain resolves to the leaf page's content
	assert page1.get_markdown()! == 'page3 content'
	assert page2.get_markdown()! == 'page3 content'
	assert page3.get_markdown()! == 'page3 content'
}
|
||||
|
||||
// verifies the include graph: an edge graph[included][including] == true
// for each direct include (page1 -> page2, page2 -> page3); note the graph
// is keyed by the *included* page, values are the pages that include it.
fn test_generate_pages_graph() {
	/*
	1- use 3 pages in testdata:
		- page1 includes page2
		- page2 includes page3
	2- create tree
	3- invoke generate_pages_graph
	4- check graph
	*/
	mut tree := new(name: 'mynewtree')!
	tree.add_collection(path: '${test_dir}/col1', name: 'col1')!
	tree.add_collection(path: '${test_dir}/col2', name: 'col2')!
	mut page1 := tree.page_get('col1:page1.md')!
	mut page2 := tree.page_get('col2:page2.md')!
	mut page3 := tree.page_get('col2:page3.md')!

	graph := tree.generate_pages_graph()!
	// page3 is included by page2; page2 is included by page1
	assert graph == {
		'${page3.key()}': {
			'${page2.key()}': true
		}
		'${page2.key()}': {
			'${page1.key()}': true
		}
	}
}
|
||||
54
lib/data/doctree/process_macros.v
Normal file
54
lib/data/doctree/process_macros.v
Normal file
@@ -0,0 +1,54 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.data.doctree.collection { Collection }
|
||||
import freeflowuniverse.herolib.data.markdownparser.elements
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.core.playmacros
|
||||
|
||||
// MacroGetArgs is intended to filter which action elements are collected.
// NOTE(review): actor/name are currently not read by get_actions — confirm
// whether filtering is still planned or the struct can be simplified.
@[params]
pub struct MacroGetArgs {
pub mut:
	actor string
	name  string
}
|
||||
|
||||
// adds all action elements to a playbook, calls playmacros.play on the playbook,
// which processes the macros, then reprocesses every page with the actions' new content
pub fn (mut tree Tree) process_actions_and_macros() ! {
	console.print_green('Processing actions and macros')

	// first process the generic actions, which can be executed as is
	mut plbook := playbook.new()!
	for element_action in tree.get_actions()! {
		plbook.actions << &element_action.action
	}

	playmacros.play_actions(mut plbook)!

	// now get specific actions which need to return content;
	// one worker thread per collection, pages within a collection are
	// processed sequentially by their worker
	mut ths := []thread !{}
	for _, mut col in tree.collections {
		ths << spawn fn (mut col Collection) ! {
			for _, mut page in col.pages {
				page.process_macros()! // calls play_macro in playmacros...
			}
		}(mut col)
	}

	// join all workers; `!` propagates the first worker error to the caller
	for th in ths {
		th.wait()!
	}
}
|
||||
|
||||
// get_actions collects every action element from all pages of every collection
// in the tree. args_ is currently unused (reserved for actor/name filtering).
fn (mut tree Tree) get_actions(args_ MacroGetArgs) ![]&elements.Action {
	// console.print_green('get actions for tree: name:${tree.name}')
	mut res := []&elements.Action{}
	// loop var renamed to `col`: the previous name `collection` shadowed the
	// imported `collection` module, making it unusable inside the loop
	for _, mut col in tree.collections {
		for _, mut page in col.pages {
			res << page.get_all_actions()!
		}
	}
	return res
}
|
||||
244
lib/data/doctree/scan.v
Normal file
244
lib/data/doctree/scan.v
Normal file
@@ -0,0 +1,244 @@
|
||||
module doctree
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib { Path }
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
import freeflowuniverse.herolib.data.doctree.collection { Collection }
|
||||
import freeflowuniverse.herolib.develop.gittools
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
// TreeScannerArgs controls how a directory (or git repository) is scanned
// for collections.
@[params]
pub struct TreeScannerArgs {
pub mut:
	path      string // local directory to scan; derived from git_url when that is set
	heal      bool = true // healing means we fix images
	git_url   string // when set, the repo is located/cloned via gittools and path is derived from it
	git_reset bool
	git_root  string // code root passed to gittools as coderoot
	git_pull  bool
	load      bool = true // means we scan automatically the added collection
}
|
||||
|
||||
// walk over directory find dirs with .book or .collection inside and add to the tree .
// a path will not be added unless .collection is in the path of a collection dir or .book in a book
// ```
// path string
// heal bool // healing means we fix images, if selected will automatically load, remove stale links
// git_url string
// git_reset bool
// git_root string
// git_pull bool
// ```
pub fn (mut tree Tree) scan(args_ TreeScannerArgs) ! {
	mut args := args_
	// when a git url is given, make sure the repo exists locally and derive
	// the filesystem path from it
	if args.git_url.len > 0 {
		mut gs := gittools.get(coderoot: args.git_root)!
		mut repo := gs.get_repo(
			url: args.git_url
			pull: args.git_pull
			reset: args.git_reset
			reload: false
		)!
		args.path = repo.get_path_of_url(args.git_url)!
	}

	if args.path.len == 0 {
		return error('Path needs to be provided.')
	}

	mut path := pathlib.get_dir(path: args.path)!
	if !path.is_dir() {
		return error('path is not a directory')
	}

	// legacy marker: migrate .site dirs to .collection
	if path.file_exists('.site') {
		move_site_to_collection(mut path)!
	}

	if is_collection_dir(path) {
		collection_name := get_collection_name(mut path)!

		tree.add_collection(
			path: path.path
			name: collection_name
			heal: args.heal
			load: true
			fail_on_error: tree.fail_on_error
		)!

		return
	}

	mut entries := path.list(recursive: false) or {
		// BUGFIX: interpolate the captured `err`, not the builtin `error` fn
		return error('cannot list: ${path.path} \n${err}')
	}

	for mut entry in entries.paths {
		if !entry.is_dir() || is_ignored_dir(entry)! {
			continue
		}

		// recurse into subdirs; git args are not forwarded (repo already resolved)
		tree.scan(path: entry.path, heal: args.heal, load: args.load) or {
			return error('failed to scan ${entry.path} :${err}')
		}
	}
}
|
||||
|
||||
// scan_concurrent behaves like scan but loads each discovered collection in
// its own thread; the loaded collections are joined into tree.collections at
// the end. Errors from any worker thread abort the whole scan.
pub fn (mut tree Tree) scan_concurrent(args_ TreeScannerArgs) ! {
	mut args := args_
	// when a git url is given, make sure the repo exists locally and derive
	// the filesystem path from it
	if args.git_url.len > 0 {
		mut gs := gittools.get(coderoot: args.git_root)!
		mut repo := gs.get_repo(
			url: args.git_url
			pull: args.git_pull
			reset: args.git_reset
			reload: false
		)!
		args.path = repo.get_path_of_url(args.git_url)!
	}

	if args.path.len == 0 {
		return error('Path needs to be provided.')
	}

	path := pathlib.get_dir(path: args.path)!
	mut collection_paths := scan_helper(path)!
	mut threads := []thread !Collection{}
	for mut col_path in collection_paths {
		mut col_name := get_collection_name(mut col_path)!
		col_name = texttools.name_fix(col_name)

		if col_name in tree.collections {
			if tree.fail_on_error {
				// BUGFIX: message typo "exits" -> "exists"
				return error('Collection with name ${col_name} already exists')
			}
			// TODO: handle error
			continue
		}

		// one loader thread per collection
		threads << spawn fn (args CollectionNewArgs) !Collection {
			mut args_ := collection.CollectionNewArgs{
				name: args.name
				path: args.path
				heal: args.heal
				load: args.load
				fail_on_error: args.fail_on_error
			}
			return collection.new(args_)!
		}(
			name: col_name
			path: col_path.path
			heal: args.heal
			fail_on_error: tree.fail_on_error
		)
	}

	for _, t in threads {
		new_collection := t.wait() or { return error('Error executing thread: ${err}') }
		tree.collections[new_collection.name] = &new_collection
	}
}
|
||||
|
||||
// internal function that recursively returns
// the paths of collections in a given path
fn scan_helper(path_ Path) ![]Path {
	mut path := path_
	if !path.is_dir() {
		return error('path is not a directory')
	}

	// legacy marker: migrate .site dirs to .collection
	if path.file_exists('.site') {
		move_site_to_collection(mut path)!
	}

	// a collection dir is a leaf: do not descend into it
	if is_collection_dir(path) {
		return [path]
	}

	mut entries := path.list(recursive: false) or {
		// BUGFIX: interpolate the captured `err`, not the builtin `error` fn
		return error('cannot list: ${path.path} \n${err}')
	}

	mut paths := []Path{}
	for mut entry in entries.paths {
		if !entry.is_dir() || is_ignored_dir(entry)! {
			continue
		}

		paths << scan_helper(entry) or { return error('failed to scan ${entry.path} :${err}') }
	}
	return paths
}
|
||||
|
||||
// CollectionNewArgs are the parameters for registering one collection on a tree.
@[params]
pub struct CollectionNewArgs {
mut:
	name string @[required]
	path string @[required]
	heal bool = true // healing means we fix images, if selected will automatically load, remove stale links
	load bool = true
	fail_on_error bool // when true, a duplicate collection name is a fatal error
}
|
||||
|
||||
// get a new collection
// creates a collection at args.path and registers it on the tree under its
// normalized name; a duplicate name is silently ignored unless fail_on_error.
pub fn (mut tree Tree) add_collection(args_ CollectionNewArgs) ! {
	mut args := args_
	args.name = texttools.name_fix(args.name) // normalize so lookups are consistent

	if args.name in tree.collections {
		if args.fail_on_error {
			// BUGFIX: message typo "exits" -> "exists"
			return error('Collection with name ${args.name} already exists')
		}
		return
	}

	mut pp := pathlib.get_dir(path: args.path)! // will raise error if path doesn't exist
	mut new_collection := collection.new(
		name: args.name
		path: pp.path
		heal: args.heal
		fail_on_error: args.fail_on_error
	)!

	tree.collections[new_collection.name] = &new_collection
}
|
||||
|
||||
// returns true if directory should be ignored while scanning
// ('.'-prefixed hidden dirs and '_'-prefixed dirs are skipped)
fn is_ignored_dir(path_ Path) !bool {
	mut dir := path_
	if !dir.is_dir() {
		return error('path is not a directory')
	}
	dir_name := dir.name()
	for prefix in ['.', '_'] {
		if dir_name.starts_with(prefix) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// gets collection name from .collection file
// if no name param, uses the directory name
fn get_collection_name(mut path Path) !string {
	// the .collection marker must exist; its content may carry params
	mut filepath := path.file_get('.collection')!
	content := filepath.read()!
	fallback := path.name()
	if content.trim_space() == '' {
		// empty marker file: the directory name is the collection name
		return fallback
	}
	// means there are params in there
	mut params_ := paramsparser.parse(content)!
	if params_.exists('name') {
		return params_.get('name')!
	}
	return fallback
}
|
||||
|
||||
// returns true when the directory carries a .collection marker file
fn is_collection_dir(path Path) bool {
	return path.file_exists('.collection')
}
|
||||
|
||||
// moves .site file to .collection file
// migrates the legacy marker by renaming it in place; propagates any os.mv error
fn move_site_to_collection(mut path Path) ! {
	collectionfilepath1 := path.extend_file('.site')!
	collectionfilepath2 := path.extend_file('.collection')!
	os.mv(collectionfilepath1.path, collectionfilepath2.path)!
}
|
||||
1
lib/data/doctree/testdata/.gitignore
vendored
Normal file
1
lib/data/doctree/testdata/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export_test/export
|
||||
1
lib/data/doctree/testdata/actions/.collection
vendored
Normal file
1
lib/data/doctree/testdata/actions/.collection
vendored
Normal file
@@ -0,0 +1 @@
|
||||
actions
|
||||
7
lib/data/doctree/testdata/actions/actions1.md
vendored
Normal file
7
lib/data/doctree/testdata/actions/actions1.md
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
# actions 2
|
||||
|
||||
```js
|
||||
!!payment3.add account:something description:'TF Wallet for TFT'
|
||||
name:'TF Wallet' //comment for name
|
||||
blockchain:stellar //holochain maybe?
|
||||
```
|
||||
15
lib/data/doctree/testdata/actions/functionality/actions2.md
vendored
Normal file
15
lib/data/doctree/testdata/actions/functionality/actions2.md
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
# web3gw_proxy server functionality
|
||||
|
||||
- [stellar](./stellar.md)
|
||||
|
||||
|
||||
```js
|
||||
!!payment.add account:something description:'TF Wallet for TFT' person:fatayera preferred:false
|
||||
name:'TF Wallet' //comment for name
|
||||
blockchain:stellar //holochain maybe?
|
||||
```
|
||||
|
||||
!!payment.add2
|
||||
name:'TF Wallet' //comment for name
|
||||
blockchain:stellar
|
||||
|
||||
1
lib/data/doctree/testdata/export_test/export_expected/col1/.collection
vendored
Normal file
1
lib/data/doctree/testdata/export_test/export_expected/col1/.collection
vendored
Normal file
@@ -0,0 +1 @@
|
||||
name:col1 src:'/Users/timurgordon/code/github/freeflowuniverse/crystallib/crystallib/data/doctree/testdata/export_test/mytree/dir1'
|
||||
1
lib/data/doctree/testdata/export_test/export_expected/col1/.linkedpages
vendored
Normal file
1
lib/data/doctree/testdata/export_test/export_expected/col1/.linkedpages
vendored
Normal file
@@ -0,0 +1 @@
|
||||
col2:file3.md
|
||||
9
lib/data/doctree/testdata/export_test/export_expected/col1/errors.md
vendored
Normal file
9
lib/data/doctree/testdata/export_test/export_expected/col1/errors.md
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
# Errors
|
||||
|
||||
|
||||
## page_not_found
|
||||
|
||||
path: /Users/timurgordon/code/github/freeflowuniverse/crystallib/crystallib/data/doctree/testdata/export_test/mytree/dir1/dir2/file1.md
|
||||
|
||||
msg: page col3:file5.md not found
|
||||
|
||||
1
lib/data/doctree/testdata/export_test/export_expected/col1/file1.md
vendored
Normal file
1
lib/data/doctree/testdata/export_test/export_expected/col1/file1.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[not existent page](col3:file5.md)
|
||||
1
lib/data/doctree/testdata/export_test/export_expected/col1/file2.md
vendored
Normal file
1
lib/data/doctree/testdata/export_test/export_expected/col1/file2.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
[some page](../col2/file3.md)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user