This commit is contained in:
2025-08-12 15:52:13 +02:00
parent 0e1450b5db
commit ffff44f347
23 changed files with 437 additions and 388 deletions

View File

@@ -40,7 +40,7 @@ pub fn jina_model_from_string(s string) !JinaModel {
'jina-embeddings-v2-base-zh' { JinaModel.jina_embeddings_v2_base_zh }
'jina-embeddings-v2-base-code' { JinaModel.jina_embeddings_v2_base_code }
'jina-embeddings-v3' { JinaModel.jina_embeddings_v3 }
else { error('Invalid Jina model string: ${s}') }
else { return error('Invalid Jina model string: ${s}') }
}
}
@@ -66,7 +66,7 @@ pub fn truncate_type_from_string(s string) !TruncateType {
'NONE' { TruncateType.none_ }
'START' { TruncateType.start }
'END' { TruncateType.end }
else { error('Invalid truncate type string: ${s}') }
else { return error('Invalid truncate type string: ${s}') }
}
}
@@ -95,7 +95,7 @@ pub fn embedding_type_from_string(s string) !EmbeddingType {
'base64' { EmbeddingType.base64 }
'binary' { EmbeddingType.binary }
'ubinary' { EmbeddingType.ubinary }
else { error('Invalid embedding type string: ${s}') }
else { return error('Invalid embedding type string: ${s}') }
}
}
@@ -116,7 +116,7 @@ pub fn task_type_from_string(s string) !TaskType {
'text-matching' { TaskType.text_matching }
'classification' { TaskType.classification }
'separation' { TaskType.separation }
else { error('Invalid task type string: ${s}') }
else { return error('Invalid task type string: ${s}') }
}
}
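The same one-line fix is applied to every `*_from_string` helper touched by this commit: in V, every arm of a `match` used as an expression must yield the same type, and a bare `error(...)` in the `else` arm produces an `IError` rather than the enum the other arms return. Writing `return error(...)` instead leaves the function through its `!` result. A minimal sketch of the pattern with an invented enum (the real helpers return the match expression the same way):

module main

enum Color {
	red
	green
}

// returns the matching enum value, or propagates an error for unknown input
fn color_from_string(s string) !Color {
	return match s {
		'red' { Color.red }
		'green' { Color.green }
		// `return error(...)` exits the function; a bare `error(...)` would have
		// to type-check as a Color value here, which it does not
		else { return error('Invalid color string: ${s}') }
	}
}

fn main() {
	c := color_from_string('green') or { panic(err) }
	println(c)
}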

View File

@@ -63,7 +63,7 @@ pub fn jina_rerank_model_from_string(s string) !JinaRerankModel {
'jina-reranker-v1-tiny-en' { JinaRerankModel.reranker_v1_tiny_en }
'jina-reranker-v1-turbo-en' { JinaRerankModel.reranker_v1_turbo_en }
'jina-colbert-v1-en' { JinaRerankModel.colbert_v1_en }
else { error('Invalid JinaRerankModel string: ${s}') }
else { return error('Invalid JinaRerankModel string: ${s}') }
}
}

View File

@@ -138,16 +138,15 @@ fn cmd_docusaurus_execute(cmd Command) ! {
)!
// `docusaurus_path` is a pathlib.Path; we need its string representation
if !os.exists(os.join_path(docusaurus_path.path, 'cfg')) {
error('Docusaurus configuration directory not found at: ${os.join_path(docusaurus_path.path,
if os.exists(os.join_path(docusaurus_path.path, 'cfg'))==false {
return error('Docusaurus configuration directory not found at: ${os.join_path(docusaurus_path.path,
'cfg')}')
}
console.print_header('Running Docusaurus for: ${docusaurus_path}')
console.print_header('Running Docusaurus for: ${docusaurus_path.path}')
// The `playcmds.run` helper expects a string path. Use the underlying
// filesystem path from the pathlib.Path value.
println('DEBUG: The heroscript path is: ${docusaurus_path.path}')
playcmds.run(
heroscript_path: docusaurus_path.path
reset: false

View File

@@ -30,7 +30,7 @@ pub mut:
// if overwrite is set, it will overwrite the last one in the directory
pub fn (mut path Path) backup_path(args BackupArgs) !Path {
if !path.exists() && args.restore == false {
error('cannot find path, so cannot create backup for ${path}')
return error('cannot find path, so cannot create backup for ${path}')
}
mut dest := ''
mut rel := ''

View File

@@ -13,6 +13,7 @@ pub mut:
git_reset bool
prio int = 50
priorities map[int]string // filter and give priority; see the filtersort method for how to use this
replace map[string]string
// session ?&base.Session
}
@@ -25,10 +26,12 @@ pub mut:
// git_branch string
// git_reset bool
// session &base.Session
// replace map[string]string
// ```
pub fn new(args_ PlayBookNewArgs) !PlayBook {
mut args := args_
mut c := base.context() or { return error('failed to get context: ${err}') }
mut s := c.session_new()!

View File

@@ -94,10 +94,10 @@ pub fn (mut plbook PlayBook) find(args FindArgs) ![]&Action {
// use this in a play function to make sure we only have one of those actions; the one action is then returned
pub fn (mut plbook PlayBook) ensure_once(args FindArgs) !&Action {
println('DEBUG: In the error')
// println('DEBUG: In the error')
mut res := plbook.find(args) or { [] }
println('res: ${res}')
// println('res: ${res}')
if res.len == 0 {
return error('No actions found based on filter: ${args.filter}')
}

View File

@@ -14,6 +14,7 @@ pub mut:
result string // if any result
nractions int
done []int // which actions did we already find/run?
path string
session &base.Session @[skip; str: skip]
}
@@ -83,7 +84,7 @@ pub fn (mut plbook PlayBook) actions_sorted(args SortArgs) ![]&Action {
@[params]
pub struct HeroScriptArgs {
pub mut:
show_done bool = true
show_done bool
}
// serialize to heroscript

View File

@@ -12,6 +12,18 @@ enum State {
othertext
}
// pub struct PlayBookNewArgs {
// path string
// text string
// git_url string
// git_pull bool
// git_branch string
// git_reset bool
// prio int = 50
// priorities map[int]string // filter and give priority, see filtersort method to know how to use
// replace map[string]string
// }
pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
mut args := args_
@@ -21,7 +33,25 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
git_pull: args.git_pull
git_reset: args.git_reset
}
args.path = gittools.path(git_path_args)!.path
newpath := gittools.path(git_path_args)!
args.path = newpath.path
}
if plbook.path=="" && args.path!="" {
plbook.path = args.path
}
if args.text.len>0 && args.replace.len>0{
//now we need to replace any placeholders in the text
for key, value in args.replace {
if key.starts_with('@') || key.starts_with('$') || key.starts_with('[') || key.starts_with('{') {
args.text = args.text.replace(key, value)
}else{
args.text = args.text.replace("@${key}", value)
args.text = args.text.replace("$\{${key}\}", value)
args.text = args.text.replace("\{${key}\}", value)
}
}
}
// walk over directory
@@ -33,7 +63,7 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
}
if p.is_file() {
c := p.read()!
plbook.add(text: c, prio: args.prio)!
plbook.add(text: c, prio: args.prio, replace: args.replace)!
return
} else if p.is_dir() {
// get .md and .hero files from dir
@@ -45,7 +75,7 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
paths << ol2.paths
for mut p2 in paths {
c2 := p2.read()!
plbook.add(text: c2, prio: args.prio)!
plbook.add(text: c2, prio: args.prio, replace: args.replace)!
}
return
}
@@ -122,7 +152,7 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
paramsdata << line_strip.all_after_first(' ').trim_space()
}
if actionname.starts_with('!!!!!') {
error('there is no action starting with 5 x !')
return error('there is no action starting with 5 x !')
} else if actionname.starts_with('!!!!') {
action.actiontype = .wal
} else if actionname.starts_with('!!!') {
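Two behavioural additions in this file: `plbook.path` is remembered from the first path-based `add` call, and a new `replace` map substitutes placeholders in incoming text before it is parsed. Keys that already start with `@`, `$`, `[` or `{` are replaced verbatim; plain keys are tried as `@KEY`, `${KEY}` and `{KEY}`. A rough sketch of how the new argument could be used (module, action and placeholder names are invented; it assumes a context/session can be created as usual):

import freeflowuniverse.herolib.core.playbook

fn example_replace() ! {
	mut plbook := playbook.new(text: '')!
	plbook.add(
		text: '!!example.greet name:@NAME color:{COLOR}'
		replace: {
			'NAME':  'Alice'
			'COLOR': 'red'
		}
	)!
	// after substitution the action text reads: !!example.greet name:Alice color:red
	println(plbook)
}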

View File

@@ -2,50 +2,59 @@ module playbook
import freeflowuniverse.herolib.develop.gittools // Added import for gittools
// Include external playbook actions (from git repo or local path)
// based on actions defined as `!!play.include`.
// Parameters:
// git_url git repository URL (optional)
// git_pull pull latest changes (bool, default false)
// git_reset reset local copy (bool, default false)
// path local path to include (optional)
pub fn (mut plbook PlayBook) include() ! {
// Find all include actions in the playbook
mut inc_actions := plbook.find(filter: 'play.include')!
if inc_actions.len == 0 {
return
}
//REMARK: include is done in play_core
for mut inc in inc_actions {
mut p := inc.params
// // Include external playbook actions (from git repo or local path)
// // based on actions defined as `!!play.include`.
// // Parameters:
// // git_url git repository URL (optional)
// // git_pull pull latest changes (bool, default false)
// // git_reset reset local copy (bool, default false)
// // path local path to include (optional)
// pub fn (mut plbook PlayBook) include() ! {
// // Find all include actions in the playbook
// Extract parameters with sensible defaults
git_url := p.get_default('git_url', '')!
git_pull := p.get_default_false('git_pull')
git_reset := p.get_default_false('git_reset')
path := p.get_default('path', '')!
// println(plbook)
// if true{panic("568")}
// Resolve the path to include
mut includepath := ''
if git_url != '' {
// Resolve a git repository path (may clone / pull)
includepath = gittools.path(
git_url: git_url
path: path
git_pull: git_pull
git_reset: git_reset
)!.path
} else {
includepath = path
}
// mut inc_actions := plbook.find(filter: 'play.include')!
// if inc_actions.len == 0 {
// return
// }
// Add the found content (files / directories) to the current playbook.
// `add` will handle reading files, recursing into directories, etc.
if includepath != '' {
plbook.add(path: includepath)!
}
// println(plbook)
// if true{panic("56")}
// Mark this include action as processed
inc.done = true
}
}
// for mut inc in inc_actions {
// mut p := inc.params
// // Extract parameters with sensible defaults
// git_url := p.get_default('git_url', '')!
// git_pull := p.get_default_false('git_pull')
// git_reset := p.get_default_false('git_reset')
// path := p.get_default('path', '')!
// // Resolve the path to include
// mut includepath := ''
// if git_url != '' {
// // Resolve a git repository path (may clone / pull)
// includepath = gittools.path(
// git_url: git_url
// path: path
// git_pull: git_pull
// git_reset: git_reset
// )!.path
// } else {
// includepath = path
// }
// // Add the found content (files / directories) to the current playbook.
// // `add` will handle reading files, recursing into directories, etc.
// if includepath != '' {
// plbook.add(path: includepath)!
// }
// // Mark this include action as processed
// inc.done = true
// }
// }

View File

@@ -22,15 +22,14 @@ pub mut:
pub fn run(args_ PlayArgs) ! {
mut args := args_
println('DEBUG: the args is: ${args}')
// println('DEBUG: the args is: ${args}')
mut plbook := args.plbook or {
playbook.new(text: args.heroscript, path: args.heroscript_path)!
}
println('DEBUG: The playbook is ${plbook}')
// Core actions
play_core(mut plbook)!
// Git actions
play_git(mut plbook)!
@@ -42,12 +41,17 @@ pub fn run(args_ PlayArgs) ! {
// Website / docs
site.play(mut plbook)!
println('DEBUG: Site play is done')
doctree.play(mut plbook)!
println('DEBUG: doctree play is done')
docusaurus.play(mut plbook)!
println('DEBUG: docusaurus play is done')
println("=========____________===========")
println(plbook)
println("=========____________===========")
if true{panic("sdsds")}
// Ensure we did not leave any actions unprocessed
plbook.empty_check()!

View File

@@ -4,6 +4,7 @@ import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import os
// -------------------------------------------------------------------
// Core playcommand processing (context, session, envsubst, etc)
@@ -16,10 +17,10 @@ fn play_core(mut plbook PlayBook) ! {
// Track included paths to prevent infinite recursion
mut included_paths := map[string]bool{}
for action_ in plbook.find(filter: 'play.*')! {
for mut action_ in plbook.find(filter: 'play.*')! {
if action_.name == 'include' {
console.print_debug('play run:${action_}')
mut action := *action_
mut toreplace := action.params.get_default('replace', '')!
mut playrunpath := action.params.get_default('path', '')!
if playrunpath.len == 0 {
action.name = 'pull'
@@ -34,15 +35,24 @@ fn play_core(mut plbook PlayBook) ! {
return error("can't run a heroscript didn't find url or path.")
}
// console.print_debug('play run:\n${action_}')
if ! playrunpath.starts_with('/') {
playrunpath=os.abs_path("${plbook.path}/${playrunpath}")
}
console.print_debug('play run include path:${playrunpath}')
// Check for cycle detection
if playrunpath in included_paths {
console.print_debug('Skipping already included path: ${playrunpath}')
continue
}
console.print_debug('play run path:${playrunpath}')
toreplacedict:=texttools.to_map(toreplace)
included_paths[playrunpath] = true
plbook.add(path: playrunpath)!
plbook.add(path: playrunpath,replace:toreplacedict)!
action.done = true
}
if action_.name == 'echo' {
content := action_.params.get_default('content', "didn't find content")!
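The include handling in `play_core` now resolves a relative `path` against `plbook.path` (the new field set when the playbook was first loaded from disk) and forwards the `replace` param, parsed from a `key:value,key:value` string by `texttools.to_map`, into `plbook.add`. A hypothetical heroscript that would exercise both (paths and placeholder names are invented):

import freeflowuniverse.herolib.core.playbook

fn example_include() !playbook.PlayBook {
	// in a playbook that was loaded from a directory, plbook.path points at that
	// directory and the relative 'sub/site.md' is resolved against it; the replace
	// string is parsed into {'NAME': 'Alice', 'COLOR': 'red'} before forwarding
	return playbook.new(
		text: "
!!play.include
    path: 'sub/site.md'
    replace: 'NAME:Alice,COLOR:red'
"
	)!
}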

View File

@@ -23,6 +23,26 @@ pub fn to_array_int(r string) []int {
return r2
}
// convert 'a:b,c:d,e:f' to a map with keys a, c, e and corresponding values b, d, f
pub fn to_map(mapstring string) map[string]string {
mut result := map[string]string{}
mut mapstring_array := to_array(mapstring)
for item in mapstring_array {
if item.contains(':') {
parts := item.split(':')
if parts.len == 2 {
result[parts[0].trim_space()] = parts[1].trim_space().trim("'\"").trim_space()
} else {
panic('to_map: expected key:value pairs, got: ${item}')
}
} else {
panic('to_map: expected key:value pairs, got: ${item}')
}
}
return result
}
// intelligent way to map a line to a map
//```
// r:=texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
@@ -37,7 +57,7 @@ pub fn to_array_int(r string) []int {
// "root 304 0.0 0.0 408185328 1360 ?? S 16Dec23 0:34.06 \n \n")
// assert {'name': 'root', 'pid': '1360', 'path': ''} == r3
//```
pub fn to_map(mapstring string, line string, delimiter_ string) map[string]string {
pub fn to_map_special(mapstring string, line string, delimiter_ string) map[string]string {
mapstring_array := split_smart(mapstring, '')
mut line_array := split_smart(line, '')
mut result := map[string]string{}
@@ -71,7 +91,7 @@ pub fn to_list_map(mapstring string, txt_ string, delimiter_ string) []map[strin
mut txt := remove_empty_lines(txt_)
txt = dedent(txt)
for line in txt.split_into_lines() {
result << to_map(mapstring, line, delimiter_)
result << to_map_special(mapstring, line, delimiter_)
}
return result
}
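The existing three-argument `to_map(mapstring, line, delimiter)` is renamed to `to_map_special` (its caller `to_list_map` is updated accordingly), freeing the `to_map` name for the new single-argument helper that parses a `key:value,key:value` string. A quick usage sketch, assuming `to_array` splits on commas as its sibling `to_array_int` suggests; surrounding quotes are stripped from values:

import freeflowuniverse.herolib.core.texttools

fn example_to_map() {
	r := texttools.to_map("name:'Alice', color:red, size:10")
	// expected: {'name': 'Alice', 'color': 'red', 'size': '10'}
	println(r)
}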

View File

@@ -6,8 +6,8 @@ import freeflowuniverse.herolib.core.playbook { PlayBook }
pub fn play(mut plbook PlayBook) ! {
mut doctrees := map[string]&Tree{}
collection_actions := plbook.find(filter: 'doctree.scan')!
for action in collection_actions {
mut collection_actions := plbook.find(filter: 'doctree.scan')!
for mut action in collection_actions {
mut p := action.params
name := p.get_default('name', 'main')!
mut doctree := doctrees[name] or {
@@ -20,11 +20,11 @@ pub fn play(mut plbook PlayBook) ! {
git_reset := p.get_default_false('git_reset')
git_pull := p.get_default_false('git_pull')
doctree.scan(path: path, git_url: git_url, git_reset: git_reset, git_pull: git_pull)!
action.done = true
tree_set(doctree)
}
export_actions := plbook.find(filter: 'doctree.export')!
mut export_actions := plbook.find(filter: 'doctree.export')!
if export_actions.len == 0 && collection_actions.len > 0 {
// Only auto-export if we have collections to export
name0 := 'main'
@@ -38,7 +38,7 @@ pub fn play(mut plbook PlayBook) ! {
}
}
for action in export_actions {
for mut action in export_actions {
mut p := action.params
name := p.get_default('name', 'main')!
destination := p.get('destination')!
@@ -50,6 +50,7 @@ pub fn play(mut plbook PlayBook) ! {
reset: reset
exclude_errors: exclude_errors
)!
action.done = true
}
// println(tree_list())

View File

@@ -53,7 +53,7 @@ pub fn ping(args PingArgs) !PingResult {
}
else {
// println("${err} ${err.code()}")
error("can't ping on osx (${err.code()})\n${err}")
return error("can't ping on osx (${err.code()})\n${err}")
}
}
} else if platform_ == .ubuntu {

View File

@@ -388,11 +388,11 @@ fn (mut self TFDeployment) save() ! {
}
fn (self TFDeployment) compress(data []u8) ![]u8 {
return zlib.compress(data) or { error('Cannot compress the data due to: ${err}') }
return zlib.compress(data) or { return error('Cannot compress the data due to: ${err}') }
}
fn (self TFDeployment) decompress(data []u8) ![]u8 {
return zlib.decompress(data) or { error('Cannot decompress the data due to: ${err}') }
return zlib.decompress(data) or { return error('Cannot decompress the data due to: ${err}') }
}
fn (self TFDeployment) encrypt(compressed []u8) ![]u8 {

View File

@@ -13,13 +13,14 @@ pub struct DocSite {
pub mut:
name string
url string
path_src pathlib.Path
// path_src pathlib.Path
path_publish pathlib.Path
path_build pathlib.Path
errors []SiteError
config Configuration
website sitemodule.Site
}
importparams []ImportParams
}
pub fn (mut s DocSite) build() ! {
s.generate()!
@@ -81,104 +82,6 @@ pub fn (mut s DocSite) dev(args DevArgs) ! {
s.open()!
}
pub fn (mut s DocSite) dev_watch(args DevArgs) ! {
s.generate()!
// Create screen session for docusaurus development server
mut screen_name := 'docusaurus'
mut sf := screen.new()!
// Add and start a new screen session
mut scr := sf.add(
name: screen_name
cmd: '/bin/bash'
start: true
attach: false
reset: true
)!
// Send commands to the screen session
console.print_item('To view the server output:: cd ${s.path_build.path}')
scr.cmd_send('cd ${s.path_build.path}')!
// Start script recording in the screen session for log streaming
log_file := '/tmp/docusaurus_${screen_name}.log'
script_cmd := 'script -f ${log_file}'
scr.cmd_send(script_cmd)!
// Small delay to ensure script is ready
time.sleep(500 * time.millisecond)
// Start bun in the scripted session
bun_cmd := 'bun start -p ${args.port} -h ${args.host}'
scr.cmd_send(bun_cmd)!
// Stream the log output to current terminal
console.print_header(' Docusaurus Development Server')
console.print_item('Streaming server output... Press Ctrl+C to detach and leave server running')
console.print_item('Server will be available at: http://${args.host}:${args.port}')
console.print_item('To reattach later: screen -r ${screen_name}')
println('')
// Stream logs until user interrupts
s.stream_logs(log_file, screen_name)!
// After user interrupts, show final instructions
console.print_header(' Server Running in Background')
console.print_item(' Development server is running in background')
console.print_item('Server URL: http://${args.host}:${args.port}')
console.print_item('To reattach: screen -r ${screen_name}')
console.print_item('To stop server: screen -S ${screen_name} -X kill')
console.print_item('The site content is on: ${s.path_src.path}/docs')
// Start the watcher in a separate thread
// mut tf:=spawn watch_docs(docs_path, s.path_src.path, s.path_build.path)
// tf.wait()!
println('\n')
if args.open {
s.open()!
}
if args.watch_changes {
docs_path := '${s.path_src.path}/docs'
watch_docs(docs_path, s.path_src.path, s.path_build.path)!
}
}
// Stream logs from script file to current terminal until user interrupts
fn (mut s DocSite) stream_logs(log_file string, screen_name string) ! {
// Wait a moment for the log file to be created
mut attempts := 0
for !os.exists(log_file) && attempts < 10 {
time.sleep(200 * time.millisecond)
attempts++
}
if !os.exists(log_file) {
console.print_stderr('Warning: Log file not created, falling back to screen attach')
console.print_item('Attaching to screen session... Press Ctrl+A then D to detach')
// Fallback to direct screen attach
osal.execute_interactive('screen -r ${screen_name}')!
return
}
// Use tail -f to stream the log file
// The -f flag follows the file as it grows
tail_cmd := 'tail -f ${log_file}'
// Execute tail in interactive mode - this will stream until Ctrl+C
osal.execute_interactive(tail_cmd) or {
// If tail fails, try alternative approach
console.print_stderr('Log streaming failed, attaching to screen session...')
osal.execute_interactive('screen -r ${screen_name}')!
return
}
// Clean up the log file after streaming
os.rm(log_file) or {}
}
@[params]
pub struct ErrorArgs {
pub mut:

View File

@@ -66,39 +66,4 @@ pub fn (mut site DocSite) generate() ! {
site: website
)!
site.process_imports()!
}
pub fn (mut site DocSite) process_imports() ! {
mut gs := gittools.new()!
mut f := factory_get()!
for item in site.website.siteconfig.imports {
if true {
panic('not implemented import')
}
mypath := gs.get_path(
pull: false
reset: false
url: item.url
)!
mut mypatho := pathlib.get(mypath)
mypatho.copy(dest: '${f.path_build.path}/docs/${item.dest}', delete: false)!
println(item)
// replace: {'NAME': 'MyName', 'URGENCY': 'red'}
mut ri := regext.regex_instructions_new()
for key, val in item.replace {
ri.add_item('\{${key}\}', val)!
}
ri.replace_in_dir(
path: '${f.path_build.path}/docs/${item.dest}'
extensions: [
'md',
]
)!
}
}

View File

@@ -0,0 +1,49 @@
module docusaurus
import freeflowuniverse.herolib.osal.screen
import freeflowuniverse.herolib.develop.gittools
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.web.site as sitemodule
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console
import time
@[params]
pub struct ImportParams {
path string
git_url string
git_reset bool
git_root string
git_pull bool
}
pub fn (mut site DocSite) import(args ImportParams) ! {
mypath := gittools.get_repo_path(
git_pull: args.git_pull
git_reset: args.git_reset
git_url: args.git_url
path: args.path
)!
println(site)
if true{panic("3456789")}
mut mypatho := pathlib.get(mypath)
// mypatho.copy(dest: '${f.path_build.path}/docs/${item.dest}', delete: false)!
// println(item)
// // replace: {'NAME': 'MyName', 'URGENCY': 'red'}
// mut ri := regext.regex_instructions_new()
// for key, val in item.replace {
// ri.add_item('\{${key}\}', val)!
// }
// ri.replace_in_dir(
// path: '${f.path_build.path}/docs/${item.dest}'
// extensions: [
// 'md',
// ]
// )!
}

View File

@@ -12,76 +12,30 @@ import freeflowuniverse.herolib.osal.core as osal
pub struct AddArgs {
pub mut:
sitename string // needs to exist in web.site module
path string // site of the docusaurus site with the config as is needed to populate the docusaurus site
git_url string
git_reset bool
git_root string
git_pull bool
path_publish string
}
pub fn dsite_add(args_ AddArgs) !&DocSite {
mut args := args_
args.sitename = texttools.name_fix(args_.sitename)
pub fn dsite_define(sitename string) ! {
console.print_header('Add Docusaurus Site: ${args.sitename}')
if args.sitename in docusaurus_sites {
return error('Docusaurus site ${args.sitename} already exists, no need to add again.')
}
mut path := gittools.path(
path: args.path
git_url: args.git_url
git_reset: args.git_reset
git_root: args.git_root
git_pull: args.git_pull
currentdir: false
)!
args.path = path.path
if !path.is_dir() {
return error('path is not a directory')
}
if !os.exists('${args.path}/cfg') {
return error('config directory for docusaurus does not exist in ${args.path}/cfg.\n${args}')
}
configpath := '${args.path}/cfg'
if !os.exists(configpath) {
return error("can't find config file for docusaurus in ${configpath}")
}
osal.rm('${args.path}/cfg/main.json')!
osal.rm('${args.path}/cfg/footer.json')!
osal.rm('${args.path}/cfg/navbar.json')!
osal.rm('${args.path}/build.sh')!
osal.rm('${args.path}/develop.sh')!
osal.rm('${args.path}/sync.sh')!
osal.rm('${args.path}/.DS_Store')!
console.print_header('Add Docusaurus Site: ${sitename}')
mut f := factory_get()!
if args.path_publish == '' {
args.path_publish = '${f.path_publish.path}/${args.sitename}'
}
path_build_ := '${f.path_build.path}/${args.sitename}'
path_publish := '${f.path_publish.path}/${sitename}'
path_build_ := '${f.path_build.path}/${sitename}'
// Get the site object after processing, this is the website which is a generic definition of a site
mut website := site.get(name: args.sitename)!
mut website := site.get(name: sitename)!
// Create the DocSite instance
mut dsite := &DocSite{
name: args.sitename
path_src: pathlib.get_dir(path: args.path, create: false)!
path_publish: pathlib.get_dir(path: args.path_publish, create: true)!
name: sitename
path_publish: pathlib.get_dir(path: path_publish, create: true)!
path_build: pathlib.get_dir(path: path_build_, create: true)!
config: new_configuration(website.siteconfig)!
website: website
}
docusaurus_sites[args.sitename] = dsite
return dsite
docusaurus_sites[sitename] = dsite
}
pub fn dsite_get(name_ string) !&DocSite {

View File

@@ -1,6 +1,6 @@
module docusaurus
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.core.playbook { PlayBook, Action }
import freeflowuniverse.herolib.web.site
pub fn play(mut plbook PlayBook) ! {
@@ -12,35 +12,37 @@ pub fn play(mut plbook PlayBook) ! {
// This populates the global `site.websites` map.
site.play(mut plbook)!
mut a := plbook.ensure_once(filter: 'docusaurus.define')!
// check if docusaurus.define exists, if not, we create a default factory
mut p2 := a.params
mut f := factory_set(
path_build: p2.get_default('path_build', '')!
path_publish: p2.get_default('path_publish', '')!
reset: p2.get_default_false('reset')
template_update: p2.get_default_false('template_update')
install: p2.get_default_false('install')
)!
a.done = true
mut action_define := plbook.ensure_once(filter: 'docusaurus.define')!
// 3. Process `docusaurus.add` actions to create sites.
for mut action in plbook.find(filter: 'docusaurus.add')! {
mut p := action.params
site_name := p.get('sitename') or {
return error('In docusaurus.add, param "sitename" is required.')
}
mut param_define := action_define.params
dsite_add(
sitename: site_name
mut f := factory_set(
path_build: param_define.get_default('path_build', '')!
path_publish: param_define.get_default('path_publish', '')!
reset: param_define.get_default_false('reset')
template_update: param_define.get_default_false('template_update')
install: param_define.get_default_false('install')
)!
site_name := param_define.get('name') or {
return error('In docusaurus.add, param "name" is required.')
}
dsite_define(site_name)!
action_define.done = true
mut dsite := dsite_get(site_name)!
//imports
mut actions_import := plbook.find(filter: 'docusaurus.import')!
for mut action in actions_import {
mut p := action.params
dsite.importparams << ImportParams{
path: p.get_default('path', '')!
git_url: p.get_default('git_url', '')!
git_reset: p.get_default_false('git_reset')
git_root: p.get_default('git_root', '')!
git_pull: p.get_default_false('git_pull')
path_publish: p.get_default('path_publish', f.path_publish.path)!
)!
}
action.done = true
}
@@ -50,13 +52,10 @@ pub fn play(mut plbook PlayBook) ! {
}
for mut action in actions_dev {
mut p := action.params
site_name := p.get('site')!
mut dsite := dsite_get(site_name)!
dsite.dev(
host: p.get_default('host', 'localhost')!
port: p.get_int_default('port', 3000)!
open: p.get_default_false('open')
watch_changes: p.get_default_false('watch_changes')
)!
action.done = true
}
@@ -67,8 +66,6 @@ pub fn play(mut plbook PlayBook) ! {
}
for mut action in actions_build {
mut p := action.params
site_name := p.get('site')!
mut dsite := dsite_get(site_name)!
dsite.build()!
action.done = true
}
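The rewritten `play()` now expects exactly one `docusaurus.define` action whose `name` selects the site definition created by the `web.site` module; `dsite_define` builds the DocSite from that definition, and any `docusaurus.import` actions are collected into `dsite.importparams` rather than copied immediately. A hypothetical script for the new flow (the docusaurus module path and the site name are assumptions; the `!!site.*` actions that would define 'mydocs' are omitted):

import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.web.docusaurus // assumed module path

fn example_docusaurus() ! {
	mut plbook := playbook.new(
		text: "
!!docusaurus.define
    name: 'mydocs'
    path_publish: '/tmp/docusaurus/publish'

!!docusaurus.import
    git_url: 'https://git.example.com/org/content'
"
	)!
	// assumes a web.site named 'mydocs' was defined by earlier !!site.* actions
	docusaurus.play(mut plbook)!
}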

View File

@@ -1,96 +1,195 @@
module docusaurus
import freeflowuniverse.herolib.osal.notifier
import os
// no longer working because it is coming from doctree
fn watch_docs(docs_path string, path_src string, path_build string) ! {
mut n := notifier.new('docsite_watcher') or {
eprintln('Failed to create watcher: ${err}')
return
}
// import freeflowuniverse.herolib.osal.notifier
// import os
n.args['path_src'] = path_src
n.args['path_build'] = path_build
// fn watch_docs(docs_path string, path_src string, path_build string) ! {
// mut n := notifier.new('docsite_watcher') or {
// eprintln('Failed to create watcher: ${err}')
// return
// }
// Add watch with captured args
n.add_watch(docs_path, fn (event notifier.NotifyEvent, path string, args map[string]string) {
handle_file_change(event, path, args) or { eprintln('Error handling file change: ${err}') }
})!
// n.args['path_src'] = path_src
// n.args['path_build'] = path_build
n.start()!
}
// // Add watch with captured args
// n.add_watch(docs_path, fn (event notifier.NotifyEvent, path string, args map[string]string) {
// handle_file_change(event, path, args) or { eprintln('Error handling file change: ${err}') }
// })!
// handle_file_change processes file system events
fn handle_file_change(event notifier.NotifyEvent, path string, args map[string]string) ! {
file_base := os.base(path)
is_dir := os.is_dir(path)
// n.start()!
// }
// Skip files starting with #
if file_base.starts_with('#') {
return
}
// // handle_file_change processes file system events
// fn handle_file_change(event notifier.NotifyEvent, path string, args map[string]string) ! {
// file_base := os.base(path)
// is_dir := os.is_dir(path)
// For files (not directories), check extensions
if !is_dir {
ext := os.file_ext(path).to_lower()
if ext !in ['.md', '.png', '.jpeg', '.jpg'] {
return
}
}
// // Skip files starting with #
// if file_base.starts_with('#') {
// return
// }
// Get relative path from docs directory
rel_path := path.replace('${args['path_src']}/docs/', '')
dest_path := '${args['path_build']}/docs/${rel_path}'
// // For files (not directories), check extensions
// if !is_dir {
// ext := os.file_ext(path).to_lower()
// if ext !in ['.md', '.png', '.jpeg', '.jpg'] {
// return
// }
// }
match event {
.create, .modify {
if is_dir {
// For directories, just ensure they exist
os.mkdir_all(dest_path) or {
return error('Failed to create directory ${dest_path}: ${err}')
}
println('Created directory: ${rel_path}')
} else {
// For files, ensure parent directory exists and copy
os.mkdir_all(os.dir(dest_path)) or {
return error('Failed to create directory ${os.dir(dest_path)}: ${err}')
}
os.cp(path, dest_path) or {
return error('Failed to copy ${path} to ${dest_path}: ${err}')
}
println('Updated: ${rel_path}')
}
}
.delete {
if os.exists(dest_path) {
if is_dir {
os.rmdir_all(dest_path) or {
return error('Failed to delete directory ${dest_path}: ${err}')
}
println('Deleted directory: ${rel_path}')
} else {
os.rm(dest_path) or { return error('Failed to delete ${dest_path}: ${err}') }
println('Deleted: ${rel_path}')
}
}
}
.rename {
// For rename events, fswatch provides the new path in the event
// The old path is already removed, so we just need to handle the new path
if is_dir {
os.mkdir_all(dest_path) or {
return error('Failed to create directory ${dest_path}: ${err}')
}
println('Renamed directory to: ${rel_path}')
} else {
os.mkdir_all(os.dir(dest_path)) or {
return error('Failed to create directory ${os.dir(dest_path)}: ${err}')
}
os.cp(path, dest_path) or {
return error('Failed to copy ${path} to ${dest_path}: ${err}')
}
println('Renamed to: ${rel_path}')
}
}
}
}
// // Get relative path from docs directory
// rel_path := path.replace('${args['path_src']}/docs/', '')
// dest_path := '${args['path_build']}/docs/${rel_path}'
// match event {
// .create, .modify {
// if is_dir {
// // For directories, just ensure they exist
// os.mkdir_all(dest_path) or {
// return error('Failed to create directory ${dest_path}: ${err}')
// }
// println('Created directory: ${rel_path}')
// } else {
// // For files, ensure parent directory exists and copy
// os.mkdir_all(os.dir(dest_path)) or {
// return error('Failed to create directory ${os.dir(dest_path)}: ${err}')
// }
// os.cp(path, dest_path) or {
// return error('Failed to copy ${path} to ${dest_path}: ${err}')
// }
// println('Updated: ${rel_path}')
// }
// }
// .delete {
// if os.exists(dest_path) {
// if is_dir {
// os.rmdir_all(dest_path) or {
// return error('Failed to delete directory ${dest_path}: ${err}')
// }
// println('Deleted directory: ${rel_path}')
// } else {
// os.rm(dest_path) or { return error('Failed to delete ${dest_path}: ${err}') }
// println('Deleted: ${rel_path}')
// }
// }
// }
// .rename {
// // For rename events, fswatch provides the new path in the event
// // The old path is already removed, so we just need to handle the new path
// if is_dir {
// os.mkdir_all(dest_path) or {
// return error('Failed to create directory ${dest_path}: ${err}')
// }
// println('Renamed directory to: ${rel_path}')
// } else {
// os.mkdir_all(os.dir(dest_path)) or {
// return error('Failed to create directory ${os.dir(dest_path)}: ${err}')
// }
// os.cp(path, dest_path) or {
// return error('Failed to copy ${path} to ${dest_path}: ${err}')
// }
// println('Renamed to: ${rel_path}')
// }
// }
// }
// }
// pub fn (mut s DocSite) dev_watch(args DevArgs) ! {
// s.generate()!
// // Create screen session for docusaurus development server
// mut screen_name := 'docusaurus'
// mut sf := screen.new()!
// // Add and start a new screen session
// mut scr := sf.add(
// name: screen_name
// cmd: '/bin/bash'
// start: true
// attach: false
// reset: true
// )!
// // Send commands to the screen session
// console.print_item('To view the server output:: cd ${s.path_build.path}')
// scr.cmd_send('cd ${s.path_build.path}')!
// // Start script recording in the screen session for log streaming
// log_file := '/tmp/docusaurus_${screen_name}.log'
// script_cmd := 'script -f ${log_file}'
// scr.cmd_send(script_cmd)!
// // Small delay to ensure script is ready
// time.sleep(500 * time.millisecond)
// // Start bun in the scripted session
// bun_cmd := 'bun start -p ${args.port} -h ${args.host}'
// scr.cmd_send(bun_cmd)!
// // Stream the log output to current terminal
// console.print_header(' Docusaurus Development Server')
// console.print_item('Streaming server output... Press Ctrl+C to detach and leave server running')
// console.print_item('Server will be available at: http://${args.host}:${args.port}')
// console.print_item('To reattach later: screen -r ${screen_name}')
// println('')
// // Stream logs until user interrupts
// s.stream_logs(log_file, screen_name)!
// // After user interrupts, show final instructions
// console.print_header(' Server Running in Background')
// console.print_item(' Development server is running in background')
// console.print_item('Server URL: http://${args.host}:${args.port}')
// console.print_item('To reattach: screen -r ${screen_name}')
// console.print_item('To stop server: screen -S ${screen_name} -X kill')
// // console.print_item('The site content is on: ${s.path_src.path}/docs')
// // Start the watcher in a separate thread
// // mut tf:=spawn watch_docs(docs_path, s.path_src.path, s.path_build.path)
// // tf.wait()!
// println('\n')
// if args.open {
// s.open()!
// }
// }
// // Stream logs from script file to current terminal until user interrupts
// fn (mut s DocSite) stream_logs(log_file string, screen_name string) ! {
// // Wait a moment for the log file to be created
// mut attempts := 0
// for !os.exists(log_file) && attempts < 10 {
// time.sleep(200 * time.millisecond)
// attempts++
// }
// if !os.exists(log_file) {
// console.print_stderr('Warning: Log file not created, falling back to screen attach')
// console.print_item('Attaching to screen session... Press Ctrl+A then D to detach')
// // Fallback to direct screen attach
// osal.execute_interactive('screen -r ${screen_name}')!
// return
// }
// // Use tail -f to stream the log file
// // The -f flag follows the file as it grows
// tail_cmd := 'tail -f ${log_file}'
// // Execute tail in interactive mode - this will stream until Ctrl+C
// osal.execute_interactive(tail_cmd) or {
// // If tail fails, try alternative approach
// console.print_stderr('Log streaming failed, attaching to screen session...')
// osal.execute_interactive('screen -r ${screen_name}')!
// return
// }
// // Clean up the log file after streaming
// os.rm(log_file) or {}
// }

View File

@@ -46,6 +46,7 @@ pub fn play(mut plbook PlayBook) ! {
}
config_action.done = true // Mark the action as done
meta_action.done = true
play_import(mut plbook, mut config)!
play_menu(mut plbook, mut config)!
@@ -54,6 +55,7 @@ pub fn play(mut plbook PlayBook) ! {
play_build_dest_dev(mut plbook, mut config)!
play_pages(mut plbook, mut website)!
}
fn play_import(mut plbook PlayBook, mut config SiteConfig) ! {

View File

@@ -19,9 +19,9 @@ fn play_pages(mut plbook PlayBook, mut site Site) ! {
}
// LET'S FIRST DO THE CATEGORIES
category_actions := plbook.find(filter: 'site.page_category')!
mut category_actions := plbook.find(filter: 'site.page_category')!
mut section := Section{}
for action in category_actions {
for mut action in category_actions {
// println(action)
mut p := action.params
section.position = p.get_int_default('position', 20)!
@@ -32,9 +32,10 @@ fn play_pages(mut plbook PlayBook, mut site Site) ! {
return error('need to specify path in site.page_category')
}
site.sections << section
action.done = true // Mark the action as done
}
page_actions := plbook.find(filter: 'site.page')!
mut page_actions := plbook.find(filter: 'site.page')!
mut mypage := Page{
src: ''
path: ''
@@ -42,7 +43,7 @@ fn play_pages(mut plbook PlayBook, mut site Site) ! {
mut position_next := 1
mut position := 0
mut path := ''
for action in page_actions {
for mut action in page_actions {
// println(action)
mut p := action.params
pathnew := p.get_default('path', '')!
@@ -82,5 +83,7 @@ fn play_pages(mut plbook PlayBook, mut site Site) ! {
mypage.title_nr = p.get_int_default('title_nr', 0)!
site.pages << mypage
action.done = true // Mark the action as done
}
}
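The loops over `site.page_category` and `site.page` now take the actions mutably so each one can be marked done. A short sketch of the two actions; the parameter names mirror the `get_*` calls visible in this hunk, the values are invented, and other supported params are not shown:

import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.web.site

fn example_pages() ! {
	mut plbook := playbook.new(
		text: "
!!site.page_category
    position: 10
    path: 'tutorials'

!!site.page
    path: 'tutorials/intro.md'
    title_nr: 1
"
	)!
	// assumes the site's config/meta actions (not shown) are also in the playbook
	site.play(mut plbook)!
}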