2025-02-06 06:26:44 +03:00
parent 5bbb99c3f9
commit 5ad2062e5c
55 changed files with 516 additions and 590 deletions

View File

@@ -14,8 +14,6 @@ println('Downloading and importing location data (this may take a few minutes)..
loc.download_and_import(false) or { panic(err) }
println('Data import complete')
// // Example 1: Search for a city
// println('\nSearching for London...')
// results := loc.search('London', 'GB', 5, true) or { panic(err) }

View File

@@ -2,10 +2,7 @@
import freeflowuniverse.herolib.virt.docker
fn build() ! {
mut engine := docker.new(prefix: '', localonly: true)!
mut r := engine.recipe_new(name: 'dev_tools', platform: .alpine)
@@ -19,14 +16,12 @@ fn build()!{
r.add_sshserver()!
r.build(true)!
}
// build()!
mut engine := docker.new(prefix: '', localonly: true)!
// Check if dev_tools image exists
if !engine.image_exists(repo: 'dev_tools')! {
eprintln("image dev_tools doesn't exist, build it")
@@ -45,8 +40,10 @@ mut container := engine.container_get(
name: 'dev2'
image_repo: 'dev_tools'
remove_when_done: false
forwarded_ports: ["8022:22/tcp"] //this forward 8022 on host to 22 on container
env:{"SSH_KEY":"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com"}
forwarded_ports: ['8022:22/tcp'] // this forwards 8022 on the host to 22 on the container
env: {
'SSH_KEY': 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com'
}
)!
}

View File

@@ -2,10 +2,7 @@
import freeflowuniverse.herolib.virt.docker
fn build() ! {
mut engine := docker.new(prefix: '', localonly: true)!
mut r := engine.recipe_new(name: 'dev_ubuntu', platform: .ubuntu)
@@ -19,14 +16,12 @@ fn build()!{
r.add_sshserver()!
r.build(true)!
}
build()!
mut engine := docker.new(prefix: '', localonly: true)!
// Check if dev_ubuntu image exists
if !engine.image_exists(repo: 'dev_ubuntu')! {
eprintln("image dev_ubuntu doesn't exist, build it")
@@ -45,8 +40,10 @@ mut container := engine.container_get(
name: 'dev3'
image_repo: 'dev_ubuntu'
remove_when_done: false
forwarded_ports: ["8023:22/tcp"] //this forward 8022 on host to 22 on container
env:{"SSH_KEY":"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com"}
forwarded_ports: ['8023:22/tcp'] // this forwards 8023 on the host to 22 on the container
env: {
'SSH_KEY': 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com'
}
)!
}

View File

@@ -45,6 +45,14 @@ pub fn cmd_docusaurus(mut cmdroot Command) {
description: 'build dev version and publish.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
name: 'update'
abbrev: 'p'
description: 'update your environment: the template and the repo you are working on (git pull).'
})
cmd_run.add_flag(Flag{
flag: .bool
@@ -58,7 +66,7 @@ pub fn cmd_docusaurus(mut cmdroot Command) {
}
fn cmd_docusaurus_execute(cmd Command) ! {
// mut reset := cmd.flags.get_bool('reset') or { false }
mut update := cmd.flags.get_bool('update') or { false }
mut url := cmd.flags.get_string('url') or { '' }
// mut path := cmd.flags.get_string('path') or { '' }
@@ -76,15 +84,13 @@ fn cmd_docusaurus_execute(cmd Command) ! {
// exit(1)
// }
mut docs := docusaurus.new(
// build_path: '/tmp/docusaurus_build'
)!
mut docs := docusaurus.new(update: update)!
if build {
// Create a new docusaurus site
_ := docs.build(
url: url
update: update
)!
}
@@ -92,6 +98,7 @@ fn cmd_docusaurus_execute(cmd Command) ! {
// Create a new docusaurus site
_ := docs.build_dev(
url: url
update: update
)!
}
@@ -99,7 +106,7 @@ fn cmd_docusaurus_execute(cmd Command) ! {
// Create a new docusaurus site
_ := docs.dev(
url: url
update: update
)!
}
}

View File

@@ -112,7 +112,6 @@ pub fn cmd_git(mut cmdroot Command) {
abbrev: 'l'
description: 'reload the data in cache.'
})
}
mut allcmdscommit := [&push_command, &pull_command, &commit_command]
@@ -229,7 +228,6 @@ fn cmd_git_execute(cmd Command) ! {
mut is_silent := cmd.flags.get_bool('silent') or { false }
mut reload := cmd.flags.get_bool('load') or { false }
if is_silent || cmd.name == 'cd' {
console.silent_set()
}

View File

@@ -9,7 +9,6 @@ import term
const ws_url = 'ws://localhost:8080'
// Helper function to send request and receive response
fn send_request(mut ws websocket.Client, request OpenRPCRequest) !OpenRPCResponse {
// Send request
@@ -42,20 +41,19 @@ fn send_request(mut ws websocket.Client, request OpenRPCRequest) !OpenRPCRespons
// OpenRPC request/response structures (copied from handler.v)
struct OpenRPCRequest {
jsonrpc string [required]
method string [required]
jsonrpc string @[required]
method string @[required]
params []string
id int [required]
id int @[required]
}
struct OpenRPCResponse {
jsonrpc string [required]
jsonrpc string @[required]
result string
error string
id int [required]
id int @[required]
}
// Initialize and configure WebSocket client
fn init_client() !&websocket.Client {
mut ws := websocket.new_client(ws_url)!
@@ -90,9 +88,7 @@ fn init_client() !&websocket.Client {
// Main client logic
mut ws := init_client()!
defer {
ws.close(1000, 'normal') or {
eprintln(term.red('Error closing connection: ${err}'))
}
ws.close(1000, 'normal') or { eprintln(term.red('Error closing connection: ${err}')) }
}
println(term.green('Connected to ${ws_url}'))

View File

@@ -11,7 +11,6 @@ fn start_rpc_server( mut wg sync.WaitGroup) ! {
// Create OpenRPC server
openrpc.server_start()!
}
fn start_ws_server(mut wg sync.WaitGroup) ! {

View File

@@ -1,4 +1,5 @@
module openrpc
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.core.jobs.model
@@ -19,7 +20,6 @@ pub fn server_start() ! {
queue: &redisclient.RedisQueue{
key: rpc_queue
redis: redis
}
runner: runner
}

View File

@@ -3,8 +3,6 @@ module openrpc
import freeflowuniverse.herolib.core.redisclient
import json
// Start the server and listen for requests
pub fn (mut s OpenRPCServer) start() ! {
println('Starting OpenRPC server.')
@@ -32,7 +30,7 @@ pub fn (mut s OpenRPCServer) start() ! {
// Send response back to Redis using response queue
response_json := json.encode(response)
key := '${rpc_queue}:${request.id}'
println("response: \n${response}\n put on return queue ${key} ")
println('response: \n${response}\n put on return queue ${key} ')
mut response_queue := &redisclient.RedisQueue{
key: key
redis: s.redis
@@ -45,27 +43,26 @@ pub fn (mut s OpenRPCServer) start() ! {
fn (mut s OpenRPCServer) handle_request(request OpenRPCRequest) !OpenRPCResponse {
method := request.method.to_lower()
println("process: method: '${method}'")
if method.starts_with("job.") {
if method.starts_with('job.') {
return s.handle_request_job(request) or {
return rpc_response_error(request.id,"error in request job:\n${err}")
return rpc_response_error(request.id, 'error in request job:\n${err}')
}
}
if method.starts_with("agent.") {
if method.starts_with('agent.') {
return s.handle_request_agent(request) or {
return rpc_response_error(request.id,"error in request agent:\n${err}")
return rpc_response_error(request.id, 'error in request agent:\n${err}')
}
}
if method.starts_with("group.") {
if method.starts_with('group.') {
return s.handle_request_group(request) or {
return rpc_response_error(request.id,"error in request group:\n${err}")
return rpc_response_error(request.id, 'error in request group:\n${err}')
}
}
if method.starts_with("service.") {
if method.starts_with('service.') {
return s.handle_request_service(request) or {
return rpc_response_error(request.id,"error in request service:\n${err}")
return rpc_response_error(request.id, 'error in request service:\n${err}')
}
}
return rpc_response_error(request.id,"Could not find handler for ${method}")
return rpc_response_error(request.id, 'Could not find handler for ${method}')
}

View File

@@ -3,12 +3,10 @@ module openrpc
import freeflowuniverse.herolib.core.jobs.model
import json
pub fn (mut h OpenRPCServer) handle_request_agent(request OpenRPCRequest) !OpenRPCResponse {
mut response := rpc_response_new(request.id)
method:=request.method.all_after_first("agent.")
method := request.method.all_after_first('agent.')
println("request agent:'${method}'")

View File

@@ -5,7 +5,7 @@ import json
pub fn (mut h OpenRPCServer) handle_request_group(request OpenRPCRequest) !OpenRPCResponse {
mut response := rpc_response_new(request.id)
method:=request.method.all_after_first("group.")
method := request.method.all_after_first('group.')
println("request group:'${method}'")
match method {
'new' {

View File

@@ -6,7 +6,7 @@ import json
pub fn (mut h OpenRPCServer) handle_request_job(request OpenRPCRequest) !OpenRPCResponse {
mut response := rpc_response_new(request.id)
method:=request.method.all_after_first("job.")
method := request.method.all_after_first('job.')
println("request job:'${method}'")
println(request)
match method {

View File

@@ -5,7 +5,7 @@ import json
pub fn (mut h OpenRPCServer) handle_request_service(request OpenRPCRequest) !OpenRPCResponse {
mut response := rpc_response_new(request.id)
method:=request.method.all_after_first("service.")
method := request.method.all_after_first('service.')
println("request service:'${method}'")
match method {
'new' {
@@ -65,12 +65,10 @@ pub fn (mut h OpenRPCServer) handle_request_service(request OpenRPCRequest) !Ope
}
// Parse groups array from JSON string
groups := json.decode([]string, request.params[3])!
has_access := h.runner.services.check_access(
request.params[0], // actor
has_access := h.runner.services.check_access(request.params[0], // actor
request.params[1], // action
request.params[2], // user_pubkey
groups
)!
groups)!
response.result = json.encode(has_access)
}
else {

View File

@@ -17,7 +17,6 @@ pub mut:
id int @[required]
}
fn rpc_response_new(id int) OpenRPCResponse {
mut response := OpenRPCResponse{
jsonrpc: '2.0'
@@ -35,5 +34,4 @@ fn rpc_response_error(id int, errormsg string)OpenRPCResponse {
return response
}
const rpc_queue = 'herorunner:q:rpc'

View File

@@ -89,7 +89,5 @@ pub fn (mut s WSServer) start() ! {
// Start server
println('WebSocket server listening on port ${s.port}')
ws_server.listen() or {
return error('Failed to start WebSocket server: ${err}')
}
ws_server.listen() or { return error('Failed to start WebSocket server: ${err}') }
}

View File

@@ -37,7 +37,6 @@ pub fn new(args_ GitStructureArgsNew) !&GitStructure {
ssh_key_name: args.ssh_key_name
}
return get(coderoot: args.coderoot, reload: args.reload, cfg: cfg)
}

View File

@@ -61,7 +61,6 @@ pub fn (mut gitstructure GitStructure) load(reload bool) ! {
return error(msg)
}
}
}
// Recursively loads repositories from the provided path, updating their statuses, does not check the status

View File

@@ -51,7 +51,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
args.path = os.getwd()
}
// see if it's one repo we are in, based on the current path
if args.repo == '' && args.account == '' && args.provider == '' && args.filter == '' {
mut curdiro := pathlib.get_dir(path: args.path, create: false)!
@@ -66,7 +65,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
// if a url was used, it means we are in one repo
if args.url.len > 0 {
if !(args.repo == '' && args.account == '' && args.provider == '' && args.filter == '') {
return error("when specify url cannot specify repo, account, profider or filter")
return error('when specifying a url you cannot specify repo, account, provider or filter')
}
mut r0 := gs.get_repo(url: args.url)!
args.repo = r0.name
@@ -122,7 +121,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
}
if args.cmd in 'pull,push,commit,delete'.split(',') {
gs.repos_print(
filter: args.filter
name: args.repo
@@ -141,16 +139,15 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
// check on repos who needs what
for mut g in repos {
if args.cmd in ["push"] && g.need_push_or_pull()! {
if args.cmd == 'push' && g.need_push_or_pull()! {
need_push0 = true
}
if args.cmd in ["push","pull"] && (need_push0 || g.need_push_or_pull()!){
if args.cmd in ['push', 'pull'] && (need_push0 || g.need_push_or_pull()!) {
need_pull0 = true
}
if args.cmd in ["push","pull","commit"] && (g.need_commit()!) {
if args.cmd in ['push', 'pull', 'commit'] && (g.need_commit()!) {
need_commit0 = true
}
}
@@ -180,7 +177,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
if need_commit0 || need_pull0 || need_push0 {
ok = ui.ask_yesno(question: 'Is above ok?')!
} else {
console.print_green("nothing to do")
console.print_green('nothing to do')
}
}
@@ -198,7 +195,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
)!
}
}
}
if args.cmd == 'delete' {
@@ -215,10 +211,10 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
mut has_changed := false
for mut g in repos {
need_push_repo := need_push0 && g.need_push_or_pull()!
need_pull_repo := need_push_repo || (need_pull0 && g.need_push_or_pull()!)
need_commit_repo := need_push_repo || need_pull_repo || (need_commit0 && g.need_commit()!)
need_commit_repo := need_push_repo || need_pull_repo
|| (need_commit0 && g.need_commit()!)
// console.print_debug(" --- git_do ${g.cache_key()} \n need_commit_repo:${need_commit_repo} \n need_pull_repo:${need_pull_repo} \n need_push_repo:${need_push_repo}")

View File

@@ -137,11 +137,8 @@ fn repo_match_check(repo GitRepo, args ReposGetArgs) !bool {
}
return r
}
// Retrieves a single repository path based on the provided arguments (goes inside repo).
// if pull is set it forces a pull; if that fails an error is returned; if reset is set, local changes are removed
// If the repository does not exist, it will clone it

View File

@@ -92,7 +92,6 @@ pub fn (mut repo GitRepo) commit(msg string) ! {
}
}
// Push local changes to the remote repository.
pub fn (mut repo GitRepo) push() ! {
repo.status_update()!
@@ -311,10 +310,11 @@ fn (mut repo GitRepo) update_submodules() ! {
}
fn (repo GitRepo) exec(cmd_ string) !string {
repo_path := repo.path()
cmd := 'cd ${repo_path} && ${cmd_}'
// console.print_debug(cmd)
r := os.execute(cmd)
repo_path := os.quoted_path(repo.path())
cmd := 'cd ${repo_path} && ${cmd_}'
// console.print_debug(cmd)
r := os.execute_opt(cmd)!
if r.exit_code != 0 {
return error('Repo failed to exec cmd: ${cmd}\n${r.output}')
}

View File

@@ -11,20 +11,14 @@ pub struct StatusUpdateArgs {
pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Check current time vs last check, if needed (check period) then load
repo.cache_get() or {
return error('Failed to get cache for repo ${repo.name}: ${err}')
} // Ensure we have the situation from redis
repo.init() or {
return error('Failed to initialize repo ${repo.name}: ${err}')
}
repo.cache_get() or { return error('Failed to get cache for repo ${repo.name}: ${err}') } // Ensure we have the situation from redis
repo.init() or { return error('Failed to initialize repo ${repo.name}: ${err}') }
current_time := int(time.now().unix())
if args.reload || repo.last_load == 0
|| current_time - repo.last_load >= repo.config.remote_check_period {
// console.print_debug('${repo.name} ${current_time}-${repo.last_load} (${current_time - repo.last_load >= repo.config.remote_check_period}): ${repo.config.remote_check_period} +++')
// if true{exit(0)}
repo.load() or {
return error('Failed to load repository ${repo.name}: ${err}')
}
repo.load() or { return error('Failed to load repository ${repo.name}: ${err}') }
}
}
@@ -32,9 +26,7 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Does not check cache; it is the caller's responsibility to check the cache and load accordingly.
fn (mut repo GitRepo) load() ! {
console.print_header('load ${repo.print_key()}')
repo.init() or {
return error('Failed to initialize repo during load operation: ${err}')
}
repo.init() or { return error('Failed to initialize repo during load operation: ${err}') }
git_path := '${repo.path()}/.git'
if os.exists(git_path) == false {
@@ -45,13 +37,9 @@ fn (mut repo GitRepo) load() ! {
return error('Failed to fetch updates for ${repo.name} at ${repo.path()}: ${err}. Please check network connection and repository access.')
}
repo.load_branches() or {
return error('Failed to load branches for ${repo.name}: ${err}')
}
repo.load_branches() or { return error('Failed to load branches for ${repo.name}: ${err}') }
repo.load_tags() or {
return error('Failed to load tags for ${repo.name}: ${err}')
}
repo.load_tags() or { return error('Failed to load tags for ${repo.name}: ${err}') }
repo.last_load = int(time.now().unix())
@@ -100,26 +88,26 @@ fn (mut repo GitRepo) load_branches() ! {
if mybranch.len == 1 {
repo.status_local.branch = mybranch[0].trim_space()
} else {
return error("bug: git branch does not give branchname.\n${mybranch}")
return error('bug: git branch does not give branchname.\n${mybranch}')
}
}
// Helper to load remote tags
fn (mut repo GitRepo) load_tags() ! {
tags_result := repo.exec('git tag --list') or {
tags_result := repo.exec('git show-ref --tags') or {
return error('Failed to list tags: ${err}. Please ensure git is installed and repository is accessible.')
}
//println(tags_result)
for line in tags_result.split('\n') {
line_trimmed := line.trim_space()
if line_trimmed != '' {
parts := line_trimmed.split(' ')
if parts.len < 2 {
//console.print_debug('Skipping malformed tag line: ${line_trimmed}')
if line_trimmed == '' {
continue
}
if parts := line_trimmed.split(' refs/tags/') {
if parts.len != 2 {
continue
}
commit_hash := parts[0].trim_space()
tag_name := parts[1].all_after('refs/tags/').trim_space()
tag_name := parts[1].trim_space()
// Update remote tags info
repo.status_remote.tags[tag_name] = commit_hash
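
For reference: 'git show-ref --tags' prints one line per tag in the form '<commit_hash> refs/tags/<tag_name>'. A minimal sketch of the parsing loop this change appears to aim for (an illustration, not the committed code; it assumes status_remote.tags maps tag name to commit hash) is:

    for line in tags_result.split('\n') {
        line_trimmed := line.trim_space()
        if line_trimmed == '' {
            continue
        }
        // split '<commit_hash> refs/tags/<tag_name>' into hash and tag name
        parts := line_trimmed.split(' refs/tags/')
        if parts.len != 2 {
            continue
        }
        commit_hash := parts[0].trim_space()
        tag_name := parts[1].trim_space()
        repo.status_remote.tags[tag_name] = commit_hash
    }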

View File

@@ -1,4 +1,3 @@
module docker
import freeflowuniverse.herolib.ui.console

View File

@@ -5,12 +5,8 @@ import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.installers.ulist
import os
//////////////////// following actions are not specific to instance of the object
// checks if a certain version or above is installed
@@ -26,7 +22,7 @@ fn installed() !bool {
if r.len != 1 {
return error("couldn't parse bun version.\n${res.output}")
}
println(" ${texttools.version(version)} <= ${texttools.version(r[0])}")
println(' ${texttools.version(version)} <= ${texttools.version(r[0])}')
if texttools.version(version) <= texttools.version(r[0]) {
return true
}
@@ -45,7 +41,6 @@ fn upload() ! {
// cmdname: 'bun'
// source: '${gitpath}/target/x86_64-unknown-linux-musl/release/bun'
// )!
}
fn install() ! {
@@ -53,9 +48,7 @@ fn install() ! {
osal.exec(cmd: 'curl -fsSL https://bun.sh/install | bash')!
}
fn destroy() ! {
// osal.process_kill_recursive(name:'bun')!
osal.cmd_delete('bun')!
@@ -65,11 +58,9 @@ fn destroy() ! {
')!
// will remove all paths where bun is found
osal.profile_path_add_remove(paths2delete:"bun")!
osal.profile_path_add_remove(paths2delete: 'bun')!
osal.rm("
osal.rm('
~/.bun
")!
')!
}

View File

@@ -1,10 +1,8 @@
module bun
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.osal.zinit
import time
@@ -26,8 +24,6 @@ pub fn get(args_ ArgsGet) !&Bun {
return &Bun{}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -40,21 +36,20 @@ fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManag
// systemd
match cat {
.zinit {
console.print_debug("startupmanager: zinit")
console.print_debug('startupmanager: zinit')
return startupmanager.get(cat: .zinit)!
}
.systemd {
console.print_debug("startupmanager: systemd")
console.print_debug('startupmanager: systemd')
return startupmanager.get(cat: .systemd)!
}else{
console.print_debug("startupmanager: auto")
}
else {
console.print_debug('startupmanager: auto')
return startupmanager.get()!
}
}
}
@[params]
pub struct InstallArgs {
pub mut:
@@ -68,14 +63,11 @@ pub fn (mut self Bun) install(args InstallArgs) ! {
}
}
pub fn (mut self Bun) destroy() ! {
switch(self.name)
destroy()!
}
// switch instance to be used for bun
pub fn switch(name string) {
bun_default = name

View File

@@ -1,4 +1,5 @@
module bun
import freeflowuniverse.herolib.data.paramsparser
import os
@@ -6,7 +7,6 @@ pub const version = '1.2.2'
const singleton = true
const default = true
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE; HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@[heap]
pub struct Bun {
@@ -14,7 +14,6 @@ pub mut:
name string = 'default'
}
fn obj_init(obj_ Bun) !Bun {
// never call get here, only thing we can do here is work on object itself
mut obj := obj_
@@ -25,5 +24,3 @@ fn obj_init(obj_ Bun)!Bun{
fn configure() ! {
// mut installer := get()!
}

View File

@@ -109,10 +109,10 @@ pub fn profile_path_source() !string {
}
pp := profile_path()!
if os.exists(pp) {
res := os.execute("source ${pp}")
res := os.execute('source ${pp}')
if res.exit_code != 0 {
console.print_stderr("WARNING: your profile is corrupt: ${pp}")
return error("profile corrupt")
console.print_stderr('WARNING: your profile is corrupt: ${pp}')
return error('profile corrupt')
} else {
return 'source ${pp}'
}
@@ -123,9 +123,7 @@ pub fn profile_path_source() !string {
// return source $path && .
// or empty if it doesn't exist
pub fn profile_path_source_and() !string {
p:=profile_path_source() or {
return ""
}
p := profile_path_source() or { return '' }
return '${p} && '
}

View File

@@ -26,10 +26,10 @@ pub mut:
pub fn download(args_ DownloadArgs) !pathlib.Path {
mut args := args_
args.dest = args.dest.trim(" ").trim_right("/")
args.expand_dir = args.expand_dir.trim(" ").trim_right("/")
args.expand_file = args.expand_file.replace("//","/")
args.dest = args.dest.replace("//","/")
args.dest = args.dest.trim(' ').trim_right('/')
args.expand_dir = args.expand_dir.trim(' ').trim_right('/')
args.expand_file = args.expand_file.replace('//', '/')
args.dest = args.dest.replace('//', '/')
console.print_header('download: ${args.url}')
if args.name == '' {

View File

@@ -87,7 +87,7 @@ pub fn (mut n Notifier) start() ! {
n.is_watching = true
if n.watch_list.len > 1 {
return error("only support watchers with len 1 for now")
return error('only support watchers with len 1 for now')
}
// Start a watcher for each path

View File

@@ -37,7 +37,6 @@ pub mut:
command string
}
// create/start container (first need to get a dockercontainer before we can start)
pub fn (mut container DockerContainer) start() ! {
exec(cmd: 'docker start ${container.id}')!

View File

@@ -3,7 +3,6 @@ module docker
import freeflowuniverse.herolib.osal { exec }
import freeflowuniverse.herolib.virt.utils
@[params]
pub struct DockerContainerCreateArgs {
pub mut:
@@ -19,7 +18,6 @@ pub mut:
command string
}
pub fn (mut e DockerEngine) container_create(args DockerContainerCreateArgs) !&DockerContainer {
// Validate required parameters
if args.name.trim_space() == '' {

View File

@@ -72,7 +72,6 @@ pub fn (mut b DockerBuilderRecipe) add_package(args PackageArgs) ! {
}
}
// let's now check if the package has not already been set before
for package0 in b.items {
if package0 is PackageItem {

View File

@@ -4,7 +4,7 @@ import os
import strings
pub fn (mut site DocSite) clean(args ErrorArgs) ! {
toclean := "
toclean := '
/node_modules
babel.config.js
@@ -44,7 +44,7 @@ pub fn (mut site DocSite) clean(args ErrorArgs) ! {
sidebars.ts
tsconfig.json
"
'
mut sb := strings.new_builder(200)
for line in toclean.split_into_lines() {
@@ -74,9 +74,7 @@ pub fn (mut site DocSite) clean(args ErrorArgs) ! {
for file in files {
if file.starts_with(base_pattern) {
file_path := os.join_path(base_dir, file)
os.rm(file_path) or {
sb.writeln('Failed to remove ${file_path}: ${err}')
}
os.rm(file_path) or { sb.writeln('Failed to remove ${file_path}: ${err}') }
}
}
}
@@ -90,11 +88,8 @@ pub fn (mut site DocSite) clean(args ErrorArgs) ! {
sb.writeln('Failed to remove directory ${full_path}: ${err}')
}
} else {
os.rm(full_path) or {
sb.writeln('Failed to remove file ${full_path}: ${err}')
os.rm(full_path) or { sb.writeln('Failed to remove file ${full_path}: ${err}') }
}
}
}
}
}

View File

@@ -1,15 +1,14 @@
module docusaurus
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.screen
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.develop.gittools
import json
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
@[heap]
pub struct DocSite {
@@ -35,6 +34,7 @@ pub mut:
build_path string
production bool
watch_changes bool = true
update bool
}
pub fn (mut f DocusaurusFactory) build_dev(args_ DSiteNewArgs) !&DocSite {
@@ -93,6 +93,13 @@ pub fn (mut f DocusaurusFactory) dev(args_ DSiteNewArgs) !&DocSite {
console.print_item(' 1. Attach to screen: screen -r ${screen_name}')
console.print_item(' 2. To detach from screen: Press Ctrl+A then D')
console.print_item(' 3. To list all screens: screen -ls')
console.print_item('The site content is at:')
console.print_item(' 1. location of documents: ${s.path_src.path}/docs')
if osal.cmd_exists('code') {
console.print_item(' 2. We opened the docs dir above in VSCode.')
osal.exec(cmd: 'code ${s.path_src.path}/docs')!
}
// Start the watcher in a separate thread
//mut tf:=spawn watch_docs(docs_path, s.path_src.path, s.path_build.path)
@@ -137,7 +144,7 @@ pub fn (mut f DocusaurusFactory) add(args_ DSiteNewArgs) !&DocSite {
}
mut gs := gittools.new()!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git', pull: args.update)!
mut template_path := r.patho()!
// First ensure cfg directory exists in src, if not copy from template

View File

@@ -1,6 +1,5 @@
module docusaurus
import os
// import freeflowuniverse.herolib.data.doctree.collection
import freeflowuniverse.herolib.core.pathlib
@@ -23,12 +22,13 @@ pub mut:
// publish_path string
build_path string
production bool
update bool
}
pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {
mut args := args_
if args.build_path == ""{
args.build_path = "${os.home_dir()}/hero/var/docusaurus"
if args.build_path == '' {
args.build_path = '${os.home_dir()}/hero/var/docusaurus'
}
// if args.publish_path == ""{
// args.publish_path = "${os.home_dir()}/hero/var/docusaurus/publish"
@@ -39,8 +39,7 @@ pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {
// path_publish: pathlib.get_dir(path: args_.publish_path, create: true)!
}
ds.template_install()!
ds.template_install(args.update)!
return ds
}

View File

@@ -1,32 +1,28 @@
module docusaurus
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.installers.web.bun
fn (mut site DocusaurusFactory) template_install() ! {
fn (mut site DocusaurusFactory) template_install(update bool) ! {
mut gs := gittools.new()!
mut r:=gs.get_repo(url:'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git', pull: update)!
mut template_path := r.patho()!
for item in ['package.json', 'sidebars.ts', 'tsconfig.json'] {
mut aa := template_path.file_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")!
aa.copy(dest: '${site.path_build.path}/${item}')!
}
// install bun
mut installer := bun.get()!
installer.install()!
osal.exec(cmd: '
osal.exec(
cmd: '
cd ${site.path_build.path}
bun install
')!
'
)!
}

View File

@@ -1,22 +1,20 @@
module docusaurus
import freeflowuniverse.herolib.osal.notifier
import os
fn watch_docs(docs_path string, path_src string, path_build string) ! {
mut n := notifier.new('docsite_watcher') or {
eprintln('Failed to create watcher: ${err}')
return
}
n.args["path_src"]=path_src
n.args["path_build"]=path_build
n.args['path_src'] = path_src
n.args['path_build'] = path_build
// Add watch with captured args
n.add_watch(docs_path, fn (event notifier.NotifyEvent, path string, args map[string]string) {
handle_file_change(event, path, args) or {
eprintln('Error handling file change: ${err}')
}
handle_file_change(event, path, args) or { eprintln('Error handling file change: ${err}') }
})!
n.start()!
@@ -41,8 +39,8 @@ fn handle_file_change(event notifier.NotifyEvent, path string, args map[string]s
}
// Get relative path from docs directory
rel_path := path.replace('${args["path_src"]}/docs/', '')
dest_path := '${args["path_build"]}/docs/${rel_path}'
rel_path := path.replace('${args['path_src']}/docs/', '')
dest_path := '${args['path_build']}/docs/${rel_path}'
match event {
.create, .modify {
@@ -71,9 +69,7 @@ fn handle_file_change(event notifier.NotifyEvent, path string, args map[string]s
}
println('Deleted directory: ${rel_path}')
} else {
os.rm(dest_path) or {
return error('Failed to delete ${dest_path}: ${err}')
}
os.rm(dest_path) or { return error('Failed to delete ${dest_path}: ${err}') }
println('Deleted: ${rel_path}')
}
}