This commit is contained in:
2025-02-06 06:26:44 +03:00
parent 5bbb99c3f9
commit 5ad2062e5c
55 changed files with 516 additions and 590 deletions

View File

@@ -10,12 +10,10 @@ println('Location database initialized')
// This only needs to be done once or when updating data
println('Downloading and importing location data (this may take a few minutes)...')
//the arg is if we redownload
// the arg is if we redownload
loc.download_and_import(false) or { panic(err) }
println('Data import complete')
// // Example 1: Search for a city
// println('\nSearching for London...')
// results := loc.search('London', 'GB', 5, true) or { panic(err) }

View File

@@ -2,10 +2,7 @@
import freeflowuniverse.herolib.virt.docker
fn build()!{
fn build() ! {
mut engine := docker.new(prefix: '', localonly: true)!
mut r := engine.recipe_new(name: 'dev_tools', platform: .alpine)
@@ -19,21 +16,19 @@ fn build()!{
r.add_sshserver()!
r.build(true)!
}
// build()!
mut engine := docker.new(prefix: '', localonly: true)!
// Check if dev_tools image exists
if ! engine.image_exists(repo: 'dev_tools') !{
if !engine.image_exists(repo: 'dev_tools')! {
eprintln("image dev_tools doesn't exist, build it")
build()!
}
engine.container_delete( name: 'dev2') or {}
engine.container_delete(name: 'dev2') or {}
// Check if container exists and get its status
mut container := engine.container_get(
@@ -45,8 +40,10 @@ mut container := engine.container_get(
name: 'dev2'
image_repo: 'dev_tools'
remove_when_done: false
forwarded_ports: ["8022:22/tcp"] //this forward 8022 on host to 22 on container
env:{"SSH_KEY":"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com"}
forwarded_ports: ['8022:22/tcp'] // this forward 8022 on host to 22 on container
env: {
'SSH_KEY': 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com'
}
)!
}

View File

@@ -2,10 +2,7 @@
import freeflowuniverse.herolib.virt.docker
fn build()!{
fn build() ! {
mut engine := docker.new(prefix: '', localonly: true)!
mut r := engine.recipe_new(name: 'dev_ubuntu', platform: .ubuntu)
@@ -19,21 +16,19 @@ fn build()!{
r.add_sshserver()!
r.build(true)!
}
build()!
mut engine := docker.new(prefix: '', localonly: true)!
// Check if dev_ubuntu image exists
if ! engine.image_exists(repo: 'dev_ubuntu') !{
if !engine.image_exists(repo: 'dev_ubuntu')! {
eprintln("image dev_ubuntu doesn't exist, build it")
build()!
}
engine.container_delete( name: 'dev3') or {}
engine.container_delete(name: 'dev3') or {}
// Check if container exists and get its status
mut container := engine.container_get(
@@ -45,8 +40,10 @@ mut container := engine.container_get(
name: 'dev3'
image_repo: 'dev_ubuntu'
remove_when_done: false
forwarded_ports: ["8023:22/tcp"] //this forward 8022 on host to 22 on container
env:{"SSH_KEY":"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com"}
forwarded_ports: ['8023:22/tcp'] // this forward 8022 on host to 22 on container
env: {
'SSH_KEY': 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIahWiRRm9cWAKktH9dndn3R45grKqzPC3mKX8IjGgH6 kristof@incubaid.com'
}
)!
}

View File

@@ -45,6 +45,14 @@ pub fn cmd_docusaurus(mut cmdroot Command) {
description: 'build dev version and publish.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
name: 'update'
abbrev: 'p'
description: 'update your environment the template and the repo you are working on (git pull).'
})
cmd_run.add_flag(Flag{
flag: .bool
@@ -58,7 +66,7 @@ pub fn cmd_docusaurus(mut cmdroot Command) {
}
fn cmd_docusaurus_execute(cmd Command) ! {
// mut reset := cmd.flags.get_bool('reset') or { false }
mut update := cmd.flags.get_bool('update') or { false }
mut url := cmd.flags.get_string('url') or { '' }
// mut path := cmd.flags.get_string('path') or { '' }
@@ -76,30 +84,29 @@ fn cmd_docusaurus_execute(cmd Command) ! {
// exit(1)
// }
mut docs := docusaurus.new(update:update)!
mut docs := docusaurus.new(
// build_path: '/tmp/docusaurus_build'
)!
if build{
if build {
// Create a new docusaurus site
_ := docs.build(
url:url
url: url
update:update
)!
}
if builddev{
if builddev {
// Create a new docusaurus site
_ := docs.build_dev(
url:url
url: url
update:update
)!
}
if dev{
if dev {
// Create a new docusaurus site
_ := docs.dev(
url:url
url: url
update:update
)!
}
}

View File

@@ -112,7 +112,6 @@ pub fn cmd_git(mut cmdroot Command) {
abbrev: 'l'
description: 'reload the data in cache.'
})
}
mut allcmdscommit := [&push_command, &pull_command, &commit_command]
@@ -229,7 +228,6 @@ fn cmd_git_execute(cmd Command) ! {
mut is_silent := cmd.flags.get_bool('silent') or { false }
mut reload := cmd.flags.get_bool('load') or { false }
if is_silent || cmd.name == 'cd' {
console.silent_set()
}

View File

@@ -9,7 +9,6 @@ import term
const ws_url = 'ws://localhost:8080'
// Helper function to send request and receive response
fn send_request(mut ws websocket.Client, request OpenRPCRequest) !OpenRPCResponse {
// Send request
@@ -42,20 +41,19 @@ fn send_request(mut ws websocket.Client, request OpenRPCRequest) !OpenRPCRespons
// OpenRPC request/response structures (copied from handler.v)
struct OpenRPCRequest {
jsonrpc string [required]
method string [required]
jsonrpc string @[required]
method string @[required]
params []string
id int [required]
id int @[required]
}
struct OpenRPCResponse {
jsonrpc string [required]
jsonrpc string @[required]
result string
error string
id int [required]
id int @[required]
}
// Initialize and configure WebSocket client
fn init_client() !&websocket.Client {
mut ws := websocket.new_client(ws_url)!
@@ -90,9 +88,7 @@ fn init_client() !&websocket.Client {
// Main client logic
mut ws := init_client()!
defer {
ws.close(1000, 'normal') or {
eprintln(term.red('Error closing connection: ${err}'))
}
ws.close(1000, 'normal') or { eprintln(term.red('Error closing connection: ${err}')) }
}
println(term.green('Connected to ${ws_url}'))
@@ -102,7 +98,7 @@ new_job := send_request(mut ws, OpenRPCRequest{
jsonrpc: '2.0'
method: 'job.new'
params: []string{}
id: rand.i32_in_range(1,10000000)!
id: rand.i32_in_range(1, 10000000)!
}) or {
eprintln(term.red('Failed to create new job: ${err}'))
exit(1)

View File

@@ -6,15 +6,14 @@ import time
import sync
import os
fn start_rpc_server( mut wg sync.WaitGroup) ! {
fn start_rpc_server(mut wg sync.WaitGroup) ! {
defer { wg.done() }
// Create OpenRPC server
openrpc.server_start()!
}
fn start_ws_server( mut wg sync.WaitGroup) ! {
fn start_ws_server(mut wg sync.WaitGroup) ! {
defer { wg.done() }
// Get port from environment variable or use default

View File

@@ -1,4 +1,5 @@
module openrpc
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.core.jobs.model
@@ -14,14 +15,13 @@ mut:
pub fn server_start() ! {
redis := redisclient.core_get()!
mut runner := model.new()!
mut s:= &OpenRPCServer{
mut s := &OpenRPCServer{
redis: redis
queue: &redisclient.RedisQueue{
key: rpc_queue
redis: redis
}
runner:runner
runner: runner
}
s.start()!
}

View File

@@ -3,8 +3,6 @@ module openrpc
import freeflowuniverse.herolib.core.redisclient
import json
// Start the server and listen for requests
pub fn (mut s OpenRPCServer) start() ! {
println('Starting OpenRPC server.')
@@ -13,7 +11,7 @@ pub fn (mut s OpenRPCServer) start() ! {
// Get message from queue
msg := s.queue.get(5000)!
if msg.len==0{
if msg.len == 0 {
println("queue '${rpc_queue}' empty")
continue
}
@@ -31,8 +29,8 @@ pub fn (mut s OpenRPCServer) start() ! {
// Send response back to Redis using response queue
response_json := json.encode(response)
key:='${rpc_queue}:${request.id}'
println("response: \n${response}\n put on return queue ${key} ")
key := '${rpc_queue}:${request.id}'
println('response: \n${response}\n put on return queue ${key} ')
mut response_queue := &redisclient.RedisQueue{
key: key
redis: s.redis
@@ -45,27 +43,26 @@ pub fn (mut s OpenRPCServer) start() ! {
fn (mut s OpenRPCServer) handle_request(request OpenRPCRequest) !OpenRPCResponse {
method := request.method.to_lower()
println("process: method: '${method}'")
if method.starts_with("job.") {
if method.starts_with('job.') {
return s.handle_request_job(request) or {
return rpc_response_error(request.id,"error in request job:\n${err}")
return rpc_response_error(request.id, 'error in request job:\n${err}')
}
}
if method.starts_with("agent.") {
if method.starts_with('agent.') {
return s.handle_request_agent(request) or {
return rpc_response_error(request.id,"error in request agent:\n${err}")
return rpc_response_error(request.id, 'error in request agent:\n${err}')
}
}
if method.starts_with("group.") {
if method.starts_with('group.') {
return s.handle_request_group(request) or {
return rpc_response_error(request.id,"error in request group:\n${err}")
return rpc_response_error(request.id, 'error in request group:\n${err}')
}
}
if method.starts_with("service.") {
if method.starts_with('service.') {
return s.handle_request_service(request) or {
return rpc_response_error(request.id,"error in request service:\n${err}")
return rpc_response_error(request.id, 'error in request service:\n${err}')
}
}
return rpc_response_error(request.id,"Could not find handler for ${method}")
return rpc_response_error(request.id, 'Could not find handler for ${method}')
}

View File

@@ -3,16 +3,14 @@ module openrpc
import freeflowuniverse.herolib.core.jobs.model
import json
pub fn (mut h OpenRPCServer) handle_request_agent(request OpenRPCRequest) !OpenRPCResponse {
mut response := rpc_response_new(request.id)
mut response:=rpc_response_new(request.id)
method:=request.method.all_after_first("agent.")
method := request.method.all_after_first('agent.')
println("request agent:'${method}'")
match method{
match method {
'new' {
agent := h.runner.agents.new()
response.result = json.encode(agent)

View File

@@ -4,8 +4,8 @@ import freeflowuniverse.herolib.core.jobs.model
import json
pub fn (mut h OpenRPCServer) handle_request_group(request OpenRPCRequest) !OpenRPCResponse {
mut response:=rpc_response_new(request.id)
method:=request.method.all_after_first("group.")
mut response := rpc_response_new(request.id)
method := request.method.all_after_first('group.')
println("request group:'${method}'")
match method {
'new' {

View File

@@ -4,9 +4,9 @@ import freeflowuniverse.herolib.core.jobs.model
import json
pub fn (mut h OpenRPCServer) handle_request_job(request OpenRPCRequest) !OpenRPCResponse {
mut response:=rpc_response_new(request.id)
mut response := rpc_response_new(request.id)
method:=request.method.all_after_first("job.")
method := request.method.all_after_first('job.')
println("request job:'${method}'")
println(request)
match method {

View File

@@ -4,10 +4,10 @@ import freeflowuniverse.herolib.core.jobs.model
import json
pub fn (mut h OpenRPCServer) handle_request_service(request OpenRPCRequest) !OpenRPCResponse {
mut response:=rpc_response_new(request.id)
method:=request.method.all_after_first("service.")
mut response := rpc_response_new(request.id)
method := request.method.all_after_first('service.')
println("request service:'${method}'")
match method{
match method {
'new' {
service := h.runner.services.new()
response.result = json.encode(service)
@@ -65,12 +65,10 @@ pub fn (mut h OpenRPCServer) handle_request_service(request OpenRPCRequest) !Ope
}
// Parse groups array from JSON string
groups := json.decode([]string, request.params[3])!
has_access := h.runner.services.check_access(
request.params[0], // actor
has_access := h.runner.services.check_access(request.params[0], // actor
request.params[1], // action
request.params[2], // user_pubkey
groups
)!
groups)!
response.result = json.encode(has_access)
}
else {

View File

@@ -17,8 +17,7 @@ pub mut:
id int @[required]
}
fn rpc_response_new(id int)OpenRPCResponse {
fn rpc_response_new(id int) OpenRPCResponse {
mut response := OpenRPCResponse{
jsonrpc: '2.0'
id: id
@@ -26,14 +25,13 @@ fn rpc_response_new(id int)OpenRPCResponse {
return response
}
fn rpc_response_error(id int, errormsg string)OpenRPCResponse {
fn rpc_response_error(id int, errormsg string) OpenRPCResponse {
mut response := OpenRPCResponse{
jsonrpc: '2.0'
id: id
error:errormsg
error: errormsg
}
return response
}
const rpc_queue = 'herorunner:q:rpc'

View File

@@ -14,8 +14,8 @@ mut:
}
// Create new WebSocket server
pub fn new_ws_server( port int) !&WSServer {
mut redis:= redisclient.core_get()!
pub fn new_ws_server(port int) !&WSServer {
mut redis := redisclient.core_get()!
return &WSServer{
redis: redis
queue: &redisclient.RedisQueue{
@@ -59,14 +59,14 @@ pub fn (mut s WSServer) start() ! {
// Generate unique request ID if not provided
mut req_id := request.id
if req_id == 0 {
req_id = rand.i32_in_range(1,10000000)!
req_id = rand.i32_in_range(1, 10000000)!
}
println('WebSocket put on queue: \'${rpc_queue}\' (msg: ${msg.payload.bytestr()})')
// Send request to Redis queue
s.queue.add(msg.payload.bytestr())!
returnkey:='${rpc_queue}:${req_id}'
returnkey := '${rpc_queue}:${req_id}'
mut queue_return := &redisclient.RedisQueue{
key: returnkey
redis: s.redis
@@ -89,7 +89,5 @@ pub fn (mut s WSServer) start() ! {
// Start server
println('WebSocket server listening on port ${s.port}')
ws_server.listen() or {
return error('Failed to start WebSocket server: ${err}')
}
ws_server.listen() or { return error('Failed to start WebSocket server: ${err}') }
}

View File

@@ -76,7 +76,7 @@ pub fn (mut path Path) expand(dest string) !Path {
if path.name().to_lower().ends_with('.tar.gz') || path.name().to_lower().ends_with('.tgz') {
cmd := 'tar -xzvf ${path.path} -C ${desto.path}'
//console.print_debug(cmd)
// console.print_debug(cmd)
res := os.execute(cmd)
if res.exit_code > 0 {
return error('Could not expand.\n${res}')
@@ -136,7 +136,7 @@ pub fn find_common_ancestor(paths_ []string) string {
}
}
paths := paths_.map(os.abs_path(os.real_path(it))) // get the real path (symlinks... resolved)
//console.print_debug(paths.str())
// console.print_debug(paths.str())
parts := paths[0].split('/')
mut totest_prev := '/'
for i in 1 .. parts.len {
@@ -223,7 +223,7 @@ pub fn (mut path Path) move(args MoveArgs) ! {
// that last dir needs to move 1 up
pub fn (mut path Path) moveup_single_subdir() ! {
mut plist := path.list(recursive: false, ignoredefault: true, dirs_only: true)!
//console.print_debug(plist.str())
// console.print_debug(plist.str())
if plist.paths.len != 1 {
return error('could not find one subdir in ${path.path} , so cannot move up')
}

View File

@@ -37,7 +37,6 @@ pub fn new(args_ GitStructureArgsNew) !&GitStructure {
ssh_key_name: args.ssh_key_name
}
return get(coderoot: args.coderoot, reload: args.reload, cfg: cfg)
}
@@ -56,8 +55,8 @@ pub fn get(args_ GitStructureArgGet) !&GitStructure {
args.coderoot = '${os.home_dir()}/code'
}
//make sure coderoot exists
if ! os.exists(args.coderoot){
// make sure coderoot exists
if !os.exists(args.coderoot) {
os.mkdir_all(args.coderoot)!
}

View File

@@ -61,7 +61,6 @@ pub fn (mut gitstructure GitStructure) load(reload bool) ! {
return error(msg)
}
}
}
// Recursively loads repositories from the provided path, updating their statuses, does not check the status

View File

@@ -20,10 +20,10 @@ pub mut:
msg string
url string
branch string
path string //path to start from
path string // path to start from
recursive bool
pull bool
reload bool //means reload the info into the cache
reload bool // means reload the info into the cache
script bool = true // run non interactive
reset bool = true // means we will lose changes (only relevant for clone, pull)
}
@@ -45,14 +45,13 @@ pub mut:
//```
pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
mut args := args_
//console.print_debug('git do ${args.cmd}')
// console.print_debug('git do ${args.cmd}')
if args.path == '' {
args.path = os.getwd()
}
//see if its one repo we are in, based on current path
// see if its one repo we are in, based on current path
if args.repo == '' && args.account == '' && args.provider == '' && args.filter == '' {
mut curdiro := pathlib.get_dir(path: args.path, create: false)!
mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
@@ -63,10 +62,10 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
args.provider = r0.provider
}
}
//see if a url was used means we are in 1 repo
// see if a url was used means we are in 1 repo
if args.url.len > 0 {
if !(args.repo == '' && args.account == '' && args.provider == '' && args.filter == ''){
return error("when specify url cannot specify repo, account, profider or filter")
if !(args.repo == '' && args.account == '' && args.provider == '' && args.filter == '') {
return error('when specify url cannot specify repo, account, profider or filter')
}
mut r0 := gs.get_repo(url: args.url)!
args.repo = r0.name
@@ -85,9 +84,9 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
provider: args.provider
)!
//reset the status for the repo
if args.reload{
for mut repo in repos{
// reset the status for the repo
if args.reload {
for mut repo in repos {
repo.cache_last_load_clear()!
}
}
@@ -102,7 +101,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
return ''
}
//means we are on 1 repo
// means we are on 1 repo
if args.cmd in 'sourcetree,edit'.split(',') {
if repos.len == 0 {
return error('please specify at least 1 repo for cmd:${args.cmd}')
@@ -122,7 +121,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
}
if args.cmd in 'pull,push,commit,delete'.split(',') {
gs.repos_print(
filter: args.filter
name: args.repo
@@ -141,22 +139,21 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
// check on repos who needs what
for mut g in repos {
if args.cmd in ["push"] && g.need_push_or_pull()! {
if args.cmd == 'push' && g.need_push_or_pull()! {
need_push0 = true
}
if args.cmd in ["push","pull"] && (need_push0 || g.need_push_or_pull()!){
if args.cmd in ['push', 'pull'] && (need_push0 || g.need_push_or_pull()!) {
need_pull0 = true
}
if args.cmd in ["push","pull","commit"] && (g.need_commit()!) {
if args.cmd in ['push', 'pull', 'commit'] && (g.need_commit()!) {
need_commit0 = true
}
}
//console.print_debug(" --- status all repo's\n need_commit0:${need_commit0} \n need_pull0:${need_pull0} \n need_push0:${need_push0}")
//exit(0)
// console.print_debug(" --- status all repo's\n need_commit0:${need_commit0} \n need_pull0:${need_pull0} \n need_push0:${need_push0}")
// exit(0)
mut ok := false
if need_commit0 || need_pull0 || need_push0 {
@@ -179,8 +176,8 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
} else {
if need_commit0 || need_pull0 || need_push0 {
ok = ui.ask_yesno(question: 'Is above ok?')!
}else{
console.print_green("nothing to do")
} else {
console.print_green('nothing to do')
}
}
@@ -188,7 +185,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
return error('cannot continue with action, you asked me to stop.\n${args}')
}
if need_commit0{
if need_commit0 {
if args.msg.len == 0 && args.script {
return error('message needs to be specified for commit.')
}
@@ -198,7 +195,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
)!
}
}
}
if args.cmd == 'delete' {
@@ -215,12 +211,12 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
mut has_changed := false
for mut g in repos {
need_push_repo := need_push0 && g.need_push_or_pull()!
need_pull_repo := need_push_repo || (need_pull0 && g.need_push_or_pull()!)
need_commit_repo := need_push_repo || need_pull_repo || (need_commit0 && g.need_commit()!)
need_commit_repo := need_push_repo || need_pull_repo
|| (need_commit0 && g.need_commit()!)
//console.print_debug(" --- git_do ${g.cache_key()} \n need_commit_repo:${need_commit_repo} \n need_pull_repo:${need_pull_repo} \n need_push_repo:${need_push_repo}")
// console.print_debug(" --- git_do ${g.cache_key()} \n need_commit_repo:${need_commit_repo} \n need_pull_repo:${need_pull_repo} \n need_push_repo:${need_push_repo}")
if need_commit_repo {
mut msg := args.msg

View File

@@ -14,8 +14,8 @@ pub mut:
provider string // Git provider (e.g., GitHub).
pull bool // Pull the last changes.
reset bool // Reset the changes.
status_clean bool //make sure each cache status is but on 0, if we also do status_update this will result in a reload
status_update bool //make sure each repo get's status updated
status_clean bool // make sure each cache status is but on 0, if we also do status_update this will result in a reload
status_update bool // make sure each repo get's status updated
url string // Repository URL
}
@@ -56,17 +56,17 @@ pub fn (mut gitstructure GitStructure) get_repos(args_ ReposGetArgs) ![]&GitRepo
}
}
for mut repo in res{
if args.status_clean{
for mut repo in res {
if args.status_clean {
repo.cache_last_load_clear()!
}
if args.status_update{
if args.status_update {
repo.status_update()!
}
if args.reset{
if args.reset {
repo.reset()!
}else{
if args.pull{
} else {
if args.pull {
repo.pull()!
}
}
@@ -126,22 +126,19 @@ pub fn (mut gitstructure GitStructure) get_repo(args_ ReposGetArgs) !&GitRepo {
// Returns:
// - bool: True if the repository matches, false otherwise.
fn repo_match_check(repo GitRepo, args ReposGetArgs) !bool {
mut r:= (args.name.len == 0 || repo.name == args.name)
mut r := (args.name.len == 0 || repo.name == args.name)
&& (args.account.len == 0 || repo.account == args.account)
&& (args.provider.len == 0 || repo.provider == args.provider)
relpath := repo.get_relative_path()!
if r{
if args.filter != '' && ! (relpath.contains(args.filter)) {
if r {
if args.filter != '' && !(relpath.contains(args.filter)) {
return false
}
}
return r
}
// Retrieves a single repository path based on the provided arguments (goes inside repo).
// if pull will force a pull, if it can't will be error, if reset will remove the changes
// If the repository does not exist, it will clone it
@@ -164,7 +161,7 @@ fn repo_match_check(repo GitRepo, args ReposGetArgs) !bool {
// Raises:
// - Error: If multiple repositories are found with similar names or if cloning fails.
pub fn (mut gitstructure GitStructure) get_path(args_ ReposGetArgs) !string {
mut r:=gitstructure.get_repo(args_)!
mut mypath:=r.get_path_of_url(args_.url)!
mut r := gitstructure.get_repo(args_)!
mut mypath := r.get_path_of_url(args_.url)!
return mypath
}

View File

@@ -41,7 +41,7 @@ pub fn (mut gitstructure GitStructure) repos_print(args ReposGetArgs) ! {
// Collect repository information based on the provided criteria
for _, repo in gitstructure.get_repos(args)! {
//repo.status_update()!
// repo.status_update()!
repo_data << format_repo_info(repo)!
}

View File

@@ -92,7 +92,6 @@ pub fn (mut repo GitRepo) commit(msg string) ! {
}
}
// Push local changes to the remote repository.
pub fn (mut repo GitRepo) push() ! {
repo.status_update()!
@@ -311,10 +310,11 @@ fn (mut repo GitRepo) update_submodules() ! {
}
fn (repo GitRepo) exec(cmd_ string) !string {
repo_path := repo.path()
cmd := 'cd ${repo_path} && ${cmd_}'
// console.print_debug(cmd)
r := os.execute(cmd)
import os { quoted_path }
repo_path := quoted_path(repo.path())
cmd_args := ["sh", "-c", "cd ${repo_path} && ${cmd_}"]
// console.print_debug(cmd_args.join(" "))
r := os.execute_opt(cmd_args)!
if r.exit_code != 0 {
return error('Repo failed to exec cmd: ${cmd}\n${r.output})')
}

View File

@@ -39,7 +39,7 @@ fn (mut repo GitRepo) cache_delete() ! {
return
}
//put the data of last load on 0, means first time a git status check will be done it will update its info
// put the data of last load on 0, means first time a git status check will be done it will update its info
fn (mut repo GitRepo) cache_last_load_clear() ! {
repo.cache_get()!
repo.last_load = 0

View File

@@ -11,20 +11,14 @@ pub struct StatusUpdateArgs {
pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Check current time vs last check, if needed (check period) then load
repo.cache_get() or {
return error('Failed to get cache for repo ${repo.name}: ${err}')
} // Ensure we have the situation from redis
repo.init() or {
return error('Failed to initialize repo ${repo.name}: ${err}')
}
repo.cache_get() or { return error('Failed to get cache for repo ${repo.name}: ${err}') } // Ensure we have the situation from redis
repo.init() or { return error('Failed to initialize repo ${repo.name}: ${err}') }
current_time := int(time.now().unix())
if args.reload || repo.last_load == 0
|| current_time - repo.last_load >= repo.config.remote_check_period {
//console.print_debug('${repo.name} ${current_time}-${repo.last_load} (${current_time - repo.last_load >= repo.config.remote_check_period}): ${repo.config.remote_check_period} +++')
// console.print_debug('${repo.name} ${current_time}-${repo.last_load} (${current_time - repo.last_load >= repo.config.remote_check_period}): ${repo.config.remote_check_period} +++')
// if true{exit(0)}
repo.load() or {
return error('Failed to load repository ${repo.name}: ${err}')
}
repo.load() or { return error('Failed to load repository ${repo.name}: ${err}') }
}
}
@@ -32,9 +26,7 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Does not check cache, it is the callers responsibility to check cache and load accordingly.
fn (mut repo GitRepo) load() ! {
console.print_header('load ${repo.print_key()}')
repo.init() or {
return error('Failed to initialize repo during load operation: ${err}')
}
repo.init() or { return error('Failed to initialize repo during load operation: ${err}') }
git_path := '${repo.path()}/.git'
if os.exists(git_path) == false {
@@ -45,13 +37,9 @@ fn (mut repo GitRepo) load() ! {
return error('Failed to fetch updates for ${repo.name} at ${repo.path()}: ${err}. Please check network connection and repository access.')
}
repo.load_branches() or {
return error('Failed to load branches for ${repo.name}: ${err}')
}
repo.load_branches() or { return error('Failed to load branches for ${repo.name}: ${err}') }
repo.load_tags() or {
return error('Failed to load tags for ${repo.name}: ${err}')
}
repo.load_tags() or { return error('Failed to load tags for ${repo.name}: ${err}') }
repo.last_load = int(time.now().unix())
@@ -71,11 +59,11 @@ fn (mut repo GitRepo) load_branches() ! {
}
for line in tags_result.split('\n') {
line_trimmed := line.trim_space()
//println(line_trimmed)
// println(line_trimmed)
if line_trimmed != '' {
parts := line_trimmed.split(' ')
if parts.len < 2 {
//console.print_debug('Info: skipping malformed branch/tag line: ${line_trimmed}')
// console.print_debug('Info: skipping malformed branch/tag line: ${line_trimmed}')
continue
}
commit_hash := parts[0].trim_space()
@@ -99,27 +87,27 @@ fn (mut repo GitRepo) load_branches() ! {
}.split_into_lines().filter(it.trim_space() != '')
if mybranch.len == 1 {
repo.status_local.branch = mybranch[0].trim_space()
}else{
return error("bug: git branch does not give branchname.\n${mybranch}")
} else {
return error('bug: git branch does not give branchname.\n${mybranch}')
}
}
// Helper to load remote tags
fn (mut repo GitRepo) load_tags() ! {
tags_result := repo.exec('git tag --list') or {
tags_result := repo.exec('git show-ref --tags') or {
return error('Failed to list tags: ${err}. Please ensure git is installed and repository is accessible.')
}
//println(tags_result)
for line in tags_result.split('\n') {
line_trimmed := line.trim_space()
if line_trimmed != '' {
parts := line_trimmed.split(' ')
if parts.len < 2 {
//console.print_debug('Skipping malformed tag line: ${line_trimmed}')
if line_trimmed == '' {
continue
}
if parts := line_trimmed.split(' refs/tags/') {
if parts.len != 2 {
continue
}
commit_hash := parts[0].trim_space()
tag_name := parts[1].all_after('refs/tags/').trim_space()
tag_name := parts[1].trim_space()
// Update remote tags info
repo.status_remote.tags[tag_name] = commit_hash

View File

@@ -30,7 +30,7 @@ fn installed() !bool {
fn install() ! {
console.print_header('install garage_s3')
//mut installer := get()!
// mut installer := get()!
// THIS IS EXAMPLE CODEAND NEEDS TO BE CHANGED
mut url := ''
if core.is_linux_arm()! {

View File

@@ -1,4 +1,3 @@
module docker
import freeflowuniverse.herolib.ui.console

View File

@@ -5,17 +5,13 @@ import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.installers.ulist
import os
//////////////////// following actions are not specific to instance of the object
// checks if a certain version or above is installed
fn installed() !bool {
checkcmd:='${osal.profile_path_source_and()!} bun -version'
checkcmd := '${osal.profile_path_source_and()!} bun -version'
res := os.execute(checkcmd)
if res.exit_code != 0 {
println(res)
@@ -26,26 +22,25 @@ fn installed() !bool {
if r.len != 1 {
return error("couldn't parse bun version.\n${res.output}")
}
println(" ${texttools.version(version)} <= ${texttools.version(r[0])}")
println(' ${texttools.version(version)} <= ${texttools.version(r[0])}')
if texttools.version(version) <= texttools.version(r[0]) {
return true
}
return false
}
//get the Upload List of the files
// get the Upload List of the files
fn ulist_get() !ulist.UList {
//optionally build a UList which is all paths which are result of building, is then used e.g. in upload
// optionally build a UList which is all paths which are result of building, is then used e.g. in upload
return ulist.UList{}
}
//uploads to S3 server if configured
// uploads to S3 server if configured
fn upload() ! {
// installers.upload(
// cmdname: 'bun'
// source: '${gitpath}/target/x86_64-unknown-linux-musl/release/bun'
// )!
}
fn install() ! {
@@ -53,9 +48,7 @@ fn install() ! {
osal.exec(cmd: 'curl -fsSL https://bun.sh/install | bash')!
}
fn destroy() ! {
// osal.process_kill_recursive(name:'bun')!
osal.cmd_delete('bun')!
@@ -64,12 +57,10 @@ fn destroy() ! {
bun
')!
//will remove all paths where bun is found
osal.profile_path_add_remove(paths2delete:"bun")!
// will remove all paths where bun is found
osal.profile_path_add_remove(paths2delete: 'bun')!
osal.rm("
osal.rm('
~/.bun
")!
')!
}

View File

@@ -1,10 +1,8 @@
module bun
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.osal.zinit
import time
@@ -17,7 +15,7 @@ __global (
/////////FACTORY
@[params]
pub struct ArgsGet{
pub struct ArgsGet {
pub mut:
name string
}
@@ -26,8 +24,6 @@ pub fn get(args_ ArgsGet) !&Bun {
return &Bun{}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -38,25 +34,24 @@ fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManag
// zinit
// tmux
// systemd
match cat{
.zinit{
console.print_debug("startupmanager: zinit")
return startupmanager.get(cat:.zinit)!
match cat {
.zinit {
console.print_debug('startupmanager: zinit')
return startupmanager.get(cat: .zinit)!
}
.systemd{
console.print_debug("startupmanager: systemd")
return startupmanager.get(cat:.systemd)!
}else{
console.print_debug("startupmanager: auto")
.systemd {
console.print_debug('startupmanager: systemd')
return startupmanager.get(cat: .systemd)!
}
else {
console.print_debug('startupmanager: auto')
return startupmanager.get()!
}
}
}
@[params]
pub struct InstallArgs{
pub struct InstallArgs {
pub mut:
reset bool
}
@@ -68,15 +63,12 @@ pub fn (mut self Bun) install(args InstallArgs) ! {
}
}
pub fn (mut self Bun) destroy() ! {
switch(self.name)
destroy()!
}
//switch instance to be used for bun
// switch instance to be used for bun
pub fn switch(name string) {
bun_default = name
}

View File

@@ -1,4 +1,5 @@
module bun
import freeflowuniverse.herolib.data.paramsparser
import os
@@ -6,24 +7,20 @@ pub const version = '1.2.2'
const singleton = true
const default = true
//THIS THE THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
// THIS IS THE SOURCE OF THE INFORMATION FOR THIS FILE; HERE THE CONFIG OBJECT IS CONFIGURED AND MODELLED
@[heap]
pub struct Bun {
pub mut:
name string = 'default'
}
fn obj_init(obj_ Bun)!Bun{
//never call get here, only thing we can do here is work on object itself
mut obj:=obj_
// obj_init lets the installer factory post-process a freshly created/decoded Bun object.
// It must not call get() (would recurse into the factory); it may only adjust the object itself.
// Currently a pass-through: returns the object unchanged.
fn obj_init(obj_ Bun) !Bun {
// never call get here, only thing we can do here is work on object itself
mut obj := obj_
return obj
}
//called before start if done
// called before start if done
// configure is the hook invoked before start; currently a no-op placeholder.
// The commented line shows the intended pattern: fetch the installer via get() and apply config.
fn configure() ! {
//mut installer := get()!
// mut installer := get()!
}

View File

@@ -109,11 +109,11 @@ pub fn profile_path_source() !string {
}
pp := profile_path()!
if os.exists(pp) {
res := os.execute("source ${pp}")
res := os.execute('source ${pp}')
if res.exit_code != 0 {
console.print_stderr("WARNING: your profile is corrupt: ${pp}")
return error("profile corrupt")
}else{
console.print_stderr('WARNING: your profile is corrupt: ${pp}')
return error('profile corrupt')
} else {
return 'source ${pp}'
}
}
@@ -123,9 +123,7 @@ pub fn profile_path_source() !string {
// return source $path && .
// or empty if it doesn't exist
pub fn profile_path_source_and() !string {
p:=profile_path_source() or {
return ""
}
p := profile_path_source() or { return '' }
return '${p} && '
}

View File

@@ -26,10 +26,10 @@ pub mut:
pub fn download(args_ DownloadArgs) !pathlib.Path {
mut args := args_
args.dest = args.dest.trim(" ").trim_right("/")
args.expand_dir = args.expand_dir.trim(" ").trim_right("/")
args.expand_file = args.expand_file.replace("//","/")
args.dest = args.dest.replace("//","/")
args.dest = args.dest.trim(' ').trim_right('/')
args.expand_dir = args.expand_dir.trim(' ').trim_right('/')
args.expand_file = args.expand_file.replace('//', '/')
args.dest = args.dest.replace('//', '/')
console.print_header('download: ${args.url}')
if args.name == '' {
@@ -77,23 +77,23 @@ pub fn download(args_ DownloadArgs) !pathlib.Path {
// Clean up all related files when resetting
if os.exists(args.dest) {
if os.is_dir(args.dest) {
os.rmdir_all(args.dest) or { }
os.rmdir_all(args.dest) or {}
} else {
os.rm(args.dest) or { }
os.rm(args.dest) or {}
}
}
if os.exists(args.dest + '_') {
if os.is_dir(args.dest + '_') {
os.rmdir_all(args.dest + '_') or { }
os.rmdir_all(args.dest + '_') or {}
} else {
os.rm(args.dest + '_') or { }
os.rm(args.dest + '_') or {}
}
}
if os.exists(args.dest + '.meta') {
if os.is_dir(args.dest + '.meta') {
os.rmdir_all(args.dest + '.meta') or { }
os.rmdir_all(args.dest + '.meta') or {}
} else {
os.rm(args.dest + '.meta') or { }
os.rm(args.dest + '.meta') or {}
}
}
// Recreate meta file after cleanup
@@ -119,9 +119,9 @@ pub fn download(args_ DownloadArgs) !pathlib.Path {
// Clean up any existing temporary file/directory before download
if os.exists(dest0.path) {
if os.is_dir(dest0.path) {
os.rmdir_all(dest0.path) or { }
os.rmdir_all(dest0.path) or {}
} else {
os.rm(dest0.path) or { }
os.rm(dest0.path) or {}
}
}
cmd := '

View File

@@ -12,7 +12,7 @@ pub enum NotifyEvent {
}
// NotifyCallback is the function signature for event callbacks
pub type NotifyCallback = fn (event NotifyEvent, path string , args map[string]string)
pub type NotifyCallback = fn (event NotifyEvent, path string, args map[string]string)
// WatchEntry represents a watched path and its associated callback
struct WatchEntry {
@@ -86,13 +86,13 @@ pub fn (mut n Notifier) start() ! {
n.is_watching = true
if n.watch_list.len>1{
return error("only support watchers with len 1 for now")
if n.watch_list.len > 1 {
return error('only support watchers with len 1 for now')
}
// Start a watcher for each path
for mut entry in n.watch_list {
//spawn n.watch_path(mut entry)
// spawn n.watch_path(mut entry)
n.watch_path(mut entry)
}
}
@@ -140,7 +140,7 @@ fn (mut n Notifier) watch_path(mut entry WatchEntry) {
}
if cb := entry.callback {
cb(event, path,n.args)
cb(event, path, n.args)
}
}
}

View File

@@ -18,7 +18,7 @@ pub mut:
ssd string
url string
reputation int
uptime int //0..100
uptime int // 0..100
continent string
country string
passmark int

View File

@@ -37,7 +37,6 @@ pub mut:
command string
}
// create/start container (first need to get a dockercontainer before we can start)
pub fn (mut container DockerContainer) start() ! {
exec(cmd: 'docker start ${container.id}')!

View File

@@ -3,7 +3,6 @@ module docker
import freeflowuniverse.herolib.osal { exec }
import freeflowuniverse.herolib.virt.utils
@[params]
pub struct DockerContainerCreateArgs {
pub mut:
@@ -19,7 +18,6 @@ pub mut:
command string
}
pub fn (mut e DockerEngine) container_create(args DockerContainerCreateArgs) !&DockerContainer {
// Validate required parameters
if args.name.trim_space() == '' {
@@ -67,7 +65,7 @@ pub fn (mut e DockerEngine) container_create(args DockerContainerCreateArgs) !&D
image += ':${args.image_tag}'
} else {
// Check if image exists with 'local' tag first
mut local_check := exec(cmd: 'docker images ${args.image_repo}:local -q',debug:true)!
mut local_check := exec(cmd: 'docker images ${args.image_repo}:local -q', debug: true)!
if local_check.output != '' {
image += ':local'
} else {

View File

@@ -72,7 +72,6 @@ pub fn (mut b DockerBuilderRecipe) add_package(args PackageArgs) ! {
}
}
// lets now check of the package has already not been set before
for package0 in b.items {
if package0 is PackageItem {

View File

@@ -4,7 +4,7 @@ import os
import strings
pub fn (mut site DocSite) clean(args ErrorArgs) ! {
toclean := "
toclean := '
/node_modules
babel.config.js
@@ -44,7 +44,7 @@ pub fn (mut site DocSite) clean(args ErrorArgs) ! {
sidebars.ts
tsconfig.json
"
'
mut sb := strings.new_builder(200)
for line in toclean.split_into_lines() {
@@ -74,9 +74,7 @@ pub fn (mut site DocSite) clean(args ErrorArgs) ! {
for file in files {
if file.starts_with(base_pattern) {
file_path := os.join_path(base_dir, file)
os.rm(file_path) or {
sb.writeln('Failed to remove ${file_path}: ${err}')
}
os.rm(file_path) or { sb.writeln('Failed to remove ${file_path}: ${err}') }
}
}
}
@@ -90,11 +88,8 @@ pub fn (mut site DocSite) clean(args ErrorArgs) ! {
sb.writeln('Failed to remove directory ${full_path}: ${err}')
}
} else {
os.rm(full_path) or {
sb.writeln('Failed to remove file ${full_path}: ${err}')
os.rm(full_path) or { sb.writeln('Failed to remove file ${full_path}: ${err}') }
}
}
}
}
}

View File

@@ -1,15 +1,14 @@
module docusaurus
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.screen
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.develop.gittools
import json
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
@[heap]
pub struct DocSite {
@@ -35,6 +34,7 @@ pub mut:
build_path string
production bool
watch_changes bool = true
update bool
}
pub fn (mut f DocusaurusFactory) build_dev(args_ DSiteNewArgs) !&DocSite {
@@ -93,6 +93,13 @@ pub fn (mut f DocusaurusFactory) dev(args_ DSiteNewArgs) !&DocSite {
console.print_item(' 1. Attach to screen: screen -r ${screen_name}')
console.print_item(' 2. To detach from screen: Press Ctrl+A then D')
console.print_item(' 3. To list all screens: screen -ls')
console.print_item('The site content is on::')
console.print_item(' 1. location of documents: ${s.path_src.path}/docs')
if osal.cmd_exists("code"){
console.print_item(' 2. We opened above dir in vscode.')
osal.exec(cmd:'code ${s.path_src.path}/docs')!
}
// Start the watcher in a separate thread
//mut tf:=spawn watch_docs(docs_path, s.path_src.path, s.path_build.path)
@@ -137,7 +144,7 @@ pub fn (mut f DocusaurusFactory) add(args_ DSiteNewArgs) !&DocSite {
}
mut gs := gittools.new()!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git',pull:args.update)!
mut template_path := r.patho()!
// First ensure cfg directory exists in src, if not copy from template

View File

@@ -1,6 +1,5 @@
module docusaurus
import os
// import freeflowuniverse.herolib.data.doctree.collection
import freeflowuniverse.herolib.core.pathlib
@@ -23,12 +22,13 @@ pub mut:
// publish_path string
build_path string
production bool
update bool
}
pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {
mut args:=args_
if args.build_path == ""{
args.build_path = "${os.home_dir()}/hero/var/docusaurus"
mut args := args_
if args.build_path == '' {
args.build_path = '${os.home_dir()}/hero/var/docusaurus'
}
// if args.publish_path == ""{
// args.publish_path = "${os.home_dir()}/hero/var/docusaurus/publish"
@@ -39,8 +39,7 @@ pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {
// path_publish: pathlib.get_dir(path: args_.publish_path, create: true)!
}
ds.template_install()!
ds.template_install(args.update)!
return ds
}

View File

@@ -1,32 +1,28 @@
module docusaurus
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.installers.web.bun
fn (mut site DocusaurusFactory) template_install() ! {
fn (mut site DocusaurusFactory) template_install(update bool) ! {
mut gs := gittools.new()!
mut r:=gs.get_repo(url:'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut template_path:=r.patho()!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git',pull:update)!
mut template_path := r.patho()!
for item in ['package.json', 'sidebars.ts', 'tsconfig.json'] {
mut aa:= template_path.file_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")!
mut aa := template_path.file_get(item)!
aa.copy(dest: '${site.path_build.path}/${item}')!
}
//install bun
mut installer:= bun.get()!
// install bun
mut installer := bun.get()!
installer.install()!
osal.exec(cmd: '
osal.exec(
cmd: '
cd ${site.path_build.path}
bun install
')!
'
)!
}

View File

@@ -1,22 +1,20 @@
module docusaurus
import freeflowuniverse.herolib.osal.notifier
import os
fn watch_docs(docs_path string, path_src string, path_build string) ! {
mut n := notifier.new('docsite_watcher') or {
eprintln('Failed to create watcher: ${err}')
return
}
n.args["path_src"]=path_src
n.args["path_build"]=path_build
n.args['path_src'] = path_src
n.args['path_build'] = path_build
// Add watch with captured args
n.add_watch(docs_path, fn (event notifier.NotifyEvent, path string, args map[string]string) {
handle_file_change(event, path, args) or {
eprintln('Error handling file change: ${err}')
}
handle_file_change(event, path, args) or { eprintln('Error handling file change: ${err}') }
})!
n.start()!
@@ -41,8 +39,8 @@ fn handle_file_change(event notifier.NotifyEvent, path string, args map[string]s
}
// Get relative path from docs directory
rel_path := path.replace('${args["path_src"]}/docs/', '')
dest_path := '${args["path_build"]}/docs/${rel_path}'
rel_path := path.replace('${args['path_src']}/docs/', '')
dest_path := '${args['path_build']}/docs/${rel_path}'
match event {
.create, .modify {
@@ -71,9 +69,7 @@ fn handle_file_change(event notifier.NotifyEvent, path string, args map[string]s
}
println('Deleted directory: ${rel_path}')
} else {
os.rm(dest_path) or {
return error('Failed to delete ${dest_path}: ${err}')
}
os.rm(dest_path) or { return error('Failed to delete ${dest_path}: ${err}') }
println('Deleted: ${rel_path}')
}
}