the base
This commit is contained in:
29 .gitignore vendored Normal file
@@ -0,0 +1,29 @@
*dSYM/
.vmodules/
.vscode
_docs/
vls.*
vls.log
node_modules/
docs/
photonwrapper.so
x
.env
myexample
myexample2
remote_update_compile_hero
remote_install_v_hero
zdb-data
zdb-index
.idea/
.venv/
.trunk/
.DS_Store
.venv/
dump.rdb
output/
*.db
.stellar
vdocs/
data.ms/
3 .vdocignore Normal file
@@ -0,0 +1,3 @@
examples/*
2 LICENSE
@@ -186,7 +186,7 @@
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

-   Copyright [yyyy] [name of copyright owner]
+   Copyright Incubaid BVBA Belgium, Threefold NV Belgium

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
62 aiprompts/starter/0_start_here.md Normal file
@@ -0,0 +1,62 @@

## instructions for code generation

> when I generate code, the following instructions can never be overruled; they are the basics

- do not try to fix files which end with _.v because these are generated files


## instructions for vlang scripts

when I generate vlang scripts I will always use the .vsh extension and use the following as the first line:

```
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
```

- a .vsh is a v shell script and can be executed as is, no need to use v ...
- in a .vsh file there is no need for a main() function
- these scripts can be used for examples or instruction scripts, e.g. an install script

## to do argument parsing use the following example

```v
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run

import os
import flag

mut fp := flag.new_flag_parser(os.args)
fp.application('compile.vsh')
fp.version('v0.1.0')
fp.description('Compile hero binary in debug or production mode')
fp.skip_executable()

prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
help_requested := fp.bool('help', `h`, false, 'Show help message')

if help_requested {
	println(fp.usage())
	exit(0)
}

additional_args := fp.finalize() or {
	eprintln(err)
	println(fp.usage())
	exit(1)
}
```


## when creating a test script

instruct the user to run it as

```bash
v -enable-globals test ~/code/github/freeflowuniverse/herolib/lib/osal/package_test.v
```

- use ~ so it works across all machines
- always use -enable-globals

a minimal example of such a test file is shown below
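A minimal sketch of what such a test file could look like (the file name and the assertion are only illustrative, not taken from herolib):

```v
// example_test.v — run with: v -enable-globals test <path>/example_test.v
fn test_split_words() {
	words := 'hello world'.split(' ')
	assert words.len == 2
	assert words[0] == 'hello'
}
```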
309 aiprompts/starter/3_heroscript & params instructions.md Normal file
@@ -0,0 +1,309 @@
# how to work with heroscript in vlang
|
||||
|
||||
## heroscript
|
||||
|
||||
Heroscript is our small scripting language, which has the following structure.

An example of a heroscript:
|
||||
|
||||
```heroscript
|
||||
|
||||
!!dagu.script_define
|
||||
name: 'test_dag'
|
||||
homedir:''
|
||||
title:'a title'
|
||||
reset:1
|
||||
start:true //true or 1 is the same
|
||||
colors: 'green,red,purple' //lists are comma separated
|
||||
description: '
|
||||
a description can be multiline
|
||||
|
||||
like this
|
||||
'
|
||||
|
||||
|
||||
!!dagu.add_step
|
||||
dag: 'test_dag'
|
||||
name: 'hello_world'
|
||||
command: 'echo hello world'
|
||||
|
||||
!!dagu.add_step
|
||||
dag: 'test_dag'
|
||||
name: 'last_step'
|
||||
command: 'echo last step'
|
||||
|
||||
|
||||
```
|
||||
|
||||
Notice how:
- every action starts with !!
- the first part is the actor, e.g. dagu in this case
- the second part is the action name
- multiline values are supported; see the description field
|
||||
|
||||
## how to process heroscript in Vlang
|
||||
|
||||
- heroscript can be converted to a struct,
|
||||
- the methods available to get the params are in 'params' section further in this doc
|
||||
|
||||
|
||||
```vlang
|
||||
|
||||
fn test_play_dagu() ! {
|
||||
mut plbook := playbook.new(text: thetext_from_above)!
|
||||
play_dagu(mut plbook)! //see below in vlang block there it all happens
|
||||
}
|
||||
|
||||
|
||||
pub fn play_dagu(mut plbook playbook.PlayBook) ! {
|
||||
|
||||
//find all actions of the form !!$actor.$actionname. In the example above the actor is dagu; we check with the filter whether any exist, if not we return
|
||||
dagu_actions := plbook.find(filter: 'dagu.')!
|
||||
if dagu_actions.len == 0 {
|
||||
return
|
||||
}
|
||||
play_dagu_basic(mut plbook)!
|
||||
}
|
||||
|
||||
pub struct DaguScript {
|
||||
pub mut:
|
||||
name string
|
||||
homedir string
|
||||
title string
|
||||
reset bool
|
||||
start bool
|
||||
colors      []string
description string
}
|
||||
|
||||
// play_dagu plays the dagu play commands
|
||||
pub fn play_dagu_basic(mut plbook playbook.PlayBook) ! {
|
||||
|
||||
//now find the specific ones for dagu.script_define
|
||||
mut actions := plbook.find(filter: 'dagu.script_define')!
|
||||
|
||||
if actions.len > 0 {
|
||||
for myaction in actions {
|
||||
mut p := myaction.params //get the params object from the action object, this can then be processed using the param getters
|
||||
mut obj := DaguScript{
|
||||
//INFO: all details about the get methods can be found in 'params get methods' section
|
||||
name : p.get('name')! //will give error if not exist
|
||||
homedir : p.get('homedir')!
|
||||
title : p.get_default('title', 'My Hero DAG')! //uses a default if not set
|
||||
reset : p.get_default_false('reset')
|
||||
start : p.get_default_true('start')
|
||||
colors : p.get_list('colors')
|
||||
description : p.get_default('description','')!
|
||||
}
|
||||
...
|
||||
}
|
||||
}
|
||||
|
||||
//there can be more actions which will have other filter
|
||||
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## params get methods (param getters)
|
||||
|
||||
```vlang
|
||||
|
||||
fn (params &Params) exists(key_ string) bool
|
||||
|
||||
//check if arg exist (arg is just a value in the string e.g. red, not value:something)
|
||||
fn (params &Params) exists_arg(key_ string) bool
|
||||
|
||||
//see if the kwarg with the key exists if yes return as string trimmed
|
||||
fn (params &Params) get(key_ string) !string
|
||||
|
||||
//return the arg with nr, 0 is the first
|
||||
fn (params &Params) get_arg(nr int) !string
|
||||
|
||||
//return arg, if the nr is larger than amount of args, will return the defval
|
||||
fn (params &Params) get_arg_default(nr int, defval string) !string
|
||||
|
||||
fn (params &Params) get_default(key string, defval string) !string
|
||||
|
||||
fn (params &Params) get_default_false(key string) bool
|
||||
|
||||
fn (params &Params) get_default_true(key string) bool
|
||||
|
||||
fn (params &Params) get_float(key string) !f64
|
||||
|
||||
fn (params &Params) get_float_default(key string, defval f64) !f64
|
||||
|
||||
fn (params &Params) get_from_hashmap(key_ string, defval string, hashmap map[string]string) !string
|
||||
|
||||
fn (params &Params) get_int(key string) !int
|
||||
|
||||
fn (params &Params) get_int_default(key string, defval int) !int
|
||||
|
||||
//Looks for a list of strings in the parameters; ',' is used as the delimiter for the list
|
||||
fn (params &Params) get_list(key string) ![]string
|
||||
|
||||
fn (params &Params) get_list_default(key string, def []string) ![]string
|
||||
|
||||
fn (params &Params) get_list_f32(key string) ![]f32
|
||||
|
||||
fn (params &Params) get_list_f32_default(key string, def []f32) []f32
|
||||
|
||||
fn (params &Params) get_list_f64(key string) ![]f64
|
||||
|
||||
fn (params &Params) get_list_f64_default(key string, def []f64) []f64
|
||||
|
||||
fn (params &Params) get_list_i16(key string) ![]i16
|
||||
|
||||
fn (params &Params) get_list_i16_default(key string, def []i16) []i16
|
||||
|
||||
fn (params &Params) get_list_i64(key string) ![]i64
|
||||
|
||||
fn (params &Params) get_list_i64_default(key string, def []i64) []i64
|
||||
|
||||
fn (params &Params) get_list_i8(key string) ![]i8
|
||||
|
||||
fn (params &Params) get_list_i8_default(key string, def []i8) []i8
|
||||
|
||||
fn (params &Params) get_list_int(key string) ![]int
|
||||
|
||||
fn (params &Params) get_list_int_default(key string, def []int) []int
|
||||
|
||||
fn (params &Params) get_list_namefix(key string) ![]string
|
||||
|
||||
fn (params &Params) get_list_namefix_default(key string, def []string) ![]string
|
||||
|
||||
fn (params &Params) get_list_u16(key string) ![]u16
|
||||
|
||||
fn (params &Params) get_list_u16_default(key string, def []u16) []u16
|
||||
|
||||
fn (params &Params) get_list_u32(key string) ![]u32
|
||||
|
||||
fn (params &Params) get_list_u32_default(key string, def []u32) []u32
|
||||
|
||||
fn (params &Params) get_list_u64(key string) ![]u64
|
||||
|
||||
fn (params &Params) get_list_u64_default(key string, def []u64) []u64
|
||||
|
||||
fn (params &Params) get_list_u8(key string) ![]u8
|
||||
|
||||
fn (params &Params) get_list_u8_default(key string, def []u8) []u8
|
||||
|
||||
fn (params &Params) get_map() map[string]string
|
||||
|
||||
fn (params &Params) get_path(key string) !string
|
||||
|
||||
fn (params &Params) get_path_create(key string) !string
|
||||
|
||||
fn (params &Params) get_percentage(key string) !f64
|
||||
|
||||
fn (params &Params) get_percentage_default(key string, defval string) !f64
|
||||
|
||||
//convert GB, MB, KB to bytes e.g. 10 GB becomes bytes in u64
|
||||
fn (params &Params) get_storagecapacity_in_bytes(key string) !u64
|
||||
|
||||
fn (params &Params) get_storagecapacity_in_bytes_default(key string, defval u64) !u64
|
||||
|
||||
fn (params &Params) get_storagecapacity_in_gigabytes(key string) !u64
|
||||
|
||||
//Get a time object from a time string input; the input can be either relative or absolute
|
||||
fn (params &Params) get_time(key string) !ourtime.OurTime
|
||||
|
||||
fn (params &Params) get_time_default(key string, defval ourtime.OurTime) !ourtime.OurTime
|
||||
|
||||
fn (params &Params) get_time_interval(key string) !Duration
|
||||
|
||||
fn (params &Params) get_timestamp(key string) !Duration
|
||||
|
||||
fn (params &Params) get_timestamp_default(key string, defval Duration) !Duration
|
||||
|
||||
fn (params &Params) get_u32(key string) !u32
|
||||
|
||||
fn (params &Params) get_u32_default(key string, defval u32) !u32
|
||||
|
||||
fn (params &Params) get_u64(key string) !u64
|
||||
|
||||
fn (params &Params) get_u64_default(key string, defval u64) !u64
|
||||
|
||||
fn (params &Params) get_u8(key string) !u8
|
||||
|
||||
fn (params &Params) get_u8_default(key string, defval u8) !u8
|
||||
|
||||
```
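A short sketch of how a few of these getters can be used on an action's params (same pattern as the play_dagu example above; the key names are the ones from the dagu heroscript and the module path is taken from that example):

```v
import freeflowuniverse.herolib.core.playbook

fn example(mut plbook playbook.PlayBook) ! {
	for myaction in plbook.find(filter: 'dagu.script_define')! {
		mut p := myaction.params
		name := p.get('name')! // errors if 'name' is not set
		title := p.get_default('title', 'My Hero DAG')! // falls back to the default
		colors := p.get_list('colors')! // 'green,red,purple' -> ['green', 'red', 'purple']
		start := p.get_default_true('start') // missing means true
		println('${name} ${title} ${colors} ${start}')
	}
}
```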
|
||||
|
||||
## how internally a heroscript gets parsed for params
|
||||
|
||||
- example to show how a heroscript gets parsed in action with params
|
||||
- params are part of action object
|
||||
|
||||
```heroscript
|
||||
example text to parse (heroscript)
|
||||
|
||||
id:a1 name6:aaaaa
|
||||
name:'need to do something 1'
|
||||
description:
|
||||
'
|
||||
## markdown works in it
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
|
||||
name2: test
|
||||
name3: hi
|
||||
name10:'this is with space' name11:aaa11
|
||||
|
||||
name4: 'aaa'
|
||||
|
||||
//somecomment
|
||||
name5: 'aab'
|
||||
```
|
||||
|
||||
the params are part of the action and are represented as follows for the above:
|
||||
|
||||
```vlang
|
||||
Params{
|
||||
params: [Param{
|
||||
key: 'id'
|
||||
value: 'a1'
|
||||
}, Param{
|
||||
key: 'name6'
|
||||
value: 'aaaaa'
|
||||
}, Param{
|
||||
key: 'name'
|
||||
value: 'need to do something 1'
|
||||
}, Param{
|
||||
key: 'description'
|
||||
value: '## markdown works in it
|
||||
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
}, Param{
|
||||
key: 'name2'
|
||||
value: 'test'
|
||||
}, Param{
|
||||
key: 'name3'
|
||||
value: 'hi'
|
||||
}, Param{
|
||||
key: 'name10'
|
||||
value: 'this is with space'
|
||||
}, Param{
|
||||
key: 'name11'
|
||||
value: 'aaa11'
|
||||
}, Param{
|
||||
key: 'name4'
|
||||
value: 'aaa'
|
||||
}, Param{
|
||||
key: 'name5'
|
||||
value: 'aab'
|
||||
}]
|
||||
}
|
||||
```
|
||||
76 cli/compile.vsh Executable file
@@ -0,0 +1,76 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import os
|
||||
import flag
|
||||
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application('compile.vsh')
|
||||
fp.version('v0.1.0')
|
||||
fp.description('Compile hero binary in debug or production mode')
|
||||
fp.skip_executable()
|
||||
|
||||
prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||
|
||||
if help_requested {
|
||||
println(fp.usage())
|
||||
exit(0)
|
||||
}
|
||||
|
||||
additional_args := fp.finalize() or {
|
||||
eprintln(err)
|
||||
println(fp.usage())
|
||||
exit(1)
|
||||
}
|
||||
|
||||
if additional_args.len > 0 {
|
||||
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
|
||||
println(fp.usage())
|
||||
exit(1)
|
||||
}
|
||||
|
||||
// Change to the hero directory
|
||||
hero_dir := os.join_path(os.home_dir(), 'code/github/freeflowuniverse/crystallib/cli/hero')
|
||||
os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}') }
|
||||
|
||||
// Set HEROPATH based on OS
|
||||
mut heropath := '/usr/local/bin/hero'
|
||||
if os.user_os() == 'macos' {
|
||||
heropath = os.join_path(os.home_dir(), 'hero/bin/hero')
|
||||
}
|
||||
|
||||
// Set compilation command based on OS and mode
|
||||
compile_cmd := if os.user_os() == 'macos' {
|
||||
if prod_mode {
|
||||
'v -enable-globals -w -n -prod hero.v'
|
||||
} else {
|
||||
'v -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals hero.v'
|
||||
}
|
||||
} else {
|
||||
if prod_mode {
|
||||
'v -cg -enable-globals -parallel-cc -w -n hero.v'
|
||||
} else {
|
||||
'v -cg -enable-globals -w -n hero.v'
|
||||
}
|
||||
}
|
||||
|
||||
println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
|
||||
|
||||
if os.system(compile_cmd) != 0 {
|
||||
panic('Failed to compile hero.v with command: ${compile_cmd}')
|
||||
}
|
||||
|
||||
// Make executable
|
||||
os.chmod('hero', 0o755) or { panic('Failed to make hero binary executable: ${err}') }
|
||||
|
||||
// Ensure destination directory exists
|
||||
os.mkdir_all(os.dir(heropath)) or { panic('Failed to create directory ${os.dir(heropath)}: ${err}') }
|
||||
|
||||
// Copy to destination paths
|
||||
os.cp('hero', heropath) or { panic('Failed to copy hero binary to ${heropath}: ${err}') }
|
||||
os.cp('hero', '/tmp/hero') or { panic('Failed to copy hero binary to /tmp/hero: ${err}') }
|
||||
|
||||
// Clean up
|
||||
os.rm('hero') or { panic('Failed to remove temporary hero binary: ${err}') }
|
||||
|
||||
println('**COMPILE OK**')
|
||||
102 cli/hero.v Normal file
@@ -0,0 +1,102 @@
module main
|
||||
|
||||
import os
|
||||
import cli { Command, Flag }
|
||||
import freeflowuniverse.herolib.hero.cmds
|
||||
// import freeflowuniverse.herolib.hero.publishing
|
||||
import freeflowuniverse.herolib.installers.base
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.ui
|
||||
import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.core.playcmds
|
||||
|
||||
fn playcmds_do(path string) ! {
|
||||
mut plbook := playbook.new(path: path)!
|
||||
playcmds.run(mut plbook, false)!
|
||||
}
|
||||
|
||||
fn do() ! {
|
||||
if os.args.len == 2 {
|
||||
mypath := os.args[1]
|
||||
if mypath.to_lower().ends_with('.hero') {
|
||||
// hero was called from a file
|
||||
playcmds_do(mypath)!
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
mut cmd := Command{
|
||||
name: 'hero'
|
||||
description: 'Your HERO toolset.'
|
||||
version: '2.0.0'
|
||||
}
|
||||
|
||||
cmd.add_flag(Flag{
|
||||
flag: .string
|
||||
name: 'url'
|
||||
abbrev: 'u'
|
||||
global: true
|
||||
description: 'url of playbook'
|
||||
})
|
||||
|
||||
// herocmds.cmd_run_add_flags(mut cmd)
|
||||
|
||||
mut toinstall := false
|
||||
if !osal.cmd_exists('mc') || !osal.cmd_exists('redis-cli') {
|
||||
toinstall = true
|
||||
}
|
||||
|
||||
if osal.is_osx() {
|
||||
if !osal.cmd_exists('brew') {
|
||||
console.clear()
|
||||
mut myui := ui.new()!
|
||||
toinstall = myui.ask_yesno(
|
||||
question: "we didn't find brew installed is it ok to install for you?"
|
||||
default: true
|
||||
)!
|
||||
if toinstall {
|
||||
base.install()!
|
||||
}
|
||||
console.clear()
|
||||
console.print_stderr('Brew installed, please follow instructions and do hero ... again.')
|
||||
exit(0)
|
||||
}
|
||||
} else {
|
||||
if toinstall {
|
||||
base.install()!
|
||||
}
|
||||
}
|
||||
|
||||
base.redis_install()!
|
||||
|
||||
//herocmds.cmd_bootstrap(mut cmd)
|
||||
// herocmds.cmd_run(mut cmd)
|
||||
// herocmds.cmd_git(mut cmd)
|
||||
// herocmds.cmd_init(mut cmd)
|
||||
// herocmds.cmd_imagedownsize(mut cmd)
|
||||
// herocmds.cmd_biztools(mut cmd)
|
||||
// herocmds.cmd_gen(mut cmd)
|
||||
// herocmds.cmd_sshagent(mut cmd)
|
||||
// herocmds.cmd_installers(mut cmd)
|
||||
// herocmds.cmd_configure(mut cmd)
|
||||
// herocmds.cmd_postgres(mut cmd)
|
||||
// herocmds.cmd_mdbook(mut cmd)
|
||||
// herocmds.cmd_luadns(mut cmd)
|
||||
//herocmds.cmd_caddy(mut cmd)
|
||||
//herocmds.cmd_zola(mut cmd)
|
||||
// herocmds.cmd_juggler(mut cmd)
|
||||
// herocmds.cmd_generator(mut cmd)
|
||||
// herocmds.cmd_docsorter(mut cmd)
|
||||
// cmd.add_command(publishing.cmd_publisher(pre_func))
|
||||
cmd.setup()
|
||||
cmd.parse(os.args)
|
||||
}
|
||||
|
||||
fn main() {
|
||||
do() or { panic(err) }
|
||||
}
|
||||
|
||||
fn pre_func(cmd Command) ! {
|
||||
herocmds.plbook_run(cmd)!
|
||||
}
|
||||
66 doc.vsh Normal file
@@ -0,0 +1,66 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run

import os

abs_dir_of_script := dir(@FILE)

// Format code
// os.system returns the command's exit code (an int), so failures are detected
// by checking the return value rather than with an `or` block.
println('Formatting code...')
if os.system('v fmt -w ${abs_dir_of_script}/examples') != 0 {
	eprintln('Warning: Failed to format examples')
}
if os.system('v fmt -w ${abs_dir_of_script}/herolib') != 0 {
	eprintln('Warning: Failed to format herolib')
}

// Clean existing docs
println('Cleaning existing documentation...')
os.rmdir_all('${abs_dir_of_script}/docs') or {}

herolib_path := os.join_path(abs_dir_of_script, 'herolib')
os.chdir(herolib_path) or { panic('Failed to change directory to herolib: ${err}') }

os.rmdir_all('_docs') or {}
os.rmdir_all('docs') or {}

// Generate HTML documentation
println('Generating HTML documentation...')
if os.system('v doc -m -f html . -readme -comments -no-timestamp') != 0 {
	panic('Failed to generate HTML documentation')
}

// Move docs to parent directory
os.rename('_docs', '${abs_dir_of_script}/docs') or {
	panic('Failed to move documentation to parent directory: ${err}')
}

// Generate Markdown documentation
println('Generating Markdown documentation...')
os.rmdir_all('vdocs') or {}
os.mkdir_all('vdocs/v') or { panic('Failed to create v docs directory: ${err}') }
os.mkdir_all('vdocs/crystal') or { panic('Failed to create crystal docs directory: ${err}') }

if os.system('v doc -m -no-color -f md -o vdocs/v/') != 0 {
	panic('Failed to generate V markdown documentation')
}
if os.system('v doc -m -no-color -f md -o vdocs/crystal/') != 0 {
	panic('Failed to generate Crystal markdown documentation')
}

// Open documentation in browser on non-Linux systems
$if !linux {
	os.chdir(abs_dir_of_script) or { panic('Failed to change directory: ${err}') }
	if os.system('open docs/index.html') != 0 {
		eprintln('Warning: Failed to open documentation in browser')
	}
}

println('Documentation generation completed successfully!')
25 install.vsh Executable file
@@ -0,0 +1,25 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run

import os

abs_dir_of_script := dir(@FILE)

// Reset symlinks if requested
println('Resetting all symlinks...')
os.rmdir_all('${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {}
os.rmdir_all('${os.home_dir()}/.vmodules/vlang/testing') or {}
os.rm('/usr/local/bin/herolib') or {}

// Create necessary directories
os.mkdir_all('${os.home_dir()}/.vmodules/freeflowuniverse') or {
	panic('Failed to create directory ~/.vmodules/freeflowuniverse: ${err}')
}

// Create new symlinks
os.symlink('${abs_dir_of_script}/herolib', '${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {
	panic('Failed to create herolib symlink: ${err}')
}

println('Herolib installation completed successfully!')
38 lib/code/codemodel/README.md Normal file
@@ -0,0 +1,38 @@
# Code Model

A set of models that represent code, such as structs and functions. The motivation behind this module is to provide a more generic and lighter alternative to the v.ast code models, one that can be used for code parsing and code generation across multiple languages.

## Using Codemodel

While the models in this module can be used in any domain, they are used extensively in the [codeparser](../codeparser/) module and in codegen (under development). Below are examples of how codemodel can be used for parsing and generating code.

## Code parsing with codemodel

As shown in the example below, the codemodels returned by the parser can be used to infer information about the parsed code:

```v
code := codeparser.parse("somedir") // code is a list of code models

num_functions := code.filter(it is Function).len
structs := code.filter(it is Struct)
println("This directory has ${num_functions} functions")
println('The directory has the structs: ${structs.map(it.name)}')
```

or they can be used as intermediate structures to serialize code into some other format:

```v
mut code_md := ''

// describe each struct in markdown format
for structure in structs {
	code_md += '# ${structure.name}\n'
	code_md += 'Module: ${structure.mod}\n'
	code_md += '## Fields:\n'
	for field in structure.fields {
		code_md += '- ${field.name}\n'
	}
}
```
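These models can also be built by hand and turned back into V source, which is the code-generation direction mentioned above. A minimal sketch, using the Struct, StructField and Type models and the vgen() method defined in this module (the import path is an assumption):

```v
import freeflowuniverse.herolib.code.codemodel // assumed import path for this module

fn main() {
	// build a model by hand instead of parsing it from source
	person := codemodel.Struct{
		name:   'Person'
		is_pub: true
		fields: [
			codemodel.StructField{
				name: 'name'
				typ:  codemodel.Type{
					symbol: 'string'
				}
			},
			codemodel.StructField{
				name: 'age'
				typ:  codemodel.Type{
					symbol: 'int'
				}
			},
		]
	}
	println(person.vgen()) // prints the generated V struct declaration
}
```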
The [openrpc/docgen](../openrpc/docgen/) module demonstrates a good use case, where codemodels are serialized into JSON schemas to generate an OpenRPC description document from a client written in V.
99 lib/code/codemodel/codefile.v Normal file
@@ -0,0 +1,99 @@
module codemodel
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
pub struct CodeFile {
|
||||
pub mut:
|
||||
name string
|
||||
mod string
|
||||
imports []Import
|
||||
consts []Const
|
||||
items []CodeItem
|
||||
content string
|
||||
}
|
||||
|
||||
pub fn new_file(config CodeFile) CodeFile {
|
||||
return CodeFile{
|
||||
...config
|
||||
mod: texttools.name_fix(config.mod)
|
||||
items: config.items
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut file CodeFile) add_import(import_ Import) ! {
|
||||
for mut i in file.imports {
|
||||
if i.mod == import_.mod {
|
||||
i.add_types(import_.types)
|
||||
return
|
||||
}
|
||||
}
|
||||
file.imports << import_
|
||||
}
|
||||
|
||||
pub fn (code CodeFile) write_v(path string, options WriteOptions) ! {
|
||||
filename := '${options.prefix}${texttools.name_fix(code.name)}.v'
|
||||
mut filepath := pathlib.get('${path}/${filename}')
|
||||
|
||||
if !options.overwrite && filepath.exists() {
|
||||
return
|
||||
}
|
||||
|
||||
imports_str := code.imports.map(it.vgen()).join_lines()
|
||||
|
||||
code_str := if code.content != '' {
|
||||
code.content
|
||||
} else {
|
||||
vgen(code.items)
|
||||
}
|
||||
|
||||
consts_str := if code.consts.len > 1 {
|
||||
stmts := code.consts.map('${it.name} = ${it.value}')
|
||||
'\nconst(\n${stmts.join('\n')}\n)\n'
|
||||
} else if code.consts.len == 1 {
|
||||
'\nconst ${code.consts[0].name} = ${code.consts[0].value}\n'
|
||||
} else {
|
||||
''
|
||||
}
|
||||
|
||||
mut file := pathlib.get_file(
|
||||
path: filepath.path
|
||||
create: true
|
||||
)!
|
||||
file.write('module ${code.mod}\n${imports_str}\n${consts_str}\n${code_str}')!
|
||||
if options.format {
|
||||
os.execute('v fmt -w ${file.path}')
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (file CodeFile) get_function(name string) ?Function {
|
||||
functions := file.items.filter(it is Function).map(it as Function)
|
||||
target_lst := functions.filter(it.name == name)
|
||||
|
||||
if target_lst.len == 0 {
|
||||
return none
|
||||
}
|
||||
if target_lst.len > 1 {
|
||||
panic('This should never happen')
|
||||
}
|
||||
return target_lst[0]
|
||||
}
|
||||
|
||||
pub fn (mut file CodeFile) set_function(function Function) ! {
|
||||
function_names := file.items.map(if it is Function { it.name } else { '' })
|
||||
|
||||
index := function_names.index(function.name)
|
||||
if index == -1 {
|
||||
return error('function not found')
|
||||
}
|
||||
file.items[index] = function
|
||||
}
|
||||
|
||||
pub fn (file CodeFile) functions() []Function {
|
||||
return file.items.filter(it is Function).map(it as Function)
|
||||
}
|
||||
|
||||
pub fn (file CodeFile) structs() []Struct {
|
||||
return file.items.filter(it is Struct).map(it as Struct)
|
||||
}
|
||||
9 lib/code/codemodel/example.v Normal file
@@ -0,0 +1,9 @@
module codemodel

pub struct Example {
	function Function
	values   map[string]Value
	result   Value
}

pub type Value = string
205 lib/code/codemodel/model.v Normal file
@@ -0,0 +1,205 @@
module codemodel
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
// Code is a list of statements
|
||||
// pub type Code = []CodeItem
|
||||
|
||||
pub type CodeItem = Alias | Comment | CustomCode | Function | Import | Struct | Sumtype
|
||||
|
||||
// item for adding custom code in
|
||||
pub struct CustomCode {
|
||||
pub:
|
||||
text string
|
||||
}
|
||||
|
||||
pub struct Comment {
|
||||
pub:
|
||||
text string
|
||||
is_multi bool
|
||||
}
|
||||
|
||||
pub struct Struct {
|
||||
pub mut:
|
||||
name string
|
||||
description string
|
||||
mod string
|
||||
is_pub bool
|
||||
embeds []Struct @[str: skip]
|
||||
generics map[string]string @[str: skip]
|
||||
attrs []Attribute
|
||||
fields []StructField
|
||||
}
|
||||
|
||||
pub struct Sumtype {
|
||||
pub:
|
||||
name string
|
||||
description string
|
||||
types []Type
|
||||
}
|
||||
|
||||
pub struct StructField {
|
||||
pub mut:
|
||||
comments []Comment
|
||||
attrs []Attribute
|
||||
name string
|
||||
description string
|
||||
default string
|
||||
is_pub bool
|
||||
is_mut bool
|
||||
is_ref bool
|
||||
anon_struct Struct @[str: skip] // sometimes fields may hold anonymous structs
|
||||
typ Type
|
||||
structure Struct @[str: skip]
|
||||
}
|
||||
|
||||
pub struct Attribute {
|
||||
pub:
|
||||
name string // [name]
|
||||
has_arg bool
|
||||
arg string // [name: arg]
|
||||
}
|
||||
|
||||
pub struct Function {
|
||||
pub:
|
||||
name string
|
||||
receiver Param
|
||||
is_pub bool
|
||||
mod string
|
||||
pub mut:
|
||||
description string
|
||||
params []Param
|
||||
body string
|
||||
result Result
|
||||
has_return bool
|
||||
}
|
||||
|
||||
pub fn parse_function(code_ string) !Function {
|
||||
mut code := code_.trim_space()
|
||||
is_pub := code.starts_with('pub ')
|
||||
if is_pub {
|
||||
code = code.trim_string_left('pub ').trim_space()
|
||||
}
|
||||
|
||||
is_fn := code.starts_with('fn ')
|
||||
if !is_fn {
|
||||
return error('invalid function format')
|
||||
}
|
||||
code = code.trim_string_left('fn ').trim_space()
|
||||
|
||||
receiver := if code.starts_with('(') {
|
||||
param_str := code.all_after('(').all_before(')').trim_space()
|
||||
code = code.all_after(')').trim_space()
|
||||
parse_param(param_str)!
|
||||
} else {
|
||||
Param{}
|
||||
}
|
||||
|
||||
name := code.all_before('(').trim_space()
|
||||
code = code.trim_string_left(name).trim_space()
|
||||
|
||||
params_str := code.all_after('(').all_before(')')
|
||||
params := if params_str.trim_space() != '' {
|
||||
params_str_lst := params_str.split(',')
|
||||
params_str_lst.map(parse_param(it)!)
|
||||
} else {
|
||||
[]Param{}
|
||||
}
|
||||
result := parse_result(code.all_after(')').all_before('{').replace(' ', ''))!
|
||||
|
||||
body := if code.contains('{') { code.all_after('{').all_before_last('}') } else { '' }
|
||||
return Function{
|
||||
name: name
|
||||
receiver: receiver
|
||||
params: params
|
||||
result: result
|
||||
body: body
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_param(code_ string) !Param {
|
||||
mut code := code_.trim_space()
|
||||
is_mut := code.starts_with('mut ')
|
||||
if is_mut {
|
||||
code = code.trim_string_left('mut ').trim_space()
|
||||
}
|
||||
split := code.split(' ').filter(it != '')
|
||||
if split.len != 2 {
|
||||
return error('invalid param format: ${code_}')
|
||||
}
|
||||
return Param{
|
||||
name: split[0]
|
||||
typ: Type{
|
||||
symbol: split[1]
|
||||
}
|
||||
mutable: is_mut
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_result(code_ string) !Result {
|
||||
code := code_.replace(' ', '').trim_space()
|
||||
|
||||
return Result{
|
||||
result: code_.starts_with('!')
|
||||
optional: code_.starts_with('?')
|
||||
typ: Type{
|
||||
symbol: code.trim('!?')
|
||||
is_optional: code.starts_with('?')
|
||||
is_result: code.starts_with('!')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Param {
|
||||
pub:
|
||||
required bool
|
||||
mutable bool
|
||||
is_shared bool
|
||||
is_optional bool
|
||||
description string
|
||||
name string
|
||||
typ Type
|
||||
struct_ Struct
|
||||
}
|
||||
|
||||
pub struct Result {
|
||||
pub mut:
|
||||
typ Type
|
||||
description string
|
||||
name string
|
||||
result bool // whether is result type
|
||||
optional bool // whether is result type
|
||||
structure Struct
|
||||
}
|
||||
|
||||
// todo: maybe make 'is_' fields methods?
|
||||
pub struct Type {
|
||||
pub mut:
|
||||
is_reference bool @[str: skip]
|
||||
is_map bool @[str: skip]
|
||||
is_array bool
|
||||
is_mutable bool @[str: skip]
|
||||
is_shared bool @[str: skip]
|
||||
is_optional bool @[str: skip]
|
||||
is_result bool @[str: skip]
|
||||
symbol string
|
||||
mod string @[str: skip]
|
||||
}
|
||||
|
||||
pub struct File {
|
||||
pub mut:
|
||||
name string
|
||||
extension string
|
||||
content string
|
||||
}
|
||||
|
||||
pub fn (f File) write(path string) ! {
|
||||
mut fd_file := pathlib.get_file(path: '${path}/${f.name}.${f.extension}')!
|
||||
fd_file.write(f.content)!
|
||||
}
|
||||
|
||||
pub struct Alias {
|
||||
pub:
|
||||
name string
|
||||
description string
|
||||
typ Type
|
||||
}
|
||||
42 lib/code/codemodel/model_const.v Normal file
@@ -0,0 +1,42 @@
module codemodel
|
||||
|
||||
pub struct Const {
|
||||
name string
|
||||
value string
|
||||
}
|
||||
|
||||
pub fn parse_const(code_ string) !Const {
|
||||
code := code_.trim_space().all_before('\n')
|
||||
if !code.contains('=') {
|
||||
return error('code <${code_}> is not of const')
|
||||
}
|
||||
return Const{
|
||||
name: code.split('=')[0].trim_space()
|
||||
value: code.split('=')[1].trim_space()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_consts(code_ string) ![]Const {
|
||||
mut code := code_.trim_space()
|
||||
code = code.replace('const (', 'const(')
|
||||
|
||||
const_codes := code.split('\n').filter(it.trim_space().starts_with('const '))
|
||||
|
||||
mut consts := const_codes.map(parse_const(it)!)
|
||||
|
||||
const_blocks := code.split('const(')
|
||||
|
||||
if const_blocks.len == 1 {
|
||||
return consts
|
||||
}
|
||||
|
||||
for i, block in const_blocks {
|
||||
if i == 0 {
|
||||
continue
|
||||
}
|
||||
stmts := block.trim_string_left('const(').all_before('\n)').trim_space().split('\n')
|
||||
consts << stmts.map(parse_const(it)!)
|
||||
}
|
||||
|
||||
return consts
|
||||
}
|
||||
24 lib/code/codemodel/model_import.v Normal file
@@ -0,0 +1,24 @@
module codemodel
|
||||
|
||||
pub struct Import {
|
||||
pub mut:
|
||||
mod string
|
||||
types []string
|
||||
}
|
||||
|
||||
pub fn (mut i Import) add_types(types []string) {
|
||||
i.types << types.filter(it !in i.types)
|
||||
}
|
||||
|
||||
pub fn parse_import(code_ string) Import {
|
||||
code := code_.trim_space().trim_string_left('import').trim_space()
|
||||
types_str := if code.contains(' ') { code.all_after(' ').trim('{}') } else { '' }
|
||||
return Import{
|
||||
mod: code.all_before(' ')
|
||||
types: if types_str != '' {
|
||||
types_str.split(',').map(it.trim_space())
|
||||
} else {
|
||||
[]string{}
|
||||
}
|
||||
}
|
||||
}
|
||||
38 lib/code/codemodel/module.v Normal file
@@ -0,0 +1,38 @@
module codemodel
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
pub struct Module {
|
||||
pub mut:
|
||||
name string
|
||||
files []CodeFile
|
||||
misc_files []File
|
||||
// model CodeFile
|
||||
// methods CodeFile
|
||||
}
|
||||
|
||||
pub fn (mod Module) write_v(path string, options WriteOptions) ! {
|
||||
mut module_dir := pathlib.get_dir(
|
||||
path: '${path}/${mod.name}'
|
||||
empty: options.overwrite
|
||||
)!
|
||||
|
||||
if !options.overwrite && module_dir.exists() {
|
||||
return
|
||||
}
|
||||
|
||||
for file in mod.files {
|
||||
file.write_v(module_dir.path, options)!
|
||||
}
|
||||
for file in mod.misc_files {
|
||||
file.write(module_dir.path)!
|
||||
}
|
||||
|
||||
if options.format {
|
||||
os.execute('v fmt -w ${module_dir.path}')
|
||||
}
|
||||
if options.document {
|
||||
os.execute('v doc -f html -o ${module_dir.path}/docs ${module_dir.path}')
|
||||
}
|
||||
}
|
||||
0 lib/code/codemodel/templates/comment/comment.py Normal file
1 lib/code/codemodel/templates/comment/comment.v Normal file
@@ -0,0 +1 @@

@@ -0,0 +1,6 @@
@if function.description != ''
// @{function.description}
@endif
pub fn @receiver @{function.name}(@{params}) @{function.result.vgen()} {
@{function.body.trim_space().replace('\t', '')}
}
0 lib/code/codemodel/templates/function/method.py Normal file
26 lib/code/codemodel/templates/struct/struct.v.template Normal file
@@ -0,0 +1,26 @@
@{struct_.description}
|
||||
@if struct_.attrs.len > 0
|
||||
[
|
||||
@for attr in struct_.attrs
|
||||
@{attr.name}
|
||||
@end
|
||||
]
|
||||
@end
|
||||
@{prefix} struct @{name} {
|
||||
@for embed in struct_.embeds
|
||||
@{embed.get_type_symbol()}
|
||||
@end
|
||||
@{priv_fields.join_lines()}
|
||||
@if pub_fields.len > 0
|
||||
pub:
|
||||
@{pub_fields.join_lines()}
|
||||
@end
|
||||
@if mut_fields.len > 0
|
||||
mut:
|
||||
@{mut_fields.join_lines()}
|
||||
@end
|
||||
@if pub_mut_fields.len > 0
|
||||
pub mut:
|
||||
@{pub_mut_fields.join_lines()}
|
||||
@end
|
||||
}
|
||||
92 lib/code/codemodel/utils.v Normal file
@@ -0,0 +1,92 @@
module codemodel
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import rand
|
||||
|
||||
pub struct GetStruct {
|
||||
pub:
|
||||
code []CodeItem
|
||||
mod string
|
||||
name string
|
||||
}
|
||||
|
||||
pub fn get_struct(params GetStruct) ?Struct {
|
||||
structs_ := params.code.filter(it is Struct).map(it as Struct)
|
||||
structs := structs_.filter(it.name == params.name)
|
||||
if structs.len == 0 {
|
||||
return none
|
||||
} else if structs.len > 1 {
|
||||
panic('Multiple structs with same name found. This should never happen.')
|
||||
}
|
||||
return structs[0]
|
||||
}
|
||||
|
||||
pub fn inflate_types(mut code []CodeItem) {
|
||||
for mut item in code {
|
||||
if item is Struct {
|
||||
// TODO: handle this when refactoring types / structs
|
||||
|
||||
inflate_struct_fields(code, mut item)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn inflate_struct_fields(code []CodeItem, mut struct_ CodeItem) {
|
||||
for mut field in (struct_ as Struct).fields {
|
||||
// TODO: fix inflation for imported types
|
||||
if field.typ.symbol.starts_with_capital() {
|
||||
field.structure = get_struct(
|
||||
code: code
|
||||
name: field.typ.symbol
|
||||
) or { continue }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct GenerateCallParams {
|
||||
pub:
|
||||
receiver string
|
||||
}
|
||||
|
||||
pub fn (func Function) generate_call(params GenerateCallParams) !string {
|
||||
mut call := ''
|
||||
if func.result.typ.symbol != '' {
|
||||
call = 'result := '
|
||||
}
|
||||
call += if params.receiver != '' {
|
||||
'${params.receiver}.${func.name}'
|
||||
} else if func.receiver.name != '' {
|
||||
'${func.receiver.name}.${func.name}'
|
||||
} else {
|
||||
func.name
|
||||
}
|
||||
|
||||
call += if func.params.len != 0 {
|
||||
'(${func.params.map(it.generate_value()!).join(',')})'
|
||||
} else {
|
||||
'()'
|
||||
}
|
||||
|
||||
if func.result.result {
|
||||
call += '!'
|
||||
}
|
||||
return call
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct GenerateValueParams {
|
||||
}
|
||||
|
||||
pub fn (param Param) generate_value() !string {
|
||||
if param.typ.symbol == 'string' {
|
||||
return "'mock_string_${rand.string(3)}'"
|
||||
} else if param.typ.symbol == 'int' || param.typ.symbol == 'u32' {
|
||||
return '42'
|
||||
} else if param.typ.symbol[0].is_capital() {
|
||||
return '${param.typ.symbol}{}'
|
||||
} else {
|
||||
console.print_debug('mock values for types other than strings and ints are not yet supported')
|
||||
}
|
||||
return ''
|
||||
}
|
||||
274 lib/code/codemodel/vgen.v Normal file
@@ -0,0 +1,274 @@
module codemodel
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub struct WriteCode {
|
||||
destination string
|
||||
}
|
||||
|
||||
interface ICodeItem {
|
||||
vgen() string
|
||||
}
|
||||
|
||||
pub fn vgen(code []CodeItem) string {
|
||||
mut str := ''
|
||||
for item in code {
|
||||
if item is Function {
|
||||
str += '\n${item.vgen()}'
|
||||
}
|
||||
if item is Struct {
|
||||
str += '\n${item.vgen()}'
|
||||
}
|
||||
if item is CustomCode {
|
||||
str += '\n${item.vgen()}'
|
||||
}
|
||||
}
|
||||
return str
|
||||
}
|
||||
|
||||
// pub fn (code Code) vgen() string {
|
||||
// return code.items.map(it.vgen()).join_lines()
|
||||
// }
|
||||
|
||||
// vgen_import generates an import statement for a given type
|
||||
pub fn (import_ Import) vgen() string {
|
||||
types_str := if import_.types.len > 0 {
|
||||
'{${import_.types.join(', ')}}'
|
||||
} else {
|
||||
''
|
||||
} // comma separated string list of types
|
||||
return 'import ${import_.mod} ${types_str}'
|
||||
}
|
||||
|
||||
// TODO: enforce that it can't be both mutable and shared
|
||||
pub fn (type_ Type) vgen() string {
|
||||
mut type_str := ''
|
||||
if type_.is_mutable {
|
||||
type_str += 'mut '
|
||||
} else if type_.is_shared {
|
||||
type_str += 'shared '
|
||||
}
|
||||
|
||||
if type_.is_optional {
|
||||
type_str += '?'
|
||||
} else if type_.is_result {
|
||||
type_str += '!'
|
||||
}
|
||||
|
||||
return '${type_str} ${type_.symbol}'
|
||||
}
|
||||
|
||||
pub fn (field StructField) vgen() string {
|
||||
symbol := field.get_type_symbol()
|
||||
mut vstr := '${field.name} ${symbol}'
|
||||
if field.description != '' {
|
||||
vstr += '// ${field.description}'
|
||||
}
|
||||
return vstr
|
||||
}
|
||||
|
||||
pub fn (field StructField) get_type_symbol() string {
|
||||
mut field_str := if field.structure.name != '' {
|
||||
field.structure.get_type_symbol()
|
||||
} else {
|
||||
field.typ.symbol
|
||||
}
|
||||
|
||||
if field.is_ref {
|
||||
field_str = '&${field_str}'
|
||||
}
|
||||
|
||||
return field_str
|
||||
}
|
||||
|
||||
pub fn (structure Struct) get_type_symbol() string {
|
||||
mut symbol := if structure.mod != '' {
|
||||
'${structure.mod.all_after_last('.')}.${structure.name}'
|
||||
} else {
|
||||
structure.name
|
||||
}
|
||||
if structure.generics.len > 0 {
|
||||
symbol = '${symbol}${vgen_generics(structure.generics)}'
|
||||
}
|
||||
|
||||
return symbol
|
||||
}
|
||||
|
||||
pub fn vgen_generics(generics map[string]string) string {
|
||||
if generics.keys().len == 0 {
|
||||
return ''
|
||||
}
|
||||
mut vstr := '['
|
||||
for key, val in generics {
|
||||
vstr += if val != '' { val } else { key }
|
||||
}
|
||||
return '${vstr}]'
|
||||
}
|
||||
|
||||
// vgen_function generates a function statement for a function
|
||||
pub fn (function Function) vgen(options WriteOptions) string {
|
||||
mut params_ := function.params.map(Param{
|
||||
...it
|
||||
typ: Type{
|
||||
symbol: if it.struct_.name != '' {
|
||||
it.struct_.name
|
||||
} else {
|
||||
it.typ.symbol
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
optionals := params_.filter(it.is_optional)
|
||||
options_struct := Struct{
|
||||
name: '${texttools.name_fix_snake_to_pascal(function.name)}Options'
|
||||
attrs: [Attribute{
|
||||
name: 'params'
|
||||
}]
|
||||
fields: optionals.map(StructField{
|
||||
name: it.name
|
||||
description: it.description
|
||||
typ: Type{
|
||||
symbol: it.typ.symbol
|
||||
}
|
||||
})
|
||||
}
|
||||
if optionals.len > 0 {
|
||||
params_ << Param{
|
||||
name: 'options'
|
||||
typ: Type{
|
||||
symbol: options_struct.name
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
params := params_.filter(!it.is_optional).map('${it.name} ${it.typ.symbol}').join(', ')
|
||||
|
||||
receiver := function.receiver.vgen()
|
||||
|
||||
mut function_str := $tmpl('templates/function/function.v.template')
|
||||
|
||||
// if options.format {
|
||||
// result := os.execute_opt('echo "${function_str.replace('$', '\\$')}" | v fmt') or {
|
||||
// panic('${function_str}\n${err}')
|
||||
// }
|
||||
// function_str = result.output
|
||||
// }
|
||||
function_str = function_str.split_into_lines().filter(!it.starts_with('import ')).join('\n')
|
||||
|
||||
return if options_struct.fields.len != 0 {
|
||||
'${options_struct.vgen()}\n${function_str}'
|
||||
} else {
|
||||
function_str
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (param Param) vgen() string {
|
||||
if param.name == '' {
|
||||
return ''
|
||||
}
|
||||
sym := if param.struct_.name != '' {
|
||||
param.struct_.get_type_symbol()
|
||||
} else {
|
||||
param.typ.symbol
|
||||
}
|
||||
|
||||
mut vstr := '${param.name} ${sym}'
|
||||
if param.typ.is_reference {
|
||||
vstr = '&${vstr}'
|
||||
}
|
||||
if param.mutable {
|
||||
vstr = 'mut ${vstr}'
|
||||
}
|
||||
return '(${vstr})'
|
||||
}
|
||||
|
||||
// vgen_function generates a function statement for a function
|
||||
pub fn (struct_ Struct) vgen() string {
|
||||
gen := VGenerator{false}
|
||||
return gen.generate_struct(struct_) or { panic(err) }
|
||||
// mut struct_str := $tmpl('templates/struct/struct.v.template')
|
||||
// return struct_str
|
||||
// result := os.execute_opt('echo "${struct_str.replace('$', '\$')}" | v fmt') or {panic(err)}
|
||||
// return result.output
|
||||
}
|
||||
|
||||
pub struct VGenerator {
|
||||
format bool
|
||||
}
|
||||
|
||||
pub fn (gen VGenerator) generate_struct(struct_ Struct) !string {
|
||||
name := if struct_.generics.len > 0 {
|
||||
'${struct_.name}${vgen_generics(struct_.generics)}'
|
||||
} else {
|
||||
struct_.name
|
||||
}
|
||||
|
||||
prefix := if struct_.is_pub {
|
||||
'pub'
|
||||
} else {
|
||||
''
|
||||
}
|
||||
|
||||
priv_fields := struct_.fields.filter(!it.is_mut && !it.is_pub).map(gen.generate_struct_field(it))
|
||||
pub_fields := struct_.fields.filter(!it.is_mut && it.is_pub).map(gen.generate_struct_field(it))
|
||||
mut_fields := struct_.fields.filter(it.is_mut && !it.is_pub).map(gen.generate_struct_field(it))
|
||||
pub_mut_fields := struct_.fields.filter(it.is_mut && it.is_pub).map(gen.generate_struct_field(it))
|
||||
|
||||
mut struct_str := $tmpl('templates/struct/struct.v.template')
|
||||
if gen.format {
|
||||
result := os.execute_opt('echo "${struct_str.replace('$', '\$')}" | v fmt') or {
|
||||
console.print_debug(struct_str)
|
||||
panic(err)
|
||||
}
|
||||
return result.output
|
||||
}
|
||||
return struct_str
|
||||
}
|
||||
|
||||
pub fn (gen VGenerator) generate_struct_field(field StructField) string {
|
||||
symbol := field.get_type_symbol()
|
||||
mut vstr := '${field.name} ${symbol}'
|
||||
if field.description != '' {
|
||||
vstr += '// ${field.description}'
|
||||
}
|
||||
return vstr
|
||||
}
|
||||
|
||||
pub fn (custom CustomCode) vgen() string {
|
||||
return custom.text
|
||||
}
|
||||
|
||||
// vgen_function generates a function statement for a function
|
||||
pub fn (result Result) vgen() string {
|
||||
result_type := if result.structure.name != '' {
|
||||
result.structure.get_type_symbol()
|
||||
} else if result.typ.symbol == 'void' {
|
||||
''
|
||||
} else {
|
||||
if result.typ.is_array {
|
||||
'[]${result.typ.symbol}'
|
||||
} else {
|
||||
result.typ.symbol
|
||||
}
|
||||
}
|
||||
str := if result.result {
|
||||
'!'
|
||||
} else if result.typ.is_result {
|
||||
'!'
|
||||
} else {
|
||||
''
|
||||
}
|
||||
return '${str}${result_type}'
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct WriteOptions {
|
||||
pub:
|
||||
format bool
|
||||
overwrite bool
|
||||
document bool
|
||||
prefix string
|
||||
}
|
||||
179 lib/code/codeparser/README.md Normal file
@@ -0,0 +1,179 @@
# Code Parser
|
||||
|
||||
A library of code parsers that parse code and comments into defined code primitives in the [CodeModel](../codemodel/README.md) library.
|
||||
|
||||
## What it does
|
||||
|
||||
- The codeparser parses code into the same generic code models.
|
||||
|
||||
This allows programs that use the codeparser to parse any of the languages the library supports (currently only V) without having to change their implementation.
|
||||
|
||||
- The codeparser parses comments into the code models.
|
||||
|
||||
This introduces styling guidelines for writing comments, which, if followed, let the parser extract a lot of structured information into the code models. See for instance how much information the codeparser can harvest from the comments of the V function below:
|
||||
|
||||
```go
|
||||
// hello generates a list of greeting strings for a specific name
|
||||
// - name: the name of the person being greeted
|
||||
// - times: the number of greeting messages to be generated
|
||||
// returns hello messages, a list of messages that greets a person with their name
|
||||
fn hello(name string, times int) []string {
|
||||
return "hello $name"
|
||||
}
|
||||
```
|
||||
|
||||
The VParser parses the above function into the following models:
|
||||
|
||||
```py
|
||||
Function {
|
||||
name: 'hello'
|
||||
description: 'generates a greeting string for a specific name'
|
||||
body: 'return "hello $name"'
|
||||
params: [
|
||||
Param {
|
||||
name: 'name'
|
||||
description: 'the name of the person being greeted'
|
||||
typ: Type {
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
Param {
|
||||
name: 'times'
|
||||
description: 'the number of greeting messages to be generated'
|
||||
typ: Type {
|
||||
symbol: 'int'
|
||||
}
|
||||
}
|
||||
]
|
||||
result: Result {
|
||||
name: 'hello messages'
|
||||
description: 'a list of messages that greets a person with their name'
|
||||
typ: Type {
|
||||
symbol: '[]string'
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
While this example contains a lot of comments for a simple function, this can come in especially useful when parsing more complex functions, and parsing for documentation generation (see [OpenRPC Document Generator](#openrpc-document-generator)).
|
||||
|
||||
## Getting started
|
||||
|
||||
1. Have a code directory or file to parse.
|
||||
2. Follow the annotation guidelines for the languages in your project, so your code is annotated in a format the codeparser can parse.
|
||||
3. Run `v run `
|
||||
|
||||
## Annotations
|
||||
|
||||
Currently, the codeparser can parse annotations on struct declarations and function declarations, and gather the following information:
|
||||
|
||||
**Struct declaration annotations**
|
||||
|
||||
- struct description
|
||||
- field descriptions for each field
|
||||
|
||||
**Function declaration annotations**
|
||||
|
||||
- function description
|
||||
- parameter descriptions for each parameter
|
||||
- result name and description of what the function returns
|
||||
|
||||
The codeparser expects code to be annotated in a certain format to be able to parse descriptive comments into ['code items'](). While failure to follow this formatting won't cause any errors, some of the comments may end up not being parsed into the ['code model']() that is output. The format of annotations expected in each programming language the codeparser supports is detailed below.
|
||||
|
||||
### Annotating code in V
|
||||
|
||||
- Struct annotations:
|
||||
|
||||
```go
|
||||
// this is a description of the struct
|
||||
struct Example {
|
||||
field0 string // this comment describes field0
|
||||
field1 int // this comment describes field1
|
||||
}
|
||||
```
|
||||
|
||||
This struct is parsed as the following:
|
||||
|
||||
```py
|
||||
Struct {
|
||||
name: 'Example'
|
||||
description: 'this is a description of the struct'
|
||||
fields: [
|
||||
StructField {
|
||||
name: 'field0'
|
||||
description: 'this comment describes field0'
|
||||
typ: Type {
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
StructField {
|
||||
name: 'field1'
|
||||
description: 'this comment describes field1'
|
||||
typ: Type {
|
||||
symbol: 'int'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
- Function annotations:
|
||||
|
||||
```go
|
||||
// some_function is described by the words following the functions name
|
||||
// - param0: this sentence after the colon describes param0
|
||||
// - param1: this sentence after the colon describes param1
|
||||
// returns the desired result, this sentence after the comma describes 'the desired result'
|
||||
fn some_function(param0 string, param1 int) result []string {}
|
||||
```
|
||||
|
||||
This function is parsed as the following:
|
||||
|
||||
```py
|
||||
Function {
|
||||
name: 'some_function'
|
||||
description: 'is described by the words following the functions name'
|
||||
body: ''
|
||||
params: [
|
||||
Param {
|
||||
name: 'param0'
|
||||
description: 'this sentence after the colon describes param0'
|
||||
typ: Type {
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
Param {
|
||||
name: 'param1'
|
||||
description: 'this sentence after the colon describes param1'
|
||||
typ: Type {
|
||||
symbol: 'int'
|
||||
}
|
||||
}
|
||||
]
|
||||
result: Result {
|
||||
name: 'the desired result'
|
||||
description: 'this sentence after the comma describes \'the desired result\''
|
||||
typ: Type {
|
||||
symbol: '[]string'
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## VParser
|
||||
|
||||
NB: v.parser refers to the parser in the V standard library, whereas VParser refers to the codeparser for V in this module.

The VParser uses the v.ast and v.parser libraries to parse through the code in V files. The main purpose of the VParser in this library is to provide a simpler alternative to the builtin v.parser for less complex applications. As the v.parser module is used for parsing and compiling V itself, its AST models for function and struct declarations come with a lot of overhead that is not necessary for simpler applications.
|
||||
|
||||
### Using VParser
|
||||
|
||||
The vparser contains only one public function: `pub fn parse_v(path_ string, parser VParser)`.
|
||||
|
||||
The VParser struct can be configured to determine how the parsing should be done on a path_ containing V files. See the [docs]() for more information on using the parse_v function.
|
||||
|
||||
### Example applications
|
||||
|
||||
#### [OpenRPC Document Generator](../openrpc/docgen/)
|
||||
|
||||
The OpenRPC document generator uses the VParser to parse through OpenRPC Client code in V, to create an OpenRPC Document from the parsed code.
|
||||
32 lib/code/codeparser/parse_example.v Normal file
@@ -0,0 +1,32 @@
module codeparser
|
||||
|
||||
// import freeflowuniverse.herolib.core.codemodel {Example}
|
||||
// import freeflowuniverse.herolib.rpc.openrpc {ExamplePairing}
|
||||
|
||||
// pub fn parse_example_pairing(text_ string) !ExamplePairing {
|
||||
// if !text_.contains('Example:') { return error('no example found fitting format') }
|
||||
// mut text := text_.all_after('Example:').trim_space()
|
||||
|
||||
// mut pairing := ExamplePairing{}
|
||||
|
||||
// if text.contains('assert') {
|
||||
// pairing.name = if text.all_before('assert').trim_space() != '' {
|
||||
// text.all_before('assert').trim_space()
|
||||
// } else {text.all_after('assert').all_before('(').trim_space()}
|
||||
// value := text.all_after('==').all_before('//').trim_space()
|
||||
// pairing.result = parse_example()
|
||||
// description := text.all_after('//').trim_space()
|
||||
// }
|
||||
|
||||
// return pairing
|
||||
// }
|
||||
|
||||
// pub fn parse_examples(text string) []openrpc.Example {
|
||||
|
||||
// }
|
||||
|
||||
// pub fn parse_example(text string) openrpc.Example {
|
||||
// return Example{
|
||||
|
||||
// }
|
||||
// }
|
||||
31 lib/code/codeparser/parse_example_test.v Normal file
@@ -0,0 +1,31 @@
module codeparser
|
||||
|
||||
// const example_txt = "
|
||||
// Example: Get pet example.
|
||||
// assert some_function('input_string') == 'output_string'
|
||||
// "
|
||||
|
||||
// // "examples": [
|
||||
// // {
|
||||
// // "name": "getPetExample",
|
||||
// // "description": "get pet example",
|
||||
// // "params": [
|
||||
// // {
|
||||
// // "name": "petId",
|
||||
// // "value": 7
|
||||
// // }
|
||||
// // ],
|
||||
// // "result": {
|
||||
// // "name": "getPetExampleResult",
|
||||
// // "value": {
|
||||
// // "name": "fluffy",
|
||||
// // "tag": "poodle",
|
||||
// // "id": 7
|
||||
// // }
|
||||
// // }
|
||||
// // }
|
||||
|
||||
// fn test_parse_example() ! {
|
||||
// example := parse_example(example_txt)
|
||||
// panic('example ${example}')
|
||||
// }
|
||||
31 lib/code/codeparser/testdata/file.v vendored Normal file
@@ -0,0 +1,31 @@
module testdata
|
||||
|
||||
// file_func0 is the first function of file
|
||||
fn file_func0() {}
|
||||
|
||||
// file_func1 is the second function of file
|
||||
// - name: a name that the function will do nothing with
|
||||
pub fn file_func1(name string) {}
|
||||
|
||||
// FileStruct0 defines the configuration params of file_func2
|
||||
@[params]
|
||||
pub struct FileStruct0 {
|
||||
param1 string //
|
||||
param2 int //
|
||||
}
|
||||
|
||||
// file_func2 is the third function of the file
|
||||
// - config: configuration for file_func2
|
||||
pub fn file_func2(config FileStruct0) {}
|
||||
|
||||
pub struct FileStruct1 {}
|
||||
|
||||
// file_func3 is the fourth function of the file
|
||||
// is does something with param1 and param2 and creates FileStruct1
|
||||
// returns the created filestruct1, a FileStruc1 struct filled in with params 1 and 2
|
||||
pub fn file_func3(param1 string, param2 int) FileStruct1 {
|
||||
return FileStruct1{}
|
||||
}
|
||||
32
lib/code/codeparser/testdata/flatdir/anotherfile.v
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
module flatdir
|
||||
|
||||
// anotherfile_func0 is the first function of file
|
||||
fn anotherfile_func0() {}
|
||||
|
||||
// anotherfile_func1 is the second function of file
|
||||
// - name: a name that the function will do nothing with
|
||||
pub fn anotherfile_func1(name string) {}
|
||||
|
||||
// AnotherfileStruct0 defines the configuration params of anotherfile_func2
|
||||
@[params]
|
||||
pub struct AnotherfileStruct0 {
|
||||
param1 string //
|
||||
param2 int //
|
||||
}
|
||||
|
||||
// anotherfile_func2 is the third function of the file
|
||||
// - config: configuration for anotherfile_func2
|
||||
pub fn anotherfile_func2(config AnotherfileStruct0) {}
|
||||
|
||||
pub struct AnotherfileStruct1 {
|
||||
param string
|
||||
}
|
||||
|
||||
// anotherfile_func3 is the fourth function of the file
|
||||
// is does something with param1 and param2 and creates AnotherfileStruct1
|
||||
// returns the created filestruct1, a FileStruc1 struct filled in with params 1 and 2
|
||||
pub fn anotherfile_func3(param1 string, param2 string) AnotherfileStruct1 {
|
||||
return AnotherfileStruct1{
|
||||
param: param1 + param2
|
||||
}
|
||||
}
|
||||
32
lib/code/codeparser/testdata/flatdir/subfile.v
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
module flatdir
|
||||
|
||||
// subfile_func0 is the first function of file
|
||||
fn subfile_func0() {}
|
||||
|
||||
// subfile_func1 is the second function of file
|
||||
// - name: a name that the function will do nothing with
|
||||
pub fn subfile_func1(name string) {}
|
||||
|
||||
// SubfileStruct0 defines the configuration params of subfile_func2
|
||||
@[params]
|
||||
pub struct SubfileStruct0 {
|
||||
param1 string //
|
||||
param2 int //
|
||||
}
|
||||
|
||||
// subfile_func2 is the third function of the file
|
||||
// - config: configuration for subfile_func2
|
||||
pub fn subfile_func2(config SubfileStruct0) {}
|
||||
|
||||
pub struct SubfileStruct1 {
|
||||
param string
|
||||
}
|
||||
|
||||
// subfile_func3 is the fourth function of the file
|
||||
// is does something with param1 and param2 and creates SubfileStruct1
|
||||
// returns the created filestruct1, a FileStruc1 struct filled in with params 1 and 2
|
||||
pub fn subfile_func3(param1 string, param2 string) SubfileStruct1 {
|
||||
return SubfileStruct1{
|
||||
param: param1 + param2
|
||||
}
|
||||
}
|
||||
529
lib/code/codeparser/vparser.v
Normal file
@@ -0,0 +1,529 @@
|
||||
module codeparser
|
||||
|
||||
import v.ast
|
||||
import v.parser
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.core.codemodel { CodeFile, CodeItem, Function, Import, Module, Param, Result, Struct, StructField, Sumtype, Type, parse_consts, parse_import }
|
||||
import v.pref
|
||||
|
||||
// VParser holds configuration of parsing
|
||||
// has methods that implement parsing
|
||||
@[params]
|
||||
pub struct VParser {
|
||||
pub:
|
||||
exclude_dirs []string // directories to be excluded from parsing
|
||||
exclude_files []string // files to be excluded from parsing
|
||||
only_pub bool // whether to only parse public functions and structs
|
||||
recursive bool // whether subdirs should be parsed as well
|
||||
}
|
||||
|
||||
// parse_v takes in a path to parse V code from and
|
||||
// vparser configuration params, returns a list of parsed codeitems
|
||||
pub fn parse_v(path_ string, vparser VParser) ![]CodeItem {
|
||||
mut path := pathlib.get(path_)
|
||||
|
||||
$if debug {
|
||||
console.print_debug('Parsing path `${path.path}` with configuration:\n${vparser}\n')
|
||||
}
|
||||
|
||||
if !path.exists() {
|
||||
return error('Path `${path.path}` doesn\'t exist.')
|
||||
}
|
||||
|
||||
path.check()
|
||||
mut table := ast.new_table()
|
||||
return vparser.parse_vpath(mut path, mut table)!
|
||||
}
|
||||
|
||||
// parse_vpath parses the v code files and returns codeitems in a given path
|
||||
// can be recursive or not based on the parsers configuration
|
||||
fn (vparser VParser) parse_vpath(mut path pathlib.Path, mut table ast.Table) ![]CodeItem {
|
||||
mut code := []CodeItem{}
|
||||
// mut table := ast.new_table()
|
||||
// fpref := &pref.Preferences{ // preferences for parsing
|
||||
// is_fmt: true
|
||||
// }
|
||||
if path.is_dir() {
|
||||
dir_is_excluded := vparser.exclude_dirs.any(path.path.ends_with(it))
|
||||
if dir_is_excluded {
|
||||
return code
|
||||
}
|
||||
|
||||
if vparser.recursive {
|
||||
// parse subdirs if configured recursive
|
||||
mut flist := path.list(recursive: true)!
|
||||
for mut subdir in flist.paths {
|
||||
if subdir.is_dir() {
|
||||
code << vparser.parse_vpath(mut subdir, mut table)!
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mut fl := path.list(recursive: false)!
|
||||
for mut file in fl.paths {
|
||||
if !file.is_dir() {
|
||||
code << vparser.parse_vpath(mut file, mut table)!
|
||||
}
|
||||
}
|
||||
} else if path.is_file() {
|
||||
file_is_excluded := vparser.exclude_files.any(path.path.ends_with(it))
|
||||
// todo: use pathlib list regex param to filter non-v files
|
||||
if file_is_excluded || !path.path.ends_with('.v') {
|
||||
return code
|
||||
}
|
||||
code << vparser.parse_vfile(path.path, mut table)
|
||||
} else {
|
||||
return error('Path being parsed must either be a directory or a file.')
|
||||
}
|
||||
// codemodel.inflate_types(mut code)
|
||||
return code
|
||||
}
|
||||
|
||||
// parse_file parses a v code file and returns it as a CodeFile
|
||||
pub fn parse_file(path string, vparser VParser) !CodeFile {
|
||||
mut file := pathlib.get_file(path: path)!
|
||||
mut table := ast.new_table()
|
||||
items := vparser.parse_vfile(file.path, mut table)
|
||||
return CodeFile{
|
||||
name: file.name().trim_string_right('.v')
|
||||
imports: parse_imports(file.read()!)
|
||||
consts: parse_consts(file.read()!)!
|
||||
items: items
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_imports(code string) []Import {
|
||||
return code.split('\n').filter(it.starts_with('import ')).map(parse_import(it))
|
||||
}
|
||||
|
||||
// parse_vfile parses and returns code items from a v code file
|
||||
fn (vparser VParser) parse_vfile(path string, mut table ast.Table) []CodeItem {
|
||||
$if debug {
|
||||
console.print_debug('Parsing file `${path}`')
|
||||
}
|
||||
mut code := []CodeItem{}
|
||||
|
||||
// mut table := ast.new_table()
|
||||
fpref := &pref.Preferences{ // preferences for parsing
|
||||
is_fmt: true
|
||||
}
|
||||
file_ast := parser.parse_file(path, mut table, .parse_comments, fpref)
|
||||
mut file := pathlib.get_file(path: path) or { panic(err) }
|
||||
file_text := file.read() or { panic(err) }
|
||||
mut preceeding_comments := []ast.Comment{}
|
||||
|
||||
for stmt in file_ast.stmts {
|
||||
// code block from vlib/v/doc/doc.v
|
||||
if stmt is ast.ExprStmt {
|
||||
// Collect comments
|
||||
if stmt.expr is ast.Comment {
|
||||
preceeding_comments << stmt.expr as ast.Comment
|
||||
continue
|
||||
}
|
||||
}
|
||||
if stmt is ast.FnDecl {
|
||||
fn_decl := stmt as ast.FnDecl
|
||||
if fn_decl.attrs.len > 0 {
|
||||
openrpc_attrs := fn_decl.attrs.filter(it.name == 'openrpc')
|
||||
if openrpc_attrs.any(it.arg == 'exclude') {
|
||||
continue
|
||||
}
|
||||
}
|
||||
if fn_decl.is_pub || !vparser.only_pub {
|
||||
code << CodeItem(vparser.parse_vfunc(
|
||||
fn_decl: fn_decl
|
||||
table: table
|
||||
comments: preceeding_comments
|
||||
text: file_text
|
||||
))
|
||||
}
|
||||
preceeding_comments = []ast.Comment{}
|
||||
} else if stmt is ast.TypeDecl {
|
||||
if stmt is ast.SumTypeDecl {
|
||||
sumtype_decl := stmt as ast.SumTypeDecl
|
||||
if sumtype_decl.attrs.len > 0 {
|
||||
openrpc_attrs := sumtype_decl.attrs.filter(it.name == 'openrpc')
|
||||
if openrpc_attrs.any(it.arg == 'exclude') {
|
||||
continue
|
||||
}
|
||||
}
|
||||
if sumtype_decl.is_pub || !vparser.only_pub {
|
||||
code << CodeItem(vparser.parse_vsumtype(
|
||||
sumtype_decl: sumtype_decl
|
||||
table: table
|
||||
comments: preceeding_comments
|
||||
))
|
||||
}
|
||||
preceeding_comments = []ast.Comment{}
|
||||
}
|
||||
} else if stmt is ast.StructDecl {
|
||||
struct_decl := stmt as ast.StructDecl
|
||||
if struct_decl.attrs.len > 0 {
|
||||
openrpc_attrs := struct_decl.attrs.filter(it.name == 'openrpc')
|
||||
if openrpc_attrs.any(it.arg == 'exclude') {
|
||||
continue
|
||||
}
|
||||
}
|
||||
if struct_decl.is_pub || !vparser.only_pub {
|
||||
code << CodeItem(vparser.parse_vstruct(
|
||||
struct_decl: struct_decl
|
||||
table: table
|
||||
comments: preceeding_comments
|
||||
))
|
||||
}
|
||||
preceeding_comments = []ast.Comment{}
|
||||
}
|
||||
}
|
||||
return code
|
||||
}
|
||||
|
||||
// parse_module parses the v code files in a path and returns them as a Module
|
||||
pub fn parse_module(path_ string, vparser VParser) !Module {
|
||||
mut path := pathlib.get(path_)
|
||||
if !path.exists() {
|
||||
return error('Path `${path.path}` doesn\'t exist.')
|
||||
}
|
||||
|
||||
mut table := ast.new_table()
|
||||
mut code := []CodeFile{}
|
||||
// fpref := &pref.Preferences{ // preferences for parsing
|
||||
// is_fmt: true
|
||||
// }
|
||||
mut mod := Module{
|
||||
name: path.name()
|
||||
}
|
||||
if path.is_dir() {
|
||||
dir_is_excluded := vparser.exclude_dirs.any(path.path.ends_with(it))
|
||||
if dir_is_excluded {
|
||||
return Module{
|
||||
...mod
|
||||
files: code
|
||||
}
|
||||
}
|
||||
|
||||
if vparser.recursive {
|
||||
return error('recursive module parsing not yet supported')
|
||||
}
|
||||
|
||||
mut fl := path.list(recursive: false)!
|
||||
for mut file in fl.paths {
|
||||
if !file.is_dir() {
|
||||
code << parse_file(file.path, vparser)!
|
||||
}
|
||||
}
|
||||
} else if path.is_file() {
|
||||
file_is_excluded := vparser.exclude_files.any(path.path.ends_with(it))
|
||||
// todo: use pathlib list regex param to filter non-v files
|
||||
if file_is_excluded || !path.path.ends_with('.v') {
|
||||
return Module{
|
||||
...mod
|
||||
files: code
|
||||
}
|
||||
}
|
||||
code << parse_file(path.path, vparser)!
|
||||
} else {
|
||||
return error('Path being parsed must either be a directory or a file.')
|
||||
}
|
||||
// codemodel.inflate_types(mut code)
|
||||
return Module{
|
||||
...mod
|
||||
files: code
|
||||
}
|
||||
}
|
||||
|
||||
@[params]
|
||||
struct VFuncArgs {
|
||||
comments []ast.Comment // v comments that belong to the function
|
||||
fn_decl ast.FnDecl // v.ast parsed function declaration
|
||||
table &ast.Table // ast table used for getting typesymbols from
|
||||
text string
|
||||
}
|
||||
|
||||
// parse_vfunc parses function args into function struct
|
||||
pub fn (vparser VParser) parse_vfunc(args VFuncArgs) Function {
|
||||
$if debug {
|
||||
console.print_debug('Parsing function: ${args.fn_decl.short_name}')
|
||||
}
|
||||
|
||||
// get function params excluding receiver
|
||||
receiver_name := args.fn_decl.receiver.name
|
||||
receiver_type := args.table.type_to_str(args.fn_decl.receiver.typ).all_after_last('.')
|
||||
fn_params := args.fn_decl.params.filter(it.name != receiver_name)
|
||||
|
||||
receiver := Param{
|
||||
name: receiver_name
|
||||
typ: Type{
|
||||
symbol: receiver_type
|
||||
}
|
||||
mutable: args.fn_decl.rec_mut
|
||||
}
|
||||
|
||||
params := vparser.parse_params(
|
||||
comments: args.comments
|
||||
params: fn_params
|
||||
table: args.table
|
||||
)
|
||||
|
||||
result := vparser.parse_result(
|
||||
comments: args.comments
|
||||
return_type: args.fn_decl.return_type
|
||||
table: args.table
|
||||
)
|
||||
|
||||
mut fn_comments := []string{}
|
||||
for comment in args.comments.map(it.text.trim_string_left('\u0001').trim_space()) {
|
||||
if !comment.starts_with('-') && !comment.starts_with('returns') {
|
||||
fn_comments << comment.trim_string_left('${args.fn_decl.short_name} ')
|
||||
}
|
||||
}
|
||||
|
||||
text_lines := args.text.split('\n')
|
||||
fn_lines := text_lines.filter(it.contains('fn') && it.contains(' ${args.fn_decl.short_name}('))
|
||||
fn_line := fn_lines[0] or { panic('this should never happen') }
|
||||
line_i := text_lines.index(fn_line)
|
||||
end_i := line_i + text_lines[line_i..].index('}')
|
||||
|
||||
fn_text := text_lines[line_i..end_i + 1].join('\n')
|
||||
// mut fn_index := args.text.index(args.fn_decl.short_name) or {panic('this should never happen1')}
|
||||
// text_cropped := args.text[..fn_index] or {panic('this should never happen2')}
|
||||
// fn_start := text_cropped.last_index('fn ') or {panic('this should never happen3 \n-${text_cropped}')}
|
||||
// fn_text := args.text[fn_start..] or {panic('this should never happen4')}
|
||||
fn_parsed := codemodel.parse_function(fn_text) or { panic(err) }
|
||||
|
||||
return Function{
|
||||
name: args.fn_decl.short_name
|
||||
description: fn_comments.join(' ')
|
||||
mod: args.fn_decl.mod
|
||||
receiver: receiver
|
||||
params: params
|
||||
result: fn_parsed.result
|
||||
body: fn_parsed.body
|
||||
}
|
||||
}
|
||||
|
||||
@[params]
|
||||
struct ParamsArgs {
|
||||
comments []ast.Comment // comments of the function
|
||||
params []ast.Param // ast type of what function returns
|
||||
table &ast.Table // ast table for getting type names
|
||||
}
|
||||
|
||||
// parse_params parses ast function parameters into function parameters
|
||||
fn (vparser VParser) parse_params(args ParamsArgs) []Param {
|
||||
mut params := []Param{}
|
||||
for param in args.params {
|
||||
mut description := ''
|
||||
// parse comment line that describes param
|
||||
for comment in args.comments {
|
||||
if start := comment.text.index('- ${param.name}: ') {
|
||||
description = comment.text[start..].trim_string_left('- ${param.name}: ')
|
||||
}
|
||||
}
|
||||
|
||||
params << Param{
|
||||
name: param.name
|
||||
description: description
|
||||
typ: Type{
|
||||
symbol: args.table.type_to_str(param.typ).all_after_last('.')
|
||||
}
|
||||
}
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
@[params]
|
||||
struct ParamArgs {
|
||||
comments []ast.Comment // comments of the function
|
||||
param ast.Param // ast type of what function returns
|
||||
table &ast.Table // ast table for getting type names
|
||||
}
|
||||
|
||||
// parse_param parses a single ast function parameter into a Param
|
||||
fn (vparser VParser) parse_param(args ParamArgs) Param {
|
||||
mut description := ''
|
||||
// parse comment line that describes param
|
||||
for comment in args.comments {
|
||||
if start := comment.text.index('- ${args.param.name}: ') {
|
||||
description = comment.text[start..].trim_string_left('- ${args.param.name}: ')
|
||||
}
|
||||
}
|
||||
|
||||
return Param{
|
||||
name: args.param.name
|
||||
description: description
|
||||
typ: Type{
|
||||
symbol: args.table.type_to_str(args.param.typ).all_after_last('.')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ReturnArgs {
|
||||
comments []ast.Comment // comments of the function
|
||||
return_type ast.Type // v.ast type of what function returns
|
||||
table &ast.Table // v.ast table for getting type names
|
||||
}
|
||||
|
||||
// parse_result parses a function's comments and return type
|
||||
// returns a result struct that represents what the function's result is
|
||||
fn (vparser VParser) parse_result(args ReturnArgs) Result {
|
||||
comment_str := args.comments.map(it.text).join('')
|
||||
|
||||
// parse comments to get return name and description
|
||||
mut name := ''
|
||||
mut description := ''
|
||||
if start := comment_str.index('returns') {
|
||||
mut end := comment_str.index_after('.', start)
|
||||
if end == -1 {
|
||||
end = comment_str.len
|
||||
}
|
||||
return_str := comment_str[start..end].trim_string_left('returns ')
|
||||
|
||||
split := return_str.split(', ')
|
||||
name = split[0]
|
||||
if split.len > 1 {
|
||||
description = split[1..].join(', ')
|
||||
}
|
||||
}
|
||||
return_symbol := args.table.type_to_str(args.return_type).all_after_last('.')
|
||||
|
||||
return Result{
|
||||
name: name
|
||||
description: description
|
||||
typ: Type{
|
||||
symbol: return_symbol
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parse_type converts a v.ast type into a codemodel Type
|
||||
fn (vparser VParser) parse_type(typ ast.Type, table &ast.Table) Type {
|
||||
type_str := table.type_to_str(typ).all_after_last('.')
|
||||
return Type{
|
||||
symbol: type_str
|
||||
}
|
||||
}
|
||||
|
||||
struct VStructArgs {
|
||||
comments []ast.Comment // comments that belong to the struct declaration
|
||||
struct_decl ast.StructDecl // v.ast Struct declaration for struct being parsed
|
||||
table &ast.Table // v.ast table for getting type names
|
||||
}
|
||||
|
||||
// parse_vstruct parses struct declaration args into a Struct
|
||||
fn (vparser VParser) parse_vstruct(args VStructArgs) Struct {
|
||||
$if debug {
|
||||
console.print_debug('Parsing struct: ${args.struct_decl.name}')
|
||||
}
|
||||
|
||||
comments := args.comments.map(it.text.trim_string_left('\u0001').trim_space())
|
||||
mut fields := vparser.parse_fields(args.struct_decl.fields, args.table)
|
||||
fields << vparser.parse_embeds(args.struct_decl.embeds, args.table)
|
||||
return Struct{
|
||||
name: args.struct_decl.name.all_after_last('.')
|
||||
description: comments.join(' ')
|
||||
fields: fields
|
||||
mod: args.struct_decl.name.all_before_last('.')
|
||||
attrs: args.struct_decl.attrs.map(codemodel.Attribute{ name: it.name })
|
||||
is_pub: args.struct_decl.is_pub
|
||||
}
|
||||
}
|
||||
|
||||
struct VSumTypeArgs {
|
||||
comments []ast.Comment // comments that belong to the struct declaration
|
||||
sumtype_decl ast.SumTypeDecl // v.ast Struct declaration for struct being parsed
|
||||
table &ast.Table // v.ast table for getting type names
|
||||
}
|
||||
|
||||
// parse_vsumtype parses sumtype declaration args into a Sumtype
|
||||
fn (vparser VParser) parse_vsumtype(args VSumTypeArgs) Sumtype {
|
||||
$if debug {
|
||||
console.print_debug('Parsing sumtype: ${args.sumtype_decl.name}')
|
||||
}
|
||||
|
||||
comments := args.comments.map(it.text.trim_string_left('\u0001').trim_space())
|
||||
|
||||
return Sumtype{
|
||||
name: args.sumtype_decl.name.all_after_last('.')
|
||||
description: comments.join(' ')
|
||||
types: vparser.parse_variants(args.sumtype_decl.variants, args.table)
|
||||
}
|
||||
}
|
||||
|
||||
// parse_fields parses ast struct fields into struct fields
|
||||
fn (vparser VParser) parse_fields(fields []ast.StructField, table &ast.Table) []StructField {
|
||||
mut fields_ := []StructField{}
|
||||
for field in fields {
|
||||
mut anon_struct := Struct{}
|
||||
if table.type_to_str(field.typ).all_after_last('.').starts_with('_VAnon') {
|
||||
anon_struct = vparser.parse_vstruct(
|
||||
table: table
|
||||
struct_decl: field.anon_struct_decl
|
||||
)
|
||||
}
|
||||
|
||||
description := field.comments.map(it.text.trim_string_left('\u0001').trim_space()).join(' ')
|
||||
fields_ << StructField{
|
||||
attrs: field.attrs.map(codemodel.Attribute{
|
||||
name: it.name
|
||||
has_arg: it.has_arg
|
||||
arg: it.arg
|
||||
})
|
||||
name: field.name
|
||||
anon_struct: anon_struct
|
||||
description: description
|
||||
typ: Type{
|
||||
symbol: table.type_to_str(field.typ).all_after_last('.')
|
||||
is_array: table.type_to_str(field.typ).contains('[]')
|
||||
is_map: table.type_to_str(field.typ).contains('map[')
|
||||
}
|
||||
is_pub: field.is_pub
|
||||
is_mut: field.is_mut
|
||||
default: field.default_val
|
||||
}
|
||||
}
|
||||
return fields_
|
||||
|
||||
// return fields.map(
|
||||
// StructField{
|
||||
// name: it.name
|
||||
// typ: Type{
|
||||
// symbol: table.type_to_str(it.typ).all_after_last('.')
|
||||
// }
|
||||
// }
|
||||
// )
|
||||
}
|
||||
|
||||
// parse_embeds parses ast.embeds into struct fields
|
||||
// TODO: Support recursive fields
|
||||
fn (vparser VParser) parse_embeds(embeds []ast.Embed, table &ast.Table) []StructField {
|
||||
mut fields := []StructField{}
|
||||
for embed in embeds {
|
||||
$if debug {
|
||||
console.print_debug('Parsing embed: ${table.sym(embed.typ).info}')
|
||||
}
|
||||
embed_info := table.sym(embed.typ).info
|
||||
if embed_info is ast.Struct {
|
||||
// embeds: vparser.parse_embeds(embed_info.embeds, table)
|
||||
fields << vparser.parse_fields(embed_info.fields, table)
|
||||
}
|
||||
}
|
||||
return fields
|
||||
}
|
||||
|
||||
// parse_variants parses ast sumtype variants into types
|
||||
fn (vparser VParser) parse_variants(variants []ast.TypeNode, table &ast.Table) []Type {
|
||||
mut types := []Type{}
|
||||
for variant in variants {
|
||||
types << Type{
|
||||
symbol: table.type_to_str(variant.typ).all_after_last('.')
|
||||
}
|
||||
}
|
||||
return types
|
||||
}
|
||||
643
lib/code/codeparser/vparser_test.v
Normal file
@@ -0,0 +1,643 @@
|
||||
module codeparser
|
||||
|
||||
import freeflowuniverse.herolib.core.codemodel { CodeItem, Function, Struct }
|
||||
import os
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
const testpath = os.dir(@FILE) + '/testdata'
|
||||
|
||||
// is a map of test files used in these tests and their complete codeitems
|
||||
// used to make assertions and verify test outputs
|
||||
const testcode = {
|
||||
'anotherfile.v': [
|
||||
CodeItem(Function{
|
||||
name: 'anotherfile_func0'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the first function of file'
|
||||
params: []
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'anotherfile_func1'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the second function of file'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: 'a name that the function will do nothing with'
|
||||
name: 'name'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Struct{
|
||||
name: 'AnotherfileStruct0'
|
||||
description: 'AnotherfileStruct0 defines the configuration params of anotherfile_func2'
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
is_pub: true
|
||||
attrs: [
|
||||
codemodel.Attribute{
|
||||
name: 'params'
|
||||
has_arg: false
|
||||
arg: ''
|
||||
},
|
||||
]
|
||||
fields: [
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param1'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param2'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'int'
|
||||
}
|
||||
},
|
||||
]
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'anotherfile_func2'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the third function of the file'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: 'configuration for anotherfile_func2'
|
||||
name: 'config'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'AnotherfileStruct0'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Struct{
|
||||
name: 'AnotherfileStruct1'
|
||||
description: ''
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
is_pub: true
|
||||
fields: [
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'anotherfile_func3'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the fourth function of the file is does something with param1 and param2 and creates AnotherfileStruct1'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: 'param1'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: 'param2'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'AnotherfileStruct1'
|
||||
}
|
||||
description: 'a FileStruc1 struct filled in with params 1 and 2'
|
||||
name: 'the created filestruct1'
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
]
|
||||
'subfile.v': [
|
||||
CodeItem(Function{
|
||||
name: 'subfile_func0'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the first function of file'
|
||||
params: []
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'subfile_func1'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the second function of file'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: 'a name that the function will do nothing with'
|
||||
name: 'name'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Struct{
|
||||
name: 'SubfileStruct0'
|
||||
description: 'SubfileStruct0 defines the configuration params of subfile_func2'
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
is_pub: true
|
||||
attrs: [
|
||||
codemodel.Attribute{
|
||||
name: 'params'
|
||||
has_arg: false
|
||||
arg: ''
|
||||
},
|
||||
]
|
||||
fields: [
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param1'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param2'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'int'
|
||||
}
|
||||
},
|
||||
]
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'subfile_func2'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the third function of the file'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: 'configuration for subfile_func2'
|
||||
name: 'config'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'SubfileStruct0'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Struct{
|
||||
name: 'SubfileStruct1'
|
||||
description: ''
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
is_pub: true
|
||||
fields: [
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'subfile_func3'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata.flatdir'
|
||||
description: 'is the fourth function of the file is does something with param1 and param2 and creates SubfileStruct1'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: 'param1'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: 'param2'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'SubfileStruct1'
|
||||
}
|
||||
description: 'a FileStruc1 struct filled in with params 1 and 2'
|
||||
name: 'the created filestruct1'
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
]
|
||||
'file.v': [
|
||||
CodeItem(Function{
|
||||
name: 'file_func0'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata'
|
||||
description: 'is the first function of file'
|
||||
params: []
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'file_func1'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata'
|
||||
description: 'is the second function of file'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: 'a name that the function will do nothing with'
|
||||
name: 'name'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Struct{
|
||||
name: 'FileStruct0'
|
||||
description: 'FileStruct0 defines the configuration params of file_func2'
|
||||
mod: 'core.codeparser.testdata'
|
||||
is_pub: true
|
||||
attrs: [
|
||||
codemodel.Attribute{
|
||||
name: 'params'
|
||||
has_arg: false
|
||||
arg: ''
|
||||
},
|
||||
]
|
||||
fields: [
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param1'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
codemodel.StructField{
|
||||
comments: []
|
||||
attrs: []
|
||||
name: 'param2'
|
||||
description: ''
|
||||
anon_struct: Struct{
|
||||
name: ''
|
||||
description: ''
|
||||
fields: []
|
||||
}
|
||||
typ: codemodel.Type{
|
||||
symbol: 'int'
|
||||
}
|
||||
},
|
||||
]
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'file_func2'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata'
|
||||
description: 'is the third function of the file'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: 'configuration for file_func2'
|
||||
name: 'config'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'FileStruct0'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
description: ''
|
||||
name: ''
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
CodeItem(Struct{
|
||||
name: 'FileStruct1'
|
||||
description: ''
|
||||
fields: []
|
||||
mod: 'core.codeparser.testdata'
|
||||
is_pub: true
|
||||
}),
|
||||
CodeItem(Function{
|
||||
name: 'file_func3'
|
||||
receiver: codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: ''
|
||||
typ: codemodel.Type{
|
||||
symbol: 'void'
|
||||
}
|
||||
}
|
||||
mod: 'core.codeparser.testdata'
|
||||
description: 'is the fourth function of the file is does something with param1 and param2 and creates FileStruct1'
|
||||
params: [
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: 'param1'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'string'
|
||||
}
|
||||
},
|
||||
codemodel.Param{
|
||||
required: false
|
||||
description: ''
|
||||
name: 'param2'
|
||||
typ: codemodel.Type{
|
||||
symbol: 'int'
|
||||
}
|
||||
},
|
||||
]
|
||||
body: ''
|
||||
result: codemodel.Result{
|
||||
typ: codemodel.Type{
|
||||
symbol: 'FileStruct1'
|
||||
}
|
||||
description: 'a FileStruc1 struct filled in with params 1 and 2'
|
||||
name: 'the created filestruct1'
|
||||
}
|
||||
has_return: false
|
||||
}),
|
||||
]
|
||||
}
|
||||
|
||||
fn test_vparse_blankdir() ! {
|
||||
os.mkdir_all('${testpath}/blankdir', os.MkdirParams{})!
|
||||
code := parse_v('${testpath}/blankdir')!
|
||||
assert code.len == 0
|
||||
}
|
||||
|
||||
fn test_vparse_flat_directory() ! {
|
||||
code := parse_v('${testpath}/flatdir')!
|
||||
assert code.len == 12
|
||||
assert code[0] == testcode['anotherfile.v'][0]
|
||||
assert code[0..6] == testcode['anotherfile.v'][0..6], '<${code[0..6]}> vs <${testcode['anotherfile.v'][0..6]}>'
|
||||
assert code[6..12] == testcode['subfile.v'][0..6], '<${code[6..12]}> vs <${testcode['subfile.v'][0..6]}>'
|
||||
}
|
||||
|
||||
fn test_vparse_non_recursive() ! {
|
||||
code := parse_v(testpath)!
|
||||
assert code.len == 6
|
||||
assert code[0] == testcode['file.v'][0]
|
||||
assert code[0..6] == testcode['file.v'][0..6], '<${code[0..6]}> vs <${testcode['file.v'][0..6]}>'
|
||||
}
|
||||
|
||||
fn test_vparse_recursive() ! {
|
||||
$if debug {
|
||||
console.print_debug('\nTEST: test_vparse_recursive\n')
|
||||
}
|
||||
code := parse_v(testpath, recursive: true)!
|
||||
assert code.len == 18
|
||||
assert code[0..6] == testcode['anotherfile.v'][0..6]
|
||||
assert code[6..12] == testcode['subfile.v'][0..6]
|
||||
assert code[12..18] == testcode['file.v'][0..6]
|
||||
}
|
||||
|
||||
fn test_vparse_exclude_directories() ! {
|
||||
code := parse_v(testpath,
|
||||
recursive: true
|
||||
exclude_dirs: ['flatdir']
|
||||
)!
|
||||
assert code.len == 6
|
||||
assert code[0..6] == testcode['file.v'][0..6]
|
||||
}
|
||||
|
||||
fn test_vparse_exclude_files() ! {
|
||||
code := parse_v(testpath,
|
||||
recursive: true
|
||||
exclude_files: ['flatdir/anotherfile.v']
|
||||
)!
|
||||
assert code.len == 12
|
||||
assert code[0..6] == testcode['subfile.v'][0..6]
|
||||
assert code[6..12] == testcode['file.v'][0..6]
|
||||
}
|
||||
|
||||
fn test_vparse_only_public() ! {
|
||||
code := parse_v(testpath,
|
||||
recursive: true
|
||||
only_pub: true
|
||||
)!
|
||||
|
||||
// first function of each code file is private so should skip those
|
||||
assert code.len == 15
|
||||
assert code[0..5] == testcode['anotherfile.v'][1..6]
|
||||
assert code[5..10] == testcode['subfile.v'][1..6]
|
||||
assert code[10..15] == testcode['file.v'][1..6]
|
||||
}
|
||||
108
lib/core/installers/redis.v
Normal file
@@ -0,0 +1,108 @@
|
||||
module redis
|
||||
|
||||
import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.core.rootpath
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import time
|
||||
import os
|
||||
|
||||
@[params]
|
||||
pub struct InstallArgs {
|
||||
pub mut:
|
||||
port int = 6379
|
||||
datadir string = '${rootpath.vardir()}/redis'
|
||||
ipaddr string = 'localhost' // can be more than 1, space separated
|
||||
reset bool
|
||||
start bool
|
||||
restart bool // do not set to true
|
||||
}
|
||||
|
||||
// ```
|
||||
// struct InstallArgs {
|
||||
// port int = 6379
|
||||
// datadir string
|
||||
// ipaddr string = "localhost"
|
||||
// reset bool
|
||||
// start bool
|
||||
// restart bool = true
|
||||
// }
|
||||
// ```
|
||||
pub fn install(args_ InstallArgs) ! {
|
||||
mut args := args_
|
||||
|
||||
if !args.reset {
|
||||
if check(args) {
|
||||
return
|
||||
}
|
||||
}
|
||||
console.print_header('install redis.')
|
||||
|
||||
if !(osal.cmd_exists_profile('redis-server')) {
|
||||
if osal.is_linux() {
|
||||
osal.package_install('redis-server')!
|
||||
} else {
|
||||
osal.package_install('redis')!
|
||||
}
|
||||
}
|
||||
osal.execute_silent('mkdir -p ${args.datadir}')!
|
||||
|
||||
if args.restart {
|
||||
stop()!
|
||||
}
|
||||
start(args)!
|
||||
}
|
||||
|
||||
fn configfilepath(args InstallArgs) string {
|
||||
if osal.is_linux() {
|
||||
return '/etc/redis/redis.conf'
|
||||
} else {
|
||||
return '${args.datadir}/redis.conf'
|
||||
}
|
||||
}
|
||||
|
||||
fn configure(args InstallArgs) ! {
|
||||
c := $tmpl('template/redis_config.conf')
|
||||
pathlib.template_write(c, configfilepath(args), true)!
|
||||
}
|
||||
|
||||
pub fn check(args InstallArgs) bool {
|
||||
res := os.execute('redis-cli -c -p ${args.port} ping > /dev/null 2>&1')
|
||||
if res.exit_code == 0 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
pub fn start(args InstallArgs) ! {
|
||||
if check(args) {
|
||||
return
|
||||
}
|
||||
|
||||
configure(args)!
|
||||
// remove all redis in memory
|
||||
osal.process_kill_recursive(name: 'redis-server')!
|
||||
|
||||
if osal.platform() == .osx {
|
||||
osal.exec(cmd: 'redis-server ${configfilepath(args)} --daemonize yes')!
|
||||
// osal.exec(cmd:"brew services start redis") or {
|
||||
// osal.exec(cmd:"redis-server ${configfilepath()} --daemonize yes")!
|
||||
// }
|
||||
} else {
|
||||
mut sm := startupmanager.get()!
|
||||
sm.new(name: 'redis', cmd: 'redis-server ${configfilepath(args)}', start: true)!
|
||||
}
|
||||
|
||||
for _ in 0 .. 100 {
|
||||
if check(args) {
|
||||
console.print_debug('redis started.')
|
||||
return
|
||||
}
|
||||
time.sleep(100 * time.millisecond) // give redis time to come up
|
||||
}
|
||||
return error("Redis did not install propertly could not do:'redis-cli -c ping'")
|
||||
}
|
||||
|
||||
pub fn stop() ! {
|
||||
osal.execute_silent('redis-cli shutdown')!
|
||||
}
|
||||
2320
lib/core/installers/template/redis_config.conf
Normal file
File diff suppressed because it is too large
95
lib/core/playbook/action.v
Normal file
@@ -0,0 +1,95 @@
|
||||
module playbook
|
||||
|
||||
import crypto.blake2b
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
// import freeflowuniverse.herolib.core.smartid
|
||||
|
||||
pub struct Action {
|
||||
pub mut:
|
||||
id int
|
||||
cid string
|
||||
name string
|
||||
actor string
|
||||
priority int = 10 // 0 is highest, do 10 as default
|
||||
params paramsparser.Params
|
||||
result paramsparser.Params // can be used to remember outputs
|
||||
// run bool = true // certain actions can be defined but meant to be executed directly
|
||||
actiontype ActionType = .sal
|
||||
comments string
|
||||
done bool // if done then no longer need to process
|
||||
}
|
||||
|
||||
pub enum ActionType {
|
||||
unknown
|
||||
dal
|
||||
sal
|
||||
wal
|
||||
macro
|
||||
}
|
||||
|
||||
pub fn (action Action) str() string {
|
||||
mut out := action.heroscript()
|
||||
if !action.result.empty() {
|
||||
out += '\n\nResult:\n'
|
||||
out += texttools.indent(action.result.heroscript(), ' ')
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// serialize to heroscript
|
||||
pub fn (action Action) heroscript() string {
|
||||
mut out := ''
|
||||
if action.comments.len > 0 {
|
||||
out += texttools.indent(action.comments, '// ')
|
||||
}
|
||||
if action.actiontype == .sal {
|
||||
out += '!!'
|
||||
} else if action.actiontype == .macro {
|
||||
out += '!!!'
|
||||
} else {
|
||||
panic('only action sal and macro supported for now,\n${action}')
|
||||
}
|
||||
|
||||
if action.actor != '' {
|
||||
out += '${action.actor}.'
|
||||
}
|
||||
out += '${action.name} '
|
||||
if action.id > 0 {
|
||||
out += 'id:${action.id} '
|
||||
}
|
||||
if !action.params.empty() {
|
||||
heroscript := action.params.heroscript()
|
||||
heroscript_lines := heroscript.split_into_lines()
|
||||
out += heroscript_lines[0] + '\n'
|
||||
for line in heroscript_lines[1..] {
|
||||
out += ' ' + line + '\n'
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// return list of names .
|
||||
// the names are normalized (no special chars, lowercase, ... )
|
||||
pub fn (action Action) names() []string {
|
||||
mut names := []string{}
|
||||
for name in action.name.split('.') {
|
||||
names << texttools.name_fix(name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
pub enum ActionState {
|
||||
init // first state
|
||||
next // will continue with next steps
|
||||
restart
|
||||
error
|
||||
done // means we don't process the next ones
|
||||
}
|
||||
|
||||
// get hash from the action, should always be the same for the same action
|
||||
pub fn (action Action) hashkey() string {
|
||||
txt := action.heroscript()
|
||||
bs := blake2b.sum160(txt.bytes())
|
||||
return bs.hex()
|
||||
}
|
||||
57
lib/core/playbook/factory.v
Normal file
@@ -0,0 +1,57 @@
|
||||
module playbook
|
||||
|
||||
import freeflowuniverse.herolib.core.base
|
||||
|
||||
@[params]
|
||||
pub struct PlayBookNewArgs {
|
||||
pub mut:
|
||||
path string
|
||||
text string
|
||||
git_url string
|
||||
git_pull bool
|
||||
git_branch string
|
||||
git_reset bool
|
||||
prio int = 50
|
||||
priorities map[int]string // filter and give priority, see filtersort method to know how to use
|
||||
session ?&base.Session
|
||||
}
|
||||
|
||||
// get a new playbook, can scan a directory or just add text
|
||||
// ```
|
||||
// path string
|
||||
// text string
|
||||
// git_url string
|
||||
// git_pull bool
|
||||
// git_branch string
|
||||
// git_reset bool
|
||||
// session &base.Session
|
||||
// ```
|
||||
pub fn new(args_ PlayBookNewArgs) !PlayBook {
|
||||
mut args := args_
|
||||
|
||||
mut c := base.context()!
|
||||
|
||||
mut s := c.session_new()!
|
||||
|
||||
mut plbook := PlayBook{
|
||||
session: &s
|
||||
}
|
||||
if args.path.len > 0 || args.text.len > 0 || args.git_url.len > 0 {
|
||||
plbook.add(
|
||||
path: args.path
|
||||
text: args.text
|
||||
git_url: args.git_url
|
||||
git_pull: args.git_pull
|
||||
git_branch: args.git_branch
|
||||
git_reset: args.git_reset
|
||||
prio: args.prio
|
||||
session: args.session
|
||||
)!
|
||||
}
|
||||
|
||||
if args.priorities.len > 0 {
|
||||
plbook.filtersort(priorities: args.priorities)!
|
||||
}
|
||||
|
||||
return plbook
|
||||
}
|
||||
83
lib/core/playbook/filter1_test.v
Normal file
@@ -0,0 +1,83 @@
|
||||
module playbook
|
||||
|
||||
const text3 = "
|
||||
//select the circle, can come from context as has been set before
|
||||
//
|
||||
//now every person added will be added in this circle
|
||||
//
|
||||
!!select_actor people
|
||||
!!select_circle aaa
|
||||
|
||||
//delete everything as found in current circle
|
||||
!!person.delete cid:1g
|
||||
|
||||
!!person.define
|
||||
//is optional will be filled in automatically, but maybe we want to update
|
||||
cid: '1gt'
|
||||
//name as selected in this group, can be used to find someone back
|
||||
name: fatayera
|
||||
firstname: 'Adnan'
|
||||
lastname: 'Fatayerji'
|
||||
description: 'Head of Business Development'
|
||||
email: 'adnan@threefold.io,fatayera@threefold.io'
|
||||
|
||||
!!circle_link
|
||||
//can define as cid or as name, name needs to be in same circle
|
||||
person: '1gt'
|
||||
//can define as cid or as name
|
||||
circle:tftech
|
||||
role:'stakeholder'
|
||||
description:''
|
||||
//is the name as given to the link
|
||||
name:'vpsales'
|
||||
|
||||
!!people.circle_comment cid:'1g'
|
||||
comment:'
|
||||
this is a comment
|
||||
can be multiline
|
||||
'
|
||||
|
||||
!!circle.comment cid:'1g'
|
||||
comment:
|
||||
another comment
|
||||
|
||||
!!digital_payment.add
|
||||
person:fatayera
|
||||
name: 'TF Wallet'
|
||||
blockchain: 'stellar'
|
||||
account: ''
|
||||
description: 'TF Wallet for TFT'
|
||||
preferred: false
|
||||
|
||||
!!test.myaction
|
||||
key: value
|
||||
|
||||
!!person.define
|
||||
cid: 'eg'
|
||||
name: despiegk //this is a remark
|
||||
|
||||
"
|
||||
|
||||
// test filter with only two names in filter
|
||||
fn test_filter1() ! {
|
||||
mut plbook := new(
|
||||
text: text3
|
||||
)!
|
||||
|
||||
assert plbook.actions.len == 10
|
||||
|
||||
assert plbook.hashkey() == '6936aafcd18b2d839e6b5c5f20b8817243c237da'
|
||||
|
||||
plbook.filtersort(
|
||||
priorities: {
|
||||
2: 'digital_payment:*'
|
||||
}
|
||||
)!
|
||||
assert plbook.priorities[2].len == 1
|
||||
|
||||
mut asorted := plbook.actions_sorted()!
|
||||
|
||||
assert asorted.map('${it.actor}:${it.name}') == ['digital_payment:add', 'core:select_actor',
|
||||
'core:select_circle', 'person:delete', 'person:define', 'core:circle_link',
|
||||
'people:circle_comment', 'circle:comment', 'test:myaction', 'person:define']
|
||||
}
|
||||
189
lib/core/playbook/filter_sort.v
Normal file
@@ -0,0 +1,189 @@
|
||||
module playbook
|
||||
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
@[params]
|
||||
pub struct FilterSortArgs {
|
||||
pub:
|
||||
priorities map[int]string // filter and give priority
|
||||
}
|
||||
|
||||
// filter parser based on the criteria
|
||||
//```
|
||||
// string for filter is $actor:$action, ... name and globs are possible (*,?)
|
||||
//
|
||||
// struct FilterSortArgs {
|
||||
// priorities map[int]string //filter and give priority
|
||||
//```
|
||||
// the action_names or actor_names can be a glob in match_glob .
|
||||
// see https://modules.vlang.io/index.html#string.match_glob .
|
||||
// the highest priority will always be chosen . (it can be a match happens 2x)
|
||||
// return []Action (will only return actions which were filtered, i.e. included in the filter-sort args)
|
||||
pub fn (mut plbook PlayBook) filtersort(args FilterSortArgs) ![]&Action {
|
||||
mut nrs := args.priorities.keys()
|
||||
nrs.sort()
|
||||
plbook.priorities = map[int][]int{} // reset the prio's
|
||||
for prio in nrs {
|
||||
if prio > 49 {
|
||||
return error('prio cannot be higher than 49')
|
||||
}
|
||||
argsfilter := args.priorities[prio] or { panic('bug') }
|
||||
mut actionsfound := plbook.find(filter: argsfilter)!
|
||||
// console.print_header('- ${prio}:(${actionsfound.len})\n${argsfilter}')
|
||||
for mut actionfiltered in actionsfound {
|
||||
if actionfiltered.id in plbook.done {
|
||||
continue
|
||||
}
|
||||
actionfiltered.priority = prio
|
||||
if prio !in plbook.priorities {
|
||||
plbook.priorities[prio] = []int{}
|
||||
}
|
||||
if actionfiltered.id !in plbook.done {
|
||||
plbook.priorities[prio] << actionfiltered.id
|
||||
plbook.done << actionfiltered.id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// the remainder now needs to be put on prio 50
|
||||
for mut action in plbook.actions {
|
||||
if 50 !in plbook.priorities {
|
||||
plbook.priorities[50] = []int{}
|
||||
}
|
||||
if action.id !in plbook.done {
|
||||
plbook.priorities[50] << action.id
|
||||
plbook.done << action.id
|
||||
}
|
||||
}
|
||||
|
||||
return plbook.actions_sorted()
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct FindArgs {
|
||||
pub:
|
||||
filter string
|
||||
include_done bool
|
||||
}
|
||||
|
||||
// filter is of form $actor.$action, ... name and globs are possible (*,?) .
|
||||
// comma separated, actor and name needs to be specified, if more than one use * glob .
|
||||
// e.g. find("core.person_select,myactor.*,green*.*")
|
||||
pub fn (mut plbook PlayBook) find(args FindArgs) ![]&Action {
|
||||
filter := args.filter.replace(':', '.').trim_space()
|
||||
mut res := []&Action{}
|
||||
mut items := []string{}
|
||||
if filter.contains(',') {
|
||||
items = filter.split(',').map(it.trim_space())
|
||||
} else {
|
||||
items << filter.trim_space()
|
||||
}
|
||||
for action in plbook.actions {
|
||||
// console.print_debug("${action.actor}:${action.name}:${action.id}")
|
||||
if action.match_items(items) {
|
||||
// console.print_debug(" OK")
|
||||
if !args.include_done && action.done {
|
||||
continue
|
||||
}
|
||||
res << action
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
pub fn (mut plbook PlayBook) exists_once(args FindArgs) bool {
|
||||
mut res := plbook.find(args) or { [] }
|
||||
return res.len == 1
|
||||
}
|
||||
|
||||
pub fn (mut plbook PlayBook) find_one(args FindArgs) !&Action {
|
||||
mut res := plbook.find(args)!
|
||||
if res.len == 0 {
|
||||
return error("can't find action: '${args.filter}'")
|
||||
} else if res.len > 1 {
|
||||
return error("found more than one action: '${args.filter}'")
|
||||
}
|
||||
return res[0] or { panic('bug') }
|
||||
}
|
||||
|
||||
pub fn (mut plbook PlayBook) find_max_one(args FindArgs) ![]&Action {
|
||||
mut res := plbook.find(args)!
|
||||
if res.len > 1 {
|
||||
return error("found more than one action: '${args.filter}'")
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
fn (action Action) match_items(items []string) bool {
|
||||
for p in items {
|
||||
mut actor := ''
|
||||
mut name := ''
|
||||
if p.contains('.') {
|
||||
actor = p.all_before('.').trim_space()
|
||||
name = p.all_after_last('.').trim_space()
|
||||
} else {
|
||||
name = p.trim_space()
|
||||
actor = 'core'
|
||||
}
|
||||
// console.print_header('- checkmatch:${actor}:${name}")
|
||||
if action.checkmatch(actor: actor, name: name) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct MatchFilter {
|
||||
pub mut:
|
||||
actor string
|
||||
name string
|
||||
cid string
|
||||
}
|
||||
|
||||
// check if the action matches following the filter args .
|
||||
// the action_names or actor_names can be a glob in match_glob .
|
||||
// see https://modules.vlang.io/index.html#string.match_glob
|
||||
fn (action Action) checkmatch(args MatchFilter) bool {
|
||||
if args.cid.len > 0 {
|
||||
if args.cid != action.cid {
|
||||
return false
|
||||
}
|
||||
}
|
||||
if args.actor.len > 0 {
|
||||
if args.actor.contains('*') || args.actor.contains('?') || args.actor.contains('[') {
|
||||
if !action.actor.match_glob(args.actor) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
if action.actor != args.actor.to_lower().trim_space() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
if args.name.len > 0 {
|
||||
if args.name.contains('*') || args.name.contains('?') || args.name.contains('[') {
|
||||
if !action.name.match_glob(args.name) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
if action.name != args.name.to_lower().trim_space() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// find all relevant parser, return the params out of one .
|
||||
// filter is of form $actor.$action, ... name and globs are possible (*,?) .
|
||||
// comma separated, actor and name needs to be specified, if more than one use * glob .
|
||||
// e.g. find("core.person_select,myactor.*,green*.*")
|
||||
pub fn (mut plbook PlayBook) params_get(filter string) !paramsparser.Params {
|
||||
mut paramsresult := paramsparser.new('')!
|
||||
for action in plbook.find(filter: filter)! {
|
||||
paramsresult.merge(action.params)!
|
||||
}
|
||||
return paramsresult
|
||||
}
|
||||
148
lib/core/playbook/filter_test.v
Normal file
@@ -0,0 +1,148 @@
|
||||
module playbook
|
||||
|
||||
const text2 = "
|
||||
//select the circle, can come from context as has been set before
|
||||
//now every person added will be added in this circle
|
||||
!!select_actor people
|
||||
!!select_circle aaa
|
||||
|
||||
//delete everything as found in current circle
|
||||
!!person_delete cid:1g
|
||||
|
||||
!!person_define
|
||||
//is optional will be filled in automatically, but maybe we want to update
|
||||
cid: '1gt'
|
||||
//name as selected in this group, can be used to find someone back
|
||||
name: fatayera
|
||||
firstname: 'Adnan'
|
||||
lastname: 'Fatayerji'
|
||||
description: 'Head of Business Development'
|
||||
email: 'adnan@threefold.io,fatayera@threefold.io'
|
||||
|
||||
!!circle_link
|
||||
//can define as cid or as name, name needs to be in same circle
|
||||
person: '1gt'
|
||||
//can define as cid or as name
|
||||
circle:tftech
|
||||
role:'stakeholder'
|
||||
description:''
|
||||
//is the name as given to the link
|
||||
name:'vpsales'
|
||||
|
||||
!!people.circle_comment cid:'1g'
|
||||
comment:
|
||||
this is a comment
|
||||
can be multiline
|
||||
|
||||
!!circle_comment cid:'1g'
|
||||
comment:
|
||||
another comment
|
||||
|
||||
!!digital_payment_add
|
||||
person:fatayera
|
||||
name: 'TF Wallet'
|
||||
blockchain: 'stellar'
|
||||
account: ''
|
||||
description: 'TF Wallet for TFT'
|
||||
preferred: false
|
||||
|
||||
!!select_actor test
|
||||
|
||||
!!test_action
|
||||
key: value
|
||||
|
||||
!!select_circle bbb
|
||||
!!select_actor people
|
||||
|
||||
!!person_define
|
||||
cid: 'eg'
|
||||
name: despiegk //this is a remark
|
||||
|
||||
"
|
||||
|
||||
// QUESTION: how to better organize these tests
|
||||
// ANSWER: split them up, this test is testing too much, tests should be easy to read and easy to modify
|
||||
// TODO: FIX THE TESTS, THEY ARE BROKEN NOW
|
||||
|
||||
fn test_filter_on_circle_aaa() ! {
|
||||
// test filter circle:aaa
|
||||
mut parser := new(text: text2)!
|
||||
assert parser.actions.len == 13
|
||||
}
|
||||
|
||||
// test filter with names:[*]
|
||||
fn test_filter_with_names_asterix() ! {
|
||||
mut parser := new(text: text2)!
|
||||
assert parser.actions.len == 13
|
||||
assert parser.actions.map(it.name) == ['select_actor', 'select_circle', 'person_delete',
|
||||
'person_define', 'circle_link', 'circle_comment', 'circle_comment', 'digital_payment_add',
|
||||
'select_actor', 'test_action', 'select_circle', 'select_actor', 'person_define']
|
||||
|
||||
sorted := parser.find(filter: '*.*')!
|
||||
assert sorted.len == 13
|
||||
assert sorted.map(it.name) == ['select_actor', 'select_circle', 'person_delete', 'person_define',
|
||||
'circle_link', 'circle_comment', 'circle_comment', 'digital_payment_add', 'select_actor',
|
||||
'test_action', 'select_circle', 'select_actor', 'person_define']
|
||||
}
|
||||
|
||||
// test filtering with names_filter with one empty string
|
||||
fn test_filter_with_names_list_with_empty_string() ! {
|
||||
// QUESTION: should this return empty list?
|
||||
// ANSWER: I think yes as you technically want the parser where the name is an empty string
|
||||
|
||||
// NOTE: empty name does not filter by name, it's simply ignored
|
||||
mut parser := new(
|
||||
text: text2
|
||||
)!
|
||||
|
||||
assert parser.actions.len == 13
|
||||
assert parser.actions.map(it.name) == ['select_actor', 'select_circle', 'person_delete',
|
||||
'person_define', 'circle_link', 'circle_comment', 'circle_comment', 'digital_payment_add',
|
||||
'select_actor', 'test_action', 'select_circle', 'select_actor', 'person_define']
|
||||
|
||||
filtered := parser.find(filter: '*.')!
|
||||
assert filtered.len == 13
|
||||
assert filtered.map(it.name) == ['select_actor', 'select_circle', 'person_delete',
|
||||
'person_define', 'circle_link', 'circle_comment', 'circle_comment', 'digital_payment_add',
|
||||
'select_actor', 'test_action', 'select_circle', 'select_actor', 'person_define']
|
||||
}
|
||||
|
||||
// test filter with names in same order as parser
|
||||
fn test_filter_with_names_in_same_order() ! {
|
||||
mut parser := new(
|
||||
text: text2
|
||||
)!
|
||||
|
||||
sorted := parser.find(filter: 'person_define,circle_link,circle_comment,digital_payment_add')!
|
||||
assert sorted.len == 5
|
||||
assert sorted.map(it.name) == ['person_define', 'circle_link', 'circle_comment',
|
||||
'digital_payment_add', 'person_define']
|
||||
}
|
||||
|
||||
// test filter with names in different order than parser
|
||||
fn test_filter_with_names_in_different_order() ! {
|
||||
mut parser := new(
|
||||
text: text2
|
||||
)!
|
||||
|
||||
sorted := parser.find(
|
||||
filter: 'people.circle_comment,person_define,digital_payment_add,person_delete,circle_link'
|
||||
)!
|
||||
|
||||
assert sorted.len == 6
|
||||
assert sorted.map(it.name) == ['person_delete', 'person_define', 'circle_link', 'circle_comment',
|
||||
'digital_payment_add', 'person_define']
|
||||
}
|
||||
|
||||
// test filter with only two names in filter
|
||||
fn test_filter_with_only_two_names_in_filter() ! {
|
||||
// QUESTION: if we only have one name, is it just that action?
|
||||
// ANSWER: yes
|
||||
mut parser := new(
|
||||
text: text2
|
||||
)!
|
||||
|
||||
sorted := parser.find(filter: 'person_define,person_delete')!
|
||||
assert sorted.len == 3
|
||||
assert sorted.map(it.name) == ['person_delete', 'person_define', 'person_define']
|
||||
}
|
||||
56 lib/core/playbook/parser_test.v Normal file
@@ -0,0 +1,56 @@
|
||||
module playbook
|
||||
|
||||
const text1 = "
|
||||
//comment for the action
|
||||
!!payment.add person:fatayera
|
||||
//comment for name
|
||||
name: 'TF Wallet'
|
||||
blockchain: 'stellar' //holochain maybe?
|
||||
account: 'something'
|
||||
description: 'TF Wallet for TFT'
|
||||
preferred: false
|
||||
"
|
||||
|
||||
fn test_parse_1() {
|
||||
mut a := new(text: text1) or { panic(err) }
|
||||
|
||||
assert a.actions.len == 1
|
||||
mut s := a.actions_sorted()!
|
||||
assert s.len == 1
|
||||
// mut sorted := a.actions_sorted(prio_only: true)!
|
||||
// assert sorted.len == 0
|
||||
|
||||
mut myaction := s[0] or { panic('bug') }
|
||||
|
||||
assert myaction.comments == 'comment for the action'
|
||||
assert myaction.params.params.len == 6
|
||||
assert myaction.id == 1
|
||||
|
||||
assert a.hashkey() == '95c585c8bf01b4c432cb7096dc7c974fc1a14b5a'
|
||||
c := a.heroscript()!
|
||||
b := new(text: c) or { panic(err) }
|
||||
|
||||
assert b.hashkey() == '95c585c8bf01b4c432cb7096dc7c974fc1a14b5a'
|
||||
}
|
||||
|
||||
fn test_parser() {
|
||||
mut pb := new(text: text1) or { panic(err) }
|
||||
mut a := pb.actions[0]
|
||||
assert a.actor == 'payment'
|
||||
assert a.name == 'add'
|
||||
assert a.params.get('name')! == 'TF Wallet'
|
||||
assert a.params.get('blockchain')! == 'stellar'
|
||||
assert a.params.get('account')! == 'something'
|
||||
assert a.params.get('description')! == 'TF Wallet for TFT'
|
||||
assert a.params.get_default_false('preferred') == false
|
||||
}
|
||||
|
||||
fn test_parser2() {
|
||||
mut pb := new(
|
||||
text: "!!play.run url:'https://git.ourworld.tf/despiegk/cfg/src/branch/main/myit/hetzner.md'"
|
||||
) or { panic(err) }
|
||||
mut a := pb.actions[0]
|
||||
assert a.actor == 'play'
|
||||
assert a.name == 'run'
|
||||
assert a.params.get('url')! == 'https://git.ourworld.tf/despiegk/cfg/src/branch/main/myit/hetzner.md'
|
||||
}
|
||||
202 lib/core/playbook/playbook.v Normal file
@@ -0,0 +1,202 @@
|
||||
module playbook
|
||||
|
||||
import freeflowuniverse.herolib.core.base
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
import crypto.blake2b
|
||||
|
||||
@[heap]
|
||||
pub struct PlayBook {
|
||||
pub mut:
|
||||
actions []&Action
|
||||
priorities map[int][]int // first key is the priority, the list of int's is position in list self.actions
|
||||
othertext string // in case there is text outside of the actions
|
||||
result string // if any result
|
||||
nractions int
|
||||
done []int // which actions did we already find/run?
|
||||
session &base.Session
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct ActionNewArgs {
|
||||
pub mut:
|
||||
cid string
|
||||
name string
|
||||
actor string
|
||||
priority int = 10 // 0 is highest, do 10 as default
|
||||
// run bool = true // certain actions can be defined but meant to be executed directly
|
||||
actiontype ActionType
|
||||
}
|
||||
|
||||
// add action to the book
|
||||
fn (mut plbook PlayBook) action_new(args ActionNewArgs) &Action {
|
||||
plbook.nractions += 1
|
||||
mut a := Action{
|
||||
id: plbook.nractions
|
||||
cid: args.cid
|
||||
name: args.name
|
||||
actor: args.actor
|
||||
priority: args.priority
|
||||
// run: args.run
|
||||
actiontype: args.actiontype
|
||||
params: paramsparser.Params{}
|
||||
result: paramsparser.Params{}
|
||||
}
|
||||
plbook.actions << &a
|
||||
return &a
|
||||
}
|
||||
|
||||
pub fn (mut plbook PlayBook) str() string {
|
||||
return plbook.heroscript() or { 'Cannot visualize playbook properly.\n${plbook.actions}' }
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct SortArgs {
|
||||
pub mut:
|
||||
prio_only bool // if true only show the actions which were prioritized before
|
||||
}
|
||||
|
||||
// only return the actions which are not done yet
|
||||
// if filtered is set, it means we only get the ones which were prioritized before
|
||||
pub fn (mut plbook PlayBook) actions_sorted(args SortArgs) ![]&Action {
|
||||
mut res := []&Action{}
|
||||
mut nrs := plbook.priorities.keys()
|
||||
nrs.sort()
|
||||
if nrs.len == 0 {
|
||||
// means sorting did not happen before
|
||||
return plbook.actions
|
||||
}
|
||||
for nr in nrs {
|
||||
if args.prio_only && nr > 49 {
|
||||
continue
|
||||
}
|
||||
action_ids := plbook.priorities[nr] or { panic('bug') }
|
||||
for id in action_ids {
|
||||
mut a := plbook.action_get(id: id)!
|
||||
res << a
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct HeroScriptArgs {
|
||||
pub mut:
|
||||
show_done bool = true
|
||||
}
|
||||
|
||||
// serialize to heroscript
|
||||
pub fn (mut plbook PlayBook) heroscript(args HeroScriptArgs) !string {
|
||||
mut out := ''
|
||||
for action in plbook.actions_sorted()! {
|
||||
if args.show_done == false && action.done {
|
||||
continue
|
||||
}
|
||||
out += '${action.heroscript()}\n'
|
||||
}
|
||||
if plbook.othertext.len > 0 {
|
||||
out += '${plbook.othertext}'
|
||||
}
|
||||
out = texttools.remove_empty_js_blocks(out)
|
||||
return out
|
||||
}
|
||||
|
||||
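The serialization above is what the parser tests rely on: re-parsing the generated heroscript yields the same hashkey. A short sketch, using the same payment action as the tests:

```v
import freeflowuniverse.herolib.core.playbook

mut plbook := playbook.new(text: "!!payment.add name:'TF Wallet' person:fatayera")!

// serialize back to heroscript; re-parsing the output gives the same hashkey
txt := plbook.heroscript()!
mut plbook2 := playbook.new(text: txt)!
assert plbook.hashkey() == plbook2.hashkey()
```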
// return list of names .
|
||||
// the names are normalized (no special chars, lowercase, ... )
|
||||
pub fn (mut plbook PlayBook) names() ![]string {
|
||||
mut names := []string{}
|
||||
for action in plbook.actions_sorted()! {
|
||||
names << action.name
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct ActionGetArgs {
|
||||
pub mut:
|
||||
id int
|
||||
actor string
|
||||
name string
|
||||
actiontype ActionType = .sal
|
||||
}
|
||||
|
||||
// Find all actions based on ActionGetArgs
|
||||
// - If id == 0, then matches all ids; when id is specified, can only return 1.
|
||||
// - If actor == "", then matches all actors.
|
||||
// - If name == "", then matches all actions from the defined actor (if defined).
|
||||
// - If actiontype == .unknown, then matches all action types; when specified, filters by the action type, default .sal
|
||||
pub fn (mut plbook PlayBook) actions_find(args ActionGetArgs) ![]&Action {
|
||||
mut res := []&Action{}
|
||||
for a in plbook.actions {
|
||||
// If id is specified, return only the action with that id
|
||||
if args.id != 0 {
|
||||
if a.id == args.id {
|
||||
return [a]
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Filter by actor if specified
|
||||
if args.actor.len > 0 && a.actor != args.actor {
|
||||
continue
|
||||
}
|
||||
// Filter by name if specified
|
||||
if args.name.len > 0 && a.name != args.name {
|
||||
continue
|
||||
}
|
||||
// Filter by actiontype if specified
|
||||
if args.actiontype != .unknown && a.actiontype != args.actiontype {
|
||||
continue
|
||||
}
|
||||
// If the action passes all filters, add it to the result
|
||||
res << a
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
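A hedged sketch of `actions_find`; the heroscript text is taken from the tests, and passing `.unknown` as actiontype matches all action types:

```v
import freeflowuniverse.herolib.core.playbook

mut plbook := playbook.new(text: "!!payment.add name:'TF Wallet' person:fatayera")!

// find all actions of one actor, regardless of name or type
mut hits := plbook.actions_find(actor: 'payment', actiontype: .unknown)!
for a in hits {
	println('${a.actor}.${a.name}') // payment.add
}
```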
pub fn (mut plbook PlayBook) action_exists(args ActionGetArgs) bool {
|
||||
// Use actions_find to get the filtered actions
|
||||
actions := plbook.actions_find(args) or { return false }
|
||||
if actions.len == 1 {
|
||||
return true
|
||||
} else if actions.len == 0 {
|
||||
return false
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut plbook PlayBook) action_get(args ActionGetArgs) !&Action {
|
||||
// Use actions_find to get the filtered actions
|
||||
actions := plbook.actions_find(args)!
|
||||
if actions.len == 1 {
|
||||
return actions[0]
|
||||
} else if actions.len == 0 {
|
||||
return error("couldn't find action with args: ${args}")
|
||||
} else {
|
||||
return error('multiple actions found with args: ${args}, expected only one')
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (plbook PlayBook) hashkey() string {
|
||||
mut out := []string{}
|
||||
for action in plbook.actions {
|
||||
out << action.hashkey()
|
||||
}
|
||||
txt := out.join_lines()
|
||||
bs := blake2b.sum160(txt.bytes())
|
||||
return bs.hex()
|
||||
}
|
||||
|
||||
// check if the playbook is empty; if not, return an error because there are actions left to be executed
|
||||
pub fn (mut plbook PlayBook) empty_check() ! {
|
||||
mut actions := []&Action{}
|
||||
for a in plbook.actions {
|
||||
if a.done == false {
|
||||
actions << a
|
||||
}
|
||||
}
|
||||
if actions.len > 0 {
|
||||
msg := plbook.heroscript(show_done: false)!
|
||||
return error('There are actions left to execute, see below:\n\n${msg}\n\n')
|
||||
}
|
||||
}
|
||||
174 lib/core/playbook/playbook_add.v Normal file
@@ -0,0 +1,174 @@
|
||||
module playbook
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.data.paramsparser
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
// import freeflowuniverse.herolib.core.base
|
||||
import freeflowuniverse.herolib.develop.gittools
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
enum State {
|
||||
start
|
||||
comment_for_action_maybe
|
||||
action
|
||||
othertext
|
||||
}
|
||||
|
||||
pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
|
||||
mut args := args_
|
||||
|
||||
if args.git_url.len > 0 {
|
||||
mut gs := gittools.get()!
|
||||
mut repo := gs.get_repo(
|
||||
url: args.git_url
|
||||
pull: args.git_pull
|
||||
reset: args.git_reset
|
||||
)!
|
||||
args.path = repo.get_path()!
|
||||
}
|
||||
|
||||
// walk over directory
|
||||
if args.path.len > 0 {
|
||||
// console.print_header("PLBOOK add path:'${args.path}'")
|
||||
mut p := pathlib.get(args.path)
|
||||
if !p.exists() {
|
||||
return error("can't find path:${p.path}")
|
||||
}
|
||||
if p.is_file() {
|
||||
c := p.read()!
|
||||
plbook.add(text: c, prio: args.prio, session: args_.session)!
|
||||
return
|
||||
} else if p.is_dir() {
|
||||
// get .md and .hero files from dir
|
||||
mut ol0 := p.list(recursive: true, regex: [r'.*\.md$'])!
|
||||
mut paths := ol0.paths.clone()
|
||||
mut ol1 := p.list(recursive: true, regex: [r'.*\.hero$'])!
|
||||
paths << ol1.paths
|
||||
|
||||
for mut p2 in paths {
|
||||
c2 := p2.read()!
|
||||
plbook.add(text: c2, prio: args.prio, session: args_.session)!
|
||||
}
|
||||
return
|
||||
}
|
||||
return error("can't process path: ${args.path}, unknown type.")
|
||||
}
|
||||
// console.print_header('PLBOOK add text')
|
||||
// console.print_stdout(args.text)
|
||||
|
||||
args.text = texttools.dedent(args.text)
|
||||
mut state := State.start
|
||||
|
||||
mut action := &Action{}
|
||||
mut comments := []string{}
|
||||
mut paramsdata := []string{}
|
||||
|
||||
for line_ in args.text.split_into_lines() {
|
||||
line := line_.replace('\t', ' ')
|
||||
line_strip := line.trim_space()
|
||||
|
||||
if line_strip.len == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// console.print_header(' state:${state} action:'${action.name}' comments:'${comments.len}' -> '${line}'")
|
||||
|
||||
if state == .action {
|
||||
if !line.starts_with(' ') || line_strip == '' || line_strip.starts_with('!') {
|
||||
state = .start
|
||||
// means we found end of action
|
||||
// console.print_debug("+++${paramsdata.join('\n')}+++")
|
||||
action.params = paramsparser.new(paramsdata.join('\n'))!
|
||||
action.params.delete('id')
|
||||
comments = []string{}
|
||||
paramsdata = []string{}
|
||||
action = &Action{}
|
||||
// console.print_header(' action end')
|
||||
} else {
|
||||
paramsdata << line
|
||||
}
|
||||
}
|
||||
|
||||
if state == .comment_for_action_maybe {
|
||||
if line.starts_with('//') {
|
||||
comments << line_strip.trim_left('/ ')
|
||||
} else {
|
||||
if line_strip.starts_with('!') {
|
||||
// we are at end of comment
|
||||
state = .start
|
||||
} else {
|
||||
state = .start
|
||||
plbook.othertext += comments.join('\n')
|
||||
if !plbook.othertext.ends_with('\n') {
|
||||
plbook.othertext += '\n'
|
||||
}
|
||||
comments = []string{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if state == .start {
|
||||
if line_strip.starts_with('!') && !line_strip.starts_with('![') {
|
||||
// start with new action
|
||||
state = .action
|
||||
action = plbook.action_new(
|
||||
priority: args.prio
|
||||
)
|
||||
action.comments = comments.join('\n')
|
||||
comments = []string{}
|
||||
paramsdata = []string{}
|
||||
mut actionname := line_strip
|
||||
if line_strip.contains(' ') {
|
||||
actionname = line_strip.all_before(' ').trim_space()
|
||||
paramsdata << line_strip.all_after_first(' ').trim_space()
|
||||
}
|
||||
if actionname.starts_with('!!!!!') {
|
||||
return error('there is no action starting with 5 x !')
|
||||
} else if actionname.starts_with('!!!!') {
|
||||
action.actiontype = .wal
|
||||
} else if actionname.starts_with('!!!') {
|
||||
action.actiontype = .macro
|
||||
} else if actionname.starts_with('!!') {
|
||||
action.actiontype = .sal
|
||||
} else if actionname.starts_with('!') {
|
||||
action.actiontype = .dal
|
||||
} else {
|
||||
print_backtrace()
|
||||
panic('bug')
|
||||
}
|
||||
actionname = actionname.trim_left('!')
|
||||
splitted := actionname.split('.')
|
||||
if splitted.len == 1 {
|
||||
action.actor = 'core'
|
||||
action.name = texttools.name_fix(splitted[0])
|
||||
} else if splitted.len == 2 {
|
||||
action.actor = texttools.name_fix(splitted[0])
|
||||
action.name = texttools.name_fix(splitted[1])
|
||||
} else {
|
||||
print_backtrace()
|
||||
return error('for now we only support actions with 1 or 2 parts.\n${actionname}')
|
||||
}
|
||||
// console.print_header(' action new: ${action.actor}:${action.name} params:${paramsdata}')
|
||||
continue
|
||||
} else if line.starts_with('//') {
|
||||
state = .comment_for_action_maybe
|
||||
comments << line_strip.trim_left('/ ')
|
||||
// } else {
|
||||
// plbook.othertext += '${line_strip}\n'
|
||||
}
|
||||
}
|
||||
}
|
||||
// process the last one
|
||||
if state == .action {
|
||||
if action.id != 0 {
|
||||
action.params = paramsparser.new(paramsdata.join('\n'))!
|
||||
action.params.delete('id')
|
||||
}
|
||||
}
|
||||
if state == .comment_for_action_maybe {
|
||||
plbook.othertext += comments.join('\n')
|
||||
}
|
||||
// if state == .start{
|
||||
// plbook.othertext+=line_strip
|
||||
// }
|
||||
}
|
||||
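To illustrate the `!` prefix handling in `add()` above, a small hedged sketch (the `mail.send` action name is made up; the mapping of prefixes to `ActionType` follows the branches in the parser):

```v
import freeflowuniverse.herolib.core.playbook

mut plbook := playbook.new(text: '
!mail.send name:a
!!mail.send name:b
!!!mail.send name:c
!!!!mail.send name:d
')!
assert plbook.actions[0].actiontype == .dal // 1 x !
assert plbook.actions[1].actiontype == .sal // 2 x ! (the usual heroscript form)
assert plbook.actions[2].actiontype == .macro // 3 x !
assert plbook.actions[3].actiontype == .wal // 4 x !
```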
84 lib/core/playbook/playbook_test.v Normal file
@@ -0,0 +1,84 @@
|
||||
module playbook
|
||||
|
||||
import os
|
||||
import crypto.sha256
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
const testpath = os.dir(@FILE) + '/testdata'
|
||||
|
||||
// TODO: fix
|
||||
|
||||
const text1 = "
|
||||
//comment for the action
|
||||
!!payment.add person:fatayera
|
||||
//comment for name
|
||||
name: 'TF Wallet'
|
||||
blockchain: 'stellar' //holochain maybe?
|
||||
account: 'something'
|
||||
description: 'TF Wallet for TFT'
|
||||
preferred: false
|
||||
|
||||
//comment2
|
||||
!!payment.add person:despiegk
|
||||
name: 'TF Wallet2'
|
||||
|
||||
"
|
||||
|
||||
const text2 = "
|
||||
//comment for the action
|
||||
!!payment.add person:fatayera
|
||||
name: 'TF Wallet'
|
||||
|
||||
!!payment.else person:despiegk
|
||||
name: 'TF Wallet2'
|
||||
|
||||
!!actor2.else person:despiegk
|
||||
name: 'TF Wallet2'
|
||||
|
||||
"
|
||||
|
||||
fn test_parse_1() {
|
||||
mut a := new(text: text1) or { panic(err) }
|
||||
|
||||
console.print_debug('${a}')
|
||||
|
||||
console.print_debug("EXPECTED OUTPUT:
|
||||
// comment for the action
|
||||
!!payment.add account:something description:'TF Wallet for TFT' person:fatayera preferred:false
|
||||
name:'TF Wallet' //comment for name
|
||||
blockchain:stellar //holochain maybe?
|
||||
|
||||
// comment2
|
||||
!!payment.add name:'TF Wallet2' person:despiegk
|
||||
")
|
||||
|
||||
assert sha256.hexhash(a.str()) == 'e86eb063d8556c8501f63494a863fc78415112d6990ba6f1d0d5db16ff26e954'
|
||||
}
|
||||
|
||||
fn test_hashkey() {
|
||||
mut a := new(text: text1) or { panic(err) }
|
||||
t := a.hashkey()
|
||||
|
||||
console.print_debug(t)
|
||||
|
||||
assert t == 'a5e85c3a8e4c132bd40c88acc0dcc3d9a2af56c5'
|
||||
}
|
||||
|
||||
fn test_filter() {
|
||||
mut a := new(text: text2) or { panic(err) }
|
||||
|
||||
mut b := a.find(filter: 'payment.*')!
|
||||
assert b.len == 2
|
||||
|
||||
mut c := a.find(filter: 'payment.else')!
|
||||
assert c.len == 1
|
||||
|
||||
mut d := a.find(filter: 'actor2.*')!
|
||||
assert d.len == 1
|
||||
|
||||
mut e := a.find(filter: 'actor2.else')!
|
||||
assert e.len == 1
|
||||
|
||||
mut f := a.find(filter: 'actor2:else2')!
|
||||
assert f.len == 0
|
||||
}
|
||||
129 lib/core/playbook/readme.md Normal file
@@ -0,0 +1,129 @@
# heroscript

heroscript is our small language which allows us to define and run actions

## execute a playbook

the following will load heroscript and execute it

```v
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.core.playcmds

// path string
// text string
// git_url string
// git_pull bool
// git_branch string
// git_reset bool
// session ?&base.Session is optional
mut plbook := playbook.new(path: '....')!

// now we run all the commands as they are pre-defined in herolib
playcmds.run(mut plbook)!
```

## execute a heroscript and make it executable

```bash
#!/usr/bin/env hero

!!play.echo content:'this is just a test'

!!play.echo content:'this is just another test'
```

you can now just execute this script and hero will interpret the content

## actions

actions are text based representations of the work which needs to be executed

example

```js
!!tflibrary.circlesmanager.circle_add
    gitsource:'books'
    path:'technology/src'
    name:technology
```

the first line is the action, the rest are the params

```v
import freeflowuniverse.herolib.core.playbook

mut plbook := playbook.new(text: '....')!
```

## how to use it from a module

```v
import freeflowuniverse.herolib.core.playbook

// !!hr.employee_define
//     descr:'Junior Engineer'
//     growth:'1:5,60:30' cost:'4000USD' indexation:'5%'
//     department:'engineering'

// populate the params for hr
fn (mut m BizModel) hr_actions(mut actions playbook.PlayBook) ! {
	mut actions2 := actions.find(filter: 'hr.*,vm.start')!
	for action in actions2 {
		if action.name == 'employee_define' {
			mut name := action.params.get_default('name', '')!
			mut descr := action.params.get_default('descr', '')!
			// ...
		}
	}
}
```

## we can also use filtersort

```v
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.core.playcmds

mut plbook := playbook.new(path: '....') or { panic(err) }

// filter actions based on the criteria
//
// string for the filter is $actor:$action, ... name and globs are possible (*,?)
//
// struct FilterSortArgs
//     priorities map[int]string // filter and give priority
//
// the action_names or actor_names can be a glob in match_glob .
// see https://modules.vlang.io/index.html#string.match_glob .
// the highest priority will always be chosen (a match can happen 2x) .
// returns []Action
actions := plbook.filtersort({
	5:  'sshagent:*',
	10: 'doctree:*',
	11: 'mdbooks:*',
	12: 'mdbook:*',
})!

// now process the actions if we want to do it ourselves
for a in actions {
	mut p := a.params
	mut repo := p.get_default('repo', '')!
	mut coderoot := ''
	if p.exists('coderoot') {
		coderoot = p.get_path_create('coderoot')!
	}
}
```
48 lib/core/rootpath/README.md Normal file
@@ -0,0 +1,48 @@
# Rootpath Module

The rootpath module provides functionality for managing the Hero environment directory structure and path handling. It ensures consistent access to Hero-specific directories and provides utilities for path manipulation.

## Core Functions

### Directory Management

- `herodir()` - Returns the root directory for the Hero environment (`~/hero`)
- `bindir()` - Returns the binary directory (`~/hero/bin`)
- `vardir()` - Returns the variable directory (`~/hero/var`)
- `cfgdir()` - Returns the configuration directory (`~/hero/cfg`)
- `ensure_hero_dirs()` - Creates all necessary Hero directories if they don't exist

### Path Utilities

- `shell_expansion(s string)` - Expands shell-like path expressions (e.g., `~` or `{HOME}`) to full paths
- `path_ensure(s string)` - Ensures a given path exists by creating it if necessary
- `hero_path(s string)` - Constructs a path underneath the Hero root directory
- `hero_path_ensure(s string)` - Ensures a Hero-specific path exists and returns it

## Usage Example

```vsh
import freeflowuniverse.herolib.core.rootpath

// Get and ensure Hero directories exist
hero_root := rootpath.ensure_hero_dirs()

// Work with Hero-specific paths
ensured_path := rootpath.hero_path_ensure('data/myapp')

// Expand shell paths
full_path := rootpath.shell_expansion('~/hero/custom/path')
```

## Directory Structure

The module manages the following directory structure:

```
~/hero/
├── bin/    # Binary files
├── var/    # Variable data
└── cfg/    # Configuration files
```
72 lib/core/rootpath/rootpath.v Normal file
@@ -0,0 +1,72 @@
|
||||
module rootpath
|
||||
|
||||
import os
|
||||
|
||||
// replace ~ or {HOME} with the home dir in the given string
|
||||
pub fn shell_expansion(s_ string) string {
|
||||
mut s := s_
|
||||
home := os.real_path(os.home_dir())
|
||||
for x in ['{HOME}', '~'] {
|
||||
if s.contains(x) {
|
||||
s = s.replace(x, home)
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// ensure_hero_dirs creates all necessary hero directories
|
||||
pub fn ensure_hero_dirs() string {
|
||||
path_ensure(herodir())
|
||||
path_ensure(bindir())
|
||||
path_ensure(vardir())
|
||||
path_ensure(cfgdir())
|
||||
return herodir()
|
||||
}
|
||||
|
||||
|
||||
// root dir for our hero environment
|
||||
pub fn herodir() string {
|
||||
return shell_expansion('~/hero')
|
||||
}
|
||||
|
||||
// bin dir
|
||||
pub fn bindir() string {
|
||||
return '${herodir()}/bin'
|
||||
}
|
||||
|
||||
// var dir
|
||||
pub fn vardir() string {
|
||||
return '${herodir()}/var'
|
||||
}
|
||||
|
||||
// cfg dir
|
||||
pub fn cfgdir() string {
|
||||
return '${herodir()}/cfg'
|
||||
}
|
||||
|
||||
// path_ensure ensures the given path exists and returns it
|
||||
pub fn path_ensure(s string) string {
|
||||
path := shell_expansion(s)
|
||||
if !os.exists(path) {
|
||||
os.mkdir_all(path) or { panic('cannot create dir ${path}') }
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
|
||||
// get path underneath the hero root directory
|
||||
pub fn hero_path(s string) string {
|
||||
path := shell_expansion(s).trim_left(' /')
|
||||
full_path := '${herodir()}/${path}/'
|
||||
return full_path
|
||||
}
|
||||
|
||||
|
||||
// build the path underneath the hero root dir, ensure it exists and return it
|
||||
pub fn hero_path_ensure(s string) string {
|
||||
path := hero_path(s)
|
||||
if !os.exists(path) {
|
||||
os.mkdir_all(path) or { panic('cannot create dir ${path}') }
|
||||
}
|
||||
return path
|
||||
}
|
||||
118 lib/core/smartid/sid.v Normal file
@@ -0,0 +1,118 @@
|
||||
module smartid
|
||||
|
||||
// import freeflowuniverse.herolib.clients.redisclient
|
||||
import math
|
||||
// import freeflowuniverse.herolib.core.texttools.regext
|
||||
// import rand
|
||||
|
||||
// each part min3 max 6 chars, each char = a...z or 0...9
|
||||
// to create a new one we need to know the circle
|
||||
// pub fn sid_new(cid string) !string {
|
||||
// mut redis := redisclient.core_get()!
|
||||
// key := 'circle:sid:${cid}'
|
||||
// mut sidlast := redis.get(key)! // is the last sid
|
||||
// if sidlast == '' {
|
||||
// redis.set(key, '10')!
|
||||
// sidlast = redis.get(key)! // need to make sure we reserve the first 10 ones
|
||||
// }
|
||||
// sidlasti := sidlast.u32() + 1 // is a new one
|
||||
// redis.set(key, '${sidlasti}')!
|
||||
// return sid_str(sidlasti)
|
||||
// }
|
||||
|
||||
// // make sure redis knows about it, will return true if its not known in redis yet
|
||||
// fn sid_acknowledge(cid string, sid string) !bool {
|
||||
// mut redis := redisclient.core_get()!
|
||||
// key := 'circle:sid:${cid}'
|
||||
// sidlast := redis.get(key)! // is the last sid
|
||||
// sidlasti := sidlast.u32()
|
||||
// sidnewi := sid_int(sid)
|
||||
// if sidnewi > sidlasti {
|
||||
// redis.set(key, '${sidnewi}')!
|
||||
// return true
|
||||
// }
|
||||
// return false
|
||||
// }
|
||||
|
||||
// set the sids in redis, so we remember them all, and we know which one is the latest
|
||||
// this is for all sids as found in text
|
||||
// fn sids_acknowledge(cid string, text string) ! {
|
||||
// res := regext.find_sid(text)
|
||||
// for sid in res {
|
||||
// sid_acknowledge(cid, sid)!
|
||||
// }
|
||||
// }
|
||||
|
||||
// // make sure that we don't use an existing one
|
||||
// pub fn sid_new_unique(existing []string) !string {
|
||||
// idint := rand.u32_in_range(1, 42800) or { panic(err) }
|
||||
// idstr := smartid_string(idint)
|
||||
// if idstr !in existing {
|
||||
// return idstr
|
||||
// }
|
||||
// return error('Could not find unique smartid, run out of tries')
|
||||
// }
|
||||
|
||||
// convert sid to int
|
||||
pub fn sid_int(sid string) u32 {
|
||||
mut result := 0
|
||||
mut count := sid.len - 1
|
||||
for i in sid {
|
||||
if i > 47 && i < 58 {
|
||||
result += (i - 48) * int(math.pow(36, count))
|
||||
} else if i > 96 && i < 123 {
|
||||
result += (i - 87) * int(math.pow(36, count))
|
||||
}
|
||||
count -= 1
|
||||
}
|
||||
return u32(result)
|
||||
}
|
||||
|
||||
// represent sid as string, from u32
|
||||
pub fn sid_str(sid u32) string {
|
||||
mut completed := false
|
||||
mut remaining := int(sid)
|
||||
mut decimals := []f64{}
|
||||
mut count := 1
|
||||
for completed == false {
|
||||
if int(math.pow(36, count)) > sid {
|
||||
for i in 0 .. count {
|
||||
decimals << math.floor(f64(remaining / int(math.pow(36, count - 1 - i))))
|
||||
remaining = remaining % int(math.pow(36, count - 1 - i))
|
||||
}
|
||||
completed = true
|
||||
} else {
|
||||
count += 1
|
||||
}
|
||||
}
|
||||
mut strings := []string{}
|
||||
for i in 0 .. (decimals.len) {
|
||||
if decimals[i] >= 0 && decimals[i] <= 9 {
|
||||
strings << u8(decimals[i] + 48).ascii_str()
|
||||
} else {
|
||||
strings << u8(decimals[i] + 87).ascii_str()
|
||||
}
|
||||
}
|
||||
return strings.join('')
|
||||
}
|
||||
|
||||
// check the sid format: 2 to 6 chars, each char a...z or 0...9
// returns false if the format is not valid
pub fn sid_check(sid string) bool {
|
||||
if sid.len > 6 || sid.len < 2 {
|
||||
return false
|
||||
}
|
||||
for cha in sid {
|
||||
if (cha < 48 || cha > 57) && (cha < 97 || cha > 122) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// raise error if smartid not valid
|
||||
pub fn sid_test(sid string) ! {
|
||||
if !sid_check(sid) {
|
||||
return error('sid:${sid} is not valid.')
|
||||
}
|
||||
}
|
||||
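A small hedged sketch of the conversion helpers above (base-36 style encoding: digits 0-9 then a-z):

```v
import freeflowuniverse.herolib.core.smartid

// '2s' -> 2*36 + 28 = 100, and back again
assert smartid.sid_int('2s') == 100
assert smartid.sid_str(100) == '2s'
assert smartid.sid_check('ab3') // 2..6 chars, a-z or 0-9
smartid.sid_test('ABC') or { println(err) } // uppercase is not a valid sid
```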
70 lib/data/hjson/README.md Normal file
@@ -0,0 +1,70 @@
# HJSON Module

A V module for handling JSON data with additional utility functions for filtering, extracting, and manipulating JSON structures.

## Features

- JSON list splitting
- JSON dictionary filtering and extraction
- Clean ASCII handling option
- Support for both string and Any type outputs

## Main Functions

### `json_list(r string, clean bool) []string`
Splits a list of dictionaries into text blocks. Useful for processing large JSON arrays of objects.

### `json_dict_get_any(r string, clean bool, key string) !json2.Any`
Extracts a value from a JSON dictionary by key, returning it as `json2.Any`.

### `json_dict_get_string(r string, clean bool, key string) !string`
Similar to `json_dict_get_any` but returns the result as a string.

### `json_dict_filter_any(r string, clean bool, include []string, exclude []string) !map[string]json2.Any`
Filters a JSON dictionary based on included and excluded keys.

### `json_dict_filter_string(r string, clean bool, include []string, exclude []string) !map[string]string`
Similar to `json_dict_filter_any` but returns a map of strings.

### `json_list_dict_get_any(r string, clean bool, key string) ![]json2.Any`
Processes a list of dictionaries and extracts values for a specific key from each dictionary.

### `json_list_dict_get_string(r string, clean bool, key string) ![]string`
Similar to `json_list_dict_get_any` but returns an array of strings.

## Usage Examples

```v
// Get a value from a JSON dictionary
json_str := '{"name": "John", "age": 30}'
name := json_dict_get_string(json_str, true, 'name')!
println(name) // Output: "John"

// Filter a JSON dictionary
json_str2 := '{"name": "John", "age": 30, "city": "New York"}'
include := ['name', 'age']
exclude := []string{}
filtered := json_dict_filter_string(json_str2, true, include, exclude)!
println(filtered) // Output: {"name": "John", "age": 30}

// Process a list of dictionaries
json_arr := '[{"user": {"name": "John"}}, {"user": {"name": "Jane"}}]'
names := json_list_dict_get_string(json_arr, true, 'user')!
println(names) // Output: [{"name": "John"}, {"name": "Jane"}]
```

## Parameters

- `r string`: The input JSON string to process
- `clean bool`: When true, cleans the input string to ensure ASCII compatibility
- `key string`: The key to search for in JSON dictionaries
- `include []string`: List of keys to include in filtered output
- `exclude []string`: List of keys to exclude from filtered output

## Error Handling

All functions that can fail return a Result type (`!`). Common error cases include:
- Empty input strings
- Invalid JSON format
- Missing keys
- Invalid data types
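A hedged sketch of handling these error cases with V's `or` blocks (note the source file declares `module crystaljson` while living under `lib/data/hjson`, so this is shown as in-module usage):

```v
// assuming we are inside the module itself
value := json_dict_get_string('{"name": "John"}', true, 'age') or {
	// the key 'age' is missing, fall back to a default
	eprintln('lookup failed: ${err}')
	'0'
}
println(value) // 0
```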
136 lib/data/hjson/hjson.v Normal file
@@ -0,0 +1,136 @@
|
||||
module crystaljson
|
||||
|
||||
import x.json2
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
const keep_ascii = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_-+={}[]"\':;!/>.<,|\\~` '
|
||||
|
||||
// rough splitter for json, splits a list of dicts into separate text blocks
|
||||
pub fn json_list(r string, clean bool) []string {
|
||||
// mut res := []string{}
|
||||
mut open_counter := 0
|
||||
mut block := []string{}
|
||||
mut blocks := []string{}
|
||||
for ch in r {
|
||||
mut c := ch.ascii_str()
|
||||
// //rough one to debug
|
||||
// if clean && ! keep_ascii.contains(c){
|
||||
// console.print_debug("SKIP")
|
||||
// continue
|
||||
// }
|
||||
// console.print_debug('${c}')
|
||||
if c == '{' {
|
||||
open_counter += 1
|
||||
}
|
||||
if c == '}' {
|
||||
open_counter -= 1
|
||||
}
|
||||
// console.print_debug(open_counter)
|
||||
if open_counter > 0 {
|
||||
block << c
|
||||
// console.print_debug(block.len)
|
||||
}
|
||||
if open_counter == 0 && block.len > 2 {
|
||||
blocks << block.join('') + '}'
|
||||
block = []string{}
|
||||
}
|
||||
}
|
||||
return blocks
|
||||
}
|
||||
|
||||
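A quick hedged sketch of the splitter above, called from within the module:

```v
blocks := json_list('[{"a":1},{"b":2}]', true)
assert blocks.len == 2
assert blocks[0] == '{"a":1}'
assert blocks[1] == '{"b":2}'
```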
// get a value out of a json dict by key
pub fn json_dict_get_any(r string, clean bool, key string) !json2.Any {
|
||||
mut r2 := r
|
||||
if clean {
|
||||
r2 = texttools.ascii_clean(r2)
|
||||
}
|
||||
if r2.trim(' \n') == '' {
|
||||
return error('Cannot do json2 raw decode in json_dict_get_any.\ndata was empty.')
|
||||
}
|
||||
data_raw := json2.raw_decode(r2) or {
|
||||
return error('Cannot do json2 raw decode in json_dict_get_any.\ndata:\n${r2}\nerror:${err}')
|
||||
}
|
||||
mut res := data_raw.as_map()
|
||||
if key in res {
|
||||
return res[key]!
|
||||
} else {
|
||||
return error('Could not find key:${key} in ${r}')
|
||||
}
|
||||
}
|
||||
|
||||
pub fn json_dict_get_string(r string, clean bool, key string) !string {
|
||||
r2 := json_dict_get_any(r, clean, key)!
|
||||
return r2.json_str()
|
||||
}
|
||||
|
||||
// get dict out of json
|
||||
// if include is used (not empty), then only the given keys are kept
|
||||
pub fn json_dict_filter_any(r string, clean bool, include []string, exclude []string) !map[string]json2.Any {
|
||||
mut r2 := r
|
||||
if clean {
|
||||
r2 = texttools.ascii_clean(r2)
|
||||
}
|
||||
if r2.trim(' \n') == '' {
|
||||
return error('Cannot do json2 raw decode in json_dict_filter_any.\ndata was empty.')
|
||||
}
|
||||
data_raw := json2.raw_decode(r2) or {
|
||||
return error('Cannot do json2 raw decode in json_dict_filter_any.\ndata:\n${r2}\nerror:${err}')
|
||||
}
|
||||
mut res := data_raw.as_map()
|
||||
if include != [] {
|
||||
for key in res.keys() {
|
||||
if key !in include {
|
||||
res.delete(key)
|
||||
}
|
||||
}
|
||||
}
|
||||
for key in exclude {
|
||||
res.delete(key)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
pub fn json_dict_filter_string(r string, clean bool, include []string, exclude []string) !map[string]string {
|
||||
mut res := json_dict_filter_any(r, clean, include, exclude)!
|
||||
mut res2 := map[string]string{}
|
||||
for key in res.keys() {
|
||||
res2[key] = res[key]!.json_str()
|
||||
}
|
||||
return res2
|
||||
}
|
||||
|
||||
// the input is a list of dicts e.g. [{"key":{"name":"kristof@incubaid.com",...},{"key":...}]
|
||||
// in this key the key would be key
|
||||
// returns list of json2.any
|
||||
pub fn json_list_dict_get_any(r string, clean bool, key string) ![]json2.Any {
|
||||
mut r2 := r
|
||||
if clean {
|
||||
r2 = texttools.ascii_clean(r2)
|
||||
}
|
||||
if r2.trim(' \n') == '' {
|
||||
return error('Cannot do json2 raw decode in json_dict_get_any.\ndata was empty.')
|
||||
}
|
||||
data_raw := json2.raw_decode(r2) or {
|
||||
return error('Cannot do json2 raw decode in json_dict_get_any.\ndata:\n${r2}\nerror:${err}')
|
||||
}
|
||||
mut res_list := data_raw.arr()
|
||||
mut res_final := []json2.Any{}
|
||||
for item in res_list {
|
||||
mut res := item.as_map()
|
||||
if key in res {
|
||||
res_final << res[key] or { panic('bug') }
|
||||
} else {
|
||||
return error('Could not find key:${key} in ${res} as part of json_list_dict_get_any')
|
||||
}
|
||||
}
|
||||
return res_final
|
||||
}
|
||||
|
||||
// the input is a list of dicts e.g. [{"key":{"name":"kristof@incubaid.com",...},{"key":...}]
|
||||
// in this key the key would be key
|
||||
// returns list strings which can be parsed as json
|
||||
pub fn json_list_dict_get_string(r string, clean bool, key string) ![]string {
|
||||
r2 := json_list_dict_get_any(r, clean, key)!
|
||||
return r2.map(it.json_str())
|
||||
}
|
||||
304 lib/osal/cmds.v Normal file
@@ -0,0 +1,304 @@
|
||||
module osal
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
// import regex
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
pub struct CmdAddArgs {
|
||||
pub mut:
|
||||
cmdname string
|
||||
source string @[required] // path where the binary is
|
||||
symlink bool // if rather than copy do a symlink
|
||||
reset bool = true // if existing cmd will delete
|
||||
// bin_repo_url string = 'https://github.com/freeflowuniverse/freeflow_binary' // binary where we put the results
|
||||
}
|
||||
|
||||
// copy a binary to the right location on the local computer .
|
||||
// e.g. is /usr/local/bin on linux .
|
||||
// e.g. is ~/hero/bin on osx .
|
||||
// will also add the bin location to the path of .zprofile and .zshrc (different per platform)
|
||||
pub fn cmd_add(args_ CmdAddArgs) ! {
|
||||
mut args := args_
|
||||
if args.cmdname == '' {
|
||||
args.cmdname = os.base(args.source)
|
||||
}
|
||||
mut dest := bin_path()!
|
||||
|
||||
mut sourcepath := pathlib.get_file(path: args.source, create: false)!
|
||||
mut destpath := '${dest}/${args.cmdname}'
|
||||
|
||||
console.print_debug(destpath)
|
||||
|
||||
// check if there is other file
|
||||
res := os.execute('which ${args.cmdname}')
|
||||
if res.exit_code == 0 {
|
||||
existing_path := res.output.trim_space()
|
||||
if destpath != existing_path {
|
||||
console.print_debug(' - did find a cmd which is not in path we expect:\n expected:${destpath}\n got:${existing_path}')
|
||||
if args.reset {
|
||||
if existing_path.contains('homebrew/bin') {
|
||||
exec(cmd: 'brew uninstall ${args.cmdname}') or {
|
||||
return error('failed to remove existing command using brew')
|
||||
}
|
||||
} else {
|
||||
os.rm(existing_path)!
|
||||
}
|
||||
} else {
|
||||
return error("existing cmd found on: ${existing_path} and can't delete.\nWas trying to install on ${destpath}.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if args.symlink {
|
||||
sourcepath.link(destpath, true)!
|
||||
} else {
|
||||
sourcepath.copy(dest: destpath, rsync: false)!
|
||||
}
|
||||
|
||||
mut destfile := pathlib.get_file(path: destpath, create: false)!
|
||||
|
||||
destfile.chmod(0o770)! // includes read & write & execute
|
||||
|
||||
// lets make sure this path is in profile
|
||||
profile_path_add_remove(paths2add: dest)!
|
||||
}
|
||||
|
||||
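A hedged usage sketch for `cmd_add`; the source path is illustrative:

```v
import freeflowuniverse.herolib.osal

// copy a freshly built binary into the hero bin location and make sure that
// location is referenced from the shell profiles
osal.cmd_add(
	cmdname: 'mytool'
	source:  '/tmp/build/mytool' // illustrative path
	reset:   true
)!
```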
pub fn profile_path_add_hero() !string {
|
||||
mut dest := bin_path()!
|
||||
profile_path_add_remove(paths2add: dest)!
|
||||
return dest
|
||||
}
|
||||
|
||||
pub fn bin_path() !string {
|
||||
mut dest := ''
|
||||
if is_osx() {
|
||||
dest = '${os.home_dir()}/hero/bin'
|
||||
dir_ensure(dest)!
|
||||
} else {
|
||||
dest = '/usr/local/bin'
|
||||
}
|
||||
return dest
|
||||
}
|
||||
|
||||
pub fn hero_path() !string {
|
||||
mut dest := ''
|
||||
dest = '${os.home_dir()}/hero'
|
||||
dir_ensure(dest)!
|
||||
return dest
|
||||
}
|
||||
|
||||
// /usr/local on linux, ${os.home_dir()}/hero on osx
|
||||
pub fn usr_local_path() !string {
|
||||
mut dest := ''
|
||||
if is_osx() {
|
||||
dest = '${os.home_dir()}/hero'
|
||||
dir_ensure(dest)!
|
||||
} else {
|
||||
dest = '/usr/local'
|
||||
}
|
||||
return dest
|
||||
}
|
||||
|
||||
// return the source statement if the profile exists
|
||||
pub fn profile_path_source() string {
|
||||
if hostname() or { '' } == 'rescue' {
|
||||
return ''
|
||||
}
|
||||
pp := profile_path()
|
||||
if os.exists(pp) {
|
||||
return 'source ${pp}'
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
// return source $path && .
|
||||
// or empty if it doesn't exist
|
||||
pub fn profile_path_source_and() string {
|
||||
if hostname() or { '' } == 'rescue' {
|
||||
return ''
|
||||
}
|
||||
pp := profile_path()
|
||||
if os.exists(pp) {
|
||||
return '. ${pp} &&'
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
fn profile_paths_get(content string) []string {
|
||||
mut paths := []string{}
|
||||
for line in content.split_into_lines() {
|
||||
if line.contains('PATH') {
|
||||
post := line.all_after_last('=').trim('\'" ,')
|
||||
splitted := post.split(':')
|
||||
for item in splitted {
|
||||
item2 := item.trim(' "\'')
|
||||
if item2 !in paths && !item2.contains('PATH') {
|
||||
paths << item2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return paths
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct ProfilePathAddRemoveArgs {
|
||||
pub mut:
|
||||
paths_profile string
|
||||
paths2add string
|
||||
paths2delete string
|
||||
allprofiles bool
|
||||
}
|
||||
|
||||
// add and/or remove paths from profiles
|
||||
// if paths_profile not specified it will walk over all of them
|
||||
pub fn profile_path_add_remove(args_ ProfilePathAddRemoveArgs) ! {
|
||||
mut args := args_
|
||||
|
||||
mut paths_profile := texttools.to_array(args.paths_profile)
|
||||
mut paths2add := texttools.to_array(args.paths2add)
|
||||
mut paths2delete := texttools.to_array(args.paths2delete)
|
||||
|
||||
if paths_profile.len == 0 {
|
||||
if args.allprofiles {
|
||||
paths_profile = profile_paths_all()!
|
||||
} else {
|
||||
paths_profile = profile_paths_preferred()!
|
||||
}
|
||||
}
|
||||
|
||||
for path_profile_str in paths_profile {
|
||||
mut path_profile := pathlib.get_file(path: path_profile_str, create: true)!
|
||||
mut c := path_profile.read()!
|
||||
mut c_out := '' // the result file
|
||||
mut paths_existing_inprofile := profile_paths_get(c)
|
||||
console.print_debug(" -- profile path profile:'${path_profile_str}' add:'${args.paths2add}' delete:'${args.paths2delete}'")
|
||||
// Remove paths to delete
|
||||
for mut todelete in paths2delete {
|
||||
todelete = todelete.trim_space()
|
||||
if todelete.len > 0 {
|
||||
if todelete.starts_with('/') || todelete.starts_with('~') {
|
||||
paths_existing_inprofile = paths_existing_inprofile.filter(it != todelete)
|
||||
paths_existing_inprofile = paths_existing_inprofile.filter(it.replace('~',
|
||||
os.home_dir()) != todelete)
|
||||
} else {
|
||||
paths_existing_inprofile = paths_existing_inprofile.filter(!(it.contains(todelete)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add new paths if they don't exist
|
||||
for mut path2add in paths2add {
|
||||
if path2add !in paths_existing_inprofile {
|
||||
path2add = path2add.replace('~', os.home_dir())
|
||||
if !os.exists(path2add) {
|
||||
return error("can't add path to profile, doesn't exist: ${path2add}")
|
||||
}
|
||||
paths_existing_inprofile << path2add
|
||||
}
|
||||
}
|
||||
|
||||
// Remove existing PATH declarations
|
||||
lines := c.split_into_lines()
|
||||
for line in lines {
|
||||
if !line.to_lower().starts_with('export path=') {
|
||||
c_out += line + '\n'
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the paths
|
||||
paths_existing_inprofile.sort()
|
||||
|
||||
// println(paths_existing_inprofile)
|
||||
// if true{panic("ss")}
|
||||
|
||||
// Add the sorted paths
|
||||
for item in paths_existing_inprofile {
|
||||
c_out += 'export PATH=\$PATH:${item}\n'
|
||||
}
|
||||
|
||||
// Only write if the content has changed
|
||||
if c.trim_space() != c_out.trim_space() {
|
||||
path_profile.write(c_out)!
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
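And a hedged sketch for `profile_path_add_remove` (paths are illustrative; a path to add must exist on disk):

```v
import freeflowuniverse.herolib.osal

// reference ~/hero/bin from the preferred shell profiles and drop an obsolete entry
osal.profile_path_add_remove(
	paths2add:    '~/hero/bin'
	paths2delete: '/opt/oldtool/bin'
)!
```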
// is same as executing which in OS
|
||||
// returns path or error
|
||||
pub fn cmd_path(cmd string) !string {
|
||||
res := os.execute('which ${cmd}')
|
||||
if res.exit_code == 0 {
|
||||
return res.output.trim_space()
|
||||
}
|
||||
return error("can't do find path for cmd: ${cmd}")
|
||||
}
|
||||
|
||||
// delete cmds from found locations
|
||||
// can be one command or multiple
|
||||
pub fn cmd_delete(cmd string) ! {
|
||||
cmds := texttools.to_array(cmd)
|
||||
for cmd2 in cmds {
|
||||
res := cmd_path(cmd2) or { '' }
|
||||
if res.len > 0 {
|
||||
if os.exists(res) {
|
||||
os.rm(res)!
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// return possible profile paths in OS
|
||||
pub fn profile_paths_all() ![]string {
|
||||
mut profile_files_ := []string{}
|
||||
|
||||
profile_files_ = [
|
||||
'/etc/profile',
|
||||
'/etc/bash.bashrc',
|
||||
'${os.home_dir()}/.bashrc',
|
||||
'${os.home_dir()}/.bash_profile',
|
||||
'${os.home_dir()}/.profile',
|
||||
'${os.home_dir()}/.zprofile',
|
||||
'${os.home_dir()}/.zshrc',
|
||||
]
|
||||
|
||||
mut profile_files2 := []string{}
|
||||
|
||||
for file in profile_files_ {
|
||||
if os.exists(file) {
|
||||
profile_files2 << file
|
||||
}
|
||||
}
|
||||
return profile_files_
|
||||
}
|
||||
|
||||
pub fn profile_paths_preferred() ![]string {
|
||||
mut toadd := []string{}
|
||||
if is_osx() {
|
||||
toadd << '${os.home_dir()}/.zprofile'
|
||||
toadd << '${os.home_dir()}/.zshrc'
|
||||
} else {
|
||||
toadd << '${os.home_dir()}/.bash_profile'
|
||||
toadd << '${os.home_dir()}/.bashrc'
|
||||
toadd << '${os.home_dir()}/.zshrc'
|
||||
}
|
||||
mut profile_files2 := []string{}
|
||||
|
||||
for file in toadd {
|
||||
if os.exists(file) {
|
||||
println('${file} exists')
|
||||
profile_files2 << file
|
||||
}
|
||||
}
|
||||
return profile_files2
|
||||
}
|
||||
|
||||
pub fn profile_path() string {
|
||||
if is_osx() {
|
||||
return '${os.home_dir()}/.zprofile'
|
||||
} else {
|
||||
return '${os.home_dir()}/.bash_profile'
|
||||
}
|
||||
}
|
||||
138 lib/osal/downloader.v Normal file
@@ -0,0 +1,138 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
// import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import os
|
||||
|
||||
@[params]
|
||||
pub struct DownloadArgs {
|
||||
pub mut:
|
||||
name string // optional (otherwise derived out of filename)
|
||||
url string
|
||||
reset bool // will remove
|
||||
hash string // if hash is known, will verify what hash is
|
||||
dest string // if specified will copy to that destination
|
||||
timeout int = 180
|
||||
retry int = 3
|
||||
minsize_kb u32 = 10 // is always in kb
|
||||
maxsize_kb u32
|
||||
expand_dir string
|
||||
expand_file string
|
||||
}
|
||||
|
||||
// if name is not specified, it will be derived from the last part of the url
// if that last part ends in an extension like .md .txt .log .text ... the file will be downloaded under that name
|
||||
pub fn download(args_ DownloadArgs) !pathlib.Path {
|
||||
mut args := args_
|
||||
|
||||
console.print_header('download: ${args.url}')
|
||||
if args.name == '' {
|
||||
if args.dest != '' {
|
||||
args.name = args.dest.split('/').last()
|
||||
} else {
|
||||
mut lastname := args.url.split('/').last()
|
||||
if lastname.contains('?') {
|
||||
return error('cannot get name from url if ? in the last part after /')
|
||||
}
|
||||
args.name = lastname
|
||||
}
|
||||
if args.name == '' {
|
||||
return error('cannot find name for download')
|
||||
}
|
||||
}
|
||||
|
||||
if args.dest.contains('@name') {
|
||||
args.dest = args.dest.replace('@name', args.name)
|
||||
}
|
||||
if args.url.contains('@name') {
|
||||
args.url = args.url.replace('@name', args.name)
|
||||
}
|
||||
|
||||
if args.dest == '' {
|
||||
args.dest = '/tmp/${args.name}'
|
||||
}
|
||||
|
||||
if !cmd_exists('curl') {
|
||||
return error('please make sure curl has been installed.')
|
||||
}
|
||||
|
||||
mut dest := pathlib.get_file(path: args.dest, check: false)!
|
||||
|
||||
// now check to see the url is not different
|
||||
mut meta := pathlib.get_file(path: args.dest + '.meta', create: true)!
|
||||
metadata := meta.read()!
|
||||
if metadata.trim_space() != args.url.trim_space() {
|
||||
// means is a new one need to delete
|
||||
args.reset = true
|
||||
dest.delete()!
|
||||
}
|
||||
|
||||
if args.reset {
|
||||
mut dest_delete := pathlib.get_file(path: args.dest + '_', check: false)!
|
||||
dest_delete.delete()!
|
||||
}
|
||||
|
||||
meta.write(args.url.trim_space())!
|
||||
|
||||
// check if the file exists, if yes and right size lets return
|
||||
mut todownload := true
|
||||
if dest.exists() {
|
||||
size := dest.size_kb()!
|
||||
if args.minsize_kb > 0 {
|
||||
if size > args.minsize_kb {
|
||||
todownload = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if todownload {
|
||||
mut dest0 := pathlib.get_file(path: args.dest + '_')!
|
||||
|
||||
cmd := '
|
||||
rm -f ${dest0.path}
|
||||
cd /tmp
|
||||
curl -L \'${args.url}\' -o ${dest0.path}
|
||||
'
|
||||
exec(
|
||||
cmd: cmd
|
||||
timeout: args.timeout
|
||||
retry: args.retry
|
||||
debug: false
|
||||
description: 'download ${args.url} to ${dest0.path}'
|
||||
stdout: true
|
||||
)!
|
||||
|
||||
if dest0.exists() {
|
||||
size0 := dest0.size_kb()!
|
||||
// console.print_debug(size0)
|
||||
if args.minsize_kb > 0 {
|
||||
if size0 < args.minsize_kb {
|
||||
return error('Could not download ${args.url} to ${dest0.path}, size (${size0}) was smaller than ${args.minsize_kb}')
|
||||
}
|
||||
}
|
||||
if args.maxsize_kb > 0 {
|
||||
if size0 > args.maxsize_kb {
|
||||
return error('Could not download ${args.url} to ${dest0.path}, size (${size0}) was larger than ${args.maxsize_kb}')
|
||||
}
|
||||
}
|
||||
}
|
||||
dest0.rename(dest.name())!
|
||||
dest.check()
|
||||
}
|
||||
if args.expand_dir.len > 0 {
|
||||
if os.exists(args.expand_dir) {
|
||||
os.rmdir_all(args.expand_dir)!
|
||||
}
|
||||
|
||||
return dest.expand(args.expand_dir)!
|
||||
}
|
||||
if args.expand_file.len > 0 {
|
||||
if os.exists(args.expand_file) {
|
||||
os.rm(args.expand_file)!
|
||||
}
|
||||
return dest.expand(args.expand_file)!
|
||||
}
|
||||
|
||||
return dest
|
||||
}
|
||||
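A hedged usage sketch for `download`; the url is illustrative:

```v
import freeflowuniverse.herolib.osal

// download an archive, check a minimum size and expand it into a directory
p := osal.download(
	url:        'https://example.com/releases/mytool_linux_amd64.tar.gz' // illustrative url
	dest:       '/tmp/mytool.tar.gz'
	minsize_kb: 100
	expand_dir: '/tmp/mytool'
)!
println(p.path)
```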
79 lib/osal/env.v Normal file
@@ -0,0 +1,79 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
@[params]
|
||||
pub struct EnvSet {
|
||||
pub mut:
|
||||
key string @[required]
|
||||
value string @[required]
|
||||
overwrite bool = true
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct EnvSetAll {
|
||||
pub mut:
|
||||
env map[string]string
|
||||
clear_before_set bool
|
||||
overwrite_if_exists bool = true
|
||||
}
|
||||
|
||||
// Sets an environment variable; if it already exists it is only overwritten when overwrite is true (default)
|
||||
pub fn env_set(args EnvSet) {
|
||||
os.setenv(args.key, args.value, args.overwrite)
|
||||
}
|
||||
|
||||
// Unsets an environment variable
|
||||
pub fn env_unset(key string) {
|
||||
os.unsetenv(key)
|
||||
}
|
||||
|
||||
// Unsets all environment variables
|
||||
pub fn env_unset_all() {
|
||||
for key, _ in os.environ() {
|
||||
env_unset(key)
|
||||
}
|
||||
}
|
||||
|
||||
// Set multiple environment variables in one go; if clear_before_set is true, all existing environment variables are unset first; if overwrite_if_exists is true (default), existing variables are overwritten
|
||||
pub fn env_set_all(args EnvSetAll) {
|
||||
if args.clear_before_set {
|
||||
env_unset_all()
|
||||
}
|
||||
for key, val in args.env {
|
||||
env_set(key: key, value: val, overwrite: args.overwrite_if_exists)
|
||||
}
|
||||
}
|
||||
|
||||
// Returns all existing environment variables
|
||||
pub fn env_get_all() map[string]string {
|
||||
return os.environ()
|
||||
}
|
||||
|
||||
// Returns the requested environment variable if it exists or throws an error if it does not
|
||||
pub fn env_get(key string) !string {
|
||||
return os.environ()[key]!
|
||||
}
|
||||
|
||||
// Returns the requested environment variable if it exists or returns the provided default value if it does not
|
||||
pub fn env_get_default(key string, def string) string {
|
||||
return os.environ()[key] or { return def }
|
||||
}
|
||||
|
||||
pub fn load_env_file(file_path string) ! {
|
||||
mut file := pathlib.get_file(path: file_path)!
|
||||
content := file.read()!
|
||||
lines := content.split_into_lines()
|
||||
for line in lines {
|
||||
if line.len == 0 || line[0] == `#` {
|
||||
continue
|
||||
}
|
||||
if !line.contains('=') {
|
||||
continue
|
||||
}
|
||||
key := line.all_before('=').trim_space()
|
||||
value := line.all_after('=').trim_space()
|
||||
os.setenv(key, value, true)
|
||||
}
|
||||
}
|
||||
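A small hedged sketch of `load_env_file`; the file shows the expected KEY=VALUE format (lines starting with # are skipped):

```v
import freeflowuniverse.herolib.osal
import os

// illustrative .env file
os.write_file('/tmp/example.env', '# comment\nMY_HOST=localhost\nMY_PORT=8080\n')!
osal.load_env_file('/tmp/example.env')!
println(osal.env_get('MY_HOST')!) // localhost
```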
41 lib/osal/env_test.v Normal file
@@ -0,0 +1,41 @@
|
||||
module osal
|
||||
|
||||
fn test_env_get_default() ! {
|
||||
key := 'keythatshouldnotexist'
|
||||
def_value := 'defaultvalue'
|
||||
|
||||
env_unset(key)
|
||||
|
||||
env_get(key) or {
|
||||
assert env_get_default(key, def_value) == def_value
|
||||
return
|
||||
}
|
||||
return error('The environment value ${key} should have been unset, it was not!')
|
||||
}
|
||||
|
||||
fn test_env_set_env_get_env_unset() ! {
|
||||
key := 'myenvironmentvariable'
|
||||
value := 'somevalue'
|
||||
|
||||
env_set(key: key, value: value)
|
||||
|
||||
assert env_get(key)! == value
|
||||
|
||||
env_unset(key)
|
||||
|
||||
env_get(key) or { return }
|
||||
return error('The environment variable ${key} should have been unset, it was not!')
|
||||
}
|
||||
|
||||
fn test_env_unset_all_and_set_all_and_get_all() {
|
||||
mut env := map[string]string{}
|
||||
env['Dummy'] = 'dummy'
|
||||
|
||||
env_unset_all()
|
||||
|
||||
assert env_get_all() == map[string]string{}
|
||||
|
||||
env_set_all(env: env)
|
||||
|
||||
assert env_get_all() == env
|
||||
}
|
||||
445 lib/osal/exec.v Normal file
@@ -0,0 +1,445 @@
|
||||
module osal
|
||||
|
||||
// import freeflowuniverse.herolib.core.texttools
|
||||
// import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
// import io.util
|
||||
|
||||
pub struct JobError {
|
||||
Error
|
||||
pub mut:
|
||||
job Job
|
||||
error_type ErrorType
|
||||
}
|
||||
|
||||
pub enum ErrorType {
|
||||
exec
|
||||
timeout
|
||||
args
|
||||
}
|
||||
|
||||
fn (err JobError) msg() string {
|
||||
if err.error_type == .args {
|
||||
return 'Error in arguments:\n${err.job.cmd}'
|
||||
}
|
||||
if err.error_type == .timeout {
|
||||
return 'Execution failed timeout\n${err.job}'
|
||||
}
|
||||
mut msg := 'Execution failed with code ${err.job.exit_code}\n'
|
||||
if err.job.cmd.scriptpath.len > 0 {
|
||||
msg += '\nscript path:${err.job.cmd.scriptpath}'
|
||||
}
|
||||
if err.job.output.len > 0 {
|
||||
msg += '\n\n## stdout:\n${err.job.output}'
|
||||
}
|
||||
if err.job.error.len > 0 {
|
||||
msg += '\n\n## stderr:\n${err.job.error}'
|
||||
}
|
||||
return msg
|
||||
}
|
||||
|
||||
fn (err JobError) code() int {
|
||||
if err.error_type == .timeout {
|
||||
return 9999
|
||||
}
|
||||
return err.job.exit_code
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct Command {
|
||||
pub mut:
|
||||
name string // to give a name to your command, good to see logs...
|
||||
cmd string
|
||||
description string
|
||||
timeout int = 3600 // timeout in sec
|
||||
stdout bool = true
|
||||
stdout_log bool = true
|
||||
raise_error bool = true // if false, will not raise an error but still error report
|
||||
ignore_error bool // if true the error is swallowed: nothing is raised and there is no error reporting
|
||||
work_folder string // location where cmd will be executed
|
||||
environment map[string]string // env variables
|
||||
ignore_error_codes []int
|
||||
scriptpath string // is the path where the script will be put which is executed
|
||||
scriptkeep bool // means we don't remove the script
|
||||
debug bool // if debug will put +ex in the script which is being executed and will make sure script stays
|
||||
shell bool // means we will execute it in a shell interactive
|
||||
retry int
|
||||
interactive bool = true
|
||||
async bool
|
||||
runtime RunTime
|
||||
}
|
||||
|
||||
pub enum JobStatus {
|
||||
init
|
||||
running
|
||||
error_exec
|
||||
error_timeout
|
||||
error_args
|
||||
done
|
||||
}
|
||||
|
||||
pub enum RunTime {
|
||||
bash
|
||||
python
|
||||
heroscript
|
||||
herocmd
|
||||
v
|
||||
}
|
||||
|
||||
pub struct Job {
|
||||
pub mut:
|
||||
start time.Time
|
||||
end time.Time
|
||||
cmd Command
|
||||
output string
|
||||
error string
|
||||
exit_code int
|
||||
status JobStatus
|
||||
process ?&os.Process @[skip; str: skip]
|
||||
runnr int // nr of time it runs, is for retry
|
||||
}
|
||||
|
||||
// cmd is the cmd to execute can use ' ' and spaces .
|
||||
// if there is a \n in cmd it will be written to a script on disk and then executed with bash .
|
||||
// if die==false then will just return returncode,out but not return error .
|
||||
// if stdout will show stderr and stdout .
|
||||
// .
|
||||
// if cmd starts with find or ls, will give to bash -c so it can execute .
|
||||
// if cmd has no path, path will be found .
|
||||
// .
|
||||
// Command argument: .
|
||||
//```
|
||||
// name string // to give a name to your command, good to see logs...
|
||||
// cmd string
|
||||
// description string
|
||||
// timeout int = 3600 // timeout in sec
|
||||
// stdout bool = true
|
||||
// stdout_log bool = true
|
||||
// raise_error bool = true // if false, will not raise an error but still error report
|
||||
// ignore_error bool // if true the error is swallowed: nothing is raised and there is no error reporting
|
||||
// work_folder string // location where cmd will be executed
|
||||
// environment map[string]string // env variables
|
||||
// ignore_error_codes []int
|
||||
// scriptpath string // is the path where the script will be put which is executed
|
||||
// scriptkeep bool // means we don't remove the script
|
||||
// debug bool // if debug will put +ex in the script which is being executed and will make sure script stays
|
||||
// shell bool // means we will execute it in a shell interactive
|
||||
// retry int
|
||||
// interactive bool = true // if false the script runs non-interactively (e.g. DEBIAN_FRONTEND=noninteractive)
|
||||
// async bool
|
||||
// runtime RunTime (.bash, .python)
|
||||
//
|
||||
// returns Job:
|
||||
// start time.Time
|
||||
// end time.Time
|
||||
// cmd Command
|
||||
// output []string
|
||||
// error []string
|
||||
// exit_code int
|
||||
// status JobStatus
|
||||
// process os.Process
|
||||
//```
|
||||
// return Job .
|
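// Example (illustrative sketch, not part of the original docs):
//
//	job := exec(cmd: 'echo hello', stdout: false)!
//	println(job.output)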
||||
pub fn exec(cmd Command) !Job {
|
||||
mut job := Job{
|
||||
cmd: cmd
|
||||
}
|
||||
job.start = time.now()
|
||||
|
||||
if job.cmd.debug {
|
||||
job.cmd.stdout = true
|
||||
console.print_header(' execute: ${job.cmd.cmd}')
|
||||
}
|
||||
|
||||
if cmd.shell {
|
||||
// $if debug {
|
||||
// console.print_debug('cmd shell: ${cmd.cmd}')
|
||||
// }
|
||||
scriptpath := cmd_to_script_path(job.cmd)!
|
||||
os.execvp(scriptpath, [])!
|
||||
return job
|
||||
}
|
||||
if !cmd.async {
|
||||
job.execute_retry() or {
|
||||
// println(err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
return job
|
||||
}
|
||||
|
||||
// execute the job and wait on result
|
||||
// will retry as specified
|
||||
pub fn (mut job Job) execute_retry() ! {
|
||||
for x in 0 .. job.cmd.retry + 1 {
|
||||
job.execute() or {
|
||||
if x == job.cmd.retry {
|
||||
// println(job)
|
||||
return err
|
||||
}
|
||||
}
|
||||
// println(job)
|
||||
if job.status == .done {
|
||||
// means we could execute we can stop
|
||||
return
|
||||
}
|
||||
}
|
||||
job.close()!
|
||||
}
|
||||
|
||||
// execute the job, start process, process will not be closed .
|
||||
// important: you need to close the process later with job.close()!, otherwise we get zombie processes
|
||||
pub fn (mut job Job) execute() ! {
|
||||
job.runnr += 1
|
||||
job.start = time.now()
|
||||
job.status = .running
|
||||
|
||||
job.cmd.scriptpath = cmd_to_script_path(job.cmd)!
|
||||
|
||||
// console.print_debug(" - process execute ${process_args[0]}")
|
||||
mut p := os.new_process(job.cmd.scriptpath)
|
||||
|
||||
if job.cmd.work_folder.len > 0 {
|
||||
p.set_work_folder(job.cmd.work_folder)
|
||||
}
|
||||
if job.cmd.environment.len > 0 {
|
||||
p.set_environment(job.cmd.environment)
|
||||
}
|
||||
p.set_redirect_stdio()
|
||||
// console.print_debug("process setargs ${process_args[1..process_args.len]}")
|
||||
// p.set_args(process_args[1..process_args.len])
|
||||
if job.cmd.stdout {
|
||||
console.print_debug('')
|
||||
}
|
||||
p.run()
|
||||
job.process = p
|
||||
job.wait()!
|
||||
}
|
||||
|
||||
// ORDER IS
|
||||
// EXECUTE
|
||||
// LOOP -> WAIT -> PROCESS -> READ
|
||||
// -> CLOSE
|
||||
|
||||
// wait till the job finishes or goes in error
|
||||
pub fn (mut job Job) wait() ! {
|
||||
// if job.status != .running && job.status != .init {
|
||||
// return error('can only wait for running job')
|
||||
// }
|
||||
|
||||
for {
|
||||
job.process()!
|
||||
// console.print_debug(result)
|
||||
if job.status == .done {
|
||||
// console.print_stderr("wait done")
|
||||
job.close()!
|
||||
return
|
||||
}
|
||||
time.sleep(10 * time.millisecond)
|
||||
}
|
||||
job.close()!
|
||||
}
|
||||
|
||||
// process (read std.err and std.out of process)
|
||||
pub fn (mut job Job) process() ! {
|
||||
// $if debug{console.print_debug(" - job process: $job")}
|
||||
if job.status == .init {
|
||||
panic('should not be here')
|
||||
// job.execute()!
|
||||
}
|
||||
mut p := job.process or { return error('there is not process on job') }
|
||||
|
||||
// mut result := job.read()!
|
||||
|
||||
job.read()!
|
||||
if p.is_alive() {
|
||||
job.read()!
|
||||
// result=job.read()!
|
||||
if time.now().unix() > job.start.unix() + job.cmd.timeout { // timeout and unix() are both in seconds
|
||||
// console.print_stderr("TIMEOUT TIMEOUT TIMEOUT TIMEOUT")
|
||||
p.signal_pgkill()
|
||||
p.close()
|
||||
job.exit_code = 9999
|
||||
job.end = time.now()
|
||||
job.status = .error_timeout
|
||||
if job.cmd.raise_error {
|
||||
return JobError{
|
||||
job: job
|
||||
error_type: .timeout
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// console.print_stderr(" - process stopped")
|
||||
job.read()!
|
||||
job.read()!
|
||||
job.status = .done
|
||||
// result.done = true
|
||||
if p.code > 0 {
|
||||
// console.print_stderr(' ########## Process CODE IS > 0')
|
||||
job.exit_code = p.code
|
||||
job.status = .error_exec
|
||||
job.cmd.scriptkeep = true
|
||||
job.close()!
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut job Job) read() ! {
|
||||
mut p := job.process or { return error('there is no process on job') }
|
||||
|
||||
// console.print_debug("READ STDOUT")
|
||||
out_std := p.pipe_read(.stdout) or { '' }
|
||||
// console.print_debug(" OK")
|
||||
if out_std.len > 0 {
|
||||
if job.cmd.stdout {
|
||||
console.print_stdout(out_std)
|
||||
}
|
||||
job.output += out_std
|
||||
}
|
||||
// console.print_debug("READ ERROR")
|
||||
out_error := p.pipe_read(.stderr) or { '' }
|
||||
// console.print_debug(" OK")
|
||||
if out_error.len > 0 {
|
||||
if job.cmd.stdout && job.cmd.ignore_error == false {
|
||||
console.print_stderr(out_error)
|
||||
}
|
||||
job.error += out_error
|
||||
}
|
||||
}
|
||||
|
||||
// will wait & close
|
||||
pub fn (mut job Job) close() ! {
|
||||
mut p := job.process or { return error('there is no process on job') }
|
||||
// console.print_debug("CLOSE")
|
||||
p.signal_pgkill()
|
||||
p.wait()
|
||||
p.close()
|
||||
job.end = time.now()
|
||||
if job.exit_code > 0 && job.exit_code !in job.cmd.ignore_error_codes {
|
||||
if !job.cmd.ignore_error {
|
||||
errorpath := job.cmd.scriptpath.all_before_last('.sh') + '_error.json'
|
||||
errorjson := json.encode_pretty(job)
|
||||
os.write_file(errorpath, errorjson) or {
|
||||
msg := 'cannot write errorjson to ${errorpath}'
|
||||
return error(msg)
|
||||
}
|
||||
|
||||
errorpath2 := job.cmd.scriptpath.all_before_last('.sh') + '_error.log'
|
||||
mut errortxt := '# ERROR:\n\n'
|
||||
errortxt += job.cmd.cmd + '\n'
|
||||
errortxt += '## OUTPUT:\n\n'
|
||||
errortxt += job.output
|
||||
os.write_file(errorpath2, errortxt) or {
|
||||
msg := 'cannot write error to ${errorpath2}'
|
||||
return error(msg)
|
||||
}
|
||||
|
||||
je := JobError{
|
||||
job: job
|
||||
error_type: .exec
|
||||
}
|
||||
if job.cmd.stdout {
|
||||
console.print_debug('Job Error')
|
||||
console.print_debug(je.msg())
|
||||
}
|
||||
if job.cmd.raise_error {
|
||||
return je
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if job.exit_code == 0 && job.cmd.scriptkeep == false && os.exists(job.cmd.scriptpath) {
|
||||
// console.print_debug(job.cmd.scriptpath)
|
||||
os.rm(job.cmd.scriptpath)!
|
||||
}
|
||||
if job.cmd.ignore_error == false && job.cmd.scriptkeep == false && os.exists(job.cmd.scriptpath) {
|
||||
os.rm(job.cmd.scriptpath)!
|
||||
}
|
||||
// job.status = .done
|
||||
|
||||
if job.cmd.raise_error && job.exit_code > 0 {
|
||||
return JobError{
|
||||
job: job
|
||||
error_type: .exec
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// shortcut to execute a job silent
|
||||
pub fn execute_silent(cmd string) !string {
|
||||
job := exec(cmd: cmd, stdout: false)!
|
||||
return job.output
|
||||
}
|
||||
|
||||
pub fn execute_debug(cmd string) !string {
|
||||
job := exec(cmd: cmd, stdout: true, debug: true)!
|
||||
return job.output
|
||||
}
|
||||
|
||||
// shortcut to execute a job to stdout
|
||||
pub fn execute_stdout(cmd string) !string {
|
||||
job := exec(cmd: cmd, stdout: true)!
|
||||
return job.output
|
||||
}
|
||||
|
||||
// shortcut to execute a job interactive means in shell
|
||||
pub fn execute_interactive(cmd string) ! {
|
||||
exec(cmd: cmd, stdout: true, shell: true)!
|
||||
}
|
||||
|
||||
// executes a cmd, if not error return true
|
||||
pub fn execute_ok(cmd string) bool {
|
||||
res := os.execute(cmd)
|
||||
if res.exit_code > 0 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
pub fn cmd_exists(cmd string) bool {
|
||||
cmd1 := 'which ${cmd}'
|
||||
res := os.execute(cmd1)
|
||||
if res.exit_code > 0 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
pub fn cmd_exists_profile(cmd string) bool {
|
||||
cmd1 := '${profile_path_source_and()} which ${cmd}'
|
||||
res := os.execute(cmd1)
|
||||
if res.exit_code > 0 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// cmd is the cmd to execute can use ' ' and spaces
|
||||
// if there is a \n in cmd it will be written to a script on disk and then executed with bash
|
||||
// if die==false then will just return returncode,out but not return error
|
||||
// if stdout will show stderr and stdout
|
||||
//
|
||||
// if cmd starts with find or ls, will give to bash -c so it can execute
|
||||
// if cmd has no path, path will be found
|
||||
// $... are replaced by environment arguments TODO: implement
|
||||
//
|
||||
// Command argument:
|
||||
// cmd string
|
||||
// timeout int = 600
|
||||
// stdout bool = true
|
||||
// die bool = true
|
||||
// debug bool
|
||||
//
|
||||
// returns the path of the generated script, which can then be executed (e.g. handed to bash)
|
||||
pub fn exec_string(cmd Command) !string {
|
||||
mut job := Job{
|
||||
cmd: cmd
|
||||
}
|
||||
job.start = time.now()
|
||||
job.cmd.scriptpath = cmd_to_script_path(job.cmd)!
|
||||
return job.cmd.scriptpath
|
||||
}
|
||||
78
lib/osal/exec_test.v
Normal file
@@ -0,0 +1,78 @@
|
||||
module osal
|
||||
|
||||
// import crypto.md5
|
||||
// import os
|
||||
|
||||
// TODO: needs to be rewritten for process
|
||||
|
||||
// TODO: remove this test, to make the tests pass we need at least one test
|
||||
fn test_does_nothing() {
|
||||
}
|
||||
|
||||
// const (
|
||||
// cmd_create_file_and_print_content = '#!/bin/bash
|
||||
// mkdir -p /tmp/testdirectory
|
||||
// echo text > /tmp/testdirectory/file.txt
|
||||
// cat /tmp/testdirectory/file.txt
|
||||
// '
|
||||
// )
|
||||
|
||||
// // Test that succeeds in creating a file and printing the content of that file
|
||||
// fn test_exec_cmd_create_file_and_print_content() ! {
|
||||
// res := exec(cmd: osal.cmd_create_file_and_print_content, remove_installer: false)!
|
||||
|
||||
// assert res.trim_space() == 'text'
|
||||
// assert os.is_file('/tmp/testdirectory/file.txt')
|
||||
// assert os.is_file('/tmp/installer.sh')
|
||||
|
||||
// // cleanup
|
||||
// os.rmdir_all('/tmp/testdirectory')!
|
||||
// }
|
||||
|
||||
// // Test where the command fails and we retry 2 times and it still fails
|
||||
// fn test_exec_cmd_fail_and_retry() ! {
|
||||
// res := exec(cmd: 'lsk ./', retry: 2) or {
|
||||
// assert err.code() == 127
|
||||
// assert err.msg().contains('Execution failed with code 127'), err.msg()
|
||||
// assert !os.is_file('/tmp/installer.sh')
|
||||
// return
|
||||
// }
|
||||
// return error('The command should fail and return an error!')
|
||||
// }
|
||||
|
||||
// // Test where the execution takes too long and a timeout occurs
|
||||
// fn test_exec_cmd_fail_due_timeout() ! {
|
||||
// res := exec(cmd: 'sleep 10s', retry_timeout: 100) or {
|
||||
// assert err.code() == 9999
|
||||
// assert err.msg().contains('Execution failed timeout'), err.msg()
|
||||
// return
|
||||
// }
|
||||
// return error('The command should fail and return an error!')
|
||||
// }
|
||||
|
||||
// // Test where the command returns in an error but we ignore that error code
|
||||
// fn test_exec_ignore_error_codes() ! {
|
||||
// args := ExecArgs{
|
||||
// cmd: 'exit 10'
|
||||
// ignore_error_codes: [10]
|
||||
// }
|
||||
|
||||
// mut res := exec(args)!
|
||||
// }
|
||||
|
||||
// // Test using a cached result with a period of 10 milliseconds
|
||||
// fn test_exec_cmd_done() ! {
|
||||
// args := ExecArgs{
|
||||
// cmd: 'echo sometext'
|
||||
// remove_installer: false
|
||||
// reset: false
|
||||
// period: 10
|
||||
// }
|
||||
// hhash := md5.hexhash(args.cmd)
|
||||
// mut res := exec(args)!
|
||||
// redis_str := done_get_str('exec_${hhash}')
|
||||
// assert redis_str.trim_space().ends_with('sometext')
|
||||
// assert res.trim_space() == 'sometext'
|
||||
// res = exec(args)!
|
||||
// assert res.trim_space() == 'sometext'
|
||||
// }
|
||||
78
lib/osal/exec_to_scriptpath.v
Normal file
@@ -0,0 +1,78 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// will return temporary path which then can be executed, is a helper function for making script out of command
|
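// Example (sketch): a multi-line command becomes a temporary bash script:
//
//	scriptpath := cmd_to_script_path(cmd: 'echo one\necho two')!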
||||
pub fn cmd_to_script_path(cmd Command) !string {
|
||||
// all will be done over the filesystem now
|
||||
mut cmdcontent := texttools.dedent(cmd.cmd)
|
||||
if !cmdcontent.ends_with('\n') {
|
||||
cmdcontent += '\n'
|
||||
}
|
||||
|
||||
if cmd.environment.len > 0 {
|
||||
mut cmdenv := ''
|
||||
for key, val in cmd.environment {
|
||||
cmdenv += "export ${key}='${val}'\n"
|
||||
}
|
||||
cmdcontent = cmdenv + '\n' + cmdcontent
|
||||
// process.set_environment(args.environment)
|
||||
}
|
||||
|
||||
// use bash debug and die on error features
|
||||
mut firstlines := ''
|
||||
mut extension := 'sh'
|
||||
if cmd.runtime == .bash || cmd.runtime == .herocmd {
|
||||
if !cmd.cmd.contains('#!/bin/bash') {
|
||||
firstlines = '#!/bin/bash\n\n'
|
||||
if !cmd.ignore_error {
|
||||
firstlines += 'set -e\n' // exec 2>&1\n
|
||||
} else {
|
||||
firstlines += 'set +e\n' // exec 2>&1\n
|
||||
}
|
||||
if cmd.debug {
|
||||
firstlines += 'set -x\n' // exec 2>&1\n
|
||||
}
|
||||
}
|
||||
if !cmd.interactive {
|
||||
// firstlines += 'export DEBIAN_FRONTEND=noninteractive TERM=xterm\n\n'
|
||||
firstlines += 'export DEBIAN_FRONTEND=noninteractive\n\n'
|
||||
}
|
||||
if cmd.work_folder.len > 0 {
|
||||
firstlines += 'cd ${cmd.work_folder}\n'
|
||||
}
|
||||
if cmd.runtime == .herocmd {
|
||||
firstlines += 'hero ' // put hero on the next line, the cmdcontent will be appended then
|
||||
extension = 'hero'
|
||||
}
|
||||
} else if cmd.runtime == .python {
|
||||
firstlines = '#!/usr/bin/env python3\n\n'
|
||||
extension = 'py'
|
||||
} else if cmd.runtime == .heroscript {
|
||||
firstlines = '#!/usr/bin/env hero\n\n'
|
||||
extension = 'hero'
|
||||
} else if cmd.runtime == .v {
|
||||
firstlines = '#!/usr/bin/env v\n\n'
|
||||
extension = 'vsh'
|
||||
} else {
|
||||
panic("can't find runtime type")
|
||||
}
|
||||
|
||||
cmdcontent = firstlines + cmdcontent
|
||||
|
||||
mut scriptpath := if cmd.scriptpath.len > 0 {
|
||||
cmd.scriptpath
|
||||
} else {
|
||||
''
|
||||
}
|
||||
scriptpath = pathlib.temp_write(
|
||||
text: cmdcontent
|
||||
path: scriptpath
|
||||
name: cmd.name
|
||||
ext: extension
|
||||
) or { return error('error: cannot write script to execute: ${err}') }
|
||||
// console.print_debug(" - scriptpath: ${cmd.scriptpath}")
|
||||
return scriptpath
|
||||
}
|
||||
61
lib/osal/file.v
Normal file
@@ -0,0 +1,61 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import os
|
||||
|
||||
pub fn file_write(path string, text string) ! {
|
||||
return os.write_file(path, text)
|
||||
}
|
||||
|
||||
pub fn file_read(path string) !string {
|
||||
return os.read_file(path)
|
||||
}
|
||||
|
||||
// create the directory if it does not exist yet
|
||||
pub fn dir_ensure(path string) ! {
|
||||
if !os.exists(path) {
|
||||
os.mkdir_all(path)!
|
||||
}
|
||||
}
|
||||
|
||||
// remove all if it exists
|
||||
pub fn dir_delete(path string) ! {
|
||||
if os.exists(path) {
|
||||
return os.rmdir_all(path)
|
||||
}
|
||||
}
|
||||
|
||||
// remove all if it exists
|
||||
// and then (re-)create
|
||||
pub fn dir_reset(path string) ! {
|
||||
os.rmdir_all(path)!
|
||||
os.mkdir_all(path)!
|
||||
}
|
||||
|
||||
// can be list of dirs, files
|
||||
// ~ supported
|
||||
// can be \n or , separated
|
||||
pub fn rm(todelete_ string) ! {
|
||||
for mut item in texttools.to_array(todelete_) {
|
||||
if item.trim_space() == '' {
|
||||
continue
|
||||
}
|
||||
item = item.replace('~', os.home_dir())
|
||||
console.print_debug(' - rm: ${item}')
|
||||
if item.starts_with('/') {
|
||||
if os.exists(item) {
|
||||
if os.is_dir(item) {
|
||||
os.rmdir_all(item)!
|
||||
} else {
|
||||
os.rm(item)!
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if item.contains('/') {
|
||||
return error('there should be no / in to remove list')
|
||||
}
|
||||
cmd_delete(item)! // look for the command, if will be removed if found
|
||||
}
|
||||
}
|
||||
}
|
||||
141
lib/osal/hostsfile/hostsfile.v
Normal file
@@ -0,0 +1,141 @@
|
||||
module hostsfile
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.osal
|
||||
|
||||
// TODO: will be broken now
|
||||
|
||||
@[heap]
|
||||
pub struct HostsFile {
|
||||
pub mut:
|
||||
sections []Section
|
||||
}
|
||||
|
||||
pub struct Section {
|
||||
pub mut:
|
||||
name string
|
||||
hosts []Host
|
||||
}
|
||||
|
||||
pub struct Host {
|
||||
pub mut:
|
||||
ip string
|
||||
domain string
|
||||
}
|
||||
|
||||
// pub fn new() HostsFile {
|
||||
// mut obj := HostsFile{}
|
||||
|
||||
// mut content := os.read_file('/etc/hosts') or { panic(err) }
|
||||
// mut section := ''
|
||||
|
||||
// for mut line in content.split('\n') {
|
||||
// line = line.trim_space()
|
||||
// if line.starts_with('#') {
|
||||
// section = line.trim('#').trim_space()
|
||||
// continue
|
||||
// }
|
||||
|
||||
// mut splitted := line.fields()
|
||||
// if splitted.len > 1 {
|
||||
// if section !in obj.hosts {
|
||||
// obj.hosts[section] = []map[string]string{}
|
||||
// }
|
||||
// obj.hosts[section] << {
|
||||
// splitted[0]: splitted[1]
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// return obj
|
||||
// }
|
||||
|
||||
// pub fn (mut hostsfile HostsFile) save(sudo bool) &HostsFile {
|
||||
// mut str := ''
|
||||
// for section, items in hostsfile.hosts {
|
||||
// if section != '' {
|
||||
// str = str + '# ${section}\n\n'
|
||||
// }
|
||||
|
||||
// for item in items {
|
||||
// for ip, domain in item {
|
||||
// str = str + '${ip}\t${domain}\n'
|
||||
// }
|
||||
// }
|
||||
// str = str + '\n\n'
|
||||
// }
|
||||
// if sudo {
|
||||
// osal.execute_interactive('sudo -- sh -c -e "echo \'${str}\' > /etc/hosts"') or {
|
||||
// panic(err)
|
||||
// }
|
||||
// } else {
|
||||
// os.write_file('/etc/hosts', str) or { panic(err) }
|
||||
// }
|
||||
// return hostsfile
|
||||
// }
|
||||
|
||||
// pub fn (mut hostsfile HostsFile) reset(sections []string) &HostsFile {
|
||||
// for section in sections {
|
||||
// if section in hostsfile.hosts {
|
||||
// hostsfile.hosts[section] = []map[string]string{}
|
||||
// }
|
||||
// }
|
||||
// return hostsfile
|
||||
// }
|
||||
|
||||
// pub struct HostItemArg{
|
||||
// pub mut:
|
||||
// ip string
|
||||
// domain string
|
||||
// section string = "main"
|
||||
// }
|
||||
|
||||
// pub fn (mut hostsfile HostsFile) add(args HostItemArg) &HostsFile {
|
||||
// if args.section !in hostsfile.hosts {
|
||||
// hostsfile.hosts[args.section] = []map[string]string{}
|
||||
// }
|
||||
// hostsfile.hosts[args.section] << {
|
||||
// ip: domain
|
||||
// }
|
||||
// return hostsfile
|
||||
// }
|
||||
|
||||
// pub fn (mut hostsfile HostsFile) delete(domain string) &HostsFile {
|
||||
// mut indexes := map[string][]int{}
|
||||
|
||||
// for section, items in hostsfile.hosts {
|
||||
// indexes[section] = []int{}
|
||||
// for i, item in items {
|
||||
// for _, dom in item {
|
||||
// if dom == domain {
|
||||
// indexes[section] << i
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// for section, items in indexes {
|
||||
// for i in items {
|
||||
// hostsfile.hosts[section].delete(i)
|
||||
// }
|
||||
// }
|
||||
|
||||
// return hostsfile
|
||||
// }
|
||||
|
||||
// pub fn (mut hostsfile HostsFile) delete_section(section string) &HostsFile {
|
||||
// hostsfile.hosts.delete(section)
|
||||
// return hostsfile
|
||||
// }
|
||||
|
||||
// pub fn (mut hostsfile HostsFile) exists(domain string) bool {
|
||||
// for _, items in hostsfile.hosts {
|
||||
// for item in items {
|
||||
// for _, dom in item {
|
||||
// if dom == domain {
|
||||
// return true
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// return false
|
||||
// }
|
||||
108
lib/osal/net.v
Normal file
@@ -0,0 +1,108 @@
|
||||
module osal
|
||||
|
||||
import net
|
||||
import time
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub enum PingResult {
|
||||
ok
|
||||
timeout // timeout from ping
|
||||
unknownhost // means we don't know the hostname, it's a DNS issue
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct PingArgs {
|
||||
pub mut:
|
||||
address string @[required]
|
||||
count u8 = 1 // the ping is successful if it receives count replies from the other side
|
||||
timeout u16 = 1 // the time in which the other side should respond in seconds
|
||||
retry u8
|
||||
}
|
||||
|
||||
// if the timeout is reached the result will be .timeout
|
||||
// address is e.g. 8.8.8.8
|
||||
// ping means we check if the destination responds
|
||||
pub fn ping(args PingArgs) !PingResult {
|
||||
platform_ := platform()
|
||||
mut cmd := 'ping'
|
||||
if args.address.contains(':') {
|
||||
cmd = 'ping6'
|
||||
}
|
||||
if platform_ == .osx {
|
||||
cmd += ' -c ${args.count} -i ${args.timeout} ${args.address}'
|
||||
} else if platform_ == .ubuntu {
|
||||
cmd += ' -c ${args.count} -w ${args.timeout} ${args.address}'
|
||||
} else {
|
||||
return error('Unsupported platform for ping')
|
||||
}
|
||||
console.print_debug(cmd)
|
||||
_ := exec(cmd: cmd, retry: args.retry, timeout: 0, stdout: false) or {
|
||||
// println("ping failed.error.\n${err}")
|
||||
if err.code() == 9999 {
|
||||
return .timeout
|
||||
}
|
||||
if platform_ == .osx {
|
||||
return match err.code() {
|
||||
2 {
|
||||
.timeout
|
||||
}
|
||||
68 {
|
||||
.unknownhost
|
||||
}
|
||||
else {
|
||||
// println("${err} ${err.code()}")
|
||||
error("can't ping on osx (${err.code()})\n${err}")
|
||||
}
|
||||
}
|
||||
} else if platform_ == .ubuntu {
|
||||
return match err.code() {
|
||||
1 { .timeout }
|
||||
2 { .unknownhost }
|
||||
else { error("can't ping on ubuntu (${err.code()})\n${err}") }
|
||||
}
|
||||
} else {
|
||||
panic('bug, should never get here')
|
||||
}
|
||||
}
|
||||
return .ok
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct TcpPortTestArgs {
|
||||
pub mut:
|
||||
address string @[required] // 192.168.8.8
|
||||
port int = 22
|
||||
timeout u16 = 2000 // total time in milliseconds to keep on trying
|
||||
}
|
||||
|
||||
// test if a tcp port answers
|
||||
//```
|
||||
// address string //192.168.8.8
|
||||
// port int = 22
|
||||
// timeout u16 = 2000 // total time in milliseconds to keep on trying
|
||||
//```
|
||||
pub fn tcp_port_test(args TcpPortTestArgs) bool {
|
||||
start_time := time.now().unix_milli()
|
||||
mut run_time := 0.0
|
||||
for true {
|
||||
run_time = time.now().unix_milli()
|
||||
if run_time > start_time + args.timeout {
|
||||
return false
|
||||
}
|
||||
_ = net.dial_tcp('${args.address}:${args.port}') or {
|
||||
time.sleep(100 * time.millisecond)
|
||||
continue
|
||||
}
|
||||
// console.print_debug(socket)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Returns the ipaddress as known on the public side
|
||||
// is using resolver4.opendns.com
|
||||
pub fn ipaddr_pub_get() !string {
|
||||
cmd := 'dig @resolver4.opendns.com myip.opendns.com +short'
|
||||
ipaddr := exec(cmd: cmd)!
|
||||
return ipaddr.output.trim('\n').trim(' \n')
|
||||
}
|
||||
18
lib/osal/net_test.v
Normal file
@@ -0,0 +1,18 @@
|
||||
module osal
|
||||
|
||||
fn test_ipaddr_pub_get() ! {
|
||||
ipaddr := ipaddr_pub_get()!
|
||||
assert ipaddr != ''
|
||||
}
|
||||
|
||||
fn test_ping() ! {
|
||||
assert ping(address: '127.0.0.1', count: 1)! == .ok
|
||||
}
|
||||
|
||||
fn test_ping_timeout() ! {
|
||||
assert ping(address: '192.168.145.154', count: 5, timeout: 1)! == .timeout
|
||||
}
|
||||
|
||||
fn test_ping_unknownhost() ! {
|
||||
assert ping(address: '12.902.219.1', count: 1, timeout: 1)! == .unknownhost
|
||||
}
|
||||
28
lib/osal/notifier/notifier.v
Normal file
@@ -0,0 +1,28 @@
|
||||
module notifier
|
||||
|
||||
import os.notify
|
||||
import os
|
||||
import time
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub struct Notifier {
|
||||
pub mut:
|
||||
name string
|
||||
}
|
||||
|
||||
// TODO: its not working
|
||||
|
||||
pub fn new() !Notifier {
|
||||
mut n := notify.new()!
|
||||
mut f := os.open('/Users/despiegk1/code/github/freeflowuniverse/crystallib/osal/examples/download/download_example.v')!
|
||||
f.close()
|
||||
// how can we know the filedescriptors of what we need?
|
||||
fid := f.fd
|
||||
for i in 0 .. 1000000 {
|
||||
n.add(fid, .write, .edge_trigger)!
|
||||
events := n.wait(time.Duration(time.second * 100))
|
||||
console.print_debug(events)
|
||||
time.sleep(time.Duration(time.second * 1))
|
||||
}
|
||||
return Notifier{}
|
||||
}
|
||||
8
lib/osal/notifier/readme.md
Normal file
@@ -0,0 +1,8 @@
|
||||
|
||||
# Requirements
|
||||
|
||||
```bash
|
||||
brew install fswatch
|
||||
|
||||
fswatch -r ~/code/github/freeflowuniverse/crystallib.biz.bizmodel
|
||||
```
|
||||
126
lib/osal/osinstaller/diskmgmt.v
Normal file
@@ -0,0 +1,126 @@
|
||||
module osinstaller
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub fn (s ServerManager) raid_stop() !bool {
|
||||
if !os.exists('/proc/mdstat') {
|
||||
return false
|
||||
}
|
||||
|
||||
md := os.read_file('/proc/mdstat')!
|
||||
lines := md.split_into_lines()
|
||||
|
||||
for line in lines {
|
||||
if line.contains('active') {
|
||||
dev := line.split(' ')[0]
|
||||
console.print_debug('[+] stopping raid device: ${dev}')
|
||||
|
||||
r := os.execute('mdadm --stop /dev/${dev}')
|
||||
if r.exit_code != 0 {
|
||||
console.print_debug(r.output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
pub fn (s ServerManager) disks_list() ![]string {
|
||||
blocks := os.ls('/sys/class/block')!
|
||||
mut disks := []string{}
|
||||
|
||||
for block in blocks {
|
||||
if os.is_link('/sys/class/block/${block}/device') {
|
||||
// discard cdrom
|
||||
events := os.read_file('/sys/class/block/${block}/events')!
|
||||
if events.contains('eject') {
|
||||
continue
|
||||
}
|
||||
|
||||
// that should be good
|
||||
disks << block
|
||||
}
|
||||
}
|
||||
|
||||
return disks
|
||||
}
|
||||
|
||||
pub fn (s ServerManager) disk_erase(disk string) bool {
|
||||
// make it safe via wipefs
|
||||
r := os.execute('wipefs -a /dev/${disk}')
|
||||
if r.exit_code != 0 {
|
||||
console.print_debug(r.output)
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
fn (s ServerManager) disk_partitions(disk string) ![]string {
|
||||
mut files := os.ls('/sys/class/block/${disk}')!
|
||||
mut parts := []string{}
|
||||
|
||||
files.sort()
|
||||
for file in files {
|
||||
if file.starts_with(disk) {
|
||||
parts << file
|
||||
}
|
||||
}
|
||||
|
||||
return parts
|
||||
}
|
||||
|
||||
pub fn (s ServerManager) disk_main_layout(disk string) !map[string]string {
|
||||
s.execute('parted /dev/${disk} mklabel msdos')
|
||||
s.execute('parted -a optimal /dev/${disk} mkpart primary 0% 768MB')
|
||||
s.execute('parted -a optimal /dev/${disk} mkpart primary 768MB 100GB')
|
||||
s.execute('parted -a optimal /dev/${disk} mkpart primary linux-swap 100GB 104GB')
|
||||
s.execute('parted -a optimal /dev/${disk} mkpart primary 104GB 100%')
|
||||
s.execute('parted /dev/${disk} set 1 boot on')
|
||||
|
||||
s.execute('partprobe')
|
||||
|
||||
parts := s.disk_partitions(disk)!
|
||||
if parts.len < 4 {
|
||||
return error("partitions found doesn't match expected map")
|
||||
}
|
||||
|
||||
mut diskmap := map[string]string{}
|
||||
diskmap['/'] = parts[1]
|
||||
diskmap['/boot'] = parts[0]
|
||||
diskmap['swap'] = parts[2]
|
||||
diskmap['/disk1'] = parts[3]
|
||||
|
||||
boot := '/dev/' + parts[0]
|
||||
root := '/dev/' + parts[1]
|
||||
swap := '/dev/' + parts[2]
|
||||
more := '/dev/' + parts[3]
|
||||
|
||||
console.print_debug('[+] partition map:')
|
||||
console.print_debug('[+] / -> ${root} [ext2]')
|
||||
console.print_debug('[+] /boot -> ${boot} [ext4]')
|
||||
console.print_debug('[+] [swap] -> ${swap} [swap]')
|
||||
console.print_debug('[+] [extra] -> ${more} [btrfs]')
|
||||
|
||||
console.print_debug('[+] creating boot partition')
|
||||
s.execute('mkfs.ext2 ${boot}')
|
||||
|
||||
console.print_debug('[+] creating root partition')
|
||||
s.execute('mkfs.ext4 ${root}')
|
||||
|
||||
console.print_debug('[+] creating swap partition')
|
||||
s.execute('mkswap ${swap}')
|
||||
|
||||
console.print_debug('[+] creating storage partition')
|
||||
s.execute('mkfs.btrfs -f ${more}')
|
||||
|
||||
return diskmap
|
||||
}
|
||||
|
||||
pub fn (s ServerManager) disk_create_btrfs(disk string) !bool {
|
||||
console.print_debug('[+] creating btrfs on disk: /dev/${disk}')
|
||||
s.execute('mkfs.btrfs -f /dev/${disk}')
|
||||
|
||||
return true
|
||||
}
|
||||
24
lib/osal/osinstaller/factory.v
Normal file
@@ -0,0 +1,24 @@
|
||||
module osinstaller
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
// import json
|
||||
// import maxux.vssh
|
||||
|
||||
struct ServerManager {
|
||||
root string
|
||||
}
|
||||
|
||||
pub fn new() ServerManager {
|
||||
sm := ServerManager{}
|
||||
return sm
|
||||
}
|
||||
|
||||
fn (s ServerManager) execute(command string) bool {
|
||||
// console.print_debug(command)
|
||||
|
||||
r := os.execute(command)
|
||||
// console.print_debug(r)
|
||||
|
||||
return r.exit_code == 0
|
||||
}
|
||||
113
lib/osal/package.v
Normal file
@@ -0,0 +1,113 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
// update the package list
|
||||
pub fn package_refresh() ! {
|
||||
platform_ := platform()
|
||||
|
||||
if cmd_exists('nix-env') {
|
||||
// means nix package manager is installed
|
||||
// nothing to do
|
||||
return
|
||||
}
|
||||
if platform_ == .ubuntu {
|
||||
exec(cmd: 'apt-get update') or { return error('Could not update packages\nerror:\n${err}') }
|
||||
return
|
||||
} else if platform_ == .osx {
|
||||
exec(cmd: 'brew update') or { return error('Could not update packages\nerror:\n${err}') }
|
||||
return
|
||||
} else if platform_ == .alpine {
|
||||
exec(cmd: 'apk update') or { return error('Could not update packages\nerror:\n${err}') }
|
||||
return
|
||||
} else if platform_ == .arch {
|
||||
exec(cmd: 'pacman -Syu --noconfirm') or {
|
||||
return error('Could not update packages\nerror:\n${err}')
|
||||
}
|
||||
return
|
||||
}
|
||||
return error("Only ubuntu, alpine, arch and osx is supported for now. Found \"${platform_}\"")
|
||||
}
|
||||
|
||||
// install a package will use right commands per platform
|
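// Example (sketch): names can be comma or newline separated, e.g.
//
//	package_install('wget,curl')!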
||||
pub fn package_install(name_ string) ! {
|
||||
names := texttools.to_array(name_)
|
||||
|
||||
// if cmd_exists('nix-env') {
|
||||
// // means nix package manager is installed
|
||||
// names_list := names.join(' ')
|
||||
// console.print_header('package install: ${names_list}')
|
||||
// exec(cmd: 'nix-env --install ${names_list}') or {
|
||||
// return error('could not install package using nix:${names_list}\nerror:\n${err}')
|
||||
// }
|
||||
// return
|
||||
// }
|
||||
|
||||
name := names.join(' ')
|
||||
console.print_header('package install: ${name}')
|
||||
platform_ := platform()
|
||||
cpu := cputype()
|
||||
if platform_ == .osx {
|
||||
if cpu == .arm {
|
||||
exec(cmd: 'arch --arm64 brew install ${name}') or {
|
||||
return error('could not install package: ${name}\nerror:\n${err}')
|
||||
}
|
||||
} else {
|
||||
exec(cmd: 'brew install ${name}') or {
|
||||
return error('could not install package:${name}\nerror:\n${err}')
|
||||
}
|
||||
}
|
||||
} else if platform_ == .ubuntu {
|
||||
exec(
|
||||
cmd: '
|
||||
export TERM=xterm
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt install -y ${name} -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --allow-downgrades --allow-remove-essential --allow-change-held-packages
|
||||
'
|
||||
) or { return error('could not install package:${name}\nerror:\n${err}') }
|
||||
} else if platform_ == .alpine {
|
||||
exec(cmd: 'apk add ${name}') or {
|
||||
return error('could not install package:${name}\nerror:\n${err}')
|
||||
}
|
||||
} else if platform_ == .arch {
|
||||
exec(cmd: 'pacman --noconfirm -Su ${name}') or {
|
||||
return error('could not install package:${name}\nerror:\n${err}')
|
||||
}
|
||||
} else {
|
||||
return error('Only ubuntu, alpine, arch and osx are supported for now')
|
||||
}
|
||||
}
|
||||
|
||||
// Remove a package using the appropriate command for each platform
|
||||
pub fn package_remove(name_ string) ! {
|
||||
names := texttools.to_array(name_)
|
||||
name := names.join(' ')
|
||||
console.print_header('package remove: ${name}')
|
||||
platform_ := platform()
|
||||
cpu := cputype()
|
||||
|
||||
if platform_ == .osx {
|
||||
if cpu == .arm {
|
||||
exec(cmd: 'arch --arm64 brew uninstall ${name}', ignore_error: true)!
|
||||
} else {
|
||||
exec(cmd: 'brew uninstall ${name}', ignore_error: true)!
|
||||
}
|
||||
} else if platform_ == .ubuntu {
|
||||
exec(
|
||||
cmd: '
|
||||
export TERM=xterm
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt remove -y ${name} --allow-change-held-packages
|
||||
apt autoremove -y
|
||||
'
|
||||
ignore_error: true
|
||||
)!
|
||||
} else if platform_ == .alpine {
|
||||
exec(cmd: 'apk del ${name}', ignore_error: true)!
|
||||
} else if platform_ == .arch {
|
||||
exec(cmd: 'pacman --noconfirm -R ${name}', ignore_error: true)!
|
||||
} else {
|
||||
return error('Only ubuntu, alpine, arch and osx are supported for now')
|
||||
}
|
||||
}
|
||||
42
lib/osal/package_test.v
Normal file
@@ -0,0 +1,42 @@
|
||||
module osal
|
||||
|
||||
fn test_package_management() {
|
||||
platform_ := platform()
|
||||
|
||||
if platform_ == .osx {
|
||||
// Check if brew is installed
|
||||
if !cmd_exists('brew') {
|
||||
eprintln('WARNING: Homebrew is not installed. Please install it to run package management tests on OSX.')
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// First ensure wget is not installed
|
||||
package_remove('wget') or {
|
||||
assert true // Ignore error if package wasn't installed
|
||||
}
|
||||
|
||||
// Verify wget is not installed
|
||||
assert !cmd_exists('wget')
|
||||
|
||||
// Update package list
|
||||
package_refresh() or {
|
||||
assert false, 'Failed to refresh package list: ${err}'
|
||||
}
|
||||
|
||||
// Install wget
|
||||
package_install('wget') or {
|
||||
assert false, 'Failed to install wget: ${err}'
|
||||
}
|
||||
|
||||
// Verify wget is now installed
|
||||
assert cmd_exists('wget')
|
||||
|
||||
// Clean up - remove wget
|
||||
package_remove('wget') or {
|
||||
assert false, 'Failed to remove wget: ${err}'
|
||||
}
|
||||
|
||||
// Verify wget is removed
|
||||
assert !cmd_exists('wget')
|
||||
}
|
||||
148
lib/osal/platform.v
Normal file
@@ -0,0 +1,148 @@
|
||||
module osal
|
||||
|
||||
import os
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
// Returns the enum value that matches the provided string for PlatformType
|
||||
|
||||
pub fn platform_enum_from_string(platform string) PlatformType {
|
||||
return match platform.to_lower() {
|
||||
'osx' { .osx }
|
||||
'ubuntu' { .ubuntu }
|
||||
'alpine' { .alpine }
|
||||
'arch' { .arch }
|
||||
else { .unknown }
|
||||
}
|
||||
}
|
||||
|
||||
pub enum PlatformType {
|
||||
unknown
|
||||
osx
|
||||
ubuntu
|
||||
alpine
|
||||
arch
|
||||
suse
|
||||
}
|
||||
|
||||
// Returns the enum value that matches the provided string for CPUType
|
||||
pub fn cputype_enum_from_string(cputype_str string) CPUType {
|
||||
return match cputype_str.to_lower() {
|
||||
'intel' { .intel }
|
||||
'arm' { .arm }
|
||||
'intel32' { .intel32 }
|
||||
'arm32' { .arm32 }
|
||||
else { .unknown }
|
||||
}
|
||||
}
|
||||
|
||||
pub enum CPUType {
|
||||
unknown
|
||||
intel
|
||||
arm
|
||||
intel32
|
||||
arm32
|
||||
}
|
||||
|
||||
pub fn platform() PlatformType {
|
||||
mut logger := get_logger()
|
||||
mut platform_ := PlatformType.unknown
|
||||
platform_ = platform_enum_from_string(memdb_get('platformtype'))
|
||||
if platform_ != PlatformType.unknown {
|
||||
return platform_
|
||||
}
|
||||
if cmd_exists('sw_vers') {
|
||||
platform_ = PlatformType.osx
|
||||
} else if cmd_exists('apt-get') {
|
||||
platform_ = PlatformType.ubuntu
|
||||
} else if cmd_exists('apk') {
|
||||
platform_ = PlatformType.alpine
|
||||
} else if cmd_exists('pacman') {
|
||||
platform_ = PlatformType.arch
|
||||
} else {
|
||||
logger.error('Unknown platform')
|
||||
}
|
||||
if platform_ != PlatformType.unknown {
|
||||
memdb_set('platformtype', platform_.str())
|
||||
}
|
||||
return platform_
|
||||
}
|
||||
|
||||
pub fn cputype() CPUType {
|
||||
mut logger := get_logger()
|
||||
mut cputype_ := CPUType.unknown
|
||||
cputype_ = cputype_enum_from_string(memdb_get('cputype'))
|
||||
if cputype_ != CPUType.unknown {
|
||||
return cputype_
|
||||
}
|
||||
sys_info := execute_stdout('uname -m') or {
|
||||
logger.error('Failed to execute uname to get the cputype: ${err}')
|
||||
return CPUType.unknown
|
||||
}
|
||||
cputype_ = match sys_info.to_lower().trim_space() {
|
||||
'x86_64' {
|
||||
CPUType.intel
|
||||
}
|
||||
'arm64' {
|
||||
CPUType.arm
|
||||
}
|
||||
'aarch64' {
|
||||
CPUType.arm
|
||||
}
|
||||
// TODO 32 bit ones!
|
||||
else {
|
||||
logger.error('Unknown cpu type ${sys_info}')
|
||||
CPUType.unknown
|
||||
}
|
||||
}
|
||||
|
||||
if cputype_ != CPUType.unknown {
|
||||
memdb_set('cputype', cputype_.str())
|
||||
}
|
||||
return cputype_
|
||||
}
|
||||
|
||||
pub fn is_osx() bool {
|
||||
return platform() == .osx
|
||||
}
|
||||
|
||||
pub fn is_osx_arm() bool {
|
||||
return platform() == .osx && cputype() == .arm
|
||||
}
|
||||
|
||||
pub fn is_osx_intel() bool {
|
||||
return platform() == .osx && cputype() == .intel
|
||||
}
|
||||
|
||||
pub fn is_ubuntu() bool {
|
||||
return platform() == .ubuntu
|
||||
}
|
||||
|
||||
pub fn is_linux() bool {
|
||||
return platform() == .ubuntu || platform() == .arch || platform() == .suse
|
||||
|| platform() == .alpine
|
||||
}
|
||||
|
||||
pub fn is_linux_arm() bool {
|
||||
// console.print_debug("islinux:${is_linux()} cputype:${cputype()}")
|
||||
return is_linux() && cputype() == .arm
|
||||
}
|
||||
|
||||
pub fn is_linux_intel() bool {
|
||||
return is_linux() && cputype() == .intel
|
||||
}
|
||||
|
||||
pub fn hostname() !string {
|
||||
res := os.execute('hostname')
|
||||
if res.exit_code > 0 {
|
||||
return error("can't get hostname. Error.")
|
||||
}
|
||||
return res.output.trim_space()
|
||||
}
|
||||
|
||||
// e.g. systemd, bash, zinit
|
||||
pub fn initname() !string {
|
||||
res := os.execute('ps -p 1 -o comm=')
|
||||
if res.exit_code > 0 {
|
||||
return error("can't get process with pid 1. Error:\n${res.output}")
|
||||
}
|
||||
return res.output.trim_space()
|
||||
}
|
||||
9
lib/osal/platform_test.v
Normal file
@@ -0,0 +1,9 @@
|
||||
module osal
|
||||
|
||||
fn test_platform() {
|
||||
assert platform() != .unknown
|
||||
}
|
||||
|
||||
fn test_cputype() {
|
||||
assert cputype() != .unknown
|
||||
}
|
||||
230
lib/osal/ps_tool.v
Normal file
@@ -0,0 +1,230 @@
|
||||
module osal
|
||||
|
||||
import time
|
||||
import os
|
||||
import math
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
pub enum PMState {
|
||||
init
|
||||
ok
|
||||
old
|
||||
}
|
||||
|
||||
@[heap]
|
||||
pub struct ProcessMap {
|
||||
pub mut:
|
||||
processes []ProcessInfo
|
||||
lastscan time.Time
|
||||
state PMState
|
||||
pids []int
|
||||
}
|
||||
|
||||
@[heap]
|
||||
pub struct ProcessInfo {
|
||||
pub mut:
|
||||
cpu_perc f32
|
||||
mem_perc f32
|
||||
cmd string
|
||||
pid int
|
||||
ppid int // parentpid
|
||||
// resident memory
|
||||
rss int
|
||||
}
|
||||
|
||||
// scan the system and return a ProcessMap with all running processes
|
||||
// results are cached for a few seconds, see scan()
|
||||
pub fn processmap_get() !ProcessMap {
|
||||
mut pm := ProcessMap{}
|
||||
pm.scan()!
|
||||
return pm
|
||||
}
|
||||
|
||||
// get process info from 1 specific process
|
||||
// returns
|
||||
//```
|
||||
// pub struct ProcessInfo {
|
||||
// pub mut:
|
||||
// cpu_perc f32
|
||||
// mem_perc f32
|
||||
// cmd string
|
||||
// pid int
|
||||
// ppid int
|
||||
// //resident memory
|
||||
// rss int
|
||||
// }
|
||||
//```
|
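// Example (sketch, the pid is a placeholder):
//
//	pi := processinfo_get(1234)!
//	println(pi.cmd)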
||||
pub fn processinfo_get(pid int) !ProcessInfo {
|
||||
mut pm := processmap_get()!
|
||||
for pi in pm.processes {
|
||||
if pi.pid == pid {
|
||||
return pi
|
||||
}
|
||||
}
|
||||
return error('Cannot find process with pid: ${pid}, to get process info from.')
|
||||
}
|
||||
|
||||
pub fn processinfo_get_byname(name string) ![]ProcessInfo {
|
||||
mut pm := processmap_get()!
|
||||
mut res := []ProcessInfo{}
|
||||
for pi in pm.processes {
|
||||
// console.print_debug(pi.cmd)
|
||||
if pi.cmd.contains(name) {
|
||||
if pi.cmd.starts_with('sudo ') {
|
||||
continue
|
||||
}
|
||||
if pi.cmd.to_lower().starts_with('screen ') {
|
||||
continue
|
||||
}
|
||||
res << pi
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
pub fn process_exists_byname(name string) !bool {
|
||||
res := processinfo_get_byname(name)!
|
||||
return res.len > 0
|
||||
}
|
||||
|
||||
pub fn process_exists(pid int) bool {
|
||||
r := os.execute('kill -0 ${pid}')
|
||||
if r.exit_code > 0 {
|
||||
// return error('could not execute kill -0 ${pid}')
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// return the process and its children
|
||||
pub fn processinfo_with_children(pid int) !ProcessMap {
|
||||
mut pi := processinfo_get(pid)!
|
||||
mut res := processinfo_children(pid)!
|
||||
res.processes << pi
|
||||
return res
|
||||
}
|
||||
|
||||
// get all children of 1 process
|
||||
pub fn processinfo_children(pid int) !ProcessMap {
|
||||
mut pm := processmap_get()!
|
||||
mut res := []ProcessInfo{}
|
||||
pm.children_(mut res, pid)!
|
||||
return ProcessMap{
|
||||
processes: res
|
||||
lastscan: pm.lastscan
|
||||
state: pm.state
|
||||
}
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct ProcessKillArgs {
|
||||
pub mut:
|
||||
name string
|
||||
pid int
|
||||
}
|
||||
|
||||
// kill the process and all processes underneath it
|
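// Example (sketch): kill by name or by pid:
//
//	process_kill_recursive(name: 'myserver')!
//	process_kill_recursive(pid: 1234)!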
||||
pub fn process_kill_recursive(args ProcessKillArgs) ! {
|
||||
if args.name.len > 0 {
|
||||
for pi in processinfo_get_byname(args.name)! {
|
||||
process_kill_recursive(pid: pi.pid)!
|
||||
}
|
||||
return
|
||||
}
|
||||
if args.pid == 0 {
|
||||
return error('need to specify pid or name')
|
||||
}
|
||||
if process_exists(args.pid) {
|
||||
pm := processinfo_with_children(args.pid)!
|
||||
for p in pm.processes {
|
||||
os.execute('kill -9 ${p.pid}')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn (pm ProcessMap) children_(mut result []ProcessInfo, pid int) ! {
|
||||
// console.print_debug("children: $pid")
|
||||
for p in pm.processes {
|
||||
if p.ppid == pid {
|
||||
// console.print_debug("found parent: ${p}")
|
||||
if result.filter(it.pid == p.pid).len == 0 {
|
||||
result << p
|
||||
pm.children_(mut result, p.pid)! // find children of the one we found
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut p ProcessInfo) str() string {
|
||||
x := math.min(60, p.cmd.len)
|
||||
subst := p.cmd.substr(0, x)
|
||||
return 'pid:${p.pid:-7} parent:${p.ppid:-7} cmd:${subst}'
|
||||
}
|
||||
|
||||
fn (mut pm ProcessMap) str() string {
|
||||
mut out := ''
|
||||
for p in pm.processes {
|
||||
out += '${p}\n'
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
fn (mut pm ProcessMap) scan() ! {
|
||||
now := time.now().unix()
|
||||
// only scan if we didn't do in last 5 seconds
|
||||
if pm.lastscan.unix() > now - 5 {
|
||||
// means scan is ok
|
||||
if pm.state == PMState.ok {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
cmd := 'ps ax -o pid,ppid,stat,%cpu,%mem,rss,command'
|
||||
res := os.execute(cmd)
|
||||
|
||||
if res.exit_code > 0 {
|
||||
return error('Cannot get process info \n${cmd}')
|
||||
}
|
||||
|
||||
pm.processes = []ProcessInfo{}
|
||||
|
||||
// console.print_debug("DID SCAN")
|
||||
for line in res.output.split_into_lines() {
|
||||
if !line.contains('PPID') {
|
||||
mut fields := line.fields()
|
||||
if fields.len < 6 {
|
||||
// console.print_debug(res)
|
||||
// console.print_debug("SSS")
|
||||
// console.print_debug(line)
|
||||
// panic("ss")
|
||||
continue
|
||||
}
|
||||
mut pi := ProcessInfo{}
|
||||
pi.pid = fields[0].int()
|
||||
pi.ppid = fields[1].int()
|
||||
pi.cpu_perc = fields[3].f32()
|
||||
pi.mem_perc = fields[4].f32()
|
||||
pi.rss = fields[5].int()
|
||||
fields.delete_many(0, 6)
|
||||
pi.cmd = fields.join(' ')
|
||||
// console.print_debug(pi.cmd)
|
||||
if pi.pid !in pm.pids {
|
||||
pm.processes << pi
|
||||
pm.pids << pi.pid
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pm.lastscan = time.now()
|
||||
pm.state = PMState.ok
|
||||
|
||||
// console.print_debug(pm)
|
||||
}
|
||||
|
||||
pub fn whoami() !string {
|
||||
res := os.execute('whoami')
|
||||
if res.exit_code > 0 {
|
||||
return error('Could not do whoami\n${res}')
|
||||
}
|
||||
return res.output.trim_space()
|
||||
}
|
||||
200
lib/osal/readme.md
Normal file
@@ -0,0 +1,200 @@
|
||||
# Operating System Abstraction Layer (OSAL)
|
||||
|
||||
A comprehensive operating system abstraction layer for V that provides platform-independent system operations, process management, and network utilities.
|
||||
|
||||
## Features
|
||||
|
||||
- Platform detection and system information
|
||||
- Process execution and management
|
||||
- Network utilities (ping, TCP port testing)
|
||||
- Environment variable handling
|
||||
- File system operations
|
||||
- SSH key management
|
||||
- Profile path management
|
||||
|
||||
## Platform Detection
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.osal
|
||||
|
||||
// Get platform type
|
||||
platform := osal.platform()
|
||||
if platform == .osx {
|
||||
// macOS specific code
|
||||
}
|
||||
|
||||
// Platform-specific checks
|
||||
if osal.is_linux() {
|
||||
// Linux specific code
|
||||
}
|
||||
if osal.is_osx_arm() {
|
||||
// Apple Silicon specific code
|
||||
}
|
||||
|
||||
// CPU architecture
|
||||
cpu := osal.cputype()
|
||||
if cpu == .arm {
|
||||
// ARM specific code
|
||||
}
|
||||
|
||||
// System information
|
||||
hostname := osal.hostname()!
|
||||
init_system := osal.initname()! // e.g., systemd, bash, zinit
|
||||
```
|
||||
|
||||
## Process Execution
|
||||
|
||||
The module provides flexible process execution with extensive configuration options:
|
||||
|
||||
```v
|
||||
// Simple command execution
|
||||
job := osal.exec(cmd: 'ls -la')!
|
||||
println(job.output)
|
||||
|
||||
// Execute with error handling
|
||||
job := osal.exec(Command{
|
||||
cmd: 'complex_command'
|
||||
timeout: 3600 // timeout in seconds
|
||||
retry: 3 // retry count
|
||||
work_folder: '/tmp' // working directory
|
||||
environment: { // environment variables
|
||||
'PATH': '/usr/local/bin'
|
||||
}
|
||||
stdout: true // show output
|
||||
raise_error: true // raise error on failure
|
||||
})!
|
||||
|
||||
// Silent execution
|
||||
output := osal.execute_silent('command')!
|
||||
|
||||
// Interactive shell execution
|
||||
osal.execute_interactive('bash command')!
|
||||
|
||||
// Debug mode execution
|
||||
output := osal.execute_debug('command')!
|
||||
```
|
||||
|
||||
### Job Status and Error Handling
|
||||
|
||||
```v
|
||||
// Check job status
|
||||
if job.status == .done {
|
||||
println('Success!')
|
||||
} else if job.status == .error_timeout {
|
||||
println('Command timed out')
|
||||
}
|
||||
|
||||
// Error handling with specific error types
|
||||
job := osal.exec(cmd: 'invalid_command') or {
|
||||
match err.error_type {
|
||||
.exec { println('Execution error') }
|
||||
.timeout { println('Command timed out') }
|
||||
.args { println('Invalid arguments') }
|
||||
else { println(err) }
|
||||
}
|
||||
return
|
||||
}
|
||||
```
|
||||
|
||||
## Network Utilities
|
||||
|
||||
### Ping
|
||||
|
||||
```v
|
||||
// Simple ping
|
||||
result := osal.ping(address: '8.8.8.8')!
|
||||
assert result == .ok
|
||||
|
||||
// Advanced ping configuration
|
||||
result := osal.ping(PingArgs{
|
||||
address: '8.8.8.8'
|
||||
count: 3 // number of pings
|
||||
timeout: 2 // timeout in seconds
|
||||
retry: 1 // retry attempts
|
||||
})!
|
||||
|
||||
match result {
|
||||
.ok { println('Host is reachable') }
|
||||
.timeout { println('Host timed out') }
|
||||
.unknownhost { println('Unknown host') }
|
||||
}
|
||||
```
|
||||
|
||||
### TCP Port Testing
|
||||
|
||||
```v
|
||||
// Test if port is open
|
||||
is_open := osal.tcp_port_test(TcpPortTestArgs{
|
||||
address: '192.168.1.1'
|
||||
port: 22
|
||||
timeout: 2000 // milliseconds
|
||||
})
|
||||
|
||||
if is_open {
|
||||
println('Port is open')
|
||||
}
|
||||
|
||||
// Get public IP address
|
||||
pub_ip := osal.ipaddr_pub_get()!
|
||||
println('Public IP: ${pub_ip}')
|
||||
```
|
||||
|
||||
## Profile Management
|
||||
|
||||
Manage system PATH and other profile settings:
|
||||
|
||||
```v
|
||||
// Add/remove paths from system PATH
|
||||
osal.profile_path_add_remove(
|
||||
paths2delete: 'go/bin',
|
||||
paths2add: '~/hero/bin,~/usr/local/bin'
|
||||
)!
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
```v
|
||||
// Get environment variable
|
||||
value := osal.env_get('PATH')!
|
||||
|
||||
// Set environment variable
|
||||
osal.env_set('MY_VAR', 'value')!
|
||||
|
||||
// Check if environment variable exists
|
||||
exists := osal.env_exists('MY_VAR')
|
||||
```
|
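Beyond the basics above, `env.v` also provides defaults, bulk updates and loading from `.env` style files. A minimal sketch (variable names, values and the file path are made up, and trailing-arg syntax assumes `EnvSetAll` is a params struct like the other arg structs in this module):

```v
// Fallback value when the variable is not set
port := osal.env_get_default('MY_PORT', '8080')

// Load KEY=VALUE pairs from an .env style file (lines starting with # are skipped)
osal.load_env_file('/tmp/myapp.env')!

// Set several variables at once, overwriting existing ones
osal.env_set_all(
	env: {
		'VAR_A': 'a'
		'VAR_B': 'b'
	}
	overwrite_if_exists: true
)
```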
||||
|
||||
## Notes
|
||||
|
||||
- All commands are executed from temporary scripts in `/tmp/execscripts`
|
||||
- Failed script executions are preserved for debugging
|
||||
- Successful script executions are automatically cleaned up
|
||||
- Platform-specific behavior is automatically handled
|
||||
- Timeout and retry mechanisms are available for robust execution
|
||||
- Environment variables and working directories can be specified per command
|
||||
- Interactive and non-interactive modes are supported
|
||||
- Debug mode provides additional execution information
|
||||
|
||||
## Error Handling
|
||||
|
||||
The module provides detailed error information:
|
||||
|
||||
- Exit codes
|
||||
- Standard output and error streams
|
||||
- Execution time and duration
|
||||
- Process status
|
||||
- Retry counts
|
||||
- Error types (execution, timeout, arguments)
|
||||
|
||||
## Platform Support
|
||||
|
||||
- macOS (Intel and ARM)
|
||||
- Ubuntu
|
||||
- Alpine Linux
|
||||
- Arch Linux
|
||||
- SUSE (partial)
|
||||
|
||||
CPU architectures:
|
||||
- Intel (x86_64)
|
||||
- ARM (arm64/aarch64)
|
||||
- 32-bit variants (intel32, arm32)
|
||||
6
lib/osal/rsync/readme.md
Normal file
@@ -0,0 +1,6 @@
|
||||
To test:
|
||||
|
||||
```bash
|
||||
echo 'mypasswd' > /tmp/passwd
|
||||
rsync -avz --password-file=/tmp/passwd /local/path/ rsync://authorizeduser@yourserver/private
|
||||
```
|
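The module also exposes an `rsync` wrapper that can be called from V. A minimal sketch, assuming the module path `freeflowuniverse.herolib.osal.rsync`; the paths below are placeholders:

```v
import freeflowuniverse.herolib.osal.rsync

// sync a local folder to another local folder; set delete: true to mirror
rsync.rsync(
	source: '/local/path/'
	dest: '/backup/path/'
	delete: false
)!
```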
||||
61
lib/osal/rsync/rsync.v
Normal file
@@ -0,0 +1,61 @@
|
||||
module rsync
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.osal
|
||||
|
||||
@[params]
|
||||
pub struct RsyncArgs {
|
||||
pub mut:
|
||||
source string
|
||||
dest string
|
||||
ipaddr_src string // e.g. root@192.168.5.5:33 (can be without root@ or :port)
|
||||
ipaddr_dst string
|
||||
delete bool // do we want to delete the destination
|
||||
ignore []string // arguments to ignore e.g. ['*.pyc','*.bak']
|
||||
ignore_default bool = true // if set will ignore a common set
|
||||
stdout bool = true
|
||||
fast_rsync bool
|
||||
sshkey string
|
||||
}
|
||||
|
||||
// flexible tool to sync files from source to destination, even supports ssh .
|
||||
// args: .
|
||||
// ```
|
||||
// source string
|
||||
// dest string
|
||||
// delete bool //do we want to delete the destination
|
||||
// ipaddr_src string //e.g. root@192.168.5.5:33 (can be without root@ or :port)
|
||||
// ipaddr_dst string //can only use src or dst, not both
|
||||
// ignore []string //arguments to ignore
|
||||
// ignore_default bool = true //if set will ignore a common set
|
||||
// stdout bool = true
|
||||
// ```
|
||||
// .
|
||||
// see https://github.com/freeflowuniverse/crystallib/blob/development/examples/pathlib.rsync/rsync_example.v
|
||||
pub fn rsync(args_ RsyncArgs) ! {
|
||||
mut args := args_
|
||||
if args.ipaddr_src.len == 0 {
|
||||
pathlib.get(args.source)
|
||||
}
|
||||
args2 := pathlib.RsyncArgs{
|
||||
source: args.source
|
||||
dest: args.dest
|
||||
ipaddr_src: args.ipaddr_src
|
||||
ipaddr_dst: args.ipaddr_dst
|
||||
delete: args.delete
|
||||
ignore: args.ignore
|
||||
ignore_default: args.ignore_default
|
||||
fast_rsync: args.fast_rsync
|
||||
sshkey: args.sshkey
|
||||
}
|
||||
|
||||
// TODO: is only for ssh right now, we prob need support for a real ssh server as well
|
||||
cmdoptions := pathlib.rsync_cmd_options(args2)!
|
||||
cmd := 'rsync ${cmdoptions}'
|
||||
$if debug {
|
||||
console.print_debug('rsync command (osal):\n${cmd}')
|
||||
}
|
||||
// console.print_debug(cmd)
|
||||
osal.exec(cmd: cmd, stdout: args_.stdout)!
|
||||
}
|
||||
60
lib/osal/rsync/rsyncd.v
Normal file
@@ -0,0 +1,60 @@
|
||||
module rsync
|
||||
|
||||
pub struct RsyncD {
|
||||
pub mut:
|
||||
configpath string = '/etc/rsyncd.conf'
|
||||
sites []RsyncSite
|
||||
usermanager UserManager
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct RsyncSite {
|
||||
pub mut:
|
||||
name string
|
||||
path string
|
||||
comment string
|
||||
readonly bool
|
||||
list bool
|
||||
auth string
|
||||
secrets string
|
||||
}
|
||||
|
||||
pub fn rsyncd() !RsyncD {
|
||||
mut um := usermanager()!
|
||||
mut self := RsyncD{
|
||||
usermanager: um
|
||||
}
|
||||
self.load()!
|
||||
return self
|
||||
}
|
||||
|
||||
// add site to the rsyncd config
|
||||
pub fn (mut self RsyncD) site_add(args_ RsyncSite) ! {
|
||||
_ := args_
|
||||
// self.sites[args.name]=RsyncSite{name:args.name,}
|
||||
}
|
||||
|
||||
// get all info from existing config file, populate the sites
|
||||
pub fn (mut self RsyncD) load() ! {
|
||||
// TODO: check rsync is installed if not use osal package manager to install
|
||||
// TODO: populate sites in the struct
|
||||
}
|
||||
|
||||
pub fn (mut self RsyncD) generate() ! {
|
||||
// TODO: generate a new config file (based on previous info on disk as well as new one)
|
||||
// TODO: make sure we can add more than 1 user to the user manager
|
||||
|
||||
self.reload()!
|
||||
}
|
||||
|
||||
fn (mut self RsyncD) reload() ! {
|
||||
_ := '
|
||||
chmod 600 /etc/rsyncd.secrets
|
||||
systemctl enable rsync
|
||||
systemctl start rsync
|
||||
'
|
||||
|
||||
// TODO: execute, maybe we should check its on linux and there is a systemd active, also prob we need to see if we need to start or restart
|
||||
|
||||
// TODO: we should do a test using rsync
|
||||
}
|
||||
27
lib/osal/rsync/templates/rsyncd.conf
Normal file
@@ -0,0 +1,27 @@
|
||||
# /etc/rsyncd.conf
|
||||
#TODO: make template generate to fill in the different sites
|
||||
# Global settings
|
||||
uid = nobody
|
||||
gid = nogroup
|
||||
use chroot = yes
|
||||
max connections = 50
|
||||
log file = /var/log/rsyncd.log
|
||||
pid file = /var/run/rsyncd.pid
|
||||
lock file = /var/run/rsync.lock
|
||||
|
||||
#needs to be loop over the sites, make sure dirs exist
|
||||
[public]
|
||||
path = ${dirpath}
|
||||
comment = Public Read-Only Share
|
||||
read only = yes
|
||||
list = yes
|
||||
auth users =
|
||||
secrets file = /etc/rsyncd.secrets
|
||||
|
||||
[private]
|
||||
path = ${dirpath}
|
||||
comment = Private Read-Write Share
|
||||
read only = no
|
||||
list = yes
|
||||
auth users = authorizeduser
|
||||
secrets file = /etc/rsyncd.secrets
|
||||
2
lib/osal/rsync/templates/rsyncd.secrets
Normal file
@@ -0,0 +1,2 @@
|
||||
#TODO: make template work to fill in the items with loop
|
||||
authorizeduser:yourpassword
|
||||
59
lib/osal/rsync/usermgmt.v
Normal file
@@ -0,0 +1,59 @@
|
||||
module rsync
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
pub struct UserManager {
|
||||
pub mut:
|
||||
configpath string = '/etc/rsyncd.secrets'
|
||||
users map[string]User
|
||||
}
|
||||
|
||||
pub struct User {
|
||||
pub mut:
|
||||
name string
|
||||
passwd string
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct UserArgs {
|
||||
pub mut:
|
||||
name string
|
||||
passwd string
|
||||
}
|
||||
|
||||
pub fn (mut self UserManager) user_add(args_ UserArgs) ! {
|
||||
mut args := args_
|
||||
self.users[args.name] = User{
|
||||
name: args.name
|
||||
passwd: args.passwd
|
||||
}
|
||||
}
|
||||
|
||||
pub fn usermanager() !UserManager {
|
||||
mut self := UserManager{}
|
||||
self.load()!
|
||||
return self
|
||||
}
|
||||
|
||||
pub fn (mut self UserManager) load(args UserArgs) ! {
|
||||
mut p := pathlib.get_file(path: self.configpath, create: true)!
|
||||
content := p.read()!
|
||||
for line in content.split('\n') {
|
||||
if line.trim_space() == '' {
|
||||
continue
|
||||
}
|
||||
if line.contains(':') {
|
||||
items := line.split(':')
|
||||
if items.len != 2 {
|
||||
return error('syntax error in ${self.configpath}.\n${line}')
|
||||
}
|
||||
self.user_add(name: items[0], passwd: items[1])!
|
||||
} else {
|
||||
return error('syntax error in ${self.configpath}.\n${line}')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// generate the secrets config file
|
||||
pub fn (mut self UserManager) generate() ! {
|
||||
}
|
||||
177
lib/osal/screen/factory.v
Normal file
@@ -0,0 +1,177 @@
|
||||
module screen
|
||||
|
||||
// import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
// import freeflowuniverse.herolib.screen
|
||||
import os
|
||||
import time
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
@[heap]
|
||||
pub struct ScreensFactory {
|
||||
pub mut:
|
||||
screens []Screen
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct ScreensNewArgs {
|
||||
pub:
|
||||
reset bool
|
||||
}
|
||||
|
||||
// return screen instance
|
||||
pub fn new(args ScreensNewArgs) !ScreensFactory {
|
||||
mut t := ScreensFactory{}
|
||||
t.scan()!
|
||||
if args.reset {
|
||||
t.reset()!
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
pub fn init_screen_object(item_ map[string]string) Screen {
|
||||
mut item := Screen{}
|
||||
state_item := item_['state'] or { panic('bug') }
|
||||
item.state = match state_item.trim('() ').to_lower() {
|
||||
'detached' { .detached }
|
||||
else { .unknown }
|
||||
}
|
||||
pre := item_['pre'] or { panic('bug') }
|
||||
item.pid = pre.all_before('.').trim_space().int()
|
||||
item.name = pre.all_after('.').trim_space()
|
||||
return item
|
||||
}
|
||||
|
||||
// scans the running screen sessions and populates the object
|
||||
pub fn (mut self ScreensFactory) scan() ! {
|
||||
self.screens = []Screen{}
|
||||
os.execute('screen -wipe > /dev/null 2>&1') // make sure its all clean
|
||||
res := os.execute('screen -ls')
|
||||
if res.exit_code > 1 {
|
||||
return error('could not find screen or other error, make sure screen is installed.\n${res.output}')
|
||||
}
|
||||
if res.output.contains('No Sockets found') {
|
||||
return
|
||||
}
|
||||
// there is output to parse
|
||||
|
||||
res1 := texttools.remove_empty_lines(res.output)
|
||||
.split_into_lines()
|
||||
.filter(it.starts_with(' ') || it.starts_with('\t'))
|
||||
.join_lines()
|
||||
mut res2 := texttools.to_list_map('pre,state', res1, '').map(init_screen_object(it))
|
||||
for mut item in res2 {
|
||||
if self.exists(item.name) {
|
||||
return error('duplicate screen with name: ${item.name}')
|
||||
}
|
||||
self.screens << item
|
||||
}
|
||||
// console.print_debug(self.str())
|
||||
}
|
||||
|
||||
pub struct ScreenAddArgs {
|
||||
pub mut:
|
||||
name string @[required]
|
||||
cmd string
|
||||
reset bool
|
||||
start bool = true
|
||||
attach bool
|
||||
}
|
||||
|
||||
// add a screen session; creates it and optionally starts/attaches it
|
||||
pub fn (mut self ScreensFactory) add(args_ ScreenAddArgs) !Screen {
|
||||
mut args := args_
|
||||
if args.cmd == '' {
|
||||
args.cmd = '/bin/bash'
|
||||
}
|
||||
if args.name.len < 3 {
|
||||
return error('name needs to be at least 3 chars.')
|
||||
}
|
||||
if self.exists(args.name) {
|
||||
if args.reset {
|
||||
self.kill(args.name)!
|
||||
} else {
|
||||
return self.get(args.name)!
|
||||
}
|
||||
}
|
||||
self.screens << Screen{
|
||||
name: args.name
|
||||
cmd: args.cmd
|
||||
}
|
||||
if args.start {
|
||||
self.start(args.name)!
|
||||
}
|
||||
mut myscreen := self.get(args.name) or {
|
||||
return error('couldnt start screen with name ${args.name}, was not found afterwards.\ncmd:${args.cmd}\nScreens found.\n${self.str()}')
|
||||
}
|
||||
|
||||
if args.attach {
|
||||
myscreen.attach()!
|
||||
}
|
||||
return myscreen
|
||||
}
|
||||
|
||||
// check whether a screen session with the given name exists
|
||||
pub fn (mut self ScreensFactory) exists(name string) bool {
|
||||
for mut screen in self.screens {
|
||||
if screen.name == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
pub fn (mut self ScreensFactory) get(name string) !Screen {
|
||||
for mut screen in self.screens {
|
||||
if screen.name == name {
|
||||
return screen
|
||||
}
|
||||
}
|
||||
// print_backtrace()
|
||||
return error('couldnt find screen with name ${name}\nScreens found.\n${self.str()}')
|
||||
}
|
||||
|
||||
pub fn (mut self ScreensFactory) start(name string) ! {
|
||||
mut s := self.get(name) or {
|
||||
return error("can't start screen with name:${name}, couldn't find.\nScreens found.\n${self.str()}")
|
||||
}
|
||||
s.start_()!
|
||||
for {
|
||||
self.scan()!
|
||||
mut s2 := self.get(name) or {
|
||||
return error('couldnt start screen with name ${name}, was not found in screen scan.\ncmd:\n${s.cmd}\nScreens found.\n${self.str()}')
|
||||
}
|
||||
if s2.pid > 0 {
|
||||
return
|
||||
}
|
||||
console.print_debug(s2.str())
|
||||
time.sleep(100 * time.millisecond) // wait 0.1 sec before rescanning
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut self ScreensFactory) kill(name string) ! {
|
||||
if self.exists(name) {
|
||||
mut s := self.get(name) or { return }
|
||||
s.kill_()!
|
||||
}
|
||||
self.scan()!
|
||||
}
|
||||
|
||||
// kill all screen sessions and rescan
|
||||
pub fn (mut self ScreensFactory) reset() ! {
|
||||
for mut screen in self.screens {
|
||||
screen.kill_()!
|
||||
}
|
||||
self.scan()!
|
||||
}
|
||||
|
||||
pub fn (mut self ScreensFactory) str() string {
|
||||
if self.screens.len == 0 {
|
||||
return 'No screens found.'
|
||||
}
|
||||
mut out := '# Screens\n'
|
||||
for s in self.screens {
|
||||
out += '${s}\n'
|
||||
}
|
||||
return out
|
||||
}
|
||||
13
lib/osal/screen/readme.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# screen
|
||||
|
||||
```bash
|
||||
#to see sessions which have been created
|
||||
|
||||
screen -ls
|
||||
|
||||
There is a screen on:
|
||||
3230.test (Detached)
|
||||
|
||||
#now to attach to this screen
|
||||
screen -r test
|
||||
```
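
A sketch of doing the same through this module, based on the factory and test code below (the import path is assumed from the file location):

```v
import freeflowuniverse.herolib.osal.screen

mut factory := screen.new(reset: false)!
mut s := factory.add(name: 'testservice', cmd: '/bin/bash')!
s.cmd_send('echo hello from screen')!
// s.attach()! // interactive, equivalent to `screen -r`
factory.kill('testservice')!
```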
|
||||
142
lib/osal/screen/screen.v
Normal file
@@ -0,0 +1,142 @@
|
||||
module screen
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.osal
|
||||
import os
|
||||
import time
|
||||
|
||||
@[heap]
|
||||
struct Screen {
|
||||
mut:
|
||||
cmd string
|
||||
name string
|
||||
pid int
|
||||
state ScreenState
|
||||
// factory ?&ScreensFactory @[skip; str: skip]
|
||||
}
|
||||
|
||||
enum ScreenState {
|
||||
unknown
|
||||
detached
|
||||
}
|
||||
|
||||
// checks whether screen server is running
|
||||
pub fn (mut t Screen) is_running() !bool {
|
||||
panic('implement')
|
||||
// res := osal.exec(cmd: 'screen info', stdout: false, name: 'screen_info', raise_error: false) or {
|
||||
// panic('bug')
|
||||
// }
|
||||
// if res.error.contains('no server running') {
|
||||
// // console.print_debug(" TMUX NOT RUNNING")
|
||||
// return false
|
||||
// }
|
||||
// if res.error.contains('no current client') {
|
||||
// return true
|
||||
// }
|
||||
// if res.exit_code > 0 {
|
||||
// return error('could not execute screen info.\n${res}')
|
||||
// }
|
||||
return true
|
||||
}
|
||||
|
||||
pub enum ScreenStatus {
|
||||
unknown
|
||||
active
|
||||
inactive
|
||||
}
|
||||
|
||||
// Method to check the status of a screen process
|
||||
pub fn (self Screen) status() !ScreenStatus {
|
||||
panic('implement')
|
||||
// // Command to list screen sessions
|
||||
// cmd := 'screen -ls'
|
||||
// response := osal.execute_silent(cmd)!
|
||||
|
||||
// // Check if the screen session exists
|
||||
// if !response.contains(self.name) {
|
||||
// return .inactive
|
||||
// }
|
||||
|
||||
// // Command to send a dummy command to the screen session and check response
|
||||
// cmd_check := 'screen -S ${self.name} -X eval "stuff \\"\\003\\"; sleep 0.1; stuff \\"ps\\n\\""'
|
||||
// osal.execute_silent(cmd_check)!
|
||||
|
||||
// // Check if the process is running in the screen session
|
||||
// cmd_ps := 'screen -S ${self.name} -X hardcopy -h /tmp/screen_output; cat /tmp/screen_output | grep "${self.name}"'
|
||||
// ps_response := osal.execute_silent(cmd_ps)!
|
||||
|
||||
// return parse_screen_process_status(ps_response)
|
||||
}
|
||||
|
||||
// Function to parse screen process status output
|
||||
fn parse_screen_process_status(output string) ScreenStatus {
|
||||
lines := output.split_into_lines()
|
||||
for line in lines {
|
||||
if line.contains('SCREEN') || line.contains('PID') {
|
||||
return .active
|
||||
}
|
||||
}
|
||||
return .inactive
|
||||
}
|
||||
|
||||
fn (mut self Screen) kill_() ! {
|
||||
// console.print_debug('kill screen: ${self}')
|
||||
if self.pid == 0 || self.pid < 5 {
|
||||
return error("pid was <5 for ${self}, can't kill")
|
||||
}
|
||||
osal.process_kill_recursive(pid: self.pid)!
|
||||
res := os.execute('export TERM=xterm-color && screen -X -S ${self.name} kill > /dev/null 2>&1')
|
||||
if res.exit_code > 1 {
|
||||
return error('could not kill a screen.\n${res.output}')
|
||||
}
|
||||
time.sleep(100 * time.millisecond) // 0.1 sec wait
|
||||
os.execute('screen -wipe > /dev/null 2>&1')
|
||||
// self.scan()!
|
||||
}
|
||||
|
||||
// fn (mut self Screen) scan() ! {
|
||||
// mut f:=self.factory or {panic("bug, no factory attached to screen.")}
|
||||
// f.scan(false)!
|
||||
// }
|
||||
|
||||
pub fn (mut self Screen) attach() ! {
|
||||
cmd := 'screen -r ${self.pid}.${self.name}'
|
||||
osal.execute_interactive(cmd)!
|
||||
}
|
||||
|
||||
pub fn (mut self Screen) cmd_send(cmd string) ! {
|
||||
mut cmd2 := "screen -S ${self.name} -p 0 -X stuff \"${cmd} \n\" "
|
||||
if osal.is_osx() {
|
||||
cmd2 = "screen -S ${self.name} -p 0 -X stuff \"${cmd}\"\$'\n' "
|
||||
}
|
||||
res := os.execute(cmd2)
|
||||
if res.exit_code > 1 {
|
||||
return error('could not send screen command.\n${cmd2}\n${res.output}')
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut self Screen) str() string {
|
||||
green := console.color_fg(.green)
|
||||
yellow := console.color_fg(.yellow)
|
||||
reset := console.reset
|
||||
return ' - screen:${green}${self.name:-20}${reset} pid:${yellow}${self.pid:-10}${reset} state:${green}${self.state}${reset}'
|
||||
}
|
||||
|
||||
fn (mut self Screen) start_() ! {
|
||||
if self.pid != 0 {
|
||||
return
|
||||
}
|
||||
if self.name.len == 0 {
|
||||
return error('screen name needs to exist.')
|
||||
}
|
||||
if self.cmd == '' {
|
||||
self.cmd = '/bin/bash'
|
||||
}
|
||||
cmd := 'export TERM=xterm-color && screen -dmS ${self.name} ${self.cmd}'
|
||||
// console.print_debug(" startcmd:'${cmd}'")
|
||||
res := os.execute(cmd)
|
||||
// console.print_debug(res)
|
||||
if res.exit_code > 1 {
|
||||
return error('could not find screen or other error, make sure screen is installed.\n${res.output}')
|
||||
}
|
||||
}
|
||||
17
lib/osal/screen/screen_test.v
Normal file
@@ -0,0 +1,17 @@
|
||||
module screen
|
||||
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.osal
|
||||
import os
|
||||
import time
|
||||
|
||||
pub fn testsuite_begin() ! {
|
||||
mut screen_factory := new(reset: true)!
|
||||
}
|
||||
|
||||
pub fn test_screen_status() ! {
|
||||
mut screen_factory := new()!
|
||||
mut screen := screen_factory.add(name: 'testservice', cmd: 'redis-server')!
|
||||
status := screen.status()!
|
||||
// assert status == .active
|
||||
}
|
||||
8
lib/osal/sleep.v
Normal file
@@ -0,0 +1,8 @@
|
||||
module osal
|
||||
|
||||
import time
|
||||
|
||||
// sleep in seconds
|
||||
pub fn sleep(duration int) {
|
||||
time.sleep(time.second * duration)
|
||||
}
|
||||
86
lib/osal/ssh.v
Normal file
@@ -0,0 +1,86 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
@[params]
|
||||
pub struct SSHConfig {
|
||||
pub:
|
||||
directory string = os.join_path(os.home_dir(), '.ssh')
|
||||
}
|
||||
|
||||
// Returns a specific SSH key with the given name from the default SSH directory (~/.ssh)
|
||||
pub fn get_ssh_key(key_name string, config SSHConfig) ?SSHKey {
|
||||
mut ssh_dir := pathlib.get_dir(path: config.directory) or { return none }
|
||||
|
||||
list := ssh_dir.list(files_only: true) or { return none }
|
||||
for file in list.paths {
|
||||
if file.name() == key_name {
|
||||
return SSHKey{
|
||||
name: file.name()
|
||||
directory: ssh_dir.path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return none
|
||||
}
|
||||
|
||||
// Lists SSH keys in the default SSH directory (~/.ssh) and returns an array of SSHKey structs
|
||||
fn list_ssh_keys(config SSHConfig) ![]SSHKey {
|
||||
mut ssh_dir := pathlib.get_dir(path: config.directory) or {
|
||||
return error('Error getting ssh directory: ${err}')
|
||||
}
|
||||
|
||||
mut keys := []SSHKey{}
|
||||
list := ssh_dir.list(files_only: true) or {
|
||||
return error('Failed to list files in SSH directory')
|
||||
}
|
||||
|
||||
for file in list.paths {
|
||||
if file.extension() == 'pub' || file.name().starts_with('id_') {
|
||||
keys << SSHKey{
|
||||
name: file.name()
|
||||
directory: ssh_dir.path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return keys
|
||||
}
|
||||
|
||||
// Creates a new SSH key pair to the specified directory
|
||||
pub fn new_ssh_key(key_name string, config SSHConfig) !SSHKey {
|
||||
ssh_dir := pathlib.get_dir(
|
||||
path: config.directory
|
||||
create: true
|
||||
) or { return error('Error getting SSH directory: ${err}') }
|
||||
|
||||
// Paths for the private and public keys
|
||||
priv_key_path := os.join_path(ssh_dir.path, key_name)
|
||||
pub_key_path := '${priv_key_path}.pub'
|
||||
|
||||
// Check if the key already exists
|
||||
if os.exists(priv_key_path) || os.exists(pub_key_path) {
|
||||
return error("Key pair already exists with the name '${key_name}'")
|
||||
}
|
||||
|
||||
panic('implement ssh-keygen logic')
|
||||
// Generate a random private key (for demonstration purposes)
|
||||
// Replace this with actual key generation logic (e.g., calling `ssh-keygen` or similar)
|
||||
// private_key_content := '-----BEGIN PRIVATE KEY-----\n${rand.string(64)}\n-----END PRIVATE KEY-----'
|
||||
// public_key_content := 'ssh-rsa ${rand.string(64)} user@host'
|
||||
|
||||
// Save the keys to their respective files
|
||||
// os.write_file(priv_key_path, private_key_content) or {
|
||||
// return error("Failed to write private key: ${err}")
|
||||
// }
|
||||
// os.write_file(pub_key_path, public_key_content) or {
|
||||
// return error("Failed to write public key: ${err}")
|
||||
// }
|
||||
|
||||
return SSHKey{
|
||||
name: key_name
|
||||
directory: ssh_dir.path
|
||||
}
|
||||
}
|
||||
41
lib/osal/ssh_key.v
Normal file
@@ -0,0 +1,41 @@
|
||||
module osal
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import os
|
||||
|
||||
@[noinit]
|
||||
pub struct SSHKey {
|
||||
pub:
|
||||
name string
|
||||
directory string
|
||||
}
|
||||
|
||||
// returns the public ssh key's path of the keypair
|
||||
pub fn (key SSHKey) public_key_path() !pathlib.Path {
|
||||
path_str := os.join_path(key.directory, '${key.name}.pub')
|
||||
return pathlib.get_file(path: path_str) or {
|
||||
return error('Failed to get public key path: ${err}')
|
||||
}
|
||||
}
|
||||
|
||||
// returns the private ssh key's path of the keypair
|
||||
pub fn (key SSHKey) private_key_path() !pathlib.Path {
|
||||
path_str := os.join_path(key.directory, '${key.name}')
|
||||
return pathlib.get_file(path: path_str) or {
|
||||
return error('Failed to get private key path: ${err}')
|
||||
}
|
||||
}
|
||||
|
||||
// returns the public ssh key of the keypair
|
||||
pub fn (key SSHKey) public_key() !string {
|
||||
mut path := key.public_key_path()!
|
||||
content := path.read()!
|
||||
return content
|
||||
}
|
||||
|
||||
// returns the private ssh key of the keypair
|
||||
pub fn (key SSHKey) private_key() !string {
|
||||
mut path := key.private_key_path()!
|
||||
content := path.read()!
|
||||
return content
|
||||
}
|
||||
32
lib/osal/sshagent/factory.v
Normal file
@@ -0,0 +1,32 @@
|
||||
module sshagent
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
@[params]
|
||||
pub struct SSHAgentNewArgs {
|
||||
pub mut:
|
||||
homepath string
|
||||
}
|
||||
|
||||
pub fn new(args_ SSHAgentNewArgs) !SSHAgent {
|
||||
mut args := args_
|
||||
if args.homepath.len == 0 {
|
||||
args.homepath = '${os.home_dir()}/.ssh'
|
||||
}
|
||||
|
||||
mut agent := SSHAgent{
|
||||
homepath: pathlib.get_dir(path: args.homepath, create: true)!
|
||||
}
|
||||
res := os.execute('ssh-add -l')
|
||||
if res.exit_code == 0 {
|
||||
agent.active = true
|
||||
}
|
||||
agent.init()! // loads the keys known on fs and in ssh-agent
|
||||
return agent
|
||||
}
|
||||
|
||||
pub fn loaded() bool {
|
||||
mut agent := new() or { panic(err) }
|
||||
return agent.active
|
||||
}
|
||||
55
lib/osal/sshagent/get.v
Normal file
@@ -0,0 +1,55 @@
|
||||
module sshagent
|
||||
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
@[params]
|
||||
pub struct KeyGetArgs {
|
||||
pub mut:
|
||||
pubkey string
|
||||
// privkey string
|
||||
// privkey_path string
|
||||
name string
|
||||
}
|
||||
|
||||
pub fn (mut agent SSHAgent) get(args_ KeyGetArgs) ?SSHKey {
|
||||
mut args := args_
|
||||
args.pubkey = args.pubkey.trim_space()
|
||||
args.name = texttools.name_fix(args.name)
|
||||
for mut key in agent.keys {
|
||||
mut found := false
|
||||
if args.name.len > 0 && key.name == args.name {
|
||||
found = true
|
||||
}
|
||||
if args.pubkey.len > 0 && key.pubkey == args.pubkey {
|
||||
found = true
|
||||
}
|
||||
if found {
|
||||
return key
|
||||
}
|
||||
}
|
||||
return none
|
||||
}
|
||||
|
||||
fn (mut agent SSHAgent) pop(pubkey_ string) {
|
||||
mut x := 0
|
||||
mut result := 9999
|
||||
for key in agent.keys {
|
||||
if key.pubkey == pubkey_ {
|
||||
result = x
|
||||
break
|
||||
}
|
||||
x += 1
|
||||
}
|
||||
if result != 9999 {
|
||||
if agent.keys.len > result {
|
||||
agent.keys.delete(x)
|
||||
} else {
|
||||
panic('bug')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut agent SSHAgent) exists(args KeyGetArgs) bool {
|
||||
agent.get(args) or { return false }
|
||||
return true
|
||||
}
|
||||
128
lib/osal/sshagent/interactive.v
Normal file
@@ -0,0 +1,128 @@
|
||||
module sshagent
|
||||
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
// will see if there is one ssh key in sshagent
|
||||
// or if not, if there is 1 ssh key in ${agent.homepath.path}/ if yes will load
|
||||
// if we were able to define the key to use, it will be returned here
|
||||
// will return the key which will be used
|
||||
// pub fn load_interactive() ! {
|
||||
// mut pubkeys := pubkeys_get()
|
||||
// mut c := console.UIConsole{}
|
||||
// pubkeys.map(listsplit)
|
||||
// if pubkeys.len == 1 {
|
||||
// c.ask_yesno(
|
||||
// description: 'We found sshkey ${pubkeys[0]} in sshagent, want to use this one?'
|
||||
// )!
|
||||
// {
|
||||
// key_load(pubkeys[0])!
|
||||
// return pubkeys[0]
|
||||
// }
|
||||
// }
|
||||
// if pubkeys.len > 1 {
|
||||
// if c.ask_yesno(
|
||||
// description: 'We found more than 1 sshkey in sshagent, want to use one of those!'
|
||||
// )!
|
||||
// {
|
||||
// // keytouse := console.ask_dropdown(
|
||||
// // items: pubkeys
|
||||
// // description: 'Please choose the ssh key you want to use'
|
||||
// // )
|
||||
// // key_load(keytouse)!
|
||||
// // return keytouse
|
||||
// }
|
||||
// }
|
||||
|
||||
// // now means nothing in ssh-agent, lets see if we find 1 key in .ssh directory
|
||||
// mut sshdirpath := pathlib.get_dir(path: '${os.home_dir()}/.ssh', create: true)!
|
||||
|
||||
// mut pubkeys := []string{}
|
||||
// pl := sshdirpath.list(recursive: false)!
|
||||
// for p in pl.paths {
|
||||
// if p.path.ends_with('.pub') {
|
||||
// pubkeys << p.path.replace('.pub', '')
|
||||
// }
|
||||
// }
|
||||
// // console.print_debug(keypaths)
|
||||
|
||||
// if pubkeys.len == 1 {
|
||||
// if c.ask_yesno(
|
||||
// description: 'We found sshkey ${pubkeys[0]} in ${agent.homepath.path} dir, want to use this one?'
|
||||
// )!
|
||||
// {
|
||||
// key_load(pubkeys[0])!
|
||||
// return pubkeys[0]
|
||||
// }
|
||||
// }
|
||||
// if pubkeys.len > 1 {
|
||||
// if c.ask_yesno(
|
||||
// description: 'We found more than 1 sshkey in ${agent.homepath.path} dir, want to use one of those?'
|
||||
// )!
|
||||
// {
|
||||
// // keytouse := console.ask_dropdown(
|
||||
// // items: pubkeys
|
||||
// // description: 'Please choose the ssh key you want to use'
|
||||
// // )
|
||||
// // key_load(keytouse)!
|
||||
// // return keytouse
|
||||
// }
|
||||
// }
|
||||
|
||||
// will see if there is one ssh key in sshagent
|
||||
// or if not, if there is 1 ssh key in ${agent.homepath.path}/ if yes will return
|
||||
// if we were able to define the key to use, it will be returned here
|
||||
// pub fn pubkey_guess() !string {
|
||||
// pubkeys := pubkeys_get()
|
||||
// if pubkeys.len == 1 {
|
||||
// return pubkeys[0]
|
||||
// }
|
||||
// if pubkeys.len > 1 {
|
||||
// return error('There is more than 1 ssh-key loaded in ssh-agent, cannot identify which one to use.')
|
||||
// }
|
||||
// // now means nothing in ssh-agent, lets see if we find 1 key in .ssh directory
|
||||
// mut sshdirpath := pathlib.get_dir(path: '${os.home_dir()}/.ssh', create: true)!
|
||||
|
||||
// // todo: use ourregex field to nly list .pub files
|
||||
// mut fl := sshdirpath.list()!
|
||||
// mut sshfiles := fl.paths
|
||||
// mut keypaths := sshfiles.filter(it.path.ends_with('.pub'))
|
||||
// // console.print_debug(keypaths)
|
||||
|
||||
// if keypaths.len == 1 {
|
||||
// keycontent := keypaths[0].read()!
|
||||
// privkeypath := keypaths[0].path.replace('.pub', '')
|
||||
// key_load(privkeypath)!
|
||||
// return keycontent
|
||||
// }
|
||||
// if keypaths.len > 1 {
|
||||
// return error('There is more than 1 ssh-key in your ${agent.homepath.path} dir, could not automatically load.')
|
||||
// }
|
||||
// return error('Could not find sshkey in your ssh-agent as well as in your ${agent.homepath.path} dir, please generate an ssh-key')
|
||||
// }
|
||||
|
||||
// if c.ask_yesno(description: 'Would you like to generate a new key?') {
|
||||
// // name := console.ask_question(question: 'name', minlen: 3)
|
||||
// // passphrase := console.ask_question(question: 'passphrase', minlen: 5)
|
||||
|
||||
// // keytouse := key_generate(name, passphrase)!
|
||||
|
||||
// // if console.ask_yesno(description:"Please acknowledge you will remember your passphrase for ever (-: ?"){
|
||||
// // key_load(keytouse)?
|
||||
// // return keytouse
|
||||
// // }else{
|
||||
// // return error("Cannot continue, did not find sshkey to use")
|
||||
// // }
|
||||
// // key_load_with_passphrase(keytouse, passphrase)!
|
||||
// }!
|
||||
// return error('Cannot continue, did not find sshkey to use')
|
||||
|
||||
// // url_github_add := "https://library.threefold.me/info/publishtools/#/sshkey_github"
|
||||
|
||||
// // osal.execute_interactive("open $url_github_add")?
|
||||
|
||||
// // if console.ask_yesno(description:"Did you manage to add the github key to this repo ?"){
|
||||
// // console.print_debug(" - CONGRATS: your sshkey is now loaded.")
|
||||
// // }
|
||||
|
||||
// // return keytouse
|
||||
// }
|
||||
44
lib/osal/sshagent/readme.md
Normal file
@@ -0,0 +1,44 @@
|
||||
## ssh agent
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.osal.sshagent
|
||||
|
||||
mut agent := sshagent.new()!
|
||||
|
||||
privkey:='
|
||||
-----BEGIN OPENSSH PRIVATE KEY-----
|
||||
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
|
||||
QyNTUxOQAAACDXf9Z/2AH8/8a1ppagCplQdhWyQ8wZAieUw3nNcxsDiQAAAIhb3ybRW98m
|
||||
0QAAAAtzc2gtZWQyNTUxOQAAACDXf9Z/2AH8/8a1ppagCplQdhWyQ8wZAieUw3nNcxsDiQ
|
||||
AAAEC+fcDBPqdJHlJOQJ2zXhU2FztKAIl3TmWkaGCPnyts49d/1n/YAfz/xrWmlqAKmVB2
|
||||
FbJDzBkCJ5TDec1zGwOJAAAABWJvb2tz
|
||||
-----END OPENSSH PRIVATE KEY-----
|
||||
'
|
||||
|
||||
// make sure the name chosen is the same as the original name of the key
|
||||
mut sshkey := agent.add('mykey', privkey)!
|
||||
|
||||
|
||||
sshkey.forget()!
|
||||
|
||||
```
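
Continuing from the example above, generating a fresh key and listing what the agent holds (a sketch based on the functions in `sshagent.v`):

```v
// generate an ed25519 keypair in the agent's homepath and pick it up
mut newkey := agent.generate('mykey2', 'mypassphrase')!
newkey.load()!

// show all keys currently loaded in the agent
loaded := agent.keys_loaded()!
for key in loaded {
	println(key)
}
```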
|
||||
|
||||
### hero
|
||||
|
||||
there is also a hero command
|
||||
|
||||
```js
|
||||
//will add the key and load (at this stage no support for passphrases)
|
||||
!!sshagent.key_add name:'myname'
|
||||
privkey:'
|
||||
-----BEGIN OPENSSH PRIVATE KEY-----
|
||||
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
|
||||
QyNTUxOQAAACDXf9Z/2AH8/8a1ppagCplQdhWyQ8wZAieUw3nNcxsDiQAAAIhb3ybRW98m
|
||||
0QAAAAtzc2gtZWQysdsdsddsdsdsdsdsdsd8/8a1ppagCplQdhWyQ8wZAieUw3nNcxsDiQ
|
||||
AAAEC+fcDBPqdJHlJOQJ2zXhU2FztKAIl3TmWkaGCPnyts49d/1n/YAfz/xrWmlqAKmVB2
|
||||
FbJDzBkCJ5TDec1zGwOJAAAABWJvb2tz
|
||||
-----END OPENSSH PRIVATE KEY-----
|
||||
'
|
||||
|
||||
```
|
||||
|
||||
186
lib/osal/sshagent/sshagent.v
Normal file
@@ -0,0 +1,186 @@
|
||||
module sshagent
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
// import freeflowuniverse.herolib.ui.console
|
||||
|
||||
@[heap]
|
||||
pub struct SSHAgent {
|
||||
pub mut:
|
||||
keys []SSHKey
|
||||
active bool
|
||||
homepath pathlib.Path
|
||||
}
|
||||
|
||||
// get all keys from sshagent and from the local .ssh dir
|
||||
pub fn (mut agent SSHAgent) init() ! {
|
||||
// first get keys out of ssh-add
|
||||
agent.keys = []SSHKey{}
|
||||
res := os.execute('ssh-add -L')
|
||||
if res.exit_code == 0 {
|
||||
for line in res.output.split('\n') {
|
||||
if line.trim(' ') == '' {
|
||||
continue
|
||||
}
|
||||
if line.contains(' ') {
|
||||
splitted := line.split(' ')
|
||||
if splitted.len < 2 {
|
||||
panic('bug')
|
||||
}
|
||||
pubkey := splitted[1]
|
||||
mut sshkey := SSHKey{
|
||||
pubkey: pubkey
|
||||
agent: &agent
|
||||
loaded: true
|
||||
}
|
||||
if splitted[0].contains('ed25519') {
|
||||
sshkey.cat = .ed25519
|
||||
if splitted.len > 2 {
|
||||
sshkey.email = splitted[2] or { panic('bug') }
|
||||
}
|
||||
} else if splitted[0].contains('rsa') {
|
||||
sshkey.cat = .rsa
|
||||
} else {
|
||||
panic('bug: implement other cat for ssh-key.\n${line}')
|
||||
}
|
||||
|
||||
if !(agent.exists(pubkey: pubkey)) {
|
||||
// $if debug{console.print_debug("- add from agent: ${sshkey}")}
|
||||
agent.keys << sshkey
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// now get them from the filesystem
|
||||
mut fl := agent.homepath.list()!
|
||||
mut sshfiles := fl.paths.clone()
|
||||
mut pubkeypaths := sshfiles.filter(it.path.ends_with('.pub'))
|
||||
for mut pkp in pubkeypaths {
|
||||
mut c := pkp.read()!
|
||||
c = c.replace('  ', ' ').replace('  ', ' ') // deal with double or triple spaces (need to do this 2x)
|
||||
splitted := c.trim_space().split(' ')
|
||||
if splitted.len < 2 {
|
||||
panic('bug')
|
||||
}
|
||||
mut name := pkp.name()
|
||||
name = name[0..(name.len - 4)]
|
||||
pubkey2 := splitted[1]
|
||||
// the pop makes sure the key is removed from keys in agent, this means we can add later
|
||||
mut sshkey2 := agent.get(pubkey: pubkey2) or {
|
||||
SSHKey{
|
||||
name: name
|
||||
pubkey: pubkey2
|
||||
agent: &agent
|
||||
}
|
||||
}
|
||||
agent.pop(sshkey2.pubkey)
|
||||
sshkey2.name = name
|
||||
if splitted[0].contains('ed25519') {
|
||||
sshkey2.cat = .ed25519
|
||||
} else if splitted[0].contains('rsa') {
|
||||
sshkey2.cat = .rsa
|
||||
} else {
|
||||
panic('bug: implement other cat for ssh-key')
|
||||
}
|
||||
if splitted.len > 2 {
|
||||
sshkey2.email = splitted[2]
|
||||
}
|
||||
// $if debug{console.print_debug("- add from fs: ${sshkey2}")}
|
||||
agent.keys << sshkey2
|
||||
}
|
||||
}
|
||||
|
||||
// returns path to sshkey
|
||||
pub fn (mut agent SSHAgent) generate(name string, passphrase string) !SSHKey {
|
||||
dest := '${agent.homepath.path}/${name}'
|
||||
if os.exists(dest) {
|
||||
os.rm(dest)!
|
||||
}
|
||||
cmd := 'ssh-keygen -t ed25519 -f ${dest} -P ${passphrase} -q'
|
||||
// console.print_debug(cmd)
|
||||
rc := os.execute(cmd)
|
||||
if !(rc.exit_code == 0) {
|
||||
return error('Could not generate sshkey.\n${rc}')
|
||||
}
|
||||
agent.init()!
|
||||
return agent.get(name: name) or { panic(err) }
|
||||
}
|
||||
|
||||
// unload all ssh keys
|
||||
pub fn (mut agent SSHAgent) reset() ! {
|
||||
if true {
|
||||
panic('reset_ssh')
|
||||
}
|
||||
res := os.execute('ssh-add -D')
|
||||
if res.exit_code > 0 {
|
||||
return error('cannot reset sshkeys.')
|
||||
}
|
||||
agent.init()! // should now be empty for loaded keys
|
||||
}
|
||||
|
||||
// load the key, the key is passed as content (private key) .
|
||||
// a name is required
|
||||
pub fn (mut agent SSHAgent) add(name string, privkey_ string) !SSHKey {
|
||||
mut privkey := privkey_
|
||||
path := '${agent.homepath.path}/${name}'
|
||||
if os.exists(path) {
|
||||
os.rm(path)!
|
||||
}
|
||||
if os.exists('${path}.pub') {
|
||||
os.rm('${path}.pub')!
|
||||
}
|
||||
if !privkey.ends_with('\n') {
|
||||
privkey += '\n'
|
||||
}
|
||||
os.write_file(path, privkey)!
|
||||
os.chmod(path, 0o600)!
|
||||
res4 := os.execute('ssh-keygen -y -f ${path} > ${path}.pub')
|
||||
if res4.exit_code > 0 {
|
||||
return error('cannot generate pubkey ${path}.\n${res4.output}')
|
||||
}
|
||||
return agent.load(path)!
|
||||
}
|
||||
|
||||
// load key starting from path to private key
|
||||
pub fn (mut agent SSHAgent) load(keypath string) !SSHKey {
|
||||
if !os.exists(keypath) {
|
||||
return error('cannot find sshkey: ${keypath}')
|
||||
}
|
||||
if keypath.ends_with('.pub') {
|
||||
return error('can only load private keys')
|
||||
}
|
||||
name := keypath.split('/').last()
|
||||
os.chmod(keypath, 0o600)!
|
||||
res := os.execute('ssh-add ${keypath}')
|
||||
if res.exit_code > 0 {
|
||||
return error('cannot add ssh-key with path ${keypath}.\n${res.output}')
|
||||
}
|
||||
agent.init()!
|
||||
return agent.get(name: name) or {
|
||||
panic("can't find sshkey with name:'${name}' from agent.\n${err}")
|
||||
}
|
||||
}
|
||||
|
||||
// forget the specified key
|
||||
pub fn (mut agent SSHAgent) forget(name string) ! {
|
||||
if true {
|
||||
panic('reset_ssh')
|
||||
}
|
||||
mut key := agent.get(name: name) or { return }
|
||||
agent.pop(key.pubkey)
|
||||
key.forget()!
|
||||
}
|
||||
|
||||
pub fn (mut agent SSHAgent) str() string {
|
||||
mut out := []string{}
|
||||
out << '\n## SSHAGENT:\n'
|
||||
for mut key in agent.keys {
|
||||
out << key.str()
|
||||
}
|
||||
return out.join_lines() + '\n'
|
||||
}
|
||||
|
||||
pub fn (mut agent SSHAgent) keys_loaded() ![]SSHKey {
|
||||
return agent.keys.filter(it.loaded)
|
||||
}
|
||||
88
lib/osal/sshagent/sshkey.v
Normal file
@@ -0,0 +1,88 @@
|
||||
module sshagent
|
||||
|
||||
import os
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
@[heap]
|
||||
pub struct SSHKey {
|
||||
pub mut:
|
||||
name string
|
||||
pubkey string
|
||||
loaded bool
|
||||
email string
|
||||
agent &SSHAgent @[skip; str: skip]
|
||||
cat SSHKeyCat
|
||||
}
|
||||
|
||||
pub enum SSHKeyCat {
|
||||
ed25519
|
||||
rsa
|
||||
}
|
||||
|
||||
pub fn (mut key SSHKey) keypath() !pathlib.Path {
|
||||
if key.name.len == 0 {
|
||||
return error('cannot have key name empty to get path.')
|
||||
}
|
||||
return key.agent.homepath.file_get_new('${key.name}')!
|
||||
}
|
||||
|
||||
pub fn (mut key SSHKey) keypath_pub() !pathlib.Path {
|
||||
if key.name.len == 0 {
|
||||
return error('cannot have key name empty to get path.')
|
||||
}
|
||||
mut p := key.agent.homepath.file_get_new('${key.name}.pub')!
|
||||
if !(os.exists('${key.agent.homepath.path}/${key.name}.pub')) {
|
||||
p.write(key.pubkey)!
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
pub fn (mut key SSHKey) keypub() !string {
|
||||
mut p := key.keypath_pub()!
|
||||
return p.read()!
|
||||
}
|
||||
|
||||
// remove the key from the ssh-agent (the key files stay on disk)
|
||||
pub fn (mut key SSHKey) forget() ! {
|
||||
if key.loaded == false {
|
||||
return
|
||||
}
|
||||
mut keypath := key.keypath_pub() or {
|
||||
return error('keypath not set or known on sshkey: ${key}')
|
||||
}
|
||||
if !os.exists(keypath.path) {
|
||||
return error('cannot find sshkey: ${keypath}')
|
||||
}
|
||||
res := os.execute('ssh-add -d ${keypath.path}')
|
||||
if res.exit_code > 0 {
|
||||
return error('cannot forget ssh-key with path ${keypath.path}')
|
||||
}
|
||||
key.agent.init()!
|
||||
}
|
||||
|
||||
pub fn (mut key SSHKey) str() string {
|
||||
patho := key.keypath_pub() or { pathlib.Path{} }
|
||||
mut l := ' '
|
||||
if key.loaded {
|
||||
l = 'L'
|
||||
}
|
||||
return '${key.name:-15} : ${l} : ${key.cat:-8} : ${key.email:-25} : ${patho.path}'
|
||||
}
|
||||
|
||||
pub fn (mut key SSHKey) load() ! {
|
||||
$if debug {
|
||||
console.print_debug(" - sshkey load: '${key}'")
|
||||
}
|
||||
if key.name.len == 0 {
|
||||
return error('can only load keys which are on filesystem and as such have a name.')
|
||||
}
|
||||
patho := key.keypath() or {
|
||||
return error('cannot load because privkey not on fs.\n${err}\n${key}')
|
||||
}
|
||||
res := os.execute('ssh-add ${patho.path}')
|
||||
if res.exit_code > 0 {
|
||||
return error('cannot add ssh-key with path ${patho.path}.\n${res}')
|
||||
}
|
||||
key.agent.init()!
|
||||
}
|
||||
12
lib/osal/sshagent/tools.v
Normal file
@@ -0,0 +1,12 @@
|
||||
module sshagent
|
||||
|
||||
// fn listsplit(key string) string {
|
||||
// if key.trim(' ') == '' {
|
||||
// return ''
|
||||
// }
|
||||
// if key.contains(' ') {
|
||||
// splitted := key.split(' ')
|
||||
// return splitted[splitted.len].replace('.pub', '')
|
||||
// }
|
||||
// return key
|
||||
// }
|
||||
15
lib/osal/systemd/journalctl.v
Normal file
@@ -0,0 +1,15 @@
|
||||
module systemd
|
||||
|
||||
import freeflowuniverse.herolib.osal
|
||||
|
||||
pub struct JournalArgs {
|
||||
pub:
|
||||
service string // name of service for which logs will be retrieved
|
||||
limit int = 100 // number of last log lines to be shown
|
||||
}
|
||||
|
||||
pub fn journalctl(args JournalArgs) !string {
|
||||
cmd := 'journalctl --no-pager -n ${args.limit} -u ${name_fix(args.service)}'
|
||||
response := osal.execute_silent(cmd) or { return err }
|
||||
return response
|
||||
}
|
||||
7
lib/osal/systemd/readme.md
Normal file
@@ -0,0 +1,7 @@
|
||||
# a sal to work with systemd
|
||||
|
||||
|
||||
> only basics implemented as we need for our installers
|
||||
|
||||
for an example see crystallib/examples/... or the sketch below
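
A minimal sketch (struct and function names taken from `systemd.v`, `systemd_process.v` and the test below; the import path is assumed from the file location):

```v
import freeflowuniverse.herolib.osal.systemd

mut factory := systemd.new()!
mut process := factory.new(
	cmd: 'redis-server'
	name: 'testservice'
	start: true
)!
println(process.status()!)

// last log lines for the service
println(systemd.journalctl(service: 'testservice', limit: 20)!)
```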
|
||||
|
||||
184
lib/osal/systemd/systemd.v
Normal file
@@ -0,0 +1,184 @@
|
||||
module systemd
|
||||
|
||||
import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
|
||||
__global (
|
||||
systemd_global []&Systemd
|
||||
)
|
||||
|
||||
@[heap]
|
||||
pub struct Systemd {
|
||||
pub mut:
|
||||
processes []&SystemdProcess
|
||||
path pathlib.Path
|
||||
path_cmd pathlib.Path
|
||||
status SystemdFactoryStatus
|
||||
}
|
||||
|
||||
pub enum SystemdFactoryStatus {
|
||||
init
|
||||
ok
|
||||
error
|
||||
}
|
||||
|
||||
pub fn new() !&Systemd {
|
||||
if systemd_global.len > 0 {
|
||||
return systemd_global[0]
|
||||
}
|
||||
mut systemd := Systemd{
|
||||
path: pathlib.get_dir(path: '/etc/systemd/system', create: false)!
|
||||
path_cmd: pathlib.get_dir(path: '/etc/systemd_cmds', create: true)!
|
||||
}
|
||||
systemd.load()!
|
||||
systemd_global << &systemd
|
||||
return systemd_global[0]
|
||||
}
|
||||
|
||||
// check if systemd is on system, returns True if yes
|
||||
pub fn check() !bool {
|
||||
if !osal.cmd_exists('systemctl') {
|
||||
return false
|
||||
}
|
||||
|
||||
return osal.execute_ok('systemctl status --no-pager')
|
||||
}
|
||||
|
||||
fn (mut systemd Systemd) load() ! {
|
||||
if systemd.status == .ok {
|
||||
return
|
||||
}
|
||||
console.print_header('Systemd load')
|
||||
osal.execute_silent('systemctl daemon-reload')!
|
||||
systemd.processes = []&SystemdProcess{}
|
||||
for item in process_list()! {
|
||||
mut sdprocess := SystemdProcess{
|
||||
description: item.description
|
||||
systemd: &systemd
|
||||
unit: item.unit
|
||||
info: item
|
||||
}
|
||||
systemd.setinternal(mut sdprocess)
|
||||
}
|
||||
|
||||
systemd.status = .ok
|
||||
}
|
||||
|
||||
pub fn (mut systemd Systemd) reload() ! {
|
||||
systemd.status = .init
|
||||
systemd.load()!
|
||||
}
|
||||
|
||||
@[params]
|
||||
pub struct SystemdProcessNewArgs {
|
||||
pub mut:
|
||||
name string @[required]
|
||||
cmd string @[required]
|
||||
description string
|
||||
env map[string]string
|
||||
start bool = true
|
||||
restart bool = true
|
||||
}
|
||||
|
||||
//```
|
||||
// name string @[required]
|
||||
// cmd string @[required]
|
||||
// description string @[required]
|
||||
//```
|
||||
pub fn (mut systemd Systemd) new(args_ SystemdProcessNewArgs) !SystemdProcess {
|
||||
mut args := args_
|
||||
args.name = name_fix(args.name)
|
||||
|
||||
if args.cmd == '' {
|
||||
return error('cmd needs to be filled in in:\n${args}')
|
||||
}
|
||||
|
||||
mut sdprocess := SystemdProcess{
|
||||
name: args.name
|
||||
description: args.description
|
||||
cmd: args.cmd
|
||||
restart: true
|
||||
systemd: &systemd
|
||||
info: SystemdProcessInfo{
|
||||
unit: args.name
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: maybe systemd can start multiline scripts?
|
||||
if args.cmd.contains('\n') {
|
||||
// means we can load the special cmd
|
||||
mut pathcmd := systemd.path_cmd.file_get_new('${args.name}_cmd')!
|
||||
pathcmd.write(sdprocess.cmd)!
|
||||
pathcmd.chmod(0o750)!
|
||||
sdprocess.cmd = '/bin/bash -c ${pathcmd.path}'
|
||||
}
|
||||
sdprocess.env = args.env.move()
|
||||
|
||||
sdprocess.write()!
|
||||
systemd.setinternal(mut sdprocess)
|
||||
|
||||
if args.start || args.restart {
|
||||
sdprocess.stop()!
|
||||
}
|
||||
|
||||
if args.start {
|
||||
sdprocess.start()!
|
||||
}
|
||||
|
||||
return sdprocess
|
||||
}
|
||||
|
||||
pub fn (mut systemd Systemd) names() []string {
|
||||
r := systemd.processes.map(it.name)
|
||||
return r
|
||||
}
|
||||
|
||||
fn (mut systemd Systemd) setinternal(mut sdprocess SystemdProcess) {
|
||||
sdprocess.name = name_fix(sdprocess.info.unit)
|
||||
systemd.processes = systemd.processes.filter(it.name != sdprocess.name)
|
||||
systemd.processes << &sdprocess
|
||||
}
|
||||
|
||||
pub fn (mut systemd Systemd) get(name_ string) !&SystemdProcess {
|
||||
name := name_fix(name_)
|
||||
if systemd.processes.len == 0 {
|
||||
systemd.load()!
|
||||
}
|
||||
for item in systemd.processes {
|
||||
if name_fix(item.name) == name {
|
||||
return item
|
||||
}
|
||||
}
|
||||
return error("Can't find systemd process with name ${name}, maybe reload the state with systemd.load()")
|
||||
}
|
||||
|
||||
pub fn (mut systemd Systemd) exists(name_ string) bool {
|
||||
name := name_fix(name_)
|
||||
for item in systemd.processes {
|
||||
if name_fix(item.name) == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
pub fn (mut systemd Systemd) destroy(name_ string) ! {
|
||||
for i, mut pr in systemd.processes {
|
||||
if name_fix(pr.name) == name_fix(name_) {
|
||||
pr.delete()!
|
||||
systemd.processes[i] = systemd.processes[systemd.processes.len - 1]
|
||||
systemd.processes.delete_last()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn name_fix(name_ string) string {
|
||||
mut name := texttools.name_fix(name_)
|
||||
if name.contains('.service') {
|
||||
name = name.all_before_last('.')
|
||||
}
|
||||
return name
|
||||
}
|
||||
94
lib/osal/systemd/systemd_list.v
Normal file
@@ -0,0 +1,94 @@
|
||||
module systemd
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
struct SystemdProcessInfoRaw {
|
||||
unit string
|
||||
load string
|
||||
active string
|
||||
sub string
|
||||
description string
|
||||
}
|
||||
|
||||
pub struct SystemdProcessInfo {
|
||||
pub mut:
|
||||
unit string
|
||||
load_state LoadState
|
||||
active_state ActiveState
|
||||
sub_state SubState
|
||||
description string
|
||||
}
|
||||
|
||||
pub enum LoadState {
|
||||
loaded // The unit's configuration file has been successfully loaded into memory.
|
||||
not_found // The unit's configuration file could not be found.
|
||||
error // There was an error loading the unit's configuration file.
|
||||
masked // The unit has been masked, which means it has been explicitly disabled and cannot be started.
|
||||
}
|
||||
|
||||
pub enum ActiveState {
|
||||
active // The unit has been started successfully and is running as expected.
|
||||
inactive // The unit is not running.
|
||||
activating // The unit is in the process of being started.
|
||||
deactivating // The unit is in the process of being stopped.
|
||||
failed // The unit tried to start but failed.
|
||||
}
|
||||
|
||||
// This provides more detailed information about the unit's state, often referred to as the "sub-state". This can vary significantly between different types of units (services, sockets, timers, etc.)
|
||||
pub enum SubState {
|
||||
unknown
|
||||
start
|
||||
running // The service is currently running.
|
||||
exited // The service has completed its process and exited. For services that do something at startup and then exit (oneshot services), this is a normal state.
|
||||
failed // The service has failed after starting.
|
||||
waiting // The service is waiting for some condition to be met.
|
||||
autorestart
|
||||
dead
|
||||
}
|
||||
|
||||
pub fn process_list() ![]SystemdProcessInfo {
|
||||
cmd := 'systemctl list-units --type=service --no-pager --all -o json-pretty '
|
||||
res_ := os.execute(cmd)
|
||||
if res_.exit_code > 0 {
|
||||
return error('could not execute: ${cmd}')
|
||||
}
|
||||
items := json.decode([]SystemdProcessInfoRaw, res_.output) or {
|
||||
panic('Failed to decode Systemd Process Info')
|
||||
}
|
||||
mut res := []SystemdProcessInfo{}
|
||||
for item in items {
|
||||
mut unit := SystemdProcessInfo{
|
||||
unit: item.unit
|
||||
description: item.description
|
||||
}
|
||||
match item.load {
|
||||
'loaded' { unit.load_state = .loaded }
|
||||
'not-found' { unit.load_state = .not_found }
|
||||
'error' { unit.load_state = .error }
|
||||
'bad-setting' { unit.load_state = .error }
|
||||
'masked' { unit.load_state = .masked }
|
||||
else { return error('could not find right load state for systemd ${unit.load_state}') }
|
||||
}
|
||||
match item.active {
|
||||
'active' { unit.active_state = .active }
|
||||
'inactive' { unit.active_state = .inactive }
|
||||
'activating' { unit.active_state = .activating }
|
||||
'deactivating' { unit.active_state = .deactivating }
|
||||
'failed' { unit.active_state = .failed }
|
||||
else { return error('could not find right active state for systemd ${unit.load_state}') }
|
||||
}
|
||||
match item.sub {
|
||||
'start' { unit.sub_state = .start }
|
||||
'running' { unit.sub_state = .running }
|
||||
'exited' { unit.sub_state = .exited }
|
||||
'failed' { unit.sub_state = .failed }
|
||||
'waiting' { unit.sub_state = .waiting }
|
||||
'dead' { unit.sub_state = .dead }
|
||||
'auto-restart' { unit.sub_state = .autorestart }
|
||||
else { unit.sub_state = .unknown }
|
||||
}
|
||||
res << unit
|
||||
}
|
||||
return res
|
||||
}
|
||||
143
lib/osal/systemd/systemd_process.v
Normal file
@@ -0,0 +1,143 @@
|
||||
module systemd
|
||||
|
||||
// import os
|
||||
import maps
|
||||
import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import os
|
||||
|
||||
@[heap]
|
||||
pub struct SystemdProcess {
|
||||
pub mut:
|
||||
name string
|
||||
unit string // as generated or used by systemd
|
||||
cmd string
|
||||
pid int
|
||||
env map[string]string
|
||||
systemd &Systemd @[skip; str: skip]
|
||||
description string
|
||||
info SystemdProcessInfo
|
||||
restart bool = true // whether process will be restarted upon failure
|
||||
}
|
||||
|
||||
pub fn (mut self SystemdProcess) servicefile_path() string {
|
||||
return '${self.systemd.path.path}/${self.name}.service'
|
||||
}
|
||||
|
||||
pub fn (mut self SystemdProcess) write() ! {
|
||||
mut p := pathlib.get_file(path: self.servicefile_path(), create: true)!
|
||||
console.print_header(' systemd write service: ${p.path}')
|
||||
|
||||
envs_lst := maps.to_array[string, string, string](self.env, fn (k string, v string) string {
|
||||
return 'Environment=${k}=${v}'
|
||||
})
|
||||
|
||||
envs := envs_lst.join('\n')
|
||||
|
||||
servicecontent := $tmpl('templates/service.yaml')
|
||||
|
||||
println(self)
|
||||
println(servicecontent)
|
||||
|
||||
p.write(servicecontent)!
|
||||
}
|
||||
|
||||
pub fn (mut self SystemdProcess) start() ! {
|
||||
console.print_header('starting systemd process: ${self.name}')
|
||||
// self.write()!
|
||||
cmd := '
|
||||
systemctl daemon-reload
|
||||
systemctl enable ${self.name}
|
||||
systemctl start ${self.name}
|
||||
' // console.print_debug(cmd)
|
||||
|
||||
_ = osal.execute_silent(cmd)!
|
||||
self.refresh()!
|
||||
}
|
||||
|
||||
// get status from system
|
||||
pub fn (mut self SystemdProcess) refresh() ! {
|
||||
self.systemd.load()!
|
||||
systemdobj2 := self.systemd.get(self.name)!
|
||||
self.info = systemdobj2.info
|
||||
self.description = systemdobj2.description
|
||||
self.name = systemdobj2.name
|
||||
self.unit = systemdobj2.unit
|
||||
self.cmd = systemdobj2.cmd
|
||||
}
|
||||
|
||||
pub fn (mut self SystemdProcess) delete() ! {
|
||||
console.print_header('Process systemd: ${self.name} delete.')
|
||||
self.stop()!
|
||||
if os.exists(self.servicefile_path()) {
|
||||
os.rm(self.servicefile_path())!
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut self SystemdProcess) stop() ! {
|
||||
cmd := '
|
||||
set +ex
|
||||
systemctl daemon-reload
|
||||
systemctl disable ${self.name}
|
||||
systemctl stop ${self.name}
|
||||
'
|
||||
_ = osal.exec(cmd: cmd, stdout: false, debug: false, ignore_error: false)!
|
||||
self.systemd.load()!
|
||||
}
|
||||
|
||||
pub fn (mut self SystemdProcess) restart() ! {
|
||||
cmd := '
|
||||
systemctl daemon-reload
|
||||
systemctl restart ${self.name}
|
||||
'
|
||||
_ = osal.execute_silent(cmd)!
|
||||
self.systemd.load()!
|
||||
}
|
||||
|
||||
enum SystemdStatus {
|
||||
unknown
|
||||
active
|
||||
inactive
|
||||
failed
|
||||
activating
|
||||
deactivating
|
||||
}
|
||||
|
||||
pub fn (self SystemdProcess) status() !SystemdStatus {
|
||||
// exit with 3 is converted to exit with 0
|
||||
cmd := '
|
||||
systemctl daemon-reload
|
||||
systemctl status --no-pager --lines=0 ${name_fix(self.name)}
|
||||
'
|
||||
job := osal.exec(cmd: cmd, stdout: false) or {
|
||||
if err.code() == 3 {
|
||||
if err is osal.JobError {
|
||||
return parse_systemd_process_status(err.job.output)
|
||||
}
|
||||
}
|
||||
return error('Failed to run command to get status ${err}')
|
||||
}
|
||||
|
||||
return parse_systemd_process_status(job.output)
|
||||
}
|
||||
|
||||
fn parse_systemd_process_status(output string) SystemdStatus {
|
||||
lines := output.split_into_lines()
|
||||
for line in lines {
|
||||
if line.contains('Active: ') {
|
||||
if line.contains('active (running)') {
|
||||
return .active
|
||||
} else if line.contains('inactive (dead)') {
|
||||
return .inactive
|
||||
} else if line.contains('failed') {
|
||||
return .failed
|
||||
} else if line.contains('activating') {
|
||||
return .activating
|
||||
} else if line.contains('deactivating') {
|
||||
return .deactivating
|
||||
}
|
||||
}
|
||||
}
|
||||
return .unknown
|
||||
}
|
||||
61
lib/osal/systemd/systemd_process_test.v
Normal file
@@ -0,0 +1,61 @@
|
||||
module systemd
|
||||
|
||||
// import os
|
||||
import maps
|
||||
import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import os
|
||||
|
||||
pub fn testsuite_begin() ! {
|
||||
mut systemdfactory := new()!
|
||||
mut process := systemdfactory.new(
|
||||
cmd: 'redis-server'
|
||||
name: 'testservice'
|
||||
start: false
|
||||
)!
|
||||
|
||||
process.delete()!
|
||||
}
|
||||
|
||||
pub fn testsuite_end() ! {
|
||||
mut systemdfactory := new()!
|
||||
mut process := systemdfactory.new(
|
||||
cmd: 'redis-server'
|
||||
name: 'testservice'
|
||||
start: false
|
||||
)!
|
||||
|
||||
process.delete()!
|
||||
}
|
||||
|
||||
pub fn test_systemd_process_status() ! {
|
||||
mut systemdfactory := new()!
|
||||
mut process := systemdfactory.new(
|
||||
cmd: 'redis-server'
|
||||
name: 'testservice'
|
||||
start: false
|
||||
)!
|
||||
|
||||
process.start()!
|
||||
status := process.status()!
|
||||
assert status == .active
|
||||
}
|
||||
|
||||
pub fn test_parse_systemd_process_status() ! {
|
||||
output := 'testservice.service - testservice
|
||||
Loaded: loaded (/etc/systemd/system/testservice.service; enabled; preset: disabled)
|
||||
Active: active (running) since Mon 2024-06-10 12:51:24 CEST; 2ms ago
|
||||
Main PID: 202537 (redis-server)
|
||||
Tasks: 1 (limit: 154455)
|
||||
Memory: 584.0K (peak: 584.0K)
|
||||
CPU: 0
|
||||
CGroup: /system.slice/testservice.service
|
||||
└─202537 redis-server
|
||||
|
||||
Jun 10 12:51:24 myhost1 systemd[1]: testservice.service: Scheduled restart job, restart counter is at 1.
|
||||
Jun 10 12:51:24 myhost1 systemd[1]: Started testservice.'
|
||||
|
||||
status := parse_systemd_process_status(output)
|
||||
assert status == .active
|
||||
}
|
||||
17
lib/osal/systemd/templates/service.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
[Unit]
|
||||
Description=${self.name}
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=${self.cmd}
|
||||
WorkingDirectory=/tmp
|
||||
@if self.restart
|
||||
Restart=always
|
||||
@else
|
||||
Restart=no
|
||||
@end
|
||||
@{envs}
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
24
lib/osal/tmux/readme.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# TMUX
|
||||
|
||||
|
||||
TMUX is a very capable process manager.
|
||||
|
||||
### Concepts
|
||||
|
||||
- tmux = is the factory, it represents the tmux process manager, linked to a node
|
||||
- session = is a set of windows, it has a name and groups windows
|
||||
- window = is typically one process running (you can have panes but in our implementation we skip this)
|
||||
|
||||
|
||||
## structure
|
||||
|
||||
tmux library provides functions for managing tmux sessions
|
||||
|
||||
- session is the top one
|
||||
- then windows (is where you see the app running)
|
||||
- then panes in windows (we don't support yet)
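
As a sketch of creating and stopping a session (mirroring the test code further down; the struct-literal construction and import path are assumptions):

```v
import freeflowuniverse.herolib.osal.tmux

mut t := tmux.Tmux{}
t.start()!

mut s := tmux.Session{
	tmux: &t
	windows: map[string]&tmux.Window{}
	name: 'testsession'
}
s.create()!
// ... windows with running processes live inside this session ...
s.stop()!
```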
|
||||
|
||||
|
||||
## to attach to a tmux session
|
||||
|
||||
> TODO:
|
||||
86
lib/osal/tmux/testdata/tmux_session_test.v
vendored
Normal file
@@ -0,0 +1,86 @@
module tmux

import freeflowuniverse.herolib.osal
// import freeflowuniverse.herolib.installers.tmux

// fn testsuite_end() {
//
// }

fn testsuite_begin() ! {
	mut tmux := Tmux{}

	if tmux.is_running()! {
		tmux.stop()!
	}
}

fn test_session_create() {
	// installer := tmux.get_install(
	// 	panic('could not install tmux: ${err}')
	// }

	mut tmux := Tmux{}
	tmux.start() or { panic('cannot start tmux: ${err}') }

	mut s := Session{
		tmux:    &tmux
		windows: map[string]&Window{}
		name:    'testsession'
	}

	mut s2 := Session{
		tmux:    &tmux
		windows: map[string]&Window{}
		name:    'testsession2'
	}

	// test testsession exists after session_create
	mut tmux_ls := osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
	assert !tmux_ls.contains('testsession: 1 windows')
	s.create() or { panic('Cannot create session: ${err}') }
	tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
	assert tmux_ls.contains('testsession: 1 windows')

	// test multiple session_create for same tmux
	tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
	assert !tmux_ls.contains('testsession2: 1 windows')
	s2.create() or { panic('Cannot create session: ${err}') }
	tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
	assert tmux_ls.contains('testsession2: 1 windows')

	// test session_create with duplicate session
	mut create_err := ''
	s2.create() or { create_err = err.msg() }
	assert create_err != ''
	assert create_err.contains('duplicate session: testsession2')
	tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
	assert tmux_ls.contains('testsession2: 1 windows')

	s.stop() or { panic('Cannot stop session: ${err}') }
	s2.stop() or { panic('Cannot stop session: ${err}') }
}

// fn test_session_stop() {
//
// 	installer := tmux.get_install(
//
// 	mut tmux := Tmux {
// 		node: node_ssh
// 	}
//
// 	mut s := Session{
// 		tmux: &tmux // reference back
// 		windows: map[string]&Window{}
// 		name: 'testsession3'
// 	}
//
// 	s.create() or { panic("Cannot create session: $err") }
// 	mut tmux_ls := osal.execute_silent('tmux ls') or { panic("can't exec: $err") }
// 	assert tmux_ls.contains("testsession3: 1 windows")
// 	s.stop() or { panic("Cannot stop session: $err")}
// 	tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: $err") }
// 	assert !tmux_ls.contains("testsession3: 1 windows")
// }
67
lib/osal/tmux/testdata/tmux_window_test.v
vendored
Normal file
@@ -0,0 +1,67 @@
module tmux

import freeflowuniverse.herolib.osal
// import freeflowuniverse.herolib.installers.tmux
import freeflowuniverse.herolib.ui.console

// uses single tmux instance for all tests
__global (
	tmux Tmux
)

fn init() {
	tmux = get_remote('185.69.166.152') or { panic(err) }

	// reset tmux for tests
	if tmux.is_running() or { false } {
		tmux.stop() or { panic('Cannot stop tmux') }
	}
}

fn testsuite_end() {
	if tmux.is_running() or { false } {
		tmux.stop() or { panic(err) }
	}
}

fn test_window_new() {
	tmux.start() or { panic("can't start tmux: ${err}") }

	// test window new with only name arg
	window_args := WindowArgs{
		name: 'TestWindow'
	}

	assert !tmux.sessions.keys().contains('main')

	mut window := tmux.window_new(window_args) or { panic("Can't create new window: ${err}") }
	assert tmux.sessions.keys().contains('main')
	window.delete() or { panic('Cant delete window') }
}

// // tests creating duplicate windows
// fn test_window_new0() {
//
// 	installer := tmux.get_install(
//
// 	mut tmux := Tmux {
// 		node: node_ssh
// 	}
//
// 	window_args := WindowArgs {
// 		name: 'TestWindow0'
// 	}
//
// 	// console.print_debug(tmux)
// 	mut window := tmux.window_new(window_args) or {
// 		panic("Can't create new window: $err")
// 	}
// 	assert tmux.sessions.keys().contains('main')
// 	mut window_dup := tmux.window_new(window_args) or {
// 		panic("Can't create new window: $err")
// 	}
// 	console.print_debug(node_ssh.exec('tmux ls') or { panic("fail:$err")})
// 	window.delete() or { panic("Cant delete window") }
// 	// console.print_debug(tmux)
// }
116
lib/osal/tmux/tmux.v
Normal file
@@ -0,0 +1,116 @@
module tmux

import freeflowuniverse.herolib.osal
// import freeflowuniverse.herolib.session
import os
import time
import freeflowuniverse.herolib.ui.console

@[heap]
pub struct Tmux {
pub mut:
	sessions  []&Session
	sessionid string // unique link to job
}

@[params]
pub struct TmuxNewArgs {
	sessionid string
}

// return tmux instance
pub fn new(args TmuxNewArgs) !Tmux {
	mut t := Tmux{
		sessionid: args.sessionid
	}
	t.load()!
	t.scan()!
	return t
}

// loads tmux session, populate the object
pub fn (mut tmux Tmux) load() ! {
	isrunning := tmux.is_running()!
	if !isrunning {
		tmux.start()!
	}
	// console.print_debug("SCAN")
	tmux.scan()!
}

pub fn (mut t Tmux) stop() ! {
	$if debug {
		console.print_debug('Stopping tmux...')
	}

	t.sessions = []&Session{}
	t.scan()!

	for _, mut session in t.sessions {
		session.stop()!
	}

	cmd := 'tmux kill-server'
	_ := osal.exec(cmd: cmd, stdout: false, name: 'tmux_kill_server', ignore_error: true) or {
		panic('bug')
	}
	os.log('TMUX - All sessions stopped .')
}

pub fn (mut t Tmux) start() ! {
	cmd := 'tmux new-sess -d -s main'
	_ := osal.exec(cmd: cmd, stdout: false, name: 'tmux_start') or {
		return error("Can't execute ${cmd} \n${err}")
	}
	// scan and add default bash window created with session init
	time.sleep(time.Duration(100 * time.millisecond))
	t.scan()!
}

// print list of tmux sessions
pub fn (mut t Tmux) list_print() {
	// os.log('TMUX - Start listing ....')
	for _, session in t.sessions {
		for _, window in session.windows {
			console.print_debug(window)
		}
	}
}

// get all windows as found in all sessions
pub fn (mut t Tmux) windows_get() []&Window {
	mut res := []&Window{}
	// os.log('TMUX - Start listing ....')
	for _, session in t.sessions {
		for _, window in session.windows {
			res << window
		}
	}
	return res
}

// checks whether tmux server is running
pub fn (mut t Tmux) is_running() !bool {
	res := osal.exec(cmd: 'tmux info', stdout: false, name: 'tmux_info', raise_error: false) or {
		panic('bug')
	}
	if res.error.contains('no server running') {
		// console.print_debug(" TMUX NOT RUNNING")
		return false
	}
	if res.error.contains('no current client') {
		return true
	}
	if res.exit_code > 0 {
		return error('could not execute tmux info.\n${res}')
	}
	return true
}

pub fn (mut t Tmux) str() string {
	mut out := '# Tmux\n\n'
	for s in t.sessions {
		out += '${*s}\n'
	}
	return out
}
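Not part of this commit: a small illustrative driver for this file's public API, again assuming the module is importable as `freeflowuniverse.herolib.osal.tmux`.

```v
import freeflowuniverse.herolib.osal.tmux

fn main() {
	// new() runs load(), which starts the tmux server when it is not running yet
	mut t := tmux.new() or { panic(err) }

	running := t.is_running() or { false }
	println('tmux running: ${running}')

	// print all sessions and their windows
	t.list_print()

	// kill the server and all of its sessions again
	t.stop() or { panic(err) }
}
```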
Some files were not shown because too many files have changed in this diff.