2025-08-28 16:02:28 +02:00
parent d52aa5dbd4
commit fb87adf87d
44 changed files with 480 additions and 507 deletions

View File

@@ -134,7 +134,7 @@ Returns the current username.
## 2. Network Utilities
-### `osal.ping(args: PingArgs) !PingResult`
+### `osal.ping(args: PingArgs) !bool`
Checks host reachability.
* **Parameters**:
### `osal.ipaddr_pub_get_check() !string`

View File

@@ -23,12 +23,12 @@ This document describes the core functionalities of the Operating System Abstrac
## 2. Network Utilities
-* **`osal.ping(args: PingArgs) !PingResult`**: Check host reachability.
-    * **Key Parameters**: `address` (string).
-    * **Returns**: `PingResult` (`.ok`, `.timeout`, `.unknownhost`).
-* **`osal.tcp_port_test(args: TcpPortTestArgs) bool`**: Test if a TCP port is open.
-    * **Key Parameters**: `address` (string), `port` (int).
-* **`osal.ipaddr_pub_get() !string`**: Get public IP address.
+* **`osal.ping(args: PingArgs) !bool`**: Check host reachability.
+    - address string = "8.8.8.8"
+    - nr_ping u16 = 3 // amount of ping requests we will do
+    - nr_ok u16 = 3 // how many of them need to be ok
+    - retry u8 // how many times do we retry above sequence, basically we ping ourselves with -c 1
+* **`osal.ipaddr_pub_get() !string`**: Get public IP address.
## 3. File System Operations
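
Note: a minimal usage sketch of the boolean ping API documented above. The parameter names and defaults come from this commit; the import path and the `main` wrapper are assumptions for illustration only.

```v
import freeflowuniverse.herolib.osal // import path assumed, not shown in this diff

fn main() {
	// true only when at least nr_ok of the nr_ping echo requests succeed;
	// retry repeats the whole sequence if the threshold was not reached
	reachable := osal.ping(address: '8.8.8.8', nr_ping: 3, nr_ok: 2, retry: 1) or {
		eprintln('ping could not run: ${err}')
		return
	}
	println('reachable: ${reachable}')
}
```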

View File

@@ -761,9 +761,7 @@ this document has info about the most core functions, more detailed info can be
### 2. Network Utilities
-* **`osal.ping(args: PingArgs) !PingResult`**: Check host reachability.
-    * **Key Parameters**: `address` (string).
-    * **Returns**: `PingResult` (`.ok`, `.timeout`, `.unknownhost`).
+* **`osal.ping(args: PingArgs) !bool`**: Check host reachability.
* **`osal.tcp_port_test(args: TcpPortTestArgs) bool`**: Test if a TCP port is open.
    * **Key Parameters**: `address` (string), `port` (int).
* **`osal.ipaddr_pub_get() !string`**: Get public IP address.

View File

@@ -9,18 +9,18 @@ import freeflowuniverse.herolib.ui.console
fn main() { fn main() {
console.print_header('🔑 Hero SSH Agent Test Suite') console.print_header('🔑 Hero SSH Agent Test Suite')
os.execute('${os.dir(os.dir(@FILE))}/cli/compile.vsh') os.execute('${os.dir(os.dir(@FILE))}/cli/compile.vsh')
hero_bin := '${os.home_dir()}/hero/bin/hero' hero_bin := '${os.home_dir()}/hero/bin/hero'
// Check if hero binary exists // Check if hero binary exists
if !os.exists(hero_bin) { if !os.exists(hero_bin) {
console.print_stderr('Hero binary not found at ${hero_bin}') console.print_stderr('Hero binary not found at ${hero_bin}')
console.print_stderr('Please compile hero first with: ./cli/compile.vsh') console.print_stderr('Please compile hero first with: ./cli/compile.vsh')
exit(1) exit(1)
} }
console.print_green(' Hero binary found at ${hero_bin}') console.print_green(' Hero binary found at ${hero_bin}')
// Test 1: Profile initialization // Test 1: Profile initialization
console.print_header('Test 1: Profile Initialization') console.print_header('Test 1: Profile Initialization')
result1 := os.execute('${hero_bin} sshagent profile') result1 := os.execute('${hero_bin} sshagent profile')
@@ -29,17 +29,17 @@ fn main() {
} else { } else {
console.print_stderr(' Profile initialization failed: ${result1.output}') console.print_stderr(' Profile initialization failed: ${result1.output}')
} }
// Test 2: Status check // Test 2: Status check
console.print_header('Test 2: Status Check') console.print_header('Test 2: Status Check')
result2 := os.execute('${hero_bin} sshagent status') result2 := os.execute('${hero_bin} sshagent status')
if result2.exit_code == 0 && result2.output.contains("- SSH Agent Status") { if result2.exit_code == 0 && result2.output.contains('- SSH Agent Status') {
console.print_green(' Status check successful') console.print_green(' Status check successful')
println(result2.output) println(result2.output)
} else { } else {
console.print_stderr(' Status check failed: ${result2.output}') console.print_stderr(' Status check failed: ${result2.output}')
} }
// Test 3: List keys // Test 3: List keys
console.print_header('Test 3: List SSH Keys') console.print_header('Test 3: List SSH Keys')
result3 := os.execute('${hero_bin} sshagent list') result3 := os.execute('${hero_bin} sshagent list')
@@ -49,7 +49,7 @@ fn main() {
} else { } else {
console.print_stderr(' List keys failed: ${result3.output}') console.print_stderr(' List keys failed: ${result3.output}')
} }
// Test 4: Generate test key // Test 4: Generate test key
console.print_header('Test 4: Generate Test Key') console.print_header('Test 4: Generate Test Key')
test_key_name := 'hero_test_${os.getpid()}' test_key_name := 'hero_test_${os.getpid()}'
@@ -57,11 +57,11 @@ fn main() {
if result4.exit_code == 0 && result4.output.contains('Generating SSH key') { if result4.exit_code == 0 && result4.output.contains('Generating SSH key') {
console.print_green(' Key generation successful') console.print_green(' Key generation successful')
println(result4.output) println(result4.output)
// Cleanup: remove test key files // Cleanup: remove test key files
test_key_path := '${os.home_dir()}/.ssh/${test_key_name}' test_key_path := '${os.home_dir()}/.ssh/${test_key_name}'
test_pub_path := '${test_key_path}.pub' test_pub_path := '${test_key_path}.pub'
if os.exists(test_key_path) { if os.exists(test_key_path) {
os.rm(test_key_path) or {} os.rm(test_key_path) or {}
console.print_debug('Cleaned up test private key') console.print_debug('Cleaned up test private key')
@@ -73,7 +73,7 @@ fn main() {
} else { } else {
console.print_stderr(' Key generation failed: ${result4.output}') console.print_stderr(' Key generation failed: ${result4.output}')
} }
// Test 5: Help output // Test 5: Help output
console.print_header('Test 5: Help Output') console.print_header('Test 5: Help Output')
result5 := os.execute('${hero_bin} sshagent') result5 := os.execute('${hero_bin} sshagent')
@@ -82,10 +82,10 @@ fn main() {
} else { } else {
console.print_stderr(' Help output unexpected') console.print_stderr(' Help output unexpected')
} }
console.print_header('🎉 Test Suite Complete') console.print_header('🎉 Test Suite Complete')
console.print_green('Hero SSH Agent is ready for use!') console.print_green('Hero SSH Agent is ready for use!')
// Show usage examples // Show usage examples
console.print_header('Usage Examples:') console.print_header('Usage Examples:')
println('') println('')

View File

@@ -11,5 +11,4 @@ playcmds.run(
heroscript_path: heroscript_path heroscript_path: heroscript_path
)! )!
println('Simulation complete!') println('Simulation complete!')

examples/tmux/tmux_setup.heroscript Normal file → Executable file
View File

@@ -1,3 +1,5 @@
+#!/usr/bin/env hero
// Create development session
!!tmux.session_create
    name:'dev'

View File

@@ -6,16 +6,14 @@ import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.builder import freeflowuniverse.herolib.builder
import time import time
import os import os
import freeflowuniverse.herolib.core.playcmds import freeflowuniverse.herolib.core.playcmds
-user:=os.environ()['HETZNER_USER'] or {
+user := os.environ()['HETZNER_USER'] or {
	println('HETZNER_USER not set')
	exit(1)
}
-passwd:=os.environ()['HETZNER_PASSWORD'] or {
+passwd := os.environ()['HETZNER_PASSWORD'] or {
	println('HETZNER_PASSWORD not set')
	exit(1)
}
@@ -31,7 +29,7 @@ playcmds.run(
console.print_header('Hetzner Test.') console.print_header('Hetzner Test.')
mut cl := hetznermanager.get(name:'main')! mut cl := hetznermanager.get(name: 'main')!
// for i in 0 .. 5 { // for i in 0 .. 5 {
// println('test cache, first time slow then fast') // println('test cache, first time slow then fast')
@@ -45,7 +43,7 @@ mut cl := hetznermanager.get(name:'main')!
// cl.server_reset(name:"kristof2",wait:true)! // cl.server_reset(name:"kristof2",wait:true)!
//don't forget to specify the keyname needed // don't forget to specify the keyname needed
// cl.server_rescue(name:"kristof2",wait:true, hero_install:true,sshkey_name:"kristof")! // cl.server_rescue(name:"kristof2",wait:true, hero_install:true,sshkey_name:"kristof")!
// mut ks:=cl.keys_get()! // mut ks:=cl.keys_get()!
@@ -55,11 +53,10 @@ mut cl := hetznermanager.get(name:'main')!
// mut b := builder.new()! // mut b := builder.new()!
// mut n := b.node_new(ipaddr: serverinfo.server_ip)! // mut n := b.node_new(ipaddr: serverinfo.server_ip)!
//this will put hero in debug mode on the system // this will put hero in debug mode on the system
// n.hero_install(compile:true)! // n.hero_install(compile:true)!
// n.shell("")! // n.shell("")!
cl.ubuntu_install(name:"kristof2",wait:true, hero_install:true,sshkey_name:"kristof")! cl.ubuntu_install(name: 'kristof2', wait: true, hero_install: true, sshkey_name: 'kristof')!
cl.ubuntu_install(name:"kristof20",wait:true, hero_install:true,sshkey_name:"kristof")! cl.ubuntu_install(name: 'kristof20', wait: true, hero_install: true, sshkey_name: 'kristof')!

View File

@@ -6,8 +6,8 @@ import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.installers.virt.lima as limainstaller import freeflowuniverse.herolib.installers.virt.lima as limainstaller
import os import os
mut i:=limainstaller.get(create:true)! mut i := limainstaller.get(create: true)!
i.install(reset:true)! i.install(reset: true)!
// mut virtmanager := lima.new()! // mut virtmanager := lima.new()!

View File

@@ -44,10 +44,10 @@ pub fn (mut bs BootStrapper) run(args_ BootstrapperArgs) ! {
@[params] @[params]
pub struct HeroInstallArgs { pub struct HeroInstallArgs {
pub mut: pub mut:
reset bool reset bool
compile bool compile bool
v_analyzer bool v_analyzer bool
debug bool //will go in shell debug bool // will go in shell
} }
pub fn (mut node Node) hero_install(args HeroInstallArgs) ! { pub fn (mut node Node) hero_install(args HeroInstallArgs) ! {
@@ -58,15 +58,15 @@ pub fn (mut node Node) hero_install(args HeroInstallArgs) ! {
homedir := myenv['HOME'] or { return error("can't find HOME in env") } homedir := myenv['HOME'] or { return error("can't find HOME in env") }
mut todo := []string{} mut todo := []string{}
-	if ! args.compile {
-		todo << "curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh > /tmp/install.sh"
-		todo << "bash /tmp/install.sh"
-	}else{
+	if !args.compile {
+		todo << 'curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh > /tmp/install.sh'
+		todo << 'bash /tmp/install.sh'
+	} else {
		todo << "curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh"
		if args.v_analyzer {
-			todo << "bash /tmp/install_v.sh --analyzer --herolib "
-		}else{
-			todo << "bash /tmp/install_v.sh --herolib "
+			todo << 'bash /tmp/install_v.sh --analyzer --herolib '
+		} else {
+			todo << 'bash /tmp/install_v.sh --herolib '
		}
} }
node.exec_interactive(todo.join('\n'))! node.exec_interactive(todo.join('\n'))!
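
Note: a hedged sketch of how `hero_install` above is typically driven from a builder node, mirroring the commented-out calls in the hetzner example earlier in this commit; the IP address is a placeholder.

```v
import freeflowuniverse.herolib.builder

fn install_hero(addr string) ! {
	mut b := builder.new()!
	mut n := b.node_new(ipaddr: addr)!
	// compile: true takes the install_v.sh branch above; otherwise install_hero.sh is used
	n.hero_install(compile: true, v_analyzer: true)!
}

fn main() {
	install_hero('192.0.2.10') or { panic(err) } // placeholder address
}
```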

View File

@@ -54,13 +54,13 @@ pub fn (mut executor ExecutorSSH) exec(args_ ExecArgs) !string {
port = '-p ${executor.ipaddr.port}' port = '-p ${executor.ipaddr.port}'
} }
if args.cmd.contains("\n"){ if args.cmd.contains('\n') {
//need to upload the file first // need to upload the file first
args.cmd = texttools.dedent(args.cmd) args.cmd = texttools.dedent(args.cmd)
executor.file_write('/tmp/toexec.sh', args.cmd)! executor.file_write('/tmp/toexec.sh', args.cmd)!
args.cmd = "bash /tmp/toexec.sh" args.cmd = 'bash /tmp/toexec.sh'
} }
args.cmd = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${executor.user}@${executor.ipaddr.addr} ${port} "${args.cmd}"' args.cmd = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${executor.user}@${executor.ipaddr.addr} ${port} "${args.cmd}"'
res := osal.exec(cmd: args.cmd, stdout: args.stdout, debug: executor.debug)! res := osal.exec(cmd: args.cmd, stdout: args.stdout, debug: executor.debug)!
@@ -74,11 +74,11 @@ pub fn (mut executor ExecutorSSH) exec_interactive(args_ ExecArgs) ! {
port = '-p ${executor.ipaddr.port}' port = '-p ${executor.ipaddr.port}'
} }
if args.cmd.contains("\n"){ if args.cmd.contains('\n') {
args.cmd = texttools.dedent(args.cmd) args.cmd = texttools.dedent(args.cmd)
//need to upload the file first // need to upload the file first
executor.file_write('/tmp/toexec.sh', args.cmd)! executor.file_write('/tmp/toexec.sh', args.cmd)!
args.cmd = "bash /tmp/toexec.sh" args.cmd = 'bash /tmp/toexec.sh'
} }
args.cmd = 'ssh -tt -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${executor.user}@${executor.ipaddr.addr} ${port} "${args.cmd}"' args.cmd = 'ssh -tt -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${executor.user}@${executor.ipaddr.addr} ${port} "${args.cmd}"'

View File

@@ -24,10 +24,8 @@ pub fn check() bool {
// } // }
// TODO: might be dangerous if that one goes out // TODO: might be dangerous if that one goes out
-	ping_result := osal.ping(address: '40a:152c:b85b:9646:5b71:d03a:eb27:2462', retry: 2) or {
-		return false
-	}
-	if ping_result == .ok {
+	ping_result := osal.ping(address: '40a:152c:b85b:9646:5b71:d03a:eb27:2462') or { panic(err) }
+	if ping_result {
		console.print_debug('could reach 40a:152c:b85b:9646:5b71:d03a:eb27:2462')
		return true
	}

View File

@@ -55,7 +55,7 @@ pub fn (mut h HTTPConnection) send(req_ Request) !Result {
mut is_cacheable := h.is_cacheable(req) mut is_cacheable := h.is_cacheable(req)
if req.debug { if req.debug {
//in debug mode should not cache // in debug mode should not cache
is_cacheable = false is_cacheable = false
} }
@@ -95,14 +95,14 @@ pub fn (mut h HTTPConnection) send(req_ Request) !Result {
new_req.header.set(http.CommonHeader.content_type, 'multipart/form-data') new_req.header.set(http.CommonHeader.content_type, 'multipart/form-data')
} }
} }
if req.debug { if req.debug {
console.print_debug('http request:\n${new_req.str()}') console.print_debug('http request:\n${new_req.str()}')
} }
for counter in 0 .. h.retry { for counter in 0 .. h.retry {
if req.debug { if req.debug {
console.print_debug("request attempt:${counter}") console.print_debug('request attempt:${counter}')
} }
response = new_req.do() or { response = new_req.do() or {
err_message = 'Cannot send request:${req}\nerror:${err}' err_message = 'Cannot send request:${req}\nerror:${err}'
// console.print_debug(err_message) // console.print_debug(err_message)
@@ -111,9 +111,9 @@ pub fn (mut h HTTPConnection) send(req_ Request) !Result {
break break
} }
if req.debug { if req.debug {
console.print_debug("request done") console.print_debug('request done')
console.print_debug(response.str()) console.print_debug(response.str())
} }
if response.status_code == 0 { if response.status_code == 0 {
return error(err_message) return error(err_message)
} }
@@ -192,8 +192,8 @@ pub fn (mut h HTTPConnection) get(req_ Request) !string {
mut req := req_ mut req := req_
req.method = .get req.method = .get
result := h.send(req)! result := h.send(req)!
if !result.is_ok() { if !result.is_ok() {
return error('Could not get ${req}\result:\n${result}') return error('Could not get ${req}\result:\n${result}')
} }
return result.data return result.data
} }
@@ -204,8 +204,8 @@ pub fn (mut h HTTPConnection) delete(req_ Request) !string {
req.method = .delete req.method = .delete
result := h.send(req)! result := h.send(req)!
if !result.is_ok() { if !result.is_ok() {
return error('Could not delete ${req}\result:\n${result}') return error('Could not delete ${req}\result:\n${result}')
} }
return result.data return result.data
} }
@@ -216,6 +216,6 @@ pub fn (mut h HTTPConnection) post_multi_part(req Request, form http.PostMultipa
header.set(http.CommonHeader.content_type, 'multipart/form-data') header.set(http.CommonHeader.content_type, 'multipart/form-data')
req_form.header = header req_form.header = header
url := h.url(req) url := h.url(req)
//TODO: should that not be on line with above? seems to be other codepath. // TODO: should that not be on line with above? seems to be other codepath.
return http.post_multipart_form(url, req_form)! return http.post_multipart_form(url, req_form)!
} }

View File

@@ -11,13 +11,13 @@ pub fn play_osal_core(mut plbook PlayBook) ! {
// Process done actions // Process done actions
play_done(mut plbook)! play_done(mut plbook)!
// Process environment actions // Process environment actions
play_env(mut plbook)! play_env(mut plbook)!
// Process execution actions // Process execution actions
play_exec(mut plbook)! play_exec(mut plbook)!
// Process package actions // Process package actions
play_package(mut plbook)! play_package(mut plbook)!
} }
@@ -29,7 +29,7 @@ fn play_done(mut plbook PlayBook) ! {
mut p := action.params mut p := action.params
key := p.get('key')! key := p.get('key')!
val := p.get('val')! val := p.get('val')!
console.print_header('Setting done flag: ${key} = ${val}') console.print_header('Setting done flag: ${key} = ${val}')
osal.done_set(key, val)! osal.done_set(key, val)!
action.done = true action.done = true
@@ -40,7 +40,7 @@ fn play_done(mut plbook PlayBook) ! {
for mut action in done_delete_actions { for mut action in done_delete_actions {
mut p := action.params mut p := action.params
key := p.get('key')! key := p.get('key')!
console.print_header('Deleting done flag: ${key}') console.print_header('Deleting done flag: ${key}')
osal.done_delete(key)! osal.done_delete(key)!
action.done = true action.done = true
@@ -71,11 +71,11 @@ fn play_env(mut plbook PlayBook) ! {
key := p.get('key')! key := p.get('key')!
value := p.get('value')! value := p.get('value')!
overwrite := p.get_default_true('overwrite') overwrite := p.get_default_true('overwrite')
console.print_header('Setting environment variable: ${key}') console.print_header('Setting environment variable: ${key}')
osal.env_set( osal.env_set(
key: key key: key
value: value value: value
overwrite: overwrite overwrite: overwrite
) )
action.done = true action.done = true
@@ -86,7 +86,7 @@ fn play_env(mut plbook PlayBook) ! {
for mut action in env_unset_actions { for mut action in env_unset_actions {
mut p := action.params mut p := action.params
key := p.get('key')! key := p.get('key')!
console.print_header('Unsetting environment variable: ${key}') console.print_header('Unsetting environment variable: ${key}')
osal.env_unset(key) osal.env_unset(key)
action.done = true action.done = true
@@ -96,7 +96,7 @@ fn play_env(mut plbook PlayBook) ! {
mut env_set_all_actions := plbook.find(filter: 'osal.env_set_all')! mut env_set_all_actions := plbook.find(filter: 'osal.env_set_all')!
for mut action in env_set_all_actions { for mut action in env_set_all_actions {
mut p := action.params mut p := action.params
// Parse environment variables from parameters // Parse environment variables from parameters
mut env_vars := map[string]string{} mut env_vars := map[string]string{}
// Get all parameters and filter out the control parameters // Get all parameters and filter out the control parameters
@@ -106,14 +106,14 @@ fn play_env(mut plbook PlayBook) ! {
env_vars[key] = value env_vars[key] = value
} }
} }
clear_before_set := p.get_default_false('clear_before_set') clear_before_set := p.get_default_false('clear_before_set')
overwrite_if_exists := p.get_default_true('overwrite_if_exists') overwrite_if_exists := p.get_default_true('overwrite_if_exists')
console.print_header('Setting multiple environment variables') console.print_header('Setting multiple environment variables')
osal.env_set_all( osal.env_set_all(
env: env_vars env: env_vars
clear_before_set: clear_before_set clear_before_set: clear_before_set
overwrite_if_exists: overwrite_if_exists overwrite_if_exists: overwrite_if_exists
) )
action.done = true action.done = true
@@ -124,7 +124,7 @@ fn play_env(mut plbook PlayBook) ! {
for mut action in env_load_file_actions { for mut action in env_load_file_actions {
mut p := action.params mut p := action.params
file_path := p.get('file_path')! file_path := p.get('file_path')!
console.print_header('Loading environment from file: ${file_path}') console.print_header('Loading environment from file: ${file_path}')
osal.load_env_file(file_path)! osal.load_env_file(file_path)!
action.done = true action.done = true
@@ -136,24 +136,24 @@ fn play_exec(mut plbook PlayBook) ! {
mut exec_actions := plbook.find(filter: 'osal.exec')! mut exec_actions := plbook.find(filter: 'osal.exec')!
for mut action in exec_actions { for mut action in exec_actions {
mut p := action.params mut p := action.params
cmd := p.get('cmd')! cmd := p.get('cmd')!
mut command := osal.Command{ mut command := osal.Command{
cmd: cmd cmd: cmd
name: p.get_default('name', '')! name: p.get_default('name', '')!
description: p.get_default('description', '')! description: p.get_default('description', '')!
timeout: p.get_int_default('timeout', 3600)! timeout: p.get_int_default('timeout', 3600)!
stdout: p.get_default_true('stdout') stdout: p.get_default_true('stdout')
stdout_log: p.get_default_true('stdout_log') stdout_log: p.get_default_true('stdout_log')
raise_error: p.get_default_true('raise_error') raise_error: p.get_default_true('raise_error')
ignore_error: p.get_default_false('ignore_error') ignore_error: p.get_default_false('ignore_error')
work_folder: p.get_default('work_folder', '')! work_folder: p.get_default('work_folder', '')!
retry: p.get_int_default('retry', 0)! retry: p.get_int_default('retry', 0)!
interactive: p.get_default_true('interactive') interactive: p.get_default_true('interactive')
debug: p.get_default_false('debug') debug: p.get_default_false('debug')
} }
// Parse environment variables if provided // Parse environment variables if provided
if p.exists('environment') { if p.exists('environment') {
env_str := p.get('environment')! env_str := p.get('environment')!
@@ -169,13 +169,13 @@ fn play_exec(mut plbook PlayBook) ! {
} }
command.environment = env_map.clone() command.environment = env_map.clone()
} }
// Parse ignore_error_codes if provided // Parse ignore_error_codes if provided
if p.exists('ignore_error_codes') { if p.exists('ignore_error_codes') {
ignore_codes := p.get_list_int('ignore_error_codes')! ignore_codes := p.get_list_int('ignore_error_codes')!
command.ignore_error_codes = ignore_codes command.ignore_error_codes = ignore_codes
} }
console.print_header('Executing command: ${cmd}') console.print_header('Executing command: ${cmd}')
osal.exec(command)! osal.exec(command)!
action.done = true action.done = true
@@ -186,7 +186,7 @@ fn play_exec(mut plbook PlayBook) ! {
for mut action in exec_silent_actions { for mut action in exec_silent_actions {
mut p := action.params mut p := action.params
cmd := p.get('cmd')! cmd := p.get('cmd')!
console.print_header('Executing command silently: ${cmd}') console.print_header('Executing command silently: ${cmd}')
osal.execute_silent(cmd)! osal.execute_silent(cmd)!
action.done = true action.done = true
@@ -197,7 +197,7 @@ fn play_exec(mut plbook PlayBook) ! {
for mut action in exec_interactive_actions { for mut action in exec_interactive_actions {
mut p := action.params mut p := action.params
cmd := p.get('cmd')! cmd := p.get('cmd')!
console.print_header('Executing command interactively: ${cmd}') console.print_header('Executing command interactively: ${cmd}')
osal.execute_interactive(cmd)! osal.execute_interactive(cmd)!
action.done = true action.done = true
@@ -217,14 +217,14 @@ fn play_package(mut plbook PlayBook) ! {
mut package_install_actions := plbook.find(filter: 'osal.package_install')! mut package_install_actions := plbook.find(filter: 'osal.package_install')!
for mut action in package_install_actions { for mut action in package_install_actions {
mut p := action.params mut p := action.params
// Support both 'name' parameter and arguments // Support both 'name' parameter and arguments
mut packages := []string{} mut packages := []string{}
if p.exists('name') { if p.exists('name') {
packages << p.get('name')! packages << p.get('name')!
} }
// Add any arguments (packages without keys) // Add any arguments (packages without keys)
mut i := 0 mut i := 0
for { for {
@@ -235,7 +235,7 @@ fn play_package(mut plbook PlayBook) ! {
packages << arg packages << arg
i++ i++
} }
for package in packages { for package in packages {
if package != '' { if package != '' {
console.print_header('Installing package: ${package}') console.print_header('Installing package: ${package}')
@@ -249,14 +249,14 @@ fn play_package(mut plbook PlayBook) ! {
mut package_remove_actions := plbook.find(filter: 'osal.package_remove')! mut package_remove_actions := plbook.find(filter: 'osal.package_remove')!
for mut action in package_remove_actions { for mut action in package_remove_actions {
mut p := action.params mut p := action.params
// Support both 'name' parameter and arguments // Support both 'name' parameter and arguments
mut packages := []string{} mut packages := []string{}
if p.exists('name') { if p.exists('name') {
packages << p.get('name')! packages << p.get('name')!
} }
// Add any arguments (packages without keys) // Add any arguments (packages without keys)
mut i := 0 mut i := 0
for { for {
@@ -267,7 +267,7 @@ fn play_package(mut plbook PlayBook) ! {
packages << arg packages << arg
i++ i++
} }
for package in packages { for package in packages {
if package != '' { if package != '' {
console.print_header('Removing package: ${package}') console.print_header('Removing package: ${package}')
@@ -276,4 +276,4 @@ fn play_package(mut plbook PlayBook) ! {
} }
action.done = true action.done = true
} }
} }
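
Note: for reference, a sketch of the heroscript actions this play module dispatches, wrapped in V. The action and parameter names come from the handlers above; the `playbook.new(text: ...)` constructor, the import paths, and the module alias are assumptions.

```v
import freeflowuniverse.herolib.core.playbook // assumed import path
import freeflowuniverse.herolib.osal.core as osal_core // assumed location of play_osal_core

fn run_example() ! {
	// action and parameter names mirror the handlers above
	script := "
	!!osal.env_set key:'MY_FLAG' value:'1'

	!!osal.exec cmd:'echo hello' stdout:true timeout:60

	!!osal.package_install name:'htop'
	"
	mut plbook := playbook.new(text: script)! // constructor signature is an assumption
	osal_core.play_osal_core(mut plbook)!
}

fn main() {
	run_example() or { panic(err) }
}
```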

View File

@@ -24,8 +24,7 @@ pub mut:
// is_running checks if the node is operational by pinging its address
fn (node &StreamerNode) is_running() bool {
-	ping_result := osal.ping(address: node.address, retry: 2) or { return false }
-	return ping_result == .ok
+	return osal.ping(address: node.address, retry: 2)!
}
// connect_to_master connects the worker node to its master // connect_to_master connects the worker node to its master

View File

@@ -99,7 +99,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
provider: args.provider provider: args.provider
)! )!
if repos.len<4 || args.cmd in 'pull,push,commit,delete'.split(',') { if repos.len < 4 || args.cmd in 'pull,push,commit,delete'.split(',') {
args.reload = true args.reload = true
} }

View File

@@ -66,7 +66,7 @@ fn installed() !bool {
if !osal.cmd_exists('limactl') { if !osal.cmd_exists('limactl') {
return false return false
} }
mut res:=os.execute("lima -v"); mut res := os.execute('lima -v')
r := res.output.split_into_lines().filter(it.contains('limactl version')) r := res.output.split_into_lines().filter(it.contains('limactl version'))
if r.len != 1 { if r.len != 1 {
return error("couldn't parse lima version, expected 'lima version' on 1 row.\n${res.output}") return error("couldn't parse lima version, expected 'lima version' on 1 row.\n${res.output}")
@@ -112,42 +112,38 @@ fn install() ! {
} }
console.print_header('download ${url}') console.print_header('download ${url}')
mut e:=osal.download( mut e := osal.download(
url: url url: url
minsize_kb: 20000 minsize_kb: 20000
dest: '/tmp/lima.tar.gz' dest: '/tmp/lima.tar.gz'
expand_file: '/tmp/download/lima' expand_file: '/tmp/download/lima'
)! )!
e.copy(dest: dest_on_os)!
e.copy(dest:dest_on_os)!
mut installer := get()! mut installer := get()!
if installer.extra { if installer.extra {
mut e2:=osal.download( mut e2 := osal.download(
url: url2 url: url2
minsize_kb: 20000 minsize_kb: 20000
dest: '/tmp/lima-additional-guestagents.tar.gz' dest: '/tmp/lima-additional-guestagents.tar.gz'
expand_file: '/tmp/download/lima-additional-guestagents' expand_file: '/tmp/download/lima-additional-guestagents'
)! )!
e2.copy(dest:dest_on_os)! e2.copy(dest: dest_on_os)!
} }
} }
fn destroy() ! { fn destroy() ! {
osal.process_kill_recursive(name: 'lima')!
osal.process_kill_recursive(name:'lima')!
osal.package_remove(' osal.package_remove('
lima lima
limactl limactl
')! ')!
osal.rm(" osal.rm('
lima lima
limactl limactl
${os.home_dir()}/bin/*.lima ${os.home_dir()}/bin/*.lima
@@ -156,5 +152,5 @@ fn destroy() ! {
${os.home_dir()}/share/lima ${os.home_dir()}/share/lima
${os.home_dir()}/share/man/lima* ${os.home_dir()}/share/man/lima*
")! ')!
} }

View File

@@ -12,10 +12,10 @@ const default = true
@[heap] @[heap]
pub struct LimaInstaller { pub struct LimaInstaller {
pub mut: pub mut:
name string = 'default' name string = 'default'
homedir string homedir string
extra bool //do we want to extra's extra bool // do we want to extra's
sshkey string //name of the key to use sshkey string // name of the key to use
} }
// your checking & initialization code if needed // your checking & initialization code if needed

View File

@@ -49,7 +49,7 @@ pub fn install_(args_ InstallArgs) ! {
if platform in [.arch, .ubuntu] { if platform in [.arch, .ubuntu] {
osal.package_install('qemu,libvirt,qemu-common,qemu-img,qemu-system-arm,qemu-system-x86,qemu-tools,libguestfs')! osal.package_install('qemu,libvirt,qemu-common,qemu-img,qemu-system-arm,qemu-system-x86,qemu-tools,libguestfs')!
osal.exec(cmd: 'systemctl start libvirtd && systemctl enable libvirtd')! osal.exec(cmd: 'systemctl start libvirtd && systemctl enable libvirtd')!
} }
if exists()! { if exists()! {
console.print_header(' - qemu exists check ok.') console.print_header(' - qemu exists check ok.')

View File

@@ -4,79 +4,53 @@ import net
import time
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core
+import math
import os

-pub enum PingResult {
-	ok
-	timeout // timeout from ping
-	unknownhost // means we don't know the hostname its a dns issue
-}

@[params]
pub struct PingArgs {
pub mut:
-	address string @[required]
-	count   u8  = 2 // the ping is successful if it got count amount of replies from the other side
-	timeout u16 = 1 // the time in which the other side should respond in seconds
-	retry   u8
+	address string = '8.8.8.8'
+	nr_ping u16 = 2 // amount of ping requests we will do
+	nr_ok   u16 = 2 // how many of them need to be ok
+	retry   u8 // how many times do we retry above sequence, basically we ping ourselves with -c 1
}

-// if reached in timout result will be True
-// address is e.g. 8.8.8.8
-// ping means we check if the destination responds
-pub fn ping(args PingArgs) !PingResult {
-	platform_ := core.platform()!
+// if ping ok, return true
+pub fn ping(args PingArgs) !bool {
+	// platform_ := core.platform()!
	mut cmd := 'ping'
	if args.address.contains(':') {
		cmd = 'ping6'
	}
-	if platform_ == .osx {
-		cmd += ' -c ${args.count} -i ${args.timeout} ${args.address}'
-	} else if platform_ == .ubuntu {
-		cmd += ' -c ${args.count} -w ${args.timeout} ${args.address}'
-	} else {
-		return error('Unsupported platform for ping')
+	cmd += ' -c 1 ${args.address}'
+	if args.nr_ok > args.nr_ping {
+		return error('nr_ok must be <= nr_ping')
	}
-	console.print_debug(cmd)
-	_ := exec(cmd: cmd, retry: args.retry, timeout: 0, stdout: false) or {
-		// println("ping failed.error.\n${err}")
-		if err.code() == 9999 {
-			return .timeout
-		}
-		if platform_ == .osx {
-			return match err.code() {
-				2 {
-					.timeout
-				}
-				68 {
-					.unknownhost
-				}
-				else {
-					// println("${err} ${err.code()}")
-					return error("can't ping on osx (${err.code()})\n${err}")
-				}
-			}
-		} else if platform_ == .ubuntu {
-			return match err.code() {
-				1 { .timeout }
-				2 { .unknownhost }
-				else { error("can't ping on ubuntu (${err.code()})\n${err}") }
-			}
-		} else {
-			panic('bug, should never get here')
-		}
-	}
-	return .ok
+	for _ in 0 .. math.max(1, args.retry) {
+		mut nrerrors := 0
+		for _ in 0 .. args.nr_ping {
+			console.print_debug(cmd)
+			res := os.execute(cmd)
+			if res.exit_code > 0 {
+				nrerrors += 1
+			}
+			println(res)
+		}
+		successes := args.nr_ping - nrerrors
+		if successes >= args.nr_ok {
+			return true
+		}
+	}
+	return false
}
@[params] @[params]
pub struct RebootWaitArgs { pub struct RebootWaitArgs {
pub mut: pub mut:
address string @[required] // 192.168.8.8 address string @[required] // 192.168.8.8
timeout_down i64 = 60 // total time in seconds to wait till its down timeout_down i64 = 60 // total time in seconds to wait till its down
timeout_up i64 = 60 * 5 timeout_up i64 = 60 * 5
} }
// test if a tcp port answers // test if a tcp port answers
@@ -89,26 +63,26 @@ pub fn reboot_wait(args RebootWaitArgs) ! {
	start_time := time.now().unix()
	mut run_time := 0.0
	for true {
-		console.print_debug("Waiting for server to go down...")
+		console.print_debug('Waiting for server to go down...')
		run_time = time.now().unix()
		if run_time > start_time + args.timeout_down {
-			return error("timeout in waiting for server down")
+			return error('timeout in waiting for server down')
		}
-		if ping(address:args.address)! == .timeout {
+		if ping(address: args.address)! == false {
			break
		}
-		println(ping(address:args.address)!)
+		println(ping(address: args.address)!)
		$dbg;
		time.sleep(1)
	}
	for true {
-		console.print_debug("Waiting for server to come back up...")
+		console.print_debug('Waiting for server to come back up...')
		run_time = time.now().unix()
		if run_time > start_time + args.timeout_up {
-			return error("timeout in waiting for server up")
+			return error('timeout in waiting for server up')
		}
-		if ping(address:args.address)! == .ok {
-			println(ping(address:args.address)!)
+		if ping(address: args.address)! == true {
+			println(ping(address: args.address)!)
			$dbg;
			break
		}
@@ -116,7 +90,6 @@ pub fn reboot_wait(args RebootWaitArgs) ! {
} }
} }
@[params] @[params]
pub struct TcpPortTestArgs { pub struct TcpPortTestArgs {
pub mut: pub mut:
@@ -190,31 +163,28 @@ pub fn is_ip_on_local_interface(public_ip string) !bool {
return false return false
} }
-//will give error if ssh test did not work
+// will give error if ssh test did not work
pub fn ssh_check(args TcpPortTestArgs) ! {
	errmsg, res := ssh_testrun_internal(args)!
-	if res != .ok{
+	if res != .ok {
		return error(errmsg)
	}
}
pub enum SSHResult { pub enum SSHResult {
ok ok
ping // timeout from ping ping // timeout from ping
tcpport // means we don't know the hostname its a dns issue tcpport // means we don't know the hostname its a dns issue
ssh ssh
} }
pub fn ssh_test(args TcpPortTestArgs) !SSHResult { pub fn ssh_test(args TcpPortTestArgs) !SSHResult {
_, res := ssh_testrun_internal(args)! _, res := ssh_testrun_internal(args)!
return res return res
} }
//will give error if ssh test did not work // will give error if ssh test did not work
pub fn ssh_wait(args TcpPortTestArgs) ! { pub fn ssh_wait(args TcpPortTestArgs) ! {
start_time := time.now().unix_milli() start_time := time.now().unix_milli()
mut run_time := 0.0 mut run_time := 0.0
for true { for true {
@@ -225,18 +195,16 @@ pub fn ssh_wait(args TcpPortTestArgs) ! {
if run_time > start_time + args.timeout { if run_time > start_time + args.timeout {
return error(errmsg) return error(errmsg)
} }
if res == .ok{ if res == .ok {
return return
} }
} }
} }
-fn ssh_testrun_internal(args TcpPortTestArgs) !(string,SSHResult) {
-	cmd:='
+fn ssh_testrun_internal(args TcpPortTestArgs) !(string, SSHResult) {
+	cmd := '
	ssh -o BatchMode=yes -o ConnectTimeout=3 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -q "${args.address}" exit
	if [ $? -eq 0 ]; then
		echo "OK: SSH works"
@@ -254,9 +222,9 @@ fn ssh_testrun_internal(args TcpPortTestArgs) !(string,SSHResult) {
	fi
	echo "ERROR: Host unreachable, over ping and ssh"
	exit 3
	'
	// console.print_debug('ssh test cmd: ${cmd}')
-	res:=exec(cmd:cmd,ignore_error:true,stdout:false,debug:false)!
+	res := exec(cmd: cmd, ignore_error: true, stdout: false, debug: false)!
// console.print_debug('ssh test result: ${res}') // console.print_debug('ssh test result: ${res}')
if res.exit_code == 0 { if res.exit_code == 0 {
return res.output, SSHResult.ok return res.output, SSHResult.ok
@@ -267,4 +235,4 @@ fn ssh_testrun_internal(args TcpPortTestArgs) !(string,SSHResult) {
} else { } else {
return res.output, SSHResult.ssh return res.output, SSHResult.ssh
} }
} }
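
Note: a short, hedged sketch of the three SSH helpers defined above; the import path and the `user@host` value are placeholders, and the millisecond `timeout` follows the `unix_milli` comparison used in `ssh_wait`.

```v
import freeflowuniverse.herolib.osal // import path assumed

fn check_host(addr string) ! {
	// raises an error when the host is not reachable over ssh
	osal.ssh_check(address: addr)!

	// returns .ok / .ping / .tcpport / .ssh without raising
	res := osal.ssh_test(address: addr)!
	println(res)

	// poll until ssh answers or the timeout (in ms) expires
	osal.ssh_wait(address: addr, timeout: 60_000)!
}

fn main() {
	check_host('root@192.0.2.10') or { panic(err) } // placeholder user@host
}
```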

View File

@@ -1,6 +1,5 @@
module incatokens module incatokens
// AMMPool represents a simple Automated Market Maker pool. // AMMPool represents a simple Automated Market Maker pool.
// It uses the constant product formula (x * y = k) to determine prices. // It uses the constant product formula (x * y = k) to determine prices.
pub struct AMMPool { pub struct AMMPool {
@@ -42,4 +41,4 @@ pub fn (pool AMMPool) get_price() f64 {
return 0 // Avoid division by zero if there are no tokens in the pool. return 0 // Avoid division by zero if there are no tokens in the pool.
} }
return pool.usdc / pool.tokens return pool.usdc / pool.tokens
} }
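
Note: the constant product relation mentioned in the comment above can be illustrated with a tiny standalone sketch; it is independent of the AMMPool API, which is only partially visible in this hunk.

```v
fn main() {
	// constant product AMM: usdc * tokens = k stays invariant across trades
	mut usdc := 1000.0
	mut tokens := 500.0
	k := usdc * tokens // 500000.0

	// a trader adds 100 USDC to the pool and takes tokens out
	usdc += 100.0
	new_tokens := k / usdc
	tokens_out := tokens - new_tokens
	tokens = new_tokens

	price := usdc / tokens // same formula as get_price() above: usdc / tokens
	println('trader received ${tokens_out:.4f} tokens, new price ${price:.4f} USDC per token')
}
```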

View File

@@ -18,20 +18,20 @@ fn test_trade() {
mut pool := AMMPool{} mut pool := AMMPool{}
pool.add_liquidity(1000.0, 500.0) // Initial price: 0.5 USDC/token pool.add_liquidity(1000.0, 500.0) // Initial price: 0.5 USDC/token
initial_tokens := pool.tokens initial_tokens := pool.tokens
// Trade 100 USDC for tokens // Trade 100 USDC for tokens
usdc_to_trade := 100.0 usdc_to_trade := 100.0
pool.trade(usdc_to_trade)! pool.trade(usdc_to_trade)!
// Verify the new state of the pool // Verify the new state of the pool
assert pool.usdc == 600.0 assert pool.usdc == 600.0
expected_tokens := pool.k / pool.usdc expected_tokens := pool.k / pool.usdc
assert math.abs(pool.tokens - expected_tokens) < 0.0001 assert math.abs(pool.tokens - expected_tokens) < 0.0001
// Check that tokens were removed from the pool // Check that tokens were removed from the pool
tokens_received := initial_tokens - pool.tokens tokens_received := initial_tokens - pool.tokens
assert tokens_received > 0 assert tokens_received > 0
// Verify the new price (it should be higher) // Verify the new price (it should be higher)
new_price := pool.get_price() new_price := pool.get_price()
assert new_price > 0.5 assert new_price > 0.5
@@ -41,15 +41,15 @@ fn test_trade() {
fn test_price_impact() { fn test_price_impact() {
mut pool := AMMPool{} mut pool := AMMPool{}
pool.add_liquidity(1000.0, 1000.0) // Initial price: 1.0 USDC/token pool.add_liquidity(1000.0, 1000.0) // Initial price: 1.0 USDC/token
// First trade // First trade
pool.trade(100.0)! pool.trade(100.0)!
price1 := pool.get_price() price1 := pool.get_price()
// Second trade // Second trade
pool.trade(100.0)! pool.trade(100.0)!
price2 := pool.get_price() price2 := pool.get_price()
// The price should increase after each trade // The price should increase after each trade
assert price2 > price1 assert price2 > price1
} }
@@ -62,7 +62,7 @@ fn test_trade_in_empty_pool() {
pool.trade(100.0) or { pool.trade(100.0) or {
expected_error := 'AMM pool is empty and cannot facilitate trades' expected_error := 'AMM pool is empty and cannot facilitate trades'
assert err.msg() == expected_error assert err.msg() == expected_error
return // Exit the test function successfully after catching the error. return
} }
// This line should not be reached if the error is caught correctly. // This line should not be reached if the error is caught correctly.
assert false, 'Expected trade to fail, but it succeeded' assert false, 'Expected trade to fail, but it succeeded'
@@ -73,4 +73,4 @@ fn test_get_price_in_empty_pool() {
pool := AMMPool{} pool := AMMPool{}
price := pool.get_price() price := pool.get_price()
assert price == 0.0 assert price == 0.0
} }

View File

@@ -11,17 +11,15 @@ pub mut:
token_supply f64 // The total number of tokens available for sale. token_supply f64 // The total number of tokens available for sale.
} }
// AuctionResult holds the outcome of a simulated Dutch auction. // AuctionResult holds the outcome of a simulated Dutch auction.
pub struct AuctionResult { pub struct AuctionResult {
pub mut: pub mut:
tokens_sold f64 // The total number of tokens sold. tokens_sold f64 // The total number of tokens sold.
clearing_price f64 // The final price per token. clearing_price f64 // The final price per token.
usd_raised f64 // The total funds raised. usd_raised f64 // The total funds raised.
fully_subscribed bool // True if all tokens were sold. fully_subscribed bool // True if all tokens were sold.
} }
// simulate_auction performs a simplified Dutch auction simulation. // simulate_auction performs a simplified Dutch auction simulation.
// It determines the market-clearing price based on total demand and token supply. // It determines the market-clearing price based on total demand and token supply.
pub fn simulate_auction(config AuctionConfig) !AuctionResult { pub fn simulate_auction(config AuctionConfig) !AuctionResult {
@@ -32,9 +30,9 @@ pub fn simulate_auction(config AuctionConfig) !AuctionResult {
// If there are no tokens to sell, the auction is trivially complete. // If there are no tokens to sell, the auction is trivially complete.
if token_supply <= 0 { if token_supply <= 0 {
return AuctionResult{ return AuctionResult{
tokens_sold: 0 tokens_sold: 0
clearing_price: 0 clearing_price: 0
usd_raised: 0 usd_raised: 0
fully_subscribed: true fully_subscribed: true
} }
} }
@@ -48,9 +46,9 @@ pub fn simulate_auction(config AuctionConfig) !AuctionResult {
// The number of tokens sold is determined by how many can be bought with the total demand at the minimum price. // The number of tokens sold is determined by how many can be bought with the total demand at the minimum price.
tokens_sold := math.min(demand / min_price, token_supply) tokens_sold := math.min(demand / min_price, token_supply)
return AuctionResult{ return AuctionResult{
tokens_sold: tokens_sold tokens_sold: tokens_sold
clearing_price: min_price clearing_price: min_price
usd_raised: tokens_sold * min_price usd_raised: tokens_sold * min_price
fully_subscribed: tokens_sold >= token_supply fully_subscribed: tokens_sold >= token_supply
} }
} }
@@ -58,10 +56,9 @@ pub fn simulate_auction(config AuctionConfig) !AuctionResult {
// Scenario 2: Demand is sufficient to sell all tokens at or above the minimum price. // Scenario 2: Demand is sufficient to sell all tokens at or above the minimum price.
// The auction is fully subscribed, and the clearing price is the implied average price. // The auction is fully subscribed, and the clearing price is the implied average price.
return AuctionResult{ return AuctionResult{
tokens_sold: token_supply tokens_sold: token_supply
clearing_price: implied_avg clearing_price: implied_avg
usd_raised: demand usd_raised: demand
fully_subscribed: true fully_subscribed: true
} }
} }
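
Note: a worked sketch of the two branches above, using the same numbers as the test configurations later in this commit. It assumes the implied average price is demand / token_supply, as the surrounding comments suggest, and that it runs inside the incatokens module like the tests; it is an illustration of the logic, not additional API.

```v
fn auction_examples() ! {
	// Fully subscribed: implied average = 125000 / 100000 = 1.25 >= min_price (0.5)
	// -> tokens_sold = 100000, clearing_price = 1.25, usd_raised = 125000
	full := simulate_auction(AuctionConfig{
		demand:       125000.0
		min_price:    0.5
		token_supply: 100000.0
	})!
	assert full.fully_subscribed

	// Below the minimum: implied average = 40000 / 100000 = 0.4 < 0.5, so clear at min_price
	// -> tokens_sold = min(40000 / 0.5, 100000) = 80000, usd_raised = 80000 * 0.5 = 40000
	partial := simulate_auction(AuctionConfig{
		demand:       40000.0
		min_price:    0.5
		token_supply: 100000.0
	})!
	assert !partial.fully_subscribed
}
```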

View File

@@ -7,8 +7,8 @@ import math
// so all tokens are sold at the market-clearing price. // so all tokens are sold at the market-clearing price.
fn test_simulate_auction_full_subscription() { fn test_simulate_auction_full_subscription() {
config := AuctionConfig{ config := AuctionConfig{
demand: 125000.0 demand: 125000.0
min_price: 0.5 min_price: 0.5
token_supply: 100000.0 token_supply: 100000.0
} }
res := simulate_auction(config)! res := simulate_auction(config)!
@@ -23,8 +23,8 @@ fn test_simulate_auction_full_subscription() {
// The auction clears at the minimum price, and only a portion of tokens are sold. // The auction clears at the minimum price, and only a portion of tokens are sold.
fn test_simulate_auction_partial_fill() { fn test_simulate_auction_partial_fill() {
config := AuctionConfig{ config := AuctionConfig{
demand: 40000.0 demand: 40000.0
min_price: 0.5 min_price: 0.5
token_supply: 100000.0 token_supply: 100000.0
} }
res := simulate_auction(config)! res := simulate_auction(config)!
@@ -40,8 +40,8 @@ fn test_simulate_auction_partial_fill() {
// If there are no tokens to sell, the auction should result in zero sales and fundraising. // If there are no tokens to sell, the auction should result in zero sales and fundraising.
fn test_simulate_auction_zero_supply() { fn test_simulate_auction_zero_supply() {
config := AuctionConfig{ config := AuctionConfig{
demand: 50000.0 demand: 50000.0
min_price: 0.5 min_price: 0.5
token_supply: 0 token_supply: 0
} }
res := simulate_auction(config)! res := simulate_auction(config)!
@@ -56,8 +56,8 @@ fn test_simulate_auction_zero_supply() {
// The auction should be fully subscribed at the minimum price. // The auction should be fully subscribed at the minimum price.
fn test_simulate_auction_demand_equals_min_price() { fn test_simulate_auction_demand_equals_min_price() {
config := AuctionConfig{ config := AuctionConfig{
demand: 50000.0 demand: 50000.0
min_price: 0.5 min_price: 0.5
token_supply: 100000.0 token_supply: 100000.0
} }
res := simulate_auction(config)! res := simulate_auction(config)!
@@ -65,4 +65,4 @@ fn test_simulate_auction_demand_equals_min_price() {
assert res.clearing_price == 0.5 assert res.clearing_price == 0.5
assert res.usd_raised == 50000.0 assert res.usd_raised == 50000.0
assert res.fully_subscribed == true assert res.fully_subscribed == true
} }

View File

@@ -9,13 +9,13 @@ pub fn (sim Simulation) generate_price_chart() !echarts.EChartsOption {
for name, _ in sim.scenarios { for name, _ in sim.scenarios {
rownames << 'scenario_${name}_price' rownames << 'scenario_${name}_price'
} }
return sim.price_sheet.line_chart( return sim.price_sheet.line_chart(
rowname: rownames.join(',') rowname: rownames.join(',')
period_type: .month period_type: .month
title: 'INCA Token Price Evolution' title: 'INCA Token Price Evolution'
title_sub: 'Price paths across different scenarios' title_sub: 'Price paths across different scenarios'
unit: .normal unit: .normal
)! )!
} }
@@ -23,30 +23,30 @@ pub fn (sim Simulation) generate_price_chart() !echarts.EChartsOption {
pub fn (sim Simulation) generate_market_cap_chart() !echarts.EChartsOption { pub fn (sim Simulation) generate_market_cap_chart() !echarts.EChartsOption {
// Create market cap rows from price rows // Create market cap rows from price rows
mut mc_sheet := spreadsheet.sheet_new( mut mc_sheet := spreadsheet.sheet_new(
name: '${sim.name}_market_cap' name: '${sim.name}_market_cap'
nrcol: sim.price_sheet.nrcol nrcol: sim.price_sheet.nrcol
curr: sim.params.simulation.currency curr: sim.params.simulation.currency
)! )!
for name, scenario in sim.scenarios { for name, scenario in sim.scenarios {
mut mc_row := mc_sheet.row_new( mut mc_row := mc_sheet.row_new(
name: 'scenario_${name}_mc' name: 'scenario_${name}_mc'
tags: 'scenario:${name} type:market_cap' tags: 'scenario:${name} type:market_cap'
descr: 'Market cap for ${name} scenario' descr: 'Market cap for ${name} scenario'
)! )!
price_row := sim.price_sheet.row_get('scenario_${name}_price')! price_row := sim.price_sheet.row_get('scenario_${name}_price')!
for i, cell in price_row.cells { for i, cell in price_row.cells {
mc_row.cells[i].val = cell.val * sim.params.distribution.total_supply mc_row.cells[i].val = cell.val * sim.params.distribution.total_supply
} }
} }
return mc_sheet.bar_chart( return mc_sheet.bar_chart(
namefilter: mc_sheet.rows.keys() namefilter: mc_sheet.rows.keys()
period_type: .quarter period_type: .quarter
title: 'INCA Market Capitalization' title: 'INCA Market Capitalization'
title_sub: 'Market cap evolution by quarter' title_sub: 'Market cap evolution by quarter'
unit: .million unit: .million
)! )!
} }
@@ -55,13 +55,13 @@ pub fn (mut sim Simulation) generate_vesting_chart() !echarts.EChartsOption {
if isnil(sim.vesting_sheet) { if isnil(sim.vesting_sheet) {
sim.create_vesting_schedules()! sim.create_vesting_schedules()!
} }
return sim.vesting_sheet.line_chart( return sim.vesting_sheet.line_chart(
includefilter: ['type:vesting'] includefilter: ['type:vesting']
excludefilter: ['type:total_vesting'] excludefilter: ['type:total_vesting']
period_type: .quarter period_type: .quarter
title: 'Token Vesting Schedule' title: 'Token Vesting Schedule'
title_sub: 'Cumulative tokens unlocked over time' title_sub: 'Cumulative tokens unlocked over time'
unit: .million unit: .million
)! )!
} }

View File

@@ -7,10 +7,10 @@ import time
// Struct to hold all data for the report template // Struct to hold all data for the report template
pub struct ReportData { pub struct ReportData {
pub mut: pub mut:
sim &Simulation sim &Simulation
generation_date string generation_date string
total_raised f64 total_raised f64
initial_price f64 initial_price f64
} }
// Export data to CSV // Export data to CSV
@@ -22,11 +22,11 @@ pub fn (sim Simulation) export_csv(sheet_name string, path string) ! {
'vesting' { sim.vesting_sheet } 'vesting' { sim.vesting_sheet }
else { return error('Unknown sheet: ${sheet_name}') } else { return error('Unknown sheet: ${sheet_name}') }
} }
console.print_debug('Exporting sheet "${sheet_name}" to: ${path}') console.print_debug('Exporting sheet "${sheet_name}" to: ${path}')
sheet.export_csv( sheet.export_csv(
path: path path: path
separator: ',' separator: ','
include_empty: false include_empty: false
)! )!
console.print_debug('Finished exporting sheet "${sheet_name}".') console.print_debug('Finished exporting sheet "${sheet_name}".')
@@ -34,8 +34,10 @@ pub fn (sim Simulation) export_csv(sheet_name string, path string) ! {
// Generate a single scenario section for use in templates // Generate a single scenario section for use in templates
pub fn (sim Simulation) generate_scenario_section(scenario_name string) !string { pub fn (sim Simulation) generate_scenario_section(scenario_name string) !string {
scenario := sim.scenarios[scenario_name] or { return error('Scenario not found: ${scenario_name}') } scenario := sim.scenarios[scenario_name] or {
return error('Scenario not found: ${scenario_name}')
}
mut lines := []string{} mut lines := []string{}
lines << '### ${scenario.name} Scenario' lines << '### ${scenario.name} Scenario'
lines << '**Parameters:**' lines << '**Parameters:**'
@@ -43,29 +45,30 @@ pub fn (sim Simulation) generate_scenario_section(scenario_name string) !string
lines << '- **AMM Net Trade:** \$${scenario.amm_trades.map(it.str()).join(', ')}' lines << '- **AMM Net Trade:** \$${scenario.amm_trades.map(it.str()).join(', ')}'
lines << '' lines << ''
lines << '**Results:**' lines << '**Results:**'
// Create table header // Create table header
mut header := ['Treasury Raised', 'Final Price'] mut header := ['Treasury Raised', 'Final Price']
for round in sim.investor_rounds { for round in sim.investor_rounds {
header << 'ROI ${round.name}' header << 'ROI ${round.name}'
} }
lines << '| ${header.join(' | ')} |' lines << '| ${header.join(' | ')} |'
// Create separator row // Create separator row
mut separator := [':---', ':---'] mut separator := [':---', ':---']
for _ in sim.investor_rounds { for _ in sim.investor_rounds {
separator << ':---' separator << ':---'
} }
lines << '| ${separator.join('|')} |' lines << '| ${separator.join('|')} |'
// Create data row // Create data row
mut row := ['\$${(scenario.final_metrics.treasury_total / 1_000_000):.1f}M', '\$${scenario.final_metrics.final_price:.4f}'] mut row := ['\$${(scenario.final_metrics.treasury_total / 1_000_000):.1f}M',
'\$${scenario.final_metrics.final_price:.4f}']
for round in sim.investor_rounds { for round in sim.investor_rounds {
roi := scenario.final_metrics.investor_roi[round.name] or { 0.0 } roi := scenario.final_metrics.investor_roi[round.name] or { 0.0 }
row << '${roi:.2f}x' row << '${roi:.2f}x'
} }
lines << '| ${row.join(' | ')} |' lines << '| ${row.join(' | ')} |'
return lines.join('\n') return lines.join('\n')
} }
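
Note: for orientation, the markdown table assembled above renders roughly like this; the round names and figures are purely hypothetical placeholders.

```md
| Treasury Raised | Final Price | ROI Seed | ROI Private |
| :---|:---|:---|:--- |
| $12.5M | $0.1234 | 3.25x | 2.10x |
```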
@@ -79,26 +82,25 @@ fn (sim Simulation) calculate_total_raised() f64 {
return total return total
} }
pub fn (sim Simulation) generate_report(output_dir string) ! { pub fn (sim Simulation) generate_report(output_dir string) ! {
// Ensure output directory exists // Ensure output directory exists
mut output_path := pathlib.get_dir(path: output_dir, create: true)! mut output_path := pathlib.get_dir(path: output_dir, create: true)!
// Prepare template variables // Prepare template variables
data := ReportData{ data := ReportData{
sim: &sim sim: &sim
generation_date: time.now().format() generation_date: time.now().format()
total_raised: sim.calculate_total_raised() total_raised: sim.calculate_total_raised()
initial_price: sim.get_last_investor_price() initial_price: sim.get_last_investor_price()
} }
// Process template // Process template
content := $tmpl('templates/report.md') content := $tmpl('templates/report.md')
// Write report // Write report
report_path := '${output_path.path}/${sim.name}_report.md' report_path := '${output_path.path}/${sim.name}_report.md'
mut report_file := pathlib.get_file(path: report_path, create: true)! mut report_file := pathlib.get_file(path: report_path, create: true)!
report_file.write(content)! report_file.write(content)!
console.print_green(' Report generated: ${report_path}') console.print_green(' Report generated: ${report_path}')
} }

View File

@@ -10,42 +10,40 @@ __global (
// Create simulation from parameters struct - MAIN ENTRY POINT // Create simulation from parameters struct - MAIN ENTRY POINT
pub fn simulation_new(params SimulationParams) !&Simulation { pub fn simulation_new(params SimulationParams) !&Simulation {
name := texttools.name_fix(params.name) name := texttools.name_fix(params.name)
// Initialize spreadsheets for tracking // Initialize spreadsheets for tracking
price_sheet := spreadsheet.sheet_new( price_sheet := spreadsheet.sheet_new(
name: '${name}_prices' name: '${name}_prices'
nrcol: params.simulation.nrcol nrcol: params.simulation.nrcol
curr: params.simulation.currency curr: params.simulation.currency
)! )!
token_sheet := spreadsheet.sheet_new( token_sheet := spreadsheet.sheet_new(
name: '${name}_tokens' name: '${name}_tokens'
nrcol: params.simulation.nrcol nrcol: params.simulation.nrcol
curr: params.simulation.currency curr: params.simulation.currency
)! )!
investment_sheet := spreadsheet.sheet_new( investment_sheet := spreadsheet.sheet_new(
name: '${name}_investments' name: '${name}_investments'
nrcol: params.simulation.nrcol nrcol: params.simulation.nrcol
curr: params.simulation.currency curr: params.simulation.currency
)! )!
mut sim := &Simulation{ mut sim := &Simulation{
name: name name: name
params: params params: params
price_sheet: price_sheet price_sheet: price_sheet
token_sheet: token_sheet token_sheet: token_sheet
investment_sheet: investment_sheet investment_sheet: investment_sheet
vesting_sheet: unsafe { nil } vesting_sheet: unsafe { nil }
} }
simulations[name] = sim simulations[name] = sim
return sim return sim
} }
pub fn simulation_get(name string) !&Simulation { pub fn simulation_get(name string) !&Simulation {
name_fixed := texttools.name_fix(name) name_fixed := texttools.name_fix(name)
-	return simulations[name_fixed] or {
-		return error('Simulation "${name_fixed}" not found')
-	}
+	return simulations[name_fixed] or { return error('Simulation "${name_fixed}" not found') }
}

View File

@@ -1,4 +1,5 @@
module incatokens module incatokens
import freeflowuniverse.herolib.biz.spreadsheet import freeflowuniverse.herolib.biz.spreadsheet
import os import os
import incatokens.defaults import incatokens.defaults
@@ -7,10 +8,10 @@ import incatokens.factory
fn test_simulation_creation() { fn test_simulation_creation() {
mut params := default_params() mut params := default_params()
params.name = 'test_sim_creation' params.name = 'test_sim_creation'
mut sim := factory.simulation_new(params)! mut sim := factory.simulation_new(params)!
sim.run_simulation()! // Run the simulation sim.run_simulation()! // Run the simulation
assert sim.name == 'test_sim_creation' assert sim.name == 'test_sim_creation'
assert sim.params.distribution.total_supply == params.distribution.total_supply assert sim.params.distribution.total_supply == params.distribution.total_supply
assert sim.investor_rounds.len == params.investor_rounds.len assert sim.investor_rounds.len == params.investor_rounds.len
@@ -19,17 +20,17 @@ fn test_simulation_creation() {
fn test_scenario_execution() { fn test_scenario_execution() {
mut params := default_params() mut params := default_params()
params.name = 'test_scenario_exec' params.name = 'test_scenario_exec'
mut sim := factory.simulation_new(params)! mut sim := factory.simulation_new(params)!
sim.run_simulation()! sim.run_simulation()!
// Get the 'Low' scenario results // Get the 'Low' scenario results
low_scenario := sim.scenarios['Low']! low_scenario := sim.scenarios['Low']!
assert low_scenario.name == 'Low' assert low_scenario.name == 'Low'
assert low_scenario.final_metrics.treasury_total > 0 assert low_scenario.final_metrics.treasury_total > 0
assert low_scenario.final_metrics.final_price > 0 assert low_scenario.final_metrics.final_price > 0
// Check ROI is positive for all rounds // Check ROI is positive for all rounds
for round in sim.investor_rounds { for round in sim.investor_rounds {
roi := low_scenario.final_metrics.investor_roi[round.name] or { 0.0 } roi := low_scenario.final_metrics.investor_roi[round.name] or { 0.0 }
@@ -40,19 +41,19 @@ fn test_scenario_execution() {
fn test_vesting_schedules() { fn test_vesting_schedules() {
mut params := default_params() mut params := default_params()
params.name = 'test_vesting_schedules' params.name = 'test_vesting_schedules'
mut sim := factory.simulation_new(params)! mut sim := factory.simulation_new(params)!
sim.run_simulation()! sim.run_simulation()!
// Check team vesting // Check team vesting
team_row := sim.vesting_sheet.row_get('team_vesting')! team_row := sim.vesting_sheet.row_get('team_vesting')!
// Before cliff (month 11), should be 0 // Before cliff (month 11), should be 0
assert team_row.cells[11].val == 0 assert team_row.cells[11].val == 0
// After cliff starts (month 12), should have tokens // After cliff starts (month 12), should have tokens
assert team_row.cells[12].val > 0 assert team_row.cells[12].val > 0
// After full vesting (month 48), should have all tokens // After full vesting (month 48), should have all tokens
total_team_tokens := sim.params.distribution.total_supply * sim.params.distribution.team_pct total_team_tokens := sim.params.distribution.total_supply * sim.params.distribution.team_pct
assert team_row.cells[48].val == total_team_tokens assert team_row.cells[48].val == total_team_tokens
@@ -64,47 +65,47 @@ fn test_export_functionality() {
params.output.export_dir = '/tmp/incatokens_test_output' params.output.export_dir = '/tmp/incatokens_test_output'
params.output.generate_csv = true params.output.generate_csv = true
params.output.generate_report = true params.output.generate_report = true
mut sim := factory.simulation_new(params)! mut sim := factory.simulation_new(params)!
sim.run_simulation()! sim.run_simulation()!
// Ensure price sheet has data before export // Ensure price sheet has data before export
assert sim.price_sheet.rows.len > 0 assert sim.price_sheet.rows.len > 0
// Export all data // Export all data
os.mkdir_all(params.output.export_dir)! os.mkdir_all(params.output.export_dir)!
sim.export_all(params.output.export_dir)! sim.export_all(params.output.export_dir)!
// Test CSV export // Test CSV export
assert os.exists('${params.output.export_dir}/${params.name}_prices.csv') assert os.exists('${params.output.export_dir}/${params.name}_prices.csv')
assert os.exists('${params.output.export_dir}/${params.name}_tokens.csv') assert os.exists('${params.output.export_dir}/${params.name}_tokens.csv')
assert os.exists('${params.output.export_dir}/${params.name}_investments.csv') assert os.exists('${params.output.export_dir}/${params.name}_investments.csv')
assert os.exists('${params.output.export_dir}/${params.name}_vesting.csv') assert os.exists('${params.output.export_dir}/${params.name}_vesting.csv')
// Test report generation // Test report generation
assert os.exists('${params.output.export_dir}/${params.name}_report.md') assert os.exists('${params.output.export_dir}/${params.name}_report.md')
} }
fn test_direct_csv_export() { fn test_direct_csv_export() {
mut sheet := spreadsheet.sheet_new( mut sheet := spreadsheet.sheet_new(
name: 'test_sheet' name: 'test_sheet'
nrcol: 2 nrcol: 2
curr: 'USD' curr: 'USD'
)! )!
mut row := sheet.row_new(name: 'test_row')! mut row := sheet.row_new(name: 'test_row')!
row.cells[0].val = 100.0 row.cells[0].val = 100.0
row.cells[1].val = 200.0 row.cells[1].val = 200.0
export_path := '/tmp/test_direct_export.csv' export_path := '/tmp/test_direct_export.csv'
os.rm(export_path) or {} // Clean up previous run os.rm(export_path) or {} // Clean up previous run
sheet.export_csv( sheet.export_csv(
path: export_path path: export_path
separator: ',' separator: ','
include_empty: false include_empty: false
)! )!
assert os.exists(export_path) assert os.exists(export_path)
os.rm(export_path) or {} // Clean up os.rm(export_path) or {} // Clean up
} }

View File

@@ -1,6 +1,6 @@
module incatokens

import os

// SimulationParams is the main configuration struct containing all parameters
pub struct SimulationParams {
@@ -59,7 +59,7 @@ pub mut:
// SimulationConfig defines technical simulation parameters
pub struct SimulationConfig {
pub mut:
	nrcol    int    = 60 // Number of months to simulate
	currency string = 'USD'
}
@@ -79,4 +79,3 @@ pub mut:
	generate_charts bool = true
	generate_report bool = true
}

View File

@@ -6,7 +6,6 @@ import freeflowuniverse.herolib.core.pathlib
import os import os
pub fn play(mut plbook PlayBook) ! { pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'incatokens.') { if !plbook.exists(filter: 'incatokens.') {
return return
} }
@@ -45,15 +44,21 @@ pub fn play(mut plbook PlayBook) ! {
params.simulation.currency = p.get_default('currency', 'USD')! params.simulation.currency = p.get_default('currency', 'USD')!
// Configure economics // Configure economics
	params.economics.epoch1_floor_uplift = p.get_float_default('epoch1_floor_uplift',
		1.20)!
	params.economics.epochn_floor_uplift = p.get_float_default('epochn_floor_uplift',
		1.20)!
	params.economics.amm_liquidity_depth_factor = p.get_float_default('amm_liquidity_depth_factor',
		2.0)!
	// Configure vesting
	params.vesting.team.cliff_months = p.get_int_default('team_cliff_months', 12)!
	params.vesting.team.vesting_months = p.get_int_default('team_vesting_months',
		36)!
	params.vesting.treasury.cliff_months = p.get_int_default('treasury_cliff_months',
		12)!
	params.vesting.treasury.vesting_months = p.get_int_default('treasury_vesting_months',
		48)!
// Configure output - use export_path if provided, otherwise use param // Configure output - use export_path if provided, otherwise use param
if export_path != '' { if export_path != '' {
@@ -69,13 +74,13 @@ pub fn play(mut plbook PlayBook) ! {
mut investor_rounds := []InvestorRoundConfig{} mut investor_rounds := []InvestorRoundConfig{}
for round_action in plbook.find(filter: 'incatokens.investor_round')! { for round_action in plbook.find(filter: 'incatokens.investor_round')! {
mut rp := round_action.params mut rp := round_action.params
round := InvestorRoundConfig{ round := InvestorRoundConfig{
name: rp.get('name')! name: rp.get('name')!
allocation_pct: rp.get_float('allocation_pct')! allocation_pct: rp.get_float('allocation_pct')!
price: rp.get_float('price')! price: rp.get_float('price')!
vesting: VestingConfig{ vesting: VestingConfig{
cliff_months: rp.get_int('cliff_months')! cliff_months: rp.get_int('cliff_months')!
vesting_months: rp.get_int('vesting_months')! vesting_months: rp.get_int('vesting_months')!
} }
} }
@@ -88,10 +93,10 @@ pub fn play(mut plbook PlayBook) ! {
mut scenarios := []ScenarioConfig{} mut scenarios := []ScenarioConfig{}
for scenario_action in plbook.find(filter: 'incatokens.scenario')! { for scenario_action in plbook.find(filter: 'incatokens.scenario')! {
mut sp := scenario_action.params mut sp := scenario_action.params
scenario := ScenarioConfig{ scenario := ScenarioConfig{
name: sp.get('name')! name: sp.get('name')!
demands: sp.get_list_f64('demands')! demands: sp.get_list_f64('demands')!
amm_trades: sp.get_list_f64('amm_trades')! amm_trades: sp.get_list_f64('amm_trades')!
} }
scenarios << scenario scenarios << scenario
@@ -105,17 +110,17 @@ pub fn play(mut plbook PlayBook) ! {
// Run all simulations // Run all simulations
for params in simulations_to_run { for params in simulations_to_run {
console.print_item('Running simulation: ${params.name}') console.print_item('Running simulation: ${params.name}')
// Create and run simulation // Create and run simulation
mut sim := simulation_new(params)! mut sim := simulation_new(params)!
sim.run_simulation()! sim.run_simulation()!
// Create export directory if needed // Create export directory if needed
os.mkdir_all(params.output.export_dir)! os.mkdir_all(params.output.export_dir)!
// Export all data in one call // Export all data in one call
sim.export_all(params.output.export_dir)! sim.export_all(params.output.export_dir)!
console.print_green(' Simulation "${params.name}" completed and exported to: ${params.output.export_dir}') console.print_green(' Simulation "${params.name}" completed and exported to: ${params.output.export_dir}')
} }
} }
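To make the parameter plumbing above concrete, a hedged heroscript sketch follows. The keys mirror the `get_*_default` calls and the `incatokens.investor_round` / `incatokens.scenario` filters in this file; the top-level action name, the literal values, and the exact list syntax for `demands` / `amm_trades` are assumptions.

```
!!incatokens.simulation
    name: 'inca_demo'
    currency: 'USD'
    epoch1_floor_uplift: 1.25
    epochn_floor_uplift: 1.20
    amm_liquidity_depth_factor: 2.0
    team_cliff_months: 12
    team_vesting_months: 36
    treasury_cliff_months: 12
    treasury_vesting_months: 48

!!incatokens.investor_round
    name: 'seed'
    allocation_pct: 0.05
    price: 0.05
    cliff_months: 6
    vesting_months: 24

!!incatokens.scenario
    name: 'Low'
    demands: '500000,1000000,2000000'
    amm_trades: '0,250000,-100000'
```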

View File

@@ -10,18 +10,18 @@ pub mut:
// Configuration (embedded) // Configuration (embedded)
params SimulationParams params SimulationParams
// Derived data // Derived data
investor_rounds []InvestorRound investor_rounds []InvestorRound
team_vesting VestingSchedule team_vesting VestingSchedule
treasury_vesting VestingSchedule treasury_vesting VestingSchedule
// Runtime state // Runtime state
scenarios map[string]&Scenario scenarios map[string]&Scenario
// Spreadsheets for tracking // Spreadsheets for tracking
price_sheet &spreadsheet.Sheet price_sheet &spreadsheet.Sheet
token_sheet &spreadsheet.Sheet token_sheet &spreadsheet.Sheet
investment_sheet &spreadsheet.Sheet investment_sheet &spreadsheet.Sheet
vesting_sheet &spreadsheet.Sheet vesting_sheet &spreadsheet.Sheet
} }
@@ -30,22 +30,22 @@ pub mut:
pub fn (mut sim Simulation) run_simulation() ! { pub fn (mut sim Simulation) run_simulation() ! {
// Set up investor rounds from params // Set up investor rounds from params
sim.investor_rounds = sim.params.investor_rounds.map(InvestorRound{ sim.investor_rounds = sim.params.investor_rounds.map(InvestorRound{
name: it.name name: it.name
allocation_pct: it.allocation_pct allocation_pct: it.allocation_pct
price: it.price price: it.price
vesting: VestingSchedule{ vesting: VestingSchedule{
cliff_months: it.vesting.cliff_months cliff_months: it.vesting.cliff_months
vesting_months: it.vesting.vesting_months vesting_months: it.vesting.vesting_months
} }
}) })
// Set up vesting schedules from params // Set up vesting schedules from params
sim.team_vesting = VestingSchedule{ sim.team_vesting = VestingSchedule{
cliff_months: sim.params.vesting.team.cliff_months cliff_months: sim.params.vesting.team.cliff_months
vesting_months: sim.params.vesting.team.vesting_months vesting_months: sim.params.vesting.team.vesting_months
} }
sim.treasury_vesting = VestingSchedule{ sim.treasury_vesting = VestingSchedule{
cliff_months: sim.params.vesting.treasury.cliff_months cliff_months: sim.params.vesting.treasury.cliff_months
vesting_months: sim.params.vesting.treasury.vesting_months vesting_months: sim.params.vesting.treasury.vesting_months
} }
@@ -61,95 +61,116 @@ pub fn (mut sim Simulation) run_simulation() ! {
// Run a scenario with given demands and AMM trades // Run a scenario with given demands and AMM trades
pub fn (mut sim Simulation) run_scenario(name string, demands []f64, amm_trades []f64) !&Scenario { pub fn (mut sim Simulation) run_scenario(name string, demands []f64, amm_trades []f64) !&Scenario {
mut scenario := &Scenario{ mut scenario := &Scenario{
name: name name: name
demands: demands demands: demands
amm_trades: amm_trades amm_trades: amm_trades
} }
	// Initialize epochs
	scenario.epochs = [
		Epoch{
			index: 0
			type_: .auction_only
			start_month: 0
			end_month: 3
			auction_share: 1.0
			amm_share: 0.0
		},
		Epoch{
			index: 1
			type_: .hybrid
			start_month: 3
			end_month: 6
			auction_share: 0.5
			amm_share: 0.5
		},
		Epoch{
			index: 2
			type_: .amm_only
			start_month: 6
			end_month: 12
			auction_share: 0.0
			amm_share: 1.0
		},
	]
// Track in spreadsheet // Track in spreadsheet
mut price_row := sim.price_sheet.row_new( mut price_row := sim.price_sheet.row_new(
name: 'scenario_${name}_price' name: 'scenario_${name}_price'
tags: 'scenario:${name} type:price' tags: 'scenario:${name} type:price'
descr: 'Token price evolution for ${name} scenario' descr: 'Token price evolution for ${name} scenario'
)! )!
mut treasury_row := sim.investment_sheet.row_new( mut treasury_row := sim.investment_sheet.row_new(
name: 'scenario_${name}_treasury' name: 'scenario_${name}_treasury'
tags: 'scenario:${name} type:treasury' tags: 'scenario:${name} type:treasury'
descr: 'Treasury raised for ${name} scenario' descr: 'Treasury raised for ${name} scenario'
aggregatetype: .sum aggregatetype: .sum
)! )!
// Calculate public tokens per epoch // Calculate public tokens per epoch
total_public := sim.params.distribution.total_supply * sim.params.distribution.public_pct total_public := sim.params.distribution.total_supply * sim.params.distribution.public_pct
tokens_per_epoch := total_public / 3.0 tokens_per_epoch := total_public / 3.0
mut last_auction_price := sim.get_last_investor_price() mut last_auction_price := sim.get_last_investor_price()
mut spillover := 0.0 mut spillover := 0.0
mut treasury_total := 0.0 mut treasury_total := 0.0
mut amm_pool := AMMPool{} mut amm_pool := AMMPool{}
for mut epoch in scenario.epochs { for mut epoch in scenario.epochs {
epoch.tokens_allocated = tokens_per_epoch + spillover epoch.tokens_allocated = tokens_per_epoch + spillover
epoch.auction_demand = demands[epoch.index] epoch.auction_demand = demands[epoch.index]
epoch.amm_net_trade = amm_trades[epoch.index] epoch.amm_net_trade = amm_trades[epoch.index]
// Run auction if applicable // Run auction if applicable
if epoch.auction_share > 0 { if epoch.auction_share > 0 {
auction_tokens := epoch.tokens_allocated * epoch.auction_share auction_tokens := epoch.tokens_allocated * epoch.auction_share
floor_price := sim.calculate_floor_price(epoch.index, last_auction_price) floor_price := sim.calculate_floor_price(epoch.index, last_auction_price)
auction_result := simulate_auction( auction_result := simulate_auction(
demand: epoch.auction_demand demand: epoch.auction_demand
min_price: floor_price min_price: floor_price
token_supply: auction_tokens token_supply: auction_tokens
)! )!
epoch.treasury_raised = auction_result.usd_raised epoch.treasury_raised = auction_result.usd_raised
treasury_total += auction_result.usd_raised treasury_total += auction_result.usd_raised
last_auction_price = auction_result.clearing_price last_auction_price = auction_result.clearing_price
epoch.final_price = auction_result.clearing_price epoch.final_price = auction_result.clearing_price
spillover = auction_tokens - auction_result.tokens_sold spillover = auction_tokens - auction_result.tokens_sold
// Record in spreadsheet // Record in spreadsheet
treasury_row.cells[epoch.start_month].val = auction_result.usd_raised treasury_row.cells[epoch.start_month].val = auction_result.usd_raised
} }
// Handle AMM if applicable // Handle AMM if applicable
if epoch.amm_share > 0 { if epoch.amm_share > 0 {
amm_tokens := epoch.tokens_allocated * epoch.amm_share + spillover amm_tokens := epoch.tokens_allocated * epoch.amm_share + spillover
spillover = 0 spillover = 0
// Seed AMM pool // Seed AMM pool
amm_usdc_to_add := sim.params.economics.amm_liquidity_depth_factor * epoch.treasury_raised amm_usdc_to_add := sim.params.economics.amm_liquidity_depth_factor * epoch.treasury_raised
amm_pool.add_liquidity(amm_tokens, amm_usdc_to_add) amm_pool.add_liquidity(amm_tokens, amm_usdc_to_add)
// Simulate trading // Simulate trading
if epoch.amm_net_trade != 0 { if epoch.amm_net_trade != 0 {
amm_pool.trade(epoch.amm_net_trade)! amm_pool.trade(epoch.amm_net_trade)!
} }
epoch.final_price = amm_pool.get_price() epoch.final_price = amm_pool.get_price()
} }
// Record price in spreadsheet // Record price in spreadsheet
for month in epoch.start_month .. epoch.end_month { for month in epoch.start_month .. epoch.end_month {
price_row.cells[month].val = epoch.final_price price_row.cells[month].val = epoch.final_price
} }
epoch.tokens_spillover = spillover epoch.tokens_spillover = spillover
} }
// Calculate final metrics // Calculate final metrics
scenario.final_metrics = sim.calculate_metrics(scenario, treasury_total)! scenario.final_metrics = sim.calculate_metrics(scenario, treasury_total)!
sim.scenarios[name] = scenario sim.scenarios[name] = scenario
return scenario return scenario
} }
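The AMM leg above relies on `AMMPool.add_liquidity`, `AMMPool.trade` and `AMMPool.get_price`, none of which appear in this diff. A minimal constant-product sketch of such a pool, purely as an assumption about its behaviour, could look like this:

```v
// x*y=k pool sketch matching the calls used in run_scenario above.
// The real AMMPool is not part of this diff, so all of this is assumed.
struct SketchAMMPool {
mut:
	token_reserve f64
	usdc_reserve  f64
}

fn (mut p SketchAMMPool) add_liquidity(tokens f64, usdc f64) {
	p.token_reserve += tokens
	p.usdc_reserve += usdc
}

// positive usdc_in buys tokens (price rises), negative sells tokens back
fn (mut p SketchAMMPool) trade(usdc_in f64) ! {
	if p.token_reserve <= 0 || p.usdc_reserve <= 0 {
		return error('pool has no liquidity')
	}
	k := p.token_reserve * p.usdc_reserve
	new_usdc := p.usdc_reserve + usdc_in
	if new_usdc <= 0 {
		return error('trade would drain the pool')
	}
	p.token_reserve = k / new_usdc
	p.usdc_reserve = new_usdc
}

fn (p SketchAMMPool) get_price() f64 {
	if p.token_reserve == 0 {
		return 0
	}
	return p.usdc_reserve / p.token_reserve
}
```

With reserves (T, U) the implied price is U/T, and a net-positive trade moves the token reserve to k/(U+ΔU), which matches the qualitative behaviour the scenario code expects: a positive `amm_net_trade` pushes `final_price` up.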
@@ -157,17 +178,17 @@ pub fn (mut sim Simulation) run_scenario(name string, demands []f64, amm_trades
// Calculate metrics for scenario // Calculate metrics for scenario
fn (sim Simulation) calculate_metrics(scenario &Scenario, treasury_total f64) !ScenarioMetrics { fn (sim Simulation) calculate_metrics(scenario &Scenario, treasury_total f64) !ScenarioMetrics {
final_price := scenario.epochs.last().final_price final_price := scenario.epochs.last().final_price
mut investor_roi := map[string]f64{} mut investor_roi := map[string]f64{}
for round in sim.investor_rounds { for round in sim.investor_rounds {
investor_roi[round.name] = final_price / round.price investor_roi[round.name] = final_price / round.price
} }
return ScenarioMetrics{ return ScenarioMetrics{
treasury_total: treasury_total treasury_total: treasury_total
final_price: final_price final_price: final_price
investor_roi: investor_roi investor_roi: investor_roi
market_cap_final: final_price * sim.params.distribution.total_supply market_cap_final: final_price * sim.params.distribution.total_supply
circulating_supply_final: sim.calculate_circulating_supply(12) // at month 12 circulating_supply_final: sim.calculate_circulating_supply(12) // at month 12
} }
} }
@@ -184,7 +205,7 @@ fn (sim Simulation) get_last_investor_price() f64 {
fn (sim Simulation) calculate_floor_price(epoch_idx int, last_auction_price f64) f64 { fn (sim Simulation) calculate_floor_price(epoch_idx int, last_auction_price f64) f64 {
last_investor_price := sim.get_last_investor_price() last_investor_price := sim.get_last_investor_price()
if epoch_idx == 0 { if epoch_idx == 0 {
return last_investor_price * sim.params.economics.epoch1_floor_uplift return last_investor_price * sim.params.economics.epoch1_floor_uplift
} }
@@ -196,10 +217,10 @@ fn (sim Simulation) calculate_circulating_supply(month int) f64 {
investor_tokens := sim.params.distribution.investor_pct * sim.params.distribution.total_supply investor_tokens := sim.params.distribution.investor_pct * sim.params.distribution.total_supply
team_tokens := sim.params.distribution.team_pct * sim.params.distribution.total_supply team_tokens := sim.params.distribution.team_pct * sim.params.distribution.total_supply
treasury_tokens := sim.params.distribution.treasury_pct * sim.params.distribution.total_supply treasury_tokens := sim.params.distribution.treasury_pct * sim.params.distribution.total_supply
// For simplicity, assume all public tokens are circulating after TGE // For simplicity, assume all public tokens are circulating after TGE
public_tokens := sim.params.distribution.public_pct * sim.params.distribution.total_supply public_tokens := sim.params.distribution.public_pct * sim.params.distribution.total_supply
return investor_tokens + team_tokens + treasury_tokens + public_tokens return investor_tokens + team_tokens + treasury_tokens + public_tokens
} }
@@ -215,4 +236,4 @@ pub fn (sim Simulation) export_all(export_dir string) ! {
if sim.params.output.generate_report { if sim.params.output.generate_report {
sim.generate_report(export_dir)! sim.generate_report(export_dir)!
} }
} }

View File

@@ -5,8 +5,8 @@ import freeflowuniverse.herolib.biz.spreadsheet
// VestingSchedule defines cliff and vesting periods // VestingSchedule defines cliff and vesting periods
pub struct VestingSchedule { pub struct VestingSchedule {
pub mut: pub mut:
cliff_months int cliff_months int
vesting_months int vesting_months int
initial_unlock_pct f64 // Percentage of tokens unlocked at month 0 initial_unlock_pct f64 // Percentage of tokens unlocked at month 0
} }
@@ -61,4 +61,3 @@ pub mut:
epochs []Epoch epochs []Epoch
final_metrics ScenarioMetrics final_metrics ScenarioMetrics
} }

View File

@@ -6,56 +6,56 @@ import freeflowuniverse.herolib.biz.spreadsheet
pub fn (mut sim Simulation) create_vesting_schedules() ! { pub fn (mut sim Simulation) create_vesting_schedules() ! {
// Create vesting sheet // Create vesting sheet
mut vesting_sheet := spreadsheet.sheet_new( mut vesting_sheet := spreadsheet.sheet_new(
name: '${sim.name}_vesting' name: '${sim.name}_vesting'
nrcol: 60 // 60 months nrcol: 60 // 60 months
curr: sim.params.simulation.currency curr: sim.params.simulation.currency
)! )!
// Team vesting // Team vesting
team_tokens := sim.params.distribution.total_supply * sim.params.distribution.team_pct team_tokens := sim.params.distribution.total_supply * sim.params.distribution.team_pct
mut team_row := vesting_sheet.row_new( mut team_row := vesting_sheet.row_new(
name: 'team_vesting' name: 'team_vesting'
tags: 'category:team type:vesting' tags: 'category:team type:vesting'
descr: 'Team token vesting schedule' descr: 'Team token vesting schedule'
)! )!
sim.apply_vesting_schedule(mut team_row, team_tokens, sim.team_vesting)! sim.apply_vesting_schedule(mut team_row, team_tokens, sim.team_vesting)!
// Treasury vesting // Treasury vesting
treasury_tokens := sim.params.distribution.total_supply * sim.params.distribution.treasury_pct treasury_tokens := sim.params.distribution.total_supply * sim.params.distribution.treasury_pct
mut treasury_row := vesting_sheet.row_new( mut treasury_row := vesting_sheet.row_new(
name: 'treasury_vesting' name: 'treasury_vesting'
tags: 'category:treasury type:vesting' tags: 'category:treasury type:vesting'
descr: 'Treasury token vesting schedule' descr: 'Treasury token vesting schedule'
)! )!
sim.apply_vesting_schedule(mut treasury_row, treasury_tokens, sim.treasury_vesting)! sim.apply_vesting_schedule(mut treasury_row, treasury_tokens, sim.treasury_vesting)!
// Investor rounds vesting // Investor rounds vesting
for round in sim.investor_rounds { for round in sim.investor_rounds {
round_tokens := sim.params.distribution.total_supply * round.allocation_pct round_tokens := sim.params.distribution.total_supply * round.allocation_pct
mut round_row := vesting_sheet.row_new( mut round_row := vesting_sheet.row_new(
name: '${round.name}_vesting' name: '${round.name}_vesting'
tags: 'category:investor round:${round.name} type:vesting' tags: 'category:investor round:${round.name} type:vesting'
descr: '${round.name} investor vesting schedule' descr: '${round.name} investor vesting schedule'
)! )!
sim.apply_vesting_schedule(mut round_row, round_tokens, round.vesting)! sim.apply_vesting_schedule(mut round_row, round_tokens, round.vesting)!
} }
// Create total unlocked row // Create total unlocked row
mut total_row := vesting_sheet.group2row( mut total_row := vesting_sheet.group2row(
name: 'total_unlocked' name: 'total_unlocked'
include: ['type:vesting'] include: ['type:vesting']
tags: 'summary type:total_vesting' tags: 'summary type:total_vesting'
descr: 'Total tokens unlocked over time' descr: 'Total tokens unlocked over time'
aggregatetype: .sum aggregatetype: .sum
)! )!
sim.vesting_sheet = vesting_sheet sim.vesting_sheet = vesting_sheet
} }
fn (sim Simulation) apply_vesting_schedule(mut row spreadsheet.Row, total_tokens f64, schedule VestingSchedule) ! { fn (sim Simulation) apply_vesting_schedule(mut row spreadsheet.Row, total_tokens f64, schedule VestingSchedule) ! {
initial_unlocked_tokens := total_tokens * schedule.initial_unlock_pct initial_unlocked_tokens := total_tokens * schedule.initial_unlock_pct
remaining_tokens_to_vest := total_tokens - initial_unlocked_tokens remaining_tokens_to_vest := total_tokens - initial_unlocked_tokens
monthly_vesting_amount := if schedule.vesting_months > 0 { monthly_vesting_amount := if schedule.vesting_months > 0 {
remaining_tokens_to_vest / f64(schedule.vesting_months) remaining_tokens_to_vest / f64(schedule.vesting_months)
} else { } else {
@@ -72,10 +72,11 @@ fn (sim Simulation) apply_vesting_schedule(mut row spreadsheet.Row, total_tokens
} else if month < schedule.cliff_months + schedule.vesting_months { } else if month < schedule.cliff_months + schedule.vesting_months {
// During vesting period (after cliff) // During vesting period (after cliff)
months_after_cliff := month - schedule.cliff_months + 1 months_after_cliff := month - schedule.cliff_months + 1
			row.cells[month].val = initial_unlocked_tokens +
				(monthly_vesting_amount * f64(months_after_cliff))
} else { } else {
// After vesting complete // After vesting complete
row.cells[month].val = total_tokens row.cells[month].val = total_tokens
} }
} }
} }
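As a concrete check of the arithmetic above, using the team defaults from the playbook code (cliff 12, vesting 36, no initial unlock assumed) and an assumed 100M-token allocation: month 11 is still 0, month 12 holds exactly one monthly tranche, and month 48 holds the full allocation, which is what the test file earlier in this commit asserts.

```v
// Worked example of the vesting maths above; the allocation size and the
// zero initial unlock are assumptions, the cliff/vesting values mirror the
// playbook defaults.
fn vested_at(month int, total f64, cliff int, vesting int, monthly f64) f64 {
	if month < cliff {
		return 0
	} else if month < cliff + vesting {
		return monthly * f64(month - cliff + 1)
	}
	return total
}

fn vesting_example() {
	total_tokens := 100_000_000.0
	cliff_months := 12
	vesting_months := 36
	monthly := total_tokens / f64(vesting_months)

	assert vested_at(11, total_tokens, cliff_months, vesting_months, monthly) == 0
	assert vested_at(12, total_tokens, cliff_months, vesting_months, monthly) == monthly
	assert vested_at(48, total_tokens, cliff_months, vesting_months, monthly) == total_tokens
}
```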

View File

@@ -30,47 +30,46 @@ pub mut:
	hero_install bool
	sshkey_name  string @[required]
	reset        bool // ask to do reset/rescue even if it's already in that state
	retry        int = 3
}

pub fn (mut h HetznerManager) server_rescue(args_ ServerRescueArgs) !ServerInfoDetailed {
	if args_.retry > 1 {
		for _ in 0 .. args_.retry - 1 {
			return h.server_rescue_internal(args_) or { continue }
		}
	}
	return h.server_rescue_internal(args_)!
}

fn (mut h HetznerManager) server_rescue_internal(args_ ServerRescueArgs) !ServerInfoDetailed {
	mut args := args_
	mut serverinfo := h.server_info_get(id: args.id, name: args.name)!
	if serverinfo.rescue && !args.reset {
		if osal.ssh_test(address: serverinfo.server_ip, port: 22)! == .ok {
			console.print_debug('test server ${serverinfo.server_name} is in rescue mode?')
			mut b := builder.new()!
			mut n := b.node_new(ipaddr: serverinfo.server_ip)!
			res := n.exec(cmd: 'ls /root/.oldroot/nfs/install/installimage', stdout: false) or {
				'ERROR'
			}
			if res.contains('nfs/install/installimage') {
				console.print_debug('server ${serverinfo.server_name} is in rescue mode')
				return serverinfo
			}
		}
		serverinfo.rescue = false
	}
	// only do it if it's not in rescue yet
	if serverinfo.rescue == false || args.reset {
		console.print_header('server ${serverinfo.server_name} goes into rescue mode')
		mut keyfps := []string{}
		if args.sshkey_name != '' {
			keyfps << h.key_get(args.sshkey_name)!.fingerprint
		} else {
			keyfps = h.keys_get()!.map(it.fingerprint)
		}
@@ -87,19 +86,24 @@ fn (mut h HetznerManager) server_rescue_internal(args_ ServerRescueArgs) !Server
// console.print_debug('hetzner rescue\n${rescue}') // console.print_debug('hetzner rescue\n${rescue}')
		h.server_reset(
			id: args.id
			name: args.name
			wait: args.wait
			msg: ' to get up and running in rescue mode.'
		)!
		os.execute_opt('ssh-keygen -R ${serverinfo.server_ip}')!
	}
	if args.hero_install {
		args.wait = true
	}
	if args.wait {
		mut b := builder.new()!
		mut n := b.node_new(ipaddr: serverinfo.server_ip)!
		n.exec_silent('apt update && apt install -y mc redis')!
		if args.hero_install {
			n.hero_install()!
		}
@@ -113,27 +117,30 @@ fn (mut h HetznerManager) server_rescue_internal(args_ ServerRescueArgs) !Server
pub fn (mut h HetznerManager) server_rescue_node(args ServerRescueArgs) !&builder.Node { pub fn (mut h HetznerManager) server_rescue_node(args ServerRescueArgs) !&builder.Node {
mut serverinfo := h.server_rescue(args)! mut serverinfo := h.server_rescue(args)!
mut b := builder.new()! mut b := builder.new()!
mut n := b.node_new(ipaddr: serverinfo.server_ip)! mut n := b.node_new(ipaddr: serverinfo.server_ip)!
return n return n
} }
pub struct ServerInstallArgs { pub struct ServerInstallArgs {
pub mut: pub mut:
id int id int
name string name string
wait bool = true wait bool = true
hero_install bool hero_install bool
hero_install_compile bool hero_install_compile bool
sshkey_name string @[required] sshkey_name string @[required]
raid bool raid bool
} }
pub fn (mut h HetznerManager) ubuntu_install(args ServerInstallArgs) !&builder.Node { pub fn (mut h HetznerManager) ubuntu_install(args ServerInstallArgs) !&builder.Node {
	mut serverinfo := h.server_rescue(
		id: args.id
		name: args.name
		wait: true
		sshkey_name: args.sshkey_name
	)!
mut b := builder.new()! mut b := builder.new()!
mut n := b.node_new(ipaddr: serverinfo.server_ip)! mut n := b.node_new(ipaddr: serverinfo.server_ip)!
@@ -142,33 +149,33 @@ pub fn (mut h HetznerManager) ubuntu_install(args ServerInstallArgs) !&builder.N
// n.file_write("/tmp/installconfig",installconfig)! // n.file_write("/tmp/installconfig",installconfig)!
// n.exec_interactive("installimage -a -c /tmp/installconfig")! // n.exec_interactive("installimage -a -c /tmp/installconfig")!
	mut rstr := ''
	if args.raid {
		rstr = '-r yes -l 1 '
	}
	n.exec(
		cmd: '
	set -ex
	echo "go into install mode, try to install ubuntu 24.04"
	/root/.oldroot/nfs/install/installimage -a -n kristof2 ${rstr} -i /root/.oldroot/nfs/images/Ubuntu-2404-noble-amd64-base.tar.gz -f yes -t yes -p swap:swap:4G,/boot:ext3:1024M,/:btrfs:all
	reboot'
	)!
	os.execute_opt('ssh-keygen -R ${serverinfo.server_ip}')!
console.print_debug('server ${serverinfo.server_name} is installed in ubuntu now, should be restarting.') console.print_debug('server ${serverinfo.server_name} is installed in ubuntu now, should be restarting.')
osal.reboot_wait( osal.reboot_wait(
address: serverinfo.server_ip address: serverinfo.server_ip
timeout_down: 60 timeout_down: 60
timeout_up: 60 * 5 timeout_up: 60 * 5
)! )!
$dbg;
if args.hero_install { if args.hero_install {
n.hero_install(compile: args.hero_install_compile)! n.hero_install(compile: args.hero_install_compile)!
} }
return n return n
} }
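A hedged usage sketch for the installer above; it uses the same trailing-struct call style as `server_rescue`, the field values are examples, and how the `HetznerManager` instance itself is obtained is not shown in this diff.

```v
// `h` is assumed to be an already-configured HetznerManager.
fn example_install(mut h HetznerManager) ! {
	mut n := h.ubuntu_install(
		name: 'myserver' // or pass id instead
		sshkey_name: 'mykey' // @[required]
		raid: true // adds '-r yes -l 1 ' to the installimage call
		hero_install: true // install hero once the box is back up
	)!
	// n is a builder.Node pointing at the freshly installed machine
	n.exec_silent('uname -a')!
}
```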

View File

@@ -30,7 +30,7 @@ pub fn (mut h HetznerManager) server_reset(args ServerRebootArgs) !ResetInfo {
console.print_header('server ${serverinfo.server_name} goes for reset') console.print_header('server ${serverinfo.server_name} goes for reset')
mut serveractive := false mut serveractive := false
	if osal.ping(address: serverinfo.server_ip)! {
serveractive = true serveractive = true
console.print_debug('server ${serverinfo.server_name} is active') console.print_debug('server ${serverinfo.server_name} is active')
} else { } else {
@@ -52,7 +52,7 @@ pub fn (mut h HetznerManager) server_reset(args ServerRebootArgs) !ResetInfo {
for { for {
console.print_debug('wait for server ${serverinfo.server_name} to go down.') console.print_debug('wait for server ${serverinfo.server_name} to go down.')
		pingresult := osal.ping(address: serverinfo.server_ip)!
		if !pingresult {
			console.print_debug('server ${serverinfo.server_name} is now down, now waiting for reboot.')
break break
} }
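With `osal.ping` returning a plain bool, a small wait-until-host-is-down helper in the spirit of the loop above could look like the sketch below; the attempt count, sleep interval and the exact osal module path are assumptions.

```v
import time
import freeflowuniverse.herolib.osal

// Poll until the host stops answering pings, or give up after max_attempts.
fn wait_host_down(address string, max_attempts int) !bool {
	for _ in 0 .. max_attempts {
		if !osal.ping(address: address)! {
			return true // host no longer reachable
		}
		time.sleep(2 * time.second)
	}
	return false // still reachable after max_attempts checks
}
```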

View File

@@ -50,7 +50,7 @@ pub fn (mut h HetznerManager) servers_list() ![]ServerInfo {
method: .get method: .get
prefix: 'server' prefix: 'server'
list_dict_key: 'server' list_dict_key: 'server'
debug: false debug: false
)! )!
} }

View File

@@ -8,9 +8,9 @@ import freeflowuniverse.herolib.installers.virt.podman as podman_installer
pub struct PodmanFactory { pub struct PodmanFactory {
pub mut: pub mut:
// sshkeys_allowed []string // all keys here have access over ssh into the machine, when ssh enabled // sshkeys_allowed []string // all keys here have access over ssh into the machine, when ssh enabled
images []Image images []Image
containers []Container containers []Container
buildpath string buildpath string
// cache bool = true // cache bool = true
// push bool // push bool
// platform []BuildPlatformType // used to build // platform []BuildPlatformType // used to build
@@ -18,7 +18,6 @@ pub mut:
prefix string prefix string
} }
@[params] @[params]
pub struct NewArgs { pub struct NewArgs {
pub mut: pub mut:
@@ -27,12 +26,10 @@ pub mut:
herocompile bool herocompile bool
} }
	if args.install {
		mut podman_installer0 := podman_installer.get()!
		podman_installer0.install()!
	}
fn (mut e PodmanFactory) init() ! { fn (mut e PodmanFactory) init() ! {
if e.buildpath == '' { if e.buildpath == '' {

View File

@@ -7,7 +7,6 @@ import freeflowuniverse.herolib.core.pathlib
import os import os
import json import json
pub fn (mut self BuildAHContainer) install_zinit() ! { pub fn (mut self BuildAHContainer) install_zinit() ! {
// https://github.com/threefoldtech/zinit // https://github.com/threefoldtech/zinit
self.hero_copy()! self.hero_copy()!
@@ -16,12 +15,11 @@ pub fn (mut self BuildAHContainer) install_zinit() ! {
self.set_entrypoint('/sbin/zinit init --container')! self.set_entrypoint('/sbin/zinit init --container')!
} }
pub fn (mut self BuildAHContainer) install_herodb() ! { pub fn (mut self BuildAHContainer) install_herodb() ! {
self.install_zinit()! self.install_zinit()!
// the hero database gets installed and put in zinit for automatic start // the hero database gets installed and put in zinit for automatic start
self.hero_play_execute('!!installer.herodb') self.hero_play_execute('!!installer.herodb')
//TODO: the hero_play needs to be implemented // TODO: the hero_play needs to be implemented
} }
// copies the hero from host into guest // copies the hero from host into guest
@@ -29,5 +27,5 @@ pub fn (mut self BuildAHContainer) install_mycelium() ! {
self.install_zinit()! self.install_zinit()!
// the mycelium database gets installed and put in zinit for automatic start // the mycelium database gets installed and put in zinit for automatic start
self.hero_play_execute('!!installer.mycelium') self.hero_play_execute('!!installer.mycelium')
//TODO: the hero_play needs to be implemented // TODO: the hero_play needs to be implemented
} }

View File

@@ -36,10 +36,9 @@ pub enum RunTime {
v v
} }
// should use builders underneath
pub fn (mut self BuildAHContainer) exec(cmd Command) !osal.Job {
	// make sure we have hero in the hostnode of self
	self.hero_copy()!
mut rt := RunTime.bash mut rt := RunTime.bash
@@ -61,7 +60,7 @@ pub fn (mut self BuildAHContainer) exec(cmd Command) !osal.Job {
if cmd.runtime == .heroscript || cmd.runtime == .herocmd { if cmd.runtime == .heroscript || cmd.runtime == .herocmd {
self.hero_copy()! self.hero_copy()!
} }
mut j:=osal.exec( mut j := osal.exec(
name: cmd.name name: cmd.name
cmd: cmd_str cmd: cmd_str
description: cmd.description description: cmd.description

View File

@@ -11,23 +11,19 @@ pub fn (mut self BuildAHContainer) hero_cmd_execute(cmd string) ! {
// send a hero play command to the buildah container // send a hero play command to the buildah container
pub fn (mut self BuildAHContainer) hero_play_execute(cmd string) ! { pub fn (mut self BuildAHContainer) hero_play_execute(cmd string) ! {
self.hero_copy()! self.hero_copy()!
panic("implement") panic('implement')
} }
pub fn (mut self BuildAHContainer) hero_execute_script(cmd string) ! { pub fn (mut self BuildAHContainer) hero_execute_script(cmd string) ! {
self.hero_copy()! self.hero_copy()!
self.exec(cmd: cmd, runtime: .heroscript)! self.exec(cmd: cmd, runtime: .heroscript)!
} }
// copies the hero from host into guest // copies the hero from host into guest
pub fn (mut self BuildAHContainer) hero_copy() ! { pub fn (mut self BuildAHContainer) hero_copy() ! {
	// TODO: check we are on linux, check also the platformtype arm or intel, if not right platform then build hero in container
	panic('implement')
// if !osal.cmd_exists('hero') { // if !osal.cmd_exists('hero') {
// herolib.hero_compile()! // herolib.hero_compile()!
@@ -36,9 +32,7 @@ pub fn (mut self BuildAHContainer) hero_copy() ! {
self.copy(heropath, '/usr/local/bin/hero')! self.copy(heropath, '/usr/local/bin/hero')!
} }
// get a container where we build hero and export hero from the container so we can use it for hero_copy // get a container where we build hero and export hero from the container so we can use it for hero_copy
pub fn (mut self BuildAHContainer) hero_build() ! { pub fn (mut self BuildAHContainer) hero_build() ! {
panic("implement") panic('implement')
} }

View File

@@ -63,8 +63,7 @@ pub fn builder_base(args GetArgs) !BuildAHContainer {
return builder return builder
} }
// TODO: all below are not good, need to use play cmd over hero remotely. see how we did it with core_installers
// // builder machine based on arch and install vlang // // builder machine based on arch and install vlang
// pub fn (mut e CEngine) builder_go_rust(args GetArgs) !BuildAHContainer { // pub fn (mut e CEngine) builder_go_rust(args GetArgs) !BuildAHContainer {