fixes for formatting

2024-12-31 11:00:02 +01:00
parent 8400bfc4ee
commit 92335f8828
39 changed files with 699 additions and 752 deletions

View File

@@ -26,9 +26,9 @@ pub fn run() ! {
pub fn run_server(params RunParams) ! {
mut a := new()!
mut server := actor.new_server(
redis_url: 'localhost:6379'
redis_queue: a.name
openapi_spec: example_actor.openapi_specification
redis_url: 'localhost:6379'
redis_queue: a.name
openapi_spec: openapi_specification
)!
server.run(params)
}

View File

@@ -52,7 +52,7 @@ fn main() {
mut rpc := redis.rpc_get('procedure_queue')
mut actor := Actor{
rpc: rpc
rpc: rpc
data_store: DataStore{}
}
@@ -172,9 +172,9 @@ fn (mut store DataStore) list_pets(params ListPetParams) []Pet {
fn (mut store DataStore) create_pet(new_pet NewPet) Pet {
id := store.pets.keys().len + 1
pet := Pet{
id: id
id: id
name: new_pet.name
tag: new_pet.tag
tag: new_pet.tag
}
store.pets[id] = pet
return pet
@@ -211,10 +211,10 @@ fn (mut store DataStore) delete_order(id int) ! {
fn (mut store DataStore) create_user(new_user NewUser) User {
id := store.users.keys().len + 1
user := User{
id: id
id: id
username: new_user.username
email: new_user.email
phone: new_user.phone
email: new_user.email
phone: new_user.phone
}
store.users[id] = user
return user
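As a quick illustration, a minimal hypothetical use of the in-memory store above (assuming NewPet only needs the name and tag fields shown in create_pet):

// hypothetical usage of the DataStore sketched above
mut store := DataStore{}
pet := store.create_pet(NewPet{ name: 'rex', tag: 'dog' })
println(pet.id) // 1, since ids are derived from the number of stored pets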

View File

@@ -23,7 +23,7 @@ fn main() {
// Initialize the server
mut server := &Server{
specification: openapi.json_decode(spec_json)!
handler: Handler{
handler: Handler{
processor: Processor{
rpc: rpc
}
@@ -122,7 +122,7 @@ fn (mut handler Handler) handle(request Request) !Response {
if err is processor.ProcedureError {
return Response{
status: http.status_from_int(err.code()) // Map ProcedureError reason to HTTP status code
body: json.encode({
body: json.encode({
'error': err.msg()
})
}
@@ -133,6 +133,6 @@ fn (mut handler Handler) handle(request Request) !Response {
// Convert returned procedure response to OpenAPI response
return Response{
status: http.Status.ok // Assuming success if no error
body: procedure_response.result
body: procedure_response.result
}
}

View File

@@ -1,40 +1,45 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
struct DeploymentStateDB {
secret string // to encrypt symmetric
data map[string]string
}
struct DeploymentState {
name string
vms []VMDeployed
zdbs []ZDBDeployed
}
pub fn (mut db DeploymentStateDB) set(deployment_name string, key string, val string) ! {
// store e.g. \n separated list of all keys per deployment_name
// encrypt
db.data['${deployment_name}_${key}'] = val
}
pub fn (db DeploymentStateDB) get(deployment_name string, key string)!string {
return db.data['${deployment_name}_${key}'] or { return error('key not found') }
}
pub fn (mut db DeploymentStateDB) delete(deployment_name string, key string) ! {
db.data.delete('${deployment_name}_${key}')
}
pub fn (db DeploymentStateDB) keys(deployment_name string)![]string {
mut keys := []string{}
for k, _ in db.data {
if k.starts_with('${deployment_name}_') {
keys << k.all_after('${deployment_name}_')
}
}
return keys
}
pub fn (db DeploymentStateDB) load(deployment_name string) !DeploymentState {
mut state := DeploymentState{
name: deployment_name
}
// Implementation would need to load VMs and ZDBs based on stored data
return state
}
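For orientation, a minimal usage sketch of the state DB above; the deployment name, key and value are placeholders:

// hypothetical usage of DeploymentStateDB as defined above
mut db := DeploymentStateDB{
	secret: 'mysecret' // placeholder
}
db.set('mydeployment', 'vm_myvm', '{"name":"myvm"}')!
val := db.get('mydeployment', 'vm_myvm')!
println(val) // {"name":"myvm"}
println(db.keys('mydeployment')!) // ['vm_myvm']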

View File

@@ -1,41 +1,35 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
struct VMSpecs {
deployment_name string
name string
nodeid string
pub_sshkeys []string
flist string // if any, if used then ostype not used
ostype OSType
}
enum OSType {
ubuntu_22_04
ubuntu_24_04
arch
alpine
}
struct VMDeployed {
name string
nodeid string
// size ..
guid string
yggdrasil_ip string
mycelium_ip string
}
pub fn (vm VMDeployed) builder_node() builder.Node {
}
// only connect to yggdrasil and mycelium
fn vm_deploy(args_ VMSpecs) VMDeployed {
deploymentstate_db.set(args_.deployment_name, 'vm_${args_.name}', VMDeployed.json)
}

View File

@@ -1,39 +1,40 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
struct NodeQuery {
location string // how to define location
capacity_available_hdd_gb int
capacity_available_ssd_gb int
capacity_available_mem_gb int
capacity_available_vcpu int // vcpu cores
capacity_free_hdd_gb int
capacity_free_ssd_gb int
capacity_free_mem_gb int
capacity_free_vcpu int // vcpu cores
uptime_min int = 70 // 0..99
bw_min_mb_sec int = 0 // bandwidth in mbit per second, min
}
struct NodeInfo {
location string // how to define location
capacity_available_hdd_gb int
capacity_available_ssd_gb int
capacity_available_mem_gb int
capacity_available_vcpu int // vcpu cores
capacity_free_hdd_gb int
capacity_free_ssd_gb int
capacity_free_mem_gb int
capacity_free_vcpu int // vcpu cores
uptime_min int = 70 // 0..99
bw_min_mb_sec int = 0 // bandwidth in mbit per second, min
guid string
status string
last_update i64 // unix timestamp
}
fn node_find(args NodeQuery) []NodeInfo {
// Implementation would need to:
// 1. Query nodes based on the criteria in args
// 2. Filter nodes that match the requirements
// 3. Return array of matching NodeInfo
return []NodeInfo{}
}
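For illustration, a minimal sketch of calling node_find with the NodeQuery struct above; the capacity and uptime values are made-up placeholders:

// hypothetical query: nodes with enough free capacity and decent uptime
nodes := node_find(NodeQuery{
	capacity_free_ssd_gb: 50
	capacity_free_mem_gb: 4
	capacity_free_vcpu: 4
	uptime_min: 90
})
for n in nodes {
	println('candidate node ${n.guid} (${n.location}), status: ${n.status}')
}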

View File

@@ -1,19 +1,10 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
struct WebGWArgs {
deployment_name string
// ...
}
// connect a domain name, or an existing one, to it
fn webgateway_rule_deploy(args_ WebGWArgs) []VMDeployed {
}

View File

@@ -1,31 +1,48 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.redisclient { RedisClient }
struct ZDBSpecs {
deployment_name string
nodeid string
namespace string
secret string
}
struct ZDBDeployed {
nodeid string
namespace string
secret string
host string
port int
}
// test zdb is answering
pub fn (zdb ZDBDeployed) ping() !bool {
mut client := zdb.redisclient()!
return client.ping()!
}
pub fn (zdb ZDBDeployed) redisclient() !RedisClient {
return RedisClient.new(
host: zdb.host
port: zdb.port
password: zdb.secret
db: 0
)!
}
// only connect to yggdrasil and mycelium
fn zdb_deploy(args ZDBSpecs) !ZDBDeployed {
// Implementation would need to:
// 1. Deploy ZDB on the specified node
// 2. Configure namespace and security
// 3. Return connection details
return ZDBDeployed{
nodeid: args.nodeid
namespace: args.namespace
secret: args.secret
host: '' // Would be set to actual host
port: 0 // Would be set to actual port
}
}
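To tie the pieces above together, a minimal usage sketch of zdb_deploy and ping; the deployment name, node id, namespace and secret are placeholders:

// hypothetical: deploy a ZDB and verify it answers over the redis protocol
zdb := zdb_deploy(ZDBSpecs{
	deployment_name: 'mydeployment'
	nodeid: '42'
	namespace: 'ns1'
	secret: 'mysecret'
})!
if zdb.ping()! {
	println('zdb ${zdb.namespace} on node ${zdb.nodeid} reachable at ${zdb.host}:${zdb.port}')
}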

View File

@@ -29,56 +29,52 @@ fn main() {
mut logger := &log.Log{}
logger.set_level(.debug)
// ##### Part 1 #####
nodes := tfgrid.search()! // gives error if no nodes
// default is mycelium
mut vm := tfgrid.vm_new(
profilename: 'main'
name: 'myvm'
mem_mb: 4000
ssd_gb: 50
cpu_cores: 4
nodeid: nodes[0].id
flist: ''
)!
// These lines appear to be placeholders/examples, commenting them out
// vm.shell()!
// println(vm.ipaddr)
// vm.webgw_add()!
// b := vm.builder()!
// vm.delete()!
chain_net_enum := get_chain_network(chain_network)!
mut deployer := tfgrid.new_deployer(mnemonics, chain_net_enum, mut logger)!
mut workloads := []models.Workload{}
// node_id := get_node_id(chain_net_enum, memory, disk, cpu, public_ip)!
node_id := u32(146)
logger.info('deploying on node: ${node_id}')
network_name := 'net_${rand.string(5).to_lower()}' // autocreate a network
wg_port := deployer.assign_wg_port(node_id)!
mut network := models.Znet{
ip_range: '10.1.0.0/16' // auto-assign
subnet: '10.1.1.0/24' // auto-assign
ip_range: '10.1.0.0/16' // auto-assign
subnet: '10.1.1.0/24' // auto-assign
wireguard_private_key: 'GDU+cjKrHNJS9fodzjFDzNFl5su3kJXTZ3ipPgUjOUE=' // autocreate
wireguard_listen_port: wg_port
mycelium: models.Mycelium{
mycelium: models.Mycelium{
hex_key: rand.string(32).bytes().hex()
}
}
workloads << network.to_workload(name: network_name, description: 'test_network1')
mut public_ip_name := ''
if public_ip{
if public_ip {
public_ip_name = rand.string(5).to_lower()
workloads << models.PublicIP{
v4: true
@@ -86,52 +82,52 @@ fn main() {
}
zmachine := models.Zmachine{
flist: 'https://hub.grid.tf/petep.3bot/threefolddev-ubuntu24.04-latest.flist'
network: models.ZmachineNetwork{
flist: 'https://hub.grid.tf/petep.3bot/threefolddev-ubuntu24.04-latest.flist'
network: models.ZmachineNetwork{
interfaces: [
models.ZNetworkInterface{
network: network_name
ip: '10.1.1.3'
ip: '10.1.1.3'
},
]
public_ip: public_ip_name
planetary: true
mycelium: models.MyceliumIP{
network: network_name
public_ip: public_ip_name
planetary: true
mycelium: models.MyceliumIP{
network: network_name
hex_seed: rand.string(6).bytes().hex()
}
}
entrypoint: '/sbin/zinit init' // from user or default
entrypoint: '/sbin/zinit init' // from user or default
compute_capacity: models.ComputeCapacity{
cpu: u8(cpu)
cpu: u8(cpu)
memory: i64(memory) * 1024 * 1024 * 1024
}
size: u64(disk) * 1024 * 1024 * 1024
env: {
size: u64(disk) * 1024 * 1024 * 1024
env: {
'SSH_KEY': ssh_key
'CODE_SERVER_PASSWORD': code_server_pass
}
}
workloads << zmachine.to_workload(
name: 'vm_${rand.string(5).to_lower()}'
name: 'vm_${rand.string(5).to_lower()}'
description: 'zmachine_test'
)
signature_requirement := models.SignatureRequirement{
weight_required: 1
requests: [
requests: [
models.SignatureRequest{
twin_id: deployer.twin_id
weight: 1
weight: 1
},
]
}
mut deployment := models.new_deployment(
twin_id: deployer.twin_id
description: 'seahorse deployment'
workloads: workloads
twin_id: deployer.twin_id
description: 'seahorse deployment'
workloads: workloads
signature_requirement: signature_requirement
)
deployment.add_metadata('vm', 'SeahorseVM')
@@ -154,32 +150,31 @@ fn main() {
logger.info('Wait for 30 seconds to ensure the VM is fully up...')
time.sleep(30 * time.second) // Wait for 30 seconds
// ##### Part 2 #####
// Check if mycelium is installed on my local machine
if !is_mycelium_installed() {
logger.error('Mycelium is not installed. Please install Mycelium before proceeding.')
return
} else {
logger.info('Mycelium is installed.')
}
// Check if mycelium is running on my local machine
if !is_mycelium_running() {
// logger.info('Warning: Mycelium is not running.')
// logger.info('Attempting to start Mycelium...')
// os.execute('sudo mycelium --peers tcp://188.40.132.242:9651 tcp://[2a01:4f8:212:fa6::2]:9651 quic://185.69.166.7:9651 tcp://[2a02:1802:5e:0:8c9e:7dff:fec9:f0d2]:9651 tcp://65.21.231.58:9651 quic://[2a01:4f9:5a:1042::2]:9651')
// // Wait a bit and check again
// time.sleep(5 * time.second)
// if !is_mycelium_running() {
// logger.error('Failed to start Mycelium. Please start it manually before proceeding.')
// return
// }
logger.error('Mycelium is not running on local machine.')
return
} else {
logger.info('Mycelium is running on local machine.')
}
@@ -188,9 +183,9 @@ fn main() {
// Ping remote mycelium ip
if !ping_ip(remote_mycelium_ip, 5) {
logger.error('Failed to ping ${remote_mycelium_ip} after 5 attempts')
return
} else {
logger.info('Successfully pinged ${remote_mycelium_ip}')
}
@@ -198,18 +193,18 @@ fn main() {
if !try_ssh_connection(remote_mycelium_ip) {
logger.error('Unable to establish SSH connection. Please check your network and VM status.')
return
} else {
logger.info('Ready to proceed with further operations')
}
// Run installation script on remote VM
seahorse_install_script := 'seahorse_install.sh'
if !execute_remote_script(remote_mycelium_ip, seahorse_install_script) {
logger.error('Seahorse remote installation failed')
return
} else {
logger.info('Seahorse remote installation completed successfully')
}
}
}
fn get_machine_result(dl models.Deployment) !models.ZmachineResult {
@@ -235,7 +230,7 @@ fn get_chain_network(network string) !tfgrid.ChainNetwork {
return chain_net_enum
}
fn get_node_id(network tfgrid.ChainNetwork, memory int, disk int, cpu int, public_ip bool) !u32{
fn get_node_id(network tfgrid.ChainNetwork, memory int, disk int, cpu int, public_ip bool) !u32 {
gp_net := match network {
.dev { gridproxy.TFGridNet.dev }
.qa { gridproxy.TFGridNet.qa }
@@ -245,15 +240,15 @@ fn get_node_id(network tfgrid.ChainNetwork, memory int, disk int, cpu int, publi
mut gridproxy_client := gridproxy.get(gp_net, false)!
mut free_ips := u64(0)
if public_ip{
if public_ip {
free_ips = 1
}
mut node_it := gridproxy_client.get_nodes_has_resources(
free_mru_gb: u64(memory)
free_sru_gb: u64(disk)
free_cpu: u64(cpu)
free_ips: free_ips
free_cpu: u64(cpu)
free_ips: free_ips
)
nodes := node_it.next()
mut node_id := u32(0) // get from user or use gridproxy to get nodeid
@@ -266,41 +261,40 @@ fn get_node_id(network tfgrid.ChainNetwork, memory int, disk int, cpu int, publi
return node_id
}
// Function to check if Mycelium is installed
fn is_mycelium_installed() bool {
result := os.execute('mycelium --version')
return result.exit_code == 0
}
// Function to check if Mycelium is running locally
fn is_mycelium_running() bool {
mut logger := &log.Log{}
logger.set_level(.debug)
// Use pgrep to find Mycelium processes
result := os.execute('pgrep -f "^mycelium\\s"')
if result.exit_code != 0 {
logger.debug('No Mycelium process found')
return false
}
pids := result.output.trim_space().split('\n')
logger.info('Mycelium process IDs: ${pids}')
return pids.len > 0
}
fn ping_ip(ip string, attempts int) bool {
for i := 0; i < attempts; i++ {
result := os.execute('ping6 -c 1 -W 2 ${ip}')
if result.exit_code == 0 {
return true
}
time.sleep(1 * time.second)
}
return false
}
fn try_ssh_connection(mycelium_ip string) bool {
@@ -308,80 +302,80 @@ fn try_ssh_connection(mycelium_ip string) bool {
logger.set_level(.debug)
logger.info('Attempting SSH connection...')
// Use -6 flag to force IPv6
command := 'ssh -6 -o ConnectTimeout=10 -o StrictHostKeyChecking=no root@${mycelium_ip} true'
logger.info('Executing SSH command: ${command}')
result := os.execute(command)
if result.exit_code == 0 {
logger.info('SSH connection successful')
return true
} else {
logger.info('SSH connection failed: ${result.output}')
return false
}
}
fn execute_remote_script(mycelium_ip string, script_name string) bool {
mut logger := &log.Log{}
logger.set_level(.info)
// Get the directory of the V script
v_script_dir := os.dir(os.executable())
logger.info('V script directory: ${v_script_dir}')
// Construct the full path to the install script
script_path := os.join_path(v_script_dir, script_name)
logger.info('Full script path: ${script_path}')
// Ensure the script exists
if !os.exists(script_path) {
logger.error('Script ${script_path} not found')
return false
}
// Format the IPv6 address correctly for SSH and SCP commands
ssh_ip := mycelium_ip
scp_ip := if mycelium_ip.contains(':') { '[${mycelium_ip}]' } else { mycelium_ip }
remote_script_path := '/tmp/${script_name}'
// Construct the SSH and SCP commands
scp_command := 'scp -6 -o StrictHostKeyChecking=no ${script_path} root@${scp_ip}:${remote_script_path}'
ssh_command := 'ssh -6 -o ConnectTimeout=10 -o StrictHostKeyChecking=no -tt root@${ssh_ip}'
// Copy the script to the remote machine
logger.info('Copying script to remote machine: ${scp_command}')
scp_result := os.execute(scp_command)
if scp_result.exit_code != 0 {
logger.error('Failed to copy script. Exit code: ${scp_result.exit_code}')
logger.error('SCP output: ${scp_result.output}')
return false
}
// Verify if the script was copied successfully
check_file_command := '${ssh_command} "ls -l ${remote_script_path}"'
check_result := os.execute(check_file_command)
if check_result.exit_code != 0 {
logger.error('Failed to verify script on remote machine. Exit code: ${check_result.exit_code}')
return false
}
logger.info('Script found on remote machine: ${remote_script_path}')
// Now execute the script on the remote machine and stream the output
log_file := '/tmp/output.log'
run_script_command := '${ssh_command} "bash -l ${remote_script_path} | tee ${log_file}"'
logger.info('Executing remote script: ${run_script_command}')
logger.info('Follow remote script execution: ${ssh_command} "tail -f ${log_file}"')
run_result := os.execute(run_script_command)
logger.info('See full output log file: ${ssh_command} "cat ${log_file}"')
if run_result.exit_code == 0 {
logger.info('Remote script execution completed successfully')
return true
} else {
logger.error('Remote script execution failed with exit code: ${run_result.exit_code}')
return false
}
}

View File

@@ -1,23 +1,37 @@
module main
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.ui.uimodel { DropDownArgs, QuestionArgs }
fn do() ! {
mut c := console.new()
r := c.ask_question(QuestionArgs{
question: 'my question'
})!
println(r)
r2 := c.ask_dropdown_multiple(DropDownArgs{
description: 'my dropdown'
items: ['a', 'b', 'c']
})!
println(r2)
r3 := c.ask_dropdown_multiple(DropDownArgs{
description: 'my dropdown'
items: ['a', 'b', 'c']
default: ['a', 'b']
clear: true
})!
println(r3)
r4 := c.ask_dropdown(DropDownArgs{
description: 'my dropdown'
items: ['a', 'b', 'c']
default: ['c']
clear: true
})!
println(r4)
}
fn main() {

View File

@@ -15,7 +15,7 @@ pub fn heroplay(mut plbook playbook.PlayBook) ! {
instance := p.get_default('instance', 'default')!
// cfg.keyname = p.get('keyname')!
mut cl := get(instance,
openaikey: p.get('openaikey')!
openaikey: p.get('openaikey')!
description: p.get_default('description', '')!
)!
cl.config_save()!

View File

@@ -31,8 +31,8 @@ fn obj_init(obj_ OpenAI) !OpenAI {
pub fn (mut client OpenAI) connection() !&httpconnection.HTTPConnection {
mut c := client.conn or {
mut c2 := httpconnection.new(
name: 'openrouterclient_${client.name}'
url: 'https://openrouter.ai/api/v1/chat/completions'
name: 'openrouterclient_${client.name}'
url: 'https://openrouter.ai/api/v1/chat/completions'
cache: false
retry: 0
)!

View File

@@ -74,24 +74,24 @@ fn (mut f OpenAI) create_audio_request(args AudioArgs, endpoint string) !AudioRe
file_content := os.read_file(args.filepath)!
ext := os.file_ext(args.filepath)
mut file_mime_type := ''
if ext in audio_mime_types {
file_mime_type = audio_mime_types[ext]
} else {
return error('file extension not supported')
}
file_data := http.FileData{
filename: os.base(args.filepath)
filename: os.base(args.filepath)
content_type: file_mime_type
data: file_content
data: file_content
}
form := http.PostMultipartFormConfig{
files: {
'file': [file_data]
}
form: {
'model': openai.audio_model
form: {
'model': audio_model
'prompt': args.prompt
'response_format': audio_resp_type_str(args.response_format)
'temperature': args.temperature.str()
@@ -115,11 +115,11 @@ fn (mut f OpenAI) create_audio_request(args AudioArgs, endpoint string) !AudioRe
pub struct CreateSpeechArgs {
pub:
model ModelType = .tts_1
input string @[required]
input string @[required]
voice Voice = .alloy
response_format AudioFormat = .mp3
speed f32 = 1.0
output_path string @[required]
speed f32 = 1.0
output_path string @[required]
}
pub struct CreateSpeechRequest {
@@ -135,11 +135,11 @@ pub fn (mut f OpenAI) create_speech(args CreateSpeechArgs) ! {
mut output_file := os.open_file(args.output_path, 'w+')!
req := CreateSpeechRequest{
model: modelname_str(args.model)
input: args.input
voice: voice_str(args.voice)
model: modelname_str(args.model)
input: args.input
voice: voice_str(args.voice)
response_format: audio_format_str(args.response_format)
speed: args.speed
speed: args.speed
}
data := json.encode(req)

View File

@@ -13,7 +13,7 @@ fn test_chat_completion() {
res := client.chat_completion(.gpt_4o_2024_08_06, Messages{
messages: [
Message{
role: .user
role: .user
content: 'Say these words exactly as i write them with no punctuation: AI is getting out of hand'
},
]
@@ -50,7 +50,7 @@ fn test_files() {
mut client := get()!
uploaded_file := client.upload_file(
filepath: '${os.dir(@FILE) + '/testdata/testfile.txt'}'
purpose: .assistants
purpose: .assistants
)!
assert uploaded_file.filename == 'testfile.txt'
@@ -61,7 +61,7 @@ fn test_files() {
uploaded_file2 := client.upload_file(
filepath: '${os.dir(@FILE) + '/testdata/testfile2.txt'}'
purpose: .assistants
purpose: .assistants
)!
assert uploaded_file2.filename == 'testfile2.txt'
@@ -97,7 +97,7 @@ fn test_audio() {
// create speech
client.create_speech(
input: 'the quick brown fox jumps over the lazy dog'
input: 'the quick brown fox jumps over the lazy dog'
output_path: '/tmp/output.mp3'
)!

View File

@@ -57,7 +57,7 @@ pub fn (mut f OpenAI) chat_completion(model_type ModelType, msgs Messages) !Chat
}
for msg in msgs.messages {
mr := MessageRaw{
role: roletype_str(msg.role)
role: roletype_str(msg.role)
content: msg.content
}
m.messages << mr

View File

@@ -46,7 +46,7 @@ pub fn (mut f OpenAI) create_embeddings(args EmbeddingCreateArgs) !EmbeddingResp
req := EmbeddingCreateRequest{
input: args.input
model: embedding_model_str(args.model)
user: args.user
user: args.user
}
data := json.encode(req)

View File

@@ -48,16 +48,16 @@ pub fn (mut f OpenAI) upload_file(args FileUploadArgs) !File {
file_content := os.read_file(args.filepath)!
file_data := http.FileData{
filename: os.base(args.filepath)
data: file_content
content_type: openai.jsonl_mime_type
filename: os.base(args.filepath)
data: file_content
content_type: jsonl_mime_type
}
form := http.PostMultipartFormConfig{
files: {
'file': [file_data]
}
form: {
form: {
'purpose': file_purpose_str(args.purpose)
}
}

View File

@@ -99,11 +99,11 @@ pub fn (mut f OpenAI) create_image(args ImageCreateArgs) !Images {
image_size := image_size_str(args.size)
response_format := image_resp_type_str(args.format)
request := ImageRequest{
prompt: args.prompt
n: args.num_images
size: image_size
prompt: args.prompt
n: args.num_images
size: image_size
response_format: response_format
user: args.user
user: args.user
}
data := json.encode(request)
mut conn := f.connection()!
@@ -118,17 +118,17 @@ pub fn (mut f OpenAI) create_image(args ImageCreateArgs) !Images {
pub fn (mut f OpenAI) create_edit_image(args ImageEditArgs) !Images {
image_content := os.read_file(args.image_path)!
image_file := http.FileData{
filename: os.base(args.image_path)
content_type: openai.image_mine_type
data: image_content
filename: os.base(args.image_path)
content_type: image_mine_type
data: image_content
}
mut mask_file := []http.FileData{}
if args.mask_path != '' {
mask_content := os.read_file(args.mask_path)!
mask_file << http.FileData{
filename: os.base(args.mask_path)
content_type: openai.image_mine_type
data: mask_content
filename: os.base(args.mask_path)
content_type: image_mine_type
data: mask_content
}
}
@@ -137,7 +137,7 @@ pub fn (mut f OpenAI) create_edit_image(args ImageEditArgs) !Images {
'image': [image_file]
'mask': mask_file
}
form: {
form: {
'prompt': args.prompt
'n': args.num_images.str()
'response_format': image_resp_type_str(args.format)
@@ -163,16 +163,16 @@ pub fn (mut f OpenAI) create_edit_image(args ImageEditArgs) !Images {
pub fn (mut f OpenAI) create_variation_image(args ImageVariationArgs) !Images {
image_content := os.read_file(args.image_path)!
image_file := http.FileData{
filename: os.base(args.image_path)
content_type: openai.image_mine_type
data: image_content
filename: os.base(args.image_path)
content_type: image_mine_type
data: image_content
}
form := http.PostMultipartFormConfig{
files: {
'image': [image_file]
}
form: {
form: {
'n': args.num_images.str()
'response_format': image_resp_type_str(args.format)
'size': image_size_str(args.size)

View File

@@ -31,7 +31,7 @@ pub mut:
fn cfg_play(p paramsparser.Params) ! {
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
mut mycfg := OpenAI{
name: p.get_default('name', 'default')!
name: p.get_default('name', 'default')!
api_key: p.get('api_key')!
}
set(mycfg)!
@@ -46,8 +46,8 @@ fn obj_init(obj_ OpenAI) !OpenAI {
pub fn (mut client OpenAI) connection() !&httpconnection.HTTPConnection {
mut c := client.conn or {
mut c2 := httpconnection.new(
name: 'openaiconnection_${client.name}'
url: 'https://api.openai.com/v1'
name: 'openaiconnection_${client.name}'
url: 'https://api.openai.com/v1'
cache: false
)!
c2

View File

@@ -20,7 +20,7 @@ mut:
params_ ?&paramsparser.Params
dbcollection_ ?&dbfs.DBCollection @[skip; str: skip]
redis_ ?&redisclient.Redis @[skip; str: skip]
path_ ?pathlib.Path
path_ ?pathlib.Path
pub mut:
// snippets map[string]string
config ContextConfig
@@ -187,9 +187,9 @@ pub fn (mut self Context) secret_set(secret_ string) ! {
}
pub fn (mut self Context) path() !pathlib.Path {
return self.path_ or {
return self.path_ or {
path2 := '${os.home_dir()}/hero/context/${self.config.name}'
mut path := pathlib.get_dir(path: path2,create: false)!
mut path := pathlib.get_dir(path: path2, create: false)!
path
}
}

View File

@@ -13,8 +13,8 @@ import freeflowuniverse.herolib.core.pathlib
@[heap]
pub struct Session {
mut:
path_ ?pathlib.Path
logger_ ?logger.Logger
path_ ?pathlib.Path
logger_ ?logger.Logger
pub mut:
name string // unique id for session (session id), can be more than one per context
interactive bool = true
@@ -24,7 +24,6 @@ pub mut:
context &Context @[skip; str: skip]
config SessionConfig
env map[string]string
}
///////// LOAD & SAVE
@@ -94,9 +93,9 @@ pub fn (self Session) guid() string {
}
pub fn (mut self Session) path() !pathlib.Path {
return self.path_ or {
return self.path_ or {
path2 := '${self.context.path()!.path}/${self.name}'
mut path := pathlib.get_dir(path: path2,create: true)!
mut path := pathlib.get_dir(path: path2, create: true)!
path
}
}

View File

@@ -3,8 +3,8 @@ module base
import freeflowuniverse.herolib.core.logger
pub fn (mut session Session) logger() !logger.Logger {
return session.logger_ or {
mut l2 := logger.new('${session.path()!.path}/logs')!
l2
}
}

View File

@@ -1,12 +1,11 @@
module logger
import freeflowuniverse.herolib.core.pathlib
pub fn new(path string) !Logger {
mut p := pathlib.get_dir(path: path, create: true)!
return Logger{
path: p
lastlog_time: 0
}
}

View File

@@ -1,46 +1,44 @@
module logger
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime
@[params]
pub struct LogItemArgs {
pub mut:
timestamp ?ourtime.OurTime
cat string
log string
logtype LogType
}
pub fn (mut l Logger) log(args_ LogItemArgs) ! {
mut args := args_
t := args.timestamp or {
t2 := ourtime.now()
t2
}
// Format category (max 10 chars, ascii only)
args.cat = texttools.name_fix(args.cat)
if args.cat.len > 10 {
return error('category cannot be longer than 10 chars')
}
args.cat = texttools.expand(args.cat,10," ")
args.cat = texttools.expand(args.cat, 10, ' ')
args.log = texttools.dedent(args.log).trim_space()
mut logfile_path:="${l.path.path}/${t.dayhour()}.log"
mut logfile_path := '${l.path.path}/${t.dayhour()}.log'
// Create log file if it doesn't exist
if !os.exists(logfile_path) {
os.write_file(logfile_path, '')!
l.lastlog_time = 0 //make sure we put time again
l.lastlog_time = 0 // make sure we put time again
}
mut f:= os.open_append(logfile_path)!
mut f := os.open_append(logfile_path)!
mut content := ''
@@ -53,7 +51,7 @@ pub fn (mut l Logger) log(args_ LogItemArgs)! {
// Format log lines
error_prefix := if args.logtype == .error { 'E' } else { ' ' }
lines := args.log.split('\n')
for i, line in lines {
if i == 0 {
content += '${error_prefix} ${args.cat} - ${line}\n'

View File

@@ -15,64 +15,60 @@ fn test_logger() {
// Test stdout logging
logger.log(LogItemArgs{
cat: 'test-app'
log: 'This is a test message\nWith a second line\nAnd a third line'
logtype: .stdout
timestamp: ourtime.new('2022-12-05 20:14:35')!
})!
// Test error logging
logger.log(LogItemArgs{
cat: 'error-test'
log: 'This is an error\nWith details'
logtype: .error
timestamp: ourtime.new('2022-12-05 20:14:35')!
})!
logger.log(LogItemArgs{
cat: 'test-app'
log: 'This is a test message\nWith a second line\nAnd a third line'
logtype: .stdout
timestamp: ourtime.new('2022-12-05 20:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
This is an error
With details
'
logtype: .error
timestamp: ourtime.new('2022-12-05 20:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
aaa
bbb
'
logtype: .error
timestamp: ourtime.new('2022-12-05 22:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
aaa2
bbb2
'
logtype: .error
timestamp: ourtime.new('2022-12-05 22:14:36')!
})!
// Verify log directory exists
assert os.exists('/tmp/testlogs'), 'Log directory should exist'
@@ -81,17 +77,16 @@ fn test_logger() {
assert files.len == 2
mut file := pathlib.get_file(
path: '/tmp/testlogs/${files[0]}'
create: false
)!
println('/tmp/testlogs/${files[0]}')
content:=file.read()!.trim_space()
content := file.read()!.trim_space()
items := logger.search()!
assert items.len == 6 //still wrong: TODO
assert items.len == 6 // still wrong: TODO
}
fn testsuite_end() {

View File

@@ -1,28 +1,24 @@
module logger
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib
@[heap]
pub struct Logger {
pub mut:
path pathlib.Path
lastlog_time i64 // timestamp of the last written time marker; a new marker is only written when we enter a new second (logs can come in much faster)
}
pub struct LogItem {
pub mut:
timestamp ourtime.OurTime
cat string
log string
logtype LogType
}
pub enum LogType {
stdout
error
}

View File

@@ -1,24 +1,21 @@
module logger
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime
@[params]
pub struct SearchArgs {
pub mut:
timestamp_from ?ourtime.OurTime
timestamp_to ?ourtime.OurTime
cat string // can be empty
log string // any content in here will be looked for
logtype LogType
maxitems int = 10000
}
pub fn (mut l Logger) search(args_ SearchArgs) ![]LogItem {
mut args := args_
// Format category (max 10 chars, ascii only)
@@ -48,15 +45,14 @@ pub fn (mut l Logger) search(args_ SearchArgs)! []LogItem {
if !file.ends_with('.log') {
continue
}
// Parse dayhour from filename
dayhour := file[..file.len-4] // remove .log
dayhour := file[..file.len - 4] // remove .log
file_time := ourtime.new(dayhour)!
mut current_time:=ourtime.OurTime{}
mut current_time := ourtime.OurTime{}
mut current_item := LogItem{}
mut collecting := false
// Skip if file is outside time range
if file_time.unix() < from_time || file_time.unix() > to_time {
continue
@@ -65,23 +61,22 @@ pub fn (mut l Logger) search(args_ SearchArgs)! []LogItem {
// Read and parse log file
content := os.read_file('${l.path.path}/${file}')!
lines := content.split('\n')
for line in lines {
for line in lines {
if result.len >= args.maxitems {
return result
}
line_trim := line.trim_space()
if line_trim == '' {
continue
}
// Check if this is a timestamp line
if !(line.starts_with(" ") || line.starts_with("E")){
if !(line.starts_with(' ') || line.starts_with('E')) {
current_time = ourtime.new(line_trim)!
if collecting {
process(mut result,current_item,current_time, args , from_time, to_time)!
process(mut result, current_item, current_time, args, from_time, to_time)!
}
collecting = false
continue
@@ -93,9 +88,9 @@ pub fn (mut l Logger) search(args_ SearchArgs)! []LogItem {
// Start new item
current_item = LogItem{
timestamp: current_time
cat: line_trim[2..12].trim_space()
log: line_trim[15..].trim_space()
logtype: if is_error { .error } else { .stdout }
}
collecting = true
} else {
@@ -106,23 +101,22 @@ pub fn (mut l Logger) search(args_ SearchArgs)! []LogItem {
// Add last item if collecting
if collecting {
process(mut result,current_item,current_time, args , from_time, to_time)!
process(mut result, current_item, current_time, args, from_time, to_time)!
}
}
return result
}
fn process(mut result []LogItem, current_item LogItem, current_time ourtime.OurTime, args SearchArgs, from_time i64, to_time i64) ! {
fn process(mut result []LogItem, current_item LogItem, current_time ourtime.OurTime, args SearchArgs, from_time i64, to_time i64) ! {
// Add previous item if it matches filters
log_epoch:= current_item.timestamp.unix()
log_epoch := current_item.timestamp.unix()
if log_epoch < from_time || log_epoch > to_time {
return
}
if (args.cat == '' || current_item.cat.trim_space() == args.cat)
&& (args.log == '' || current_item.log.contains(args.log))
&& args.logtype == current_item.logtype {
result << current_item
}
}
}
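Putting log() and search() side by side: each second gets a bare timestamp line, and every entry line is '<E or space> <category padded to 10 chars> - <message>', which is why search() slices the category from [2..12] and the message from [15..]. A sketch of a resulting log file (the exact timestamp-line format is an assumption based on the ourtime formats used in the tests above):

2022-12-05 20:14:35
  test-app   - This is a test message
  test-app   - With a second line
E error-test - This is an error
E error-test - With details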

View File

@@ -94,10 +94,9 @@ pub fn (ot OurTime) day() string {
// returns a date-time string in "YYYY-MM-DD HH" format (24h).
pub fn (ot OurTime) dayhour() string {
return ot.time().format().all_before_last(":")
return ot.time().format().all_before_last(':')
}
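For example (a sketch, assuming format() renders minutes as the last ':' group, per the comment above):

t := ourtime.new('2022-12-05 20:14:35')!
assert t.dayhour() == '2022-12-05 20' // also used by the logger as the per-hour log file name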
// returns as epoch (seconds)
pub fn (ot OurTime) int() int {
return int(ot.time().unix())

View File

@@ -145,8 +145,8 @@ pub fn get_unix_from_absolute(timestr_ string) !i64 {
return error("unrecognized time format, time must either be YYYY/MM/DD or DD/MM/YYYY, or : in stead of /. Input was:'${timestr_}'")
}
if timepart.trim_space() == ""{
timepart='00:00:00'
if timepart.trim_space() == '' {
timepart = '00:00:00'
}
timparts := timepart.split(':')
if timparts.len > 3 {
@@ -165,6 +165,6 @@ pub fn get_unix_from_absolute(timestr_ string) !i64 {
return error("could not parse date/time string '${full_string}': ${err}")
}
//println(" ${timparts} ${time_struct}")
// println(" ${timparts} ${time_struct}")
return time_struct.unix()
}

View File

@@ -89,19 +89,18 @@ fn test_input_variations() {
// check that standard formats can be inputted
fn test_absolute_time() {
input_strings := {
'2022-12-5': 1670198400
' 2022-12-05 ': 1670198400
'2022-12-5 1': 1670198400 + 3600
'2022-12-5 20': 1670198400 + 3600 * 20
'2022-12-5 20:14': 1670198400 + 3600 * 20 + 14 * 60
'2022-12-5 20:14:35': 1670198400 + 3600 * 20 + 14 * 60 + 35
}
for key, value in input_strings {
println(" ===== ${key} ${value}")
thetime := new(key) or { panic('cannot get ourtime for ${key}.\n$err') }
println(' ===== ${key} ${value}')
thetime := new(key) or { panic('cannot get ourtime for ${key}.\n${err}') }
assert value == get_unix_from_absolute(key)!
assert thetime.unix() == value, 'expiration was incorrect for ${key}'
}
a := get_unix_from_absolute('2022-12-5')!
@@ -111,12 +110,11 @@ fn test_absolute_time() {
d := get_unix_from_absolute('2022-12-5 01:00')!
e := get_unix_from_absolute('2022-12-5 01:1')!
assert a == a2
assert b == a + 3600
assert b == c
assert b == d
assert e == d + 60
}
fn test_from_epoch() {
@@ -138,4 +136,3 @@ fn test_parse_date() {
assert test_value.unix() == value
}
}

View File

@@ -45,7 +45,7 @@ pub fn example_configure(args_ InstallArgs) ! {
exampledbfile := $tmpl('templates/db.example.org')
mut path_testzone := pathlib.get_file(
path: '${args_.dnszones_path}/db.example.org'
path: '${args_.dnszones_path}/db.example.org'
create: true
)!
path_testzone.template_write(exampledbfile, true)!

View File

@@ -57,7 +57,7 @@ pub fn install(args_ InstallArgs) ! {
}
mut dest := osal.download(
url: url
url: url
minsize_kb: 13000
expand_dir: '/tmp/coredns'
)!
@@ -65,7 +65,7 @@ pub fn install(args_ InstallArgs) ! {
mut binpath := dest.file_get('coredns')!
osal.cmd_add(
cmdname: 'coredns'
source: binpath.path
source: binpath.path
)!
}

View File

@@ -32,17 +32,17 @@ pub fn play(mut plbook playbook.PlayBook) ! {
example := p.get_default_false('example')
install(
reset: reset
start: start
stop: stop
restart: restart
homedir: homedir
config_path: config_path
config_url: config_url
dnszones_path: dnszones_path
dnszones_url: dnszones_url
plugins: plugins
example: example
)!
}
}

View File

@@ -24,16 +24,16 @@ pub fn install() ! {
url := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}-linux-amd64.xz'
console.print_debug(' download ${url}')
mut dest := osal.download(
url: url
minsize_kb: 40000
reset: true
expand_file: '/tmp/download/gitea'
)!
binpath := pathlib.get_file(path: '/tmp/download/gitea', create: false)!
osal.cmd_add(
cmdname: 'gitea'
source: binpath.path
source: binpath.path
)!
osal.done_set('gitea_install', 'OK')!
@@ -59,16 +59,16 @@ pub fn start() ! {
url := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}-linux-amd64.xz'
console.print_debug(' download ${url}')
mut dest := osal.download(
url: url
minsize_kb: 40000
reset: true
expand_file: '/tmp/download/gitea'
)!
binpath := pathlib.get_file(path: '/tmp/download/gitea', create: false)!
osal.cmd_add(
cmdname: 'gitea'
source: binpath.path
source: binpath.path
)!
osal.done_set('gitea_install', 'OK')!

View File

@@ -95,8 +95,8 @@ pub fn get(name_ string) !Server {
args := json.decode(Config, data)!
mut server := Server{
name: name
config: args
path_config: pathlib.get_dir(path: '${args.path}/cfg', create: true)!
}
@@ -162,7 +162,7 @@ pub fn (mut server Server) start() ! {
processname := 'gitea_${server.name}'
mut p := z.process_new(
name: processname
cmd: '
cmd: '
cd /tmp
sudo -u git bash -c \'gitea web --config ${config_path.path} --verbose\'
'

View File

@@ -4,107 +4,94 @@ import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import net.http
import json
import os
// checks if a certain version or above is installed
fn installed() !bool {
res := os.execute('${osal.profile_path_source_and()} livekit-server -v')
if res.exit_code != 0 {
return false
}
r := res.output.split_into_lines().filter(it.contains('version'))
if r.len != 1 {
return error("couldn't parse livekit version.\n${res.output}")
}
installedversion := r[0].all_after_first('version')
if texttools.version(version) != texttools.version(installedversion) {
return false
}
return true
}
fn install() ! {
console.print_header('install livekit')
mut installer := get()!
osal.execute_silent('
curl -s https://livekit.io/install.sh | bash
')!
}
fn startupcmd() ![]zinit.ZProcessNewArgs {
mut res := []zinit.ZProcessNewArgs{}
mut installer := get()!
res << zinit.ZProcessNewArgs{
name: 'livekit'
cmd: 'livekit-server --config ${installer.configpath} --bind 0.0.0.0'
}
return res
}
fn running() !bool {
mut installer := get()!
myport := installer.nr * 2 + 7880
endpoint := 'http://localhost:${myport}/api/v1/health'
response := http.get(endpoint) or {
console.print_stderr('Error connecting to LiveKit server: ${err}')
return false
}
if response.status_code != 200 {
console.print_stderr('LiveKit server returned non-200 status code: ${response.status_code}')
return false
}
health_info := json.decode(map[string]string, response.body) or {
console.print_stderr('Error decoding LiveKit server response: ${err}')
return false
}
if health_info['status'] != 'ok' {
console.print_stderr('LiveKit server health check failed: ${health_info["status"]}')
return false
}
return true
}
fn start_pre() ! {
// Pre-start initialization if needed
}
fn start_post() ! {
// Post-start operations if needed
}
fn stop_pre() ! {
// Pre-stop operations if needed
}
fn stop_post() ! {
// Post-stop cleanup if needed
}
fn destroy() ! {
mut installer := get()!
os.rm(installer.configpath) or {}
os.rm('livekit-server') or {}
}

View File

@@ -1,229 +1,204 @@
module livekit
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.ui.console
import time
__global (
livekit_global map[string]&LivekitServer
livekit_default string
)
/////////FACTORY
@[params]
pub struct ArgsGet {
pub mut:
name string = 'default'
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = livekit_default
}
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&LivekitServer {
mut args := args_get(args_)
if args.name !in livekit_global {
if !config_exists() {
if default {
config_save()!
}
}
config_load()!
}
return livekit_global[args.name] or {
println(livekit_global)
panic('bug in get from factory: ')
}
}
fn config_exists(args_ ArgsGet) bool {
mut args := args_get(args_)
mut context := base.context() or { panic('bug') }
return context.hero_config_exists('livekit', args.name)
}
fn config_load(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
mut heroscript := context.hero_config_get('livekit', args.name)!
play(heroscript: heroscript)!
}
fn config_save(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
context.hero_config_set('livekit', args.name, heroscript_default()!)!
}
fn set(o LivekitServer) ! {
mut o2 := obj_init(o)!
livekit_global['default'] = &o2
}
@[params]
pub struct PlayArgs {
pub mut:
name string = 'default'
heroscript string // if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
start bool
stop bool
restart bool
delete bool
configure bool // make sure there is at least one installed
}
pub fn play(args_ PlayArgs) ! {
mut args := args_
if args.heroscript == '' {
args.heroscript = heroscript_default()!
}
mut plbook := args.plbook or { playbook.new(text: args.heroscript)! }
mut install_actions := plbook.find(filter: 'livekit.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
mut p := install_action.params
mycfg := cfg_play(p)!
set(mycfg)!
}
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// load from disk and make sure it is properly initialized
pub fn (mut self LivekitServer) reload() ! {
switch(self.name)
self = obj_init(self)!
}
pub fn (mut self LivekitServer) start() ! {
switch(self.name)
if self.running()! {
return
}
console.print_header('livekit start')
configure()!
start_pre()!
mut sm := startupmanager.get()!
for zprocess in startupcmd()! {
sm.start(zprocess.name)!
}
start_post()!
for _ in 0 .. 50 {
if self.running()! {
return
}
time.sleep(100 * time.millisecond)
}
return error('livekit did not install properly.')
}
pub fn (mut self LivekitServer) install_start(args RestartArgs) ! {
switch(self.name)
self.install(args)!
self.start()!
}
pub fn (mut self LivekitServer) stop() ! {
switch(self.name)
stop_pre()!
mut sm := startupmanager.get()!
for zprocess in startupcmd()! {
sm.stop(zprocess.name)!
}
stop_post()!
}
pub fn (mut self LivekitServer) restart() ! {
switch(self.name)
self.stop()!
self.start()!
}
pub fn (mut self LivekitServer) running() !bool {
switch(self.name)
mut sm := startupmanager.get()!
// walk over the generic processes, if not running return
for zprocess in startupcmd()! {
r := sm.running(zprocess.name)!
if r == false {
return false
}
}
return running()!
}
@[params]
pub struct InstallArgs {
pub mut:
reset bool
}
pub fn (mut self LivekitServer) install(args InstallArgs) ! {
switch(self.name)
if args.reset || (!installed()!) {
install()!
}
}
pub fn (mut self LivekitServer) destroy() ! {
switch(self.name)
self.stop()!
destroy()!
}
// switch instance to be used for livekit
pub fn switch(name string) {
livekit_default = name
}

View File

@@ -1,4 +1,5 @@
module livekit
import freeflowuniverse.herolib.data.paramsparser
import os
@@ -6,11 +7,9 @@ pub const version = '1.7.2'
const singleton = false
const default = true
//TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
pub fn heroscript_default() !string {
heroscript:="
heroscript := "
!!livekit.configure
name:'default'
apikey: ''
@@ -18,72 +17,66 @@ pub fn heroscript_default() !string {
nr: 1 // each specific instance onto this server needs to have a unique nr
"
return heroscript
return heroscript
}
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
pub struct LivekitServer {
pub mut:
name string = 'default'
apikey string
apisecret string @[secret]
configpath string
nr int = 0 // each specific instance onto this server needs to have a unique nr
}
fn cfg_play(p paramsparser.Params) !LivekitServer {
mut mycfg := LivekitServer{
name: p.get_default('name', 'default')!
apikey: p.get_default('apikey', '')!
apisecret: p.get_default('apisecret', '')!
nr: p.get_default_int('nr', 0)!
}
return mycfg
}
fn obj_init(obj_ LivekitServer) !LivekitServer {
mut mycfg := obj_
if mycfg.configpath == '' {
mycfg.configpath = '${os.home_dir()}/hero/cfg/livekit_${mycfg.name}.yaml'
}
if mycfg.apikey == '' || mycfg.apisecret == '' {
// Execute the livekit-server generate-keys command
result := os.execute('livekit-server generate-keys')
if result.exit_code != 0 {
return error('Failed to generate LiveKit keys')
}
// Split the output into lines
lines := result.output.split_into_lines()
// Extract API Key and API Secret
for line in lines {
if line.starts_with('API Key:') {
mycfg.apikey = line.all_after('API Key:').trim_space()
} else if line.starts_with('API Secret:') {
mycfg.apisecret = line.all_after('API Secret:').trim_space()
}
}
// Verify that both keys were extracted
if mycfg.apikey == '' || mycfg.apisecret == '' {
return error('Failed to extract API Key or API Secret')
}
}
return mycfg
}
// called before start if done
fn configure() ! {
mut installer := get()!
mut mycode := $tmpl('templates/config.yaml')
mut path := pathlib.get_file(path: installer.configpath, create: true)!
path.write(mycode)!
console.print_debug(mycode)
}
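For context, a minimal sketch of how the factory above and this config file fit together; the API key and secret are placeholders (obj_init generates real ones when they are empty):

// hypothetical end-to-end: configure via heroscript, then start the server
play(
	heroscript: "
	!!livekit.configure
	name:'default'
	apikey: 'placeholder'
	apisecret: 'placeholder'
	nr: 1
	"
)!
mut server := get()!
server.start()!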

View File

@@ -5,7 +5,6 @@ import freeflowuniverse.herolib.installers.develapps.vscode
import freeflowuniverse.herolib.installers.develapps.chrome
import freeflowuniverse.herolib.installers.virt.podman as podman_installer
import freeflowuniverse.herolib.installers.virt.buildah as buildah_installer
import freeflowuniverse.herolib.installers.virt.lima
import freeflowuniverse.herolib.installers.net.mycelium
import freeflowuniverse.herolib.core.texttools
@@ -104,8 +103,8 @@ pub fn install_multi(args_ InstallArgs) ! {
}
'hero' {
herolib.install(
reset: args.reset
git_pull: args.gitpull
reset: args.reset
git_pull: args.gitpull
git_reset: args.gitreset
)!
}
@@ -113,7 +112,7 @@ pub fn install_multi(args_ InstallArgs) ! {
herolib.hero_install(reset: args.reset)!
}
'caddy' {
//caddy.install(reset: args.reset)!
// caddy.install(reset: args.reset)!
// caddy.configure_examples()!
}
'chrome' {
@@ -133,12 +132,12 @@ pub fn install_multi(args_ InstallArgs) ! {
lima.install(reset: args.reset, uninstall: args.uninstall)!
}
'herocontainers' {
mut podman_installer0 := podman_installer.get()!
mut buildah_installer0 := buildah_installer.get()!
if args.reset {
podman_installer0.destroy()! // will remove all
buildah_installer0.destroy()! // will remove all
}
podman_installer0.install()!
buildah_installer0.install()!
@@ -173,16 +172,16 @@ pub fn install_multi(args_ InstallArgs) ! {
}
'zola' {
mut i2 := zola.get()!
i2.install()! //will also install tailwind
i2.install()! // will also install tailwind
}
'tailwind' {
mut i := tailwind.get()!
i.install()!
}
}
'zinit' {
mut i := zinit.get()!
i.install()!
}
}
else {
return error('cannot find installer for: ${item}')
}