Merge branch 'development_ourdb_new' into development

* development_ourdb_new: (115 commits)
  webdav completion wip
  Remove path from fsentry metadata, make vfs and webdav work again with fixes
  feat: Implement database synchronization using binary encoding
  Add documentation and tests for model_property.v
  feat: Add diagrams and README for OurDB syncer
  circle core objects work again
  ...
  ...
  radix tree has now prefix
  names
  models
  ...
  ....
  ...
  ...
  vfs_basics working
  vfs working
  ...
  ...
  ...
  ...

# Conflicts:
#	.gitignore
#	lib/code/generator/installer_client/ask.v
#	lib/code/generator/installer_client/factory.v
#	lib/code/generator/installer_client/generate.v
#	lib/code/generator/installer_client/model.v
#	lib/code/generator/installer_client/readme.md
#	lib/code/generator/installer_client/scanner.v
#	lib/code/generator/installer_client/templates/atemplate.yaml
#	lib/code/generator/installer_client/templates/heroscript_client
#	lib/code/generator/installer_client/templates/heroscript_installer
#	lib/code/generator/installer_client/templates/objname_actions.vtemplate
#	lib/code/generator/installer_client/templates/objname_factory_.vtemplate
#	lib/code/generator/installer_client/templates/objname_model.vtemplate
#	lib/code/generator/installer_client/templates/readme.md
#	lib/code/generator/installer_client_OLD/ask.v
#	lib/code/generator/installer_client_OLD/do.v
#	lib/code/generator/installer_client_OLD/generate.v
#	lib/code/generator/installer_client_OLD/model.v
#	lib/code/generator/installer_client_OLD/readme.md
#	lib/code/generator/installer_client_OLD/scanner.v
#	lib/code/generator/installer_client_OLD/templates/atemplate.yaml
#	lib/code/generator/installer_client_OLD/templates/heroscript_client
#	lib/code/generator/installer_client_OLD/templates/heroscript_installer
#	lib/code/generator/installer_client_OLD/templates/objname_actions.vtemplate
#	lib/code/generator/installer_client_OLD/templates/objname_factory_.vtemplate
#	lib/code/generator/installer_client_OLD/templates/objname_model.vtemplate
#	lib/code/generator/installer_client_OLD/templates/readme.md
#	lib/core/generator/installer_client_OLD/ask.v
#	lib/core/generator/installer_client_OLD/factory.v
#	lib/core/generator/installer_client_OLD/generate.v
#	lib/core/generator/installer_client_OLD/model.v
#	lib/core/generator/installer_client_OLD/readme.md
#	lib/core/generator/installer_client_OLD/scanner.v
#	lib/core/generator/installer_client_OLD/templates/atemplate.yaml
#	lib/core/generator/installer_client_OLD/templates/heroscript_client
#	lib/core/generator/installer_client_OLD/templates/heroscript_installer
#	lib/core/generator/installer_client_OLD/templates/objname_actions.vtemplate
#	lib/core/generator/installer_client_OLD/templates/objname_factory_.vtemplate
#	lib/core/generator/installer_client_OLD/templates/objname_model.vtemplate
#	lib/core/generator/installer_client_OLD/templates/readme.md
#	lib/core/texttools/namefix.v
2025-03-24 05:29:46 +01:00
436 changed files with 23966 additions and 5190 deletions

14
.gitignore vendored

@@ -1,4 +1,13 @@
# Additional ignore files and directories
Thumbs.db
# Logs
logs/
*.log
*.out
# Compiled Python files
*.pyc
*.pyo
__pycache__/
*dSYM/
.vmodules/
.vscode
@@ -33,4 +42,5 @@ cli/hero
compile_results.log
tmp
compile_summary.log
.summary_lock
.summary_lock
.aider*


@@ -1,5 +1,5 @@
#!/usr/bin/env -S v -n -w -parallel-cc -enable-globals run
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import flag


@@ -51,7 +51,7 @@ fn do() ! {
mut cmd := Command{
name: 'hero'
description: 'Your HERO toolset.'
version: '1.0.19'
version: '1.0.21'
}
// herocmds.cmd_run_add_flags(mut cmd)

86
examples/aitest/dir_listing.vsh Executable file

@@ -0,0 +1,86 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import freeflowuniverse.herolib.core.pathlib
// Helper function to format file sizes
fn format_size(size i64) string {
if size < 1024 {
return '${size} B'
} else if size < 1024 * 1024 {
kb := f64(size) / 1024.0
return '${kb:.1f} KB'
} else if size < 1024 * 1024 * 1024 {
mb := f64(size) / (1024.0 * 1024.0)
return '${mb:.1f} MB'
} else {
gb := f64(size) / (1024.0 * 1024.0 * 1024.0)
return '${gb:.1f} GB'
}
}
// Set parameters directly in the script
// Change these values as needed
target_dir := '/tmp' // Current directory by default
show_hidden := false // Set to true to show hidden files
recursive := false // Set to true for recursive listing
// Create a Path object for the target directory
mut path := pathlib.get(target_dir)
// Ensure the directory exists and is a directory
if path.exist == .no {
eprintln('Error: Directory "${target_dir}" does not exist')
exit(1)
}
if path.cat != .dir && path.cat != .linkdir {
eprintln('Error: "${target_dir}" is not a directory')
exit(1)
}
// Main execution
println('Listing contents of: ${path.absolute()}')
println('----------------------------')
// Define list arguments
mut list_args := pathlib.ListArgs{
recursive: recursive,
ignoredefault: !show_hidden
}
// Use pathlib to list the directory contents
mut list_result := path.list(list_args) or {
eprintln('Error listing directory: ${err}')
exit(1)
}
// Print each file/directory
for p in list_result.paths {
// Skip the root directory itself
if p.path == path.path {
continue
}
// Calculate the level based on the path depth relative to the root
rel_path := p.path.replace(list_result.root, '')
level := rel_path.count('/') - if rel_path.starts_with('/') { 1 } else { 0 }
// Print indentation based on level
if level > 0 {
print(' '.repeat(level))
}
// Print file/directory info
name := p.name()
if p.cat == .dir || p.cat == .linkdir {
println('📁 ${name}/')
} else {
// Get file size
file_size := os.file_size(p.path)
println('📄 ${name} (${format_size(file_size)})')
}
}
println('----------------------------')
println('Done!')
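
As a quick illustration of the format_size helper defined above (not part of the original script), a few sample calls and the values the thresholds in that function produce:

// Illustrative only: expected outputs of the format_size helper above.
println(format_size(512)) // 512 B
println(format_size(1536)) // 1.5 KB  (1536 / 1024)
println(format_size(5 * 1024 * 1024)) // 5.0 MB
println(format_size(i64(3) * 1024 * 1024 * 1024)) // 3.0 GB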


@@ -1,7 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.mycelium
import freeflowuniverse.herolib.installers.net.mycelium as mycelium_installer
import freeflowuniverse.herolib.installers.net.mycelium_installer
import freeflowuniverse.herolib.osal
import time
import os

115
examples/core/agent_encoding.vsh Executable file

@@ -0,0 +1,115 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.jobs.model
// Create a test agent with some sample data
mut agent := model.Agent{
pubkey: 'ed25519:1234567890abcdef'
address: '192.168.1.100'
port: 9999
description: 'Test agent for binary encoding'
status: model.AgentStatus{
guid: 'agent-123'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: model.AgentState.ok
}
services: []
signature: 'signature-data-here'
}
// Add a service
mut service := model.AgentService{
actor: 'vm'
description: 'Virtual machine management'
status: model.AgentServiceState.ok
public: true
actions: []
}
// Add an action to the service
mut action := model.AgentServiceAction{
action: 'create'
description: 'Create a new virtual machine'
status: model.AgentServiceState.ok
public: true
params: {
'name': 'Name of the VM'
'memory': 'Memory in MB'
'cpu': 'Number of CPU cores'
}
params_example: {
'name': 'my-test-vm'
'memory': '2048'
'cpu': '2'
}
}
service.actions << action
agent.services << service
// Test binary encoding
binary_data := agent.dumps() or {
println('Failed to encode agent: ${err}')
exit(1)
}
println('Successfully encoded agent to binary, size: ${binary_data.len} bytes')
// Test binary decoding
decoded_agent := model.loads(binary_data) or {
println('Failed to decode agent: ${err}')
exit(1)
}
// Verify the decoded data matches the original
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.description == agent.description
assert decoded_agent.signature == agent.signature
// Verify status
assert decoded_agent.status.guid == agent.status.guid
assert decoded_agent.status.status == agent.status.status
// Verify services
assert decoded_agent.services.len == agent.services.len
if decoded_agent.services.len > 0 {
service1 := decoded_agent.services[0]
original_service := agent.services[0]
assert service1.actor == original_service.actor
assert service1.description == original_service.description
assert service1.status == original_service.status
assert service1.public == original_service.public
// Verify actions
assert service1.actions.len == original_service.actions.len
if service1.actions.len > 0 {
action1 := service1.actions[0]
original_action := original_service.actions[0]
assert action1.action == original_action.action
assert action1.description == original_action.description
assert action1.status == original_action.status
assert action1.public == original_action.public
// Verify params
assert action1.params.len == original_action.params.len
for key, value in original_action.params {
assert key in action1.params
assert action1.params[key] == value
}
// Verify params_example
assert action1.params_example.len == original_action.params_example.len
for key, value in original_action.params_example {
assert key in action1.params_example
assert action1.params_example[key] == value
}
}
}
println('Agent binary encoding/decoding test passed successfully')


@@ -0,0 +1,42 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.ourdb
import time
// Known worker public key
worker1_public_key := '46a9f9cee1ce98ef7478f3dea759589bbf6da9156533e63fed9f233640ac072c'
// worker2_public_key := '46a9f9cee1ce98ef7478f3dea759589bbf6da9156533e63fed9f233640ac072c'
// Create master node
println('Starting master node...')
mut streamer := ourdb.new_streamer(
incremental_mode: false
server_port: 9000 // Master uses default port
is_worker: false
)!
println('Initializing workers...')
// Add workers and initialize their databases
// You should run the deduped_mycelium_worker.vsh script for each worker
streamer.add_worker(worker1_public_key)!
// streamer.add_worker(worker2_public_key)!
// When we perform a write, we get the ID of the record
// We basically write to the master database, and read from the workers normally
mut id1 := streamer.write(id: 1, value: 'Record 1')!
mut id2 := streamer.write(id: 2, value: 'Record 2')!
println('Master record 1 data: ${id1}')
println('Master record 2 data: ${id2}')
// Read data from master
master_id1 := streamer.read(id: 1)!
master_id2 := streamer.read(id: 2)!
println('Master 1 data: ${master_id1.bytestr()}')
println('Master 2 data: ${master_id2.bytestr()}')
// Read data from workers
worker_id1 := streamer.read(id: 1, worker_public_key: worker1_public_key)!
worker_id2 := streamer.read(id: 2, worker_public_key: worker1_public_key)!
println('Worker 1 data: ${worker_id1.bytestr()}')
println('Worker 2 data: ${worker_id2.bytestr()}')


@@ -0,0 +1,17 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.ourdb
worker_public_key := '46a9f9cee1ce98ef7478f3dea759589bbf6da9156533e63fed9f233640ac072c'
// Create a worker node with a unique database path
mut streamer := ourdb.new_streamer(
incremental_mode: false
server_port: 9000 // Use different port than master
is_worker: true
)!
streamer.add_worker(worker_public_key)!
// Initialize and run worker node
streamer.listen()!

23
examples/data/ourdb_client.vsh Executable file

@@ -0,0 +1,23 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
// Please note that before running this script you need to run the server first
// See examples/data/ourdb_server.vsh
import freeflowuniverse.herolib.data.ourdb
import os
mut client := ourdb.new_client(
port: 3000
host: 'localhost'
)!
set := client.set('hello')!
get := client.get(set.id)!
assert set.id == get.id
println('Set result: ${set}')
println('Get result: ${get}')
// test delete functionality
client.delete(set.id)!
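
A possible follow-up check, not part of the original script: after the delete above, reading the same id should fail (the exact error text depends on the ourdb client implementation):

// Hypothetical verification that the delete succeeded; error message is implementation-defined.
if _ := client.get(set.id) {
	println('unexpected: record ${set.id} still exists')
} else {
	println('record ${set.id} deleted as expected: ${err}')
}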

17
examples/data/ourdb_server.vsh Executable file

@@ -0,0 +1,17 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.ourdb
import os
mut server := ourdb.new_server(
port: 3000
allowed_hosts: ['localhost']
allowed_operations: ['set', 'get', 'delete']
secret_key: 'secret'
config: ourdb.OurDBConfig{
path: '/tmp/ourdb'
incremental_mode: true
}
)!
server.run()


@@ -0,0 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.db.cometbft as cometbft_installer
// cometbft_installer.delete()!
mut installer := cometbft_installer.get()!
installer.install()!


@@ -8,33 +8,5 @@ import freeflowuniverse.herolib.core
core.interactive_set()! // make sure the sudo works so we can do things even if it requires those rights
// import freeflowuniverse.herolib.data.dbfs
// import freeflowuniverse.herolib.installers.lang.vlang
// import freeflowuniverse.herolib.installers.db.redis as redis_installer
// import freeflowuniverse.herolib.installers.infra.coredns as coredns_installer
// import freeflowuniverse.herolib.installers.sysadmintools.daguserver as dagu_installer
// import freeflowuniverse.herolib.installers.sysadmintools.b2 as b2_installer
// import freeflowuniverse.herolib.installers.net.mycelium as mycelium_installer
// import freeflowuniverse.herolib.osal.screen
// import freeflowuniverse.herolib.osal
// redis_installer.new()!
// dagu_installer.install(passwd:"1234",secret:"1234",restart:true)!
// coredns_installer.install()!
// mycelium_installer.install()!
// mycelium_installer.restart()!
// mut screens:=screen.new()!
// println(screens)
// dagu_installer.check(secret:"1234")!
// vlang.v_analyzer_install()!
// b2_installer.install()!
// rust.install(reset:false)!
// python.install(reset:false)!
// nodejs.install(reset:false)!
golang.install(reset: false)!
mut i1:=golang.get()!
i1.install()!


@@ -0,0 +1,94 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.jobs.model
import flag
import os
import time
// This example demonstrates using the VFS-based job storage
// - Creating jobs and storing them in VFS
// - Listing jobs from VFS
// - Cleaning up old jobs
mut fp := flag.new_flag_parser(os.args)
fp.application('vfs_jobs_example.vsh')
fp.version('v0.1.0')
fp.description('Example of VFS-based job storage with cleanup functionality')
fp.skip_executable()
cleanup_days := fp.int('days', `d`, 7, 'Clean up jobs older than this many days')
create_count := fp.int('create', `c`, 5, 'Number of jobs to create')
help_requested := fp.bool('help', `h`, false, 'Show help message')
if help_requested {
println(fp.usage())
exit(0)
}
additional_args := fp.finalize() or {
eprintln(err)
println(fp.usage())
exit(1)
}
// Create a new HeroRunner instance
mut runner := model.new() or {
panic('Failed to create HeroRunner: ${err}')
}
println('\n---------BEGIN VFS JOBS EXAMPLE')
// Create some jobs
println('\n---------CREATING JOBS')
for i in 0..create_count {
mut job := runner.jobs.new()
job.guid = 'job_${i}_${time.now().unix}'
job.actor = 'example_actor'
job.action = 'test_action'
job.params = {
'param1': 'value1'
'param2': 'value2'
}
// For demonstration, make some jobs older by adjusting their creation time
if i % 2 == 0 {
job.status.created.time = time.now().add_days(-(cleanup_days + 1))
}
runner.jobs.set(job) or {
panic('Failed to set job: ${err}')
}
println('Created job with GUID: ${job.guid}')
}
// List all jobs
println('\n---------LISTING ALL JOBS')
jobs := runner.jobs.list() or {
panic('Failed to list jobs: ${err}')
}
println('Found ${jobs.len} jobs:')
for job in jobs {
days_ago := (time.now().unix - job.status.created.time.unix) / (60 * 60 * 24)
println('- ${job.guid} (created ${days_ago} days ago)')
}
// Clean up old jobs
println('\n---------CLEANING UP OLD JOBS')
println('Cleaning up jobs older than ${cleanup_days} days...')
deleted_count := runner.cleanup_jobs(cleanup_days) or {
panic('Failed to clean up jobs: ${err}')
}
println('Deleted ${deleted_count} old jobs')
// List remaining jobs
println('\n---------LISTING REMAINING JOBS')
remaining_jobs := runner.jobs.list() or {
panic('Failed to list jobs: ${err}')
}
println('Found ${remaining_jobs.len} remaining jobs:')
for job in remaining_jobs {
days_ago := (time.now().unix - job.status.created.time.unix) / (60 * 60 * 24)
println('- ${job.guid} (created ${days_ago} days ago)')
}
println('\n---------END VFS JOBS EXAMPLE')


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import json
import log


@@ -1,7 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid as tfgrid
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import log
fn main() {


@@ -1,7 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid as tfgrid
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import log
fn main() {


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import json
import log


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import log
import os


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import log


@@ -1,8 +1,8 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import flag
import rand
import json


@@ -1,9 +1,9 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.gridproxy.model { NodeFilter }
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy.model { NodeFilter }
import rand
import log
import os


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
contract_id := u64(119450)


@@ -1,7 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid as tfgrid
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
fn get_contracts_example() ! {


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
fn get_farms_example() ! {


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
fn get_gateway_nodes_example() ! {


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
mut gp_client := gridproxy.new(net: .test, cache: true)!


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
fn get_nodes_example() ! {


@@ -1,7 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.gridproxy.model { NodeStatus }
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy.model { NodeStatus }
import freeflowuniverse.herolib.ui.console
fn get_online_grid_stats_example() ! {


@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.ui.console
fn get_all_twins_example() ! {


@@ -1,8 +1,8 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.grid.models
import freeflowuniverse.herolib.threefold.grid3.models
import freeflowuniverse.herolib.threefold.grid as tfgrid
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import time
import flag
import rand


@@ -1,8 +1,7 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -d use_openssl -enable-globals -cg run
//#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals -cg run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.tfgrid3deployer
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.installers.threefold.griddriver
import os
import time
@@ -20,9 +19,9 @@ deployment.add_machine(
cpu: 1
memory: 2
planetary: false
public_ip4: true
wireguard: true
public_ip4: false
size: 10 // 10 gig
mycelium: tfgrid3deployer.Mycelium{}
)
deployment.deploy()!


@@ -1,8 +1,8 @@
#!/usr/bin/env -S v -gc none -d use_openssl -enable-globals -cg run
//#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.tfgrid3deployer
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.installers.threefold.griddriver
import os
import time


@@ -1,8 +1,8 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -d use_openssl -enable-globals -cg run
//#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals -cg run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.tfgrid3deployer
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.installers.threefold.griddriver
import os
import time


@@ -7,8 +7,8 @@ This script automates the deployment of an OpenWebUI instance on the ThreeFold G
- V compiler installed
- OpenSSL support enabled
- herolib dependencies:
- `freeflowuniverse.herolib.threefold.gridproxy`
- `freeflowuniverse.herolib.threefold.tfgrid3deployer`
- `freeflowuniverse.herolib.threefold.grid3.gridproxy`
- `freeflowuniverse.herolib.threefold.grid3.deployer`
- `freeflowuniverse.herolib.installers.threefold.griddriver`
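
A minimal sketch of how these modules fit together, following the deployer examples elsewhere in this commit; the deployment name and node id below are placeholders, not the values the OpenWebUI script actually uses:

```v
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.installers.threefold.griddriver

griddriver.install()!
mut deployment := deployer.new_deployment('demo_deployment')!
deployment.add_machine(
	name:       'vm1'
	cpu:        1
	memory:     2
	planetary:  false
	public_ip4: false
	nodes:      [u32(2009)] // placeholder node id
)
deployment.deploy()!
```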
## Installation


@@ -1,13 +1,10 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.tfgrid3deployer
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.installers.threefold.griddriver
fn main() {
griddriver.install()!
v := tfgrid3deployer.get()!
println('cred: ${v}')
deployment_name := 'my_deployment27'
@@ -19,19 +16,17 @@ fn main() {
cpu: 1
memory: 2
planetary: false
public_ip4: true
mycelium: tfgrid3deployer.Mycelium{}
nodes: [u32(167)]
)
deployment.add_machine(
name: 'my_vm2'
cpu: 1
memory: 2
planetary: false
public_ip4: true
mycelium: tfgrid3deployer.Mycelium{}
// nodes: [u32(164)]
public_ip4: false
nodes: [167]
)
// deployment.add_machine(
// name: 'my_vm2'
// cpu: 1
// memory: 2
// planetary: false
// public_ip4: true
// // nodes: [u32(164)]
// )
deployment.add_zdb(name: 'my_zdb', password: 'my_passw&rd', size: 2)
deployment.add_webname(name: 'mywebname2', backend: 'http://37.27.132.47:8000')


@@ -0,0 +1,39 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.ui.console
const node_id = u32(2009)
const deployment_name = 'vmtestdeployment'
fn deploy_vm() ! {
mut deployment := deployer.new_deployment(deployment_name)!
deployment.add_machine(
name: 'vm1'
cpu: 1
memory: 2
planetary: false
public_ip4: true
nodes: [node_id]
)
deployment.deploy()!
println(deployment)
}
fn delete_vm() ! {
deployer.delete_deployment(deployment_name)!
}
fn main() {
if os.args.len < 2 {
println('Please provide a command: "deploy" or "delete"')
return
}
match os.args[1] {
'deploy' { deploy_vm()! }
'delete' { delete_vm()! }
else { println('Invalid command. Use "deploy" or "delete"') }
}
}


@@ -1,7 +1,7 @@
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.tfgrid3deployer
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.installers.threefold.griddriver
import os
import time


@@ -1,13 +1,15 @@
#!/usr/bin/env -S v -gc none -d use_openssl -enable-globals -cg run
//#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run
import freeflowuniverse.herolib.threefold.gridproxy
import freeflowuniverse.herolib.threefold.tfgrid3deployer
import freeflowuniverse.herolib.threefold.grid3.gridproxy
import freeflowuniverse.herolib.threefold.grid3.deployer
import freeflowuniverse.herolib.installers.threefold.griddriver
import os
import time
griddriver.install()!
res2 := tfgrid3deployer.filter_nodes()!
println(res2)
exit(0)
v := tfgrid3deployer.get()!
println('cred: ${v}')
@@ -18,7 +20,7 @@ deployment.add_machine(
cpu: 1
memory: 2
planetary: false
public_ip4: true
public_ip4: false
size: 10 // 10 gig
mycelium: tfgrid3deployer.Mycelium{}
)

2
examples/vfs/vfs_db/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
dedupestor_vfs
ourdb_vfs


@@ -0,0 +1,122 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import rand
import freeflowuniverse.herolib.vfs.vfs_db
import freeflowuniverse.herolib.data.dedupestor
import freeflowuniverse.herolib.data.ourdb
pub struct VFSDedupeDB {
dedupestor.DedupeStore
}
pub fn (mut db VFSDedupeDB) set(args ourdb.OurDBSetArgs) !u32 {
return db.store(args.data,
dedupestor.Reference{owner: u16(1), id: args.id or {panic('VFS Must provide id')}}
)!
}
pub fn (mut db VFSDedupeDB) delete(id u32) ! {
db.DedupeStore.delete(id, dedupestor.Reference{owner: u16(1), id: id})!
}
example_data_dir := os.join_path(os.dir(@FILE), 'example_db')
os.mkdir_all(example_data_dir)!
data_path := os.join_path(example_data_dir, 'data')
// Create separate databases for data and metadata
mut db_data := VFSDedupeDB{
DedupeStore: dedupestor.new(
path: os.join_path(example_data_dir, 'data')
)!
}
mut db_metadata := ourdb.new(
path: os.join_path(example_data_dir, 'metadata')
incremental_mode: false
)!
// Create VFS with separate databases for data and metadata
mut vfs := vfs_db.new(mut db_data, mut db_metadata) or {
panic('Failed to create VFS: ${err}')
}
println('\n---------BEGIN EXAMPLE')
println('---------WRITING FILES')
vfs.file_create('/some_file.txt') or {
panic('Failed to create file: ${err}')
}
vfs.file_create('/another_file.txt') or {
panic('Failed to create file: ${err}')
}
vfs.file_write('/some_file.txt', 'gibberish'.bytes()) or {
panic('Failed to write file: ${err}')
}
vfs.file_write('/another_file.txt', 'abcdefg'.bytes()) or {
panic('Failed to write file: ${err}')
}
println('\n---------READING FILES')
some_file_content := vfs.file_read('/some_file.txt') or {
panic('Failed to read file: ${err}')
}
println(some_file_content.bytestr())
another_file_content := vfs.file_read('/another_file.txt') or {
panic('Failed to read file: ${err}')
}
println(another_file_content.bytestr())
println("\n---------WRITING DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir, 'data/0.db'))})")
vfs.file_create('/duplicate.txt') or {
panic('Failed to create file: ${err}')
}
vfs.file_write('/duplicate.txt', 'gibberish'.bytes()) or {
panic('Failed to write file: ${err}')
}
println("\n---------WROTE DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir, 'data/0.db'))})")
println('---------READING FILES')
some_file_content3 := vfs.file_read('/some_file.txt') or {
panic('Failed to read file: ${err}')
}
println(some_file_content3.bytestr())
another_file_content3 := vfs.file_read('/another_file.txt') or {
panic('Failed to read file: ${err}')
}
println(another_file_content3.bytestr())
duplicate_content := vfs.file_read('/duplicate.txt') or {
panic('Failed to read file: ${err}')
}
println(duplicate_content.bytestr())
println("\n---------DELETING DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir, 'data/0.db'))})")
vfs.file_delete('/duplicate.txt') or {
panic('Failed to delete file: ${err}')
}
db_file_path := os.join_path(data_path, '0.db')
println("---------READING FILES (DB SIZE: ${if os.exists(db_file_path) { os.file_size(db_file_path) } else { 0 }})")
some_file_content2 := vfs.file_read('/some_file.txt') or {
panic('Failed to read file: ${err}')
}
println(some_file_content2.bytestr())
another_file_content2 := vfs.file_read('/another_file.txt') or {
panic('Failed to read file: ${err}')
}
println(another_file_content2.bytestr())
// FAILS SUCCESSFULLY
// duplicate_content := vfs.file_read('duplicate.txt') or {
// println('Expected error: ${err}')
// []u8{}
// }
// if duplicate_content.len > 0 {
// println(duplicate_content.bytestr())
// }


@@ -0,0 +1,73 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import rand
import freeflowuniverse.herolib.vfs.vfs_db
import freeflowuniverse.herolib.data.ourdb
example_data_dir := os.join_path(os.temp_dir(), 'ourdb_example_data_${rand.string(3)}')
os.mkdir_all(example_data_dir)!
// Create separate directories for data and metadata
data_dir := os.join_path(example_data_dir, 'data')
metadata_dir := os.join_path(example_data_dir, 'metadata')
os.mkdir_all(data_dir)!
os.mkdir_all(metadata_dir)!
// Create separate databases for data and metadata
mut db_data := ourdb.new(
path: data_dir
incremental_mode: false
)!
mut db_metadata := ourdb.new(
path: metadata_dir
incremental_mode: false
)!
// Create VFS with separate databases for data and metadata
mut vfs := vfs_db.new_with_separate_dbs(
mut db_data,
mut db_metadata,
data_dir: data_dir,
metadata_dir: metadata_dir
)!
// Create a root directory if it doesn't exist
if !vfs.exists('/') {
vfs.dir_create('/')!
}
// Create some files and directories
vfs.dir_create('/test_dir')!
vfs.file_create('/test_file.txt')!
vfs.file_write('/test_file.txt', 'Hello, world!'.bytes())!
// Create a file in the directory
vfs.file_create('/test_dir/nested_file.txt')!
vfs.file_write('/test_dir/nested_file.txt', 'This is a nested file.'.bytes())!
// Read the files
println('File content: ${vfs.file_read('/test_file.txt')!.bytestr()}')
println('Nested file content: ${vfs.file_read('/test_dir/nested_file.txt')!.bytestr()}')
// List directory contents
println('Root directory contents:')
root_entries := vfs.dir_list('/')!
for entry in root_entries {
println('- ${entry.get_metadata().name} (${entry.get_metadata().file_type})')
}
println('Test directory contents:')
test_dir_entries := vfs.dir_list('/test_dir')!
for entry in test_dir_entries {
println('- ${entry.get_metadata().name} (${entry.get_metadata().file_type})')
}
// Create a duplicate file with the same content
vfs.file_create('/duplicate_file.txt')!
vfs.file_write('/duplicate_file.txt', 'Hello, world!'.bytes())!
// Demonstrate that data and metadata are stored separately
println('Data DB Size: ${os.file_size(os.join_path(data_dir, '0.ourdb'))} bytes')
println('Metadata DB Size: ${os.file_size(os.join_path(metadata_dir, '0.ourdb'))} bytes')


@@ -0,0 +1,180 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import freeflowuniverse.herolib.vfs.vfs_db
import freeflowuniverse.herolib.data.ourdb
// This example demonstrates directory operations in the VFS
// - Creating directories with subdirectories
// - Listing directories
// - Reading and writing files in subdirectories
// - Deleting files and verifying they're gone
// Set up the example data directory
example_data_dir := '/tmp/example_dir_ops'
os.mkdir_all(example_data_dir)!
// Create separate databases for data and metadata
mut db_data := ourdb.new(
path: os.join_path(example_data_dir, 'data')
incremental_mode: false
)!
mut db_metadata := ourdb.new(
path: os.join_path(example_data_dir, 'metadata')
incremental_mode: false
)!
// Create VFS with separate databases for data and metadata
mut vfs := vfs_db.new(mut db_data, mut db_metadata) or {
panic('Failed to create VFS: ${err}')
}
println('\n---------BEGIN DIRECTORY OPERATIONS EXAMPLE')
// Create directories with subdirectories
println('\n---------CREATING DIRECTORIES')
vfs.dir_create('/dir1') or {
panic('Failed to create directory: ${err}')
}
println('Created directory: /dir1')
vfs.dir_create('/dir1/subdir1') or {
panic('Failed to create directory: ${err}')
}
println('Created directory: /dir1/subdir1')
vfs.dir_create('/dir1/subdir2') or {
panic('Failed to create directory: ${err}')
}
println('Created directory: /dir1/subdir2')
vfs.dir_create('/dir2') or {
panic('Failed to create directory: ${err}')
}
println('Created directory: /dir2')
vfs.dir_create('/dir2/subdir1') or {
panic('Failed to create directory: ${err}')
}
println('Created directory: /dir2/subdir1')
vfs.dir_create('/dir2/subdir1/subsubdir1') or {
panic('Failed to create directory: ${err}')
}
println('Created directory: /dir2/subdir1/subsubdir1')
// List directories
println('\n---------LISTING ROOT DIRECTORY')
root_entries := vfs.dir_list('/') or {
panic('Failed to list directory: ${err}')
}
println('Root directory contains:')
for entry in root_entries {
entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
println('- ${entry.get_metadata().name} (${entry_type})')
}
println('\n---------LISTING /dir1 DIRECTORY')
dir1_entries := vfs.dir_list('/dir1') or {
panic('Failed to list directory: ${err}')
}
println('/dir1 directory contains:')
for entry in dir1_entries {
entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
println('- ${entry.get_metadata().name} (${entry_type})')
}
// Write a file in a subdirectory
println('\n---------WRITING FILE IN SUBDIRECTORY')
vfs.file_create('/dir1/subdir1/test_file.txt') or {
panic('Failed to create file: ${err}')
}
println('Created file: /dir1/subdir1/test_file.txt')
test_content := 'This is a test file in a subdirectory'
vfs.file_write('/dir1/subdir1/test_file.txt', test_content.bytes()) or {
panic('Failed to write file: ${err}')
}
println('Wrote content to file: /dir1/subdir1/test_file.txt')
// Read the file and verify content
println('\n---------READING FILE FROM SUBDIRECTORY')
file_content := vfs.file_read('/dir1/subdir1/test_file.txt') or {
panic('Failed to read file: ${err}')
}
println('File content: ${file_content.bytestr()}')
println('Content verification: ${if file_content.bytestr() == test_content { 'SUCCESS' } else { 'FAILED' }}')
// List the subdirectory to see the file
println('\n---------LISTING /dir1/subdir1 DIRECTORY')
subdir1_entries := vfs.dir_list('/dir1/subdir1') or {
panic('Failed to list directory: ${err}')
}
println('/dir1/subdir1 directory contains:')
for entry in subdir1_entries {
entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
println('- ${entry.get_metadata().name} (${entry_type})')
}
// Delete the file
println('\n---------DELETING FILE')
vfs.file_delete('/dir1/subdir1/test_file.txt') or {
panic('Failed to delete file: ${err}')
}
println('Deleted file: /dir1/subdir1/test_file.txt')
// List the subdirectory again to verify the file is gone
println('\n---------LISTING /dir1/subdir1 DIRECTORY AFTER DELETION')
subdir1_entries_after := vfs.dir_list('/dir1/subdir1') or {
panic('Failed to list directory: ${err}')
}
println('/dir1/subdir1 directory contains:')
if subdir1_entries_after.len == 0 {
println('- (empty directory)')
} else {
for entry in subdir1_entries_after {
entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
println('- ${entry.get_metadata().name} (${entry_type})')
}
}
// Create a file in a deep subdirectory
println('\n---------CREATING FILE IN DEEP SUBDIRECTORY')
vfs.file_create('/dir2/subdir1/subsubdir1/deep_file.txt') or {
panic('Failed to create file: ${err}')
}
println('Created file: /dir2/subdir1/subsubdir1/deep_file.txt')
deep_content := 'This file is in a deep subdirectory'
vfs.file_write('/dir2/subdir1/subsubdir1/deep_file.txt', deep_content.bytes()) or {
panic('Failed to write file: ${err}')
}
println('Wrote content to file: /dir2/subdir1/subsubdir1/deep_file.txt')
// Read the deep file and verify content
println('\n---------READING FILE FROM DEEP SUBDIRECTORY')
deep_file_content := vfs.file_read('/dir2/subdir1/subsubdir1/deep_file.txt') or {
panic('Failed to read file: ${err}')
}
println('File content: ${deep_file_content.bytestr()}')
println('Content verification: ${if deep_file_content.bytestr() == deep_content { 'SUCCESS' } else { 'FAILED' }}')
// Clean up by deleting directories (optional)
println('\n---------CLEANING UP')
vfs.file_delete('/dir2/subdir1/subsubdir1/deep_file.txt') or {
panic('Failed to delete file: ${err}')
}
println('Deleted file: /dir2/subdir1/subsubdir1/deep_file.txt')
// Try to verify the file is gone by attempting to read it
println('\n---------VERIFYING FILE IS GONE')
deep_file_exists := vfs.file_read('/dir2/subdir1/subsubdir1/deep_file.txt') or {
println('File is gone as expected: ${err}')
[]u8{}
}
if deep_file_exists.len > 0 {
panic('ERROR: File still exists!')
}
println('\n---------END DIRECTORY OPERATIONS EXAMPLE')


@@ -1,30 +1,69 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.webdav
import freeflowuniverse.herolib.core.pathlib
import time
import net.http
import encoding.base64
import freeflowuniverse.herolib.vfs.webdav
import cli { Command, Flag }
import os
file_name := 'newfile.txt'
root_dir := '/tmp/webdav'
fn main() {
mut cmd := Command{
name: 'webdav'
description: 'Vlang Webdav Server'
}
username := 'omda'
password := 'password'
hashed_password := base64.encode_str('${username}:${password}')
mut app := Command{
name: 'webdav'
description: 'Vlang Webdav Server'
execute: fn (cmd Command) ! {
port := cmd.flags.get_int('port')!
directory := cmd.flags.get_string('directory')!
user := cmd.flags.get_string('user')!
password := cmd.flags.get_string('password')!
mut app := webdav.new_app(root_dir: root_dir, username: username, password: password) or {
eprintln('failed to create new server: ${err}')
exit(1)
mut server := webdav.new_app(
root_dir: directory
server_port: port
user_db: {
user: password
}
)!
server.run()
return
}
}
app.add_flag(Flag{
flag: .int
name: 'port'
abbrev: 'p'
description: 'server port'
default_value: ['8000']
})
app.add_flag(Flag{
flag: .string
required: true
name: 'directory'
abbrev: 'd'
description: 'server directory'
})
app.add_flag(Flag{
flag: .string
required: true
name: 'user'
abbrev: 'u'
description: 'username'
})
app.add_flag(Flag{
flag: .string
required: true
name: 'password'
abbrev: 'pw'
description: 'user password'
})
app.setup()
app.parse(os.args)
}
app.run(spawn_: true)
time.sleep(1 * time.second)
mut p := pathlib.get_file(path: '${root_dir}/${file_name}', create: true)!
p.write('my new file')!
mut req := http.new_request(.get, 'http://localhost:${app.server_port}/${file_name}',
'')
req.add_custom_header('Authorization', 'Basic ${hashed_password}')!
req.do()!

20
examples/webdav/webdav_vfs.vsh Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.dav.webdav
import freeflowuniverse.herolib.vfs.vfs_db
import freeflowuniverse.herolib.data.ourdb
import os
import log
const database_path = os.join_path(os.dir(@FILE), 'database')
mut metadata_db := ourdb.new(path:os.join_path(database_path, 'metadata'))!
mut data_db := ourdb.new(path:os.join_path(database_path, 'data'))!
mut vfs := vfs_db.new(mut metadata_db, mut data_db)!
mut server := webdav.new_server(vfs: vfs, user_db: {
'admin': '123'
})!
log.set_level(.debug)
server.run()
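
Once the server above is running, it can be checked with an HTTP Basic auth request, the same way the webdav CLI example in this commit does; a minimal sketch run as a separate script (port 8000 is an assumption, adjust it to whatever webdav.new_server actually listens on):

// Hypothetical client-side check; run while the server above is up. Port is assumed.
import net.http
import encoding.base64

hashed := base64.encode_str('admin:123')
mut req := http.new_request(.get, 'http://localhost:8000/', '')
req.add_custom_header('Authorization', 'Basic ${hashed}')!
resp := req.do()!
println(resp.status_code)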


@@ -0,0 +1,94 @@
#!/usr/bin/env -S v -n -w -gc none run
import freeflowuniverse.herolib.data.markdownparser2
// Sample markdown text
text := '# Heading 1
This is a paragraph with **bold** and *italic* text.
## Heading 2
- List item 1
- List item 2
- Nested item
- List item 3
```v
fn main() {
println("Hello, world!")
}
```
> This is a blockquote
> with multiple lines
| Column 1 | Column 2 | Column 3 |
|----------|:--------:|---------:|
| Left | Center | Right |
| Cell 1 | Cell 2 | Cell 3 |
[Link to V language](https://vlang.io)
![Image](https://vlang.io/img/v-logo.png)
Footnote reference[^1]
[^1]: This is a footnote.
'
// Example 1: Using the plain text renderer
println('=== PLAINTEXT RENDERING ===')
println(markdownparser2.to_plain(text))
println('')
// Example 2: Using the structure renderer to show markdown structure
println('=== STRUCTURE RENDERING ===')
println(markdownparser2.to_structure(text))
// Example 3: Using the navigator to find specific elements
println('\n=== NAVIGATION EXAMPLE ===')
// Parse the markdown text
doc := markdownparser2.parse(text)
// Create a navigator
mut nav := markdownparser2.new_navigator(doc)
// Find all headings
headings := nav.find_all_by_type(.heading)
println('Found ${headings.len} headings:')
for heading in headings {
level := heading.attributes['level']
println(' ${'#'.repeat(level.int())} ${heading.content}')
}
// Find all code blocks
code_blocks := nav.find_all_by_type(.code_block)
println('\nFound ${code_blocks.len} code blocks:')
for block in code_blocks {
language := block.attributes['language']
println(' Language: ${language}')
println(' Content length: ${block.content.len} characters')
}
// Find all list items
list_items := nav.find_all_by_type(.list_item)
println('\nFound ${list_items.len} list items:')
for item in list_items {
println(' - ${item.content}')
}
// Find content containing specific text
if element := nav.find_by_content('blockquote') {
println('\nFound element containing "blockquote":')
println(' Type: ${element.typ}')
println(' Content: ${element.content}')
}
// Find all footnotes
println('\nFootnotes:')
for id, footnote in nav.footnotes() {
println(' [^${id}]: ${footnote.content}')
}


@@ -0,0 +1,27 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
// import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.ui.console
import log
import os
import markdown
import freeflowuniverse.herolib.data.markdownparser2
path2:="${os.home_dir()}/code/github/freeflowuniverse/herolib/examples/webtools/mdbook_markdown/content/links.md"
path1:="${os.home_dir()}/code/github/freeflowuniverse/herolib/examples/webtools/mdbook_markdown/content/test.md"
text := os.read_file(path1)!
// Example 1: Using the built-in plaintext renderer
println('=== PLAINTEXT RENDERING ===')
println(markdown.to_plain(text))
println('')
// Example 2: Using our custom structure renderer to show markdown structure
println('=== STRUCTURE RENDERING ===')
println(markdownparser2.to_structure(text))
// // Example 3: Using a simple markdown example to demonstrate structure
// println('\n=== STRUCTURE OF A SIMPLE MARKDOWN EXAMPLE ===')
// simple_md := '# Heading 1\n\nThis is a paragraph with **bold** and *italic* text.\n\n- List item 1\n- List item 2\n\n```v\nfn main() {\n\tprintln("Hello, world!")\n}\n```\n\n[Link to V language](https://vlang.io)'
// println(markdown.to_structure(simple_md))


@@ -0,0 +1,29 @@
---
sidebar_position: 10
title: 'Dunia CyberCity'
description: 'Co-create the Future'
---
![alt text](img/cybercity2.png)
We are building a 700,000 m2 Regenerative Startup Cyber City
- 100% co-owned
- regenerative
- autonomous zone
a city for startups and their creators
- build a system for augmented collective intelligence
- operate business wise from a digital freezone
- (co)own assets (shares, digital currencies) safely and privately
## More Info
> see [https://friends.threefold.info/cybercity](https://friends.threefold.info/cybercity)
- login:```planet```
- passwd:```first```


@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.doctree
@@ -11,16 +11,17 @@ mut tree := doctree.new(name: 'test')!
// git_root string
// git_pull bool
// load bool = true // means we scan automatically the added collection
for project in 'projectinca, legal, why, web4,tfgrid3'.split(',').map(it.trim_space()) {
for project in 'projectinca, legal, why'.split(',').map(it.trim_space()) {
tree.scan(
git_url: 'https://git.ourworld.tf/tfgrid/info_tfgrid/src/branch/development/collections/${project}'
git_pull: false
)!
}
tree.export(
destination: '/tmp/test'
destination: '/tmp/mdexport'
reset: true
keep_structure: true
//keep_structure: true
exclude_errors: false
)!


@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
// import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.ui.console


@@ -4,7 +4,7 @@ set -e
os_name="$(uname -s)"
arch_name="$(uname -m)"
version='1.0.19'
version='1.0.21'
# Base URL for GitHub releases


@@ -64,10 +64,12 @@ os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowunive
println('Herolib installation completed successfully!')
// Add vtest alias
addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -cc tcc test\' ') or {
addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -show-c-output -n -w -cg -gc none -cc tcc test\' ') or {
eprintln('Failed to add vtest alias: ${err}')
}
//alias vtest='v -gc none -stats -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc test'
addtoscript('HOME/hero/bin', 'export PATH="\$PATH:\$HOME/hero/bin"') or {
eprintln('Failed to add path to hero, ${err}')
}


@@ -0,0 +1,70 @@
module actionprocessor
import freeflowuniverse.herolib.circles.dbs.core
import freeflowuniverse.herolib.circles.models
import freeflowuniverse.herolib.core.texttools
__global (
circle_global map[string]&CircleCoordinator
circle_default string
)
// CircleCoordinator is the main factory for managing jobs, agents, services, circles and names
@[heap]
pub struct CircleCoordinator {
pub mut:
name string // unique name at planetary scale; it is a DNS name
agents &core.AgentDB
circles &core.CircleDB
names &core.NameDB
session_state models.SessionState
}
@[params]
pub struct CircleCoordinatorArgs{
pub mut:
name string = "local"
pubkey string // pubkey of user who called this
addr string //mycelium address
path string
}
// new creates a new CircleCoordinator instance
pub fn new(args_ CircleCoordinatorArgs) !&CircleCoordinator {
mut args:=args_
args.name = texttools.name_fix(args.name)
if args.name in circle_global {
mut c:=circle_global[args.name] or {panic("bug")}
return c
}
mut session_state:=models.new_session(name: args.name, pubkey: args.pubkey, addr: args.addr, path: args.path)!
// os.mkdir_all(mypath)!
// Create the directories if they don't exist // SHOULD BE AUTOMATIC
// os.mkdir_all(os.join_path(mypath, 'data_core'))!
// os.mkdir_all(os.join_path(mypath, 'data_mcc'))!
// os.mkdir_all(os.join_path(mypath, 'meta_core'))!
// os.mkdir_all(os.join_path(mypath, 'meta_mcc'))! //message, contacts, calendar
// Initialize the db handlers with proper ourdb instances
mut agent_db := core.new_agentdb(session_state)!
mut circle_db := core.new_circledb(session_state)!
mut name_db := core.new_namedb(session_state)!
mut cm := &CircleCoordinator{
agents: &agent_db
circles: &circle_db
names: &name_db
session_state: session_state
}
circle_global[args.name] = cm
return cm
}
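
A short usage sketch of this factory (illustrative; the circle name and path are placeholders): repeated calls with the same name return the instance cached in circle_global.

// Hypothetical caller, e.g. from a .vsh script; name and path are placeholders.
import freeflowuniverse.herolib.circles.actionprocessor

mut cc := actionprocessor.new(name: 'demo_circle', path: '/tmp/demo_circle')!
mut cc2 := actionprocessor.new(name: 'demo_circle')! // returns the cached instance
ids := cc.agents.list()!
println('known agents: ${ids}')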


@@ -0,0 +1,121 @@
module core
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.circles.models { DBHandler, SessionState }
import freeflowuniverse.herolib.circles.models.core { Agent, AgentService, AgentServiceAction, AgentState }
@[heap]
pub struct AgentDB {
pub mut:
db DBHandler[Agent]
}
pub fn new_agentdb(session_state SessionState) !AgentDB {
return AgentDB{
db:models.new_dbhandler[Agent]('agent', session_state)
}
}
pub fn (mut m AgentDB) new() Agent {
return Agent{}
}
// set adds or updates an agent
pub fn (mut m AgentDB) set(agent Agent) !Agent {
return m.db.set(agent)!
}
// get retrieves an agent by its ID
pub fn (mut m AgentDB) get(id u32) !Agent {
return m.db.get(id)!
}
// list returns all agent IDs
pub fn (mut m AgentDB) list() ![]u32 {
return m.db.list()!
}
pub fn (mut m AgentDB) getall() ![]Agent {
return m.db.getall()!
}
// delete removes an agent by its ID
pub fn (mut m AgentDB) delete(id u32) ! {
m.db.delete(id)!
}
//////////////////CUSTOM METHODS//////////////////////////////////
// get_by_pubkey retrieves an agent by its public key
pub fn (mut m AgentDB) get_by_pubkey(pubkey string) !Agent {
return m.db.get_by_key('pubkey', pubkey)!
}
// delete_by_pubkey removes an agent by its public key
pub fn (mut m AgentDB) delete_by_pubkey(pubkey string) ! {
// Get the agent by pubkey
agent := m.get_by_pubkey(pubkey) or {
// Agent not found, nothing to delete
return
}
// Delete the agent by ID
m.delete(agent.id)!
}
// update_status updates just the status of an agent
pub fn (mut m AgentDB) update_status(pubkey string, status AgentState) !Agent {
// Get the agent by pubkey
mut agent := m.get_by_pubkey(pubkey)!
// Update the status
agent.status.status = status
agent.status.timestamp_last = ourtime.now()
// Save the updated agent
return m.set(agent)!
}
// get_all_agent_pubkeys returns all agent pubkeys
pub fn (mut m AgentDB) get_all_agent_pubkeys() ![]string {
// Get all agent IDs
agent_ids := m.list()!
// Get pubkeys for all agents
mut pubkeys := []string{}
for id in agent_ids {
agent := m.get(id) or { continue }
pubkeys << agent.pubkey
}
return pubkeys
}
// get_by_service returns all agents that provide a specific service
pub fn (mut m AgentDB) get_by_service(actor string, action string) ![]Agent {
mut matching_agents := []Agent{}
// Get all agent IDs
agent_ids := m.list()!
// Filter agents that provide the specified service
for id in agent_ids {
// Get the agent by ID
agent := m.get(id) or { continue }
// Check if agent provides the specified service
for service in agent.services {
if service.actor == actor {
for service_action in service.actions {
if service_action.action == action {
matching_agents << agent
break
}
}
break
}
}
}
return matching_agents
}


@@ -0,0 +1,176 @@
module core
import os
import rand
import freeflowuniverse.herolib.circles.actionprocessor
import freeflowuniverse.herolib.circles.models.core
fn test_agent_db() {
// Create a temporary directory for testing
test_dir := os.join_path(os.temp_dir(), 'hero_agent_test_${rand.intn(9000) or { 0 } + 1000}')
os.mkdir_all(test_dir) or { panic(err) }
defer { os.rmdir_all(test_dir) or {} }
mut runner := actionprocessor.new(path: test_dir)!
// Create multiple agents for testing
mut agent1 := runner.agents.new()
agent1.pubkey = 'test-agent-1'
agent1.address = '127.0.0.1'
agent1.description = 'Test Agent 1'
mut agent2 := runner.agents.new()
agent2.pubkey = 'test-agent-2'
agent2.address = '127.0.0.2'
agent2.description = 'Test Agent 2'
mut agent3 := runner.agents.new()
agent3.pubkey = 'test-agent-3'
agent3.address = '127.0.0.3'
agent3.description = 'Test Agent 3'
// Create a service using the factory method
mut service := agent1.new_service(actor: 'vm_manager', description: 'VM Management Service')
// Create a service action using the factory method
mut action := service.new_action(action:'start', description: 'Start a VM')
// Set additional properties for the action
action.params = {
'name': 'string'
}
action.params_example = {
'name': 'myvm'
}
// Add the agents
println('Adding agent 1')
agent1 = runner.agents.set(agent1)!
// Explicitly set different IDs for each agent to avoid overwriting
agent2.id = 1 // Set a different ID for agent2
println('Adding agent 2')
agent2 = runner.agents.set(agent2)!
agent3.id = 2 // Set a different ID for agent3
println('Adding agent 3')
agent3 = runner.agents.set(agent3)!
// Test list functionality
println('Testing list functionality')
// Debug: Print the agent IDs in the list
agent_ids := runner.agents.list()!
println('Agent IDs in list: ${agent_ids}')
// Debug: Print the 'all' key from the radix tree
all_bytes := runner.agents.db.session_state.dbs.db_meta_core.get('agent:id') or {
println('No agent:id key found in radix tree')
[]u8{}
}
if all_bytes.len > 0 {
all_str := all_bytes.bytestr()
println('Raw agent:id key content: "${all_str}"')
}
// Get all agents
all_agents := runner.agents.getall()!
println('Retrieved ${all_agents.len} agents')
for i, agent in all_agents {
println('Agent ${i}: id=${agent.id}, pubkey=${agent.pubkey}')
}
assert all_agents.len == 3, 'Expected 3 agents, got ${all_agents.len}'
// Verify all agents are in the list
mut found1 := false
mut found2 := false
mut found3 := false
for agent in all_agents {
if agent.pubkey == 'test-agent-1' {
found1 = true
} else if agent.pubkey == 'test-agent-2' {
found2 = true
} else if agent.pubkey == 'test-agent-3' {
found3 = true
}
}
assert found1, 'Agent 1 not found in list'
assert found2, 'Agent 2 not found in list'
assert found3, 'Agent 3 not found in list'
// Get and verify individual agents
println('Verifying individual agents')
retrieved_agent1 := runner.agents.get_by_pubkey('test-agent-1')!
assert retrieved_agent1.pubkey == agent1.pubkey
assert retrieved_agent1.address == agent1.address
assert retrieved_agent1.description == agent1.description
assert retrieved_agent1.services.len == 1
assert retrieved_agent1.services[0].actor == 'vm_manager'
assert retrieved_agent1.status.status == .ok
// Update agent status
println('Updating agent status')
runner.agents.update_status('test-agent-1', .down)!
updated_agent := runner.agents.get_by_pubkey('test-agent-1')!
assert updated_agent.status.status == .down
// Test get_by_service
println('Testing get_by_service')
service_agents := runner.agents.get_by_service('vm_manager', 'start')!
assert service_agents.len == 1
assert service_agents[0].pubkey == 'test-agent-1'
// Test delete functionality
println('Testing delete functionality')
// Delete agent 2
runner.agents.delete_by_pubkey('test-agent-2')!
// Verify deletion with list
agents_after_delete := runner.agents.getall()!
assert agents_after_delete.len == 2, 'Expected 2 agents after deletion, got ${agents_after_delete.len}'
// Verify the remaining agents
mut found_after_delete1 := false
mut found_after_delete2 := false
mut found_after_delete3 := false
for agent in agents_after_delete {
if agent.pubkey == 'test-agent-1' {
found_after_delete1 = true
} else if agent.pubkey == 'test-agent-2' {
found_after_delete2 = true
} else if agent.pubkey == 'test-agent-3' {
found_after_delete3 = true
}
}
assert found_after_delete1, 'Agent 1 not found after deletion'
assert !found_after_delete2, 'Agent 2 found after deletion (should be deleted)'
assert found_after_delete3, 'Agent 3 not found after deletion'
// Delete another agent
println('Deleting another agent')
runner.agents.delete_by_pubkey('test-agent-3')!
// Verify only one agent remains
agents_after_second_delete := runner.agents.getall()!
assert agents_after_second_delete.len == 1, 'Expected 1 agent after second deletion, got ${agents_after_second_delete.len}'
assert agents_after_second_delete[0].pubkey == 'test-agent-1', 'Remaining agent should be test-agent-1'
// Delete the last agent
println('Deleting last agent')
runner.agents.delete_by_pubkey('test-agent-1')!
// Verify no agents remain
agents_after_all_deleted := runner.agents.getall() or {
// This is expected to fail with 'No agents found' error
assert err.msg() == 'No agents found'
[]core.Agent{cap: 0}
}
assert agents_after_all_deleted.len == 0, 'Expected 0 agents after all deletions, got ${agents_after_all_deleted.len}'
println('All tests passed successfully')
}

View File

@@ -0,0 +1,149 @@
module core
import freeflowuniverse.herolib.circles.models { DBHandler, SessionState }
import freeflowuniverse.herolib.circles.models.core { Circle }
@[heap]
pub struct CircleDB {
pub mut:
db DBHandler[Circle]
}
pub fn new_circledb(session_state SessionState) !CircleDB {
return CircleDB{
db: models.new_dbhandler[Circle]('circle', session_state)
}
}
pub fn (mut m CircleDB) new() Circle {
return Circle{}
}
// set adds or updates a circle
pub fn (mut m CircleDB) set(circle Circle) !Circle {
return m.db.set(circle)!
}
// get retrieves a circle by its ID
pub fn (mut m CircleDB) get(id u32) !Circle {
return m.db.get(id)!
}
// list returns all circle IDs
pub fn (mut m CircleDB) list() ![]u32 {
return m.db.list()!
}
pub fn (mut m CircleDB) getall() ![]Circle {
return m.db.getall()!
}
// delete removes a circle by its ID
pub fn (mut m CircleDB) delete(id u32) ! {
m.db.delete(id)!
}
//////////////////CUSTOM METHODS//////////////////////////////////
// get_by_name retrieves a circle by its name
pub fn (mut m CircleDB) get_by_name(name string) !Circle {
return m.db.get_by_key('name', name)!
}
// delete_by_name removes a circle by its name
pub fn (mut m CircleDB) delete_by_name(name string) ! {
// Get the circle by name
circle := m.get_by_name(name) or {
// Circle not found, nothing to delete
return
}
// Delete the circle by ID
m.delete(circle.id)!
}
// get_all_circle_names returns all circle names
pub fn (mut m CircleDB) get_all_circle_names() ![]string {
// Get all circle IDs
circle_ids := m.list()!
// Get names for all circles
mut names := []string{}
for id in circle_ids {
circle := m.get(id) or { continue }
names << circle.name
}
return names
}
// add_member adds a member to a circle
pub fn (mut m CircleDB) add_member(circle_name string, member core.Member) !Circle {
// Get the circle by name
mut circle := m.get_by_name(circle_name)!
// Check if member with same name already exists
for existing_member in circle.members {
if existing_member.name == member.name {
return error('Member with name ${member.name} already exists in circle ${circle_name}')
}
}
// Add the member
circle.members << member
// Save the updated circle
return m.set(circle)!
}
// remove_member removes a member from a circle by name
pub fn (mut m CircleDB) remove_member(circle_name string, member_name string) !Circle {
// Get the circle by name
mut circle := m.get_by_name(circle_name)!
// Find and remove the member
mut found := false
mut new_members := []core.Member{}
for member in circle.members {
if member.name == member_name {
found = true
continue
}
new_members << member
}
if !found {
return error('Member with name ${member_name} not found in circle ${circle_name}')
}
// Update the circle members
circle.members = new_members
// Save the updated circle
return m.set(circle)!
}
// update_member_role updates the role of a member in a circle
pub fn (mut m CircleDB) update_member_role(circle_name string, member_name string, new_role core.Role) !Circle {
// Get the circle by name
mut circle := m.get_by_name(circle_name)!
// Find and update the member
mut found := false
for i, mut member in circle.members {
if member.name == member_name {
circle.members[i].role = new_role
found = true
break
}
}
if !found {
return error('Member with name ${member_name} not found in circle ${circle_name}')
}
// Save the updated circle
return m.set(circle)!
}

View File

@@ -0,0 +1,192 @@
module core
import os
import rand
import freeflowuniverse.herolib.circles.actionprocessor
import freeflowuniverse.herolib.circles.models.core
fn test_circle_db() {
// Create a temporary directory for testing
test_dir := os.join_path(os.temp_dir(), 'hero_circle_test_${rand.intn(9000) or { 0 } + 1000}')
os.mkdir_all(test_dir) or { panic(err) }
defer { os.rmdir_all(test_dir) or {} }
mut runner := actionprocessor.new(path: test_dir)!
// Create multiple circles for testing
mut circle1 := runner.circles.new()
circle1.name = 'test-circle-1'
circle1.description = 'Test Circle 1'
mut circle2 := runner.circles.new()
circle2.name = 'test-circle-2'
circle2.description = 'Test Circle 2'
mut circle3 := runner.circles.new()
circle3.name = 'test-circle-3'
circle3.description = 'Test Circle 3'
// Create members for testing
mut member1 := core.Member{
name: 'member1'
description: 'Test Member 1'
role: .admin
pubkeys: ['pubkey1']
emails: ['member1@example.com']
}
mut member2 := core.Member{
name: 'member2'
description: 'Test Member 2'
role: .member
pubkeys: ['pubkey2']
emails: ['member2@example.com']
}
// Add members to circle1
circle1.members << member1
circle1.members << member2
// Add the circles
println('Adding circle 1')
circle1 = runner.circles.set(circle1)!
// Explicitly set different IDs for each circle to avoid overwriting
circle2.id = 1 // Set a different ID for circle2
println('Adding circle 2')
circle2 = runner.circles.set(circle2)!
circle3.id = 2 // Set a different ID for circle3
println('Adding circle 3')
circle3 = runner.circles.set(circle3)!
// Test list functionality
println('Testing list functionality')
// Get all circles
all_circles := runner.circles.getall()!
println('Retrieved ${all_circles.len} circles')
for i, circle in all_circles {
println('Circle ${i}: id=${circle.id}, name=${circle.name}')
}
assert all_circles.len == 3, 'Expected 3 circles, got ${all_circles.len}'
// Verify all circles are in the list
mut found1 := false
mut found2 := false
mut found3 := false
for circle in all_circles {
if circle.name == 'test-circle-1' {
found1 = true
} else if circle.name == 'test-circle-2' {
found2 = true
} else if circle.name == 'test-circle-3' {
found3 = true
}
}
assert found1, 'Circle 1 not found in list'
assert found2, 'Circle 2 not found in list'
assert found3, 'Circle 3 not found in list'
// Get and verify individual circles
println('Verifying individual circles')
retrieved_circle1 := runner.circles.get_by_name('test-circle-1')!
assert retrieved_circle1.name == circle1.name
assert retrieved_circle1.description == circle1.description
assert retrieved_circle1.members.len == 2
assert retrieved_circle1.members[0].name == 'member1'
assert retrieved_circle1.members[0].role == .admin
assert retrieved_circle1.members[1].name == 'member2'
assert retrieved_circle1.members[1].role == .member
// Test add_member method
println('Testing add_member method')
mut member3 := core.Member{
name: 'member3'
description: 'Test Member 3'
role: .contributor
pubkeys: ['pubkey3']
emails: ['member3@example.com']
}
runner.circles.add_member('test-circle-2', member3)!
updated_circle2 := runner.circles.get_by_name('test-circle-2')!
assert updated_circle2.members.len == 1
assert updated_circle2.members[0].name == 'member3'
assert updated_circle2.members[0].role == .contributor
// Test update_member_role method
println('Testing update_member_role method')
runner.circles.update_member_role('test-circle-2', 'member3', .stakeholder)!
role_updated_circle2 := runner.circles.get_by_name('test-circle-2')!
assert role_updated_circle2.members[0].role == .stakeholder
// Test remove_member method
println('Testing remove_member method')
runner.circles.remove_member('test-circle-1', 'member2')!
member_removed_circle1 := runner.circles.get_by_name('test-circle-1')!
assert member_removed_circle1.members.len == 1
assert member_removed_circle1.members[0].name == 'member1'
// Test get_all_circle_names method
println('Testing get_all_circle_names method')
circle_names := runner.circles.get_all_circle_names()!
assert circle_names.len == 3
assert 'test-circle-1' in circle_names
assert 'test-circle-2' in circle_names
assert 'test-circle-3' in circle_names
// Test delete functionality
println('Testing delete functionality')
// Delete circle 2
runner.circles.delete_by_name('test-circle-2')!
// Verify deletion with list
circles_after_delete := runner.circles.getall()!
assert circles_after_delete.len == 2, 'Expected 2 circles after deletion, got ${circles_after_delete.len}'
// Verify the remaining circles
mut found_after_delete1 := false
mut found_after_delete2 := false
mut found_after_delete3 := false
for circle in circles_after_delete {
if circle.name == 'test-circle-1' {
found_after_delete1 = true
} else if circle.name == 'test-circle-2' {
found_after_delete2 = true
} else if circle.name == 'test-circle-3' {
found_after_delete3 = true
}
}
assert found_after_delete1, 'Circle 1 not found after deletion'
assert !found_after_delete2, 'Circle 2 found after deletion (should be deleted)'
assert found_after_delete3, 'Circle 3 not found after deletion'
// Delete another circle
println('Deleting another circle')
runner.circles.delete_by_name('test-circle-3')!
// Verify only one circle remains
circles_after_second_delete := runner.circles.getall()!
assert circles_after_second_delete.len == 1, 'Expected 1 circle after second deletion, got ${circles_after_second_delete.len}'
assert circles_after_second_delete[0].name == 'test-circle-1', 'Remaining circle should be test-circle-1'
// Delete the last circle
println('Deleting last circle')
runner.circles.delete_by_name('test-circle-1')!
// Verify no circles remain
circles_after_all_deleted := runner.circles.getall() or {
// Expected to fail once no circles remain; the exact error message depends on the handler
assert err.msg().contains('No index keys defined for this type') || err.msg().contains('No circles found')
[]core.Circle{cap: 0}
}
assert circles_after_all_deleted.len == 0, 'Expected 0 circles after all deletions, got ${circles_after_all_deleted.len}'
println('All tests passed successfully')
}

View File

@@ -0,0 +1,194 @@
module core
import freeflowuniverse.herolib.circles.models { DBHandler, SessionState }
import freeflowuniverse.herolib.circles.models.core { Name, Record, RecordType }
@[heap]
pub struct NameDB {
pub mut:
db DBHandler[Name]
}
pub fn new_namedb(session_state SessionState) !NameDB {
return NameDB{
db: models.new_dbhandler[Name]('name', session_state)
}
}
pub fn (mut m NameDB) new() Name {
return Name{}
}
// set adds or updates a name
pub fn (mut m NameDB) set(name Name) !Name {
return m.db.set(name)!
}
// get retrieves a name by its ID
pub fn (mut m NameDB) get(id u32) !Name {
return m.db.get(id)!
}
// list returns all name IDs
pub fn (mut m NameDB) list() ![]u32 {
return m.db.list()!
}
pub fn (mut m NameDB) getall() ![]Name {
return m.db.getall()!
}
// delete removes a name by its ID
pub fn (mut m NameDB) delete(id u32) ! {
m.db.delete(id)!
}
//////////////////CUSTOM METHODS//////////////////////////////////
// get_by_domain retrieves a name by its domain
pub fn (mut m NameDB) get_by_domain(domain string) !Name {
return m.db.get_by_key('domain', domain)!
}
// delete_by_domain removes a name by its domain
pub fn (mut m NameDB) delete_by_domain(domain string) ! {
// Get the name by domain
name := m.get_by_domain(domain) or {
// Name not found, nothing to delete
return
}
// Delete the name by ID
m.delete(name.id)!
}
// get_all_domains returns all domains
pub fn (mut m NameDB) get_all_domains() ![]string {
// Get all name IDs
name_ids := m.list()!
// Get domains for all names
mut domains := []string{}
for id in name_ids {
name := m.get(id) or { continue }
domains << name.domain
}
return domains
}
// add_record adds a record to a name
pub fn (mut m NameDB) add_record(domain string, record Record) !Name {
// Get the name by domain
mut name := m.get_by_domain(domain)!
// Check if record with same name and type already exists
for existing_record in name.records {
if existing_record.name == record.name && existing_record.category == record.category {
return error('Record with name ${record.name} and type ${record.category} already exists in domain ${domain}')
}
}
// Add the record
name.records << record
// Save the updated name
return m.set(name)!
}
// remove_record removes a record from a name by record name and type
pub fn (mut m NameDB) remove_record(domain string, record_name string, record_type RecordType) !Name {
// Get the name by domain
mut name := m.get_by_domain(domain)!
// Find and remove the record
mut found := false
mut new_records := []Record{}
for record in name.records {
if record.name == record_name && record.category == record_type {
found = true
continue
}
new_records << record
}
if !found {
return error('Record with name ${record_name} and type ${record_type} not found in domain ${domain}')
}
// Update the name records
name.records = new_records
// Save the updated name
return m.set(name)!
}
// update_record_text updates the text of a record
pub fn (mut m NameDB) update_record_text(domain string, record_name string, record_type RecordType, new_text string) !Name {
// Get the name by domain
mut name := m.get_by_domain(domain)!
// Find and update the record
mut found := false
for i, mut record in name.records {
if record.name == record_name && record.category == record_type {
name.records[i].text = new_text
found = true
break
}
}
if !found {
return error('Record with name ${record_name} and type ${record_type} not found in domain ${domain}')
}
// Save the updated name
return m.set(name)!
}
// add_admin adds an admin to a name
pub fn (mut m NameDB) add_admin(domain string, pubkey string) !Name {
// Get the name by domain
mut name := m.get_by_domain(domain)!
// Check if admin already exists
if pubkey in name.admins {
return error('Admin with pubkey ${pubkey} already exists in domain ${domain}')
}
// Add the admin
name.admins << pubkey
// Save the updated name
return m.set(name)!
}
// remove_admin removes an admin from a name
pub fn (mut m NameDB) remove_admin(domain string, pubkey string) !Name {
// Get the name by domain
mut name := m.get_by_domain(domain)!
// Find and remove the admin
mut found := false
mut new_admins := []string{}
for admin in name.admins {
if admin == pubkey {
found = true
continue
}
new_admins << admin
}
if !found {
return error('Admin with pubkey ${pubkey} not found in domain ${domain}')
}
// Update the name admins
name.admins = new_admins
// Save the updated name
return m.set(name)!
}

View File

@@ -0,0 +1,209 @@
module core
import os
import rand
import freeflowuniverse.herolib.circles.actionprocessor
import freeflowuniverse.herolib.circles.models.core
fn test_name_db() {
// Create a temporary directory for testing
test_dir := os.join_path(os.temp_dir(), 'hero_name_test_${rand.intn(9000) or { 0 } + 1000}')
os.mkdir_all(test_dir) or { panic(err) }
defer { os.rmdir_all(test_dir) or {} }
mut runner := actionprocessor.new(path: test_dir)!
// Create multiple names for testing
mut name1 := runner.names.new()
name1.domain = 'example.com'
name1.description = 'Example Domain'
name1.admins = ['admin1_pubkey']
mut name2 := runner.names.new()
name2.domain = 'test.org'
name2.description = 'Test Organization'
name2.admins = ['admin2_pubkey']
mut name3 := runner.names.new()
name3.domain = 'herolib.io'
name3.description = 'HeroLib Website'
name3.admins = ['admin3_pubkey']
// Create records for testing
mut record1 := core.Record{
name: 'www'
text: 'Web server'
category: .a
addr: ['192.168.1.1', '192.168.1.2']
}
mut record2 := core.Record{
name: 'mail'
text: 'Mail server'
category: .mx
addr: ['192.168.2.1']
}
// Add records to name1
name1.records << record1
name1.records << record2
// Add the names
println('Adding name 1')
name1 = runner.names.set(name1)!
// Explicitly set different IDs for each name to avoid overwriting
name2.id = 1 // Set a different ID for name2
println('Adding name 2')
name2 = runner.names.set(name2)!
name3.id = 2 // Set a different ID for name3
println('Adding name 3')
name3 = runner.names.set(name3)!
// Test list functionality
println('Testing list functionality')
// Get all names
all_names := runner.names.getall()!
println('Retrieved ${all_names.len} names')
for i, name in all_names {
println('Name ${i}: id=${name.id}, domain=${name.domain}')
}
assert all_names.len == 3, 'Expected 3 names, got ${all_names.len}'
// Verify all names are in the list
mut found1 := false
mut found2 := false
mut found3 := false
for name in all_names {
if name.domain == 'example.com' {
found1 = true
} else if name.domain == 'test.org' {
found2 = true
} else if name.domain == 'herolib.io' {
found3 = true
}
}
assert found1, 'Name 1 not found in list'
assert found2, 'Name 2 not found in list'
assert found3, 'Name 3 not found in list'
// Get and verify individual names
println('Verifying individual names')
retrieved_name1 := runner.names.get_by_domain('example.com')!
assert retrieved_name1.domain == name1.domain
assert retrieved_name1.description == name1.description
assert retrieved_name1.records.len == 2
assert retrieved_name1.records[0].name == 'www'
assert retrieved_name1.records[0].category == .a
assert retrieved_name1.records[1].name == 'mail'
assert retrieved_name1.records[1].category == .mx
assert retrieved_name1.admins.len == 1
assert retrieved_name1.admins[0] == 'admin1_pubkey'
// Test add_record method
println('Testing add_record method')
mut record3 := core.Record{
name: 'api'
text: 'API server'
category: .a
addr: ['192.168.3.1']
}
runner.names.add_record('test.org', record3)!
updated_name2 := runner.names.get_by_domain('test.org')!
assert updated_name2.records.len == 1
assert updated_name2.records[0].name == 'api'
assert updated_name2.records[0].category == .a
assert updated_name2.records[0].text == 'API server'
// Test update_record_text method
println('Testing update_record_text method')
runner.names.update_record_text('test.org', 'api', .a, 'Updated API server')!
text_updated_name2 := runner.names.get_by_domain('test.org')!
assert text_updated_name2.records[0].text == 'Updated API server'
// Test remove_record method
println('Testing remove_record method')
runner.names.remove_record('example.com', 'mail', .mx)!
record_removed_name1 := runner.names.get_by_domain('example.com')!
assert record_removed_name1.records.len == 1
assert record_removed_name1.records[0].name == 'www'
// Test add_admin method
println('Testing add_admin method')
runner.names.add_admin('example.com', 'new_admin_pubkey')!
admin_added_name1 := runner.names.get_by_domain('example.com')!
assert admin_added_name1.admins.len == 2
assert 'new_admin_pubkey' in admin_added_name1.admins
// Test remove_admin method
println('Testing remove_admin method')
runner.names.remove_admin('example.com', 'admin1_pubkey')!
admin_removed_name1 := runner.names.get_by_domain('example.com')!
assert admin_removed_name1.admins.len == 1
assert admin_removed_name1.admins[0] == 'new_admin_pubkey'
// Test get_all_domains method
println('Testing get_all_domains method')
domains := runner.names.get_all_domains()!
assert domains.len == 3
assert 'example.com' in domains
assert 'test.org' in domains
assert 'herolib.io' in domains
// Test delete functionality
println('Testing delete functionality')
// Delete name 2
runner.names.delete_by_domain('test.org')!
// Verify deletion with list
names_after_delete := runner.names.getall()!
assert names_after_delete.len == 2, 'Expected 2 names after deletion, got ${names_after_delete.len}'
// Verify the remaining names
mut found_after_delete1 := false
mut found_after_delete2 := false
mut found_after_delete3 := false
for name in names_after_delete {
if name.domain == 'example.com' {
found_after_delete1 = true
} else if name.domain == 'test.org' {
found_after_delete2 = true
} else if name.domain == 'herolib.io' {
found_after_delete3 = true
}
}
assert found_after_delete1, 'Name 1 not found after deletion'
assert !found_after_delete2, 'Name 2 found after deletion (should be deleted)'
assert found_after_delete3, 'Name 3 not found after deletion'
// Delete another name
println('Deleting another name')
runner.names.delete_by_domain('herolib.io')!
// Verify only one name remains
names_after_second_delete := runner.names.getall()!
assert names_after_second_delete.len == 1, 'Expected 1 name after second deletion, got ${names_after_second_delete.len}'
assert names_after_second_delete[0].domain == 'example.com', 'Remaining name should be example.com'
// Delete the last name
println('Deleting last name')
runner.names.delete_by_domain('example.com')!
// Verify no names remain
names_after_all_deleted := runner.names.getall() or {
// Expected to fail once no names remain; the exact error message depends on the handler
assert err.msg().contains('No index keys defined for this type') || err.msg().contains('No names found')
[]core.Name{cap: 0}
}
assert names_after_all_deleted.len == 0, 'Expected 0 names after all deletions, got ${names_after_all_deleted.len}'
println('All tests passed successfully')
}

View File

@@ -0,0 +1,367 @@
# HeroLib Job DBSession
This document explains the job management system in HeroLib, which is designed to coordinate distributed task execution across multiple agents.
## Core Components
### 1. Job System
The job system is the central component that manages tasks to be executed by agents. It consists of:
- **Job**: Represents a task to be executed by an agent. Each job has:
- A unique GUID
- Target agents (public keys of agents that can execute the job)
- Source (public key of the agent requesting the job)
- Circle and context (organizational structure)
- Actor and action (what needs to be executed)
- Parameters (data needed for execution)
- Timeout settings
- Status information
- Dependencies on other jobs
- **JobStatus**: Tracks the state of a job through its lifecycle:
- created → scheduled → planned → running → ok/error (see the state sketch after this list)
- **JobManager**: Handles CRUD operations for jobs, storing them in Redis under the `herorunner:jobs` key.
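A minimal sketch of that lifecycle, using illustrative names (`JobState`, `next_allowed`) rather than the actual herolib definitions:

```v
// Illustrative only: the lifecycle states listed above, plus a helper that
// encodes the allowed forward transitions. Not the real herolib JobStatus type.
enum JobState {
	created
	scheduled
	planned
	running
	ok
	error
}

// next_allowed returns the states a job may move to from its current state
fn next_allowed(s JobState) []JobState {
	return match s {
		.created { [JobState.scheduled] }
		.scheduled { [JobState.planned] }
		.planned { [JobState.running] }
		.running { [JobState.ok, JobState.error] }
		.ok, .error { []JobState{} }
	}
}
```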
### 2. Agent System
The agent system represents the entities that can execute jobs:
- **Agent**: Represents a service provider that can execute jobs. Each agent has:
- A public key (identifier)
- Network address and port
- Status information
- List of services it provides
- Cryptographic signature for verification
- **AgentService**: Represents a service provided by an agent, with:
- Actor name
- Available actions
- Status information
- **AgentManager**: Handles CRUD operations for agents, storing them in Redis under the `herorunner:agents` key.
### 3. Service System
The service system defines the capabilities available in the system:
- **Service**: Represents a capability that can be provided by agents. Each service has:
- Actor name
- Available actions
- Status information
- Optional access control list
- **ServiceAction**: Represents an action that can be performed by a service, with:
- Action name
- Parameters
- Optional access control list
- **ServiceManager**: Handles CRUD operations for services, storing them in Redis under the `herorunner:services` key.
### 4. Access Control System
The access control system manages permissions:
- **Circle**: Represents a collection of members (users or other circles)
- **ACL**: Access Control List containing multiple ACEs
- **ACE**: Access Control Entry defining permissions for users or circles (sketched after this list)
- **CircleManager**: Handles CRUD operations for circles, storing them in Redis under the `herorunner:circles` key.
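A minimal sketch of these access-control structures; the field names below are assumptions for illustration only and may differ from the real herolib model:

```v
// Illustrative only: not the actual herolib ACL/ACE definitions.
pub struct ACE {
pub mut:
	circles []string // GUIDs of circles this entry applies to
	users   []string // pubkeys of individual users this entry applies to
	right   string   // e.g. 'read', 'write', 'execute'
}

pub struct ACL {
pub mut:
	name string
	ace  []ACE // the individual entries making up this list
}
```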
### 5. HeroRunner
The `HeroRunner` is the main factory that brings all components together, providing a unified interface to the job management system.
## How It Works
1. **Job Creation and Scheduling**:
- A client creates a job with specific actor, action, and parameters
- The job is stored in Redis with status "created"
- The job can specify dependencies on other jobs
2. **Agent Registration**:
- Agents register themselves with their public key, address, and services
- Each agent provides a list of services (actors) and actions it can perform
- Agents periodically update their status
3. **Service Discovery**:
- Services define the capabilities available in the system
- Each service has a list of actions it can perform
- Services can have access control to restrict who can use them
4. **Job Execution**:
- The herorunner process monitors jobs in Redis
- When a job is ready (dependencies satisfied), it changes status to "scheduled"
- The herorunner forwards the job to an appropriate agent
- The agent changes job status to "planned", then "running", and finally "ok" or "error"
- If an agent fails, the herorunner can retry with another agent (a minimal sketch of this scheduling step follows the list)
5. **Access Control**:
- Users and circles are organized in a hierarchical structure
- ACLs define who can access which services and actions
- The service manager checks access permissions before allowing job execution
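The following is a self-contained sketch of the scheduling step from point 4, using simplified stand-in types rather than the actual herolib Job/HeroRunner structs: a job only moves from `created` to `scheduled` once every job it depends on has finished with status `ok`.

```v
// Simplified stand-ins for illustration; not the real herolib types.
enum State {
	created
	scheduled
	ok
	error
}

struct Job {
mut:
	guid         string
	state        State
	dependencies []string // guids of jobs that must be ok before this one runs
}

// schedule_ready marks every created job whose dependencies are all ok as scheduled
fn schedule_ready(mut jobs map[string]Job) {
	snapshot := jobs.clone()
	for guid, job in snapshot {
		if job.state != .created {
			continue
		}
		mut ready := true
		for dep in job.dependencies {
			dep_job := snapshot[dep]
			if dep_job.state != .ok {
				ready = false
				break
			}
		}
		if ready {
			mut j := jobs[guid]
			j.state = .scheduled // next step: forward the job to a matching agent
			jobs[guid] = j
		}
	}
}

fn main() {
	mut jobs := {
		'a': Job{
			guid:  'a'
			state: .ok
		}
		'b': Job{
			guid:         'b'
			dependencies: ['a']
		}
	}
	schedule_ready(mut jobs)
	println(jobs['b'].state) // scheduled
}
```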
## Data Storage
All data is stored in Redis using the following keys:
- `herorunner:jobs` - Hash map of job GUIDs to job JSON
- `herorunner:agents` - Hash map of agent public keys to agent JSON
- `herorunner:services` - Hash map of service actor names to service JSON
- `herorunner:circles` - Hash map of circle GUIDs to circle JSON
## Potential Issues
1. **Concurrency Management**:
- The current implementation doesn't have explicit locking mechanisms for concurrent access to Redis
- Race conditions could occur if multiple processes update the same job simultaneously
2. **Error Handling**:
- While there are error states, the error handling is minimal
- There's no robust mechanism for retrying failed jobs or handling partial failures
3. **Dependency Resolution**:
- The code for resolving job dependencies is not fully implemented
- It's unclear how circular dependencies would be handled
4. **Security Concerns**:
- While there's a signature field in the Agent struct, the verification process is not evident
- The ACL system is basic and might not handle complex permission scenarios
5. **Scalability**:
- All data is stored in Redis, which could become a bottleneck with a large number of jobs
- There's no apparent sharding or partitioning strategy
6. **Monitoring and Observability**:
- Limited mechanisms for monitoring the system's health
- No built-in logging or metrics collection
## Recommendations
1. Implement proper concurrency control using Redis transactions or locks
2. Enhance error handling with more detailed error states and recovery mechanisms
3. Develop a robust dependency resolution system with cycle detection
4. Strengthen security by implementing proper signature verification and enhancing the ACL system
5. Consider a more scalable storage solution for large deployments
6. Add comprehensive logging and monitoring capabilities
## Usage Example
```v
// Initialize the HeroRunner
mut hr := model.new()!
// Create a new job
mut job := hr.jobs.new()
job.guid = 'job-123'
job.actor = 'vm_manager'
job.action = 'start'
job.params['id'] = '10'
hr.jobs.set(job)!
// Register an agent
mut agent := hr.agents.new()
agent.pubkey = 'agent-456'
agent.address = '192.168.1.100'
agent.services << model.AgentService{
actor: 'vm_manager'
actions: [
model.AgentServiceAction{
action: 'start'
params: {'id': 'string'}
}
]
}
hr.agents.set(agent)!
// Define a service
mut service := hr.services.new()
service.actor = 'vm_manager'
service.actions << model.ServiceAction{
action: 'start'
params: {'id': 'string'}
}
hr.services.set(service)!
```
## Circle Management with HeroScript
You can use HeroScript to create and manage circles. Here's an example of how to create a circle and add members to it:
```heroscript
!!circle.create
name: 'development'
description: 'Development team circle'
!!circle.add_member
circle: 'development'
name: 'John Doe'
pubkey: 'user-123'
email: 'john@example.com'
role: 'admin'
description: 'Lead developer'
!!circle.add_member
circle: 'development'
name: 'Jane Smith'
pubkeys: 'user-456,user-789'
emails: 'jane@example.com,jsmith@company.com'
role: 'member'
description: 'Frontend developer'
```
To process this HeroScript in your V code:
```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.data.ourdb
import freeflowuniverse.herolib.data.radixtree
import freeflowuniverse.herolib.core.jobs.model
// Example HeroScript text
const heroscript_text = """
!!circle.create
name: 'development'
description: 'Development team circle'
!!circle.add_member
circle: 'development'
name: 'John Doe'
pubkey: 'user-123'
email: 'john@example.com'
role: 'admin'
description: 'Lead developer'
!!circle.add_member
circle: 'development'
name: 'Jane Smith'
pubkeys: 'user-456,user-789'
emails: 'jane@example.com,jsmith@company.com'
role: 'member'
description: 'Frontend developer'
"""
fn main() ! {
// Initialize database
mut db_data := ourdb.new(path: '/tmp/herorunner_data')!
mut db_meta := radixtree.new(path: '/tmp/herorunner_meta')!
// Create circle manager
mut circle_manager := model.new_circlemanager(db_data, db_meta)
// Parse the HeroScript
mut pb := playbook.new(text: heroscript_text)!
// Process the circle commands
model.play_circle(mut circle_manager, mut pb)!
// Check the results
circles := circle_manager.getall()!
println('Created ${circles.len} circles:')
for circle in circles {
println('Circle: ${circle.name} (ID: ${circle.id})')
println('Members: ${circle.members.len}')
for member in circle.members {
println(' - ${member.name} (${member.role})')
}
}
}
```
## Domain Name Management with HeroScript
You can use HeroScript to create and manage domain names and DNS records. Here's an example of how to create a domain and add various DNS records to it:
```heroscript
!!name.create
domain: 'example.org'
description: 'Example organization domain'
admins: 'admin1-pubkey,admin2-pubkey'
!!name.add_record
domain: 'example.org'
name: 'www'
type: 'a'
addrs: '192.168.1.1,192.168.1.2'
text: 'Web server'
!!name.add_record
domain: 'example.org'
name: 'mail'
type: 'mx'
addr: '192.168.1.10'
text: 'Mail server'
!!name.add_admin
domain: 'example.org'
pubkey: 'admin3-pubkey'
```
To process this HeroScript in your V code:
```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.data.ourdb
import freeflowuniverse.herolib.data.radixtree
import freeflowuniverse.herolib.core.jobs.model
// Example HeroScript text
const heroscript_text = """
!!name.create
domain: 'example.org'
description: 'Example organization domain'
admins: 'admin1-pubkey,admin2-pubkey'
!!name.add_record
domain: 'example.org'
name: 'www'
type: 'a'
addrs: '192.168.1.1,192.168.1.2'
text: 'Web server'
!!name.add_record
domain: 'example.org'
name: 'mail'
type: 'mx'
addr: '192.168.1.10'
text: 'Mail server'
!!name.add_admin
domain: 'example.org'
pubkey: 'admin3-pubkey'
"""
fn main() ! {
// Initialize database
mut db_data := ourdb.new(path: '/tmp/dns_data')!
mut db_meta := radixtree.new(path: '/tmp/dns_meta')!
// Create name manager
mut name_manager := model.new_namemanager(db_data, db_meta)
// Parse the HeroScript
mut pb := playbook.new(text: heroscript_text)!
// Process the name commands
model.play_name(mut name_manager, mut pb)!
// Check the results
names := name_manager.getall()!
println('Created ${names.len} domains:')
for name in names {
println('Domain: ${name.domain} (ID: ${name.id})')
println('Records: ${name.records.len}')
for record in name.records {
println(' - ${record.name}.${name.domain} (${record.category})')
println(' Addresses: ${record.addr}')
}
println('Admins: ${name.admins.len}')
for admin in name.admins {
println(' - ${admin}')
}
}
}
```

View File

@@ -0,0 +1,247 @@
module core
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.data.encoder
// Agent represents a service provider that can execute jobs
pub struct Agent {
pub mut:
id u32
pubkey string // pubkey using ed25519
address string // where we can find the agent
port u16 // default 9999
description string // optional
status AgentStatus
services []AgentService // these are the public services
signature string // signature as done by private key of $address+$port+$description+$status
}
@[params]
pub struct ServiceParams {
pub mut:
actor string
description string
}
// new_service creates a new AgentService for this agent
pub fn (mut self Agent) new_service(args ServiceParams) &AgentService {
mut service := AgentService{
actor: args.actor
description: args.description
status: .ok
public: true
}
self.services << service
return &self.services[self.services.len - 1]
}
@[params]
pub struct ActionParams {
pub mut:
action string
description string
}
// new_action creates a new AgentServiceAction for the specified service
pub fn (mut service AgentService) new_action(args ActionParams) &AgentServiceAction {
mut service_action := AgentServiceAction{
action: args.action
description: args.description
status: .ok
public: true
}
service.actions << service_action
return &service.actions[service.actions.len - 1]
}
// AgentStatus represents the current state of an agent
pub struct AgentStatus {
pub mut:
guid string // unique id for this agent status
timestamp_first ourtime.OurTime // when agent came online
timestamp_last ourtime.OurTime // last time the agent reported that it is still working
status AgentState // current state of the agent
}
// AgentService represents a service provided by an agent
pub struct AgentService {
pub mut:
actor string // name of the actor providing the service
actions []AgentServiceAction // available actions for this service
description string // optional description
status AgentServiceState // current state of the service
public bool // true if everyone can use it; if false, access is restricted to certain people
}
// AgentServiceAction represents an action that can be performed by a service
pub struct AgentServiceAction {
pub mut:
action string // which action
description string // optional description
params map[string]string // e.g. name:'name of the vm' ...
params_example map[string]string // e.g. name:'myvm'
status AgentServiceState // current state of the action
public bool // true if everyone can use it; if false, access is restricted to certain people
}
// AgentState represents the possible states of an agent
pub enum AgentState {
ok // agent is functioning normally
down // agent is not responding
error // agent encountered an error
halted // agent has been manually stopped
}
// AgentServiceState represents the possible states of an agent service or action
pub enum AgentServiceState {
ok // service/action is functioning normally
down // service/action is not available
error // service/action encountered an error
halted // service/action has been manually stopped
}
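// index_keys returns the secondary index keys maintained by the generic DBHandler for this type; this is what enables lookups such as get_by_pubkey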
pub fn (self Agent) index_keys() map[string]string {
return {"pubkey": self.pubkey}
}
// dumps serializes the Agent struct to binary format using the encoder
pub fn (self Agent) dumps() ![]u8 {
mut e := encoder.new()
// Add unique encoding ID to identify this type of data
e.add_u16(100)
// Encode Agent fields
e.add_string(self.pubkey)
e.add_string(self.address)
e.add_u16(self.port)
e.add_string(self.description)
// Encode AgentStatus
e.add_string(self.status.guid)
e.add_ourtime(self.status.timestamp_first)
e.add_ourtime(self.status.timestamp_last)
e.add_u8(u8(self.status.status))
// Encode services array
e.add_u16(u16(self.services.len))
for service in self.services {
// Encode AgentService fields
e.add_string(service.actor)
e.add_string(service.description)
e.add_u8(u8(service.status))
e.add_u8(u8(service.public))
// Encode actions array
e.add_u16(u16(service.actions.len))
for action in service.actions {
// Encode AgentServiceAction fields
e.add_string(action.action)
e.add_string(action.description)
e.add_u8(u8(action.status))
e.add_u8(u8(action.public))
// Encode params map
e.add_map_string(action.params)
// Encode params_example map
e.add_map_string(action.params_example)
}
}
// Encode signature
e.add_string(self.signature)
return e.data
}
// loads deserializes binary data into an Agent struct
pub fn agent_loads(data []u8) !Agent {
mut d := encoder.decoder_new(data)
// Check encoding ID to verify this is the correct type of data
encoding_id := d.get_u16()!
if encoding_id != 100 {
return error('Wrong file type: expected encoding ID 100, got ${encoding_id}, for agent')
}
mut self:=Agent{}
// Decode Agent fields
self.pubkey = d.get_string()!
self.address = d.get_string()!
self.port = d.get_u16()!
self.description = d.get_string()!
// Decode AgentStatus
self.status.guid = d.get_string()!
self.status.timestamp_first = d.get_ourtime()!
self.status.timestamp_last = d.get_ourtime()!
status_val := d.get_u8()!
self.status.status = match status_val {
0 { AgentState.ok }
1 { AgentState.down }
2 { AgentState.error }
3 { AgentState.halted }
else { return error('Invalid AgentState value: ${status_val}') }
}
// Decode services array
services_len := d.get_u16()!
self.services = []AgentService{len: int(services_len)}
for i in 0 .. services_len {
mut service := AgentService{}
// Decode AgentService fields
service.actor = d.get_string()!
service.description = d.get_string()!
service_status_val := d.get_u8()!
service.status = match service_status_val {
0 { AgentServiceState.ok }
1 { AgentServiceState.down }
2 { AgentServiceState.error }
3 { AgentServiceState.halted }
else { return error('Invalid AgentServiceState value: ${service_status_val}') }
}
service.public = d.get_u8()! == 1
// Decode actions array
actions_len := d.get_u16()!
service.actions = []AgentServiceAction{len: int(actions_len)}
for j in 0 .. actions_len {
mut action := AgentServiceAction{}
// Decode AgentServiceAction fields
action.action = d.get_string()!
action.description = d.get_string()!
action_status_val := d.get_u8()!
action.status = match action_status_val {
0 { AgentServiceState.ok }
1 { AgentServiceState.down }
2 { AgentServiceState.error }
3 { AgentServiceState.halted }
else { return error('Invalid AgentServiceState value: ${action_status_val}') }
}
action.public = d.get_u8()! == 1
// Decode params map
action.params = d.get_map_string()!
// Decode params_example map
action.params_example = d.get_map_string()!
service.actions[j] = action
}
self.services[i] = service
}
// Decode signature
self.signature = d.get_string()!
return self
}

View File

@@ -0,0 +1,324 @@
module core
import freeflowuniverse.herolib.data.ourtime
fn test_agent_dumps_loads() {
// Create a test agent with some sample data
mut agent := Agent{
pubkey: 'ed25519:1234567890abcdef'
address: '192.168.1.100'
port: 9999
description: 'Test agent for binary encoding'
status: AgentStatus{
guid: 'agent-123'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: AgentState.ok
}
signature: 'signature-data-here'
}
// Add a service
mut service := AgentService{
actor: 'vm'
description: 'Virtual machine management'
status: AgentServiceState.ok
public: true
}
// Add an action to the service
action := AgentServiceAction{
action: 'create'
description: 'Create a new virtual machine'
status: AgentServiceState.ok
public: true
params: {
'name': 'Name of the VM'
'memory': 'Memory in MB'
'cpu': 'Number of CPU cores'
}
params_example: {
'name': 'my-test-vm'
'memory': '2048'
'cpu': '2'
}
}
service.actions << action
// Add another action
action2 := AgentServiceAction{
action: 'delete'
description: 'Delete a virtual machine'
status: AgentServiceState.ok
public: false
params: {
'name': 'Name of the VM to delete'
}
params_example: {
'name': 'my-test-vm'
}
}
service.actions << action2
agent.services << service
// Test binary encoding
binary_data := agent.dumps() or {
assert false, 'Failed to encode agent: ${err}'
return
}
// Test binary decoding
decoded_agent := agent_loads(binary_data) or {
assert false, 'Failed to decode agent: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.description == agent.description
assert decoded_agent.signature == agent.signature
// Verify status
assert decoded_agent.status.guid == agent.status.guid
assert decoded_agent.status.status == agent.status.status
// Verify services
assert decoded_agent.services.len == agent.services.len
if decoded_agent.services.len > 0 {
service1 := decoded_agent.services[0]
original_service := agent.services[0]
assert service1.actor == original_service.actor
assert service1.description == original_service.description
assert service1.status == original_service.status
assert service1.public == original_service.public
// Verify actions
assert service1.actions.len == original_service.actions.len
if service1.actions.len > 0 {
action1 := service1.actions[0]
original_action := original_service.actions[0]
assert action1.action == original_action.action
assert action1.description == original_action.description
assert action1.status == original_action.status
assert action1.public == original_action.public
// Verify params
assert action1.params.len == original_action.params.len
for key, value in original_action.params {
assert key in action1.params
assert action1.params[key] == value
}
// Verify params_example
assert action1.params_example.len == original_action.params_example.len
for key, value in original_action.params_example {
assert key in action1.params_example
assert action1.params_example[key] == value
}
}
}
println('Agent binary encoding/decoding test passed successfully')
}
fn test_agent_complex_structure() {
// Create a more complex agent with multiple services and actions
mut agent := Agent{
pubkey: 'ed25519:complex-test-key'
address: '10.0.0.5'
port: 8080
description: 'Complex test agent'
status: AgentStatus{
guid: 'complex-agent-456'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: AgentState.ok
}
signature: 'complex-signature-data'
}
// Add first service - VM management
mut vm_service := AgentService{
actor: 'vm'
description: 'VM management service'
status: AgentServiceState.ok
public: true
}
// Add actions to VM service
vm_service.actions << AgentServiceAction{
action: 'create'
description: 'Create VM'
status: AgentServiceState.ok
public: true
params: {
'name': 'VM name'
'size': 'VM size'
}
params_example: {
'name': 'test-vm'
'size': 'medium'
}
}
vm_service.actions << AgentServiceAction{
action: 'start'
description: 'Start VM'
status: AgentServiceState.ok
public: true
params: {
'name': 'VM name'
}
params_example: {
'name': 'test-vm'
}
}
// Add second service - Storage management
mut storage_service := AgentService{
actor: 'storage'
description: 'Storage management service'
status: AgentServiceState.ok
public: false
}
// Add actions to storage service
storage_service.actions << AgentServiceAction{
action: 'create_volume'
description: 'Create storage volume'
status: AgentServiceState.ok
public: false
params: {
'name': 'Volume name'
'size': 'Volume size in GB'
}
params_example: {
'name': 'data-vol'
'size': '100'
}
}
storage_service.actions << AgentServiceAction{
action: 'attach_volume'
description: 'Attach volume to VM'
status: AgentServiceState.ok
public: false
params: {
'volume': 'Volume name'
'vm': 'VM name'
'mount_point': 'Mount point'
}
params_example: {
'volume': 'data-vol'
'vm': 'test-vm'
'mount_point': '/data'
}
}
// Add services to agent
agent.services << vm_service
agent.services << storage_service
// Test binary encoding
binary_data := agent.dumps() or {
assert false, 'Failed to encode complex agent: ${err}'
return
}
// Test binary decoding
decoded_agent := agent_loads(binary_data) or {
assert false, 'Failed to decode complex agent: ${err}'
return
}
// Verify the decoded data
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.services.len == agent.services.len
// Verify first service (VM)
if decoded_agent.services.len > 0 {
vm := decoded_agent.services[0]
assert vm.actor == 'vm'
assert vm.actions.len == 2
// Check VM create action
create_action := vm.actions[0]
assert create_action.action == 'create'
assert create_action.params.len == 2
assert create_action.params['name'] == 'VM name'
// Check VM start action
start_action := vm.actions[1]
assert start_action.action == 'start'
assert start_action.params.len == 1
}
// Verify second service (Storage)
if decoded_agent.services.len > 1 {
storage := decoded_agent.services[1]
assert storage.actor == 'storage'
assert storage.public == false
assert storage.actions.len == 2
// Check storage attach action
attach_action := storage.actions[1]
assert attach_action.action == 'attach_volume'
assert attach_action.params.len == 3
assert attach_action.params['mount_point'] == 'Mount point'
assert attach_action.params_example['mount_point'] == '/data'
}
println('Complex agent binary encoding/decoding test passed successfully')
}
fn test_agent_empty_structures() {
// Test with empty arrays and maps
mut agent := Agent{
pubkey: 'ed25519:empty-test'
address: '127.0.0.1'
port: 7777
description: ''
status: AgentStatus{
guid: 'empty-agent'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: AgentState.down
}
signature: ''
services: []
}
// Test binary encoding
binary_data := agent.dumps() or {
assert false, 'Failed to encode empty agent: ${err}'
return
}
// Test binary decoding
decoded_agent := agent_loads(binary_data) or {
assert false, 'Failed to decode empty agent: ${err}'
return
}
// Verify the decoded data
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.description == ''
assert decoded_agent.signature == ''
assert decoded_agent.services.len == 0
assert decoded_agent.status.status == AgentState.down
println('Empty agent binary encoding/decoding test passed successfully')
}

View File

@@ -0,0 +1,129 @@
module core
import freeflowuniverse.herolib.data.encoder
// Role represents the role of a member in a circle
pub enum Role {
admin
stakeholder
member
contributor
guest
}
// Member represents a member of a circle
pub struct Member {
pub mut:
pubkeys []string // public keys of the member
emails []string // list of emails
name string // name of the member
description string // optional description
role Role // role of the member in the circle
}
// Circle represents a collection of members (users or other circles)
pub struct Circle {
pub mut:
id u32 // unique id
name string // name of the circle
description string // optional description
members []Member // members of the circle
}
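// index_keys returns the secondary index keys maintained by the generic DBHandler for circles (used by get_by_name)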
pub fn (c Circle) index_keys() map[string]string {
return {"name": c.name}
}
// dumps serializes the Circle struct to binary format using the encoder
// This implements the Serializer interface
pub fn (c Circle) dumps() ![]u8 {
mut e := encoder.new()
// Add unique encoding ID to identify this type of data
e.add_u16(200)
// Encode Circle fields
e.add_u32(c.id)
e.add_string(c.name)
e.add_string(c.description)
// Encode members array
e.add_u16(u16(c.members.len))
for member in c.members {
// Encode Member fields
// Encode pubkeys array
e.add_u16(u16(member.pubkeys.len))
for pubkey in member.pubkeys {
e.add_string(pubkey)
}
// Encode emails array
e.add_u16(u16(member.emails.len))
for email in member.emails {
e.add_string(email)
}
e.add_string(member.name)
e.add_string(member.description)
e.add_u8(u8(member.role))
}
return e.data
}
// loads deserializes binary data into a Circle struct
pub fn circle_loads(data []u8) !Circle {
mut d := encoder.decoder_new(data)
mut circle := Circle{}
// Check encoding ID to verify this is the correct type of data
encoding_id := d.get_u16()!
if encoding_id != 200 {
return error('Wrong file type: expected encoding ID 200, got ${encoding_id}, for circle')
}
// Decode Circle fields
circle.id = d.get_u32()!
circle.name = d.get_string()!
circle.description = d.get_string()!
// Decode members array
members_len := d.get_u16()!
circle.members = []Member{len: int(members_len)}
for i in 0 .. members_len {
mut member := Member{}
// Decode Member fields
// Decode pubkeys array
pubkeys_len := d.get_u16()!
member.pubkeys = []string{len: int(pubkeys_len)}
for j in 0 .. pubkeys_len {
member.pubkeys[j] = d.get_string()!
}
// Decode emails array
emails_len := d.get_u16()!
member.emails = []string{len: int(emails_len)}
for j in 0 .. emails_len {
member.emails[j] = d.get_string()!
}
member.name = d.get_string()!
member.description = d.get_string()!
role_val := d.get_u8()!
member.role = match role_val {
0 { Role.admin }
1 { Role.stakeholder }
2 { Role.member }
3 { Role.contributor }
4 { Role.guest }
else { return error('Invalid Role value: ${role_val}') }
}
circle.members[i] = member
}
return circle
}

View File

@@ -0,0 +1,219 @@
module core
fn test_circle_dumps_loads() {
// Create a test circle with some sample data
mut circle := Circle{
id: 123
name: 'Test Circle'
description: 'A test circle for binary encoding'
}
// Add a member
mut member1 := Member{
pubkeys: ['user1-pubkey']
name: 'User One'
description: 'First test user'
role: .admin
emails: ['user1@example.com', 'user.one@example.org']
}
circle.members << member1
// Add another member
mut member2 := Member{
pubkeys: ['user2-pubkey']
name: 'User Two'
description: 'Second test user'
role: .member
emails: ['user2@example.com']
}
circle.members << member2
// Test binary encoding
binary_data := circle.dumps() or {
assert false, 'Failed to encode circle: ${err}'
return
}
// Test binary decoding
decoded_circle := circle_loads(binary_data) or {
assert false, 'Failed to decode circle: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_circle.id == circle.id
assert decoded_circle.name == circle.name
assert decoded_circle.description == circle.description
// Verify members
assert decoded_circle.members.len == circle.members.len
// Verify first member
assert decoded_circle.members[0].pubkeys.len == circle.members[0].pubkeys.len
assert decoded_circle.members[0].pubkeys[0] == circle.members[0].pubkeys[0]
assert decoded_circle.members[0].name == circle.members[0].name
assert decoded_circle.members[0].description == circle.members[0].description
assert decoded_circle.members[0].role == circle.members[0].role
assert decoded_circle.members[0].emails.len == circle.members[0].emails.len
assert decoded_circle.members[0].emails[0] == circle.members[0].emails[0]
assert decoded_circle.members[0].emails[1] == circle.members[0].emails[1]
// Verify second member
assert decoded_circle.members[1].pubkeys.len == circle.members[1].pubkeys.len
assert decoded_circle.members[1].pubkeys[0] == circle.members[1].pubkeys[0]
assert decoded_circle.members[1].name == circle.members[1].name
assert decoded_circle.members[1].description == circle.members[1].description
assert decoded_circle.members[1].role == circle.members[1].role
assert decoded_circle.members[1].emails.len == circle.members[1].emails.len
assert decoded_circle.members[1].emails[0] == circle.members[1].emails[0]
println('Circle binary encoding/decoding test passed successfully')
}
fn test_circle_complex_structure() {
// Create a more complex circle with multiple members of different roles
mut circle := Circle{
id: 456
name: 'Complex Test Circle'
description: 'A complex test circle with multiple members'
}
// Add admin member
circle.members << Member{
pubkeys: ['admin-pubkey']
name: 'Admin User'
description: 'Circle administrator'
role: .admin
emails: ['admin@example.com']
}
// Add stakeholder member
circle.members << Member{
pubkeys: ['stakeholder-pubkey']
name: 'Stakeholder User'
description: 'Circle stakeholder'
role: .stakeholder
emails: ['stakeholder@example.com', 'stakeholder@company.com']
}
// Add regular members
circle.members << Member{
pubkeys: ['member1-pubkey']
name: 'Regular Member 1'
description: 'First regular member'
role: .member
emails: ['member1@example.com']
}
circle.members << Member{
pubkeys: ['member2-pubkey']
name: 'Regular Member 2'
description: 'Second regular member'
role: .member
emails: ['member2@example.com']
}
// Add contributor
circle.members << Member{
pubkeys: ['contributor-pubkey']
name: 'Contributor'
description: 'Circle contributor'
role: .contributor
emails: ['contributor@example.com']
}
// Add guest
circle.members << Member{
pubkeys: ['guest-pubkey']
name: 'Guest User'
description: 'Circle guest'
role: .guest
emails: ['guest@example.com']
}
// Test binary encoding
binary_data := circle.dumps() or {
assert false, 'Failed to encode complex circle: ${err}'
return
}
// Test binary decoding
decoded_circle := circle_loads(binary_data) or {
assert false, 'Failed to decode complex circle: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_circle.id == circle.id
assert decoded_circle.name == circle.name
assert decoded_circle.description == circle.description
assert decoded_circle.members.len == circle.members.len
// Verify each member type is correctly encoded/decoded
mut role_counts := {
Role.admin: 0
Role.stakeholder: 0
Role.member: 0
Role.contributor: 0
Role.guest: 0
}
for member in decoded_circle.members {
role_counts[member.role]++
}
assert role_counts[Role.admin] == 1
assert role_counts[Role.stakeholder] == 1
assert role_counts[Role.member] == 2
assert role_counts[Role.contributor] == 1
assert role_counts[Role.guest] == 1
// Verify specific members by pubkeys
for i, member in circle.members {
decoded_member := decoded_circle.members[i]
assert decoded_member.pubkeys.len == member.pubkeys.len
assert decoded_member.pubkeys[0] == member.pubkeys[0]
assert decoded_member.name == member.name
assert decoded_member.description == member.description
assert decoded_member.role == member.role
assert decoded_member.emails.len == member.emails.len
for j, email in member.emails {
assert decoded_member.emails[j] == email
}
}
println('Complex circle binary encoding/decoding test passed successfully')
}
fn test_circle_empty_members() {
// Test a circle with no members
circle := Circle{
id: 789
name: 'Empty Circle'
description: 'A circle with no members'
members: []
}
// Test binary encoding
binary_data := circle.dumps() or {
assert false, 'Failed to encode empty circle: ${err}'
return
}
// Test binary decoding
decoded_circle := circle_loads(binary_data) or {
assert false, 'Failed to decode empty circle: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_circle.id == circle.id
assert decoded_circle.name == circle.name
assert decoded_circle.description == circle.description
assert decoded_circle.members.len == 0
println('Empty circle binary encoding/decoding test passed successfully')
}

View File

@@ -0,0 +1,138 @@
module core
import freeflowuniverse.herolib.data.encoder
// RecordType represents the type of a DNS record
pub enum RecordType {
a
aaaa
cname
mx
ns
ptr
soa
srv
txt
}
// Record represents a single DNS record
pub struct Record {
pub mut:
name string // name of the record
text string
category RecordType // type of the DNS record
addr []string // the IP addresses for this record
}
// Name represents a domain name together with its DNS records
pub struct Name {
pub mut:
id u32 // unique id
domain string
description string // optional description
records []Record // DNS records for this domain
admins []string // pubkeys that are allowed to change this entry
}
pub fn (n Name) index_keys() map[string]string {
return {"domain": n.domain}
}
// dumps serializes the Name struct to binary format using the encoder
// This implements the Serializer interface
pub fn (n Name) dumps() ![]u8 {
mut e := encoder.new()
// Add unique encoding ID to identify this type of data
e.add_u16(300)
// Encode Name fields
e.add_u32(n.id)
e.add_string(n.domain)
e.add_string(n.description)
// Encode records array
e.add_u16(u16(n.records.len))
for record in n.records {
// Encode Record fields
e.add_string(record.name)
e.add_string(record.text)
e.add_u8(u8(record.category))
// Encode addresses array
e.add_u16(u16(record.addr.len))
for addr in record.addr {
e.add_string(addr)
}
}
// Encode admins array
e.add_u16(u16(n.admins.len))
for admin in n.admins {
e.add_string(admin)
}
return e.data
}
// loads deserializes binary data into a Name struct
pub fn name_loads(data []u8) !Name {
mut d := encoder.decoder_new(data)
mut name := Name{}
// Check encoding ID to verify this is the correct type of data
encoding_id := d.get_u16()!
if encoding_id != 300 {
return error('Wrong file type: expected encoding ID 300, got ${encoding_id}, for name')
}
// Decode Name fields
name.id = d.get_u32()!
name.domain = d.get_string()!
name.description = d.get_string()!
// Decode records array
records_len := d.get_u16()!
name.records = []Record{len: int(records_len)}
for i in 0 .. records_len {
mut record := Record{}
// Decode Record fields
record.name = d.get_string()!
record.text = d.get_string()!
category_val := d.get_u8()!
record.category = match category_val {
0 { RecordType.a }
1 { RecordType.aaaa }
2 { RecordType.cname }
3 { RecordType.mx }
4 { RecordType.ns }
5 { RecordType.ptr }
6 { RecordType.soa }
7 { RecordType.srv }
8 { RecordType.txt }
else { return error('Invalid RecordType value: ${category_val}') }
}
// Decode addr array
addr_len := d.get_u16()!
record.addr = []string{len: int(addr_len)}
for j in 0 .. addr_len {
record.addr[j] = d.get_string()!
}
name.records[i] = record
}
// Decode admins array
admins_len := d.get_u16()!
name.admins = []string{len: int(admins_len)}
for i in 0 .. admins_len {
name.admins[i] = d.get_string()!
}
return name
}

View File

@@ -0,0 +1,234 @@
module core
import freeflowuniverse.herolib.data.ourdb
import freeflowuniverse.herolib.data.radixtree
import freeflowuniverse.herolib.core.playbook
fn test_name_dumps_loads() {
// Create a test name with some sample data
mut name := Name{
id: 123
domain: 'example.com'
description: 'A test domain for binary encoding'
}
// Add a record
mut record1 := Record{
name: 'www'
text: 'Website'
category: .a
addr: ['192.168.1.1', '192.168.1.2']
}
name.records << record1
// Add another record
mut record2 := Record{
name: 'mail'
text: 'Mail server'
category: .mx
addr: ['192.168.1.10']
}
name.records << record2
// Add admins
name.admins << 'admin1-pubkey'
name.admins << 'admin2-pubkey'
// Test binary encoding
binary_data := name.dumps() or {
assert false, 'Failed to encode name: ${err}'
return
}
// Test binary decoding
decoded_name := name_loads(binary_data) or {
assert false, 'Failed to decode name: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_name.id == name.id
assert decoded_name.domain == name.domain
assert decoded_name.description == name.description
// Verify records
assert decoded_name.records.len == name.records.len
// Verify first record
assert decoded_name.records[0].name == name.records[0].name
assert decoded_name.records[0].text == name.records[0].text
assert decoded_name.records[0].category == name.records[0].category
assert decoded_name.records[0].addr.len == name.records[0].addr.len
assert decoded_name.records[0].addr[0] == name.records[0].addr[0]
assert decoded_name.records[0].addr[1] == name.records[0].addr[1]
// Verify second record
assert decoded_name.records[1].name == name.records[1].name
assert decoded_name.records[1].text == name.records[1].text
assert decoded_name.records[1].category == name.records[1].category
assert decoded_name.records[1].addr.len == name.records[1].addr.len
assert decoded_name.records[1].addr[0] == name.records[1].addr[0]
// Verify admins
assert decoded_name.admins.len == name.admins.len
assert decoded_name.admins[0] == name.admins[0]
assert decoded_name.admins[1] == name.admins[1]
println('Name binary encoding/decoding test passed successfully')
}
fn test_name_complex_structure() {
// Create a more complex name with multiple records of different types
mut name := Name{
id: 456
domain: 'complex-example.org'
description: 'A complex test domain with multiple records'
}
// Add A record
name.records << Record{
name: 'www'
text: 'Web server'
category: .a
addr: ['203.0.113.1']
}
// Add AAAA record
name.records << Record{
name: 'ipv6'
text: 'IPv6 server'
category: .aaaa
addr: ['2001:db8::1']
}
// Add CNAME record
name.records << Record{
name: 'alias'
text: 'Alias record'
category: .cname
addr: ['www.complex-example.org']
}
// Add MX record
name.records << Record{
name: 'mail'
text: 'Mail server'
category: .mx
addr: ['mail.complex-example.org']
}
// Add NS record
name.records << Record{
name: 'ns1'
text: 'Name server 1'
category: .ns
addr: ['ns1.complex-example.org']
}
// Add TXT record
name.records << Record{
name: 'txt'
text: 'SPF record'
category: .txt
addr: ['v=spf1 include:_spf.complex-example.org ~all']
}
// Add admins
name.admins << 'admin-pubkey'
name.admins << 'backup-admin-pubkey'
// Test binary encoding
binary_data := name.dumps() or {
assert false, 'Failed to encode complex name: ${err}'
return
}
// Test binary decoding
decoded_name := name_loads(binary_data) or {
assert false, 'Failed to decode complex name: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_name.id == name.id
assert decoded_name.domain == name.domain
assert decoded_name.description == name.description
assert decoded_name.records.len == name.records.len
assert decoded_name.admins.len == name.admins.len
// Verify each record type is correctly encoded/decoded
mut record_types := {
RecordType.a: 0
RecordType.aaaa: 0
RecordType.cname: 0
RecordType.mx: 0
RecordType.ns: 0
RecordType.txt: 0
}
for record in decoded_name.records {
record_types[record.category]++
}
assert record_types[RecordType.a] == 1
assert record_types[RecordType.aaaa] == 1
assert record_types[RecordType.cname] == 1
assert record_types[RecordType.mx] == 1
assert record_types[RecordType.ns] == 1
assert record_types[RecordType.txt] == 1
// Verify specific records by name
for i, record in name.records {
decoded_record := decoded_name.records[i]
assert decoded_record.name == record.name
assert decoded_record.text == record.text
assert decoded_record.category == record.category
assert decoded_record.addr.len == record.addr.len
for j, addr in record.addr {
assert decoded_record.addr[j] == addr
}
}
// Verify admins
for i, admin in name.admins {
assert decoded_name.admins[i] == admin
}
println('Complex name binary encoding/decoding test passed successfully')
}
fn test_name_empty_records() {
// Test a name with no records
name := Name{
id: 789
domain: 'empty.example.net'
description: 'A domain with no records'
records: []
admins: ['admin-pubkey']
}
// Test binary encoding
binary_data := name.dumps() or {
assert false, 'Failed to encode empty name: ${err}'
return
}
// Test binary decoding
decoded_name := name_loads(binary_data) or {
assert false, 'Failed to decode empty name: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_name.id == name.id
assert decoded_name.domain == name.domain
assert decoded_name.description == name.description
assert decoded_name.records.len == 0
assert decoded_name.admins.len == 1
assert decoded_name.admins[0] == name.admins[0]
println('Empty records name binary encoding/decoding test passed successfully')
}

View File

@@ -0,0 +1,182 @@
module models
import freeflowuniverse.herolib.circles.models.core { agent_loads, Agent, circle_loads, Circle, name_loads, Name }
pub struct DBHandler[T] {
pub mut:
prefix string
session_state SessionState
}
// new_dbhandler creates a new DBHandler for type T
pub fn new_dbhandler[T](prefix string, session_state SessionState) DBHandler[T] {
return DBHandler[T]{
prefix: prefix
session_state: session_state
}
}
// set adds or updates an item
pub fn (mut m DBHandler[T]) set(item_ T) !T {
mut item := item_
// Store the item data in the database and get the assigned ID
item.id = m.session_state.dbs.db_data_core.set(data: item.dumps()!)!
// Update index keys
for key, value in m.index_keys(item)! {
index_key := '${m.prefix}:${key}:${value}'
m.session_state.dbs.db_meta_core.set(index_key, item.id.str().bytes())!
}
return item
}
// get retrieves an item by its ID
pub fn (mut m DBHandler[T]) get(id u32) !T {
// Get the item data from the database
item_data := m.session_state.dbs.db_data_core.get(id) or {
return error('Item data not found for ID ${id}')
}
//THIS IS SUPER ANNOYING AND NOT NICE
$if T is Agent {
mut o:= agent_loads(item_data)!
o.id = id
return o
} $else $if T is Circle {
mut o:= circle_loads(item_data)!
o.id = id
return o
} $else $if T is Name {
mut o:= name_loads(item_data)!
o.id = id
return o
} $else {
return error('Unsupported type for deserialization')
}
panic("bug")
}
pub fn (mut m DBHandler[T]) exists(id u32) !bool {
item_data := m.session_state.dbs.db_data_core.get(id) or { return false }
return item_data != []u8{}
}
// get_by_key retrieves an item by a specific key field and value
pub fn (mut m DBHandler[T]) get_by_key(key_field string, key_value string) !T {
// Create the key for the radix tree
key := '${m.prefix}:${key_field}:${key_value}'
// Get the ID from the radix tree
id_bytes := m.session_state.dbs.db_meta_core.get(key) or {
return error('Item with ${key_field}=${key_value} not found')
}
// Convert the ID bytes to u32
id_str := id_bytes.bytestr()
id := id_str.u32()
// Get the item using the ID
return m.get(id)
}
// delete removes an item by its ID
pub fn (mut m DBHandler[T]) delete(id u32) ! {
exists := m.exists(id)!
if !exists {
return
}
// Get the item before deleting it to remove index keys
item := m.get(id)!
for key, value in m.index_keys(item)! {
index_key := '${m.prefix}:${key}:${value}'
m.session_state.dbs.db_meta_core.delete(index_key)!
}
// Delete the item data from the database
m.session_state.dbs.db_data_core.delete(id)!
}
//internal function to always have at least one index key, the default is id
fn (mut m DBHandler[T]) index_keys(item T) !map[string]string {
mut keymap := item.index_keys()
if keymap.len == 0 {
keymap['id'] = item.id.str()
}
return keymap
}
// list returns all ids from the db handler
pub fn (mut m DBHandler[T]) list() ![]u32 {
// Use the RadixTree's prefix capabilities to list all items
mut empty_item := T{}
mut keys_map := m.index_keys(empty_item)!
if keys_map.len == 0 {
return error('No index keys defined for this type')
}
// Get the first key from the map
mut default_key := ''
for k, _ in keys_map {
default_key = k
break
}
// Get all IDs from the meta database
id_bytes := m.session_state.dbs.db_meta_core.getall('${m.prefix}:${default_key}')!
// Convert bytes to u32 IDs
mut result := []u32{}
for id_byte in id_bytes {
id_str := id_byte.bytestr()
result << id_str.u32()
}
return result
}
pub fn (mut m DBHandler[T]) getall() ![]T {
mut items := []T{}
for id in m.list()! {
items << m.get(id)!
}
return items
}
// list_by_prefix returns all items that match a specific prefix pattern
pub fn (mut m DBHandler[T]) list_by_prefix(key_field string, prefix_value string) ![]u32 {
// Create the prefix for the radix tree
prefix := '${m.prefix}:${key_field}:${prefix_value}'
// Use RadixTree's list method to get all keys with this prefix
keys := m.session_state.dbs.db_meta_core.list(prefix)!
// Extract IDs from the values stored in these keys
mut ids := []u32{}
for key in keys {
if id_bytes := m.session_state.dbs.db_meta_core.get(key) {
id_str := id_bytes.bytestr()
if id_str.len > 0 {
ids << id_str.u32()
}
}
}
return ids
}
// getall_by_prefix returns all items that match a specific prefix pattern
pub fn (mut m DBHandler[T]) getall_by_prefix(key_field string, prefix_value string) ![]T {
mut items := []T{}
for id in m.list_by_prefix(key_field, prefix_value)! {
items << m.get(id)!
}
return items
}
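// Illustrative usage sketch (not part of the original commit): how a DBHandler
// for the Name type imported above could be used once a session exists.
// The session name 'example' and the prefix 'name' are arbitrary choices.
fn example_name_handler() ! {
	session := new_session(name: 'example')!
	mut names := new_dbhandler[Name]('name', session)
	mut n := Name{
		domain: 'example.com'
		description: 'illustrative record'
	}
	// set stores the binary dump and writes the index key 'name:domain:example.com'
	n = names.set(n)!
	// get_by_key resolves that index key back to the id and loads the item
	loaded := names.get_by_key('domain', 'example.com')!
	assert loaded.id == n.id
}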

View File

@@ -30,8 +30,8 @@ pub mut:
// ACE represents an access control entry
pub struct ACE {
pub mut:
groups []string // guid's of the groups who have access
users []string // in case groups are not used then is users
circles []string // guid's of the circles who have access
users []string // in case circles are not used then is users
right string // e.g. read, write, admin, block
}

View File

@@ -0,0 +1,76 @@
module models
import freeflowuniverse.herolib.data.ourdb
import freeflowuniverse.herolib.data.radixtree
import freeflowuniverse.herolib.core.texttools
import os
// SessionState holds the state of a session: who is calling it and the DBs we use
pub struct SessionState {
pub mut:
name string
pubkey string // pubkey of user who called this
addr string //mycelium address
dbs Databases
}
pub struct Databases{
pub mut:
db_data_core &ourdb.OurDB
db_meta_core &radixtree.RadixTree
db_data_mcc &ourdb.OurDB
db_meta_mcc &radixtree.RadixTree
}
@[params]
pub struct StateArgs {
pub mut:
name string
pubkey string // pubkey of user who called this
addr string //mycelium address
path string
}
pub fn new_session(args_ StateArgs) !SessionState {
mut args:=args_
args.name = texttools.name_fix(args.name)
if args.path.len == 0 {
args.path = os.join_path(os.home_dir(), 'hero', 'dbs')
}
mypath:=os.join_path(args.path, args.name)
mut db_data_core := ourdb.new(
path: os.join_path(mypath, 'data_core')
incremental_mode: true
)!
mut db_meta_core := radixtree.new(
path: os.join_path(mypath, 'meta_core')
)!
mut db_data_mcc := ourdb.new(
path: os.join_path(mypath, 'data_mcc')
incremental_mode: false
)!
mut db_meta_mcc := radixtree.new(
path: os.join_path(mypath, 'meta_mcc')
)!
mut dbs := Databases{
db_data_core: &db_data_core
db_meta_core: &db_meta_core
db_data_mcc: &db_data_mcc
db_meta_mcc: &db_meta_mcc
}
mut s := SessionState{
name: args.name
dbs: dbs
pubkey: args.pubkey
addr: args.addr
}
return s
}
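// Illustrative sketch (not part of the original commit): opening a session.
// With the default path the databases end up under ~/hero/dbs/<name>/ as
// data_core, meta_core, data_mcc and meta_mcc; the name 'demo' is arbitrary.
fn example_new_session() ! {
	s := new_session(name: 'demo', pubkey: 'demo-pubkey')!
	println('session ${s.name} ready, dbs stored under ~/hero/dbs/${s.name}')
}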

View File

@@ -8,7 +8,7 @@ To get started
import freeflowuniverse.crystallib.clients. runpod
import freeflowuniverse.herolib.clients. runpod
mut client:= runpod.get()!

View File

@@ -0,0 +1,69 @@
module installer_client
import freeflowuniverse.herolib.ui.console
import os
import freeflowuniverse.herolib.core.pathlib
// will ask questions & create the .heroscript
pub fn ask(path string) ! {
mut myconsole := console.new()
mut model := gen_model_get(path, false)!
console.clear()
console.print_header('Configure generation of code for a module on path:')
console.print_green('Path: ${path}')
console.lf()
model.classname = myconsole.ask_question(
description: 'Class name of the ${model.cat}'
question: 'What is the class name of the generator e.g. MyClass ?'
warning: 'Please provide a valid class name for the generator'
default: model.classname
minlen: 4
)!
model.title = myconsole.ask_question(
description: 'Title of the ${model.cat} (optional)'
default: model.title
)!
model.hasconfig = myconsole.ask_yesno(
description: 'Is there a config (normally yes)?'
default: model.hasconfig
)!
if model.hasconfig {
model.singleton = !myconsole.ask_yesno(
description: 'Can there be multiple instances (normally yes)?'
default: !model.singleton
)!
if model.cat == .installer {
model.templates = myconsole.ask_yesno(
description: 'Will there be templates available for your installer?'
default: model.templates
)!
}
} else {
model.singleton = true
}
if model.cat == .installer {
model.startupmanager = myconsole.ask_yesno(
description: 'Is this an installer which will be managed by a startup manager?'
default: model.startupmanager
)!
model.build = myconsole.ask_yesno(
description: 'Are there builders for the installers (compilation)'
default: model.build
)!
}
// if true{
// println(model)
// panic("Sdsd")
// }
gen_model_set(GenerateArgs{ model: model, path: path })!
}

View File

@@ -0,0 +1,88 @@
module installer_client
import freeflowuniverse.herolib.ui.console
import os
@[params]
pub struct GenerateArgs {
pub mut:
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
path string
playonly bool
model ?GenModel
cat ?Cat
}
pub struct PlayArgs {
pub mut:
name string
modulepath string
}
// the default to start with
//
// reset bool // regenerate all, dangerous !!!
// interactive bool //if we want to ask
// path string
// model ?GenModel
// cat ?Cat
//
// will return the module path where we need to execute a play command as well as the play name
pub fn do(args_ GenerateArgs) ! PlayArgs{
mut args := args_
console.print_header('Generate code for path: ${args.path} (reset:${args.reset}, interactive:${args.interactive})')
mut create := true // to create .heroscript
mut model := args.model or {
create = false // we cannot create because model not given
if args.path == '' {
args.path = os.getwd()
}
mut m := gen_model_get(args.path, false)!
m
}
if model.classname == '' {
args.interactive = true
}
if create {
if args.path == '' {
return error('need to specify path for ${args_} because we asked to create .heroscript')
}
gen_model_set(args)! // persist it on disk
} else {
if args.path == '' {
args.path = os.getwd()
}
}
// if model.cat == .unknown {
// model.cat = args.cat or { return error('cat needs to be specified for generator.') }
// }
if args.interactive {
ask(args.path)!
args.model = gen_model_get(args.path, false)!
} else {
args.model = model
}
console.print_debug(args)
//only generate if playonly is false and there is a classname
if !args.playonly && model.classname.len>0{
generate(args)!
}
return PlayArgs{
name: model.play_name
modulepath: model.module_path
}
}
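// Illustrative sketch (not part of the original commit): calling do() directly
// from a script instead of the hero CLI; the current directory is just an example path.
fn example_do() ! {
	play := do(path: os.getwd(), interactive: false, playonly: true)!
	console.print_debug('play name: ${play.name} module: ${play.modulepath}')
}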

View File

@@ -0,0 +1,77 @@
module installer_client
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.pathlib
// generate based on filled in args, ask has to be done before
fn generate(args GenerateArgs) ! {
console.print_debug('generate code for path: ${args.path}')
// as used in the templates
model := args.model or { panic('bug no model specified in generate') }
mut path_actions := pathlib.get(args.path + '/${model.name}_actions.v')
if args.reset {
path_actions.delete()!
}
if !path_actions.exists() && model.cat == .installer {
console.print_debug('write installer actions')
mut templ_1 := $tmpl('templates/objname_actions.vtemplate')
pathlib.template_write(templ_1, '${args.path}/${model.name}_actions.v', true)!
}
mut templ_2 := $tmpl('templates/objname_factory_.vtemplate')
pathlib.template_write(templ_2, '${args.path}/${model.name}_factory_.v', true)!
mut path_model := pathlib.get(args.path + '/${model.name}_model.v')
if args.reset || !path_model.exists() {
console.print_debug('write model.')
mut templ_3 := $tmpl('templates/objname_model.vtemplate')
pathlib.template_write(templ_3, '${args.path}/${model.name}_model.v', true)!
}
// TODO: check case sensitivity for delete
mut path_readme := pathlib.get(args.path + '/readme.md')
if args.reset || !path_readme.exists() {
mut templ_readme := $tmpl('templates/readme.md')
pathlib.template_write(templ_readme, '${args.path}/readme.md', true)!
}
mut path_templ_dir := pathlib.get_dir(path: args.path + '/templates', create: false)!
if args.reset {
path_templ_dir.delete()!
}
if model.templates {
if !path_templ_dir.exists() {
mut templ_6 := $tmpl('templates/atemplate.yaml')
pathlib.template_write(templ_6, '${args.path}/templates/atemplate.yaml', true)!
}
}
}
// fn platform_check(args GenModel) ! {
// ok := 'osx,ubuntu,arch'
// ok2 := ok.split(',')
// for i in args.supported_platforms {
// if i !in ok2 {
// return error('cannot find ${i} in choices for supported_platforms. Valid ones are ${ok}')
// }
// }
// }
// pub fn (args GenModel) platform_check_str() string {
// mut out := ''
// if 'osx' in args.supported_platforms {
// out += 'myplatform == .osx || '
// }
// if 'ubuntu' in args.supported_platforms {
// out += 'myplatform == .ubuntu ||'
// }
// if 'arch' in args.supported_platforms {
// out += 'myplatform == .arch ||'
// }
// out = out.trim_right('|')
// return out
// }

View File

@@ -0,0 +1,138 @@
module installer_client
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
pub struct GenModel {
pub mut:
name string
classname string
default bool = true // means user can just get the object and a default will be created
title string
// supported_platforms []string // only relevant for installers for now
singleton bool // means there can only be one
templates bool // means we will use templates in the installer, a client doesn't do this
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
startupmanager bool = true
build bool = true
hasconfig bool = true
cat Cat // don't set a default
play_name string // e.g. docusaurus is what we look for
module_path string // e.g.freeflowuniverse.herolib.web.docusaurus
}
pub enum Cat {
unknown
client
installer
}
// creates the heroscript from the GenModel as part of GenerateArgs
pub fn gen_model_set(args GenerateArgs) ! {
console.print_debug('Code generator set: ${args}')
model := args.model or { return error('model is none') }
heroscript_templ := match model.cat {
.client { $tmpl('templates/heroscript_client') }
.installer { $tmpl('templates/heroscript_installer') }
else { return error('Invalid category: ${model.cat}') }
}
pathlib.template_write(heroscript_templ, '${args.path}/.heroscript', true)!
}
// loads the heroscript and return the model
pub fn gen_model_get(path string, create bool) !GenModel {
console.print_debug('play installer code for path: ${path}')
mut config_path := pathlib.get_file(path: '${path}/.heroscript', create: create)!
mut plbook := playbook.new(text: config_path.read()!)!
mut model := GenModel{}
mut found := false
mut install_actions := plbook.find(filter: 'hero_code.generate_installer')!
if install_actions.len > 0 {
for install_action in install_actions {
if found {
return error('cannot have more than one hero_code.generate_installer ... in ${path}')
}
found = true
mut p := install_action.params
model = GenModel{
name: p.get_default('name', '')!
classname: p.get_default('classname', '')!
title: p.get_default('title', '')!
default: p.get_default_true('default')
// supported_platforms: p.get_list('supported_platforms')!
singleton: p.get_default_false('singleton')
templates: p.get_default_false('templates')
startupmanager: p.get_default_true('startupmanager')
build: p.get_default_true('build')
hasconfig: p.get_default_true('hasconfig')
cat: .installer
}
}
}
mut client_actions := plbook.find(filter: 'hero_code.generate_client')!
if client_actions.len > 0 {
for client_action in client_actions {
if found {
return error('cannot have more than one hero_code.generate_client ... in ${path}')
}
found = true
mut p := client_action.params
model = GenModel{
name: p.get_default('name', '')!
classname: p.get_default('classname', '')!
title: p.get_default('title', '')!
default: p.get_default_true('default')
singleton: p.get_default_false('singleton')
hasconfig: p.get_default_true('hasconfig')
cat: .client
}
}
}
if model.cat == .unknown {
if path.contains('clients') {
model.cat = .client
} else {
model.cat = .installer
}
}
if model.name == '' {
model.name = os.base(path).to_lower()
}
model.play_name = model.name
pathsub:=path.replace('${os.home_dir()}/code/github/','')
model.module_path = pathsub.replace("/",".").replace(".lib.",".")
// !!hero_code.play
// name:'docusaurus'
mut play_actions := plbook.find(filter: 'hero_code.play')!
if play_actions.len>1{
return error("should have max 1 hero_code.play action in ${config_path.path}")
}
if play_actions.len==1{
mut p := play_actions[0].params
model.play_name = p.get_default('name',model.name)!
}
return model
}

View File

@@ -0,0 +1,71 @@
# generation framework for clients & installers
```bash
#generate all play commands
hero generate -playonly
#will ask questions if .heroscript is not there yet
hero generate -p thepath_is_optional
# to generate without questions
hero generate -p thepath_is_optional -t client
#if installer, default is a client
hero generate -p thepath_is_optional -t installer
#when you want to scan over multiple directories
hero generate -p thepath_is_optional -t installer -s
```
There will be a ```.heroscript``` in the directory you want to generate for; the format is as follows:
```hero
//for a server
!!hero_code.generate_installer
name:'daguserver'
classname:'DaguServer'
singleton:1 //there can only be 1 object in the globals, is called 'default'
templates:1 //are there templates for the installer
title:''
startupmanager:1 //managed by a startup manager, default true
build:1 //will we also build the component
//or for a client
!!hero_code.generate_client
name:'mail'
classname:'MailClient'
singleton:0 //default is 0
```
This needs to be put as .heroscript in the directories for which we want to generate.
## template remarks
In templates:
- `^^` or `@@` gets replaced with `@`
- `??` gets replaced with `$`
This is to make the distinction between processing at compile time (pre-compile) and at runtime.
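For example (illustrative), a line in a `.vtemplate` such as:

```v
^^[params]
url := 'http://127.0.0.1:??{cfg.port}/api/v1'
```

ends up in the generated module as:

```v
@[params]
url := 'http://127.0.0.1:${cfg.port}/api/v1'
```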
## call by code
To call it from code:
```v
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.code.generator.generic
generic.scan(path:"~/code/github/freeflowuniverse/herolib/herolib/installers",force:true)!
```
To run it from bash:
```bash
~/code/github/freeflowuniverse/herolib/scripts/fix_installers.vsh
```

View File

@@ -0,0 +1,44 @@
module installer_client
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
@[params]
pub struct ScannerArgs {
pub mut:
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
path string
playonly bool
}
// scan over a set of directories and call do() for each directory where a .heroscript is found
pub fn scan(args ScannerArgs) ! {
console.print_debug('Code generator scan: ${args.path}')
if args.path == '' {
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/installers')!
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/clients')!
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/web')!
return
}
console.print_header('Scan for generation of code for ${args.path}')
// now walk over all directories, find .heroscript
mut pathroot := pathlib.get_dir(path: args.path, create: false)!
mut plist := pathroot.list(
recursive: true
ignoredefault: false
regex: ['.heroscript']
)!
for mut p in plist.paths {
pparent := p.parent()!
path_module := pparent.path
if os.exists('${path_module}/.heroscript') {
do(interactive: args.interactive, path: path_module, reset: args.reset, playonly:args.playonly)!
}
}
}
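// Illustrative sketch (not part of the original commit): scanning a custom tree
// and only (re)generating the play wiring; the path is just an example.
fn example_scan() ! {
	scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/clients', playonly: true)!
}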

View File

@@ -0,0 +1,5 @@
name: ??{model.name}

View File

@@ -0,0 +1,7 @@
!!hero_code.generate_client
name: "${model.name}"
classname: "${model.classname}"
hasconfig: ${model.hasconfig}
singleton: ${model.singleton}
default: ${model.default}
title: "${model.title}"

View File

@@ -0,0 +1,11 @@
!!hero_code.generate_installer
name: "${model.name}"
classname: "${model.classname}"
hasconfig: ${model.hasconfig}
singleton: ${model.singleton}
default: ${model.default}
title: "${model.title}"
templates: ${model.templates}
build: ${model.build}
startupmanager: ${model.startupmanager}

View File

@@ -0,0 +1,219 @@
module ${model.name}
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.installers.ulist
import freeflowuniverse.herolib.installers.base
@if model.startupmanager
import freeflowuniverse.herolib.osal.systemd
import freeflowuniverse.herolib.osal.zinit
@end
@if model.build
import freeflowuniverse.herolib.installers.lang.golang
import freeflowuniverse.herolib.installers.lang.rust
import freeflowuniverse.herolib.installers.lang.python
@end
import os
@if model.startupmanager
fn startupcmd () ![]zinit.ZProcessNewArgs{
mut installer := get()!
mut res := []zinit.ZProcessNewArgs{}
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
// res << zinit.ZProcessNewArgs{
// name: '${model.name}'
// cmd: '${model.name} server'
// env: {
// 'HOME': '/root'
// }
// }
return res
}
fn running_() !bool {
mut installer := get()!
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
// this checks health of ${model.name}
// curl http://localhost:3333/api/v1/s --oauth2-bearer 1234 works
// url:='http://127.0.0.1:??{cfg.port}/api/v1'
// mut conn := httpconnection.new(name: '${model.name}', url: url)!
// if cfg.secret.len > 0 {
// conn.default_header.add(.authorization, 'Bearer ??{cfg.secret}')
// }
// conn.default_header.add(.content_type, 'application/json')
// console.print_debug("curl -X 'GET' '??{url}'/tags --oauth2-bearer ??{cfg.secret}")
// r := conn.get_json_dict(prefix: 'tags', debug: false) or {return false}
// println(r)
// if true{panic("ssss")}
// tags := r['Tags'] or { return false }
// console.print_debug(tags)
// console.print_debug('${model.name} is answering.')
return false
}
fn start_pre()!{
}
fn start_post()!{
}
fn stop_pre()!{
}
fn stop_post()!{
}
@end
//////////////////// following actions are not specific to instance of the object
@if model.cat == .installer
// checks if a certain version or above is installed
fn installed_() !bool {
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
// res := os.execute('??{osal.profile_path_source_and()!} ${model.name} version')
// if res.exit_code != 0 {
// return false
// }
// r := res.output.split_into_lines().filter(it.trim_space().len > 0)
// if r.len != 1 {
// return error("couldn't parse ${model.name} version.\n??{res.output}")
// }
// if texttools.version(version) == texttools.version(r[0]) {
// return true
// }
return false
}
//get the Upload List of the files
fn ulist_get() !ulist.UList {
// optionally build a UList: all paths which are the result of building; it is then used e.g. in upload
return ulist.UList{}
}
//uploads to S3 server if configured
fn upload_() ! {
// installers.upload(
// cmdname: '${model.name}'
// source: '??{gitpath}/target/x86_64-unknown-linux-musl/release/${model.name}'
// )!
}
fn install_() ! {
console.print_header('install ${model.name}')
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
// mut url := ''
// if core.is_linux_arm()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_linux_arm64.tar.gz'
// } else if core.is_linux_intel()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_linux_amd64.tar.gz'
// } else if core.is_osx_arm()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_darwin_arm64.tar.gz'
// } else if core.is_osx_intel()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_darwin_amd64.tar.gz'
// } else {
// return error('unsupported platform')
// }
// mut dest := osal.download(
// url: url
// minsize_kb: 9000
// expand_dir: '/tmp/${model.name}'
// )!
// //dest.moveup_single_subdir()!
// mut binpath := dest.file_get('${model.name}')!
// osal.cmd_add(
// cmdname: '${model.name}'
// source: binpath.path
// )!
}
@if model.build
fn build_() ! {
//url := 'https://github.com/threefoldtech/${model.name}'
// make sure we install base on the node
// if core.platform()!= .ubuntu {
// return error('only support ubuntu for now')
// }
//mut g:=golang.get()!
//g.install()!
//console.print_header('build coredns')
//mut gs := gittools.new(coderoot: '~/code')!
// console.print_header('build ${model.name}')
// gitpath := gittools.get_repo(url: url, reset: true, pull: true)!
// cmd := '
// cd ??{gitpath}
// source ~/.cargo/env
// exit 1 #todo
// '
// osal.execute_stdout(cmd)!
//
// //now copy to the default bin path
// mut binpath := dest.file_get('...')!
// adds it to path
// osal.cmd_add(
// cmdname: 'griddriver2'
// source: binpath.path
// )!
}
@end
fn destroy_() ! {
// mut systemdfactory := systemd.new()!
// systemdfactory.destroy("zinit")!
// osal.process_kill_recursive(name:'zinit')!
// osal.cmd_delete('zinit')!
// osal.package_remove('
// podman
// conmon
// buildah
// skopeo
// runc
// ')!
// //will remove all paths where go/bin is found
// osal.profile_path_add_remove(paths2delete:"go/bin")!
// osal.rm("
// podman
// conmon
// buildah
// skopeo
// runc
// /var/lib/containers
// /var/lib/podman
// /var/lib/buildah
// /tmp/podman
// /tmp/conmon
// ")!
}
@end

View File

@@ -0,0 +1,352 @@
module ${model.name}
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core
@if model.hasconfig
import freeflowuniverse.herolib.data.encoderhero
@end
@if model.cat == .installer
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.osal.zinit
import time
@end
__global (
${model.name}_global map[string]&${model.classname}
${model.name}_default string
)
/////////FACTORY
@if model.singleton == false
^^[params]
pub struct ArgsGet{
pub mut:
name string
}
fn args_get (args_ ArgsGet) ArgsGet {
mut model:=args_
if model.name == ""{
model.name = ${model.name}_default
}
if model.name == ""{
model.name = "default"
}
return model
}
pub fn get(args_ ArgsGet) !&${model.classname} {
mut args := args_get(args_)
if !(args.name in ${model.name}_global) {
if args.name=="default"{
if ! exists(args)!{
if default{
mut context:=base.context() or { panic("bug") }
context.hero_config_set("${model.name}",args.name,heroscript_default()!)!
}
}
load(args)!
}
}
return ${model.name}_global[args.name] or {
println(${model.name}_global)
panic("could not get config for ??{args.name}.")
}
}
@end
@if model.hasconfig
//set the model in mem and the config on the filesystem
pub fn set(o ${model.classname})! {
mut o2:=obj_init(o)!
${model.name}_global[o.name] = &o2
${model.name}_default = o.name
}
//check we find the config on the filesystem
pub fn exists(args_ ArgsGet)!bool {
mut model := args_get(args_)
mut context:=base.context()!
return context.hero_config_exists("${model.name}",model.name)
}
//load the config error if it doesn't exist
pub fn load(args_ ArgsGet) ! {
mut model := args_get(args_)
mut context:=base.context()!
mut heroscript := context.hero_config_get("${model.name}",model.name)!
play(heroscript:heroscript)!
}
//save the config to the filesystem in the context
pub fn save(o ${model.classname})! {
mut context:=base.context()!
heroscript := encoderhero.encode[${model.classname}](o)!
context.hero_config_set("${model.name}",o.name,heroscript)!
}
^^[params]
pub struct PlayArgs {
pub mut:
heroscript string //if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
}
pub fn play(args_ PlayArgs) ! {
mut model:=args_
@if model.hasconfig
if model.heroscript == "" {
model.heroscript = heroscript_default()!
}
@end
mut plbook := model.plbook or {
playbook.new(text: model.heroscript)!
}
@if model.hasconfig
mut configure_actions := plbook.find(filter: '${model.name}.configure')!
if configure_actions.len > 0 {
for config_action in configure_actions {
mut p := config_action.params
mycfg:=cfg_play(p)!
console.print_debug("install action ${model.name}.configure\n??{mycfg}")
set(mycfg)!
save(mycfg)!
}
}
@end
@if model.cat == .installer
mut other_actions := plbook.find(filter: '${model.name}.')!
for other_action in other_actions {
if other_action.name in ["destroy","install","build"]{
mut p := other_action.params
reset:=p.get_default_false("reset")
if other_action.name == "destroy" || reset{
console.print_debug("install action ${model.name}.destroy")
destroy_()!
}
if other_action.name == "install"{
console.print_debug("install action ${model.name}.install")
install_()!
}
}
@if model.startupmanager
if other_action.name in ["start","stop","restart"]{
mut p := other_action.params
name := p.get('name')!
mut ${model.name}_obj:=get(name:name)!
console.print_debug("action object:\n??{${model.name}_obj}")
if other_action.name == "start"{
console.print_debug("install action ${model.name}.??{other_action.name}")
${model.name}_obj.start()!
}
if other_action.name == "stop"{
console.print_debug("install action ${model.name}.??{other_action.name}")
${model.name}_obj.stop()!
}
if other_action.name == "restart"{
console.print_debug("install action ${model.name}.??{other_action.name}")
${model.name}_obj.restart()!
}
}
@end
}
@end
}
@end
@if model.cat == .installer
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
@if model.hasconfig
// load from disk and make sure it is properly initialized
pub fn (mut self ${model.classname}) reload() ! {
switch(self.name)
self=obj_init(self)!
}
@end
@if model.startupmanager
fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManager {
// unknown
// screen
// zinit
// tmux
// systemd
match cat{
.zinit{
console.print_debug("startupmanager: zinit")
return startupmanager.get(cat:.zinit)!
}
.systemd{
console.print_debug("startupmanager: systemd")
return startupmanager.get(cat:.systemd)!
}else{
console.print_debug("startupmanager: auto")
return startupmanager.get()!
}
}
}
pub fn (mut self ${model.classname}) start() ! {
switch(self.name)
if self.running()!{
return
}
console.print_header('${model.name} start')
if ! installed_()!{
install_()!
}
configure()!
start_pre()!
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
console.print_debug('starting ${model.name} with ??{zprocess.startuptype}...')
sm.new(zprocess)!
sm.start(zprocess.name)!
}
start_post()!
for _ in 0 .. 50 {
if self.running()! {
return
}
time.sleep(100 * time.millisecond)
}
return error('${model.name} did not start properly.')
}
pub fn (mut self ${model.classname}) install_start(model InstallArgs) ! {
switch(self.name)
self.install(model)!
self.start()!
}
pub fn (mut self ${model.classname}) stop() ! {
switch(self.name)
stop_pre()!
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
sm.stop(zprocess.name)!
}
stop_post()!
}
pub fn (mut self ${model.classname}) restart() ! {
switch(self.name)
self.stop()!
self.start()!
}
pub fn (mut self ${model.classname}) running() !bool {
switch(self.name)
//walk over the generic processes, if not running_ return
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
r:=sm.running(zprocess.name)!
if r==false{
return false
}
}
return running_()!
}
@end
@@[params]
pub struct InstallArgs{
pub mut:
reset bool
}
@if model.singleton
pub fn install(args InstallArgs) ! {
if args.reset {
destroy()!
}
if ! (installed_()!){
install_()!
}
}
pub fn destroy() ! {
destroy_()!
}
@if model.build
pub fn build() ! {
build_()!
}
@end
@else
//switch instance to be used for ${model.name}
pub fn switch(name string) {
${model.name}_default = name
}
pub fn (mut self ${model.classname}) install(args InstallArgs) ! {
switch(self.name)
if args.reset {
destroy_()!
}
if ! (installed_()!){
install_()!
}
}
@if model.build
pub fn (mut self ${model.classname}) build() ! {
switch(self.name)
build_()!
}
@end
pub fn (mut self ${model.classname}) destroy() ! {
switch(self.name)
@if model.startupmanager
self.stop() or {}
@end
destroy_()!
}
@end
@end

View File

@@ -0,0 +1,155 @@
module ${model.name}
import freeflowuniverse.herolib.data.paramsparser
import os
pub const version = '0.0.0'
const singleton = ${model.singleton}
const default = ${model.default}
@if model.hasconfig
// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH THE STRUCT BELOW, IT IS STRUCTURED AS HEROSCRIPT
pub fn heroscript_default() !string {
@if model.cat == .installer
heroscript:="
!!${model.name}.configure
name:'${model.name}'
homedir: '{HOME}/hero/var/${model.name}'
configpath: '{HOME}/.config/${model.name}/admin.yaml'
username: 'admin'
password: 'secretpassword'
secret: ''
title: 'My Hero DAG'
host: 'localhost'
port: 8888
"
@else
heroscript:="
!!${model.name}.configure
name:'${model.name}'
mail_from: 'info@@example.com'
mail_password: 'secretpassword'
mail_port: 587
mail_server: 'smtp-relay.brevo.com'
mail_username: 'kristof@@incubaid.com'
"
// mail_from := os.getenv_opt('MAIL_FROM') or {'info@@example.com'}
// mail_password := os.getenv_opt('MAIL_PASSWORD') or {'secretpassword'}
// mail_port := (os.getenv_opt('MAIL_PORT') or {"587"}).int()
// mail_server := os.getenv_opt('MAIL_SERVER') or {'smtp-relay.brevo.com'}
// mail_username := os.getenv_opt('MAIL_USERNAME') or {'kristof@@incubaid.com'}
//
// heroscript:="
// !!mailclient.configure name:'default'
// mail_from: '??{mail_from}'
// mail_password: '??{mail_password}'
// mail_port: ??{mail_port}
// mail_server: '??{mail_server}'
// mail_username: '??{mail_username}'
//
// "
//
@end
return heroscript
}
@end
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE; HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@if model.cat == .installer
^^[heap]
pub struct ${model.classname} {
pub mut:
name string = 'default'
@if model.hasconfig
homedir string
configpath string
username string
password string @@[secret]
secret string @@[secret]
title string
host string
port int
@end
}
@if model.hasconfig
fn cfg_play(p paramsparser.Params) !${model.classname} {
//THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
mut mycfg := ${model.classname}{
name: p.get_default('name', 'default')!
homedir: p.get_default('homedir', '{HOME}/hero/var/${model.name}')!
configpath: p.get_default('configpath', '{HOME}/hero/var/${model.name}/admin.yaml')!
username: p.get_default('username', 'admin')!
password: p.get_default('password', '')!
secret: p.get_default('secret', '')!
title: p.get_default('title', 'HERO DAG')!
host: p.get_default('host', 'localhost')!
port: p.get_int_default('port', 8888)!
}
if mycfg.password == '' && mycfg.secret == '' {
return error('password or secret needs to be filled in for ${model.name}')
}
return mycfg
}
@end
@else
^^[heap]
pub struct ${model.classname} {
pub mut:
name string = 'default'
mail_from string
mail_password string @@[secret]
mail_port int
mail_server string
mail_username string
}
@if model.hasconfig
fn cfg_play(p paramsparser.Params) !${model.classname} {
//THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
mut mycfg := ${model.classname}{
name: p.get_default('name', 'default')!
mail_from: p.get('mail_from')!
mail_password: p.get('mail_password')!
mail_port: p.get_int_default('mail_port', 8888)!
mail_server: p.get('mail_server')!
mail_username: p.get('mail_username')!
}
set(mycfg)!
return mycfg
}
@end
@end
fn obj_init(obj_ ${model.classname})!${model.classname}{
// never call get here, the only thing we can do here is work on the object itself
mut obj:=obj_
return obj
}
@if model.cat == .installer
//called before start if done
fn configure() ! {
@if model.cat == .installer
//mut installer := get()!
@else
//mut client := get()!
@end
@if model.templates
// mut mycode := ??tmpl('templates/atemplate.yaml')
// mut path := pathlib.get_file(path: cfg.configpath, create: true)!
// path.write(mycode)!
// console.print_debug(mycode)
@end
}
@end

View File

@@ -0,0 +1,63 @@
# ${model.name}
${model.title}
To get started
```v
@if model.cat == .installer
import freeflowuniverse.herolib.installers.something.${model.name} as ${model.name}_installer
heroscript:="
!!${model.name}.configure name:'test'
password: '1234'
port: 7701
!!${model.name}.start name:'test' reset:1
"
${model.name}_installer.play(heroscript: heroscript)!
//or we can call the default and do a start with reset
//mut installer:= ${model.name}_installer.get()!
//installer.start(reset:true)!
@else
import freeflowuniverse.herolib.clients.${model.name}
mut client:= ${model.name}.get()!
client...
@end
```
## example heroscript
@if model.cat == .installer
```hero
!!${model.name}.configure
homedir: '/home/user/${model.name}'
username: 'admin'
password: 'secretpassword'
title: 'Some Title'
host: 'localhost'
port: 8888
```
@else
```hero
!!${model.name}.configure
secret: '...'
host: 'localhost'
port: 8888
```
@end

View File

@@ -20,6 +20,9 @@ pub mut:
path string
force bool
hasconfig bool = true
playonly bool
play_name string // e.g. docusaurus is what we look for
module_path string // e.g.freeflowuniverse.herolib.web.docusaurus
}
pub enum Cat {
@@ -80,5 +83,5 @@ fn args_get(path string) !GeneratorArgs {
}
}
return error("can't find hero_code.generate_client or hero_code.generate_installer in ${path}")
// return GeneratorArgs{}
}

View File

@@ -9,10 +9,18 @@ pub mut:
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
path string
playonly bool
model ?GenModel
cat ?Cat
}
pub struct PlayArgs {
pub mut:
name string
modulepath string
}
// the default to start with
//
// reset bool // regenerate all, dangerous !!!
@@ -20,7 +28,9 @@ pub mut:
// path string
// model ?GenModel
// cat ?Cat
pub fn do(args_ GenerateArgs) ! {
//
// will return the module path where we need to execute a play command as well as the play name
pub fn do(args_ GenerateArgs) ! PlayArgs{
mut args := args_
console.print_header('Generate code for path: ${args.path} (reset:${args.reset}, interactive:${args.interactive})')
@@ -51,9 +61,9 @@ pub fn do(args_ GenerateArgs) ! {
}
}
if model.cat == .unknown {
model.cat = args.cat or { return error('cat needs to be specified for generator.') }
}
// if model.cat == .unknown {
// model.cat = args.cat or { return error('cat needs to be specified for generator.') }
// }
if args.interactive {
ask(args.path)!
@@ -64,5 +74,15 @@ pub fn do(args_ GenerateArgs) ! {
console.print_debug(args)
generate(args)!
//only generate if playonly is false and there is a classname
if !args.playonly && model.classname.len>0{
generate(args)!
}
return PlayArgs{
name: model.play_name
modulepath: model.module_path
}
}

View File

@@ -20,6 +20,8 @@ pub mut:
build bool = true
hasconfig bool = true
cat Cat // don't set a default
play_name string // e.g. docusaurus is what we look for
module_path string // e.g.freeflowuniverse.herolib.web.docusaurus
}
pub enum Cat {
@@ -37,7 +39,6 @@ pub fn gen_model_set(args GenerateArgs) ! {
.installer { $tmpl('templates/heroscript_installer') }
else { return error('Invalid category: ${model.cat}') }
}
pathlib.template_write(heroscript_templ, '${args.path}/.heroscript', true)!
}
@@ -108,8 +109,30 @@ pub fn gen_model_get(path string, create bool) !GenModel {
model.name = os.base(path).to_lower()
}
console.print_debug('Code generator get: ${model}')
model.play_name = model.name
pathsub:=path.replace('${os.home_dir()}/code/github/','')
model.module_path = pathsub.replace("/",".").replace(".lib.",".")
// !!hero_code.play
// name:'docusaurus'
mut play_actions := plbook.find(filter: 'hero_code.play')!
if play_actions.len>1{
return error("should have max 1 hero_code.play action in ${config_path.path}")
}
if play_actions.len==1{
mut p := play_actions[0].params
model.play_name = p.get_default('name',model.name)!
}
return model
// return GenModel{}
}

View File

@@ -1,6 +1,8 @@
# generation framework for clients & installers
```bash
#generate all play commands
hero generate -playonly
#will ask questions if .heroscript is not there yet
hero generate -p thepath_is_optional
# to generate without questions

View File

@@ -10,6 +10,7 @@ pub mut:
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
path string
playonly bool
}
// scan over a set of directories call the play where
@@ -19,6 +20,7 @@ pub fn scan(args ScannerArgs) ! {
if args.path == '' {
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/installers')!
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/clients')!
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/web')!
return
}
@@ -36,7 +38,7 @@ pub fn scan(args ScannerArgs) ! {
pparent := p.parent()!
path_module := pparent.path
if os.exists('${path_module}/.heroscript') {
do(interactive: args.interactive, path: path_module, reset: args.reset)!
do(interactive: args.interactive, path: path_module, reset: args.reset, playonly:args.playonly)!
}
}
}

View File

@@ -36,6 +36,13 @@ pub fn cmd_generator(mut cmdroot Command) {
description: 'will work non interactive if possible.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
name: 'playonly'
description: 'generate the play script.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
@@ -59,9 +66,14 @@ fn cmd_generator_execute(cmd Command) ! {
mut force := cmd.flags.get_bool('force') or { false }
mut reset := cmd.flags.get_bool('reset') or { false }
mut scan := cmd.flags.get_bool('scan') or { false }
mut playonly := cmd.flags.get_bool('playonly') or { false }
mut installer := cmd.flags.get_bool('installer') or { false }
mut path := cmd.flags.get_string('path') or { '' }
if playonly{
force=true
}
if path == '' {
path = os.getwd()
}
@@ -74,7 +86,7 @@ fn cmd_generator_execute(cmd Command) ! {
}
if scan {
generic.scan(path: path, reset: reset, force: force, cat: cat)!
generic.scan(path: path, reset: reset, force: force, cat: cat, playonly:playonly)!
} else {
generic.generate(path: path, reset: reset, force: force, cat: cat)!
}

View File

@@ -1,60 +0,0 @@
module model
import freeflowuniverse.herolib.data.ourtime
// Agent represents a service provider that can execute jobs
pub struct Agent {
pub mut:
pubkey string // pubkey using ed25519
address string // where we can find the agent
port int // default 9999
description string // optional
status AgentStatus
services []AgentService // these are the public services
signature string // signature as done by private key of $address+$port+$description+$status
}
// AgentStatus represents the current state of an agent
pub struct AgentStatus {
pub mut:
guid string // unique id for the job
timestamp_first ourtime.OurTime // when agent came online
timestamp_last ourtime.OurTime // last time agent let us know that he is working
status AgentState // current state of the agent
}
// AgentService represents a service provided by an agent
pub struct AgentService {
pub mut:
actor string // name of the actor providing the service
actions []AgentServiceAction // available actions for this service
description string // optional description
status AgentServiceState // current state of the service
}
// AgentServiceAction represents an action that can be performed by a service
pub struct AgentServiceAction {
pub mut:
action string // which action
description string // optional description
params map[string]string // e.g. name:'name of the vm' ...
params_example map[string]string // e.g. name:'myvm'
status AgentServiceState // current state of the action
public bool // if everyone can use then true, if restricted means only certain people can use
}
// AgentState represents the possible states of an agent
pub enum AgentState {
ok // agent is functioning normally
down // agent is not responding
error // agent encountered an error
halted // agent has been manually stopped
}
// AgentServiceState represents the possible states of an agent service or action
pub enum AgentServiceState {
ok // service/action is functioning normally
down // service/action is not available
error // service/action encountered an error
halted // service/action has been manually stopped
}

View File

@@ -1,91 +0,0 @@
module model
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
import json
const agents_key = 'herorunner:agents' // Redis key for storing agents
// AgentManager handles all agent-related operations
pub struct AgentManager {
mut:
redis &redisclient.Redis
}
// new creates a new Agent instance
pub fn (mut m AgentManager) new() Agent {
return Agent{
pubkey: '' // Empty pubkey to be filled by caller
port: 9999 // Default port
status: AgentStatus{
guid: ''
timestamp_first: ourtime.now()
timestamp_last: ourtime.OurTime{}
status: .ok
}
services: []AgentService{}
}
}
// set adds or updates an agent in Redis
pub fn (mut m AgentManager) set(agent Agent) ! {
// Store agent in Redis hash where key is agent.pubkey and value is JSON of agent
agent_json := json.encode(agent)
m.redis.hset(agents_key, agent.pubkey, agent_json)!
}
// get retrieves an agent by its public key
pub fn (mut m AgentManager) get(pubkey string) !Agent {
agent_json := m.redis.hget(agents_key, pubkey)!
return json.decode(Agent, agent_json)
}
// list returns all agents
pub fn (mut m AgentManager) list() ![]Agent {
mut agents := []Agent{}
// Get all agents from Redis hash
agents_map := m.redis.hgetall(agents_key)!
// Convert each JSON value to Agent struct
for _, agent_json in agents_map {
agent := json.decode(Agent, agent_json)!
agents << agent
}
return agents
}
// delete removes an agent by its public key
pub fn (mut m AgentManager) delete(pubkey string) ! {
m.redis.hdel(agents_key, pubkey)!
}
// update_status updates just the status of an agent
pub fn (mut m AgentManager) update_status(pubkey string, status AgentState) ! {
mut agent := m.get(pubkey)!
agent.status.status = status
m.set(agent)!
}
// get_by_service returns all agents that provide a specific service
pub fn (mut m AgentManager) get_by_service(actor string, action string) ![]Agent {
mut matching_agents := []Agent{}
agents := m.list()!
for agent in agents {
for service in agent.services {
if service.actor != actor {
continue
}
for act in service.actions {
if act.action == action {
matching_agents << agent
break
}
}
}
}
return matching_agents
}

View File

@@ -1,74 +0,0 @@
module model
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
fn test_agents_model() {
mut runner := new()!
// Create a new agent using the manager
mut agent := runner.agents.new()
agent.pubkey = 'test-agent-1'
agent.address = '127.0.0.1'
agent.description = 'Test Agent'
// Create a service action
mut action := AgentServiceAction{
action: 'start'
description: 'Start a VM'
params: {
'name': 'string'
}
params_example: {
'name': 'myvm'
}
status: .ok
public: true
}
// Create a service
mut service := AgentService{
actor: 'vm_manager'
actions: [action]
description: 'VM Management Service'
status: .ok
}
agent.services = [service]
// Add the agent
runner.agents.set(agent)!
// Get the agent and verify fields
retrieved_agent := runner.agents.get(agent.pubkey)!
assert retrieved_agent.pubkey == agent.pubkey
assert retrieved_agent.address == agent.address
assert retrieved_agent.description == agent.description
assert retrieved_agent.services.len == 1
assert retrieved_agent.services[0].actor == 'vm_manager'
assert retrieved_agent.status.status == .ok
// Update agent status
runner.agents.update_status(agent.pubkey, .down)!
updated_agent := runner.agents.get(agent.pubkey)!
assert updated_agent.status.status == .down
// Test get_by_service
agents := runner.agents.get_by_service('vm_manager', 'start')!
assert agents.len > 0
assert agents[0].pubkey == agent.pubkey
// List all agents
all_agents := runner.agents.list()!
assert all_agents.len > 0
assert all_agents[0].pubkey == agent.pubkey
// Delete the agent
runner.agents.delete(agent.pubkey)!
// Verify deletion
agents_after := runner.agents.list()!
for a in agents_after {
assert a.pubkey != agent.pubkey
}
}

View File

@@ -1,37 +0,0 @@
module model
import freeflowuniverse.herolib.core.redisclient
// HeroRunner is the main factory for managing jobs, agents, services and groups
pub struct HeroRunner {
mut:
redis &redisclient.Redis
pub mut:
jobs &JobManager
agents &AgentManager
services &ServiceManager
groups &GroupManager
}
// new creates a new HeroRunner instance
pub fn new() !&HeroRunner {
mut redis := redisclient.core_get()!
mut hr := &HeroRunner{
redis: redis
jobs: &JobManager{
redis: redis
}
agents: &AgentManager{
redis: redis
}
services: &ServiceManager{
redis: redis
}
groups: &GroupManager{
redis: redis
}
}
return hr
}

View File

@@ -1,10 +0,0 @@
module model
// Group represents a collection of members (users or other groups)
pub struct Group {
pub mut:
guid string // unique id
name string // name of the group
description string // optional description
members []string // can be other group or member which is defined by pubkey
}

View File

@@ -1,99 +0,0 @@
module model
import freeflowuniverse.herolib.core.redisclient
import json
const groups_key = 'herorunner:groups' // Redis key for storing groups
// GroupManager handles all group-related operations
pub struct GroupManager {
mut:
redis &redisclient.Redis
}
// new creates a new Group instance
pub fn (mut m GroupManager) new() Group {
return Group{
guid: '' // Empty GUID to be filled by caller
members: []string{}
}
}
// set adds or updates a group in Redis
pub fn (mut m GroupManager) set(group Group) ! {
// Store group in Redis hash where key is group.guid and value is JSON of group
group_json := json.encode(group)
m.redis.hset(groups_key, group.guid, group_json)!
}
// get retrieves a group by its GUID
pub fn (mut m GroupManager) get(guid string) !Group {
group_json := m.redis.hget(groups_key, guid)!
return json.decode(Group, group_json)!
}
// list returns all groups
pub fn (mut m GroupManager) list() ![]Group {
mut groups := []Group{}
// Get all groups from Redis hash
groups_map := m.redis.hgetall(groups_key)!
// Convert each JSON value to Group struct
for _, group_json in groups_map {
group := json.decode(Group, group_json)!
groups << group
}
return groups
}
// delete removes a group by its GUID
pub fn (mut m GroupManager) delete(guid string) ! {
m.redis.hdel(groups_key, guid)!
}
// add_member adds a member (user pubkey or group GUID) to a group
pub fn (mut m GroupManager) add_member(guid string, member string) ! {
mut group := m.get(guid)!
if member !in group.members {
group.members << member
m.set(group)!
}
}
// remove_member removes a member from a group
pub fn (mut m GroupManager) remove_member(guid string, member string) ! {
mut group := m.get(guid)!
group.members = group.members.filter(it != member)
m.set(group)!
}
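// get_user_groups returns every group that lists the given user pubkey in its members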
pub fn (mut m GroupManager) get_user_groups(user_pubkey string) ![]Group {
mut user_groups := []Group{}
mut checked_groups := map[string]bool{}
groups := m.list()!
// Check each group
for group in groups {
check_group_membership(group, user_pubkey, groups, mut checked_groups, mut user_groups)
}
return user_groups
}
// Recursive function to check group membership
fn check_group_membership(group Group, user string, groups []Group, mut checked map[string]bool, mut result []Group) {
if group.guid in checked {
return
}
checked[group.guid] = true
if user in group.members {
result << group
// Check parent groups
for parent_group in groups {
if group.guid in parent_group.members {
check_group_membership(parent_group, user, groups, mut checked, mut result)
}
}
}
}
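
A minimal sketch of that lookup, assuming the HeroRunner factory from this diff and a reachable Redis instance; the GUIDs are hypothetical and the expected result mirrors the group test below:

mut runner := new()!
mut parent := runner.groups.new()
parent.guid = 'admin-group'
runner.groups.set(parent)!
mut child := runner.groups.new()
child.guid = 'vm-admins'
runner.groups.set(child)!
runner.groups.add_member(parent.guid, child.guid)!
runner.groups.add_member(child.guid, 'user-1-pubkey')!
user_groups := runner.groups.get_user_groups('user-1-pubkey')!
// only the group that lists the pubkey directly is returned
assert user_groups.len == 1
assert user_groups[0].guid == 'vm-admins'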

View File

@@ -1,67 +0,0 @@
module model
import freeflowuniverse.herolib.core.redisclient
fn test_groups() {
mut runner := new()!
// Create a new group using the manager
mut group := runner.groups.new()
group.guid = 'admin-group'
group.name = 'Administrators'
group.description = 'Administrator group with full access'
// Add the group
runner.groups.set(group)!
// Create a subgroup
mut subgroup := runner.groups.new()
subgroup.guid = 'vm-admins'
subgroup.name = 'VM Administrators'
subgroup.description = 'VM management administrators'
runner.groups.set(subgroup)!
// Add subgroup to main group
runner.groups.add_member(group.guid, subgroup.guid)!
// Add a user to the subgroup
runner.groups.add_member(subgroup.guid, 'user-1-pubkey')!
// Get the groups and verify fields
retrieved_group := runner.groups.get(group.guid)!
assert retrieved_group.guid == group.guid
assert retrieved_group.name == group.name
assert retrieved_group.description == group.description
assert retrieved_group.members.len == 1
assert retrieved_group.members[0] == subgroup.guid
retrieved_subgroup := runner.groups.get(subgroup.guid)!
assert retrieved_subgroup.members.len == 1
assert retrieved_subgroup.members[0] == 'user-1-pubkey'
// Test recursive group membership
user_groups := runner.groups.get_user_groups('user-1-pubkey')!
assert user_groups.len == 1
assert user_groups[0].guid == subgroup.guid
// Remove member from subgroup
runner.groups.remove_member(subgroup.guid, 'user-1-pubkey')!
updated_subgroup := runner.groups.get(subgroup.guid)!
assert updated_subgroup.members.len == 0
// List all groups
groups := runner.groups.list()!
assert groups.len == 2
// Delete the groups
runner.groups.delete(subgroup.guid)!
runner.groups.delete(group.guid)!
// Verify deletion
groups_after := runner.groups.list()!
for g in groups_after {
assert g.guid != group.guid
assert g.guid != subgroup.guid
}
}

View File

@@ -1,68 +0,0 @@
module model
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
import json
const jobs_key = 'herorunner:jobs' // Redis key for storing jobs
// JobManager handles all job-related operations
pub struct JobManager {
mut:
redis &redisclient.Redis
}
// new creates a new Job instance
pub fn (mut m JobManager) new() Job {
return Job{
guid: '' // Empty GUID to be filled by caller
status: JobStatus{
guid: ''
created: ourtime.now()
start: ourtime.OurTime{}
end: ourtime.OurTime{}
status: .created
}
}
}
// set stores a job in Redis, keyed by its GUID
pub fn (mut m JobManager) set(job Job) ! {
// Store job in Redis hash where key is job.guid and value is JSON of job
job_json := json.encode(job)
m.redis.hset(jobs_key, job.guid, job_json)!
}
// get retrieves a job by its GUID
pub fn (mut m JobManager) get(guid string) !Job {
job_json := m.redis.hget(jobs_key, guid)!
return json.decode(Job, job_json)!
}
// list returns all jobs
pub fn (mut m JobManager) list() ![]Job {
mut jobs := []Job{}
// Get all jobs from Redis hash
jobs_map := m.redis.hgetall(jobs_key)!
// Convert each JSON value to Job struct
for _, job_json in jobs_map {
job := json.decode(Job, job_json)!
jobs << job
}
return jobs
}
// delete removes a job by its GUID
pub fn (mut m JobManager) delete(guid string) ! {
m.redis.hdel(jobs_key, guid)!
}
// update_status updates just the status of a job
pub fn (mut m JobManager) update_status(guid string, status Status) ! {
mut job := m.get(guid)!
job.status.status = status
m.set(job)!
}
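
To close, a hedged sketch of the job lifecycle against this manager; the GUID is hypothetical and a Redis instance must be reachable, mirroring the patterns in the tests above:

mut runner := new()!
mut job := runner.jobs.new()
job.guid = 'job-1' // hypothetical GUID; new() leaves it empty for the caller to fill
runner.jobs.set(job)!
stored := runner.jobs.get(job.guid)!
assert stored.status.status == .created // initial status assigned by new()
runner.jobs.delete(job.guid)!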

Some files were not shown because too many files have changed in this diff.