Merge branch 'development' of github.com:freeflowuniverse/herolib into development

This commit is contained in:
2025-09-05 14:07:20 +04:00
88 changed files with 2945 additions and 2003 deletions

3
.gitignore vendored
View File

@@ -53,4 +53,5 @@ HTTP_REST_MCP_DEMO.md
MCP_HTTP_REST_IMPLEMENTATION_PLAN.md
.roo
.kilocode
.continue
.continue
tmux_logger

View File

@@ -52,7 +52,6 @@ println(' - API title: ${spec.info.title}')
println(' - API version: ${spec.info.version}')
println(' - Methods available: ${spec.methods.len}')
// 2. List all services
println('\n2. Listing all services...')
services := client.service_list() or {

View File

@@ -0,0 +1,114 @@
#!/usr/bin/env hero
// Enhanced Declarative Tmux Test with Redis State Tracking
// This demonstrates the new intelligent command management features
// Ensure a test session exists
!!tmux.session_ensure
name:"enhanced_test"
// Ensure a 4-pane window exists
!!tmux.window_ensure
name:"enhanced_test|demo"
cat:"4pane"
// Configure panes with intelligent state management
// The system will now:
// 1. Check if commands have changed using MD5 hashing
// 2. Verify if previous commands are still running
// 3. Kill and restart only when necessary
// 4. Ensure bash is the parent process
// 5. Reset panes when needed
// 6. Track all state in Redis
!!tmux.pane_ensure
name:"enhanced_test|demo|1"
label:"web_server"
cmd:"echo \"Starting web server...\" && python3 -m http.server 8000"
log:true
logpath:"/tmp/enhanced_logs"
logreset:true
!!tmux.pane_ensure
name:"enhanced_test|demo|2"
label:"monitor"
cmd:"echo \"Starting system monitor...\" && htop"
log:true
logpath:"/tmp/enhanced_logs"
!!tmux.pane_ensure
name:"enhanced_test|demo|3"
label:"logs"
cmd:"echo \"Monitoring logs...\" && tail -f /var/log/system.log"
log:true
logpath:"/tmp/enhanced_logs"
!!tmux.pane_ensure
name:"enhanced_test|demo|4"
label:"development"
cmd:"
echo \"Setting up development environment...\"
mkdir -p /tmp/dev_workspace
cd /tmp/dev_workspace
echo \"Development environment ready!\"
echo \"Current directory:\" && pwd
echo \"Available commands: ls, vim, git, etc.\"
"
log:true
logpath:"/tmp/enhanced_logs"
// Test the intelligent state management by running the same commands again
// The system should detect that commands haven't changed and skip re-execution
// for commands that are still running
!!tmux.pane_ensure
name:"enhanced_test|demo|1"
label:"web_server"
cmd:"echo \"Starting web server...\" && python3 -m http.server 8000"
log:true
logpath:"/tmp/enhanced_logs"
// Test command change detection by modifying a command slightly
!!tmux.pane_ensure
name:"enhanced_test|demo|2"
label:"monitor"
cmd:"echo \"Starting UPDATED system monitor...\" && htop"
log:true
logpath:"/tmp/enhanced_logs"
// This should kill the previous htop and start a new one because the command changed
// Test with a completely different command
!!tmux.pane_ensure
name:"enhanced_test|demo|3"
label:"network"
cmd:"echo \"Switching to network monitoring...\" && netstat -tuln"
log:true
logpath:"/tmp/enhanced_logs"
// This should kill the tail command and start netstat
// Test multi-line command with state tracking
!!tmux.pane_ensure
name:"enhanced_test|demo|4"
label:"advanced_dev"
cmd:"
echo \"Advanced development setup...\"
cd /tmp/dev_workspace
echo \"Creating project structure...\"
mkdir -p src tests docs
echo \"Project structure created:\"
ls -la
echo \"Ready for development!\"
"
log:true
logpath:"/tmp/enhanced_logs"
// The system will:
// - Compare MD5 hash of this multi-line command with the previous one
// - Detect that it's different
// - Kill the previous command
// - Execute this new command
// - Store the new state in Redis
// - Ensure bash is the parent process
// - Enable logging with the tmux_logger binary

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env hero
// Demonstration of multi-line command support in tmux heroscripts
// This example shows how to use multi-line commands in pane configurations
// Create a development session
!!tmux.session_create
name:"dev_multiline"
reset:true
// Create a 4-pane development workspace
!!tmux.window_ensure
name:"dev_multiline|workspace"
cat:"4pane"
// Pane 1: Development environment setup
!!tmux.pane_ensure
name:"dev_multiline|workspace|1"
label:"dev_setup"
cmd:'
echo "=== Development Environment Setup ==="
echo "Current directory: $(pwd)"
echo "Git status:"
git status --porcelain || echo "Not a git repository"
echo "Available disk space:"
df -h .
echo "Development setup complete"
'
// Pane 2: System monitoring
!!tmux.pane_ensure
name:"dev_multiline|workspace|2"
label:"monitoring"
cmd:'
echo "=== System Monitoring ==="
echo "System uptime:"
uptime
echo "Memory usage:"
free -h 2>/dev/null || vm_stat | head -5
echo "CPU info:"
sysctl -n machdep.cpu.brand_string 2>/dev/null || cat /proc/cpuinfo | grep "model name" | head -1
echo "Monitoring setup complete"
'
// Pane 3: Network diagnostics
!!tmux.pane_ensure
name:"dev_multiline|workspace|3"
label:"network"
cmd:'
echo "=== Network Diagnostics ==="
echo "Network interfaces:"
ifconfig | grep -E "^[a-z]|inet " | head -10
echo "DNS configuration:"
cat /etc/resolv.conf 2>/dev/null || scutil --dns | head -10
echo "Network diagnostics complete"
'
// Pane 4: File operations and cleanup
!!tmux.pane_ensure
name:"dev_multiline|workspace|4"
label:"file_ops"
cmd:'
echo "=== File Operations ==="
echo "Creating temporary workspace..."
mkdir -p /tmp/dev_workspace
cd /tmp/dev_workspace
echo "Current location: $(pwd)"
echo "Creating sample files..."
echo "Sample content" > sample.txt
echo "Another file" > another.txt
echo "Files created:"
ls -la
echo "File operations complete"
'

View File

@@ -3,9 +3,7 @@ module builder
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console
import v.embed_file
const heropath_ = os.dir(@FILE) + '/../'
@@ -52,10 +50,10 @@ pub mut:
pub fn (mut node Node) hero_install(args HeroInstallArgs) ! {
console.print_debug('install hero')
mut bs := bootstrapper()
bootstrapper()
myenv := node.environ_get()!
homedir := myenv['HOME'] or { return error("can't find HOME in env") }
_ := myenv['HOME'] or { return error("can't find HOME in env") }
mut todo := []string{}
if !args.compile {

View File

@@ -1,13 +1,8 @@
module mycelium
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.installers.lang.rust
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.ui
import os
import time
import json
pub fn check() bool {

View File

@@ -2,7 +2,6 @@ module mycelium
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (

View File

@@ -150,21 +150,20 @@ pub fn plbook_code_get(cmd Command) !string {
// same as session_run_get but will also run the plbook
pub fn plbook_run(cmd Command) !(&playbook.PlayBook, string) {
heroscript := cmd.flags.get_string('heroscript') or { '' }
heroscript := cmd.flags.get_string('heroscript') or { '' }
mut path := ''
mut plbook := if heroscript.len > 0 {
playbook.new(text: heroscript)!
} else {
path
= plbook_code_get(cmd)!
path = plbook_code_get(cmd)!
if path.len == 0 {
return error(cmd.help_message())
}
// add all actions inside to the plbook
playbook.new(path: path)!
playbook.new(path: path)!
}
dagu := cmd.flags.get_bool('dagu') or { false }
playcmds.run(plbook: plbook)!

View File

@@ -11,7 +11,9 @@ fn testsuite_begin() {
}
fn test_logger() {
mut logger := new('/tmp/testlogs')!
mut logger := new(LoggerFactoryArgs{
path: '/tmp/testlogs'
})!
// Test stdout logging
logger.log(LogItemArgs{

View File

@@ -1,6 +1,6 @@
module playbook
import freeflowuniverse.herolib.develop.gittools // Added import for gittools
// import freeflowuniverse.herolib.develop.gittools // Added import for gittools
// REMARK: include is done in play_core

View File

@@ -1,8 +1,6 @@
module encoderhero
import time
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.core.texttools
pub struct Decoder[T] {
pub mut:

View File

@@ -4,7 +4,6 @@ import freeflowuniverse.herolib.data.paramsparser
import time
import v.reflection
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.texttools
// import freeflowuniverse.herolib.ui.console
// Encoder encodes the an `Any` type into HEROSCRIPT representation.

View File

@@ -1,7 +1,5 @@
module encoderhero
import time
// byte array versions of the most common tokens/chars to avoid reallocations
const null_in_bytes = 'null'

View File

@@ -25,7 +25,7 @@ fn test_ping() {
mut addr := IPAddress{
addr: '127.0.0.1'
}
assert addr.ping(timeout: 3)!
assert addr.ping(nr_ok: 3)!
assert addr.port == 0
}
@@ -33,7 +33,7 @@ fn test_ping_fails() {
mut addr := IPAddress{
addr: '22.22.22.22'
}
assert addr.ping(timeout: 3)! == false
assert addr.ping(nr_ok: 3)! == false
assert addr.port == 0
assert addr.addr == '22.22.22.22'
}
@@ -56,7 +56,7 @@ fn test_ipv6() {
mut addr := new('202:6a34:cd78:b0d7:5521:8de7:218e:6680') or { panic(err) }
assert addr.cat == .ipv6
assert addr.port == 0
// assert addr.ping(timeout: 3)! == false
// assert addr.ping(nr_ok: 3)! == false
}
fn test_ipv6b() {

View File

@@ -23,7 +23,7 @@ pub mut:
}
// is_running checks if the node is operational by pinging its address
fn (node &StreamerNode) is_running() bool {
fn (node &StreamerNode) is_running() !bool {
return osal.ping(address: node.address, retry: 2)!
}
@@ -198,7 +198,7 @@ pub fn (mut node StreamerNode) handle_ping_nodes() ! {
mut i := 0
for i < node.workers.len {
worker := &node.workers[i]
if !worker.is_running() {
if !(worker.is_running() or { false }) {
log_event(event_type: 'logs', message: 'Worker ${worker.address} is not running')
log_event(event_type: 'logs', message: 'Removing worker ${worker.public_key}')
node.workers.delete(i)
@@ -212,7 +212,7 @@ pub fn (mut node StreamerNode) handle_ping_nodes() ! {
}
}
} else {
if !node.is_running() {
if !(node.is_running() or { false }) {
return error('Worker node is not running')
}
if node.master_public_key.len == 0 {

View File

@@ -244,7 +244,7 @@ pub fn (mut self Streamer) add_worker(params StreamerNodeParams) !StreamerNode {
mut worker_node := self.new_node(params)!
if !worker_node.is_running() {
if !(worker_node.is_running() or { false }) {
return error('Worker node is not running')
}

View File

@@ -175,7 +175,7 @@ fn test_get_u64_default() {
assert params.get_u64_default('key3', 17)! == 17
}
fn test_get_u32()! {
fn test_get_u32() ! {
text := '
key1: val1
key2: 19

View File

@@ -2,7 +2,7 @@ module gittools
import crypto.md5
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
// import freeflowuniverse.herolib.ui.console
import os
import json

View File

@@ -65,7 +65,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
// means current dir
args.path = os.getwd()
mut curdiro := pathlib.get_dir(path: args.path, create: false)!
mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
// mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
args.path = curdiro.path
}
if !os.exists(args.path) {

View File

@@ -1,8 +1,8 @@
module gittools
import freeflowuniverse.herolib.core.redisclient
// import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.ui.console
import time
// import time
// ReposGetArgs defines arguments to retrieve repositories from the git structure.
// It includes filters by name, account, provider, and an option to clone a missing repo.

View File

@@ -27,7 +27,7 @@ fn (mut repo GitRepo) cache_get() ! {
if repo_json.len > 0 {
mut cached := json.decode(GitRepo, repo_json)!
cached.gs = repo.gs
cached.config.remote_check_period = 3600 * 24 * 7
cached.config.remote_check_period = 3600 * 24 * 7
repo = cached
}
}

View File

@@ -2,7 +2,7 @@ module gittools
import freeflowuniverse.herolib.ui.console
import os
import freeflowuniverse.herolib.core.pathlib
// import freeflowuniverse.herolib.core.pathlib
@[params]
pub struct GitCloneArgs {
@@ -40,17 +40,17 @@ pub fn (mut gitstructure GitStructure) clone(args GitCloneArgs) !&GitRepo {
gitstructure.repos[key_] = &repo
if repo.exists() {
console.print_green("Repository already exists at ${repo.path()}")
console.print_green('Repository already exists at ${repo.path()}')
// Load the existing repository status
repo.load_internal() or {
console.print_debug('Could not load existing repository status: ${err}')
}
return &repo
}
// Check if path exists but is not a git repository
if os.exists(repo.path()) {
return error("Path exists but is not a git repository: ${repo.path()}")
return error('Path exists but is not a git repository: ${repo.path()}')
}
if args.sshkey.len > 0 {

View File

@@ -2,7 +2,7 @@ module gittools
import time
import freeflowuniverse.herolib.ui.console
import os
// import os
@[params]
pub struct StatusUpdateArgs {

View File

@@ -182,7 +182,7 @@ pub fn (mut gs GitStructure) check_repos_exist(args ReposActionsArgs) !string {
account: args.account
provider: args.provider
)!
if repos.len > 0 {
// Repository exists - print path and return success
if !args.script {

110
lib/hero/herocluster/example/example.vsh Normal file → Executable file
View File

@@ -1,67 +1,107 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import crypto.ed25519
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.hero.herocluster
import os
import rand
mut ctx := base.context()!
redis := ctx.redis()!
if os.args.len < 3 {
eprintln('Usage: ./prog <node_id> <status>')
eprintln(' status: active|buffer')
return
eprintln('Usage: ./prog <node_id> <status>')
eprintln(' status: active|buffer')
return
}
node_id := os.args[1]
status_str := os.args[2]
status := match status_str {
'active' { NodeStatus.active }
'buffer' { NodeStatus.buffer }
else {
eprintln('Invalid status. Use: active|buffer')
return
}
'active' {
herocluster.NodeStatus.active
}
'buffer' {
herocluster.NodeStatus.buffer
}
else {
eprintln('Invalid status. Use: active|buffer')
return
}
}
// --- Generate ephemeral keys for demo ---
// In real use: load from PEM files
priv, pub := ed25519.generate_key(rand.reader) or { panic(err) }
pub_, priv := ed25519.generate_key()!
mut pubkeys := map[string]ed25519.PublicKey{}
pubkeys[node_id] = pub
pubkeys[node_id] = pub_
// TODO: load all pubkeys from config file so every node knows others
// Initialize all nodes (in real scenario, load from config)
mut all_nodes := map[string]Node{}
all_nodes['node1'] = Node{id: 'node1', status: .active}
all_nodes['node2'] = Node{id: 'node2', status: .active}
all_nodes['node3'] = Node{id: 'node3', status: .active}
all_nodes['node4'] = Node{id: 'node4', status: .buffer}
mut all_nodes := map[string]herocluster.Node{}
all_nodes['node1'] = herocluster.Node{
id: 'node1'
status: .active
}
all_nodes['node2'] = herocluster.Node{
id: 'node2'
status: .active
}
all_nodes['node3'] = herocluster.Node{
id: 'node3'
status: .active
}
all_nodes['node4'] = herocluster.Node{
id: 'node4'
status: .buffer
}
// Set current node status
all_nodes[node_id].status = status
servers := ['127.0.0.1:6379', '127.0.0.1:6380', '127.0.0.1:6381', '127.0.0.1:6382']
mut conns := []redis.Connection{}
mut conns := []&redisclient.Redis{}
for s in servers {
mut c := redis.connect(redis.Options{ server: s }) or {
panic('could not connect to redis $s: $err')
}
conns << c
redis_url := redisclient.get_redis_url(s) or {
eprintln('Warning: could not parse redis url ${s}: ${err}')
continue
}
mut c := redisclient.core_get(redis_url) or {
eprintln('Warning: could not connect to redis ${s}: ${err}')
continue
}
conns << c
println('Connected to Redis server: ${s}')
}
mut election := Election{
clients: conns
pubkeys: pubkeys
self: Node{
id: node_id
term: 0
leader: false
status: status
}
keys: Keys{ priv: priv, pub: pub }
all_nodes: all_nodes
buffer_nodes: ['node4'] // Initially node4 is buffer
if conns.len == 0 {
eprintln('Error: No Redis servers available. Please start at least one Redis server.')
return
}
println('[$node_id] started as $status_str, connected to 4 redis servers.')
mut election := &herocluster.Election{
clients: conns
pubkeys: pubkeys
self: herocluster.Node{
id: node_id
term: 0
leader: false
status: status
}
keys: herocluster.Keys{
priv: priv
pub: pub_
}
all_nodes: all_nodes
buffer_nodes: ['node4'] // Initially node4 is buffer
}
println('[${node_id}] started as ${status_str}, connected to 4 redis servers.')
// Start health monitoring in background
go election.health_monitor_loop()
spawn election.health_monitor_loop()
// Start main heartbeat loop
election.heartbeat_loop()

View File

@@ -1,10 +1,8 @@
module herocluster
import db.redis
import freeflowuniverse.herolib.core.redisclient
import crypto.ed25519
import crypto.rand
import encoding.hex
import os
import time
const election_timeout_ms = 3000
@@ -14,295 +12,318 @@ const health_check_interval_ms = 30000 // 30 seconds
// --- Crypto helpers ---
struct Keys {
priv ed25519.PrivateKey
pub ed25519.PublicKey
pub struct Keys {
pub mut:
priv ed25519.PrivateKey
pub ed25519.PublicKey
}
// sign a message
fn (k Keys) sign(msg string) string {
sig := ed25519.sign(k.priv, msg.bytes())
return hex.encode(sig)
sig := ed25519.sign(k.priv, msg.bytes()) or { panic('Failed to sign message: ${err}') }
return hex.encode(sig)
}
// verify signature
fn verify(pub ed25519.PublicKey, msg string, sig_hex string) bool {
sig := hex.decode(sig_hex) or { return false }
return ed25519.verify(pub, msg.bytes(), sig)
fn verify(pubkey ed25519.PublicKey, msg string, sig_hex string) bool {
sig := hex.decode(sig_hex) or { return false }
return ed25519.verify(pubkey, msg.bytes(), sig) or { false }
}
// --- Node & Election ---
enum NodeStatus {
active
buffer
unavailable
pub enum NodeStatus {
active
buffer
unavailable
}
struct Node {
id string
mut:
term int
leader bool
voted_for string
status NodeStatus
last_seen i64 // timestamp
pub struct Node {
pub:
id string
pub mut:
term int
leader bool
voted_for string
status NodeStatus
last_seen i64 // timestamp
}
struct HealthReport {
reporter_id string
target_id string
status string // "available" or "unavailable"
timestamp i64
signature string
reporter_id string
target_id string
status string // "available" or "unavailable"
timestamp i64
signature string
}
struct Election {
mut:
clients []redis.Connection
pubkeys map[string]ed25519.PublicKey
self Node
keys Keys
all_nodes map[string]Node
buffer_nodes []string
pub struct Election {
pub mut:
clients []&redisclient.Redis
pubkeys map[string]ed25519.PublicKey
self Node
keys Keys
all_nodes map[string]Node
buffer_nodes []string
}
// Redis keys
fn vote_key(term int, node_id string) string { return 'vote:${term}:${node_id}' }
fn health_key(reporter_id string, target_id string) string { return 'health:${reporter_id}:${target_id}' }
fn node_status_key(node_id string) string { return 'node_status:${node_id}' }
fn vote_key(term int, node_id string) string {
return 'vote:${term}:${node_id}'
}
fn health_key(reporter_id string, target_id string) string {
return 'health:${reporter_id}:${target_id}'
}
fn node_status_key(node_id string) string {
return 'node_status:${node_id}'
}
// Write vote (signed) to ALL redis servers
fn (mut e Election) vote_for(candidate string) {
msg := '${e.self.term}:${candidate}'
sig_hex := e.keys.sign(msg)
for mut c in e.clients {
k := vote_key(e.self.term, e.self.id)
c.hset(k, 'candidate', candidate) or {}
c.hset(k, 'sig', sig_hex) or {}
c.expire(k, 5) or {}
}
println('[${e.self.id}] voted for $candidate (term=${e.self.term})')
msg := '${e.self.term}:${candidate}'
sig_hex := e.keys.sign(msg)
for mut c in e.clients {
k := vote_key(e.self.term, e.self.id)
c.hset(k, 'candidate', candidate) or {}
c.hset(k, 'sig', sig_hex) or {}
c.expire(k, 5) or {}
}
println('[${e.self.id}] voted for ${candidate} (term=${e.self.term})')
}
// Report node health status
fn (mut e Election) report_node_health(target_id string, status string) {
now := time.now().unix()
msg := '${target_id}:${status}:${now}'
sig_hex := e.keys.sign(msg)
report := HealthReport{
reporter_id: e.self.id
target_id: target_id
status: status
timestamp: now
signature: sig_hex
}
for mut c in e.clients {
k := health_key(e.self.id, target_id)
c.hset(k, 'status', status) or {}
c.hset(k, 'timestamp', now.str()) or {}
c.hset(k, 'signature', sig_hex) or {}
c.expire(k, 86400) or {} // expire after 24 hours
}
println('[${e.self.id}] reported $target_id as $status')
now := time.now().unix()
msg := '${target_id}:${status}:${now}'
sig_hex := e.keys.sign(msg)
_ := HealthReport{
reporter_id: e.self.id
target_id: target_id
status: status
timestamp: now
signature: sig_hex
}
for mut c in e.clients {
k := health_key(e.self.id, target_id)
c.hset(k, 'status', status) or {}
c.hset(k, 'timestamp', now.str()) or {}
c.hset(k, 'signature', sig_hex) or {}
c.expire(k, 86400) or {} // expire after 24 hours
}
println('[${e.self.id}] reported ${target_id} as ${status}')
}
// Collect health reports and check for consensus on unavailable nodes
fn (mut e Election) check_node_availability() {
now := time.now().unix()
mut unavailable_reports := map[string]map[string]i64{} // target_id -> reporter_id -> timestamp
for mut c in e.clients {
keys := c.keys('health:*') or { continue }
for k in keys {
parts := k.split(':')
if parts.len != 3 { continue }
reporter_id := parts[1]
target_id := parts[2]
vals := c.hgetall(k) or { continue }
status := vals['status']
timestamp_str := vals['timestamp']
sig_hex := vals['signature']
if reporter_id !in e.pubkeys { continue }
timestamp := timestamp_str.i64()
msg := '${target_id}:${status}:${timestamp}'
if verify(e.pubkeys[reporter_id], msg, sig_hex) {
if status == 'unavailable' && (now - timestamp) >= (node_unavailable_threshold_ms / 1000) {
if target_id !in unavailable_reports {
unavailable_reports[target_id] = map[string]i64{}
}
unavailable_reports[target_id][reporter_id] = timestamp
}
}
}
}
// Check for consensus (2 out of 3 active nodes agree)
for target_id, reports in unavailable_reports {
if reports.len >= 2 && target_id in e.all_nodes {
if e.all_nodes[target_id].status == .active {
println('[${e.self.id}] Consensus reached: $target_id is unavailable for >1 day')
e.promote_buffer_node(target_id)
}
}
}
now := time.now().unix()
mut unavailable_reports := map[string]map[string]i64{} // target_id -> reporter_id -> timestamp
for mut c in e.clients {
keys := c.keys('health:*') or { continue }
for k in keys {
parts := k.split(':')
if parts.len != 3 {
continue
}
reporter_id := parts[1]
target_id := parts[2]
vals := c.hgetall(k) or { continue }
status := vals['status']
timestamp_str := vals['timestamp']
sig_hex := vals['signature']
if reporter_id !in e.pubkeys {
continue
}
timestamp := timestamp_str.i64()
msg := '${target_id}:${status}:${timestamp}'
if verify(e.pubkeys[reporter_id], msg, sig_hex) {
if status == 'unavailable'
&& (now - timestamp) >= (node_unavailable_threshold_ms / 1000) {
if target_id !in unavailable_reports {
unavailable_reports[target_id] = map[string]i64{}
}
unavailable_reports[target_id][reporter_id] = timestamp
}
}
}
}
// Check for consensus (2 out of 3 active nodes agree)
for target_id, reports in unavailable_reports {
if reports.len >= 2 && target_id in e.all_nodes {
if e.all_nodes[target_id].status == .active {
println('[${e.self.id}] Consensus reached: ${target_id} is unavailable for >1 day')
e.promote_buffer_node(target_id)
}
}
}
}
// Promote a buffer node to active status
fn (mut e Election) promote_buffer_node(failed_node_id string) {
if e.buffer_nodes.len == 0 {
println('[${e.self.id}] No buffer nodes available for promotion')
return
}
// Select first available buffer node
buffer_id := e.buffer_nodes[0]
// Update node statuses
if failed_node_id in e.all_nodes {
e.all_nodes[failed_node_id].status = .unavailable
}
if buffer_id in e.all_nodes {
e.all_nodes[buffer_id].status = .active
}
// Remove from buffer list
e.buffer_nodes = e.buffer_nodes.filter(it != buffer_id)
// Announce the promotion
for mut c in e.clients {
k := node_status_key(buffer_id)
c.hset(k, 'status', 'active') or {}
c.hset(k, 'promoted_at', time.now().unix().str()) or {}
c.hset(k, 'replaced_node', failed_node_id) or {}
// Mark failed node as unavailable
failed_k := node_status_key(failed_node_id)
c.hset(failed_k, 'status', 'unavailable') or {}
c.hset(failed_k, 'failed_at', time.now().unix().str()) or {}
}
println('[${e.self.id}] Promoted buffer node $buffer_id to replace failed node $failed_node_id')
if e.buffer_nodes.len == 0 {
println('[${e.self.id}] No buffer nodes available for promotion')
return
}
// Select first available buffer node
buffer_id := e.buffer_nodes[0]
// Update node statuses
if failed_node_id in e.all_nodes {
e.all_nodes[failed_node_id].status = .unavailable
}
if buffer_id in e.all_nodes {
e.all_nodes[buffer_id].status = .active
}
// Remove from buffer list
e.buffer_nodes = e.buffer_nodes.filter(it != buffer_id)
// Announce the promotion
for mut c in e.clients {
k := node_status_key(buffer_id)
c.hset(k, 'status', 'active') or {}
c.hset(k, 'promoted_at', time.now().unix().str()) or {}
c.hset(k, 'replaced_node', failed_node_id) or {}
// Mark failed node as unavailable
failed_k := node_status_key(failed_node_id)
c.hset(failed_k, 'status', 'unavailable') or {}
c.hset(failed_k, 'failed_at', time.now().unix().str()) or {}
}
println('[${e.self.id}] Promoted buffer node ${buffer_id} to replace failed node ${failed_node_id}')
}
// Collect votes from ALL redis servers, verify signatures (only from active nodes)
fn (mut e Election) collect_votes(term int) map[string]int {
mut counts := map[string]int{}
mut seen := map[string]bool{} // avoid double-counting same vote from multiple servers
mut counts := map[string]int{}
mut seen := map[string]bool{} // avoid double-counting same vote from multiple servers
for mut c in e.clients {
keys := c.keys('vote:${term}:*') or { continue }
for k in keys {
if seen[k] { continue }
seen[k] = true
vals := c.hgetall(k) or { continue }
candidate := vals['candidate']
sig_hex := vals['sig']
voter_id := k.split(':')[2]
// Only count votes from active nodes
if voter_id !in e.pubkeys || voter_id !in e.all_nodes { continue }
if e.all_nodes[voter_id].status != .active { continue }
msg := '${term}:${candidate}'
if verify(e.pubkeys[voter_id], msg, sig_hex) {
counts[candidate]++
} else {
println('[${e.self.id}] invalid signature from $voter_id')
}
}
}
return counts
for mut c in e.clients {
keys := c.keys('vote:${term}:*') or { continue }
for k in keys {
if seen[k] {
continue
}
seen[k] = true
vals := c.hgetall(k) or { continue }
candidate := vals['candidate']
sig_hex := vals['sig']
voter_id := k.split(':')[2]
// Only count votes from active nodes
if voter_id !in e.pubkeys || voter_id !in e.all_nodes {
continue
}
if e.all_nodes[voter_id].status != .active {
continue
}
msg := '${term}:${candidate}'
if verify(e.pubkeys[voter_id], msg, sig_hex) {
counts[candidate]++
} else {
println('[${e.self.id}] invalid signature from ${voter_id}')
}
}
}
return counts
}
// Run election (only active nodes participate)
fn (mut e Election) run_election() {
if e.self.status != .active {
return // Buffer nodes don't participate in elections
}
e.self.term++
e.vote_for(e.self.id)
if e.self.status != .active {
return
}
// wait a bit for other nodes to also vote
time.sleep(500 * time.millisecond)
e.self.term++
e.vote_for(e.self.id)
votes := e.collect_votes(e.self.term)
active_node_count := e.all_nodes.values().filter(it.status == .active).len
majority_threshold := (active_node_count / 2) + 1
for cand, cnt in votes {
if cnt >= majority_threshold {
if cand == e.self.id {
println('[${e.self.id}] I AM LEADER (term=${e.self.term}, votes=$cnt, active_nodes=$active_node_count)')
e.self.leader = true
} else {
println('[${e.self.id}] sees LEADER = $cand (term=${e.self.term}, votes=$cnt, active_nodes=$active_node_count)')
e.self.leader = false
}
}
}
// wait a bit for other nodes to also vote
time.sleep(500 * time.millisecond)
votes := e.collect_votes(e.self.term)
active_node_count := e.all_nodes.values().filter(it.status == .active).len
majority_threshold := (active_node_count / 2) + 1
for cand, cnt in votes {
if cnt >= majority_threshold {
if cand == e.self.id {
println('[${e.self.id}] I AM LEADER (term=${e.self.term}, votes=${cnt}, active_nodes=${active_node_count})')
e.self.leader = true
} else {
println('[${e.self.id}] sees LEADER = ${cand} (term=${e.self.term}, votes=${cnt}, active_nodes=${active_node_count})')
e.self.leader = false
}
}
}
}
// Health monitoring loop (runs in background)
fn (mut e Election) health_monitor_loop() {
for {
if e.self.status == .active {
// Check health of other nodes
for node_id, node in e.all_nodes {
if node_id == e.self.id { continue }
// Simple health check: try to read a heartbeat key
mut is_available := false
for mut c in e.clients {
heartbeat_key := 'heartbeat:${node_id}'
val := c.get(heartbeat_key) or { continue }
last_heartbeat := val.i64()
if (time.now().unix() - last_heartbeat) < 60 { // 60 seconds threshold
is_available = true
break
}
}
status := if is_available { 'available' } else { 'unavailable' }
e.report_node_health(node_id, status)
}
// Check for consensus on failed nodes
e.check_node_availability()
}
time.sleep(health_check_interval_ms * time.millisecond)
}
pub fn (mut e Election) health_monitor_loop() {
for {
if e.self.status == .active {
// Check health of other nodes
for node_id, _ in e.all_nodes {
if node_id == e.self.id {
continue
}
// Simple health check: try to read a heartbeat key
mut is_available := false
for mut c in e.clients {
heartbeat_key := 'heartbeat:${node_id}'
val := c.get(heartbeat_key) or { continue }
last_heartbeat := val.i64()
if (time.now().unix() - last_heartbeat) < 60 { // 60 seconds threshold
is_available = true
break
}
}
status := if is_available { 'available' } else { 'unavailable' }
e.report_node_health(node_id, status)
}
// Check for consensus on failed nodes
e.check_node_availability()
}
time.sleep(health_check_interval_ms * time.millisecond)
}
}
// Heartbeat loop
fn (mut e Election) heartbeat_loop() {
for {
// Update own heartbeat
now := time.now().unix()
for mut c in e.clients {
heartbeat_key := 'heartbeat:${e.self.id}'
c.set(heartbeat_key, now.str()) or {}
c.expire(heartbeat_key, 120) or {} // expire after 2 minutes
}
if e.self.status == .active {
if e.self.leader {
println('[${e.self.id}] Heartbeat term=${e.self.term} (LEADER)')
} else {
e.run_election()
}
} else if e.self.status == .buffer {
println('[${e.self.id}] Buffer node monitoring cluster')
}
time.sleep(heartbeat_interval_ms * time.millisecond)
}
pub fn (mut e Election) heartbeat_loop() {
for {
// Update own heartbeat
now := time.now().unix()
for mut c in e.clients {
heartbeat_key := 'heartbeat:${e.self.id}'
c.set(heartbeat_key, now.str()) or {}
c.expire(heartbeat_key, 120) or {} // expire after 2 minutes
}
if e.self.status == .active {
if e.self.leader {
println('[${e.self.id}] Heartbeat term=${e.self.term} (LEADER)')
} else {
e.run_election()
}
} else if e.self.status == .buffer {
println('[${e.self.id}] Buffer node monitoring cluster')
}
time.sleep(heartbeat_interval_ms * time.millisecond)
}
}

View File

@@ -6,68 +6,68 @@ import time
// Calendar represents a collection of events
@[heap]
pub struct Calendar {
Base
Base
pub mut:
group_id u32 // Associated group for permissions
events []u32 // IDs of calendar events (changed to u32 to match CalendarEvent)
color string // Hex color code
timezone string
is_public bool
group_id u32 // Associated group for permissions
events []u32 // IDs of calendar events (changed to u32 to match CalendarEvent)
color string // Hex color code
timezone string
is_public bool
}
@[params]
pub struct CalendarArgs {
BaseArgs
BaseArgs
pub mut:
group_id u32
events []u32
color string
timezone string
is_public bool
group_id u32
events []u32
color string
timezone string
is_public bool
}
// calendar_new builds a Calendar from args. Inline comments are persisted
// first via comment_set so the calendar only stores their u32 ids.
// The source contained two interleaved copies of the body (diff artifact,
// with unreachable code after the first return); this is the deduplicated
// definition.
pub fn calendar_new(args CalendarArgs) !Calendar {
	// Persist comments up-front; the Calendar keeps only their ids.
	mut commentids := []u32{}
	for comment in args.comments {
		// Convert CommentArg to CommentArgExtended
		extended_comment := CommentArgExtended{
			comment: comment.comment
			parent:  0
			author:  0
		}
		commentids << comment_set(extended_comment)!
	}
	mut obj := Calendar{
		id:             args.id or { 0 } // Will be set by DB?
		name:           args.name
		description:    args.description
		created_at:     ourtime.now().unix()
		updated_at:     ourtime.now().unix()
		securitypolicy: args.securitypolicy or { 0 }
		tags:           tags2id(args.tags)!
		comments:       commentids
		group_id:       args.group_id
		events:         args.events
		color:          args.color
		timezone:       args.timezone
		is_public:      args.is_public
	}
	return obj
}
// add_event links an event id to the calendar, ignoring duplicates, and
// bumps the updated_at timestamp. Deduplicated from two interleaved diff
// copies of the same body.
pub fn (mut c Calendar) add_event(event_id u32) { // Changed event_id to u32
	if event_id !in c.events {
		c.events << event_id
		c.updated_at = ourtime.now().unix() // Use Base's updated_at
	}
}
// dump serialises the calendar to bytes. Currently a stub returning an
// empty buffer; deduplicated from two interleaved diff copies.
pub fn (mut c Calendar) dump() []u8 {
	// TODO: implement based on lib/data/encoder/readme.md
	return []u8{}
}
// calendar_load deserialises a calendar from bytes. Currently a stub
// returning a zero-value Calendar; the source had a duplicated body and a
// stray closing brace (diff artifact), removed here.
pub fn calendar_load(data []u8) Calendar {
	// TODO: implement based on lib/data/encoder/readme.md
	return Calendar{}
}

View File

@@ -9,256 +9,253 @@ import freeflowuniverse.herolib.core.redisclient
// CalendarEvent represents a single event in a calendar
@[heap]
pub struct CalendarEvent {
Base
Base
pub mut:
title string
start_time i64 // Unix timestamp
end_time i64 // Unix timestamp
location string
attendees []u32 // IDs of user groups
fs_items []u32 // IDs of linked files or dirs
calendar_id u32 // Associated calendar
status EventStatus
is_all_day bool
is_recurring bool
recurrence []RecurrenceRule //normally empty
reminder_mins []int // Minutes before event for reminders
color string // Hex color code
timezone string
title string
start_time i64 // Unix timestamp
end_time i64 // Unix timestamp
location string
attendees []u32 // IDs of user groups
fs_items []u32 // IDs of linked files or dirs
calendar_id u32 // Associated calendar
status EventStatus
is_all_day bool
is_recurring bool
recurrence []RecurrenceRule // normally empty
reminder_mins []int // Minutes before event for reminders
color string // Hex color code
timezone string
}
pub struct Attendee {
pub mut:
user_id u32
status AttendanceStatus
role AttendeeRole
user_id u32
status AttendanceStatus
role AttendeeRole
}
pub enum AttendanceStatus {
no_response
accepted
declined
tentative
no_response
accepted
declined
tentative
}
pub enum AttendeeRole {
required
optional
organizer
required
optional
organizer
}
pub enum EventStatus {
draft
published
cancelled
completed
draft
published
cancelled
completed
}
pub struct RecurrenceRule {
pub mut:
frequency RecurrenceFreq
interval int // Every N frequencies
until i64 // End date (Unix timestamp)
count int // Number of occurrences
by_weekday []int // Days of week (0=Sunday)
by_monthday []int // Days of month
frequency RecurrenceFreq
interval int // Every N frequencies
until i64 // End date (Unix timestamp)
count int // Number of occurrences
by_weekday []int // Days of week (0=Sunday)
by_monthday []int // Days of month
}
pub enum RecurrenceFreq {
none
daily
weekly
monthly
yearly
none
daily
weekly
monthly
yearly
}
@[params]
pub struct CalendarEventArgs {
BaseArgs
BaseArgs
pub mut:
title string
start_time string // use ourtime module to go from string to epoch
end_time string // use ourtime module to go from string to epoch
location string
attendees []u32 // IDs of user groups
fs_items []u32 // IDs of linked files or dirs
calendar_id u32 // Associated calendar
status EventStatus
is_all_day bool
is_recurring bool
recurrence []RecurrenceRule
reminder_mins []int // Minutes before event for reminders
color string // Hex color code
timezone string
title string
start_time string // use ourtime module to go from string to epoch
end_time string // use ourtime module to go from string to epoch
location string
attendees []u32 // IDs of user groups
fs_items []u32 // IDs of linked files or dirs
calendar_id u32 // Associated calendar
status EventStatus
is_all_day bool
is_recurring bool
recurrence []RecurrenceRule
reminder_mins []int // Minutes before event for reminders
color string // Hex color code
timezone string
}
pub fn calendar_event_new(args CalendarEventArgs) !CalendarEvent {
// Convert tags to u32 ID
tags_id := tags2id(args.tags)!
// Convert tags to u32 ID
tags_id := tags2id(args.tags)!
return CalendarEvent{
// Base fields
id: args.id or { 0 }
name: args.name
description: args.description
created_at: ourtime.now().unix()
updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or { 0 }
tags: tags_id
comments: comments2ids(args.comments)!
return CalendarEvent{
// Base fields
id: args.id or { 0 }
name: args.name
description: args.description
created_at: ourtime.now().unix()
updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or { 0 }
tags: tags_id
comments: comments2ids(args.comments)!
// CalendarEvent specific fields
title: args.title
start_time: ourtime.new(args.start_time)!.unix()
end_time: ourtime.new(args.end_time)!.unix()
location: args.location
attendees: args.attendees
fs_items: args.fs_items
calendar_id: args.calendar_id
status: args.status
is_all_day: args.is_all_day
is_recurring: args.is_recurring
recurrence: args.recurrence
reminder_mins: args.reminder_mins
color: args.color
timezone: args.timezone
}
// CalendarEvent specific fields
title: args.title
start_time: ourtime.new(args.start_time)!.unix()
end_time: ourtime.new(args.end_time)!.unix()
location: args.location
attendees: args.attendees
fs_items: args.fs_items
calendar_id: args.calendar_id
status: args.status
is_all_day: args.is_all_day
is_recurring: args.is_recurring
recurrence: args.recurrence
reminder_mins: args.reminder_mins
color: args.color
timezone: args.timezone
}
}
pub fn (mut e CalendarEvent) dump() ![]u8 {
// Create a new encoder
mut enc := encoder.new()
// Add version byte
enc.add_u8(1)
// Encode Base fields
enc.add_u32(e.id)
enc.add_string(e.name)
enc.add_string(e.description)
enc.add_i64(e.created_at)
enc.add_i64(e.updated_at)
enc.add_u32(e.securitypolicy)
enc.add_u32(e.tags)
enc.add_list_u32(e.comments)
// Encode CalendarEvent specific fields
enc.add_string(e.title)
enc.add_string(e.description)
enc.add_i64(e.start_time)
enc.add_i64(e.end_time)
enc.add_string(e.location)
enc.add_list_u32(e.attendees)
enc.add_list_u32(e.fs_items)
enc.add_u32(e.calendar_id)
enc.add_u8(u8(e.status))
enc.add_bool(e.is_all_day)
enc.add_bool(e.is_recurring)
// Encode recurrence array
enc.add_u16(u16(e.recurrence.len))
for rule in e.recurrence {
enc.add_u8(u8(rule.frequency))
enc.add_int(rule.interval)
enc.add_i64(rule.until)
enc.add_int(rule.count)
enc.add_list_int(rule.by_weekday)
enc.add_list_int(rule.by_monthday)
}
enc.add_list_int(e.reminder_mins)
enc.add_string(e.color)
enc.add_string(e.timezone)
return enc.data
// Create a new encoder
mut enc := encoder.new()
// Add version byte
enc.add_u8(1)
// Encode Base fields
enc.add_u32(e.id)
enc.add_string(e.name)
enc.add_string(e.description)
enc.add_i64(e.created_at)
enc.add_i64(e.updated_at)
enc.add_u32(e.securitypolicy)
enc.add_u32(e.tags)
enc.add_list_u32(e.comments)
// Encode CalendarEvent specific fields
enc.add_string(e.title)
enc.add_string(e.description)
enc.add_i64(e.start_time)
enc.add_i64(e.end_time)
enc.add_string(e.location)
enc.add_list_u32(e.attendees)
enc.add_list_u32(e.fs_items)
enc.add_u32(e.calendar_id)
enc.add_u8(u8(e.status))
enc.add_bool(e.is_all_day)
enc.add_bool(e.is_recurring)
// Encode recurrence array
enc.add_u16(u16(e.recurrence.len))
for rule in e.recurrence {
enc.add_u8(u8(rule.frequency))
enc.add_int(rule.interval)
enc.add_i64(rule.until)
enc.add_int(rule.count)
enc.add_list_int(rule.by_weekday)
enc.add_list_int(rule.by_monthday)
}
enc.add_list_int(e.reminder_mins)
enc.add_string(e.color)
enc.add_string(e.timezone)
return enc.data
}
pub fn (ce CalendarEvent) load(data []u8) !CalendarEvent {
// Create a new decoder
mut dec := encoder.decoder_new(data)
// Read version byte
version := dec.get_u8()!
if version != 1 {
return error('wrong version in calendar event load')
}
// Decode Base fields
id := dec.get_u32()!
name := dec.get_string()!
description := dec.get_string()!
created_at := dec.get_i64()!
updated_at := dec.get_i64()!
securitypolicy := dec.get_u32()!
tags := dec.get_u32()!
comments := dec.get_list_u32()!
// Decode CalendarEvent specific fields
title := dec.get_string()!
description2 := dec.get_string()! // Second description field
start_time := dec.get_i64()!
end_time := dec.get_i64()!
location := dec.get_string()!
attendees := dec.get_list_u32()!
fs_items := dec.get_list_u32()!
calendar_id := dec.get_u32()!
status := unsafe { EventStatus(dec.get_u8()!) }
is_all_day := dec.get_bool()!
is_recurring := dec.get_bool()!
// Decode recurrence array
recurrence_len := dec.get_u16()!
mut recurrence := []RecurrenceRule{}
for _ in 0..recurrence_len {
frequency := unsafe{RecurrenceFreq(dec.get_u8()!)}
interval := dec.get_int()!
until := dec.get_i64()!
count := dec.get_int()!
by_weekday := dec.get_list_int()!
by_monthday := dec.get_list_int()!
recurrence << RecurrenceRule{
frequency: frequency
interval: interval
until: until
count: count
by_weekday: by_weekday
by_monthday: by_monthday
}
}
reminder_mins := dec.get_list_int()!
color := dec.get_string()!
timezone := dec.get_string()!
return CalendarEvent{
// Base fields
id: id
name: name
description: description
created_at: created_at
updated_at: updated_at
securitypolicy: securitypolicy
tags: tags
comments: comments
// CalendarEvent specific fields
title: title
start_time: start_time
end_time: end_time
location: location
attendees: attendees
fs_items: fs_items
calendar_id: calendar_id
status: status
is_all_day: is_all_day
is_recurring: is_recurring
recurrence: recurrence
reminder_mins: reminder_mins
color: color
timezone: timezone
}
// Create a new decoder
mut dec := encoder.decoder_new(data)
// Read version byte
version := dec.get_u8()!
if version != 1 {
return error('wrong version in calendar event load')
}
// Decode Base fields
id := dec.get_u32()!
name := dec.get_string()!
description := dec.get_string()!
created_at := dec.get_i64()!
updated_at := dec.get_i64()!
securitypolicy := dec.get_u32()!
tags := dec.get_u32()!
comments := dec.get_list_u32()!
// Decode CalendarEvent specific fields
title := dec.get_string()!
description2 := dec.get_string()! // Second description field
start_time := dec.get_i64()!
end_time := dec.get_i64()!
location := dec.get_string()!
attendees := dec.get_list_u32()!
fs_items := dec.get_list_u32()!
calendar_id := dec.get_u32()!
status := unsafe { EventStatus(dec.get_u8()!) }
is_all_day := dec.get_bool()!
is_recurring := dec.get_bool()!
// Decode recurrence array
recurrence_len := dec.get_u16()!
mut recurrence := []RecurrenceRule{}
for _ in 0 .. recurrence_len {
frequency := unsafe { RecurrenceFreq(dec.get_u8()!) }
interval := dec.get_int()!
until := dec.get_i64()!
count := dec.get_int()!
by_weekday := dec.get_list_int()!
by_monthday := dec.get_list_int()!
recurrence << RecurrenceRule{
frequency: frequency
interval: interval
until: until
count: count
by_weekday: by_weekday
by_monthday: by_monthday
}
}
reminder_mins := dec.get_list_int()!
color := dec.get_string()!
timezone := dec.get_string()!
return CalendarEvent{
// Base fields
id: id
name: name
description: description
created_at: created_at
updated_at: updated_at
securitypolicy: securitypolicy
tags: tags
comments: comments
// CalendarEvent specific fields
title: title
start_time: start_time
end_time: end_time
location: location
attendees: attendees
fs_items: fs_items
calendar_id: calendar_id
status: status
is_all_day: is_all_day
is_recurring: is_recurring
recurrence: recurrence
reminder_mins: reminder_mins
color: color
timezone: timezone
}
}

View File

@@ -8,57 +8,57 @@ import json
@[heap]
pub struct ChatGroup {
pub mut:
id string // blake192 hash
name string
description string
group_id string // Associated group for permissions
chat_type ChatType
messages []string // IDs of chat messages
created_at i64
updated_at i64
last_activity i64
is_archived bool
tags []string
id string // blake192 hash
name string
description string
group_id string // Associated group for permissions
chat_type ChatType
messages []string // IDs of chat messages
created_at i64
updated_at i64
last_activity i64
is_archived bool
tags []string
}
pub enum ChatType {
public_channel
private_channel
direct_message
group_message
public_channel
private_channel
direct_message
group_message
}
pub fn (mut c ChatGroup) calculate_id() {
content := json.encode(ChatGroupContent{
name: c.name
description: c.description
group_id: c.group_id
chat_type: c.chat_type
is_archived: c.is_archived
tags: c.tags
})
hash := blake3.sum256(content.bytes())
c.id = hash.hex()[..48]
content := json.encode(ChatGroupContent{
name: c.name
description: c.description
group_id: c.group_id
chat_type: c.chat_type
is_archived: c.is_archived
tags: c.tags
})
hash := blake3.sum256(content.bytes())
c.id = hash.hex()[..48]
}
struct ChatGroupContent {
name string
description string
group_id string
chat_type ChatType
is_archived bool
tags []string
name string
description string
group_id string
chat_type ChatType
is_archived bool
tags []string
}
pub fn new_chat_group(name string, group_id string, chat_type ChatType) ChatGroup {
mut chat_group := ChatGroup{
name: name
group_id: group_id
chat_type: chat_type
created_at: time.now().unix()
updated_at: time.now().unix()
last_activity: time.now().unix()
}
chat_group.calculate_id()
return chat_group
}
mut chat_group := ChatGroup{
name: name
group_id: group_id
chat_type: chat_type
created_at: time.now().unix()
updated_at: time.now().unix()
last_activity: time.now().unix()
}
chat_group.calculate_id()
return chat_group
}

View File

@@ -8,97 +8,97 @@ import json
@[heap]
pub struct ChatMessage {
pub mut:
id string // blake192 hash
content string
chat_group_id string // Associated chat group
sender_id string // User ID of sender
parent_messages []MessageLink // Referenced/replied messages
fs_files []string // IDs of linked files
message_type MessageType
status MessageStatus
created_at i64
updated_at i64
edited_at i64
deleted_at i64
reactions []MessageReaction
mentions []string // User IDs mentioned in message
tags []string
id string // blake192 hash
content string
chat_group_id string // Associated chat group
sender_id string // User ID of sender
parent_messages []MessageLink // Referenced/replied messages
fs_files []string // IDs of linked files
message_type MessageType
status MessageStatus
created_at i64
updated_at i64
edited_at i64
deleted_at i64
reactions []MessageReaction
mentions []string // User IDs mentioned in message
tags []string
}
pub struct MessageLink {
pub mut:
message_id string
link_type MessageLinkType
message_id string
link_type MessageLinkType
}
pub enum MessageLinkType {
reply
reference
forward
quote
reply
reference
forward
quote
}
pub enum MessageType {
text
image
file
voice
video
system
announcement
text
image
file
voice
video
system
announcement
}
pub enum MessageStatus {
sent
delivered
read
failed
deleted
sent
delivered
read
failed
deleted
}
pub struct MessageReaction {
pub mut:
user_id string
emoji string
timestamp i64
user_id string
emoji string
timestamp i64
}
pub fn (mut m ChatMessage) calculate_id() {
content := json.encode(MessageContent{
content: m.content
chat_group_id: m.chat_group_id
sender_id: m.sender_id
parent_messages: m.parent_messages
fs_files: m.fs_files
message_type: m.message_type
mentions: m.mentions
tags: m.tags
})
hash := blake3.sum256(content.bytes())
m.id = hash.hex()[..48]
content := json.encode(MessageContent{
content: m.content
chat_group_id: m.chat_group_id
sender_id: m.sender_id
parent_messages: m.parent_messages
fs_files: m.fs_files
message_type: m.message_type
mentions: m.mentions
tags: m.tags
})
hash := blake3.sum256(content.bytes())
m.id = hash.hex()[..48]
}
struct MessageContent {
content string
chat_group_id string
sender_id string
parent_messages []MessageLink
fs_files []string
message_type MessageType
mentions []string
tags []string
content string
chat_group_id string
sender_id string
parent_messages []MessageLink
fs_files []string
message_type MessageType
mentions []string
tags []string
}
pub fn new_chat_message(content string, chat_group_id string, sender_id string) ChatMessage {
mut message := ChatMessage{
content: content
chat_group_id: chat_group_id
sender_id: sender_id
message_type: .text
status: .sent
created_at: time.now().unix()
updated_at: time.now().unix()
}
message.calculate_id()
return message
}
mut message := ChatMessage{
content: content
chat_group_id: chat_group_id
sender_id: sender_id
message_type: .text
status: .sent
created_at: time.now().unix()
updated_at: time.now().unix()
}
message.calculate_id()
return message
}

View File

@@ -0,0 +1,47 @@
module heromodels
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.encoder
// set persists obj in the redis hash db:<TypeName>, keyed by obj.id,
// serialised with the herolib binary encoder.
pub fn set[T](obj T) ! {
	mut r := redisclient.core_get()!
	encoded := encoder.encode(obj)!
	r.hset('db:${T.name}', obj.id.str(), encoded.bytestr())!
}
// get loads the object with the given id from the redis hash db:<TypeName>
// and decodes it with the herolib binary decoder. Errors if the id is
// unknown or decoding fails.
// Fix: removed the unused dead local `t := T{}`.
pub fn get[T](id u32) !T {
	mut redis := redisclient.core_get()!
	data := redis.hget('db:${T.name}', id.str())!
	return encoder.decode[T](data.bytes())!
}
// exists reports whether an object with the given id is stored in the redis
// hash db:<TypeName>.
// Fix: derive the key from `T.name` directly, consistent with set/get/list,
// instead of constructing a throwaway `T{}` just to call type_name().
pub fn exists[T](id u32) !bool {
	mut redis := redisclient.core_get()!
	return redis.hexists('db:${T.name}', id.str())!
}
// delete removes the object with the given id from the redis hash
// db:<TypeName>. Deleting a non-existent id is a no-op at the redis level.
// Fix: derive the key from `T.name` directly, consistent with set/get/list,
// instead of constructing a throwaway `T{}` just to call type_name().
pub fn delete[T](id u32) ! {
	mut redis := redisclient.core_get()!
	redis.hdel('db:${T.name}', id.str())!
}
// list loads every stored object of type T from the redis hash
// db:<TypeName>.
// Fix: the original read `redis.hkeys('db:${name}')` where `name` was
// undefined — it must be `T.name`, matching the key used by set/get.
pub fn list[T]() ![]T {
	mut redis := redisclient.core_get()!
	ids := redis.hkeys('db:${T.name}')!
	mut result := []T{cap: ids.len} // preallocate: one entry per stored id
	for id in ids {
		result << get[T](id.u32())!
	}
	return result
}
// make it easy to get a base object
// new_from_base constructs a T whose embedded Base is initialised from args;
// all other fields are left at their zero values.
// Fix: the declared return type was `!Base` while the body returns `T{...}`
// (a type mismatch that cannot compile); since T embeds Base, `!T` is what
// the body constructs and what callers of a generic constructor expect.
pub fn new_from_base[T](args BaseArgs) !T {
	return T{
		Base: new_base(args)!
	}
}

View File

@@ -5,7 +5,6 @@ import json
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.hero.heromodels.openrpc
fn send_request(mut conn unix.StreamConn, request openrpc.JsonRpcRequest) ! {
request_json := json.encode(request)
conn.write_string(request_json)!
@@ -31,9 +30,9 @@ console.print_item('Connected to server')
console.print_header('Test 1: Discover OpenRPC Specification')
discover_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0'
method: 'discover'
params: 'null'
id: '1'
method: 'discover'
params: 'null'
id: '1'
}
send_request(mut conn, discover_request)!
@@ -46,9 +45,9 @@ comment_json := '{"comment": "This is a test comment from OpenRPC client", "pare
create_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0'
method: 'comment_set'
params: comment_json
id: '2'
method: 'comment_set'
params: comment_json
id: '2'
}
send_request(mut conn, create_request)!
@@ -59,9 +58,9 @@ console.print_item('Comment created: ${create_response}')
console.print_header('Test 3: List All Comments')
list_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0'
method: 'comment_list'
params: 'null'
id: '3'
method: 'comment_list'
params: 'null'
id: '3'
}
send_request(mut conn, list_request)!
@@ -74,9 +73,9 @@ get_args_json := '{"author": 1}'
get_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0'
method: 'comment_get'
params: get_args_json
id: '4'
method: 'comment_get'
params: get_args_json
id: '4'
}
send_request(mut conn, get_request)!
@@ -84,5 +83,3 @@ get_response := read_response(mut conn)!
console.print_item('Comments by author: ${get_response}')
console.print_header('All tests completed successfully!')

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
// Create a user
mut user := new_user('John Doe', 'john@example.com')
@@ -18,7 +17,8 @@ mut issue := new_project_issue('Fix login bug', project.id, user.id, .bug)
mut calendar := new_calendar('Team Calendar', group.id)
// Create an event
mut event := new_calendar_event('Sprint Planning', 1672531200, 1672534800, calendar.id, user.id)
mut event := new_calendar_event('Sprint Planning', 1672531200, 1672534800, calendar.id,
user.id)
calendar.add_event(event.id)
// Create a filesystem
@@ -34,4 +34,4 @@ println('Issue ID: ${issue.id}')
println('Calendar ID: ${calendar.id}')
println('Event ID: ${event.id}')
println('Filesystem ID: ${fs.id}')
println('Blob ID: ${blob.id}')
println('Blob ID: ${blob.id}')

View File

@@ -20,4 +20,4 @@ console.print_item('Press Ctrl+C to stop the server')
// Keep the main thread alive
for {
time.sleep(1 * time.second)
}
}

View File

@@ -8,45 +8,45 @@ import json
@[heap]
pub struct Fs {
pub mut:
id string // blake192 hash
name string
description string
group_id string // Associated group for permissions
root_dir_id string // ID of root directory
created_at i64
updated_at i64
quota_bytes i64 // Storage quota in bytes
used_bytes i64 // Current usage in bytes
tags []string
id string // blake192 hash
name string
description string
group_id string // Associated group for permissions
root_dir_id string // ID of root directory
created_at i64
updated_at i64
quota_bytes i64 // Storage quota in bytes
used_bytes i64 // Current usage in bytes
tags []string
}
pub fn (mut f Fs) calculate_id() {
content := json.encode(FsContent{
name: f.name
description: f.description
group_id: f.group_id
quota_bytes: f.quota_bytes
tags: f.tags
})
hash := blake3.sum256(content.bytes())
f.id = hash.hex()[..48]
content := json.encode(FsContent{
name: f.name
description: f.description
group_id: f.group_id
quota_bytes: f.quota_bytes
tags: f.tags
})
hash := blake3.sum256(content.bytes())
f.id = hash.hex()[..48]
}
struct FsContent {
name string
description string
group_id string
quota_bytes i64
tags []string
name string
description string
group_id string
quota_bytes i64
tags []string
}
pub fn new_fs(name string, group_id string) Fs {
mut fs := Fs{
name: name
group_id: group_id
created_at: time.now().unix()
updated_at: time.now().unix()
}
fs.calculate_id()
return fs
}
mut fs := Fs{
name: name
group_id: group_id
created_at: time.now().unix()
updated_at: time.now().unix()
}
fs.calculate_id()
return fs
}

View File

@@ -7,35 +7,35 @@ import crypto.blake3
@[heap]
pub struct FsBlob {
pub mut:
id string // blake192 hash of content
data []u8 // Binary data (max 1MB)
size_bytes int // Size in bytes
created_at i64
mime_type string
encoding string // e.g., "gzip", "none"
id string // blake192 hash of content
data []u8 // Binary data (max 1MB)
size_bytes int // Size in bytes
created_at i64
mime_type string
encoding string // e.g., "gzip", "none"
}
// calculate_id sets the blob's content-addressed id: the first 192 bits
// (48 hex chars) of the blake3-256 hash of its data. Deduplicated from two
// interleaved diff copies (the duplicate redeclared `hash`).
pub fn (mut b FsBlob) calculate_id() {
	hash := blake3.sum256(b.data)
	b.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars
}
// new_fs_blob wraps raw bytes in a content-addressed blob. Rejects payloads
// over 1MB; the id is derived from the data via calculate_id. Deduplicated
// from two interleaved diff copies of the same body.
pub fn new_fs_blob(data []u8) !FsBlob {
	if data.len > 1024 * 1024 { // 1MB limit
		return error('Blob size exceeds 1MB limit')
	}
	mut blob := FsBlob{
		data:       data
		size_bytes: data.len
		created_at: time.now().unix()
		encoding:   'none'
	}
	blob.calculate_id()
	return blob
}
// verify_integrity rehashes the blob data and checks that the first 48 hex
// chars of the blake3-256 digest still match the stored id. The source had a
// duplicated body and a stray closing brace (diff artifact), removed here.
pub fn (b FsBlob) verify_integrity() bool {
	hash := blake3.sum256(b.data)
	return hash.hex()[..48] == b.id
}

View File

@@ -8,46 +8,46 @@ import json
@[heap]
pub struct FsDir {
pub mut:
id string // blake192 hash
name string
fs_id string // Associated filesystem
parent_id string // Parent directory ID (empty for root)
group_id string // Associated group for permissions
children []string // Child directory and file IDs
created_at i64
updated_at i64
tags []string
id string // blake192 hash
name string
fs_id string // Associated filesystem
parent_id string // Parent directory ID (empty for root)
group_id string // Associated group for permissions
children []string // Child directory and file IDs
created_at i64
updated_at i64
tags []string
}
pub fn (mut d FsDir) calculate_id() {
content := json.encode(DirContent{
name: d.name
fs_id: d.fs_id
parent_id: d.parent_id
group_id: d.group_id
tags: d.tags
})
hash := blake3.sum256(content.bytes())
d.id = hash.hex()[..48]
content := json.encode(DirContent{
name: d.name
fs_id: d.fs_id
parent_id: d.parent_id
group_id: d.group_id
tags: d.tags
})
hash := blake3.sum256(content.bytes())
d.id = hash.hex()[..48]
}
struct DirContent {
name string
fs_id string
parent_id string
group_id string
tags []string
name string
fs_id string
parent_id string
group_id string
tags []string
}
pub fn new_fs_dir(name string, fs_id string, parent_id string, group_id string) FsDir {
mut dir := FsDir{
name: name
fs_id: fs_id
parent_id: parent_id
group_id: group_id
created_at: time.now().unix()
updated_at: time.now().unix()
}
dir.calculate_id()
return dir
}
mut dir := FsDir{
name: name
fs_id: fs_id
parent_id: parent_id
group_id: group_id
created_at: time.now().unix()
updated_at: time.now().unix()
}
dir.calculate_id()
return dir
}

View File

@@ -8,58 +8,58 @@ import json
@[heap]
pub struct FsFile {
pub mut:
id string // blake192 hash
name string
fs_id string // Associated filesystem
directories []string // Directory IDs where this file exists
blobs []string // Blake192 IDs of file content blobs
size_bytes i64 // Total file size
mime_type string
checksum string // Overall file checksum
created_at i64
updated_at i64
accessed_at i64
tags []string
metadata map[string]string // Custom metadata
id string // blake192 hash
name string
fs_id string // Associated filesystem
directories []string // Directory IDs where this file exists
blobs []string // Blake192 IDs of file content blobs
size_bytes i64 // Total file size
mime_type string
checksum string // Overall file checksum
created_at i64
updated_at i64
accessed_at i64
tags []string
metadata map[string]string // Custom metadata
}
pub fn (mut f FsFile) calculate_id() {
content := json.encode(FileContent{
name: f.name
fs_id: f.fs_id
directories: f.directories
blobs: f.blobs
size_bytes: f.size_bytes
mime_type: f.mime_type
checksum: f.checksum
tags: f.tags
metadata: f.metadata
})
hash := blake3.sum256(content.bytes())
f.id = hash.hex()[..48]
content := json.encode(FileContent{
name: f.name
fs_id: f.fs_id
directories: f.directories
blobs: f.blobs
size_bytes: f.size_bytes
mime_type: f.mime_type
checksum: f.checksum
tags: f.tags
metadata: f.metadata
})
hash := blake3.sum256(content.bytes())
f.id = hash.hex()[..48]
}
struct FileContent {
name string
fs_id string
directories []string
blobs []string
size_bytes i64
mime_type string
checksum string
tags []string
metadata map[string]string
name string
fs_id string
directories []string
blobs []string
size_bytes i64
mime_type string
checksum string
tags []string
metadata map[string]string
}
pub fn new_fs_file(name string, fs_id string, directories []string) FsFile {
mut file := FsFile{
name: name
fs_id: fs_id
directories: directories
created_at: time.now().unix()
updated_at: time.now().unix()
accessed_at: time.now().unix()
}
file.calculate_id()
return file
}
mut file := FsFile{
name: name
fs_id: fs_id
directories: directories
created_at: time.now().unix()
updated_at: time.now().unix()
accessed_at: time.now().unix()
}
file.calculate_id()
return file
}

View File

@@ -8,54 +8,54 @@ import json
@[heap]
pub struct FsSymlink {
pub mut:
id string // blake192 hash
name string
fs_id string // Associated filesystem
parent_id string // Parent directory ID
target_id string // ID of target file or directory
target_type SymlinkTargetType
created_at i64
updated_at i64
tags []string
id string // blake192 hash
name string
fs_id string // Associated filesystem
parent_id string // Parent directory ID
target_id string // ID of target file or directory
target_type SymlinkTargetType
created_at i64
updated_at i64
tags []string
}
pub enum SymlinkTargetType {
file
directory
file
directory
}
pub fn (mut s FsSymlink) calculate_id() {
content := json.encode(SymlinkContent{
name: s.name
fs_id: s.fs_id
parent_id: s.parent_id
target_id: s.target_id
target_type: s.target_type
tags: s.tags
})
hash := blake3.sum256(content.bytes())
s.id = hash.hex()[..48]
content := json.encode(SymlinkContent{
name: s.name
fs_id: s.fs_id
parent_id: s.parent_id
target_id: s.target_id
target_type: s.target_type
tags: s.tags
})
hash := blake3.sum256(content.bytes())
s.id = hash.hex()[..48]
}
struct SymlinkContent {
name string
fs_id string
parent_id string
target_id string
target_type SymlinkTargetType
tags []string
name string
fs_id string
parent_id string
target_id string
target_type SymlinkTargetType
tags []string
}
pub fn new_fs_symlink(name string, fs_id string, parent_id string, target_id string, target_type SymlinkTargetType) FsSymlink {
mut symlink := FsSymlink{
name: name
fs_id: fs_id
parent_id: parent_id
target_id: target_id
target_type: target_type
created_at: time.now().unix()
updated_at: time.now().unix()
}
symlink.calculate_id()
return symlink
}
mut symlink := FsSymlink{
name: name
fs_id: fs_id
parent_id: parent_id
target_id: target_id
target_type: target_type
created_at: time.now().unix()
updated_at: time.now().unix()
}
symlink.calculate_id()
return symlink
}

View File

@@ -8,74 +8,74 @@ import json
@[heap]
pub struct Group {
pub mut:
id string // blake192 hash
name string
description string
members []GroupMember
subgroups []string // IDs of child groups
parent_group string // ID of parent group
created_at i64
updated_at i64
is_public bool
tags []string
id string // blake192 hash
name string
description string
members []GroupMember
subgroups []string // IDs of child groups
parent_group string // ID of parent group
created_at i64
updated_at i64
is_public bool
tags []string
}
pub struct GroupMember {
pub mut:
user_id string
role GroupRole
joined_at i64
user_id string
role GroupRole
joined_at i64
}
pub enum GroupRole {
reader
writer
admin
owner
reader
writer
admin
owner
}
pub fn (mut g Group) calculate_id() {
content := json.encode(GroupContent{
name: g.name
description: g.description
members: g.members
subgroups: g.subgroups
parent_group: g.parent_group
is_public: g.is_public
tags: g.tags
})
hash := blake3.sum256(content.bytes())
g.id = hash.hex()[..48]
content := json.encode(GroupContent{
name: g.name
description: g.description
members: g.members
subgroups: g.subgroups
parent_group: g.parent_group
is_public: g.is_public
tags: g.tags
})
hash := blake3.sum256(content.bytes())
g.id = hash.hex()[..48]
}
struct GroupContent {
name string
description string
members []GroupMember
subgroups []string
parent_group string
is_public bool
tags []string
name string
description string
members []GroupMember
subgroups []string
parent_group string
is_public bool
tags []string
}
pub fn new_group(name string, description string) Group {
mut group := Group{
name: name
description: description
created_at: time.now().unix()
updated_at: time.now().unix()
is_public: false
}
group.calculate_id()
return group
mut group := Group{
name: name
description: description
created_at: time.now().unix()
updated_at: time.now().unix()
is_public: false
}
group.calculate_id()
return group
}
pub fn (mut g Group) add_member(user_id string, role GroupRole) {
g.members << GroupMember{
user_id: user_id
role: role
joined_at: time.now().unix()
}
g.updated_at = time.now().unix()
g.calculate_id()
}
g.members << GroupMember{
user_id: user_id
role: role
joined_at: time.now().unix()
}
g.updated_at = time.now().unix()
g.calculate_id()
}

View File

@@ -24,25 +24,25 @@ pub fn comment_get(params string) !string {
if params == 'null' || params == '{}' {
return error('No valid search criteria provided. Please specify id, author, or parent.')
}
args := json.decode(CommentGetArgs, params)!
// If ID is provided, get specific comment
if id := args.id {
comment := heromodels.comment_get(id)!
return json.encode(comment)
}
// If author is provided, find comments by author
if author := args.author {
return get_comments_by_author(author)!
}
// If parent is provided, find child comments
if parent := args.parent {
return get_comments_by_parent(parent)!
}
return error('No valid search criteria provided. Please specify id, author, or parent.')
}
@@ -50,21 +50,23 @@ pub fn comment_get(params string) !string {
pub fn comment_set(params string) !string {
comment_arg := json.decode(heromodels.CommentArgExtended, params)!
id := heromodels.comment_set(comment_arg)!
return json.encode({'id': id})
return json.encode({
'id': id
})
}
// comment_delete removes a comment by ID
pub fn comment_delete(params string) !string {
args := json.decode(CommentDeleteArgs, params)!
// Check if comment exists
if !heromodels.exists[heromodels.Comment](args.id)! {
return error('Comment with id ${args.id} does not exist')
}
// Delete using core method
heromodels.delete[heromodels.Comment](args.id)!
result_json := '{"success": true, "id": ${args.id}}'
return result_json
}
@@ -73,11 +75,11 @@ pub fn comment_delete(params string) !string {
pub fn comment_list() !string {
comments := heromodels.list[heromodels.Comment]()!
mut ids := []u32{}
for comment in comments {
ids << comment.id
}
return json.encode(ids)
}
@@ -85,13 +87,13 @@ pub fn comment_list() !string {
fn get_comments_by_author(author u32) !string {
all_comments := heromodels.list[heromodels.Comment]()!
mut matching_comments := []heromodels.Comment{}
for comment in all_comments {
if comment.author == author {
matching_comments << comment
}
}
return json.encode(matching_comments)
}
@@ -99,12 +101,12 @@ fn get_comments_by_author(author u32) !string {
fn get_comments_by_parent(parent u32) !string {
all_comments := heromodels.list[heromodels.Comment]()!
mut matching_comments := []heromodels.Comment{}
for comment in all_comments {
if comment.parent == parent {
matching_comments << comment
}
}
return json.encode(matching_comments)
}
}

View File

@@ -18,7 +18,7 @@ pub fn new_heromodels_server(args HeroModelsServerArgs) !&HeroModelsServer {
base_server := openrpcserver.new_rpc_server(
socket_path: args.socket_path
)!
return &HeroModelsServer{
RPCServer: *base_server
}
@@ -47,6 +47,6 @@ pub fn (mut server HeroModelsServer) process(method string, params_str string) !
return server.create_error_response(-32601, 'Method not found', method)
}
}
return result
}
}

View File

@@ -23,7 +23,7 @@ pub fn new_heromodels_server(args HeroModelsServerArgs) !&HeroModelsServer {
base_server := openrpcserver.new_rpc_server(
socket_path: args.socket_path
)!
return &HeroModelsServer{
RPCServer: *base_server
}
@@ -69,4 +69,4 @@ pub fn comment2id(comment string) !u32 {
pub fn new_base(args BaseArgs) !Base {
return openrpcserver.new_base(args)!
}
}

View File

@@ -8,96 +8,112 @@ import json
@[heap]
pub struct Project {
pub mut:
id string // blake192 hash
name string
description string
group_id string // Associated group for permissions
swimlanes []Swimlane
milestones []Milestone
issues []string // IDs of project issues
fs_files []string // IDs of linked files
status ProjectStatus
start_date i64
end_date i64
created_at i64
updated_at i64
tags []string
id string // blake192 hash
name string
description string
group_id string // Associated group for permissions
swimlanes []Swimlane
milestones []Milestone
issues []string // IDs of project issues
fs_files []string // IDs of linked files
status ProjectStatus
start_date i64
end_date i64
created_at i64
updated_at i64
tags []string
}
pub struct Swimlane {
pub mut:
id string
name string
description string
order int
color string
is_done bool
id string
name string
description string
order int
color string
is_done bool
}
pub struct Milestone {
pub mut:
id string
name string
description string
due_date i64
completed bool
issues []string // IDs of issues in this milestone
id string
name string
description string
due_date i64
completed bool
issues []string // IDs of issues in this milestone
}
pub enum ProjectStatus {
planning
active
on_hold
completed
cancelled
planning
active
on_hold
completed
cancelled
}
pub fn (mut p Project) calculate_id() {
content := json.encode(ProjectContent{
name: p.name
description: p.description
group_id: p.group_id
swimlanes: p.swimlanes
milestones: p.milestones
issues: p.issues
fs_files: p.fs_files
status: p.status
start_date: p.start_date
end_date: p.end_date
tags: p.tags
})
hash := blake3.sum256(content.bytes())
p.id = hash.hex()[..48]
content := json.encode(ProjectContent{
name: p.name
description: p.description
group_id: p.group_id
swimlanes: p.swimlanes
milestones: p.milestones
issues: p.issues
fs_files: p.fs_files
status: p.status
start_date: p.start_date
end_date: p.end_date
tags: p.tags
})
hash := blake3.sum256(content.bytes())
p.id = hash.hex()[..48]
}
struct ProjectContent {
name string
description string
group_id string
swimlanes []Swimlane
milestones []Milestone
issues []string
fs_files []string
status ProjectStatus
start_date i64
end_date i64
tags []string
name string
description string
group_id string
swimlanes []Swimlane
milestones []Milestone
issues []string
fs_files []string
status ProjectStatus
start_date i64
end_date i64
tags []string
}
pub fn new_project(name string, description string, group_id string) Project {
mut project := Project{
name: name
description: description
group_id: group_id
status: .planning
created_at: time.now().unix()
updated_at: time.now().unix()
swimlanes: [
Swimlane{id: 'todo', name: 'To Do', order: 1, color: '#f1c40f'},
Swimlane{id: 'in_progress', name: 'In Progress', order: 2, color: '#3498db'},
Swimlane{id: 'done', name: 'Done', order: 3, color: '#2ecc71', is_done: true}
]
}
project.calculate_id()
return project
}
mut project := Project{
name: name
description: description
group_id: group_id
status: .planning
created_at: time.now().unix()
updated_at: time.now().unix()
swimlanes: [
Swimlane{
id: 'todo'
name: 'To Do'
order: 1
color: '#f1c40f'
},
Swimlane{
id: 'in_progress'
name: 'In Progress'
order: 2
color: '#3498db'
},
Swimlane{
id: 'done'
name: 'Done'
order: 3
color: '#2ecc71'
is_done: true
},
]
}
project.calculate_id()
return project
}

View File

@@ -8,109 +8,109 @@ import json
@[heap]
pub struct ProjectIssue {
pub mut:
id string // blake192 hash
title string
description string
project_id string // Associated project
issue_type IssueType
priority IssuePriority
status IssueStatus
swimlane_id string // Current swimlane
assignees []string // User IDs
reporter string // User ID who created the issue
milestone_id string // Associated milestone
deadline i64 // Unix timestamp
estimate int // Story points or hours
fs_files []string // IDs of linked files
parent_id string // Parent issue ID (for sub-tasks)
children []string // Child issue IDs
created_at i64
updated_at i64
tags []string
id string // blake192 hash
title string
description string
project_id string // Associated project
issue_type IssueType
priority IssuePriority
status IssueStatus
swimlane_id string // Current swimlane
assignees []string // User IDs
reporter string // User ID who created the issue
milestone_id string // Associated milestone
deadline i64 // Unix timestamp
estimate int // Story points or hours
fs_files []string // IDs of linked files
parent_id string // Parent issue ID (for sub-tasks)
children []string // Child issue IDs
created_at i64
updated_at i64
tags []string
}
pub enum IssueType {
task
story
bug
question
epic
subtask
task
story
bug
question
epic
subtask
}
pub enum IssuePriority {
lowest
low
medium
high
highest
critical
lowest
low
medium
high
highest
critical
}
pub enum IssueStatus {
open
in_progress
blocked
review
testing
done
closed
open
in_progress
blocked
review
testing
done
closed
}
pub fn (mut i ProjectIssue) calculate_id() {
content := json.encode(IssueContent{
title: i.title
description: i.description
project_id: i.project_id
issue_type: i.issue_type
priority: i.priority
status: i.status
swimlane_id: i.swimlane_id
assignees: i.assignees
reporter: i.reporter
milestone_id: i.milestone_id
deadline: i.deadline
estimate: i.estimate
fs_files: i.fs_files
parent_id: i.parent_id
children: i.children
tags: i.tags
})
hash := blake3.sum256(content.bytes())
i.id = hash.hex()[..48]
content := json.encode(IssueContent{
title: i.title
description: i.description
project_id: i.project_id
issue_type: i.issue_type
priority: i.priority
status: i.status
swimlane_id: i.swimlane_id
assignees: i.assignees
reporter: i.reporter
milestone_id: i.milestone_id
deadline: i.deadline
estimate: i.estimate
fs_files: i.fs_files
parent_id: i.parent_id
children: i.children
tags: i.tags
})
hash := blake3.sum256(content.bytes())
i.id = hash.hex()[..48]
}
struct IssueContent {
title string
description string
project_id string
issue_type IssueType
priority IssuePriority
status IssueStatus
swimlane_id string
assignees []string
reporter string
milestone_id string
deadline i64
estimate int
fs_files []string
parent_id string
children []string
tags []string
title string
description string
project_id string
issue_type IssueType
priority IssuePriority
status IssueStatus
swimlane_id string
assignees []string
reporter string
milestone_id string
deadline i64
estimate int
fs_files []string
parent_id string
children []string
tags []string
}
pub fn new_project_issue(title string, project_id string, reporter string, issue_type IssueType) ProjectIssue {
mut issue := ProjectIssue{
title: title
project_id: project_id
reporter: reporter
issue_type: issue_type
priority: .medium
status: .open
swimlane_id: 'todo'
created_at: time.now().unix()
updated_at: time.now().unix()
}
issue.calculate_id()
return issue
}
mut issue := ProjectIssue{
title: title
project_id: project_id
reporter: reporter
issue_type: issue_type
priority: .medium
status: .open
swimlane_id: 'todo'
created_at: time.now().unix()
updated_at: time.now().unix()
}
issue.calculate_id()
return issue
}

View File

@@ -8,61 +8,61 @@ import json
@[heap]
pub struct User {
pub mut:
id string // blake192 hash
name string
email string
public_key string // for encryption/signing
phone string
address string
avatar_url string
bio string
timezone string
created_at i64
updated_at i64
status UserStatus
id string // blake192 hash
name string
email string
public_key string // for encryption/signing
phone string
address string
avatar_url string
bio string
timezone string
created_at i64
updated_at i64
status UserStatus
}
pub enum UserStatus {
active
inactive
suspended
pending
active
inactive
suspended
pending
}
pub fn (mut u User) calculate_id() {
content := json.encode(UserContent{
name: u.name
email: u.email
public_key: u.public_key
phone: u.phone
address: u.address
bio: u.bio
timezone: u.timezone
status: u.status
})
hash := blake3.sum256(content.bytes())
u.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars
content := json.encode(UserContent{
name: u.name
email: u.email
public_key: u.public_key
phone: u.phone
address: u.address
bio: u.bio
timezone: u.timezone
status: u.status
})
hash := blake3.sum256(content.bytes())
u.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars
}
struct UserContent {
name string
email string
public_key string
phone string
address string
bio string
timezone string
status UserStatus
name string
email string
public_key string
phone string
address string
bio string
timezone string
status UserStatus
}
pub fn new_user(name string, email string) User {
mut user := User{
name: name
email: email
created_at: time.now().unix()
updated_at: time.now().unix()
status: .active
}
user.calculate_id()
return user
}
mut user := User{
name: name
email: email
created_at: time.now().unix()
updated_at: time.now().unix()
status: .active
}
user.calculate_id()
return user
}

View File

@@ -6,32 +6,32 @@ import time
@[heap]
pub struct VersionHistory {
pub mut:
current_id string // blake192 hash of current version
previous_id string // blake192 hash of previous version
next_id string // blake192 hash of next version (if exists)
object_type string // Type of object (User, Group, etc.)
change_type ChangeType
changed_by string // User ID who made the change
changed_at i64 // Unix timestamp
change_notes string // Optional description of changes
current_id string // blake192 hash of current version
previous_id string // blake192 hash of previous version
next_id string // blake192 hash of next version (if exists)
object_type string // Type of object (User, Group, etc.)
change_type ChangeType
changed_by string // User ID who made the change
changed_at i64 // Unix timestamp
change_notes string // Optional description of changes
}
pub enum ChangeType {
create
update
delete
restore
create
update
delete
restore
}
pub fn new_version_history(current_id string, previous_id string, object_type string, change_type ChangeType, changed_by string) VersionHistory {
return VersionHistory{
current_id: current_id
previous_id: previous_id
object_type: object_type
change_type: change_type
changed_by: changed_by
changed_at: time.now().unix()
}
return VersionHistory{
current_id: current_id
previous_id: previous_id
object_type: object_type
change_type: change_type
changed_by: changed_by
changed_at: time.now().unix()
}
}
// Database indexes needed:
@@ -39,4 +39,4 @@ pub fn new_version_history(current_id string, previous_id string, object_type st
// - Index on previous_id for walking backward
// - Index on next_id for walking forward
// - Index on object_type for filtering by type
// - Index on changed_by for user activity tracking
// - Index on changed_by for user activity tracking

View File

@@ -1,9 +1,13 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.mcp.aitools.escalayer
import freeflowuniverse.herolib.core.redisclient
import os
fn main() {
// Example of using redisclient module instead of old redis.Connection
redis_example() or { println('Redis example failed: ${err}') }
// Get the current directory where this script is located
current_dir := os.dir(@FILE)
@@ -594,3 +598,64 @@ fn extract_functions_from_code(code string) []string {
return functions
}
// Example function showing how to use redisclient module instead of old redis.Connection
fn redis_example() ! {
// OLD WAY (don't use this):
// mut conns := []redis.Connection{}
// for s in servers {
// mut c := redis.connect(redis.Options{ server: s }) or {
// panic('could not connect to redis $s: $err')
// }
// conns << c
// }
// NEW WAY using redisclient module:
servers := ['127.0.0.1:6379', '127.0.0.1:6380', '127.0.0.1:6381', '127.0.0.1:6382']
mut redis_clients := []&redisclient.Redis{}
for server in servers {
// Parse server address
redis_url := redisclient.get_redis_url(server) or {
println('Failed to parse Redis URL ${server}: ${err}')
continue
}
// Create Redis client using redisclient module
mut redis_client := redisclient.core_get(redis_url) or {
println('Failed to connect to Redis ${server}: ${err}')
continue
}
// Test the connection
redis_client.ping() or {
println('Failed to ping Redis ${server}: ${err}')
continue
}
redis_clients << redis_client
println('Successfully connected to Redis server: ${server}')
}
// Example usage of Redis operations
if redis_clients.len > 0 {
mut redis := redis_clients[0]
// Set a test key
redis.set('test_key', 'test_value') or {
println('Failed to set test key: ${err}')
return
}
// Get the test key
value := redis.get('test_key') or {
println('Failed to get test key: ${err}')
return
}
println('Redis test successful - key: test_key, value: ${value}')
// Clean up
redis.del('test_key') or { println('Failed to delete test key: ${err}') }
}
}

View File

@@ -3,7 +3,7 @@ module core
import net
import time
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.core as herolib_core
import math
import os
@@ -18,7 +18,7 @@ pub mut:
// if ping ok, return true
pub fn ping(args PingArgs) !bool {
platform_ := core.platform()!
platform_ := herolib_core.platform()!
mut cmd := 'ping'
if args.address.contains(':') {
cmd = 'ping6'
@@ -238,13 +238,14 @@ fn ssh_testrun_internal(args TcpPortTestArgs) !(string, SSHResult) {
res := exec(cmd: cmd, ignore_error: true, stdout: false, debug: false)!
// console.print_debug('ssh test ${res.exit_code}: ===== cmd:\n${cmd}\n=====\n${res.output}')
res_output := res.output
if res.exit_code == 0 {
return res.output, SSHResult.ok
return res_output, SSHResult.ok
} else if res.exit_code == 1 {
return res.output, SSHResult.tcpport
return res_output, SSHResult.ssh
} else if res.exit_code == 2 {
return res.output, SSHResult.ping
return res_output, SSHResult.ping
} else {
return res.output, SSHResult.ssh
return res_output, SSHResult.ssh
}
}

View File

@@ -6,16 +6,16 @@ fn test_ipaddr_pub_get() {
}
fn test_ping() {
x := ping(address: '127.0.0.1', count: 1)!
assert x == .ok
x := ping(address: '127.0.0.1', retry: 1)!
assert x == true
}
fn test_ping_timeout() ! {
x := ping(address: '192.168.145.154', count: 5, timeout: 1)!
assert x == .timeout
x := ping(address: '192.168.145.154', retry: 5, nr_ok: 1)!
assert x == false
}
fn test_ping_unknownhost() ! {
x := ping(address: '12.902.219.1', count: 1, timeout: 1)!
assert x == .unknownhost
x := ping(address: '12.902.219.1', retry: 1, nr_ok: 1)!
assert x == false
}

View File

@@ -1,13 +1,5 @@
module linux
// import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.core.texttools
// import freeflowuniverse.herolib.screen
import os
import time
// import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal.core as osal
@[heap]
pub struct LinuxFactory {
pub mut:

View File

@@ -15,10 +15,10 @@ pub fn new() ServerManager {
}
fn (s ServerManager) execute(command string) bool {
// console.print_debug(command)
console.print_debug(command)
r := os.execute(command)
// console.print_debug(r)
console.print_debug(r)
return true
}

View File

@@ -1,7 +1,6 @@
module sshagent
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.builder
// Check if SSH agent is properly configured and all is good
pub fn agent_check(mut agent SSHAgent) ! {

View File

@@ -3,40 +3,185 @@ module main
import os
import io
import freeflowuniverse.herolib.core.logger
import freeflowuniverse.herolib.core.texttools
struct Args {
mut:
logpath string
pane_id string
log bool = true
logreset bool
}
fn main() {
if os.args.len < 2 {
eprintln('Usage: tmux_logger <log_path> [pane_id]')
args := parse_args() or {
eprintln('Error: ${err}')
print_usage()
exit(1)
}
log_path := os.args[1]
if !args.log {
// If logging is disabled, just consume stdin and exit
mut reader := io.new_buffered_reader(reader: os.stdin())
for {
reader.read_line() or { break }
}
return
}
mut l := logger.new(path: log_path) or {
// Determine the actual log directory path
log_dir_path := determine_log_path(args) or {
eprintln('Error determining log path: ${err}')
exit(1)
}
// Handle log reset if requested
if args.logreset {
reset_logs(log_dir_path) or {
eprintln('Error resetting logs: ${err}')
exit(1)
}
}
// Create logger - the logger factory expects a directory path
mut l := logger.new(path: log_dir_path) or {
eprintln('Failed to create logger: ${err}')
exit(1)
}
// Read from stdin line by line and log with categorization
mut reader := io.new_buffered_reader(reader: os.stdin())
// Read from stdin using a more direct approach that works with tmux pipe-pane
// The issue is that tmux pipe-pane sends data differently than regular pipes
mut buffer := []u8{len: 1024}
mut line_buffer := ''
for {
line := reader.read_line() or { break }
if line.len == 0 {
// Read raw bytes from stdin - this is more compatible with tmux pipe-pane
data, bytes_read := os.fd_read(0, buffer.len)
if bytes_read == 0 {
// No data available - for tmux pipe-pane this is normal, continue waiting
continue
}
// Detect output type and set appropriate category
category, logtype := categorize_output(line)
// Convert bytes to string and add to line buffer
line_buffer += data
// Process complete lines
for line_buffer.contains('\n') {
idx := line_buffer.index('\n') or { break }
line := line_buffer[..idx].trim_space()
line_buffer = line_buffer[idx + 1..]
if line.len == 0 {
continue
}
// Detect output type and set appropriate category
category, logtype := categorize_output(line)
// Log immediately - the logger handles its own file operations
l.log(
cat: category
log: line
logtype: logtype
) or {
eprintln('Failed to log line: ${err}')
continue
}
}
}
// Process any remaining data in the buffer
if line_buffer.trim_space().len > 0 {
line := line_buffer.trim_space()
category, logtype := categorize_output(line)
l.log(
cat: category
log: line
logtype: logtype
) or {
eprintln('Failed to log line: ${err}')
continue
) or { eprintln('Failed to log final line: ${err}') }
}
}
fn parse_args() !Args {
if os.args.len < 2 {
return error('Missing required argument: logpath')
}
mut args := Args{
logpath: os.args[1]
}
// Parse optional pane_id (second positional argument)
if os.args.len >= 3 {
args.pane_id = os.args[2]
}
// Parse optional flags
for i in 3 .. os.args.len {
arg := os.args[i]
if arg == '--no-log' || arg == '--log=false' {
args.log = false
} else if arg == '--logreset' || arg == '--logreset=true' {
args.logreset = true
} else if arg.starts_with('--log=') {
val := arg.all_after('=').to_lower()
args.log = val == 'true' || val == '1' || val == 'yes'
} else if arg.starts_with('--logreset=') {
val := arg.all_after('=').to_lower()
args.logreset = val == 'true' || val == '1' || val == 'yes'
}
}
return args
}
fn determine_log_path(args Args) !string {
mut log_path := args.logpath
// Check if logpath is a directory or file
if os.exists(log_path) && os.is_dir(log_path) {
// It's an existing directory
if args.pane_id == '' {
return error('When logpath is a directory, pane_id must be provided')
}
// Create a subdirectory for this pane
pane_dir := os.join_path(log_path, args.pane_id)
return pane_dir
} else if log_path.contains('.') && !log_path.ends_with('/') {
// It looks like a file path, use parent directory
parent_dir := os.dir(log_path)
return parent_dir
} else {
// It's a directory path (may not exist yet)
if args.pane_id == '' {
return log_path
}
// Create a subdirectory for this pane
pane_dir := os.join_path(log_path, args.pane_id)
return pane_dir
}
}
fn reset_logs(logpath string) ! {
if !os.exists(logpath) {
return
}
if os.is_dir(logpath) {
// Remove all .log files in the directory
files := os.ls(logpath) or { return }
for file in files {
if file.ends_with('.log') {
full_path := os.join_path(logpath, file)
os.rm(full_path) or { eprintln('Warning: Failed to remove ${full_path}: ${err}') }
}
}
} else {
// Remove the specific log file
os.rm(logpath) or { return error('Failed to remove log file ${logpath}: ${err}') }
}
}
fn categorize_output(line string) (string, logger.LogType) {
@@ -47,21 +192,41 @@ fn categorize_output(line string) (string, logger.LogType) {
|| line_lower.contains('exception') || line_lower.contains('panic')
|| line_lower.starts_with('e ') || line_lower.contains('fatal')
|| line_lower.contains('critical') {
return 'error', logger.LogType.error
return texttools.expand('error', 10, ' '), logger.LogType.error
}
// Warning patterns - use .stdout logtype but warning category
if line_lower.contains('warning') || line_lower.contains('warn:')
|| line_lower.contains('deprecated') {
return 'warning', logger.LogType.stdout
return texttools.expand('warning', 10, ' '), logger.LogType.stdout
}
// Info/debug patterns - use .stdout logtype
if line_lower.contains('info:') || line_lower.contains('debug:')
|| line_lower.starts_with('info ') || line_lower.starts_with('debug ') {
return 'info', logger.LogType.stdout
return texttools.expand('info', 10, ' '), logger.LogType.stdout
}
// Default to stdout category and logtype
return 'stdout', logger.LogType.stdout
return texttools.expand('stdout', 10, ' '), logger.LogType.stdout
}
fn print_usage() {
eprintln('Usage: tmux_logger <logpath> [pane_id] [options]')
eprintln('')
eprintln('Arguments:')
eprintln(' logpath Directory or file path where logs will be stored')
eprintln(' pane_id Optional pane identifier (required if logpath is a directory)')
eprintln('')
eprintln('Options:')
eprintln(' --log=true|false Enable/disable logging (default: true)')
eprintln(' --no-log Disable logging (same as --log=false)')
eprintln(' --logreset=true|false Reset existing logs before starting (default: false)')
eprintln(' --logreset Reset existing logs (same as --logreset=true)')
eprintln('')
eprintln('Examples:')
eprintln(' tmux_logger /tmp/logs pane1')
eprintln(' tmux_logger /tmp/logs/session.log')
eprintln(' tmux_logger /tmp/logs pane1 --logreset')
eprintln(' tmux_logger /tmp/logs pane1 --no-log')
}

View File

@@ -673,7 +673,7 @@ fn play_pane_ensure(mut plbook PlayBook, mut tmux_instance Tmux) ! {
name := p.get('name')!
parsed := parse_pane_name(name)!
cmd := p.get_default('cmd', '')!
label := p.get_default('label', '')!
// label := p.get_default('label', '')!
// Parse environment variables if provided
mut env := map[string]string{}
@@ -721,7 +721,28 @@ fn play_pane_ensure(mut plbook PlayBook, mut tmux_instance Tmux) ! {
// Find the target pane (by index, since tmux pane IDs can vary)
if pane_number > 0 && pane_number <= window.panes.len {
mut target_pane := window.panes[pane_number - 1] // Convert to 0-based index
target_pane.send_command(cmd)!
// Use declarative command logic for intelligent state management
target_pane.send_command_declarative(cmd)!
}
}
// Handle logging parameters - enable logging if requested
log_enabled := p.get_default_false('log')
if log_enabled {
logpath := p.get_default('logpath', '')!
logreset := p.get_default_false('logreset')
// Find the target pane for logging
if pane_number > 0 && pane_number <= window.panes.len {
mut target_pane := window.panes[pane_number - 1] // Convert to 0-based index
// Enable logging with automation (binary compilation, directory creation, etc.)
target_pane.logging_enable(
logpath: logpath
logreset: logreset
) or {
console.print_debug('Warning: Failed to enable logging for pane ${name}: ${err}')
}
}
}

View File

@@ -164,6 +164,59 @@ hero run -p <heroscript_file>
label:'editor' // Optional: descriptive label
cmd:'vim' // Optional: command to run
env:'EDITOR=vim' // Optional: environment variables
// Multi-line commands are supported using proper heroscript syntax
!!tmux.pane_ensure
name:"mysession|mywindow|2"
label:'setup'
cmd:'
echo "Starting setup..."
mkdir -p /tmp/workspace
cd /tmp/workspace
echo "Setup complete"
'
```
### Multi-line Commands
The tmux module supports multi-line commands in heroscripts using proper multi-line parameter syntax. Multi-line commands are automatically converted to temporary shell scripts for execution.
#### Syntax
Use the multi-line parameter format with quotes:
```heroscript
!!tmux.pane_ensure
name:"session|window|pane"
cmd:'
command1
command2
command3
'
```
#### Features
- **Automatic Script Generation**: Multi-line commands are converted to temporary shell scripts
- **Sequential Execution**: All commands execute in order within the same shell context
- **Error Handling**: Scripts include proper bash shebang and error handling
- **Temporary Files**: Scripts are stored in `/tmp/tmux/{session}/pane_{id}_script.sh`
#### Example
```heroscript
!!tmux.pane_ensure
name:"dev|workspace|1"
label:"setup"
cmd:'
echo "Setting up development environment..."
mkdir -p /tmp/dev_workspace
cd /tmp/dev_workspace
git clone https://github.com/example/repo.git
cd repo
npm install
echo "Development environment ready!"
'
```
### Pane Layout Categories

View File

@@ -2,6 +2,7 @@ module tmux
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.redisclient
// import freeflowuniverse.herolib.session
import os
import time
@@ -12,6 +13,7 @@ pub struct Tmux {
pub mut:
sessions []&Session
sessionid string // unique link to job
redis &redisclient.Redis @[skip] // Redis client for command state tracking
}
// get session (session has windows) .
@@ -87,8 +89,12 @@ pub struct TmuxNewArgs {
// return tmux instance
pub fn new(args TmuxNewArgs) !Tmux {
// Initialize Redis client for command state tracking
mut redis := redisclient.core_get()!
mut t := Tmux{
sessionid: args.sessionid
redis: redis
}
// t.load()!
t.scan()!

View File

@@ -112,7 +112,7 @@ pub fn (mut p Pane) output_wait(c_ string, timeoutsec int) ! {
mut t := ourtime.now()
start := t.unix()
c := c_.replace('\n', '')
for i in 0 .. 2000 {
for _ in 0 .. 2000 {
entries := p.logs_get_new(reset: false)!
for entry in entries {
if entry.content.replace('\n', '').contains(c) {
@@ -146,9 +146,280 @@ pub fn (mut p Pane) processinfo_main() !osal.ProcessInfo {
}
// Send a command to this pane
// Supports both single-line and multi-line commands
pub fn (mut p Pane) send_command(command string) ! {
cmd := 'tmux send-keys -t ${p.window.session.name}:@${p.window.id}.%${p.id} "${command}" Enter'
osal.execute_silent(cmd) or { return error('Cannot send command to pane %${p.id}: ${err}') }
// Check if command contains multiple lines
if command.contains('\n') {
// Multi-line command - create temporary script
p.send_multiline_command(command)!
} else {
// Single-line command - send directly
cmd := 'tmux send-keys -t ${p.window.session.name}:@${p.window.id}.%${p.id} "${command}" Enter'
osal.execute_silent(cmd) or { return error('Cannot send command to pane %${p.id}: ${err}') }
}
}
// Send command with declarative mode logic (intelligent state management)
// This method implements the full declarative logic:
// 1. Check if pane has previous command (Redis lookup)
// 2. If previous command exists:
// a. Check if still running (process verification)
// b. Compare MD5 hashes
// c. If different command OR not running: proceed
// d. If same command AND running: skip
// 3. If proceeding: kill existing processes, then start new command
pub fn (mut p Pane) send_command_declarative(command string) ! {
console.print_debug('Declarative command for pane ${p.id}: ${command[..if command.len > 50 {
50
} else {
command.len
}]}...')
// Step 1: Check if command has changed
command_changed := p.has_command_changed(command)
// Step 2: Check if stored command is still running
stored_running := p.is_stored_command_running()
// Step 3: Decide whether to proceed
should_execute := command_changed || !stored_running
if !should_execute {
console.print_debug('Skipping command execution for pane ${p.id}: same command already running')
return
}
// Step 4: If we have a running command that needs to be replaced, kill it
if stored_running && command_changed {
console.print_debug('Killing existing command in pane ${p.id} before starting new one')
p.kill_running_command()!
// Give processes time to die
time.sleep(500 * time.millisecond)
}
// Step 5: Ensure bash is the parent process
p.ensure_bash_parent()!
// Step 6: Reset pane if it appears empty or needs cleanup
p.reset_if_needed()!
// Step 7: Execute the new command
p.send_command(command)!
// Step 8: Store the new command state
// Get the PID of the command we just started (this is approximate)
time.sleep(100 * time.millisecond) // Give command time to start
p.store_command_state(command, 'running', p.pid)!
console.print_debug('Successfully executed declarative command for pane ${p.id}')
}
// Terminate whatever command this pane is currently tracking in Redis.
// Best-effort: silently returns when no state is stored for the pane.
pub fn (mut p Pane) kill_running_command() ! {
	state := p.get_command_state() or { return } // nothing tracked, nothing to kill
	if state.pid > 0 && osal.process_exists(state.pid) {
		// Take down the tracked process together with its children.
		osal.process_kill_recursive(pid: state.pid)!
		console.print_debug('Killed running command (PID: ${state.pid}) in pane ${p.id}')
	}
	// Belt and braces: also clear any stragglers attached to the pane itself.
	p.kill_pane_process_group()!
	// Reflect in Redis that the command is no longer running.
	p.update_command_status('killed')!
}
// Send a terminal reset when the pane looks empty or its prompt is dirty.
pub fn (mut p Pane) reset_if_needed() ! {
	empty := p.is_pane_empty()!
	if empty {
		console.print_debug('Pane ${p.id} appears empty, sending reset')
		p.send_reset()!
		return
	}
	clean := p.is_at_clean_prompt()!
	if !clean {
		console.print_debug('Pane ${p.id} not at clean prompt, sending reset')
		p.send_reset()!
	}
}
// Report whether the pane's captured output contains no visible text.
//
// Returns true when the log cannot be read at all (treated as empty) or
// when every captured line is blank after trimming whitespace.
pub fn (mut p Pane) is_pane_empty() !bool {
	logs := p.logs_all() or { return true }
	// Idiomatic scan instead of accumulating a throwaway list:
	// empty means no line carries any non-whitespace content.
	return !logs.split_into_lines().any(it.trim_space().len > 0)
}
// Check whether the pane is sitting at a clean shell prompt.
//
// Scans the last few captured lines from the bottom up; the first
// non-empty line decides: prompt sigil -> true, anything else -> false.
pub fn (mut p Pane) is_at_clean_prompt() !bool {
	logs := p.logs_all() or { return false }
	lines := logs.split_into_lines()
	if lines.len == 0 {
		return false
	}
	// Only the tail of the capture is relevant for prompt detection.
	check_lines := if lines.len > 5 { lines[lines.len - 5..] } else { lines }
	for line in check_lines.reverse() {
		line_clean := line.trim_space()
		if line_clean.len == 0 {
			continue
		}
		// After trim_space() a line can never end in a space, so the
		// original '"$ "' / '"# "' / '"> "' patterns were unreachable;
		// the bare sigils '$', '#' and '>' cover all cases.
		if line_clean.ends_with('$') || line_clean.ends_with('#') || line_clean.ends_with('>') {
			console.print_debug('Found clean prompt in pane ${p.id}: "${line_clean}"')
			return true
		}
		// First non-empty line that is not a prompt: not at a clean prompt.
		break
	}
	return false
}
// Type `reset` into the pane to clear its terminal state.
pub fn (mut p Pane) send_reset() ! {
	target := '${p.window.session.name}:@${p.window.id}.%${p.id}'
	osal.execute_silent('tmux send-keys -t ${target} "reset" Enter') or {
		return error('Cannot send reset to pane %${p.id}: ${err}')
	}
	console.print_debug('Sent reset command to pane ${p.id}')
	time.sleep(200 * time.millisecond) // let the reset finish before continuing
}
// Verify that bash is the main process of this pane.
//
// Returns false when the pane has no known PID or its process info
// cannot be read.
pub fn (mut p Pane) verify_bash_parent() !bool {
	if p.pid <= 0 {
		return false
	}
	proc_info := osal.processinfo_get(p.pid) or { return false }
	// A single substring test suffices: contains('bash') already matches
	// '/bin/bash' and '/usr/bin/bash', so the extra checks were redundant.
	if proc_info.cmd.contains('bash') {
		console.print_debug('Pane ${p.id} has bash as parent process (PID: ${p.pid})')
		return true
	}
	console.print_debug('Pane ${p.id} does NOT have bash as parent process. Current: ${proc_info.cmd}')
	return false
}
// Make sure bash is the pane's main process, replacing whatever runs there.
// Errors out if bash cannot be established after the attempt.
pub fn (mut p Pane) ensure_bash_parent() ! {
	// Already fine? Nothing to do.
	if p.verify_bash_parent()! {
		return
	}
	console.print_debug('Ensuring bash is parent process for pane ${p.id}')
	// Clear whatever currently owns the pane, then exec a fresh bash.
	p.kill_pane_process_group()!
	target := '${p.window.session.name}:@${p.window.id}.%${p.id}'
	osal.execute_silent('tmux send-keys -t ${target} "exec bash" Enter') or {
		return error('Cannot start bash in pane %${p.id}: ${err}')
	}
	time.sleep(500 * time.millisecond) // give bash time to start
	p.window.scan()! // refresh pane metadata (the pid may have changed)
	if !p.verify_bash_parent()! {
		return error('Failed to establish bash as parent process in pane ${p.id}')
	}
	console.print_debug('Successfully established bash as parent process for pane ${p.id}')
}
// List the child processes of this pane's main process.
// A pane without a known PID yields an empty list rather than an error.
pub fn (mut p Pane) get_child_processes() ![]osal.ProcessInfo {
	if p.pid <= 0 {
		return []osal.ProcessInfo{}
	}
	return osal.processinfo_children(p.pid)!.processes
}
// Verify the process hierarchy of the pane: bash on top, and every child
// process parented directly to the pane's main process.
pub fn (mut p Pane) verify_command_hierarchy() !bool {
	// Without bash at the top the hierarchy is wrong by definition.
	if !p.verify_bash_parent()! {
		return false
	}
	children := p.get_child_processes()!
	// An idle pane (no children) is a valid hierarchy.
	if children.len == 0 {
		return true
	}
	for child in children {
		if child.ppid != p.pid {
			console.print_debug('Child process ${child.pid} (${child.cmd}) does not have pane process as parent')
			return false
		}
	}
	console.print_debug('Command hierarchy verified for pane ${p.id}: ${children.len} child processes')
	return true
}
// Run a multi-line command by writing it to a temporary bash script and
// executing that script in the pane (send-keys cannot carry newlines safely).
fn (mut p Pane) send_multiline_command(command string) ! {
	// Scripts live under a per-session temp directory.
	script_dir := '/tmp/tmux/${p.window.session.name}'
	os.mkdir_all(script_dir) or { return error('Cannot create script directory: ${err}') }
	// One script file per pane, overwritten on each call.
	script_path := '${script_dir}/pane_${p.id}_script.sh'
	os.write_file(script_path, '#!/bin/bash\n' + command.trim_space()) or {
		return error('Cannot write script file ${script_path}: ${err}')
	}
	os.chmod(script_path, 0o755) or {
		return error('Cannot make script executable ${script_path}: ${err}')
	}
	target := '${p.window.session.name}:@${p.window.id}.%${p.id}'
	osal.execute_silent('tmux send-keys -t ${target} "${script_path}" Enter') or {
		return error('Cannot execute script in pane %${p.id}: ${err}')
	}
	// Cleanup is deliberately skipped so the script stays around for debugging.
}
// Send raw keys to this pane (without Enter)
@@ -367,62 +638,23 @@ pub fn (mut p Pane) logging_enable(args PaneLoggingEnableArgs) ! {
}
}
// Use a completely different approach: direct tmux pipe-pane with a buffer-based logger
// This ensures ALL output is captured in real-time without missing anything
buffer_logger_script := "#!/bin/bash
PANE_TARGET=\"${p.window.session.name}:@${p.window.id}.%${p.id}\"
LOG_PATH=\"${log_path}\"
LOGGER_BINARY=\"${logger_binary}\"
BUFFER_FILE=\"/tmp/tmux_pane_${p.id}_buffer.txt\"
// Use the simple and reliable tmux pipe-pane approach with tmux_logger binary
// This is the proven approach that works perfectly
# Create a named pipe for real-time logging
PIPE_FILE=\"/tmp/tmux_pane_${p.id}_pipe\"
mkfifo \"\$PIPE_FILE\" 2>/dev/null || true
// Determine the pane identifier for logging
pane_log_id := 'pane${p.id}'
# Start the logger process that reads from the pipe
\"\$LOGGER_BINARY\" \"\$LOG_PATH\" \"${p.id}\" < \"\$PIPE_FILE\" &
LOGGER_PID=\$!
// Set up tmux pipe-pane to send all output directly to tmux_logger
pipe_cmd := 'tmux pipe-pane -t ${p.window.session.name}:@${p.window.id}.%${p.id} -o "${logger_binary} ${log_path} ${pane_log_id}"'
# Function to cleanup on exit
cleanup() {
kill \$LOGGER_PID 2>/dev/null || true
rm -f \"\$PIPE_FILE\" \"\$BUFFER_FILE\"
exit 0
}
trap cleanup EXIT INT TERM
console.print_debug('Starting real-time logging: ${pipe_cmd}')
# Start tmux pipe-pane to send all output to our pipe
tmux pipe-pane -t \"\$PANE_TARGET\" \"cat >> \"\$PIPE_FILE\"\"
# Keep the script running and monitor the pane
while true; do
# Check if pane still exists
if ! tmux list-panes -t \"\$PANE_TARGET\" >/dev/null 2>&1; then
break
fi
sleep 1
done
cleanup
" // Write the buffer logger script
script_path := '/tmp/tmux_buffer_logger_${p.id}.sh'
os.write_file(script_path, buffer_logger_script) or {
return error("Can't create buffer logger script: ${err}")
osal.exec(cmd: pipe_cmd, stdout: false, name: 'tmux_start_pipe_logging') or {
return error("Can't start pipe logging for pane %${p.id}: ${err}")
}
// Make script executable
osal.exec(cmd: 'chmod +x "${script_path}"', stdout: false, name: 'make_script_executable') or {
return error("Can't make script executable: ${err}")
}
// Start the buffer logger script in background
start_cmd := 'nohup "${script_path}" > /dev/null 2>&1 &'
console.print_debug('Starting pane logging with buffer logger: ${start_cmd}')
osal.exec(cmd: start_cmd, stdout: false, name: 'tmux_start_buffer_logger') or {
return error("Can't start buffer logger for pane %${p.id}: ${err}")
}
// Wait a moment for the process to start
time.sleep(500 * time.millisecond)
// Update pane state
p.log_enabled = true
@@ -442,14 +674,12 @@ pub fn (mut p Pane) logging_disable() ! {
cmd := 'tmux pipe-pane -t ${p.window.session.name}:@${p.window.id}.%${p.id}'
osal.exec(cmd: cmd, stdout: false, name: 'tmux_stop_logging', ignore_error: true) or {}
// Kill the buffer logger script process
script_path := '/tmp/tmux_buffer_logger_${p.id}.sh'
kill_cmd := 'pkill -f "${script_path}"'
osal.exec(cmd: kill_cmd, stdout: false, name: 'kill_buffer_logger_script', ignore_error: true) or {}
// Kill the tmux_logger process for this pane
pane_log_id := 'pane${p.id}'
kill_cmd := 'pkill -f "tmux_logger.*${pane_log_id}"'
osal.exec(cmd: kill_cmd, stdout: false, name: 'kill_tmux_logger', ignore_error: true) or {}
// Clean up script and temp files
cleanup_cmd := 'rm -f "${script_path}" "/tmp/tmux_pane_${p.id}_buffer.txt" "/tmp/tmux_pane_${p.id}_pipe"'
osal.exec(cmd: cleanup_cmd, stdout: false, name: 'cleanup_logging_files', ignore_error: true) or {}
// No temp files to clean up with the simple pipe approach
// Update pane state
p.log_enabled = false

View File

@@ -1,86 +1,39 @@
module tmux
import freeflowuniverse.herolib.osal.core as osal
// import freeflowuniverse.herolib.installers.tmux
// fn testsuite_end() {
//
// }
import rand
fn testsuite_begin() {
mut tmux := Tmux{}
mut tmux_instance := new()!
if tmux.is_running()! {
tmux.stop()!
if tmux_instance.is_running()! {
tmux_instance.stop()!
}
}
fn test_session_create() {
// installer := tmux.get_install(
// panic('could not install tmux: ${err}')
// }
fn test_session_create() ! {
// Create unique session names to avoid conflicts
session_name1 := 'testsession_${rand.int()}'
session_name2 := 'testsession2_${rand.int()}'
mut tmux := Tmux{}
tmux.start() or { panic('cannot start tmux: ${err}') }
mut tmux_instance := new()!
tmux_instance.start()!
mut s := Session{
tmux: &tmux
windows: []&Window{}
name: 'testsession'
}
// Create sessions using the proper API
mut s := tmux_instance.session_create(name: session_name1)!
mut s2 := tmux_instance.session_create(name: session_name2)!
mut s2 := Session{
tmux: &tmux
windows: []&Window{}
name: 'testsession2'
}
// test testsession exists after session_create
// Test that sessions were created successfully
mut tmux_ls := osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
assert !tmux_ls.contains('testsession: 1 windows')
s.create() or { panic('Cannot create session: ${err}') }
tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
assert tmux_ls.contains('testsession: 1 windows')
assert tmux_ls.contains(session_name1), 'Session 1 should exist'
assert tmux_ls.contains(session_name2), 'Session 2 should exist'
// test multiple session_create for same tmux
tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
assert !tmux_ls.contains('testsession2: 1 windows')
s2.create() or { panic('Cannot create session: ${err}') }
tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
assert tmux_ls.contains('testsession2: 1 windows')
// Test session existence check
assert tmux_instance.session_exist(session_name1), 'Session 1 should exist via API'
assert tmux_instance.session_exist(session_name2), 'Session 2 should exist via API'
// test session_create with duplicate session
mut create_err := ''
s2.create() or { create_err = err.msg() }
assert create_err != ''
assert create_err.contains('duplicate session: testsession2')
tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: ${err}") }
assert tmux_ls.contains('testsession2: 1 windows')
s.stop() or { panic('Cannot stop session: ${err}') }
s2.stop() or { panic('Cannot stop session: ${err}') }
// Clean up
tmux_instance.session_delete(session_name1)!
tmux_instance.session_delete(session_name2)!
tmux_instance.stop()!
}
// fn test_session_stop() {
//
// installer := tmux.get_install(
// mut tmux := Tmux {
// node: node_ssh
// }
// mut s := Session{
// tmux: &tmux // reference back
// windows: map[string]&Window{}
// name: 'testsession3'
// }
// s.create() or { panic("Cannot create session: $err") }
// mut tmux_ls := osal.execute_silent('tmux ls') or { panic("can't exec: $err") }
// assert tmux_ls.contains("testsession3: 1 windows")
// s.stop() or { panic("Cannot stop session: $err")}
// tmux_ls = osal.execute_silent('tmux ls') or { panic("can't exec: $err") }
// assert !tmux_ls.contains("testsession3: 1 windows")
// }

157
lib/osal/tmux/tmux_state.v Normal file
View File

@@ -0,0 +1,157 @@
module tmux
import freeflowuniverse.herolib.osal.core as osal
import crypto.md5
import json
import time
import freeflowuniverse.herolib.ui.console
// Command state structure for Redis storage.
// Snapshot of the last command issued to a pane, serialized as JSON under
// the key produced by get_state_key(). Written by store_command_state and
// updated in place by update_command_status.
pub struct CommandState {
pub mut:
	cmd_md5    string // MD5 hash of the command (normalized, see normalize_and_hash_command)
	cmd_text   string // Original command text
	status     string // running|finished|failed|unknown (update_command_status may also set 'killed')
	pid        int    // Process ID of the command (<= 0 when unknown)
	started_at string // Timestamp when command started (format_ss_milli)
	last_check string // Last time status was checked
	pane_id    int    // Pane ID for reference
}
// Redis key under which this pane's command state is stored.
// Pattern: herotmux:${session}:${window}|${pane}
pub fn (p &Pane) get_state_key() string {
	return 'herotmux:' + p.window.session.name + ':' + p.window.name + '|' + p.id.str()
}
// MD5 hex digest of a command after normalization, so that surrounding
// whitespace and line-ending differences do not count as a change.
pub fn normalize_and_hash_command(cmd string) string {
	mut normalized := cmd.trim_space()
	normalized = normalized.replace('\r\n', '\n')
	normalized = normalized.replace('\r', '\n')
	return md5.hexhash(normalized)
}
// Persist a fresh CommandState snapshot for this pane in Redis.
// started_at and last_check are both stamped with the current time.
pub fn (mut p Pane) store_command_state(cmd string, status string, pid int) ! {
	now := time.now().format_ss_milli()
	state := CommandState{
		cmd_md5:    normalize_and_hash_command(cmd)
		cmd_text:   cmd
		status:     status
		pid:        pid
		started_at: now
		last_check: now
		pane_id:    p.id
	}
	p.window.session.tmux.redis.set(p.get_state_key(), json.encode(state))!
	console.print_debug('Stored command state for pane ${p.id}: ${state.cmd_md5[..8]}... status=${status}')
}
// Load this pane's stored CommandState from Redis.
// Returns none when no state is stored or the payload cannot be decoded.
pub fn (mut p Pane) get_command_state() ?CommandState {
	state_json := p.window.session.tmux.redis.get(p.get_state_key()) or { return none }
	if state_json.len == 0 {
		return none
	}
	decoded := json.decode(CommandState, state_json) or {
		// Corrupt payload is treated as absent state, not as an error.
		console.print_debug('Failed to decode command state for pane ${p.id}: ${err}')
		return none
	}
	return decoded
}
// True when `new_cmd` differs from the stored command.
// No stored state at all also counts as "changed".
pub fn (mut p Pane) has_command_changed(new_cmd string) bool {
	stored := p.get_command_state() or { return true }
	return normalize_and_hash_command(new_cmd) != stored.cmd_md5
}
// Update the status field of the stored command state and re-save it.
// A pane without stored state is silently ignored.
pub fn (mut p Pane) update_command_status(status string) ! {
	mut state := p.get_command_state() or { return }
	state.status = status
	state.last_check = time.now().format_ss_milli()
	p.window.session.tmux.redis.set(p.get_state_key(), json.encode(state))!
	console.print_debug('Updated command status for pane ${p.id}: ${status}')
}
// Remove this pane's command state from Redis (used when the pane is
// reset or the command is removed). Deletion failures are only logged.
pub fn (mut p Pane) clear_command_state() ! {
	p.window.session.tmux.redis.del(p.get_state_key()) or {
		console.print_debug('Failed to clear command state for pane ${p.id}: ${err}')
	}
	console.print_debug('Cleared command state for pane ${p.id}')
}
// Check whether the stored command's PID still points at a live process.
// No stored state or a non-positive PID means "not running".
pub fn (mut p Pane) is_stored_command_running() bool {
	state := p.get_command_state() or { return false }
	return state.pid > 0 && osal.process_exists(state.pid)
}
// Collect every stored command state for this session, keyed by the Redis
// key it was found under (useful for debugging/monitoring). Entries that
// are missing, empty, or undecodable are skipped.
pub fn (mut s Session) get_all_command_states() !map[string]CommandState {
	mut states := map[string]CommandState{}
	for key in s.tmux.redis.keys('herotmux:${s.name}:*')! {
		state_json := s.tmux.redis.get(key) or { continue }
		if state_json.len == 0 {
			continue
		}
		states[key] = json.decode(CommandState, state_json) or {
			console.print_debug('Failed to decode state for key ${key}: ${err}')
			continue
		}
	}
	return states
}
// Maintenance pass: for every tracked command whose process has died,
// mark the stored state as 'finished' and refresh its last_check stamp.
pub fn (mut s Session) cleanup_stale_command_states() ! {
	for key, state in s.get_all_command_states()! {
		// Live process (or unknown pid): the state is not stale.
		if state.pid <= 0 || osal.process_exists(state.pid) {
			continue
		}
		mut fixed := state
		fixed.status = 'finished'
		fixed.last_check = time.now().format_ss_milli()
		s.tmux.redis.set(key, json.encode(fixed))!
		console.print_debug('Updated stale command state ${key}: process ${state.pid} no longer exists')
	}
}

View File

@@ -29,8 +29,8 @@ fn test_start() ! {
// test server is running after start()
tmux.start() or { panic('cannot start tmux: ${err}') }
mut tmux_ls := osal.execute_silent('tmux ls') or { panic('Cannot execute tmux ls: ${err}') }
// test started tmux contains windows
assert tmux_ls.contains('init: 1 windows')
// test started tmux contains some session
assert tmux_ls.len > 0, 'Tmux should have at least one session'
tmux.stop() or { panic('cannot stop tmux: ${err}') }
}

View File

@@ -1,65 +1,57 @@
module tmux
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console
import rand
import time
// uses single tmux instance for all tests
// Simple tests for tmux functionality
fn testsuite_begin() {
muttmux := new() or { panic('Cannot create tmux: ${err}') }
// Test MD5 command hashing (doesn't require tmux)
fn test_md5_hashing() ! {
// Test basic hashing
cmd1 := 'echo "test"'
cmd2 := 'echo "test"'
cmd3 := 'echo "different"'
// reset tmux for tests
is_running := is_running() or { panic('cannot check if tmux is running: ${err}') }
if is_running {
stop() or { panic('Cannot stop tmux: ${err}') }
}
hash1 := normalize_and_hash_command(cmd1)
hash2 := normalize_and_hash_command(cmd2)
hash3 := normalize_and_hash_command(cmd3)
assert hash1 == hash2, 'Same commands should have same hash'
assert hash1 != hash3, 'Different commands should have different hashes'
// Test normalization
cmd_with_spaces := ' echo "test" '
cmd_with_newlines := 'echo "test"\n'
hash_spaces := normalize_and_hash_command(cmd_with_spaces)
hash_newlines := normalize_and_hash_command(cmd_with_newlines)
assert hash1 == hash_spaces, 'Commands with extra spaces should normalize to same hash'
assert hash1 == hash_newlines, 'Commands with newlines should normalize to same hash'
}
fn testsuite_end() {
is_running := is_running() or { panic('cannot check if tmux is running: ${err}') }
if is_running {
stop() or { panic('Cannot stop tmux: ${err}') }
// Test basic tmux functionality
fn test_tmux_basic() ! {
// Create unique session name to avoid conflicts
session_name := 'test_${rand.int()}'
mut tmux_instance := new()!
// Ensure tmux is running
if !tmux_instance.is_running()! {
tmux_instance.start()!
}
}
fn test_window_new() ! {
mut tmux := new()!
tmux.start()!
// Create session first
mut session := tmux.session_create(name: 'main')!
// Create session
mut session := tmux_instance.session_create(name: session_name)!
// Note: session name gets normalized by name_fix, so we check if it contains our unique part
assert session.name.contains('test_'), 'Session name should contain test_ prefix'
// Test window creation
mut window := session.window_new(
name: 'TestWindow'
cmd: 'bash'
reset: true
)!
mut window := session.window_new(name: 'testwin')!
assert window.name == 'testwin'
assert session.window_exist(name: 'testwin')
assert window.name == 'testwindow' // name_fix converts to lowercase
assert session.window_exist(name: 'testwindow')
tmux.stop()!
}
// tests creating duplicate windows
fn test_window_new0() {
installer := get_install()!
mut tmux := Tmux{
node: node_ssh
}
window_args := WindowArgs{
name: 'TestWindow0'
}
// console.print_debug(tmux)
mut window := tmux.window_new(window_args) or { panic("Can't create new window: ${err}") }
assert tmux.sessions.keys().contains('main')
mut window_dup := tmux.window_new(window_args) or { panic("Can't create new window: ${err}") }
console.print_debug(node_ssh.exec('tmux ls') or { panic('fail:${err}') })
window.delete() or { panic('Cant delete window') }
// console.print_debug(tmux)
// Clean up - just stop tmux to clean everything
tmux_instance.stop()!
}

View File

@@ -94,6 +94,7 @@ pub fn (mut c Client) send[T, D](request RequestGeneric[T], params SendParams) !
myerror := response.error_ or {
return error('Failed to get error from response:\nRequest: ${request.encode()}\nResponse: ${response_json}\n${err}')
}
// print_backtrace()
mut myreq := request.encode()
if c.transport is UnixSocketTransport {

View File

@@ -78,11 +78,10 @@ pub fn (mut t UnixSocketTransport) send(request string, params SendParams) !stri
// Append the newly read data to the total response
res_total << res[..n]
//here we need to check we are at end
// here we need to check we are at end
if res.bytestr().contains('\n') {
break
}
}
unix.shutdown(socket.sock.handle)
socket.close() or {}

View File

@@ -6,16 +6,14 @@ import freeflowuniverse.herolib.schemas.jsonschema { Reference, decode_schemaref
pub fn decode_json_any(data string) !Any {
// mut o:=decode(data)!
return json2.decode[json2.Any](data)!
return json2.decode[Any](data)!
}
pub fn decode_json_string(data string) !string {
mut o := decode(data)!
return json.encode(o)!
return json.encode(o)
}
pub fn decode(data string) !OpenRPC {
// mut object := json.decode[OpenRPC](data) or { return error('Failed to decode json\n=======\n${data}\n===========\n${err}') }
mut object := json.decode(OpenRPC, data) or {

View File

@@ -3,117 +3,115 @@ module openrpcserver
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
@[heap]
pub struct Comment {
pub mut:
id u32
comment string
parent u32 //id of parent comment if any, 0 means none
updated_at i64
author u32 //links to user
id u32
comment string
parent u32 // id of parent comment if any, 0 means none
updated_at i64
author u32 // links to user
}
pub fn (self Comment) type_name() string {
return 'comments'
return 'comments'
}
pub fn (self Comment) load(data []u8) !Comment {
return comment_load(data)!
return comment_load(data)!
}
pub fn (self Comment) dump() ![]u8{
// Create a new encoder
mut e := encoder.new()
e.add_u8(1)
e.add_u32(self.id)
e.add_string(self.comment)
e.add_u32(self.parent)
e.add_i64(self.updated_at)
e.add_u32(self.author)
return e.data
pub fn (self Comment) dump() ![]u8 {
// Create a new encoder
mut e := encoder.new()
e.add_u8(1)
e.add_u32(self.id)
e.add_string(self.comment)
e.add_u32(self.parent)
e.add_i64(self.updated_at)
e.add_u32(self.author)
return e.data
}
pub fn comment_load(data []u8) !Comment{
// Create a new decoder
mut e := encoder.decoder_new(data)
version := e.get_u8()!
if version != 1 {
panic("wrong version in comment load")
}
mut comment := Comment{}
comment.id = e.get_u32()!
comment.comment = e.get_string()!
comment.parent = e.get_u32()!
comment.updated_at = e.get_i64()!
comment.author = e.get_u32()!
return comment
pub fn comment_load(data []u8) !Comment {
// Create a new decoder
mut e := encoder.decoder_new(data)
version := e.get_u8()!
if version != 1 {
panic('wrong version in comment load')
}
mut comment := Comment{}
comment.id = e.get_u32()!
comment.comment = e.get_string()!
comment.parent = e.get_u32()!
comment.updated_at = e.get_i64()!
comment.author = e.get_u32()!
return comment
}
pub struct CommentArg {
pub mut:
comment string
parent u32
author u32
comment string
parent u32
author u32
}
pub fn comment_multiset(args []CommentArg) ![]u32 {
return comments2ids(args)!
return comments2ids(args)!
}
pub fn comments2ids(args []CommentArg) ![]u32 {
return args.map(comment2id(it.comment)!)
return args.map(comment2id(it.comment)!)
}
pub fn comment2id(comment string) !u32 {
comment_fixed := comment.to_lower_ascii().trim_space()
mut redis := redisclient.core_get()!
return if comment_fixed.len > 0{
hash := md5.hexhash(comment_fixed)
comment_found := redis.hget("db:comments", hash)!
if comment_found == ""{
id := u32(redis.incr("db:comments:id")!)
redis.hset("db:comments", hash, id.str())!
redis.hset("db:comments", id.str(), comment_fixed)!
id
}else{
comment_found.u32()
}
} else { 0 }
comment_fixed := comment.to_lower_ascii().trim_space()
mut redis := redisclient.core_get()!
return if comment_fixed.len > 0 {
hash := md5.hexhash(comment_fixed)
comment_found := redis.hget('db:comments', hash)!
if comment_found == '' {
id := u32(redis.incr('db:comments:id')!)
redis.hset('db:comments', hash, id.str())!
redis.hset('db:comments', id.str(), comment_fixed)!
id
} else {
comment_found.u32()
}
} else {
0
}
}
//get new comment, not from the DB
pub fn comment_new(args CommentArg) !Comment{
mut o := Comment {
comment: args.comment
parent: args.parent
updated_at: ourtime.now().unix()
author: args.author
}
return o
// get new comment, not from the DB
pub fn comment_new(args CommentArg) !Comment {
mut o := Comment{
comment: args.comment
parent: args.parent
updated_at: ourtime.now().unix()
author: args.author
}
return o
}
pub fn comment_multiset(args []CommentArg) ![]u32{
mut ids := []u32{}
for comment in args {
ids << comment_set(comment)!
}
return ids
pub fn comment_multiset(args []CommentArg) ![]u32 {
mut ids := []u32{}
for comment in args {
ids << comment_set(comment)!
}
return ids
}
pub fn comment_set(args CommentArg) !u32{
mut o := comment_new(args)!
// Use openrpcserver set function which now returns the ID
return openrpcserver.set[Comment](mut o)!
pub fn comment_set(args CommentArg) !u32 {
mut o := comment_new(args)!
// Use openrpcserver set function which now returns the ID
return set[Comment](mut o)!
}
pub fn comment_exist(id u32) !bool{
return openrpcserver.exists[Comment](id)!
pub fn comment_exist(id u32) !bool {
return exists[Comment](id)!
}
pub fn comment_get(id u32) !Comment{
return openrpcserver.get[Comment](id)!
pub fn comment_get(id u32) !Comment {
return get[Comment](id)!
}

View File

@@ -3,55 +3,57 @@ module openrpcserver
import freeflowuniverse.herolib.core.redisclient
pub fn set[T](mut obj T) !u32 {
name := T{}.type_name()
mut redis := redisclient.core_get()!
// Generate ID if not set
if obj.id == 0 {
myid := redis.incr("db:${name}:id")!
obj.id = u32(myid)
}
data := obj.dump()!
redis.hset("db:${name}",obj.id.str(),data.bytestr())!
return obj.id
name := T{}.type_name()
mut redis := redisclient.core_get()!
// Generate ID if not set
if obj.id == 0 {
myid := redis.incr('db:${name}:id')!
obj.id = u32(myid)
}
data := obj.dump()!
redis.hset('db:${name}', obj.id.str(), data.bytestr())!
return obj.id
}
pub fn get[T](id u32) !T {
name := T{}.type_name()
mut redis := redisclient.core_get()!
data := redis.hget("db:${name}",id.str())!
if data.len > 0 {
return T{}.load(data.bytes())!
} else {
return error("Can't find ${name} with id: ${id}")
}
name := T{}.type_name()
mut redis := redisclient.core_get()!
data := redis.hget('db:${name}', id.str())!
if data.len > 0 {
return T{}.load(data.bytes())!
} else {
return error("Can't find ${name} with id: ${id}")
}
}
pub fn exists[T](id u32) !bool {
name := T{}.type_name()
mut redis := redisclient.core_get()!
return redis.hexists("db:${name}",id.str())!
name := T{}.type_name()
mut redis := redisclient.core_get()!
return redis.hexists('db:${name}', id.str())!
}
pub fn delete[T](id u32) ! {
name := T{}.type_name()
mut redis := redisclient.core_get()!
redis.hdel("db:${name}", id.str())!
name := T{}.type_name()
mut redis := redisclient.core_get()!
redis.hdel('db:${name}', id.str())!
}
pub fn list[T]() ![]T {
name := T{}.type_name()
mut redis := redisclient.core_get()!
all_data := redis.hgetall("db:${name}")!
mut result := []T{}
for _, data in all_data {
result << T{}.load(data.bytes())!
}
return result
name := T{}.type_name()
mut redis := redisclient.core_get()!
all_data := redis.hgetall('db:${name}')!
mut result := []T{}
for _, data in all_data {
result << T{}.load(data.bytes())!
}
return result
}
//make it easy to get a base object
// make it easy to get a base object
pub fn new_from_base[T](args BaseArgs) !Base {
return T { Base: new_base(args)! }
}
return T{
Base: new_base(args)!
}
}

View File

@@ -1,7 +1,6 @@
module openrpcserver
import crypto.md5
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
@@ -9,85 +8,83 @@ import freeflowuniverse.herolib.data.ourtime
@[heap]
pub struct Base {
pub mut:
id u32
name string
description string
created_at i64
updated_at i64
securitypolicy u32
tags u32 //when we set/get we always do as []string but this can then be sorted and md5ed this gies the unique id of tags
comments []u32
id u32
name string
description string
created_at i64
updated_at i64
securitypolicy u32
tags u32 // when we set/get we always do as []string but this can then be sorted and md5ed this gies the unique id of tags
comments []u32
}
@[heap]
pub struct SecurityPolicy {
pub mut:
id u32
read []u32 //links to users & groups
write []u32 //links to users & groups
delete []u32 //links to users & groups
public bool
md5 string //this sorts read, write and delete u32 + hash, then do md5 hash, this allows to go from a random read/write/delete/public config to a hash
id u32
read []u32 // links to users & groups
write []u32 // links to users & groups
delete []u32 // links to users & groups
public bool
md5 string // this sorts read, write and delete u32 + hash, then do md5 hash, this allows to go from a random read/write/delete/public config to a hash
}
@[heap]
pub struct Tags {
pub mut:
id u32
names []string //unique per id
md5 string //of sorted names, to make easy to find unique id, each name lowercased and made ascii
id u32
names []string // unique per id
md5 string // of sorted names, to make easy to find unique id, each name lowercased and made ascii
}
/////////////////
@[params]
pub struct BaseArgs {
pub mut:
id ?u32
name string
description string
securitypolicy ?u32
tags []string
comments []CommentArg
id ?u32
name string
description string
securitypolicy ?u32
tags []string
comments []CommentArg
}
//make it easy to get a base object
// make it easy to get a base object
pub fn new_base(args BaseArgs) !Base {
mut redis := redisclient.core_get()!
mut redis := redisclient.core_get()!
commentids:=comment_multiset(args.comments)!
tags:=tags2id(args.tags)!
commentids := comment_multiset(args.comments)!
tags := tags2id(args.tags)!
return Base {
id: args.id or { 0 }
name: args.name
description: args.description
created_at: ourtime.now().unix()
updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or { 0 }
tags: tags
comments: commentids
}
return Base{
id: args.id or { 0 }
name: args.name
description: args.description
created_at: ourtime.now().unix()
updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or { 0 }
tags: tags
comments: commentids
}
}
pub fn tags2id(tags []string) !u32 {
mut redis := redisclient.core_get()!
return if tags.len>0{
mut tags_fixed := tags.map(it.to_lower_ascii().trim_space()).filter(it != "")
tags_fixed.sort_ignore_case()
hash :=md5.hexhash(tags_fixed.join(","))
tags_found := redis.hget("db:tags", hash)!
return if tags_found == ""{
id := u32(redis.incr("db:tags:id")!)
redis.hset("db:tags", hash, id.str())!
redis.hset("db:tags", id.str(), tags_fixed.join(","))!
id
}else{
tags_found.u32()
}
} else {
0
}
mut redis := redisclient.core_get()!
return if tags.len > 0 {
mut tags_fixed := tags.map(it.to_lower_ascii().trim_space()).filter(it != '')
tags_fixed.sort_ignore_case()
hash := md5.hexhash(tags_fixed.join(','))
tags_found := redis.hget('db:tags', hash)!
return if tags_found == '' {
id := u32(redis.incr('db:tags:id')!)
redis.hset('db:tags', hash, id.str())!
redis.hset('db:tags', id.str(), tags_fixed.join(','))!
id
} else {
tags_found.u32()
}
} else {
0
}
}

View File

@@ -6,7 +6,7 @@ import net.unix
import os
import freeflowuniverse.herolib.ui.console
//THIS IS DEFAULT NEEDED FOR EACH OPENRPC SERVER WE MAKE
// THIS IS DEFAULT NEEDED FOR EACH OPENRPC SERVER WE MAKE
pub struct JsonRpcRequest {
pub:
@@ -33,10 +33,9 @@ pub:
data string
}
pub struct RPCServer {
pub mut:
listener &unix.StreamListener
listener &unix.StreamListener
socket_path string
}
@@ -59,18 +58,18 @@ pub fn new_rpc_server(args RPCServerArgs) !&RPCServer {
if os.exists(args.socket_path) {
os.rm(args.socket_path)!
}
listener := unix.listen_stream(args.socket_path, unix.ListenOptions{})!
return &RPCServer{
listener: listener
listener: listener
socket_path: args.socket_path
}
}
pub fn (mut server RPCServer) start() ! {
console.print_header('Starting HeroModels OpenRPC Server on ${server.socket_path}')
for {
mut conn := server.listener.accept()!
spawn server.handle_connection(mut conn)
@@ -88,7 +87,7 @@ fn (mut server RPCServer) handle_connection(mut conn unix.StreamConn) {
defer {
conn.close() or { console.print_stderr('Error closing connection: ${err}') }
}
for {
// Read JSON-RPC request
mut buffer := []u8{len: 4096}
@@ -96,19 +95,19 @@ fn (mut server RPCServer) handle_connection(mut conn unix.StreamConn) {
console.print_debug('Connection closed or error reading: ${err}')
break
}
if bytes_read == 0 {
break
}
request_data := buffer[..bytes_read].bytestr()
console.print_debug('Received request: ${request_data}')
// Process the JSON-RPC request
response := server.process_request(request_data) or {
server.create_error_response(-32603, 'Internal error: ${err}', 'null')
}
// Send response
conn.write_string(response) or {
console.print_stderr('Error writing response: ${err}')
@@ -145,22 +144,22 @@ pub fn (mut server RPCServer) process(method string, params_str string) !string
fn (mut server RPCServer) create_success_response(result string, id string) string {
response := JsonRpcResponse{
jsonrpc: '2.0'
result: result
id: id
result: result
id: id
}
return json.encode(response)
}
fn (mut server RPCServer) create_error_response(code int, message string, id string) string {
error := JsonRpcError{
code: code
code: code
message: message
data: 'null'
data: 'null'
}
response := JsonRpcResponse{
jsonrpc: '2.0'
error: error
id: id
error: error
id: id
}
return json.encode(response)
}
@@ -169,4 +168,4 @@ fn (mut server RPCServer) create_error_response(code int, message string, id str
pub fn (mut server RPCServer) discover() !string {
// Return a basic OpenRPC spec - should be overridden by implementations
return '{"openrpc": "1.2.6", "info": {"title": "OpenRPC Server", "version": "1.0.0"}, "methods": []}'
}
}

View File

@@ -41,21 +41,21 @@ pub mut:
// new creates a new Company with default values
pub fn Company.new() Company {
	return Company{
		id:                  0
		name:                ''
		registration_number: ''
		incorporation_date:  0
		fiscal_year_end:     ''
		email:               ''
		phone:               ''
		website:             ''
		address:             ''
		business_type:       .single
		industry:            ''
		description:         ''
		status:              .pending_payment
		created_at:          0
		updated_at:          0
	}
}
@@ -185,4 +185,4 @@ pub fn (c Company) status_string() string {
.suspended { 'Suspended' }
.inactive { 'Inactive' }
}
}
}

View File

@@ -34,19 +34,19 @@ pub mut:
// new creates a new Payment in pending state for the given company
// and plan. Currency defaults to 'usd'; created_at is set to now and
// completed_at stays unset until the payment finishes.
pub fn Payment.new(payment_intent_id string, company_id u32, payment_plan string, setup_fee f64, monthly_fee f64, total_amount f64) Payment {
	now := time.now().unix_time()
	return Payment{
		id:                 0
		payment_intent_id:  payment_intent_id
		company_id:         company_id
		payment_plan:       payment_plan
		setup_fee:          setup_fee
		monthly_fee:        monthly_fee
		total_amount:       total_amount
		currency:           'usd'
		status:             .pending
		stripe_customer_id: none
		created_at:         now
		completed_at:       none
		updated_at:         u64(now)
	}
}
@@ -195,4 +195,4 @@ pub fn (p Payment) is_yearly_plan() bool {
// is_two_year_plan checks if this is a two-year payment plan
pub fn (p Payment) is_two_year_plan() bool {
return p.payment_plan == 'two_year'
}
}

View File

@@ -23,9 +23,9 @@ pub mut:
// new creates a new ProductComponent with default values
pub fn ProductComponent.new() ProductComponent {
return ProductComponent{
name: ''
name: ''
description: ''
quantity: 1
quantity: 1
}
}
@@ -51,37 +51,37 @@ pub fn (mut pc ProductComponent) quantity(quantity u32) ProductComponent {
@[heap]
pub struct Product {
pub mut:
id u32 // Unique product ID
name string // Product name
description string // Product description
price f64 // Product price
type_ ProductType // Product type (product or service)
category string // Product category
status ProductStatus // Product status
max_amount u16 // Maximum amount available
purchase_till i64 // Purchase deadline timestamp
active_till i64 // Active until timestamp
components []ProductComponent // Product components
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
id u32 // Unique product ID
name string // Product name
description string // Product description
price f64 // Product price
type_ ProductType // Product type (product or service)
category string // Product category
status ProductStatus // Product status
max_amount u16 // Maximum amount available
purchase_till i64 // Purchase deadline timestamp
active_till i64 // Active until timestamp
components []ProductComponent // Product components
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
}
// new creates a new Product with default values
pub fn Product.new() Product {
return Product{
id: 0
name: ''
description: ''
price: 0.0
type_: .product
category: ''
status: .available
max_amount: 0
id: 0
name: ''
description: ''
price: 0.0
type_: .product
category: ''
status: .available
max_amount: 0
purchase_till: 0
active_till: 0
components: []
created_at: 0
updated_at: 0
active_till: 0
components: []
created_at: 0
updated_at: 0
}
}
@@ -209,4 +209,4 @@ pub fn (p Product) status_string() string {
.available { 'Available' }
.unavailable { 'Unavailable' }
}
}
}

View File

@@ -10,22 +10,22 @@ pub enum SaleStatus {
// SaleItem represents an individual item within a Sale
pub struct SaleItem {
pub mut:
product_id u32 // Product ID
name string // Denormalized product name at time of sale
quantity i32 // Quantity purchased
unit_price f64 // Price per unit at time of sale
subtotal f64 // Subtotal for this item
service_active_until ?i64 // Optional: For services, date until this specific purchased instance is active
product_id u32 // Product ID
name string // Denormalized product name at time of sale
quantity i32 // Quantity purchased
unit_price f64 // Price per unit at time of sale
subtotal f64 // Subtotal for this item
service_active_until ?i64 // Optional: For services, date until this specific purchased instance is active
}
// new creates a new SaleItem with default values
pub fn SaleItem.new() SaleItem {
return SaleItem{
product_id: 0
name: ''
quantity: 0
unit_price: 0.0
subtotal: 0.0
product_id: 0
name: ''
quantity: 0
unit_price: 0.0
subtotal: 0.0
service_active_until: none
}
}
@@ -91,17 +91,17 @@ pub mut:
// new creates a new Sale with default values
pub fn Sale.new() Sale {
return Sale{
id: 0
company_id: 0
buyer_id: 0
id: 0
company_id: 0
buyer_id: 0
transaction_id: 0
total_amount: 0.0
status: .pending
sale_date: 0
items: []
notes: ''
created_at: 0
updated_at: 0
total_amount: 0.0
status: .pending
sale_date: 0
items: []
notes: ''
created_at: 0
updated_at: 0
}
}
@@ -219,4 +219,4 @@ pub fn (s Sale) status_string() string {
.completed { 'Completed' }
.cancelled { 'Cancelled' }
}
}
}

View File

@@ -16,12 +16,12 @@ pub mut:
// new creates a new Comment with default values
pub fn Comment.new() Comment {
return Comment{
id: 0
user_id: 0
content: ''
id: 0
user_id: 0
content: ''
parent_comment_id: none
created_at: 0
updated_at: 0
created_at: 0
updated_at: 0
}
}
@@ -51,4 +51,4 @@ pub fn (c Comment) is_top_level() bool {
// is_reply returns true if this is a reply to another comment
pub fn (c Comment) is_reply() bool {
return c.parent_comment_id != none
}
}

View File

@@ -4,31 +4,31 @@ module finance
@[heap]
pub struct Account {
pub mut:
id u32 // Unique account ID
name string // Internal name of the account for the user
user_id u32 // User ID of the owner of the account
description string // Optional description of the account
ledger string // Describes the ledger/blockchain where the account is located
address string // Address of the account on the blockchain
pubkey string // Public key
assets []u32 // List of asset IDs in this account
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
id u32 // Unique account ID
name string // Internal name of the account for the user
user_id u32 // User ID of the owner of the account
description string // Optional description of the account
ledger string // Describes the ledger/blockchain where the account is located
address string // Address of the account on the blockchain
pubkey string // Public key
assets []u32 // List of asset IDs in this account
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
}
// new creates a new Account with default values
pub fn Account.new() Account {
return Account{
id: 0
name: ''
user_id: 0
id: 0
name: ''
user_id: 0
description: ''
ledger: ''
address: ''
pubkey: ''
assets: []
created_at: 0
updated_at: 0
ledger: ''
address: ''
pubkey: ''
assets: []
created_at: 0
updated_at: 0
}
}
@@ -94,4 +94,4 @@ pub fn (a Account) has_asset(asset_id u32) bool {
// remove_asset removes an asset from the account
pub fn (mut a Account) remove_asset(asset_id u32) {
a.assets = a.assets.filter(it != asset_id)
}
}

View File

@@ -26,15 +26,15 @@ pub mut:
// new creates a new Asset with default values
pub fn Asset.new() Asset {
return Asset{
id: 0
name: ''
id: 0
name: ''
description: ''
amount: 0.0
address: ''
asset_type: .native
decimals: 18
created_at: 0
updated_at: 0
amount: 0.0
address: ''
asset_type: .native
decimals: 18
created_at: 0
updated_at: 0
}
}
@@ -81,7 +81,31 @@ pub fn (a Asset) formatted_amount() string {
factor *= 10
}
formatted_amount := (a.amount * factor).round() / factor
return '${formatted_amount:.${a.decimals}f}'
// Format with the specified number of decimal places
if a.decimals == 0 {
return '${formatted_amount:.0f}'
} else if a.decimals == 1 {
return '${formatted_amount:.1f}'
} else if a.decimals == 2 {
return '${formatted_amount:.2f}'
} else if a.decimals == 3 {
return '${formatted_amount:.3f}'
} else if a.decimals == 4 {
return '${formatted_amount:.4f}'
} else {
// For more than 4 decimals, use string manipulation
str_amount := formatted_amount.str()
if str_amount.contains('.') {
parts := str_amount.split('.')
if parts.len == 2 {
decimal_part := parts[1]
if decimal_part.len > a.decimals {
return '${parts[0]}.${decimal_part[..a.decimals]}'
}
}
}
return str_amount
}
}
// transfer_to transfers amount to another asset
@@ -96,4 +120,4 @@ pub fn (mut a Asset) transfer_to(mut target Asset, amount f64) ! {
a.amount -= amount
target.amount += amount
}
}

View File

@@ -40,10 +40,10 @@ pub mut:
pub fn Bid.new() Bid {
return Bid{
listing_id: ''
bidder_id: 0
amount: 0.0
currency: ''
status: .active
bidder_id: 0
amount: 0.0
currency: ''
status: .active
created_at: u64(time.now().unix_time())
}
}
@@ -82,50 +82,50 @@ pub fn (mut b Bid) status(status BidStatus) Bid {
@[heap]
pub struct Listing {
pub mut:
id u32 // Unique listing ID
title string // Title of the listing
description string // Description of the listing
asset_id string // ID of the asset being listed
asset_type AssetType // Type of the asset
seller_id string // ID of the user selling the asset
price f64 // Initial price for fixed price, or starting price for auction
currency string // Currency of the listing
listing_type ListingType // Type of listing (fixed_price, auction, exchange)
status ListingStatus // Status of the listing
expires_at ?u64 // Optional expiration date
sold_at ?u64 // Optional date when the item was sold
buyer_id ?string // Optional buyer ID
sale_price ?f64 // Optional final sale price
bids []Bid // List of bids for auction type listings
tags []string // Tags for the listing
image_url ?string // Optional image URL
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
id u32 // Unique listing ID
title string // Title of the listing
description string // Description of the listing
asset_id string // ID of the asset being listed
asset_type AssetType // Type of the asset
seller_id string // ID of the user selling the asset
price f64 // Initial price for fixed price, or starting price for auction
currency string // Currency of the listing
listing_type ListingType // Type of listing (fixed_price, auction, exchange)
status ListingStatus // Status of the listing
expires_at ?u64 // Optional expiration date
sold_at ?u64 // Optional date when the item was sold
buyer_id ?string // Optional buyer ID
sale_price ?f64 // Optional final sale price
bids []Bid // List of bids for auction type listings
tags []string // Tags for the listing
image_url ?string // Optional image URL
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
}
// new creates a new Listing with default values
pub fn Listing.new() Listing {
now := u64(time.now().unix_time())
return Listing{
id: 0
title: ''
description: ''
asset_id: ''
asset_type: .native
seller_id: ''
price: 0.0
currency: ''
id: 0
title: ''
description: ''
asset_id: ''
asset_type: .native
seller_id: ''
price: 0.0
currency: ''
listing_type: .fixed_price
status: .active
expires_at: none
sold_at: none
buyer_id: none
sale_price: none
bids: []
tags: []
image_url: none
created_at: now
updated_at: now
status: .active
expires_at: none
sold_at: none
buyer_id: none
sale_price: none
bids: []
tags: []
image_url: none
created_at: now
updated_at: now
}
}
@@ -336,4 +336,4 @@ pub fn (mut l Listing) check_expiration() {
pub fn (mut l Listing) add_tag(tag string) Listing {
l.tags << tag
return l
}
}

View File

@@ -4,13 +4,13 @@ module flow
@[heap]
pub struct Flow {
pub mut:
id u32 // Unique flow ID
flow_uuid string // A unique UUID for the flow, for external reference
name string // Name of the flow
status string // Current status of the flow (e.g., "Pending", "InProgress", "Completed", "Failed")
steps []FlowStep // Steps involved in this flow
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
id u32 // Unique flow ID
flow_uuid string // A unique UUID for the flow, for external reference
name string // Name of the flow
status string // Current status of the flow (e.g., "Pending", "InProgress", "Completed", "Failed")
steps []FlowStep // Steps involved in this flow
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
}
// new creates a new Flow
@@ -18,11 +18,11 @@ pub mut:
// The ID is managed by the database
pub fn Flow.new(flow_uuid string) Flow {
return Flow{
id: 0
flow_uuid: flow_uuid
name: ''
status: 'Pending'
steps: []
id: 0
flow_uuid: flow_uuid
name: ''
status: 'Pending'
steps: []
created_at: 0
updated_at: 0
}
@@ -71,7 +71,7 @@ pub fn (f Flow) is_completed() bool {
if f.steps.len == 0 {
return false
}
for step in f.steps {
if step.status != 'Completed' {
return false
@@ -84,11 +84,11 @@ pub fn (f Flow) is_completed() bool {
pub fn (f Flow) get_next_step() ?FlowStep {
mut sorted_steps := f.steps.clone()
sorted_steps.sort(a.step_order < b.step_order)
for step in sorted_steps {
if step.status == 'Pending' {
return step
}
}
return none
}
}

View File

@@ -15,12 +15,12 @@ pub mut:
// new creates a new flow step
pub fn FlowStep.new(step_order u32) FlowStep {
return FlowStep{
id: 0
id: 0
description: none
step_order: step_order
status: 'Pending'
created_at: 0
updated_at: 0
step_order: step_order
status: 'Pending'
created_at: 0
updated_at: 0
}
}
@@ -74,4 +74,4 @@ pub fn (mut fs FlowStep) fail() {
// reset resets the step to pending status
pub fn (mut fs FlowStep) reset() {
fs.status = 'Pending'
}
}

View File

@@ -18,15 +18,15 @@ pub mut:
// new creates a new signature requirement
pub fn SignatureRequirement.new(flow_step_id u32, public_key string, message string) SignatureRequirement {
return SignatureRequirement{
id: 0
id: 0
flow_step_id: flow_step_id
public_key: public_key
message: message
signed_by: none
signature: none
status: 'Pending'
created_at: 0
updated_at: 0
public_key: public_key
message: message
signed_by: none
signature: none
status: 'Pending'
created_at: 0
updated_at: 0
}
}
@@ -112,4 +112,4 @@ pub fn (sr SignatureRequirement) validate_signature() bool {
}
}
return false
}
}

View File

@@ -3,45 +3,45 @@ module identity
// IdenfyWebhookEvent represents an iDenfy webhook event structure
pub struct IdenfyWebhookEvent {
pub mut:
client_id string // Client ID
scan_ref string // Scan reference
status string // Verification status
platform string // Platform used
started_at string // When verification started
finished_at ?string // When verification finished (optional)
client_ip ?string // Client IP address (optional)
client_location ?string // Client location (optional)
data ?IdenfyVerificationData // Verification data (optional)
client_id string // Client ID
scan_ref string // Scan reference
status string // Verification status
platform string // Platform used
started_at string // When verification started
finished_at ?string // When verification finished (optional)
client_ip ?string // Client IP address (optional)
client_location ?string // Client location (optional)
data ?IdenfyVerificationData // Verification data (optional)
}
// IdenfyVerificationData represents the verification data from iDenfy
pub struct IdenfyVerificationData {
pub mut:
doc_first_name ?string // First name from document
doc_last_name ?string // Last name from document
doc_number ?string // Document number
doc_personal_code ?string // Personal code from document
doc_expiry ?string // Document expiry date
doc_dob ?string // Date of birth from document
doc_type ?string // Document type
doc_sex ?string // Sex from document
doc_nationality ?string // Nationality from document
doc_issuing_country ?string // Document issuing country
manually_data_changed ?bool // Whether data was manually changed
doc_first_name ?string // First name from document
doc_last_name ?string // Last name from document
doc_number ?string // Document number
doc_personal_code ?string // Personal code from document
doc_expiry ?string // Document expiry date
doc_dob ?string // Date of birth from document
doc_type ?string // Document type
doc_sex ?string // Sex from document
doc_nationality ?string // Nationality from document
doc_issuing_country ?string // Document issuing country
manually_data_changed ?bool // Whether data was manually changed
}
// new creates a new IdenfyWebhookEvent
pub fn IdenfyWebhookEvent.new() IdenfyWebhookEvent {
return IdenfyWebhookEvent{
client_id: ''
scan_ref: ''
status: ''
platform: ''
started_at: ''
finished_at: none
client_ip: none
client_id: ''
scan_ref: ''
status: ''
platform: ''
started_at: ''
finished_at: none
client_ip: none
client_location: none
data: none
data: none
}
}
@@ -102,16 +102,16 @@ pub fn (mut event IdenfyWebhookEvent) data(data ?IdenfyVerificationData) IdenfyW
// new creates a new IdenfyVerificationData
pub fn IdenfyVerificationData.new() IdenfyVerificationData {
return IdenfyVerificationData{
doc_first_name: none
doc_last_name: none
doc_number: none
doc_personal_code: none
doc_expiry: none
doc_dob: none
doc_type: none
doc_sex: none
doc_nationality: none
doc_issuing_country: none
doc_first_name: none
doc_last_name: none
doc_number: none
doc_personal_code: none
doc_expiry: none
doc_dob: none
doc_type: none
doc_sex: none
doc_nationality: none
doc_issuing_country: none
manually_data_changed: none
}
}
@@ -220,4 +220,4 @@ pub fn (event IdenfyWebhookEvent) get_document_info() string {
return '${doc_type}: ${doc_number}'
}
return 'No document information'
}
}

View File

@@ -32,11 +32,11 @@ pub mut:
// new creates a new ContractRevision
pub fn ContractRevision.new(version u32, content string, created_by string) ContractRevision {
return ContractRevision{
version: version
content: content
version: version
content: content
created_at: u64(time.now().unix_time())
created_by: created_by
comments: none
comments: none
}
}
@@ -49,27 +49,27 @@ pub fn (mut cr ContractRevision) comments(comments string) ContractRevision {
// ContractSigner represents a party involved in signing a contract
pub struct ContractSigner {
pub mut:
id string // Unique ID for the signer (UUID string)
name string // Signer's name
email string // Signer's email
status SignerStatus // Current status
signed_at ?u64 // When they signed (optional)
comments ?string // Optional comments from signer
last_reminder_mail_sent_at ?u64 // Last reminder timestamp
signature_data ?string // Base64 encoded signature image data
id string // Unique ID for the signer (UUID string)
name string // Signer's name
email string // Signer's email
status SignerStatus // Current status
signed_at ?u64 // When they signed (optional)
comments ?string // Optional comments from signer
last_reminder_mail_sent_at ?u64 // Last reminder timestamp
signature_data ?string // Base64 encoded signature image data
}
// new creates a new ContractSigner
pub fn ContractSigner.new(id string, name string, email string) ContractSigner {
return ContractSigner{
id: id
name: name
email: email
status: .pending
signed_at: none
comments: none
id: id
name: name
email: email
status: .pending
signed_at: none
comments: none
last_reminder_mail_sent_at: none
signature_data: none
signature_data: none
}
}
@@ -139,48 +139,48 @@ pub fn (mut cs ContractSigner) sign(signature_data ?string, comments ?string) {
@[heap]
pub struct Contract {
pub mut:
id u32 // Unique contract ID
contract_id string // Unique UUID for the contract
title string // Contract title
description string // Contract description
contract_type string // Type of contract
status ContractStatus // Current status
created_by string // Who created the contract
terms_and_conditions string // Terms and conditions text
start_date ?u64 // Optional start date
end_date ?u64 // Optional end date
renewal_period_days ?i32 // Optional renewal period in days
next_renewal_date ?u64 // Optional next renewal date
signers []ContractSigner // List of signers
revisions []ContractRevision // Contract revisions
current_version u32 // Current version number
last_signed_date ?u64 // Last signing date
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
id u32 // Unique contract ID
contract_id string // Unique UUID for the contract
title string // Contract title
description string // Contract description
contract_type string // Type of contract
status ContractStatus // Current status
created_by string // Who created the contract
terms_and_conditions string // Terms and conditions text
start_date ?u64 // Optional start date
end_date ?u64 // Optional end date
renewal_period_days ?i32 // Optional renewal period in days
next_renewal_date ?u64 // Optional next renewal date
signers []ContractSigner // List of signers
revisions []ContractRevision // Contract revisions
current_version u32 // Current version number
last_signed_date ?u64 // Last signing date
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
}
// new creates a new Contract
pub fn Contract.new(contract_id string) Contract {
now := u64(time.now().unix_time())
return Contract{
id: 0
contract_id: contract_id
title: ''
description: ''
contract_type: ''
status: .draft
created_by: ''
id: 0
contract_id: contract_id
title: ''
description: ''
contract_type: ''
status: .draft
created_by: ''
terms_and_conditions: ''
start_date: none
end_date: none
renewal_period_days: none
next_renewal_date: none
signers: []
revisions: []
current_version: 0
last_signed_date: none
created_at: now
updated_at: now
start_date: none
end_date: none
renewal_period_days: none
next_renewal_date: none
signers: []
revisions: []
current_version: 0
last_signed_date: none
created_at: now
updated_at: now
}
}
@@ -331,7 +331,7 @@ pub fn (c Contract) all_signed() bool {
if c.signers.len == 0 {
return false
}
for signer in c.signers {
if signer.status != .signed {
return false
@@ -370,4 +370,4 @@ pub fn (c Contract) status_string() string {
.expired { 'Expired' }
.cancelled { 'Cancelled' }
}
}
}

View File

@@ -4,31 +4,31 @@ module library
@[heap]
pub struct Collection {
pub mut:
id u32 // Unique collection ID
title string // Title of the collection
description ?string // Optional description of the collection
images []u32 // List of image item IDs belonging to this collection
pdfs []u32 // List of PDF item IDs belonging to this collection
markdowns []u32 // List of Markdown item IDs belonging to this collection
books []u32 // List of Book item IDs belonging to this collection
slides []u32 // List of Slides item IDs belonging to this collection
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
id u32 // Unique collection ID
title string // Title of the collection
description ?string // Optional description of the collection
images []u32 // List of image item IDs belonging to this collection
pdfs []u32 // List of PDF item IDs belonging to this collection
markdowns []u32 // List of Markdown item IDs belonging to this collection
books []u32 // List of Book item IDs belonging to this collection
slides []u32 // List of Slides item IDs belonging to this collection
created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp
}
// new creates a new Collection with default values
pub fn Collection.new() Collection {
return Collection{
id: 0
title: ''
id: 0
title: ''
description: none
images: []
pdfs: []
markdowns: []
books: []
slides: []
created_at: 0
updated_at: 0
images: []
pdfs: []
markdowns: []
books: []
slides: []
created_at: 0
updated_at: 0
}
}
@@ -162,4 +162,4 @@ pub fn (c Collection) contains_slides(slides_id u32) bool {
// get_description_string returns the description as a string (empty if none)
pub fn (c Collection) get_description_string() string {
return c.description or { '' }
}
}

View File

@@ -17,14 +17,14 @@ pub mut:
// new creates a new Image with default values
pub fn Image.new() Image {
return Image{
id: 0
title: ''
id: 0
title: ''
description: none
url: ''
width: 0
height: 0
created_at: 0
updated_at: 0
url: ''
width: 0
height: 0
created_at: 0
updated_at: 0
}
}
@@ -97,13 +97,13 @@ pub mut:
// new creates a new Pdf with default values
pub fn Pdf.new() Pdf {
return Pdf{
id: 0
title: ''
id: 0
title: ''
description: none
url: ''
page_count: 0
created_at: 0
updated_at: 0
url: ''
page_count: 0
created_at: 0
updated_at: 0
}
}
@@ -151,12 +151,12 @@ pub mut:
// new creates a new Markdown document with default values
pub fn Markdown.new() Markdown {
return Markdown{
id: 0
title: ''
id: 0
title: ''
description: none
content: ''
created_at: 0
updated_at: 0
content: ''
created_at: 0
updated_at: 0
}
}
@@ -200,8 +200,8 @@ pub mut:
// new creates a new TocEntry with default values
pub fn TocEntry.new() TocEntry {
return TocEntry{
title: ''
page: 0
title: ''
page: 0
subsections: []
}
}
@@ -245,13 +245,13 @@ pub mut:
// new creates a new Book with default values
pub fn Book.new() Book {
return Book{
id: 0
title: ''
description: none
id: 0
title: ''
description: none
table_of_contents: []
pages: []
created_at: 0
updated_at: 0
pages: []
created_at: 0
updated_at: 0
}
}
@@ -325,8 +325,8 @@ pub mut:
// new creates a new Slide
pub fn Slide.new() Slide {
return Slide{
image_url: ''
title: none
image_url: ''
title: none
description: none
}
}
@@ -374,12 +374,12 @@ pub mut:
// new creates a new Slideshow with default values
pub fn Slideshow.new() Slideshow {
return Slideshow{
id: 0
title: ''
id: 0
title: ''
description: none
slides: []
created_at: 0
updated_at: 0
slides: []
created_at: 0
updated_at: 0
}
}
@@ -424,4 +424,4 @@ pub fn (mut s Slideshow) remove_slide(index u32) {
if index < u32(s.slides.len) {
s.slides.delete(int(index))
}
}
}

View File

@@ -14,12 +14,12 @@ pub mut:
// new creates a new Address with default values
pub fn Address.new() Address {
return Address{
street: ''
city: ''
state: none
street: ''
city: ''
state: none
postal_code: ''
country: ''
company: none
country: ''
company: none
}
}
@@ -77,57 +77,57 @@ pub fn (a Address) has_company() bool {
// format_single_line returns the address formatted as a single line
pub fn (a Address) format_single_line() string {
mut parts := []string{}
if company := a.company {
if company.len > 0 {
parts << company
}
}
if a.street.len > 0 {
parts << a.street
}
if a.city.len > 0 {
parts << a.city
}
if state := a.state {
if state.len > 0 {
parts << state
}
}
if a.postal_code.len > 0 {
parts << a.postal_code
}
if a.country.len > 0 {
parts << a.country
}
return parts.join(', ')
}
// format_multiline returns the address formatted as multiple lines
pub fn (a Address) format_multiline() string {
mut lines := []string{}
if company := a.company {
if company.len > 0 {
lines << company
}
}
if a.street.len > 0 {
lines << a.street
}
mut city_line := ''
if a.city.len > 0 {
city_line = a.city
}
if state := a.state {
if state.len > 0 {
if city_line.len > 0 {
@@ -137,7 +137,7 @@ pub fn (a Address) format_multiline() string {
}
}
}
if a.postal_code.len > 0 {
if city_line.len > 0 {
city_line += ' ${a.postal_code}'
@@ -145,15 +145,15 @@ pub fn (a Address) format_multiline() string {
city_line = a.postal_code
}
}
if city_line.len > 0 {
lines << city_line
}
if a.country.len > 0 {
lines << a.country
}
return lines.join('\n')
}
@@ -169,22 +169,15 @@ pub fn (a Address) get_company_string() string {
// equals compares two addresses for equality
pub fn (a Address) equals(other Address) bool {
return a.street == other.street &&
a.city == other.city &&
a.state == other.state &&
a.postal_code == other.postal_code &&
a.country == other.country &&
a.company == other.company
return a.street == other.street && a.city == other.city && a.state == other.state
&& a.postal_code == other.postal_code && a.country == other.country
&& a.company == other.company
}
// is_empty checks if the address is completely empty
pub fn (a Address) is_empty() bool {
return a.street.len == 0 &&
a.city.len == 0 &&
a.postal_code.len == 0 &&
a.country.len == 0 &&
a.state == none &&
a.company == none
return a.street.len == 0 && a.city.len == 0 && a.postal_code.len == 0 && a.country.len == 0
&& a.state == none && a.company == none
}
// validate performs basic validation on the address
@@ -192,22 +185,22 @@ pub fn (a Address) validate() !bool {
if a.is_empty() {
return error('Address cannot be empty')
}
if a.street.len == 0 {
return error('Street address is required')
}
if a.city.len == 0 {
return error('City is required')
}
if a.postal_code.len == 0 {
return error('Postal code is required')
}
if a.country.len == 0 {
return error('Country is required')
}
return true
}
}

View File

@@ -9,11 +9,12 @@ module models
// - Payment models (Stripe webhooks)
// - Location models (addresses)
// Re-export all model modules for easy access
pub use core
pub use finance
pub use flow
pub use business
pub use identity
pub use payment
pub use location
// Import all model modules for easy access
import freeflowuniverse.herolib.threefold.models.core
import freeflowuniverse.herolib.threefold.models.finance
import freeflowuniverse.herolib.threefold.models.flow
import freeflowuniverse.herolib.threefold.models.business
import freeflowuniverse.herolib.threefold.models.identity
import freeflowuniverse.herolib.threefold.models.payment
import freeflowuniverse.herolib.threefold.models.location

View File

@@ -17,8 +17,8 @@ pub mut:
// StripeEventData represents the data portion of a Stripe event
pub struct StripeEventData {
pub mut:
object string // The main object data (JSON as string for flexibility)
previous_attributes ?string // Previous attributes if this is an update (JSON as string)
object string // The main object data (JSON as string for flexibility)
previous_attributes ?string // Previous attributes if this is an update (JSON as string)
}
// StripeEventRequest represents request information for a Stripe event
@@ -31,15 +31,15 @@ pub mut:
// new creates a new StripeWebhookEvent
pub fn StripeWebhookEvent.new() StripeWebhookEvent {
return StripeWebhookEvent{
id: ''
object: 'event'
api_version: none
created: 0
data: StripeEventData.new()
livemode: false
id: ''
object: 'event'
api_version: none
created: 0
data: StripeEventData.new()
livemode: false
pending_webhooks: 0
request: none
event_type: ''
request: none
event_type: ''
}
}
@@ -100,7 +100,7 @@ pub fn (mut event StripeWebhookEvent) event_type(event_type string) StripeWebhoo
// new creates a new StripeEventData
pub fn StripeEventData.new() StripeEventData {
return StripeEventData{
object: ''
object: ''
previous_attributes: none
}
}
@@ -120,7 +120,7 @@ pub fn (mut data StripeEventData) previous_attributes(previous_attributes ?strin
// new creates a new StripeEventRequest
pub fn StripeEventRequest.new() StripeEventRequest {
return StripeEventRequest{
id: none
id: none
idempotency_key: none
}
}
@@ -219,4 +219,4 @@ pub fn (event StripeWebhookEvent) get_event_action() string {
return parts[parts.len - 1]
}
return ''
}
}

View File

@@ -41,8 +41,8 @@ pub fn (mut docsite DocSite) generate_docs() ! {
}
if gen.errors.len > 0 {
println("Page List: is header collection and page name per collection.\nAvailable pages:\n${gen.client.list_markdown()!}")
return error('Errors occurred during site generation:\n${gen.errors.join('\n\n')}\n')
println('Page List: is header collection and page name per collection.\nAvailable pages:\n${gen.client.list_markdown()!}')
return error('Errors occurred during site generation:\n${gen.errors.join('\n\n')}\n')
}
}