Merge branch 'development_ds' into development_fix_herobin

* development_ds:
  ...
  refactor: improve session action handling in play_core
  refactor: adapt docusaurus to use generic site module
  ...
  ...
  ...
  ...
  ...
  ...
  ...
  ...
  ...
  ...

# Conflicts:
#	lib/biz/bizmodel/play.v
#	lib/threefold/grid4/cloudslices/play.v
#	lib/threefold/grid4/farmingsimulator/play.v
#	lib/web/docusaurus/dsite.v
#	lib/web/docusaurus/dsite_add.v
#	lib/web/docusaurus/dsite_configuration.v
#	lib/web/docusaurus/dsite_generate.v
2025-08-03 04:47:53 +02:00
111 changed files with 5160 additions and 4257 deletions

.kilocode/mcp.json

@@ -0,0 +1,9 @@
{
  "chat.mcp.discovery.enabled": true,
  "mcpServers": {
    "VSCode": {
      "type": "internal",
      "tools": ["chat_send", "apply_edits", "read_file", "write_file", "get_file_tree"]
    }
  }
}

@@ -1,39 +0,0 @@
generate specs for /Users/despiegk/code/github/freeflowuniverse/herolib/lib/circles/actions
use mcp
get the output of it in actions/specs.v
then use these specs.v
to generate play command instructions, see @3_heroscript_vlang.md
this play command takes heroscript in and will then call the methods for actions as they exist ONLY in @lib/circles/actions/db
so the play only calls the methods in @lib/circles/actions/db
# put the play commands in
/Users/despiegk/code/github/freeflowuniverse/herolib/lib/circles/actions/play
do one file in the module per action
each method is an action
put them all on one struct called Player
in this Player we have a method per action
Player has a property called actor: which is the name of the actor as used in the heroscript
Player also has an output property called return format, which is an enum for heroscript or json
the input of each method (action) is a params object
on Player there is a method play which takes text or a playbook as input
if text is given, a playbook is created from it
then we walk over all actions
all the ones belonging to this actor are given to the right method (see the sketch below)
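A minimal sketch of the Player these instructions describe, reusing the playbook and params APIs that appear elsewhere in this commit; the ReturnFormat enum and example_action name are assumptions, not the generated code:

```v
module play

import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.data.paramsparser

// ReturnFormat and example_action are hypothetical names used for illustration only.
pub enum ReturnFormat {
	heroscript
	json
}

pub struct Player {
pub mut:
	actor         string       // actor name as used in the heroscript
	return_format ReturnFormat // output format: heroscript or json
}

// play takes heroscript text, builds a playbook from it and dispatches every
// action of this actor to the matching method on Player.
pub fn (mut p Player) play(text string) ! {
	mut plbook := playbook.new(text: text)!
	for mut action in plbook.find(filter: '${p.actor}.')! {
		if action.name == 'example_action' {
			p.example_action(action.params)!
		}
		action.done = true
	}
}

// one method per action; the input is always a params object
fn (mut p Player) example_action(params paramsparser.Params) ! {
	name := params.get_default('name', '')!
	println('example_action called for ${name}')
}
```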

File diff suppressed because it is too large

@@ -0,0 +1,18 @@
in hero.db
make a generic function which takes any of the root objects (which inherit from Base)
and gets a json from it, and add a save() function to it to store it in postgresql (see postgresql client)
and also a get() which deserializes it again
the json is put in a table as follows
tablename: $dirname_$rootobjectname all lowercase
each table has
- id
- ... the fields which represent indexes (see @[index])
- data which is the json
for information on how to use generics see aiprompts/v_advanced/generics.md and aiprompts/v_advanced/reflection.md
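A minimal sketch of such a generic helper, assuming V's db.pg and json modules and an id field that comes from Base; the table naming and SQL are illustrative only, not the actual hero_db implementation:

```v
module hero_db

import db.pg
import json

// save stores any root object as json in postgresql.
// Table name: $dirname_$rootobjectname, all lowercase; obj.id is assumed to come from Base.
pub fn save[T](db pg.DB, dirname string, obj T) ! {
	table := '${dirname}_${T.name.all_after_last('.')}'.to_lower()
	data := json.encode(obj)
	// naive quoting, only meant as a sketch
	db.exec("insert into ${table} (id, data) values (${obj.id}, '${data}')
	         on conflict (id) do update set data = '${data}';")!
}

// get loads a root object by id and deserializes the stored json again.
pub fn get[T](db pg.DB, dirname string, id u32) !T {
	table := '${dirname}_${T.name.all_after_last('.')}'.to_lower()
	rows := db.exec('select data from ${table} where id = ${id};')!
	if rows.len == 0 {
		return error('no ${T.name} with id ${id} in ${table}')
	}
	return json.decode(T, rows[0].vals[0] or { '' })!
}
```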

@@ -0,0 +1,45 @@
$NAME = calendar
walk over all models from biz: db/heromodels/src/models/$NAME in the rust repo
create nicely structured public models in Vlang (V), see instructions in herolib
put the results in /Users/despiegk/code/github/freeflowuniverse/herolib/lib/hero/models/$NAME
put a decorator on fields which need to be indexed: use @[index] at the end of the line of the struct property
copy the documentation as well and put on the vstruct and on its fields
make instructions so a coding agent can execute it, put the models in files, ...
keep it all simple
don't do anything additional for modules, don't do import
at top of each file we have ```module $NAME```
make sure all time related fields are in u64 format, use unix timestamp for that
don't create management classes, only output the structs, don't create a mod.v, don't make .v scripts executable, don't create a main.v
## now also make sure we use core.base as follows
```
import freeflowuniverse.herolib.hero.models.core
// Account represents a financial account for tracking balances and transactions
// Supports multiple account types (checking, savings, investment, etc.)
pub struct Account {
core.Base
```
remove Local BaseModel
make sure module ... is always at first line of file
- remove id from the model we update because it is in the Base
- created_at u64 // Creation timestamp
- updated_at u64 // Last modification timestamp
- basically each property in the Base should be removed from the model
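As an illustration of the expected output shape (the Event struct and its fields are hypothetical, not taken from the rust models):

```v
module calendar

import freeflowuniverse.herolib.hero.models.core

// Event shows the expected shape: core.Base is embedded, so id / created_at /
// updated_at are not repeated here; time fields are u64 unix timestamps.
pub struct Event {
	core.Base
pub mut:
	title      string
	start_time u64 @[index] // unix timestamp
	end_time   u64 // unix timestamp
	location   string
}
```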

@@ -0,0 +1 @@
Kimi K2 on Groq is doing well

@@ -0,0 +1,20 @@
in lib/hero/models
for governance and legal
make sure we use core.base as follows
import freeflowuniverse.herolib.hero.models.core
// Account represents a financial account for tracking balances and transactions
// Supports multiple account types (checking, savings, investment, etc.)
pub struct Account {
core.Base
remove Local BaseModel
make sure module ... is always at first line of file
- remove id from the model we update because it is in the Base
- created_at u64 // Creation timestamp
- updated_at u64 // Last modification timestamp
- basically each property in the Base should be removed from the model
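For example, a migrated model under these rules might look like this (Contract and its fields are hypothetical):

```v
module legal

import freeflowuniverse.herolib.hero.models.core

// Contract embeds core.Base, so id, created_at and updated_at
// from the old local BaseModel are removed from the struct itself.
pub struct Contract {
	core.Base
pub mut:
	title     string
	parties   []string
	signed_at u64 // unix timestamp
}
```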

@@ -0,0 +1,64 @@
```v
struct Repo[T] {
	db DB
}

struct User {
	id   int
	name string
}

struct Post {
	id      int
	user_id int
	title   string
	body    string
}

fn new_repo[T](db DB) Repo[T] {
	return Repo[T]{db: db}
}

// This is a generic function. V will generate it for every type it's used with.
fn (r Repo[T]) find_by_id(id int) ?T {
	table_name := T.name // in this example getting the name of the type gives us the table name
	return r.db.query_one[T]('select * from ${table_name} where id = ?', id)
}

db := new_db()
users_repo := new_repo[User](db) // returns Repo[User]
posts_repo := new_repo[Post](db) // returns Repo[Post]
user := users_repo.find_by_id(1)? // find_by_id[User]
post := posts_repo.find_by_id(1)? // find_by_id[Post]
```
Currently generic function definitions must declare their type parameters, but in the future V will infer generic type parameters from single-letter type names in runtime parameter types. This is why find_by_id can omit [T]: the receiver argument r already uses the generic type T.
```v
fn compare[T](a T, b T) int {
	if a < b {
		return -1
	}
	if a > b {
		return 1
	}
	return 0
}

// compare[int]
println(compare(1, 0)) // Outputs: 1
println(compare(1, 1)) // 0
println(compare(1, 2)) // -1

// compare[string]
println(compare('1', '0')) // Outputs: 1
println(compare('1', '1')) // 0
println(compare('1', '2')) // -1

// compare[f64]
println(compare(1.1, 1.0)) // Outputs: 1
println(compare(1.1, 1.1)) // 0
println(compare(1.1, 1.2)) // -1
```

@@ -4,7 +4,7 @@ import freeflowuniverse.herolib.core.generator.generic as generator
import freeflowuniverse.herolib.core.pathlib
mut args := generator.GeneratorArgs{
path: '~/code/github/freeflowuniverse/herolib/lib/installers/infra'
path: '~/code/github/freeflowuniverse/herolib/lib/clients/postgresql_client'
force: true
}

examples/hero/db/psql2.vsh

@@ -0,0 +1,116 @@
#!/usr/bin/env -S v -n -cg -w -gc none -cc tcc -d use_openssl -enable-globals run
// #!/usr/bin/env -S v -n -w -enable-globals run
import freeflowuniverse.herolib.clients.postgresql_client
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.hero.models.circle
import freeflowuniverse.herolib.core.playcmds
import freeflowuniverse.herolib.hero.db.hero_db
import db.pg
// psql -h /tmp -U myuser -d mydb
mut db := pg.connect(pg.Config{
host: '/tmp'
port: 5432
user: 'myuser'
password: 'mypassword'
dbname: 'mydb'
})!
mut r:=db.exec("select * from users;")!
println(r)
// // Configure PostgreSQL client
// heroscript := "
// !!postgresql_client.configure
// password:'testpass'
// name:'test5'
// user: 'testuser'
// port: 5432
// host: 'localhost'
// dbname: 'testdb'
// "
// mut plbook := playbook.new(text: heroscript)!
// postgresql_client.play(mut plbook)!
// Configure PostgreSQL client
heroscript := "
!!postgresql_client.configure
password:'mypassword'
name:'aaa'
user: 'myuser'
host: '/tmp'
dbname: 'mydb'
"
mut plbook := playbook.new(text: heroscript)!
postgresql_client.play(mut plbook)!
// //Get the configured client
mut db_client := postgresql_client.get(name: 'aaa')!
// println(db_client)
// // Check if test database exists, create if not
// if !db_client.db_exists('test')! {
// println('Creating database test...')
// db_client.db_create('test')!
// }
// // Switch to test database
// db_client.dbname = 'test'
// // Create table if not exists
// create_table_sql := 'CREATE TABLE IF NOT EXISTS users (
// id SERIAL PRIMARY KEY,
// name VARCHAR(100) NOT NULL,
// email VARCHAR(255) UNIQUE NOT NULL,
// created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
// )'
// println('Creating table users if not exists...')
// db_client.exec(create_table_sql)!
// println('Database and table setup completed successfully!')
// // Create HeroDB for Circle type
// mut circle_db := hero_db.new[circle.Circle]()!
// println(circle_db)
// if true{panic("sd")}
// circle_db.ensure_table()!
// // Create and save a circle
// mut my_circle := circle.Circle{
// name: "Tech Community"
// description: "A community for tech enthusiasts"
// domain: "tech.example.com"
// config: circle.CircleConfig{
// max_members: 1000
// allow_guests: true
// auto_approve: false
// theme: "modern"
// }
// status: circle.CircleStatus.active
// }
// circle_db.save(&my_circle)!
// // Retrieve the circle
// retrieved_circle := circle_db.get_by_index({
// "domain": "tech.example.com"
// })!
// // Search circles by status
// active_circles := circle_db.search_by_index("status", "active")!
//https://www.moncefbelyamani.com/how-to-install-postgresql-on-a-mac-with-homebrew-and-lunchy/

@@ -2,7 +2,7 @@
import freeflowuniverse.herolib.web.docusaurus
docusaurus.new(
docusaurus.add(
heroscript: '
!!docusaurus.define

@@ -1,6 +1,6 @@
!!hero_code.generate_client
name: "postgresql_client"
classname: "PostgresClient"
classname: "PostgresqlClient"
hasconfig: true
singleton: false
default: true

@@ -6,12 +6,12 @@ import freeflowuniverse.herolib.osal.core as osal
import os
import freeflowuniverse.herolib.ui.console
pub fn (mut self PostgresClient) check() ! {
pub fn (mut self PostgresqlClient) check() ! {
mut db := self.db()!
db.exec('SELECT version();') or { return error('can\'t select version from database.\n${self}') }
}
pub fn (mut self PostgresClient) exec(c_ string) ![]pg.Row {
pub fn (mut self PostgresqlClient) exec(c_ string) ![]pg.Row {
mut db := self.db()!
mut c := c_
if !(c.trim_space().ends_with(';')) {
@@ -22,7 +22,7 @@ pub fn (mut self PostgresClient) exec(c_ string) ![]pg.Row {
}
}
pub fn (mut self PostgresClient) db_exists(name_ string) !bool {
pub fn (mut self PostgresqlClient) db_exists(name_ string) !bool {
mut db := self.db()!
r := db.exec("SELECT datname FROM pg_database WHERE datname='${name_}';")!
if r.len == 1 {
@@ -35,7 +35,7 @@ pub fn (mut self PostgresClient) db_exists(name_ string) !bool {
return false
}
pub fn (mut self PostgresClient) db_create(name_ string) ! {
pub fn (mut self PostgresqlClient) db_create(name_ string) ! {
name := texttools.name_fix(name_)
mut db := self.db()!
if !self.db_exists(name)! {
@@ -47,7 +47,7 @@ pub fn (mut self PostgresClient) db_create(name_ string) ! {
}
}
pub fn (mut self PostgresClient) db_delete(name_ string) ! {
pub fn (mut self PostgresqlClient) db_delete(name_ string) ! {
mut db := self.db()!
name := texttools.name_fix(name_)
self.check()!
@@ -60,7 +60,7 @@ pub fn (mut self PostgresClient) db_delete(name_ string) ! {
}
}
pub fn (mut self PostgresClient) db_names() ![]string {
pub fn (mut self PostgresqlClient) db_names() ![]string {
mut res := []string{}
sqlstr := "SELECT datname FROM pg_database WHERE datistemplate = false and datname != 'postgres' and datname != 'root';"
for row in self.exec(sqlstr)! {
@@ -77,7 +77,7 @@ pub mut:
dest string
}
pub fn (mut self PostgresClient) backup(args BackupParams) ! {
pub fn (mut self PostgresqlClient) backup(args BackupParams) ! {
if args.dest == '' {
return error('specify the destination please')
}

@@ -5,7 +5,7 @@ import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
__global (
postgresql_client_global map[string]&PostgresClient
postgresql_client_global map[string]&PostgresqlClient
postgresql_client_default string
)
@@ -25,10 +25,10 @@ fn args_get(args_ ArgsGet) ArgsGet {
return args
}
pub fn get(args_ ArgsGet) !&PostgresClient {
pub fn get(args_ ArgsGet) !&PostgresqlClient {
mut context := base.context()!
mut args := args_get(args_)
mut obj := PostgresClient{
mut obj := PostgresqlClient{
name: args.name
}
if args.name !in postgresql_client_global {
@@ -48,7 +48,7 @@ pub fn get(args_ ArgsGet) !&PostgresClient {
}
// register the config for the future
pub fn set(o PostgresClient) ! {
pub fn set(o PostgresqlClient) ! {
set_in_mem(o)!
mut context := base.context()!
heroscript := heroscript_dumps(o)!
@@ -72,7 +72,7 @@ pub fn delete(args_ ArgsGet) ! {
}
// only sets in mem, does not set as config
fn set_in_mem(o PostgresClient) ! {
fn set_in_mem(o PostgresqlClient) ! {
mut o2 := obj_init(o)!
postgresql_client_global[o.name] = &o2
postgresql_client_default = o.name

@@ -1,6 +1,8 @@
module postgresql_client
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.ui.console
import os
import db.pg
@@ -8,55 +10,27 @@ pub const version = '0.0.0'
const singleton = false
const default = true
pub fn heroscript_default() !string {
heroscript := "
!!postgresql_client.configure
name:'default'
user: 'root'
port: 5432
host: 'localhost'
password: ''
dbname: 'postgres'
"
return heroscript
}
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@[heap]
pub struct PostgresClient {
pub struct PostgresqlClient {
mut:
db_ ?pg.DB
db_ ?pg.DB @[skip]
pub mut:
name string = 'default'
user string = 'root'
port int = 5432
host string = 'localhost'
password string
password string = ''
dbname string = 'postgres'
}
fn cfg_play(p paramsparser.Params) !PostgresClient {
mut mycfg := PostgresClient{
name: p.get_default('name', 'default')!
user: p.get_default('user', 'root')!
port: p.get_int_default('port', 5432)!
host: p.get_default('host', 'localhost')!
password: p.get_default('password', '')!
dbname: p.get_default('dbname', 'postgres')!
}
set(mycfg)!
return mycfg
}
fn obj_init(obj_ PostgresClient) !PostgresClient {
fn obj_init(obj_ PostgresqlClient) !PostgresqlClient {
// never call get here, only thing we can do here is work on object itself
mut obj := obj_
return obj
}
pub fn (mut self PostgresClient) db() !pg.DB {
// console.print_debug(args)
pub fn (mut self PostgresqlClient) db() !pg.DB {
console.print_debug(self)
mut db := self.db_ or {
mut db_ := pg.connect(
host: self.host
@@ -70,3 +44,16 @@ pub fn (mut self PostgresClient) db() !pg.DB {
return db
}
/////////////NORMALLY NO NEED TO TOUCH
pub fn heroscript_dumps(obj PostgresqlClient) !string {
return encoderhero.encode[PostgresqlClient](obj)!
}
pub fn heroscript_loads(heroscript string) !PostgresqlClient {
mut obj := encoderhero.decode[PostgresqlClient](heroscript)!
return PostgresqlClient{
db_: pg.DB{}
}
}

@@ -77,7 +77,6 @@ pub fn (mut self Session) env_set_once(key string, value string) ! {
self.env_set(key, value)!
}
// Get an environment variable
pub fn (mut self Session) env_get(key string) !string {
return self.env[key] or { return error("can't find env in session ${self.name}") }

@@ -1,6 +1,7 @@
module herocmds
import freeflowuniverse.herolib.web.docusaurus
import freeflowuniverse.herolib.web.site
import freeflowuniverse.herolib.core.pathlib
import os
import cli { Command, Flag }
@@ -154,32 +155,31 @@ fn cmd_docusaurus_execute(cmd Command) ! {
mut builddevpublish := cmd.flags.get_bool('builddevpublish') or { false }
mut dev := cmd.flags.get_bool('dev') or { false }
mut docs := docusaurus.new(
template_update: update // Changed 'update' to 'template_update'
path_build: build_path
heroscript_path: heroscript_config_dir // Pass the directory path
)!
// Create a site first using the new API
mut generic_site := site.new(name: 'cli_site')!
mut site := docs.add(
git_url: url // Map CLI 'url' flag to DSiteGetArgs 'git_url'
update: update
path_publish: publish_path // Map CLI 'publish' flag to DSiteGetArgs 'path_publish'
init: init
open: open
// Removed build_path and deploykey as they are not in DSiteGetArgs
// Add docusaurus site
mut dsite := docusaurus.add(
site: generic_site
path_src: url // Use URL as source path for now
path_build: build_path
path_publish: publish_path
reset: false
template_update: update
install: init
)!
// Conditional site actions based on flags
if buildpublish {
site.build_publish()!
dsite.build_publish()!
} else if builddevpublish {
site.build_dev_publish()!
dsite.build_dev_publish()!
} else if dev {
site.dev(host: 'localhost', port: 3000)!
dsite.dev(host: 'localhost', port: 3000, open: open)!
} else if open {
site.open()!
dsite.open('localhost', 3000)!
} else {
// If no specific action (build/dev/open) is requested, just generate the site
site.generate()!
dsite.generate()!
}
}

@@ -128,7 +128,7 @@ pub fn (mut plbook PlayBook) names() ![]string {
// // - If actor == "", then matches all actors.
// // - If name == "", then matches all actions from the defined actor (if defined).
// // - If actiontype == .unknown, then matches all action types; when specified, filters by the action type, default .sal
// pub fn (mut plbook PlayBook) actions_find(args ActionGetArgs) ![]&Action {
// pub fn (mut plbook PlayBook) find(args ActionGetArgs) ![]&Action {
// mut res := []&Action{}
// for a in plbook.actions {
// // If id is specified, return only the action with that id

@@ -2,7 +2,6 @@ module playcmds
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.ui.console
// !!context.configure
@@ -10,9 +9,7 @@ import freeflowuniverse.herolib.ui.console
// coderoot:...
// interactive:true
fn play_core(mut plbook PlayBook) ! {
// for mut action in plbook.find(filter: 'context.configure')! {
// mut p := action.params
// mut session := plbook.session
@@ -37,11 +34,11 @@ fn play_core(mut plbook PlayBook) ! {
if playrunpath.len == 0 {
action.name = 'pull'
playrunpath = gittools.get_repo_path(
path:action.params.get_default('path', '')!
git_url:action.params.get_default('git_url', '')!
git_reset:action.params.get_default_false('git_reset')
git_pull:action.params.get_default_false('git_pull')
)!
path: action.params.get_default('path', '')!
git_url: action.params.get_default('git_url', '')!
git_reset: action.params.get_default_false('git_reset')
git_pull: action.params.get_default_false('git_pull')
)!
}
if playrunpath.len == 0 {
return error("can't run a heroscript didn't find url or path.")
@@ -53,42 +50,31 @@ fn play_core(mut plbook PlayBook) ! {
content := action_.params.get_default('content', "didn't find content")!
console.print_header(content)
}
}
}
for mut action in plbook.find(filter: 'session.')! {
mut p := action.params
mut session := plbook.session
//!!session.env_set key:'JWT_SHARED_KEY' val:'...'
if p.exists('env_set') {
if action.name == 'env_set' {
mut key := p.get('key')!
mut val := p.get('val')!
mut val := p.get('val') or { p.get('value')! }
session.env_set(key, val)!
}
if p.exists('env_set_once') {
if action.name == 'env_set_once' {
mut key := p.get('key')!
mut val := p.get('val')!
session.env_set_once(key, val)!
}
mut val := p.get('val') or { p.get('value')! }
// Use env_set instead of env_set_once to avoid duplicate errors
session.env_set(key, val)!
}
action.done = true
}
mut session := plbook.session
// CHANGE {...} args in plbook
println('plbook:${plbook}')
mut context := base.context()!
mut session := context.session_latest()!
sitename:=session.env_get('SITENAME') or {""}
println('session:${session}')
println('sitename:${sitename}')
if true{panic("dfghjkjhgfghjk")}
sitename := session.env_get('SITENAME') or { '' }
// for mut action in plbook.find(filter: 'core.coderoot_set')! {
// mut p := action.params
@@ -121,5 +107,4 @@ fn play_core(mut plbook PlayBook) ! {
// }
// action.done = true
// }
}

@@ -1,44 +1,10 @@
module playcmds
import freeflowuniverse.herolib.core.playbook { PlayBook }
// import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.web.docusaurus
fn play(mut plbook PlayBook) ! {
mut ds := docusaurus.new()!
mut action0 := plbook.get(filter: 'docusaurus.define')!
mut p0 := action0.params
path_publish := p0.get_default('path_publish', '')!
path_build := p0.get_default('path_build', '')! // don't do heroscript here because this could already be done before
ds = docusaurus.new(
path_publish: path_publish
path_build: path_build
install: plbook.exists(filter: 'docusaurus.reset') || plbook.exists(filter: 'docusaurus.update')
reset: plbook.exists(filter: 'docusaurus.reset')
template_update: plbook.exists(filter: 'docusaurus.reset') || plbook.exists(filter: 'docusaurus.update')
)!
actions := plbook.find(filter: 'docusaurus.generate')!
for action in actions {
mut p := action.params
mut site := ds.add(
name: p.get('name') or {return error("can't find name in params for docusaurus.add in action:\n${action.heroscript}")}
nameshort: p.get_default('nameshort', p.get('name')!)!
path: p.get_default('path', '')!
git_url: p.get_default('git_url', '')!
git_reset: p.get_default_false('git_reset')
git_root: p.get_default('git_root', '')!
git_pull: p.get_default_false('git_pull')
)!
}
// Use the new docusaurus.play() function which handles the new API structure
docusaurus.play(mut plbook)!
}

@@ -17,24 +17,29 @@ pub fn decode[T](data string) !T {
// decode_struct is a generic function that decodes a JSON map into the struct T.
fn decode_struct[T](_ T, data string) !T {
mut typ := T{}
// println(data)
$if T is $struct {
obj_name := texttools.snake_case(T.name.all_after_last('.'))
action_name := 'define.${obj_name}'
mut action_name := '${obj_name}.define'
if !data.contains(action_name) {
action_name = '${obj_name}.configure'
if !data.contains(action_name) {
return error('Data does not contain action name: ${obj_name}.define or ${action_name}')
}
}
actions_split := data.split('!!')
actions := actions_split.filter(it.starts_with(action_name))
// println('actions: ${actions}')
mut action_str := ''
// action_str := '!!define.${obj_name}'
if actions.len == 0 {
return T{}
} else {
if actions.len > 0 {
action_str = actions[0]
params_str := action_str.trim_string_left(action_name)
params := paramsparser.parse(params_str)!
typ = params.decode[T]()!
params := paramsparser.parse(params_str) or {
panic('could not parse: ${params_str}\n${err}')
}
typ = params.decode[T](typ)!
}
// panic('debuggge ${t_}\n${actions[0]}')
// return t_
$for field in T.fields {

@@ -39,7 +39,7 @@ pub fn encode[T](val T) !string {
// export exports an encoder into encoded heroscript
pub fn (e Encoder) export() !string {
mut script := e.params.export(
pre: '!!define.${e.action_names.join('.')}'
pre: '!!${e.action_names.join('.')}.configure'
indent: ' '
skip_empty: true
)
@@ -119,6 +119,7 @@ pub fn (mut e Encoder) encode_struct[T](t T) ! {
struct_attrs := attrs_get_reflection(mytype)
mut action_name := texttools.snake_case(T.name.all_after_last('.'))
// println('action_name: ${action_name} ${T.name}')
if 'alias' in struct_attrs {
action_name = struct_attrs['alias'].to_lower()
}

@@ -0,0 +1,235 @@
module encoderhero
pub struct PostgresqlClient {
pub mut:
name string = 'default'
user string = 'root'
port int = 5432
host string = 'localhost'
password string
dbname string = 'postgres'
}
const postgres_client_blank = '!!postgresql_client.configure'
const postgres_client_full = '!!postgresql_client.configure name:production user:app_user port:5433 host:db.example.com password:secret123 dbname:myapp'
const postgres_client_partial = '!!postgresql_client.configure name:dev host:localhost password:devpass'
const postgres_client_complex = "
!!postgresql_client.configure name:staging user:stage_user port:5434 host:staging.db.com password:stagepass dbname:stagingdb
"
fn test_postgres_client_decode_blank() ! {
mut client := decode[PostgresqlClient](postgres_client_blank)!
assert client.name == 'default'
assert client.user == 'root'
assert client.port == 5432
assert client.host == 'localhost'
assert client.password == ''
assert client.dbname == 'postgres'
}
fn test_postgres_client_decode_full() ! {
mut client := decode[PostgresqlClient](postgres_client_full)!
assert client.name == 'production'
assert client.user == 'app_user'
assert client.port == 5433
assert client.host == 'db.example.com'
assert client.password == 'secret123'
assert client.dbname == 'myapp'
}
fn test_postgres_client_decode_partial() ! {
mut client := decode[PostgresqlClient](postgres_client_partial)!
assert client.name == 'dev'
assert client.user == 'root' // default value
assert client.port == 5432 // default value
assert client.host == 'localhost'
assert client.password == 'devpass'
assert client.dbname == 'postgres' // default value
}
fn test_postgres_client_decode_complex() ! {
mut client := decode[PostgresqlClient](postgres_client_complex)!
assert client.name == 'staging'
assert client.user == 'stage_user'
assert client.port == 5434
assert client.host == 'staging.db.com'
assert client.password == 'stagepass'
assert client.dbname == 'stagingdb'
}
fn test_postgres_client_encode_decode_roundtrip() ! {
// Test encoding and decoding roundtrip
original := PostgresqlClient{
name: 'testdb'
user: 'testuser'
port: 5435
host: 'test.host.com'
password: 'testpass123'
dbname: 'testdb'
}
// Encode to heroscript
encoded := encode[PostgresqlClient](original)!
// println('Encoded heroscript: ${encoded}')
// if true {
// panic("sss")
// }
// Decode back from heroscript
decoded := decode[PostgresqlClient](encoded)!
// Verify roundtrip
assert decoded.name == original.name
assert decoded.user == original.user
assert decoded.port == original.port
assert decoded.host == original.host
assert decoded.password == original.password
assert decoded.dbname == original.dbname
}
fn test_postgres_client_encode() ! {
// Test encoding with different configurations
test_cases := [
PostgresqlClient{
name: 'minimal'
user: 'root'
port: 5432
host: 'localhost'
password: ''
dbname: 'postgres'
},
PostgresqlClient{
name: 'full_config'
user: 'admin'
port: 5433
host: 'remote.server.com'
password: 'securepass'
dbname: 'production'
},
PostgresqlClient{
name: 'localhost_dev'
user: 'dev'
port: 5432
host: '127.0.0.1'
password: 'devpassword'
dbname: 'devdb'
}
]
for client in test_cases {
encoded := encode[PostgresqlClient](client)!
decoded := decode[PostgresqlClient](encoded)!
assert decoded.name == client.name
assert decoded.user == client.user
assert decoded.port == client.port
assert decoded.host == client.host
assert decoded.password == client.password
assert decoded.dbname == client.dbname
}
}
// Play script for interactive testing
const play_script = "
# PostgresqlClient Encode/Decode Play Script
# This script demonstrates encoding and decoding PostgresqlClient configurations
!!postgresql_client.configure name:playground user:play_user
port:5432
host:localhost
password:playpass
dbname:playdb
# You can also use partial configurations
!!postgresql_client.configure name:quick_test host:127.0.0.1
# Default configuration (all defaults)
!!postgresql_client.configure
"
fn test_play_script() ! {
// Test the play script with multiple configurations
lines := play_script.split_into_lines().filter(fn (line string) bool {
return line.trim(' ') != '' && !line.starts_with('#')
})
mut clients := []PostgresqlClient{}
for line in lines {
if line.starts_with('!!postgresql_client.configure') {
client := decode[PostgresqlClient](line)!
clients << client
}
}
assert clients.len == 3
// First client: full configuration
assert clients[0].name == 'playground'
assert clients[0].user == 'play_user'
assert clients[0].port == 5432
// Second client: partial configuration
assert clients[1].name == 'quick_test'
assert clients[1].host == '127.0.0.1'
assert clients[1].user == 'root' // default
// Third client: defaults only
assert clients[2].name == 'default'
assert clients[2].host == 'localhost'
assert clients[2].port == 5432
}
// Utility function for manual testing
pub fn run_play_script() ! {
println('=== PostgresqlClient Encode/Decode Play Script ===')
println('Testing encoding and decoding of PostgresqlClient configurations...')
// Test 1: Basic encoding
println('\n1. Testing basic encoding...')
client := PostgresqlClient{
name: 'example'
user: 'example_user'
port: 5432
host: 'example.com'
password: 'example_pass'
dbname: 'example_db'
}
encoded := encode[PostgresqlClient](client)!
println('Encoded: ${encoded}')
decoded := decode[PostgresqlClient](encoded)!
println('Decoded name: ${decoded.name}')
println('Decoded host: ${decoded.host}')
// Test 2: Play script
println('\n2. Testing play script...')
test_play_script()!
println('Play script test passed!')
// Test 3: Edge cases
println('\n3. Testing edge cases...')
edge_client := PostgresqlClient{
name: 'edge'
user: ''
port: 0
host: ''
password: ''
dbname: ''
}
edge_encoded := encode[PostgresqlClient](edge_client)!
edge_decoded := decode[PostgresqlClient](edge_encoded)!
assert edge_decoded.name == 'edge'
assert edge_decoded.user == ''
assert edge_decoded.port == 0
println('Edge cases test passed!')
println('\n=== All tests completed successfully! ===')
}

@@ -11,13 +11,13 @@ import freeflowuniverse.herolib.clients.postgresql_client
pub struct LocationDB {
pub mut:
db pg.DB
db_client postgresql_client.PostgresClient
db_client postgresql_client.PostgresqlClient
tmp_dir pathlib.Path
db_dir pathlib.Path
}
// new_location_db creates a new LocationDB instance
pub fn new_location_db(mut db_client postgresql_client.PostgresClient, reset bool) !LocationDB {
pub fn new_location_db(mut db_client postgresql_client.PostgresqlClient, reset bool) !LocationDB {
mut db_dir := pathlib.get_dir(path: '${os.home_dir()}/hero/var/db/location.db', create: true)!
// Create locations database if it doesn't exist

@@ -6,11 +6,11 @@ import freeflowuniverse.herolib.clients.postgresql_client
pub struct Location {
mut:
db LocationDB
db_client postgresql_client.PostgresClient
db_client postgresql_client.PostgresqlClient
}
// new creates a new Location instance
pub fn new(mut db_client postgresql_client.PostgresClient, reset bool) !Location {
pub fn new(mut db_client postgresql_client.PostgresqlClient, reset bool) !Location {
db := new_location_db(mut db_client, reset)!
return Location{
db: db

@@ -6,25 +6,25 @@ import v.reflection
// import freeflowuniverse.herolib.data.encoderhero
// TODO: support more field types
pub fn (params Params) decode[T]() !T {
// work around to allow recursive decoding
// otherwise v cant infer generic type for child fields that are structs
return params.decode_struct[T](T{})!
pub fn (params Params) decode[T](args T) !T {
return params.decode_struct[T](args)!
}
pub fn (params Params) decode_struct[T](_ T) !T {
pub fn (params Params) decode_struct[T](start T) !T {
mut t := T{}
$for field in T.fields {
$if field.is_enum {
t.$(field.name) = params.get_int(field.name) or { 0 }
t.$(field.name) = params.get_int(field.name) or { int(t.$(field.name)) }
} $else {
// super annoying didn't find other way, then to ignore options
$if field.is_option {
// For optional fields, if the key exists, decode it. Otherwise, leave it as none.
if params.exists(field.name) {
t.$(field.name) = params.decode_value(t.$(field.name), field.name)!
}
} $else {
if field.name[0].is_capital() {
// embed := params.decode_struct(t.$(field.name))!
t.$(field.name) = params.decode_struct(t.$(field.name))!
// panic("to implement")
} else {
t.$(field.name) = params.decode_value(t.$(field.name), field.name)!
}
@@ -34,7 +34,7 @@ pub fn (params Params) decode_struct[T](_ T) !T {
return t
}
pub fn (params Params) decode_value[T](_ T, key string) !T {
pub fn (params Params) decode_value[T](val T, key string) !T {
// $if T is $option {
// return error("is option")
// }
@@ -42,7 +42,7 @@ pub fn (params Params) decode_value[T](_ T, key string) !T {
// TODO: handle required fields
if !params.exists(key) {
return T{}
return val
}
$if T is string {
@@ -57,9 +57,10 @@ pub fn (params Params) decode_value[T](_ T, key string) !T {
return params.get_list(key)!
} $else $if T is []int {
return params.get_list_int(key)!
} $else $if T is []bool {
return params.get_list_bool(key)!
} $else $if T is []u32 {
lst := params.get_list_u32(key)!
return lst
return params.get_list_u32(key)!
} $else $if T is time.Time {
time_str := params.get(key)!
// todo: 'handle other null times'
@@ -82,6 +83,18 @@ pub fn (params Params) decode_value[T](_ T, key string) !T {
return T{}
}
pub fn (params Params) get_list_bool(key string) ![]bool {
mut res := []bool{}
val := params.get(key)!
if val.len == 0 {
return res
}
for item in val.split(',') {
res << item.trim_space().bool()
}
return res
}
@[params]
pub struct EncodeArgs {
pub:
@@ -89,13 +102,6 @@ pub:
}
pub fn encode[T](t T, args EncodeArgs) !Params {
$if t is $option {
// unwrap and encode optionals
workaround := t
if workaround != none {
encode(t, args)!
}
}
mut params := Params{}
// struct_attrs := attrs_get_reflection(mytype)
@@ -107,7 +113,27 @@ pub fn encode[T](t T, args EncodeArgs) !Params {
if 'alias' in field_attrs {
key = field_attrs['alias']
}
$if val is string || val is int || val is bool || val is i64 || val is u32
$if field.is_option {
// Handle optional fields
if val != none {
// Unwrap the optional value before type checking and encoding
// Get the unwrapped value using reflection
// This is a workaround for V's reflection limitations with optionals
// We assume that if val != none, then it can be safely unwrapped
// and its underlying type can be determined.
// This might require a more robust way to get the underlying value
// if V's reflection doesn't provide a direct 'unwrap' for generic `val`.
// For now, we'll rely on the type checks below.
// The `val` here is the actual value of the field, which is `?T`.
// We need to check the type of `field.typ` to know what `T` is.
// Revert to simpler handling for optional fields
// Rely on V's string interpolation for optional types
// If val is none, this block will be skipped.
// If val is not none, it will be converted to string.
params.set(key, '${val}')
}
} $else $if val is string || val is int || val is bool || val is i64 || val is u32
|| val is time.Time || val is ourtime.OurTime {
params.set(key, '${val}')
} $else $if field.is_enum {
@@ -136,6 +162,16 @@ pub fn encode[T](t T, args EncodeArgs) !Params {
key: field.name
value: v2
}
} $else $if field.typ is []bool {
mut v2 := ''
for i in val {
v2 += '${i},'
}
v2 = v2.trim(',')
params.params << Param{
key: field.name
value: v2
}
} $else $if field.typ is []u32 {
mut v2 := ''
for i in val {

@@ -11,6 +11,7 @@ struct TestStruct {
liststr []string
listint []int
listbool []bool
listu32 []u32
child TestChild
}
@@ -21,6 +22,7 @@ struct TestChild {
child_liststr []string
child_listint []int
child_listbool []bool
child_listu32 []u32
}
const test_child = TestChild{
@@ -29,6 +31,8 @@ const test_child = TestChild{
child_yesno: false
child_liststr: ['three', 'four']
child_listint: [3, 4]
child_listbool: [true, false]
child_listu32: [u32(5), u32(6)]
}
const test_struct = TestStruct{
@@ -42,9 +46,12 @@ const test_struct = TestStruct{
yesno: true
liststr: ['one', 'two']
listint: [1, 2]
listbool: [true, false]
listu32: [u32(7), u32(8)]
child: test_child
}
const test_child_params = Params{
params: [
Param{
@@ -67,6 +74,14 @@ const test_child_params = Params{
key: 'child_listint'
value: '3,4'
},
Param{
key: 'child_listbool'
value: 'true,false'
},
Param{
key: 'child_listu32'
value: '5,6'
},
]
}
@@ -74,9 +89,6 @@ const test_params = Params{
params: [Param{
key: 'name'
value: 'test'
}, Param{
key: 'nick'
value: 'test_nick'
}, Param{
key: 'birthday'
value: '2012-12-12 00:00:00'
@@ -92,22 +104,64 @@ const test_params = Params{
}, Param{
key: 'listint'
value: '1,2'
}, Param{
key: 'listbool'
value: 'true,false'
}, Param{
key: 'listu32'
value: '7,8'
}, Param{
key: 'child'
value: test_child_params.export()
}]
}
fn test_decode() {
// test single level struct
decoded_child := test_child_params.decode[TestChild]()!
assert decoded_child == test_child
// IMPORTANT OPTIONALS ARE NOT SUPPORTED AND WILL NOT BE ENCODED FOR NOW (unless we find ways how to deal with attributes to not encode skipped elements)
fn test_encode_struct() {
encoded_struct := encode[TestStruct](test_struct)!
assert encoded_struct == test_params
}
// test recursive decode struct with child
decoded := test_params.decode[TestStruct]()!
assert decoded == test_struct
fn test_decode_struct() {
decoded_struct := test_params.decode[TestStruct](TestStruct{})!
assert decoded_struct.name == test_struct.name
assert decoded_struct.birthday.day == test_struct.birthday.day
assert decoded_struct.birthday.month == test_struct.birthday.month
assert decoded_struct.birthday.year == test_struct.birthday.year
assert decoded_struct.number == test_struct.number
assert decoded_struct.yesno == test_struct.yesno
assert decoded_struct.liststr == test_struct.liststr
assert decoded_struct.listint == test_struct.listint
assert decoded_struct.listbool == test_struct.listbool
assert decoded_struct.listu32 == test_struct.listu32
assert decoded_struct.child == test_struct.child
}
fn test_optional_field() {
mut test_struct_with_nick := TestStruct{
name: test_struct.name
nick: 'test_nick'
birthday: test_struct.birthday
number: test_struct.number
yesno: test_struct.yesno
liststr: test_struct.liststr
listint: test_struct.listint
listbool: test_struct.listbool
listu32: test_struct.listu32
child: test_struct.child
}
encoded_struct_with_nick := encode[TestStruct](test_struct_with_nick)!
assert encoded_struct_with_nick.get('nick')! == 'test_nick'
decoded_struct_with_nick := encoded_struct_with_nick.decode[TestStruct](TestStruct{})!
assert decoded_struct_with_nick.nick or { '' } == 'test_nick'
// Test decoding when optional field is not present in params
mut params_without_nick := test_params
params_without_nick.params = params_without_nick.params.filter(it.key != 'nick')
decoded_struct_without_nick := params_without_nick.decode[TestStruct](TestStruct{})!
assert decoded_struct_without_nick.nick == none
}
fn test_encode() {

@@ -45,12 +45,12 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
mut args := args_
// console.print_debug('git do ${args.cmd}')
if args.path == '' {
if args.path == '' && args.url == '' && args.repo == '' && args.account == '' && args.provider == '' && args.filter == ''{
args.path = os.getwd()
}
// see if its one repo we are in, based on current path
if args.repo == '' && args.account == '' && args.provider == '' && args.filter == '' {
if args.path != '' {
mut curdiro := pathlib.get_dir(path: args.path, create: false)!
mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
if parentpath.path != '' {
@@ -60,6 +60,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
args.provider = r0.provider
}
}
// see if a url was used means we are in 1 repo
if args.url.len > 0 {
if !(args.repo == '' && args.account == '' && args.provider == '' && args.filter == '') {

@@ -1,175 +0,0 @@
module actionprocessor
import freeflowuniverse.herolib.circles.core.db as core_db
import freeflowuniverse.herolib.circles.mcc.db as mcc_db
import freeflowuniverse.herolib.circles.actions.db as actions_db
import freeflowuniverse.herolib.circles.base { SessionState }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.redisclient
__global (
circle_global map[string]&CircleCoordinator
circle_default string
action_queues map[string]&ActionQueue
)
// HeroRunner is the main factory for managing jobs, agents, services, circles and names
@[heap]
pub struct CircleCoordinator {
pub mut:
name string // is a unique name on planetary scale is a dns name
agents &core_db.AgentDB
circles &core_db.CircleDB
names &core_db.NameDB
mails &mcc_db.MailDB
calendar &mcc_db.CalendarDB
jobs &actions_db.JobDB
action_queues map[string]&ActionQueue
session_state SessionState
}
@[params]
pub struct CircleCoordinatorArgs {
pub mut:
name string = 'local'
pubkey string // pubkey of user who called this
addr string // mycelium address
path string
}
// new creates a new CircleCoordinator instance
pub fn new(args_ CircleCoordinatorArgs) !&CircleCoordinator {
mut args := args_
args.name = texttools.name_fix(args.name)
if args.name in circle_global {
mut c := circle_global[args.name] or { panic('bug') }
return c
}
mut session_state := base.new_session(base.StateArgs{
name: args.name
pubkey: args.pubkey
addr: args.addr
path: args.path
})!
// os.mkdir_all(mypath)!
// Create the directories if they don't exist// SHOULD BE AUTOMATIC
// os.mkdir_all(os.join_path(mypath, 'data_core'))!
// os.mkdir_all(os.join_path(mypath, 'data_mcc'))!
// os.mkdir_all(os.join_path(mypath, 'meta_core'))!
// os.mkdir_all(os.join_path(mypath, 'meta_mcc'))! //message, contacts, calendar
// Initialize the db handlers with proper ourdb instances
mut agent_db := core_db.new_agentdb(session_state) or {
return error('Failed to initialize agent_db: ${err}')
}
mut circle_db := core_db.new_circledb(session_state) or {
return error('Failed to initialize circle_db: ${err}')
}
mut name_db := core_db.new_namedb(session_state) or {
return error('Failed to initialize name_db: ${err}')
}
mut mail_db := mcc_db.new_maildb(session_state) or {
return error('Failed to initialize mail_db: ${err}')
}
mut calendar_db := mcc_db.new_calendardb(session_state) or {
return error('Failed to initialize calendar_db: ${err}')
}
mut job_db := actions_db.new_jobdb(session_state) or {
return error('Failed to initialize job_db: ${err}')
}
mut cm := &CircleCoordinator{
agents: &agent_db
circles: &circle_db
names: &name_db
mails: &mail_db
calendar: &calendar_db
jobs: &job_db
action_queues: map[string]&ActionQueue{}
session_state: session_state
}
circle_global[args.name] = cm
return cm
}
// ActionQueueArgs defines the parameters for creating a new ActionQueue
@[params]
pub struct ActionQueueArgs {
pub mut:
name string = 'default' // Name of the queue
redis_addr string // Redis server address, defaults to 'localhost:6379'
}
// new_action_queue creates a new ActionQueue
pub fn new_action_queue(args ActionQueueArgs) !&ActionQueue {
// Normalize the queue name
queue_name := texttools.name_fix(args.name)
// Check if queue already exists in global map
if queue_name in action_queues {
mut q := action_queues[queue_name] or { panic('bug') }
return q
}
// Set default Redis address if not provided
mut redis_addr := args.redis_addr
if redis_addr == '' {
redis_addr = 'localhost:6379'
}
// Create Redis client
mut redis := redisclient.new(redis_addr)!
// Create Redis queue
queue_key := 'actionqueue:${queue_name}'
mut redis_queue := redis.queue_get(queue_key)
// Create ActionQueue
mut action_queue := &ActionQueue{
name: queue_name
queue: &redis_queue
redis: redis
}
// Store in global map
action_queues[queue_name] = action_queue
return action_queue
}
// get_action_queue retrieves an existing ActionQueue or creates a new one
pub fn get_action_queue(name string) !&ActionQueue {
queue_name := texttools.name_fix(name)
if queue_name in action_queues {
mut q := action_queues[queue_name] or { panic('bug') }
return q
}
return new_action_queue(ActionQueueArgs{
name: queue_name
})!
}
// get_or_create_action_queue retrieves an existing ActionQueue for a CircleCoordinator or creates a new one
pub fn (mut cc CircleCoordinator) get_or_create_action_queue(name string) !&ActionQueue {
queue_name := texttools.name_fix(name)
if queue_name in cc.action_queues {
mut q := cc.action_queues[queue_name] or { panic('bug') }
return q
}
mut action_queue := new_action_queue(ActionQueueArgs{
name: queue_name
})!
cc.action_queues[queue_name] = action_queue
return action_queue
}

@@ -1,269 +0,0 @@
module actionprocessor
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.playbook
import json
import time
// ActionJobStatus represents the current status of an action job
pub enum ActionJobStatus {
pending
processing
completed
failed
cancelled
}
// ActionJob represents a job to be processed by the action processor
@[heap]
pub struct ActionJob {
pub mut:
guid string
heroscript string
created ourtime.OurTime
deadline ourtime.OurTime
status ActionJobStatus
error string // Error message if job failed
async bool // Whether the job should be processed asynchronously
circleid string // ID of the circle this job belongs to
}
// ActionQueue is a queue of actions to be processed, which comes from a redis queue
@[heap]
pub struct ActionQueue {
pub mut:
name string
queue &redisclient.RedisQueue
redis &redisclient.Redis
}
// new_action_job creates a new ActionJob with the given heroscript
pub fn new_action_job(heroscript string) ActionJob {
now := ourtime.now()
// Default deadline is 1 hour from now
mut deadline := ourtime.now()
deadline.warp('+1h') or { panic('Failed to set deadline: ${err}') }
return ActionJob{
guid: time.now().unix_milli().str()
heroscript: heroscript
created: now
deadline: deadline
status: .pending
async: false
circleid: ''
}
}
// new_action_job_with_deadline creates a new ActionJob with the given heroscript and deadline
pub fn new_action_job_with_deadline(heroscript string, deadline_str string) !ActionJob {
mut job := new_action_job(heroscript)
job.deadline = ourtime.new(deadline_str)!
return job
}
// to_json converts the ActionJob to a JSON string
pub fn (job ActionJob) to_json() string {
return json.encode(job)
}
// from_json creates an ActionJob from a JSON string
pub fn action_job_from_json(data string) !ActionJob {
return json.decode(ActionJob, data)
}
// to_plbook converts the job's heroscript to a PlayBook object
pub fn (job ActionJob) to_plbook() !&playbook.PlayBook {
if job.heroscript.trim_space() == '' {
return error('No heroscript content in job')
}
// Create a new PlayBook with the heroscript content
mut pb := playbook.new(text: job.heroscript)!
// Check if any actions were found
if pb.actions.len == 0 {
return error('No actions found in heroscript')
}
return &pb
}
// add adds a job to the queue
pub fn (mut q ActionQueue) add_job(job ActionJob) ! {
// Store the job in Redis using HSET
job_key := 'heroactionjobs:${job.guid}'
q.redis.hset(job_key, 'guid', job.guid)!
q.redis.hset(job_key, 'heroscript', job.heroscript)!
q.redis.hset(job_key, 'created', job.created.unix().str())!
q.redis.hset(job_key, 'deadline', job.deadline.unix().str())!
q.redis.hset(job_key, 'status', job.status.str())!
q.redis.hset(job_key, 'async', job.async.str())!
q.redis.hset(job_key, 'circleid', job.circleid)!
if job.error != '' {
q.redis.hset(job_key, 'error', job.error)!
}
// Add the job reference to the queue
q.queue.add(job.guid)!
}
// get_job retrieves a job from Redis by its GUID
pub fn (mut q ActionQueue) get_job(guid string) !ActionJob {
job_key := 'heroactionjobs:${guid}'
// Check if the job exists
if !q.redis.exists(job_key)! {
return error('Job with GUID ${guid} not found')
}
// Retrieve job fields
mut job := ActionJob{
guid: guid
heroscript: q.redis.hget(job_key, 'heroscript')!
status: ActionJobStatus.pending // Default value, will be overwritten
error: '' // Default empty error message
async: false // Default to synchronous
circleid: '' // Default to empty circle ID
}
// Parse created time
created_str := q.redis.hget(job_key, 'created')!
created_unix := created_str.i64()
job.created = ourtime.new_from_epoch(u64(created_unix))
// Parse deadline
deadline_str := q.redis.hget(job_key, 'deadline')!
deadline_unix := deadline_str.i64()
job.deadline = ourtime.new_from_epoch(u64(deadline_unix))
// Parse status
status_str := q.redis.hget(job_key, 'status')!
match status_str {
'pending' { job.status = .pending }
'processing' { job.status = .processing }
'completed' { job.status = .completed }
'failed' { job.status = .failed }
'cancelled' { job.status = .cancelled }
else { job.status = .pending } // Default to pending if unknown
}
// Get error message if exists
job.error = q.redis.hget(job_key, 'error') or { '' }
// Get async flag
async_str := q.redis.hget(job_key, 'async') or { 'false' }
job.async = async_str == 'true'
// Get circle ID
job.circleid = q.redis.hget(job_key, 'circleid') or { '' }
return job
}
// update_job_status updates the status of a job in Redis
pub fn (mut q ActionQueue) update_job_status(guid string, status ActionJobStatus) ! {
job_key := 'heroactionjobs:${guid}'
// Check if the job exists
if !q.redis.exists(job_key)! {
return error('Job with GUID ${guid} not found')
}
// Update status
q.redis.hset(job_key, 'status', status.str())!
}
// set_job_failed marks a job as failed with an error message
pub fn (mut q ActionQueue) set_job_failed(guid string, error_msg string) ! {
job_key := 'heroactionjobs:${guid}'
// Check if the job exists
if !q.redis.exists(job_key)! {
return error('Job with GUID ${guid} not found')
}
// Update status and error message
q.redis.hset(job_key, 'status', ActionJobStatus.failed.str())!
q.redis.hset(job_key, 'error', error_msg)!
}
// count_waiting_jobs returns the number of jobs waiting in the queue
pub fn (mut q ActionQueue) count_waiting_jobs() !int {
// Get the length of the queue
return q.redis.llen('actionqueue:${q.name}')!
}
// find_failed_jobs returns a list of failed jobs
pub fn (mut q ActionQueue) find_failed_jobs() ![]ActionJob {
// Use Redis KEYS to find all job keys (since SCAN is more complex)
// In a production environment with many keys, KEYS should be avoided
// and replaced with a more efficient implementation using SCAN
keys := q.redis.keys('heroactionjobs:*')!
mut failed_jobs := []ActionJob{}
for key in keys {
// Check if job is failed
status := q.redis.hget(key, 'status') or { continue }
if status == ActionJobStatus.failed.str() {
// Get the job GUID from the key
guid := key.all_after('heroactionjobs:')
// Get the full job
job := q.get_job(guid) or { continue }
failed_jobs << job
}
}
return failed_jobs
}
// delete_job deletes a job from Redis
pub fn (mut q ActionQueue) delete_job(guid string) ! {
job_key := 'heroactionjobs:${guid}'
// Check if the job exists
if !q.redis.exists(job_key)! {
return error('Job with GUID ${guid} not found')
}
// Delete the job
q.redis.del(job_key)!
}
// add adds a string value to the queue
pub fn (mut q ActionQueue) add(val string) ! {
q.queue.add(val)!
}
// get retrieves a value from the queue with timeout
// timeout in msec
pub fn (mut q ActionQueue) get(timeout u64) !string {
return q.queue.get(timeout)!
}
// pop retrieves a value from the queue without timeout
// get without timeout, returns none if nil
pub fn (mut q ActionQueue) pop() !string {
return q.queue.pop()!
}
// fetch_job retrieves the next job from the queue
pub fn (mut q ActionQueue) fetch_job(timeout u64) !ActionJob {
guid := q.queue.get(timeout)!
return q.get_job(guid)!
}
// pop_job retrieves the next job from the queue without timeout
pub fn (mut q ActionQueue) pop_job() !ActionJob {
guid := q.queue.pop()!
return q.get_job(guid)!
}
// delete clears the queue (removes all items)
pub fn (mut q ActionQueue) delete() ! {
// Since RedisQueue doesn't have a delete method, we'll implement our own
// by deleting the key in Redis
q.redis.del('actionqueue:${q.name}')!
}

@@ -1,176 +0,0 @@
module actionprocessor
import time
import freeflowuniverse.herolib.data.ourtime
fn test_action_job() {
// Create a new action job
heroscript := '!!action.test name:test1'
job := new_action_job(heroscript)
// Verify job properties
assert job.guid != ''
assert job.heroscript == heroscript
assert job.status == ActionJobStatus.pending
assert !job.created.empty()
assert !job.deadline.empty()
// Test JSON serialization
json_str := job.to_json()
job2 := action_job_from_json(json_str) or {
assert false, 'Failed to decode job from JSON: ${err}'
return
}
// Verify deserialized job
assert job2.guid == job.guid
assert job2.heroscript == job.heroscript
assert job2.status == job.status
// Test creating job with custom deadline
job3 := new_action_job_with_deadline(heroscript, '+2h') or {
assert false, 'Failed to create job with deadline: ${err}'
return
}
assert job3.deadline.unix() > job.deadline.unix()
}
fn test_action_queue() {
// Skip this test if Redis is not available
$if !test_with_redis ? {
println('Skipping Redis test (use -d test_with_redis to run)')
return
}
// Create a new action queue
queue_name := 'test_queue_${time.now().unix_milli()}'
mut queue := new_action_queue(ActionQueueArgs{
name: queue_name
}) or {
assert false, 'Failed to create action queue: ${err}'
return
}
// Create test jobs
mut job1 := new_action_job('!!action.test1 name:test1')
mut job2 := new_action_job('!!action.test2 name:test2')
mut job3 := new_action_job('!!action.test3 name:test3')
mut job4 := new_action_job('!!action.test4 name:test4')
// Add jobs to the queue
queue.add_job(job1) or {
assert false, 'Failed to add job1: ${err}'
return
}
queue.add_job(job2) or {
assert false, 'Failed to add job2: ${err}'
return
}
queue.add_job(job3) or {
assert false, 'Failed to add job3: ${err}'
return
}
// Test count_waiting_jobs
wait_count := queue.count_waiting_jobs() or {
assert false, 'Failed to count waiting jobs: ${err}'
return
}
assert wait_count == 3, 'Expected 3 waiting jobs, got ${wait_count}'
// Fetch jobs from the queue
fetched_job1 := queue.pop_job() or {
assert false, 'Failed to pop job1: ${err}'
return
}
assert fetched_job1.guid == job1.guid
assert fetched_job1.heroscript == job1.heroscript
fetched_job2 := queue.pop_job() or {
assert false, 'Failed to pop job2: ${err}'
return
}
assert fetched_job2.guid == job2.guid
assert fetched_job2.heroscript == job2.heroscript
// Update job status
queue.update_job_status(job3.guid, .processing) or {
assert false, 'Failed to update job status: ${err}'
return
}
// Fetch job with updated status
fetched_job3 := queue.pop_job() or {
assert false, 'Failed to pop job3: ${err}'
return
}
assert fetched_job3.guid == job3.guid
assert fetched_job3.status == .processing
// Test setting a job as failed with error message
queue.add_job(job4) or {
assert false, 'Failed to add job4: ${err}'
return
}
// Set job as failed
queue.set_job_failed(job4.guid, 'Test error message') or {
assert false, 'Failed to set job as failed: ${err}'
return
}
// Get the failed job and verify error message
failed_job := queue.get_job(job4.guid) or {
assert false, 'Failed to get failed job: ${err}'
return
}
assert failed_job.status == .failed
assert failed_job.error == 'Test error message'
// Test finding failed jobs
failed_jobs := queue.find_failed_jobs() or {
assert false, 'Failed to find failed jobs: ${err}'
return
}
assert failed_jobs.len > 0, 'Expected at least one failed job'
assert failed_jobs[0].guid == job4.guid
assert failed_jobs[0].error == 'Test error message'
// Delete a job
queue.delete_job(job3.guid) or {
assert false, 'Failed to delete job: ${err}'
return
}
// Try to get deleted job (should fail)
queue.get_job(job3.guid) or {
// Expected error
assert err.str().contains('not found')
return
}
// Test direct put and fetch to verify heroscript preservation
test_heroscript := '!!action.special name:direct_test param1:value1 param2:value2'
mut direct_job := new_action_job(test_heroscript)
// Add the job
queue.add_job(direct_job) or {
assert false, 'Failed to add direct job: ${err}'
return
}
// Fetch the job by GUID
fetched_direct_job := queue.get_job(direct_job.guid) or {
assert false, 'Failed to get direct job: ${err}'
return
}
// Verify the heroscript is preserved exactly
assert fetched_direct_job.heroscript == test_heroscript, 'Heroscript was not preserved correctly'
// Clean up
queue.delete() or {
assert false, 'Failed to delete queue: ${err}'
return
}
}

@@ -1,75 +0,0 @@
module db
import freeflowuniverse.herolib.circles.base { DBHandler, SessionState, new_dbhandler }
import freeflowuniverse.herolib.circles.actions.models { Job, JobStatus }
@[heap]
pub struct JobDB {
pub mut:
db DBHandler[Job]
}
pub fn new_jobdb(session_state SessionState) !JobDB {
return JobDB{
db: new_dbhandler[Job]('job', session_state)
}
}
pub fn (mut m JobDB) new() Job {
return Job{}
}
// set adds or updates a job
pub fn (mut m JobDB) set(job Job) !Job {
return m.db.set(job)!
}
// get retrieves a job by its ID
pub fn (mut m JobDB) get(id u32) !Job {
return m.db.get(id)!
}
// list returns all job IDs
pub fn (mut m JobDB) list() ![]u32 {
return m.db.list()!
}
pub fn (mut m JobDB) getall() ![]Job {
return m.db.getall()!
}
// delete removes a job by its ID
pub fn (mut m JobDB) delete(id u32) ! {
m.db.delete(id)!
}
//////////////////CUSTOM METHODS//////////////////////////////////
// get_by_guid retrieves a job by its GUID
pub fn (mut m JobDB) get_by_guid(guid string) !Job {
return m.db.get_by_key('guid', guid)!
}
// delete_by_guid removes a job by its GUID
pub fn (mut m JobDB) delete_by_guid(guid string) ! {
// Get the job by GUID
job := m.get_by_guid(guid) or {
// Job not found, nothing to delete
return
}
// Delete the job by ID
m.delete(job.id)!
}
// update_job_status updates the status of a job
pub fn (mut m JobDB) update_job_status(guid string, new_status JobStatus) !Job {
// Get the job by GUID
mut job := m.get_by_guid(guid)!
// Update the job status
job.status = new_status
// Save the updated job
return m.set(job)!
}
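
A minimal usage sketch of this JobDB wrapper (the imports mirror the ones above; the path and GUID are illustrative, and it assumes `new_session` from `circles.base` as used elsewhere in this module):

```v
import freeflowuniverse.herolib.circles.base { new_session }
import freeflowuniverse.herolib.circles.actions.db { new_jobdb }

fn example() ! {
	// session backed by an illustrative temp directory
	mut session := new_session(name: 'circles_demo', path: '/tmp/circles_demo')!
	mut jobs := new_jobdb(session)!
	mut job := jobs.new()
	job.guid = 'job-42'
	job.actor = 'vm_manager'
	job.action = 'start'
	job = jobs.set(job)! // assigns an id and writes the index keys
	same := jobs.get_by_guid('job-42')!
	assert same.action == 'start'
}
```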

View File

@@ -1,202 +0,0 @@
module db
import os
import rand
import freeflowuniverse.herolib.circles.actionprocessor
import freeflowuniverse.herolib.circles.actions.models { JobStatus, Status }
import freeflowuniverse.herolib.data.ourtime
fn test_job_db() {
// Create a temporary directory for testing
test_dir := os.join_path(os.temp_dir(), 'hero_job_test_${rand.intn(9000) or { 0 } + 1000}')
os.mkdir_all(test_dir) or { panic(err) }
defer { os.rmdir_all(test_dir) or {} }
mut runner := actionprocessor.new(path: test_dir)!
// Create multiple jobs for testing
mut job1 := runner.jobs.new()
job1.guid = 'job-1'
job1.actor = 'vm_manager'
job1.action = 'start'
job1.circle = 'circle1'
job1.context = 'context1'
job1.agents = ['agent1', 'agent2']
job1.source = 'source1'
job1.params = {
'id': '10'
'name': 'test-vm'
}
job1.status.guid = job1.guid
job1.status.created = ourtime.now()
job1.status.status = .created
mut job2 := runner.jobs.new()
job2.guid = 'job-2'
job2.actor = 'vm_manager'
job2.action = 'stop'
job2.circle = 'circle1'
job2.context = 'context2'
job2.agents = ['agent1']
job2.source = 'source1'
job2.params = {
'id': '11'
'name': 'test-vm-2'
}
job2.status.guid = job2.guid
job2.status.created = ourtime.now()
job2.status.status = .created
mut job3 := runner.jobs.new()
job3.guid = 'job-3'
job3.actor = 'network_manager'
job3.action = 'create'
job3.circle = 'circle2'
job3.context = 'context1'
job3.agents = ['agent3']
job3.source = 'source2'
job3.params = {
'name': 'test-network'
'type': 'bridge'
}
job3.status.guid = job3.guid
job3.status.created = ourtime.now()
job3.status.status = .created
// Add the jobs
println('Adding job 1')
job1 = runner.jobs.set(job1)!
println('Adding job 2')
job2 = runner.jobs.set(job2)!
println('Adding job 3')
job3 = runner.jobs.set(job3)!
// Test list functionality
println('Testing list functionality')
// Get all jobs
all_jobs := runner.jobs.getall()!
println('Retrieved ${all_jobs.len} jobs')
for i, job in all_jobs {
println('Job ${i}: id=${job.id}, guid=${job.guid}, actor=${job.actor}')
}
assert all_jobs.len == 3, 'Expected 3 jobs, got ${all_jobs.len}'
// Verify all jobs are in the list
mut found1 := false
mut found2 := false
mut found3 := false
for job in all_jobs {
if job.guid == 'job-1' {
found1 = true
} else if job.guid == 'job-2' {
found2 = true
} else if job.guid == 'job-3' {
found3 = true
}
}
assert found1, 'Job 1 not found in list'
assert found2, 'Job 2 not found in list'
assert found3, 'Job 3 not found in list'
// Get and verify individual jobs
println('Verifying individual jobs')
retrieved_job1 := runner.jobs.get_by_guid('job-1')!
assert retrieved_job1.guid == job1.guid
assert retrieved_job1.actor == job1.actor
assert retrieved_job1.action == job1.action
assert retrieved_job1.circle == job1.circle
assert retrieved_job1.context == job1.context
assert retrieved_job1.agents.len == 2
assert retrieved_job1.agents[0] == 'agent1'
assert retrieved_job1.agents[1] == 'agent2'
assert retrieved_job1.params['id'] == '10'
assert retrieved_job1.params['name'] == 'test-vm'
assert retrieved_job1.status.status == .created
// Test get_by_actor method
println('Testing get_by_actor method')
// Debug: Print all jobs and their actors
all_jobs_debug := runner.jobs.getall()!
println('Debug - All jobs:')
for job in all_jobs_debug {
println('Job ID: ${job.id}, GUID: ${job.guid}, Actor: ${job.actor}')
}
// Debug: Print the index keys for job1 and job2
println('Debug - Index keys for job1:')
for k, v in job1.index_keys() {
println('${k}: ${v}')
}
println('Debug - Index keys for job2:')
for k, v in job2.index_keys() {
println('${k}: ${v}')
}
// Test update_job_status method
println('Testing update_job_status method')
updated_job1 := runner.jobs.update_job_status('job-1', JobStatus{ status: Status.running })!
assert updated_job1.status.status == Status.running
// Verify the status was updated in the database
status_updated_job1 := runner.jobs.get_by_guid('job-1')!
assert status_updated_job1.status.status == Status.running
// Test delete functionality
println('Testing delete functionality')
// Delete job 2
runner.jobs.delete_by_guid('job-2')!
// Verify deletion with list
jobs_after_delete := runner.jobs.getall()!
assert jobs_after_delete.len == 2, 'Expected 2 jobs after deletion, got ${jobs_after_delete.len}'
// Verify the remaining jobs
mut found_after_delete1 := false
mut found_after_delete2 := false
mut found_after_delete3 := false
for job in jobs_after_delete {
if job.guid == 'job-1' {
found_after_delete1 = true
} else if job.guid == 'job-2' {
found_after_delete2 = true
} else if job.guid == 'job-3' {
found_after_delete3 = true
}
}
assert found_after_delete1, 'Job 1 not found after deletion'
assert !found_after_delete2, 'Job 2 found after deletion (should be deleted)'
assert found_after_delete3, 'Job 3 not found after deletion'
// Delete another job
println('Deleting another job')
runner.jobs.delete_by_guid('job-3')!
// Verify only one job remains
jobs_after_second_delete := runner.jobs.getall()!
assert jobs_after_second_delete.len == 1, 'Expected 1 job after second deletion, got ${jobs_after_second_delete.len}'
assert jobs_after_second_delete[0].guid == 'job-1', 'Remaining job should be job-1'
// Delete the last job
println('Deleting last job')
runner.jobs.delete_by_guid('job-1')!
// Verify no jobs remain
jobs_after_all_deleted := runner.jobs.getall() or {
// This is expected to fail with 'No jobs found' error
assert err.msg().contains('No index keys defined for this type')
|| err.msg().contains('No jobs found')
[]models.Job{cap: 0}
}
assert jobs_after_all_deleted.len == 0, 'Expected 0 jobs after all deletions, got ${jobs_after_all_deleted.len}'
println('All tests passed successfully')
}

View File

@@ -1,218 +0,0 @@
module models
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.data.encoder
// Job represents a task to be executed by an agent
pub struct Job {
pub mut:
id u32 // unique numeric id for the job
guid string // unique id for the job
agents []string // the pub key of the agent(s) which will execute the command, only 1 will execute
source string // pubkey from the agent who asked for the job
circle string = 'default' // our digital life is organized in circles
context string = 'default' // is the high level context in which actors will execute the work inside a circle
actor string // e.g. vm_manager
action string // e.g. start
params map[string]string // e.g. id:10
timeout_schedule u16 = 60 // timeout in sec before the job gets picked up
timeout u16 = 3600 // timeout in sec
log bool = true
ignore_error bool // if true, an error will just exit and not raise; there will be no error reporting
ignore_error_codes []u16 // if we want to ignore certain error codes
debug bool // if debug will get more context
retry u8 // nr of retries, default is no retry
status JobStatus
dependencies []JobDependency // will not execute until other jobs are done
}
// JobStatus represents the current state of a job
pub struct JobStatus {
pub mut:
guid string // unique id for the job
created ourtime.OurTime // when we created the job
start ourtime.OurTime // when the job needs to start
end ourtime.OurTime // when the job ended, can be in error
status Status // current status of the job
}
// JobDependency represents a dependency on another job
pub struct JobDependency {
pub mut:
guid string // unique id for the job
agents []string // the pub key of the agent(s) which can execute the command
}
// Status represents the possible states of a job
pub enum Status {
created // initial state
scheduled // job has been scheduled
planned // arrived where actor will execute the job
running // job is currently running
error // job encountered an error
ok // job completed successfully
}
pub fn (j Job) index_keys() map[string]string {
return {
'guid': j.guid
'actor': j.actor
'circle': j.circle
'context': j.context
}
}
// dumps serializes the Job struct to binary format using the encoder
// This implements the Serializer interface
pub fn (j Job) dumps() ![]u8 {
mut e := encoder.new()
// Add unique encoding ID to identify this type of data
e.add_u16(300)
// Encode Job fields
e.add_u32(j.id)
e.add_string(j.guid)
// Encode agents array
e.add_u16(u16(j.agents.len))
for agent in j.agents {
e.add_string(agent)
}
e.add_string(j.source)
e.add_string(j.circle)
e.add_string(j.context)
e.add_string(j.actor)
e.add_string(j.action)
// Encode params map
e.add_u16(u16(j.params.len))
for key, value in j.params {
e.add_string(key)
e.add_string(value)
}
e.add_u16(j.timeout_schedule)
e.add_u16(j.timeout)
e.add_bool(j.log)
e.add_bool(j.ignore_error)
// Encode ignore_error_codes array
e.add_u16(u16(j.ignore_error_codes.len))
for code in j.ignore_error_codes {
e.add_u16(code)
}
e.add_bool(j.debug)
e.add_u8(j.retry)
// Encode JobStatus
e.add_string(j.status.guid)
e.add_u32(u32(j.status.created.unix()))
e.add_u32(u32(j.status.start.unix()))
e.add_u32(u32(j.status.end.unix()))
e.add_u8(u8(j.status.status))
// Encode dependencies array
e.add_u16(u16(j.dependencies.len))
for dependency in j.dependencies {
e.add_string(dependency.guid)
// Encode dependency agents array
e.add_u16(u16(dependency.agents.len))
for agent in dependency.agents {
e.add_string(agent)
}
}
return e.data
}
// loads deserializes binary data into a Job struct
pub fn job_loads(data []u8) !Job {
mut d := encoder.decoder_new(data)
mut job := Job{}
// Check encoding ID to verify this is the correct type of data
encoding_id := d.get_u16()!
if encoding_id != 300 {
return error('Wrong file type: expected encoding ID 300, got ${encoding_id}, for job')
}
// Decode Job fields
job.id = d.get_u32()!
job.guid = d.get_string()!
// Decode agents array
agents_len := d.get_u16()!
job.agents = []string{len: int(agents_len)}
for i in 0 .. agents_len {
job.agents[i] = d.get_string()!
}
job.source = d.get_string()!
job.circle = d.get_string()!
job.context = d.get_string()!
job.actor = d.get_string()!
job.action = d.get_string()!
// Decode params map
params_len := d.get_u16()!
job.params = map[string]string{}
for _ in 0 .. params_len {
key := d.get_string()!
value := d.get_string()!
job.params[key] = value
}
job.timeout_schedule = d.get_u16()!
job.timeout = d.get_u16()!
job.log = d.get_bool()!
job.ignore_error = d.get_bool()!
// Decode ignore_error_codes array
error_codes_len := d.get_u16()!
job.ignore_error_codes = []u16{len: int(error_codes_len)}
for i in 0 .. error_codes_len {
job.ignore_error_codes[i] = d.get_u16()!
}
job.debug = d.get_bool()!
job.retry = d.get_u8()!
// Decode JobStatus
job.status.guid = d.get_string()!
job.status.created.unixt = u64(d.get_u32()!)
job.status.start.unixt = u64(d.get_u32()!)
job.status.end.unixt = u64(d.get_u32()!)
status_val := d.get_u8()!
job.status.status = match status_val {
0 { Status.created }
1 { Status.scheduled }
2 { Status.planned }
3 { Status.running }
4 { Status.error }
5 { Status.ok }
else { return error('Invalid Status value: ${status_val}') }
}
// Decode dependencies array
dependencies_len := d.get_u16()!
job.dependencies = []JobDependency{len: int(dependencies_len)}
for i in 0 .. dependencies_len {
mut dependency := JobDependency{}
dependency.guid = d.get_string()!
// Decode dependency agents array
dep_agents_len := d.get_u16()!
dependency.agents = []string{len: int(dep_agents_len)}
for j in 0 .. dep_agents_len {
dependency.agents[j] = d.get_string()!
}
job.dependencies[i] = dependency
}
return job
}

View File

@@ -1,206 +0,0 @@
module models
import freeflowuniverse.herolib.data.ourtime
fn test_job_serialization() {
// Create a test job
mut job := Job{
id: 1
guid: 'test-job-1'
agents: ['agent1', 'agent2']
source: 'source1'
circle: 'test-circle'
context: 'test-context'
actor: 'vm_manager'
action: 'start'
params: {
'id': '10'
'name': 'test-vm'
}
timeout_schedule: 120
timeout: 7200
log: true
ignore_error: false
ignore_error_codes: [u16(404), u16(500)]
debug: true
retry: 3
}
// Set up job status
job.status = JobStatus{
guid: job.guid
created: ourtime.now()
start: ourtime.now()
end: ourtime.OurTime{}
status: .created
}
// Add a dependency
job.dependencies << JobDependency{
guid: 'dependency-job-1'
agents: ['agent1']
}
// Test index_keys method
keys := job.index_keys()
assert keys['guid'] == 'test-job-1'
assert keys['actor'] == 'vm_manager'
assert keys['circle'] == 'test-circle'
assert keys['context'] == 'test-context'
// Serialize the job
println('Serializing job...')
serialized := job.dumps() or {
assert false, 'Failed to serialize job: ${err}'
return
}
assert serialized.len > 0, 'Serialized data should not be empty'
// Deserialize the job
println('Deserializing job...')
deserialized := job_loads(serialized) or {
assert false, 'Failed to deserialize job: ${err}'
return
}
// Verify the deserialized job
assert deserialized.id == job.id
assert deserialized.guid == job.guid
assert deserialized.agents.len == job.agents.len
assert deserialized.agents[0] == job.agents[0]
assert deserialized.agents[1] == job.agents[1]
assert deserialized.source == job.source
assert deserialized.circle == job.circle
assert deserialized.context == job.context
assert deserialized.actor == job.actor
assert deserialized.action == job.action
assert deserialized.params.len == job.params.len
assert deserialized.params['id'] == job.params['id']
assert deserialized.params['name'] == job.params['name']
assert deserialized.timeout_schedule == job.timeout_schedule
assert deserialized.timeout == job.timeout
assert deserialized.log == job.log
assert deserialized.ignore_error == job.ignore_error
assert deserialized.ignore_error_codes.len == job.ignore_error_codes.len
assert deserialized.ignore_error_codes[0] == job.ignore_error_codes[0]
assert deserialized.ignore_error_codes[1] == job.ignore_error_codes[1]
assert deserialized.debug == job.debug
assert deserialized.retry == job.retry
assert deserialized.status.guid == job.status.guid
assert deserialized.status.status == job.status.status
assert deserialized.dependencies.len == job.dependencies.len
assert deserialized.dependencies[0].guid == job.dependencies[0].guid
assert deserialized.dependencies[0].agents.len == job.dependencies[0].agents.len
assert deserialized.dependencies[0].agents[0] == job.dependencies[0].agents[0]
println('All job serialization tests passed!')
}
fn test_job_status_enum() {
// Test all status enum values
assert u8(Status.created) == 0
assert u8(Status.scheduled) == 1
assert u8(Status.planned) == 2
assert u8(Status.running) == 3
assert u8(Status.error) == 4
assert u8(Status.ok) == 5
// Test status progression
mut status := Status.created
assert status == .created
status = .scheduled
assert status == .scheduled
status = .planned
assert status == .planned
status = .running
assert status == .running
status = .error
assert status == .error
status = .ok
assert status == .ok
println('All job status enum tests passed!')
}
fn test_job_dependency() {
// Create a test dependency
mut dependency := JobDependency{
guid: 'dependency-job-1'
agents: ['agent1', 'agent2', 'agent3']
}
// Create a job with this dependency
mut job := Job{
id: 2
guid: 'test-job-2'
actor: 'network_manager'
action: 'create'
dependencies: [dependency]
}
// Test dependency properties
assert job.dependencies.len == 1
assert job.dependencies[0].guid == 'dependency-job-1'
assert job.dependencies[0].agents.len == 3
assert job.dependencies[0].agents[0] == 'agent1'
assert job.dependencies[0].agents[1] == 'agent2'
assert job.dependencies[0].agents[2] == 'agent3'
// Add another dependency
job.dependencies << JobDependency{
guid: 'dependency-job-2'
agents: ['agent4']
}
// Test multiple dependencies
assert job.dependencies.len == 2
assert job.dependencies[1].guid == 'dependency-job-2'
assert job.dependencies[1].agents.len == 1
assert job.dependencies[1].agents[0] == 'agent4'
println('All job dependency tests passed!')
}
fn test_job_with_empty_values() {
// Create a job with minimal values
mut job := Job{
id: 3
guid: 'minimal-job'
actor: 'minimal_actor'
action: 'test'
}
// Serialize and deserialize
serialized := job.dumps() or {
assert false, 'Failed to serialize minimal job: ${err}'
return
}
deserialized := job_loads(serialized) or {
assert false, 'Failed to deserialize minimal job: ${err}'
return
}
// Verify defaults are preserved
assert deserialized.id == job.id
assert deserialized.guid == job.guid
assert deserialized.circle == 'default' // Default value
assert deserialized.context == 'default' // Default value
assert deserialized.actor == 'minimal_actor'
assert deserialized.action == 'test'
assert deserialized.agents.len == 0
assert deserialized.params.len == 0
assert deserialized.timeout_schedule == 60 // Default value
assert deserialized.timeout == 3600 // Default value
assert deserialized.log == true // Default value
assert deserialized.ignore_error == false // Default value
assert deserialized.ignore_error_codes.len == 0
assert deserialized.dependencies.len == 0
println('All minimal job tests passed!')
}

View File

@@ -1,716 +0,0 @@
openapi: 3.1.0
info:
title: HeroLib Circles API
description: API for managing jobs and actions in the HeroLib Circles module
version: 1.0.0
contact:
name: FreeFlow Universe
url: https://github.com/freeflowuniverse/herolib
servers:
- url: /api/v1
description: Default API server
paths:
/jobs:
get:
summary: List all jobs
description: Returns all job IDs in the system
operationId: listJobs
tags:
- jobs
responses:
'200':
description: A list of job IDs
content:
application/json:
schema:
type: array
items:
type: integer
format: int32
examples:
listJobsExample:
value: [1, 2, 3, 4, 5]
'500':
$ref: '#/components/responses/InternalServerError'
post:
summary: Create a new job
description: Creates a new job in the system
operationId: createJob
tags:
- jobs
requestBody:
description: Job object to be created
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/JobCreate'
examples:
createJobExample:
value:
agents: ["agent1pubkey", "agent2pubkey"]
source: "sourcepubkey"
circle: "default"
context: "default"
actor: "vm_manager"
action: "start"
params:
id: "10"
name: "test-vm"
timeout_schedule: 60
timeout: 3600
log: true
ignore_error: false
ignore_error_codes: []
debug: false
retry: 0
dependencies: []
responses:
'201':
description: Job created successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Job'
'400':
$ref: '#/components/responses/BadRequest'
'500':
$ref: '#/components/responses/InternalServerError'
/jobs/all:
get:
summary: Get all jobs
description: Returns all jobs in the system
operationId: getAllJobs
tags:
- jobs
responses:
'200':
description: A list of jobs
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Job'
examples:
getAllJobsExample:
value:
- id: 1
guid: "job-guid-1"
agents: ["agent1pubkey"]
source: "sourcepubkey"
circle: "default"
context: "default"
actor: "vm_manager"
action: "start"
params:
id: "10"
timeout_schedule: 60
timeout: 3600
log: true
ignore_error: false
ignore_error_codes: []
debug: false
retry: 0
status:
guid: "job-guid-1"
created: "2025-03-16T13:20:30Z"
start: "2025-03-16T13:21:00Z"
end: "2025-03-16T13:25:45Z"
status: "ok"
dependencies: []
- id: 2
guid: "job-guid-2"
agents: ["agent2pubkey"]
source: "sourcepubkey"
circle: "default"
context: "default"
actor: "vm_manager"
action: "stop"
params:
id: "11"
timeout_schedule: 60
timeout: 3600
log: true
ignore_error: false
ignore_error_codes: []
debug: false
retry: 0
status:
guid: "job-guid-2"
created: "2025-03-16T14:10:30Z"
start: "2025-03-16T14:11:00Z"
end: "2025-03-16T14:12:45Z"
status: "ok"
dependencies: []
'500':
description: Internal server error
content:
application/json:
schema:
type: object
required:
- code
- message
properties:
code:
type: integer
format: int32
message:
type: string
examples:
internalServerErrorExample:
value:
code: 500
message: "Internal server error"
/jobs/{id}:
get:
summary: Get a job by ID
description: Returns a job by its numeric ID
operationId: getJobById
tags:
- jobs
parameters:
- name: id
in: path
description: Job ID
required: true
schema:
type: integer
format: int32
responses:
'200':
description: Job found
content:
application/json:
schema:
$ref: '#/components/schemas/Job'
examples:
getJobByIdExample:
value:
id: 1
guid: "job-guid-1"
agents: ["agent1pubkey"]
source: "sourcepubkey"
circle: "default"
context: "default"
actor: "vm_manager"
action: "start"
params:
id: "10"
timeout_schedule: 60
timeout: 3600
log: true
ignore_error: false
ignore_error_codes: []
debug: false
retry: 0
status:
guid: "job-guid-1"
created: "2025-03-16T13:20:30Z"
start: "2025-03-16T13:21:00Z"
end: "2025-03-16T13:25:45Z"
status: "ok"
dependencies: []
'404':
$ref: '#/components/responses/NotFound'
'500':
$ref: '#/components/responses/InternalServerError'
put:
summary: Update a job
description: Updates an existing job
operationId: updateJob
tags:
- jobs
parameters:
- name: id
in: path
description: Job ID
required: true
schema:
type: integer
format: int32
requestBody:
description: Job object to update
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Job'
examples:
updateJobExample:
value:
id: 1
guid: "job-guid-1"
agents: ["agent1pubkey", "agent3pubkey"]
source: "sourcepubkey"
circle: "default"
context: "default"
actor: "vm_manager"
action: "restart"
params:
id: "10"
force: "true"
timeout_schedule: 30
timeout: 1800
log: true
ignore_error: true
ignore_error_codes: [404]
debug: true
retry: 2
status:
guid: "job-guid-1"
created: "2025-03-16T13:20:30Z"
start: "2025-03-16T13:21:00Z"
end: "2025-03-16T13:25:45Z"
status: "ok"
dependencies: []
responses:
'200':
description: Job updated successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Job'
'400':
description: Bad request
content:
application/json:
schema:
type: object
required:
- code
- message
properties:
code:
type: integer
format: int32
message:
type: string
examples:
badRequestExample:
value:
code: 400
message: "Invalid request parameters"
'404':
$ref: '#/components/responses/NotFound'
'500':
$ref: '#/components/responses/InternalServerError'
delete:
summary: Delete a job
description: Deletes a job by its ID
operationId: deleteJob
tags:
- jobs
parameters:
- name: id
in: path
description: Job ID
required: true
schema:
type: integer
format: int32
responses:
'204':
description: Job deleted successfully
'404':
$ref: '#/components/responses/NotFound'
'500':
$ref: '#/components/responses/InternalServerError'
/jobs/guid/{guid}:
get:
summary: Get a job by GUID
description: Returns a job by its GUID
operationId: getJobByGuid
tags:
- jobs
parameters:
- name: guid
in: path
description: Job GUID
required: true
schema:
type: string
responses:
'200':
description: Job found
content:
application/json:
schema:
$ref: '#/components/schemas/Job'
examples:
getJobByGuidExample:
value:
id: 1
guid: "job-guid-1"
agents: ["agent1pubkey"]
source: "sourcepubkey"
circle: "default"
context: "default"
actor: "vm_manager"
action: "start"
params:
id: "10"
timeout_schedule: 60
timeout: 3600
log: true
ignore_error: false
ignore_error_codes: []
debug: false
retry: 0
status:
guid: "job-guid-1"
created: "2025-03-16T13:20:30Z"
start: "2025-03-16T13:21:00Z"
end: "2025-03-16T13:25:45Z"
status: "ok"
dependencies: []
'404':
$ref: '#/components/responses/NotFound'
'500':
$ref: '#/components/responses/InternalServerError'
delete:
summary: Delete a job by GUID
description: Deletes a job by its GUID
operationId: deleteJobByGuid
tags:
- jobs
parameters:
- name: guid
in: path
description: Job GUID
required: true
schema:
type: string
responses:
'204':
description: Job deleted successfully
'404':
$ref: '#/components/responses/NotFound'
'500':
$ref: '#/components/responses/InternalServerError'
/jobs/guid/{guid}/status:
put:
summary: Update job status
description: Updates the status of a job by its GUID
operationId: updateJobStatus
tags:
- jobs
parameters:
- name: guid
in: path
description: Job GUID
required: true
schema:
type: string
requestBody:
description: New job status
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/JobStatus'
examples:
updateJobStatusExample:
value:
guid: "job-guid-1"
created: "2025-03-16T13:20:30Z"
start: "2025-03-16T13:21:00Z"
end: "2025-03-16T13:30:45Z"
status: "running"
responses:
'200':
description: Job status updated successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Job'
'400':
description: Bad request
content:
application/json:
schema:
type: object
required:
- code
- message
properties:
code:
type: integer
format: int32
message:
type: string
examples:
badRequestExample:
value:
code: 400
message: "Invalid request parameters"
'404':
$ref: '#/components/responses/NotFound'
'500':
$ref: '#/components/responses/InternalServerError'
components:
schemas:
Job:
type: object
required:
- id
- guid
- agents
- source
- actor
- action
- status
properties:
id:
type: integer
format: int32
description: Unique numeric ID for the job
guid:
type: string
description: Unique ID for the job
agents:
type: array
description: The public keys of the agent(s) which will execute the command
items:
type: string
source:
type: string
description: Public key from the agent who asked for the job
circle:
type: string
description: Circle in which the job is organized
default: default
context:
type: string
description: High level context in which actors will execute the work inside a circle
default: default
actor:
type: string
description: The actor that will execute the job (e.g. vm_manager)
action:
type: string
description: The action to be executed (e.g. start)
params:
type: object
description: Parameters for the job (e.g. id:10)
additionalProperties:
type: string
timeout_schedule:
type: integer
format: int32
description: Timeout before the job is picked up (in seconds)
default: 60
timeout:
type: integer
format: int32
description: Timeout for job execution (in seconds)
default: 3600
log:
type: boolean
description: Whether to log job execution
default: true
ignore_error:
type: boolean
description: If true, errors will be ignored and not reported
default: false
ignore_error_codes:
type: array
description: Error codes to ignore
items:
type: integer
format: int32
debug:
type: boolean
description: If true, more context will be provided for debugging
default: false
retry:
type: integer
format: int32
description: Number of retries for the job
default: 0
status:
$ref: '#/components/schemas/JobStatus'
dependencies:
type: array
description: Jobs that must be completed before this job can execute
items:
$ref: '#/components/schemas/JobDependency'
JobCreate:
type: object
required:
- agents
- source
- actor
- action
properties:
agents:
type: array
description: The public keys of the agent(s) which will execute the command
items:
type: string
source:
type: string
description: Public key from the agent who asked for the job
circle:
type: string
description: Circle in which the job is organized
default: default
context:
type: string
description: High level context in which actors will execute the work inside a circle
default: default
actor:
type: string
description: The actor that will execute the job (e.g. vm_manager)
action:
type: string
description: The action to be executed (e.g. start)
params:
type: object
description: Parameters for the job (e.g. id:10)
additionalProperties:
type: string
timeout_schedule:
type: integer
format: int32
description: Timeout before the job is picked up (in seconds)
default: 60
timeout:
type: integer
format: int32
description: Timeout for job execution (in seconds)
default: 3600
log:
type: boolean
description: Whether to log job execution
default: true
ignore_error:
type: boolean
description: If true, errors will be ignored and not reported
default: false
ignore_error_codes:
type: array
description: Error codes to ignore
items:
type: integer
format: int32
debug:
type: boolean
description: If true, more context will be provided for debugging
default: false
retry:
type: integer
format: int32
description: Number of retries for the job
default: 0
dependencies:
type: array
description: Jobs that must be completed before this job can execute
items:
$ref: '#/components/schemas/JobDependency'
JobStatus:
type: object
required:
- guid
- status
properties:
guid:
type: string
description: Unique ID for the job
created:
type: string
format: date-time
description: When the job was created
start:
type: string
format: date-time
description: When the job started or should start
end:
type: string
format: date-time
description: When the job ended
status:
type: string
description: Current status of the job
enum:
- created
- scheduled
- planned
- running
- error
- ok
JobDependency:
type: object
required:
- guid
properties:
guid:
type: string
description: Unique ID for the dependent job
agents:
type: array
description: The public keys of the agent(s) which can execute the command
items:
type: string
Error:
type: object
required:
- code
- message
properties:
code:
type: integer
format: int32
description: Error code
message:
type: string
description: Error message
responses:
BadRequest:
description: Bad request
content:
application/json:
schema:
$ref: '#/components/schemas/Error'
examples:
badRequestExample:
value:
code: 400
message: "Invalid request parameters"
NotFound:
description: Resource not found
content:
application/json:
schema:
$ref: '#/components/schemas/Error'
examples:
notFoundExample:
value:
code: 404
message: "Job not found"
InternalServerError:
description: Internal server error
content:
application/json:
schema:
$ref: '#/components/schemas/Error'
examples:
internalServerErrorExample:
value:
code: 500
message: "Internal server error"
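
For illustration, a minimal client sketch against this spec. It assumes the API is served locally at `http://localhost:8080/api/v1` (server URL is not fixed by the spec); the payload follows the JobCreate example above.

```v
import net.http

fn create_job_example() ! {
	payload := '{"agents":["agent1pubkey"],"source":"sourcepubkey","actor":"vm_manager","action":"start","params":{"id":"10","name":"test-vm"}}'
	resp := http.post_json('http://localhost:8080/api/v1/jobs', payload)!
	println(resp.status_code) // expect 201 on success
	println(resp.body) // the created Job as JSON
}
```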

View File

@@ -1,82 +0,0 @@
module play
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.circles.actions.models { JobStatus, Status }
import freeflowuniverse.herolib.data.paramsparser
import crypto.rand
import encoding.hex
// create processes a job creation action
pub fn (mut p Player) create(params paramsparser.Params) ! {
// Create a new job
mut job := p.job_db.new()
// Set job properties from parameters
job.guid = params.get_default('guid', generate_random_id()!)!
job.actor = params.get_default('actor', '')!
job.action = params.get_default('action', '')!
job.circle = params.get_default('circle', 'default')!
job.context = params.get_default('context', 'default')!
// Set agents if provided
if params.exists('agents') {
job.agents = params.get_list('agents')!
}
// Set source if provided
if params.exists('source') {
job.source = params.get('source')!
}
// Set timeouts if provided
if params.exists('timeout_schedule') {
job.timeout_schedule = u16(params.get_int('timeout_schedule')!)
}
if params.exists('timeout') {
job.timeout = u16(params.get_int('timeout')!)
}
// Set flags
job.log = params.get_default_true('log')
job.ignore_error = params.get_default_false('ignore_error')
job.debug = params.get_default_false('debug')
if params.exists('retry') {
job.retry = u8(params.get_int('retry')!)
}
// Set initial status
job.status = JobStatus{
guid: job.guid
created: ourtime.now()
status: Status.created
}
// // Set any additional parameters
// for key, value in params.get_map() {
// if key !in ['guid', 'actor', 'action', 'circle', 'context', 'agents',
// 'source', 'timeout_schedule', 'timeout', 'log', 'ignore_error', 'debug', 'retry'] {
// job.params[key] = value
// }
// }
// Save the job
saved_job := p.job_db.set(job)!
// Return result based on format
match p.return_format {
.heroscript {
println('!!job.created guid:\'${saved_job.guid}\' id:${saved_job.id}')
}
.json {
println('{"action": "job.created", "guid": "${saved_job.guid}", "id": ${saved_job.id}}')
}
}
}
// generate_random_id creates a random ID string
fn generate_random_id() !string {
random_bytes := rand.bytes(16)!
return hex.encode(random_bytes)
}

View File

@@ -1,36 +0,0 @@
module play
import freeflowuniverse.herolib.data.paramsparser
// delete processes a job deletion action
pub fn (mut p Player) delete(params paramsparser.Params) ! {
if params.exists('id') {
id := u32(params.get_int('id')!)
p.job_db.delete(id)!
// Return result based on format
match p.return_format {
.heroscript {
println('!!job.deleted id:${id}')
}
.json {
println('{"action": "job.deleted", "id": ${id}}')
}
}
} else if params.exists('guid') {
guid := params.get('guid')!
p.job_db.delete_by_guid(guid)!
// Return result based on format
match p.return_format {
.heroscript {
println('!!job.deleted guid:\'${guid}\'')
}
.json {
println('{"action": "job.deleted", "guid": "${guid}"}')
}
}
} else {
return error('Either id or guid must be provided for job.delete')
}
}

View File

@@ -1,41 +0,0 @@
module play
import freeflowuniverse.herolib.data.paramsparser
import json
// get processes a job retrieval action
pub fn (mut p Player) get(params paramsparser.Params) ! {
mut job_result := ''
if params.exists('id') {
id := u32(params.get_int('id')!)
job := p.job_db.get(id)!
// Return result based on format
match p.return_format {
.heroscript {
job_result = '!!job.result id:${job.id} guid:\'${job.guid}\' actor:\'${job.actor}\' action:\'${job.action}\' status:\'${job.status.status}\''
}
.json {
job_result = json.encode(job)
}
}
} else if params.exists('guid') {
guid := params.get('guid')!
job := p.job_db.get_by_guid(guid)!
// Return result based on format
match p.return_format {
.heroscript {
job_result = '!!job.result id:${job.id} guid:\'${job.guid}\' actor:\'${job.actor}\' action:\'${job.action}\' status:\'${job.status.status}\''
}
.json {
job_result = json.encode(job)
}
}
} else {
return error('Either id or guid must be provided for job.get')
}
println(job_result)
}

View File

@@ -1,38 +0,0 @@
module play
import freeflowuniverse.herolib.data.paramsparser
import json
// list processes a job listing action
pub fn (mut p Player) list(params paramsparser.Params) ! {
// Get all job IDs
ids := p.job_db.list()!
if params.get_default_false('verbose') {
// Get all jobs if verbose mode is enabled
jobs := p.job_db.getall()!
// Return result based on format
match p.return_format {
.heroscript {
println('!!job.list_result count:${jobs.len}')
for job in jobs {
println('!!job.item id:${job.id} guid:\'${job.guid}\' actor:\'${job.actor}\' action:\'${job.action}\' status:\'${job.status.status}\'')
}
}
.json {
println(json.encode(jobs))
}
}
} else {
// Return result based on format
match p.return_format {
.heroscript {
println('!!job.list_result count:${ids.len} ids:\'${ids.map(it.str()).join(',')}\'')
}
.json {
println('{"action": "job.list_result", "count": ${ids.len}, "ids": ${json.encode(ids)}}')
}
}
}
}

View File

@@ -1,61 +0,0 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.circles.actions.play { ReturnFormat }
import os
import flag
fn main() {
mut fp := flag.new_flag_parser(os.args)
fp.application('play_jobs.vsh')
fp.version('v0.1.0')
fp.description('Process heroscript job commands for circles actions')
fp.skip_executable()
input_file := fp.string('file', `f`, '', 'Input heroscript file')
input_text := fp.string('text', `t`, '', 'Input heroscript text')
actor := fp.string('actor', `a`, 'job', 'Actor name to process')
json_output := fp.bool('json', `j`, false, 'Output in JSON format')
help_requested := fp.bool('help', `h`, false, 'Show help message')
if help_requested {
println(fp.usage())
exit(0)
}
additional_args := fp.finalize() or {
eprintln(err)
println(fp.usage())
exit(1)
}
// Determine return format
return_format := if json_output { ReturnFormat.json } else { ReturnFormat.heroscript }
// Create a new player
mut player := play.new_player(actor, return_format) or {
eprintln('Failed to create player: ${err}')
exit(1)
}
// Load heroscript from file or text
mut input := ''
mut is_text := false
if input_file != '' {
input = input_file
is_text = false
} else if input_text != '' {
input = input_text
is_text = true
} else {
eprintln('Either --file or --text must be provided')
println(fp.usage())
exit(1)
}
// Process the heroscript
player.play(input, is_text) or {
eprintln('Failed to process heroscript: ${err}')
exit(1)
}
}
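
Illustrative invocations of this script (file names and heroscript content are made up; the flags are the ones defined above):

```
./play_jobs.vsh -t "!!job.list"
./play_jobs.vsh --file ./jobs.hero --json
./play_jobs.vsh -t "!!job.create actor:'vm_manager' action:'start'" -a job
```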

View File

@@ -1,84 +0,0 @@
module play
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.circles.base { SessionState, new_session }
import freeflowuniverse.herolib.circles.actions.db { JobDB, new_jobdb }
import os
// ReturnFormat defines the format for returning results
pub enum ReturnFormat {
heroscript
json
}
// Player is the main struct for processing heroscript actions
@[heap]
pub struct Player {
pub mut:
actor string // The name of the actor as used in heroscript
return_format ReturnFormat // Format for returning results
session_state SessionState // Session state for database operations
job_db JobDB // Job database handler
}
// new_player creates a new Player instance
pub fn new_player(actor string, return_format ReturnFormat) !Player {
// Initialize session state
mut session_state := new_session(
name: 'circles'
path: os.join_path(os.home_dir(), '.herolib', 'circles')
)!
// Create a new job database
mut job_db := new_jobdb(session_state)!
return Player{
actor: actor
return_format: return_format
session_state: session_state
job_db: job_db
}
}
// play processes a heroscript text or playbook
pub fn (mut p Player) play(input string, is_text bool) ! {
mut plbook := if is_text {
playbook.new(text: input)!
} else {
playbook.new(path: input)!
}
// Find all actions for this actor
filter := '${p.actor}.'
actions := plbook.find(filter: filter)!
if actions.len == 0 {
println('No actions found for actor: ${p.actor}')
return
}
// Process each action
for action in actions {
action_name := action.name.split('.')[1]
// Call the appropriate method based on the action name
match action_name {
'create' { p.create(action.params)! }
'get' { p.get(action.params)! }
'delete' { p.delete(action.params)! }
'update_status' { p.update_status(action.params)! }
'list' { p.list(action.params)! }
else { println('Unknown action: ${action_name}') }
}
}
}
// create method is implemented in create.v
// get method is implemented in get.v
// delete method is implemented in delete.v
// update_status method is implemented in update_status.v
// list method is implemented in list.v
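
For reference, an illustrative heroscript playbook this dispatcher would route to the methods above (actor `job`; all values are made up):

```
!!job.create actor:'vm_manager' action:'start' agents:'agent1,agent2' timeout:7200
!!job.get guid:'<guid printed by job.created>'
!!job.update_status guid:'<guid>' status:'running'
!!job.list verbose:1
!!job.delete guid:'<guid>'
```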

View File

@@ -1,76 +0,0 @@
module play
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.circles.actions.models { JobStatus, Status }
import freeflowuniverse.herolib.data.ourtime
// update_status processes a job status update action
pub fn (mut p Player) update_status(params paramsparser.Params) ! {
if params.exists('guid') && params.exists('status') {
guid := params.get('guid')!
status_str := params.get('status')!
// Convert status string to Status enum
mut new_status := Status.created
match status_str {
'created' {
new_status = Status.created
}
'scheduled' {
new_status = Status.scheduled
}
'planned' {
new_status = Status.planned
}
'running' {
new_status = Status.running
}
'error' {
new_status = Status.error
}
'ok' {
new_status = Status.ok
}
else {
return error('Invalid status value: ${status_str}')
}
}
// Create job status object
mut job_status := JobStatus{
guid: guid
created: ourtime.now()
status: new_status
}
// Set start time if provided
if params.exists('start') {
job_status.start = params.get_time('start')!
} else {
job_status.start = ourtime.now()
}
// Set end time if provided
if params.exists('end') {
job_status.end = params.get_time('end')!
} else if new_status in [Status.error, Status.ok] {
// Automatically set end time for terminal statuses
job_status.end = ourtime.now()
}
// Update job status
p.job_db.update_job_status(guid, job_status)!
// Return result based on format
match p.return_format {
.heroscript {
println('!!job.status_updated guid:\'${guid}\' status:\'${status_str}\'')
}
.json {
println('{"action": "job.status_updated", "guid": "${guid}", "status": "${status_str}"}')
}
}
} else {
return error('Both guid and status must be provided for job.update_status')
}
}

View File

@@ -1,86 +0,0 @@
module actions
// From file: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/circles/actions/db/job_db.v
pub struct JobDB {
pub mut:
db DBHandler[Job]
}
pub fn new_jobdb(session_state SessionState) !JobDB {}
pub fn (mut m JobDB) new() Job {}
// set adds or updates a job
pub fn (mut m JobDB) set(job Job) !Job {}
// get retrieves a job by its ID
pub fn (mut m JobDB) get(id u32) !Job {}
// list returns all job IDs
pub fn (mut m JobDB) list() ![]u32 {}
pub fn (mut m JobDB) getall() ![]Job {}
// delete removes a job by its ID
pub fn (mut m JobDB) delete(id u32) ! {}
// get_by_guid retrieves a job by its GUID
pub fn (mut m JobDB) get_by_guid(guid string) !Job {}
// delete_by_guid removes a job by its GUID
pub fn (mut m JobDB) delete_by_guid(guid string) ! {}
// update_job_status updates the status of a job
pub fn (mut m JobDB) update_job_status(guid string, new_status JobStatus) !Job {}
// From file: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/circles/actions/models/job.v
// Job represents a task to be executed by an agent
pub struct Job {
pub mut:
id u32 // unique numeric id for the job
guid string // unique id for the job
agents []string // the pub key of the agent(s) which will execute the command, only 1 will execute
source string // pubkey from the agent who asked for the job
circle string = 'default' // our digital life is organized in circles
context string = 'default' // is the high level context in which actors will execute the work inside a circle
actor string // e.g. vm_manager
action string // e.g. start
params map[string]string // e.g. id:10
timeout_schedule u16 = 60 // timeout in sec before the job gets picked up
timeout u16 = 3600 // timeout in sec
log bool = true
ignore_error bool // if true, an error will just exit and not raise; there will be no error reporting
ignore_error_codes []u16 // if we want to ignore certain error codes
debug bool // if debug will get more context
retry u8 // nr of retries, default is no retry
status JobStatus
dependencies []JobDependency // will not execute until other jobs are done
}
// JobStatus represents the current state of a job
pub struct JobStatus {
pub mut:
guid string // unique id for the job
created ourtime.OurTime // when we created the job
start ourtime.OurTime // when the job needs to start
end ourtime.OurTime // when the job ended, can be in error
status Status // current status of the job
}
// JobDependency represents a dependency on another job
pub struct JobDependency {
pub mut:
guid string // unique id for the job
agents []string // the pub key of the agent(s) which can execute the command
}
// Status represents the possible states of a job
pub enum Status {
created // initial state
scheduled // job has been scheduled
planned // arrived where actor will execute the job
running // job is currently running
error // job encountered an error
ok // job completed successfully
}

View File

@@ -1,155 +0,0 @@
module core
// import freeflowuniverse.herolib.hero.db.managers.circle as circle_models
pub struct DBHandler[T] {
pub mut:
prefix string
session_state SessionState
}
// new_dbhandler creates a new DBHandler for type T
pub fn new_dbhandler[T](prefix string, session_state SessionState) DBHandler[T] {
return DBHandler[T]{
prefix: prefix
session_state: session_state
}
}
// set adds or updates an item
pub fn (mut m DBHandler[T]) set(item_ T) !T {
mut item := item_
// Store the item data in the database and get the assigned ID
item.Base.id = m.session_state.dbs.db_data_core.set(data: item.dumps()!)!
// Update index keys
for key, value in m.index_keys(item)! {
index_key := '${m.prefix}:${key}:${value}'
m.session_state.dbs.db_meta_core.set(index_key, item.Base.id.str().bytes())!
}
return item
}
// get_data retrieves the raw item data by its ID
pub fn (mut m DBHandler[T]) get_data(id u32) ![]u8 {
// Get the item data from the database
item_data := m.session_state.dbs.db_data_core.get(id) or {
return error('Item data not found for ID ${id}')
}
return item_data
}
pub fn (mut m DBHandler[T]) exists(id u32) !bool {
item_data := m.session_state.dbs.db_data_core.get(id) or { return false }
return item_data != []u8{}
}
// get_data_by_key retrieves the raw item data by a specific key field and value
pub fn (mut m DBHandler[T]) get_data_by_key(key_field string, key_value string) ![]u8 {
// Create the key for the radix tree
key := '${m.prefix}:${key_field}:${key_value}'
// Get the ID from the radix tree
id_bytes := m.session_state.dbs.db_meta_core.get(key) or {
return error('Item with ${key_field}=${key_value} not found')
}
// Convert the ID bytes to u32
id_str := id_bytes.bytestr()
id := id_str.u32()
// Get the item using the ID
return m.get_data(id)
}
// delete removes an item and its index keys
pub fn (mut m DBHandler[T]) delete(item T) ! {
exists := m.exists(item.Base.id)!
if !exists {
return
}
for key, value in m.index_keys(item)! {
index_key := '${m.prefix}:${key}:${value}'
m.session_state.dbs.db_meta_core.delete(index_key)!
}
// Delete the item data from the database
m.session_state.dbs.db_data_core.delete(item.Base.id)!
}
// internal function to always have at least one index key, the default is id
fn (mut m DBHandler[T]) index_keys(item T) !map[string]string {
mut keymap := item.index_keys()
if keymap.len == 0 {
keymap['id'] = item.Base.id.str()
}
return keymap
}
// list returns all ids from the db handler
pub fn (mut m DBHandler[T]) list() ![]u32 {
// Use the RadixTree's prefix capabilities to list all items
mut empty_item := T{}
mut keys_map := m.index_keys(empty_item)!
if keys_map.len == 0 {
return error('No index keys defined for this type')
}
// Get the first key from the map
mut default_key := ''
for k, _ in keys_map {
default_key = k
break
}
// Get all IDs from the meta database
id_bytes := m.session_state.dbs.db_meta_core.getall('${m.prefix}:${default_key}')!
// Convert bytes to u32 IDs
mut result := []u32{}
for id_byte in id_bytes {
id_str := id_byte.bytestr()
result << id_str.u32()
}
return result
}
// list_by_prefix returns all items that match a specific prefix pattern
pub fn (mut m DBHandler[T]) list_by_prefix(key_field string, prefix_value string) ![]u32 {
// Create the prefix for the radix tree
prefix := '${m.prefix}:${key_field}:${prefix_value}'
println('DEBUG: Searching with prefix: ${prefix}')
// Use RadixTree's list method to get all keys with this prefix
keys := m.session_state.dbs.db_meta_core.list(prefix)!
println('DEBUG: Found ${keys.len} keys matching prefix')
for i, key in keys {
println('DEBUG: Key ${i}: ${key}')
}
// Extract IDs from the values stored in these keys
mut ids := []u32{}
mut seen := map[u32]bool{}
for key in keys {
if id_bytes := m.session_state.dbs.db_meta_core.get(key) {
id_str := id_bytes.bytestr()
if id_str.len > 0 {
id := id_str.u32()
println('DEBUG: Found ID ${id} for key ${key}')
// Only add the ID if we haven't seen it before
if !seen[id] {
ids << id
seen[id] = true
}
}
}
}
println('DEBUG: Returning ${ids.len} unique IDs')
return ids
}
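
To make the storage layout concrete: for a handler created with prefix `user` and an item whose index_keys() returns `{'name': 'alice'}`, set() writes roughly the following (the id `7` is illustrative, assigned by db_data_core):

```
db_data_core (ourdb):      7 -> <binary dump of the item>
db_meta_core (radixtree):  'user:name:alice' -> '7'
```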

View File

@@ -1,75 +0,0 @@
module core
import freeflowuniverse.herolib.data.ourdb
import freeflowuniverse.herolib.data.radixtree
import freeflowuniverse.herolib.core.texttools
import os
// SessionState holds the state of a session which is linked to someone calling it as well as the DB's we use
pub struct SessionState {
pub mut:
name string
pubkey string // pubkey of user who called this
addr string // mycelium address
dbs Databases
}
pub struct Databases {
pub mut:
db_data_core &ourdb.OurDB
db_meta_core &radixtree.RadixTree
db_data_mcc &ourdb.OurDB
db_meta_mcc &radixtree.RadixTree
}
@[params]
pub struct StateArgs {
pub mut:
name string
pubkey string // pubkey of user who called this
addr string // mycelium address
path string
}
pub fn new_session(args_ StateArgs) !SessionState {
mut args := args_
args.name = texttools.name_fix(args.name)
if args.path.len == 0 {
args.path = os.join_path(os.home_dir(), 'hero', 'dbs')
}
mypath := os.join_path(args.path, args.name)
mut db_data_core := ourdb.new(
path: os.join_path(mypath, 'data_core')
incremental_mode: true
)!
mut db_meta_core := radixtree.new(
path: os.join_path(mypath, 'meta_core')
)!
mut db_data_mcc := ourdb.new(
path: os.join_path(mypath, 'data_mcc')
incremental_mode: false
)!
mut db_meta_mcc := radixtree.new(
path: os.join_path(mypath, 'meta_mcc')
)!
mut dbs := Databases{
db_data_core: &db_data_core
db_meta_core: &db_meta_core
db_data_mcc: &db_data_mcc
db_meta_mcc: &db_meta_mcc
}
mut s := SessionState{
name: args.name
dbs: dbs
pubkey: args.pubkey
addr: args.addr
}
return s
}

View File

@@ -0,0 +1,233 @@
module hero_db
import json
import freeflowuniverse.herolib.clients.postgresql_client
import db.pg
import freeflowuniverse.herolib.core.texttools
// Generic database interface for Hero root objects
pub struct HeroDB[T] {
pub mut:
db pg.DB
table_name string
}
// new creates a new HeroDB instance for a specific type T
pub fn new[T]() !HeroDB[T] {
// Derive the table name from the type's module (dirname) and the object name, all lowercase
module_path := T.name.split('.')
object_name := texttools.snake_case(module_path.last())
mut table_name := object_name
if module_path.len >= 2 {
dirname := texttools.snake_case(module_path[module_path.len - 2])
table_name = '${dirname}_${object_name}'
}
mut dbclient := postgresql_client.get()!
mut dbcl := dbclient.db() or {
return error('Failed to connect to database')
}
return HeroDB[T]{
db: dbcl
table_name: table_name
}
}
// ensure_table creates the database table with proper schema for type T
pub fn (mut self HeroDB[T]) ensure_table() ! {
// Get index fields from struct reflection
index_fields := self.get_index_fields()
// Build index column definitions
mut index_cols := []string{}
for field in index_fields {
index_cols << '${field} varchar(255)'
}
mut index_cols_sql := ''
if index_cols.len > 0 {
index_cols_sql = '${index_cols.join(', ')},'
}
// Create table with JSON storage
create_sql := '
CREATE TABLE IF NOT EXISTS ${self.table_name} (
id serial PRIMARY KEY,
${index_cols_sql}
data jsonb NOT NULL,
created_at timestamp DEFAULT CURRENT_TIMESTAMP,
updated_at timestamp DEFAULT CURRENT_TIMESTAMP
)
'
self.db.exec(create_sql)!
// Create indexes on index fields
for field in index_fields {
index_sql := 'CREATE INDEX IF NOT EXISTS idx_${self.table_name}_${field} ON ${self.table_name}(${field})'
self.db.exec(index_sql)!
}
}
// Get index fields marked with @[index] from struct
fn (self HeroDB[T]) get_index_fields() []string {
mut fields := []string{}
$for field in T.fields {
if field.attrs.contains('index') {
fields << texttools.snake_case(field.name)
}
}
return fields
}
// save stores the object T in the database, updating if it already exists
pub fn (mut self HeroDB[T]) save(obj T) ! {
// Get index values from object
index_data := self.extract_index_values(obj)
// Serialize to JSON
json_data := json.encode_pretty(obj)
// Check if object already exists
mut query := 'SELECT id FROM ${self.table_name} WHERE '
mut params := []string{}
// Build WHERE clause for unique lookup
for key, value in index_data {
params << '${key} = \'${value}\''
}
query += params.join(' AND ')
existing := self.db.exec(query)!
if existing.len > 0 {
// Update existing record
id_val := existing[0].vals[0] or { return error('no id') }
update_sql := "
UPDATE ${self.table_name}
SET data = '${json_data}', updated_at = CURRENT_TIMESTAMP
WHERE id = ${id_val}
"
self.db.exec(update_sql)!
} else {
// Insert new record
mut columns := []string{}
mut values := []string{}
// Add index columns
for key, value in index_data {
columns << key
values << "'${value}'"
}
// Add JSON data
columns << 'data'
values << "'${json_data}'"
insert_sql := '
INSERT INTO ${self.table_name} (${columns.join(', ')})
VALUES (${values.join(', ')})
'
self.db.exec(insert_sql)!
}
}
// get_by_index retrieves an object T by its index values
pub fn (mut self HeroDB[T]) get_by_index(index_values map[string]string) !T {
mut query := 'SELECT data FROM ${self.table_name} WHERE '
mut params := []string{}
for key, value in index_values {
params << '${key} = \'${value}\''
}
query += params.join(' AND ')
rows := self.db.exec(query)!
if rows.len == 0 {
return error('${T.name} not found with index values: ${index_values}')
}
json_data_val := rows[0].vals[0] or { return error('no data') }
obj := json.decode(T, json_data_val) or {
return error('Failed to decode JSON: ${err}')
}
return obj
}
// // get_all retrieves all objects T from the database
// pub fn (mut self HeroDB[T]) get_all() ![]T {
// query := 'SELECT data FROM ${self.table_name} ORDER BY id DESC'
// rows := self.db_client.db()!.exec(query)!
// mut results := []T{}
// for row in rows {
// json_data_val := row.vals[0] or { continue }
// json_data := json_data_val.str()
// mut obj := json.decode(T, json_data) or {
// // e.g. an error could be given here
// continue // Skip invalid JSON
// }
// results << &obj
// }
// return results
// }
// // search_by_index searches for objects T by a specific index field
// pub fn (mut self HeroDB[T]) search_by_index(field_name string, value string) ![]T {
// query := 'SELECT data FROM ${self.table_name} WHERE ${field_name} = \'${value}\' ORDER BY id DESC'
// rows := self.db_client.db()!.exec(query)!
// mut results := []T{}
// for row in rows {
// json_data_val := row.vals[0] or { continue }
// json_data := json_data_val.str()
// mut obj := json.decode(T, json_data) or {
// continue
// }
// results << &obj
// }
// return results
// }
// // delete_by_index removes objects T matching the given index values
// pub fn (mut self HeroDB[T]) delete_by_index(index_values map[string]string) ! {
// mut query := 'DELETE FROM ${self.table_name} WHERE '
// mut params := []string{}
// for key, value in index_values {
// params << '${key} = \'${value}\''
// }
// query += params.join(' AND ')
// self.db_client.db()!.exec(query)!
// }
// Helper to extract index values from object
fn (self HeroDB[T]) extract_index_values(obj T) map[string]string {
mut index_data := map[string]string{}
$for field in T.fields {
if field.attrs.contains('index') {
field_name := texttools.snake_case(field.name)
$if field.typ is string {
index_data[field_name] = obj.$(field.name)
} $else {
index_data[field_name] = obj.$(field.name).str()
}
}
}
return index_data
}

View File

@@ -0,0 +1,36 @@
```v
// Example usage:
// Create HeroDB for Circle type (the postgresql client is resolved internally via postgresql_client.get())
mut circle_db := hero_db.new[circle.Circle]()!
circle_db.ensure_table()!
// Create and save a circle
mut my_circle := circle.Circle{
name: "Tech Community"
description: "A community for tech enthusiasts"
domain: "tech.example.com"
config: circle.CircleConfig{
max_members: 1000
allow_guests: true
auto_approve: false
theme: "modern"
}
status: circle.CircleStatus.active
}
circle_db.save(my_circle)!
// Retrieve the circle
retrieved_circle := circle_db.get_by_index({
"domain": "tech.example.com"
})!
// Search circles by status
active_circles := circle_db.search_by_index("status", "active")!
```

View File

@@ -1,84 +0,0 @@
module circle
import freeflowuniverse.herolib.hero.db.core { DBHandler, SessionState, new_dbhandler }
import freeflowuniverse.herolib.hero.db.models.circle { Role, User }
type UserObj = User
@[heap]
pub struct UserDB {
pub mut:
db DBHandler[UserObj]
}
pub fn new_userdb(session_state SessionState) !UserDB {
return UserDB{
db: new_dbhandler[UserObj]('user', session_state)
}
}
pub fn (mut m UserDB) new() User {
return UserObj{}
}
// set adds or updates a user
pub fn (mut m UserDB) set(user User) !UserObj {
return m.db.set(user)!
}
// get retrieves a user by its ID
pub fn (mut m UserDB) get(id u32) !UserObj {
data := m.db.get_data(id)!
return loads_user(data)!
}
// list returns all user IDs
pub fn (mut m UserDB) list() ![]u32 {
return m.db.list()!
}
pub fn (mut m UserDB) getall() ![]UserObj {
mut objs := []UserObj{}
for id in m.list()! {
user := m.get(id)!
objs << user
}
return objs
}
// delete removes a user by its ID
pub fn (mut m UserDB) delete(obj UserObj) ! {
m.db.delete(obj)!
}
//////////////////CUSTOM METHODS//////////////////////////////////
// get_by_name retrieves a user by its name
pub fn (mut m UserDB) get_by_name(name string) !UserObj {
data := m.db.get_data_by_key('name', name)!
return loads_user(data)!
}
// delete_by_name removes a user by its name
pub fn (mut m UserDB) delete_by_name(name string) ! {
// Get the user by name
user := m.get_by_name(name) or {
// User not found, nothing to delete
return
}
// Delete the user by ID
m.delete(user)!
}
// update_user_role updates the role of a user
pub fn (mut m UserDB) update_user_role(name string, new_role Role) !UserObj {
// Get the user by name
mut user := m.get_by_name(name)!
// Update the user role
user.role = new_role
// Save the updated user
return m.set(user)!
}

View File

@@ -1,106 +0,0 @@
module circle
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.hero.db.models.circle { Role, User }
// dumps serializes a User struct to binary data
pub fn (user UserObj) dumps() ![]u8 {
mut e := encoder.new()
// Add version byte (v1)
e.add_u8(1)
// Encode Base struct fields
e.add_u32(user.Base.id)
e.add_ourtime(user.Base.creation_time)
e.add_ourtime(user.Base.mod_time)
// Encode comments array from Base
e.add_u16(u16(user.Base.comments.len))
for id in user.Base.comments {
e.add_u32(id)
}
// Encode User-specific fields
e.add_string(user.name)
e.add_string(user.description)
e.add_u8(u8(user.role)) // Encode enum as u8
// Encode contact_ids array
e.add_u16(u16(user.contact_ids.len))
for id in user.contact_ids {
e.add_u32(id)
}
// Encode wallet_ids array
e.add_u16(u16(user.wallet_ids.len))
for id in user.wallet_ids {
e.add_u32(id)
}
// Encode pubkey
e.add_string(user.pubkey)
return e.data
}
// loads deserializes binary data to a User struct
pub fn loads_user(data []u8) !User {
mut d := encoder.decoder_new(data)
// Read version byte
version := d.get_u8()!
if version != 1 {
return error('Unsupported version: ${version}')
}
// Create a new User instance
mut user := User{}
// Decode Base struct fields
user.id = d.get_u32()!
user.creation_time = d.get_ourtime()!
user.mod_time = d.get_ourtime()!
// Decode comments array from Base
comments_count := d.get_u16()!
user.comments = []u32{cap: int(comments_count)}
for _ in 0 .. comments_count {
user.comments << d.get_u32()!
}
// Decode User-specific fields
user.name = d.get_string()!
user.description = d.get_string()!
// Get the u8 value first
role_value := d.get_u8()!
// Validate and convert to Role enum
if role_value <= u8(Role.external) {
// Use unsafe block for casting number to enum as required by V
unsafe {
user.role = Role(role_value)
}
} else {
return error('Invalid role value: ${role_value}')
}
// Decode contact_ids array
contact_count := d.get_u16()!
user.contact_ids = []u32{cap: int(contact_count)}
for _ in 0 .. contact_count {
user.contact_ids << d.get_u32()!
}
// Decode wallet_ids array
wallet_count := d.get_u16()!
user.wallet_ids = []u32{cap: int(wallet_count)}
for _ in 0 .. wallet_count {
user.wallet_ids << d.get_u32()!
}
// Decode pubkey
user.pubkey = d.get_string()!
return user
}
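
A minimal round-trip sketch for the dumps/loads pair above, with hypothetical field values, assuming the `circle` model imports used in this file:

```v
// encode a user to the versioned binary format, then decode it back
u := User{
	name: 'alice'
	description: 'example user'
	role: Role.member
	pubkey: 'pubkey_abc'
}
data := UserObj(u).dumps()!
decoded := loads_user(data)!
assert decoded.name == u.name
assert decoded.role == u.role
```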

View File

@@ -1,113 +0,0 @@
module circle
import freeflowuniverse.herolib.hero.db.core { new_session }
import freeflowuniverse.herolib.hero.db.models.circle { Role }
import freeflowuniverse.herolib.data.ourtime
import os
// test_user_db tests the functionality of the UserDB
pub fn test_user_db() ! {
println('Starting User DB Test')
// Create a temporary directory for the test
test_dir := os.join_path(os.temp_dir(), 'hero_user_test')
os.mkdir_all(test_dir) or { return error('Failed to create test directory: ${err}') }
defer {
// Clean up after test
os.rmdir_all(test_dir) or { eprintln('Failed to remove test directory: ${err}') }
}
// Create a new session state
mut session := new_session(
name: 'test_session'
path: test_dir
)!
println('Session created: ${session.name}')
// Initialize the UserDB
mut user_db := new_userdb(session)!
println('UserDB initialized')
// Create and add users
mut admin_user := user_db.new()
admin_user.name = 'admin_user'
admin_user.description = 'Administrator user for testing'
admin_user.role = Role.admin
admin_user.pubkey = 'admin_pubkey_123'
admin_user.creation_time = ourtime.now()
admin_user.mod_time = ourtime.now()
// println(admin_user)
// if true{panic("sss")}
// Save the admin user
admin_user = user_db.set(admin_user)!
println('Admin user created with ID: ${admin_user.Base.id}')
// Create a regular member
mut member_user := user_db.new()
member_user.name = 'member_user'
member_user.description = 'Regular member for testing'
member_user.role = Role.member
member_user.pubkey = 'member_pubkey_456'
member_user.creation_time = ourtime.now()
member_user.mod_time = ourtime.now()
// Save the member user
member_user = user_db.set(member_user)!
println('Member user created with ID: ${member_user.Base.id}')
// Create a guest user
mut guest_user := user_db.new()
guest_user.name = 'guest_user'
guest_user.description = 'Guest user for testing'
guest_user.role = Role.guest
guest_user.pubkey = 'guest_pubkey_789'
guest_user.creation_time = ourtime.now()
guest_user.mod_time = ourtime.now()
// Save the guest user
guest_user = user_db.set(guest_user)!
println('Guest user created with ID: ${guest_user.Base.id}')
// Retrieve users by ID
retrieved_admin := user_db.get(admin_user.Base.id)!
println('Retrieved admin user by ID: ${retrieved_admin.name} (Role: ${retrieved_admin.role})')
// Retrieve users by name
retrieved_member := user_db.get_by_name('member_user')!
println('Retrieved member user by name: ${retrieved_member.name} (Role: ${retrieved_member.role})')
// Update a user's role
updated_guest := user_db.update_user_role('guest_user', Role.contributor)!
println('Updated guest user role to contributor: ${updated_guest.name} (Role: ${updated_guest.role})')
// List all users
user_ids := user_db.list()!
println('Total users: ${user_ids.len}')
println('User IDs: ${user_ids}')
// Get all users
all_users := user_db.getall()!
println('All users:')
for user in all_users {
println(' - ${user.name} (ID: ${user.Base.id}, Role: ${user.role})')
}
// Delete a user
user_db.delete(member_user)!
println('Deleted member user with ID: ${member_user.Base.id}')
// Delete a user by name
user_db.delete_by_name('guest_user')!
println('Deleted guest user by name')
// List remaining users
remaining_user_ids := user_db.list()!
println('Remaining users: ${remaining_user_ids.len}')
println('Remaining user IDs: ${remaining_user_ids}')
println('User DB Test completed successfully')
}

View File

@@ -1,6 +0,0 @@
To test the OpenAPI spec, see
https://editor-next.swagger.io/

View File

@@ -1,181 +0,0 @@
module bootstrap
import os
import time
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
const hostname = 'debug-alpine.vm'
pub struct AlpineLoader {
pub mut:
alpine_url map[string]string
}
@[params]
pub struct AlpineLoaderArgs {
pub mut:
alpine_url map[string]string = {
'aarch64': 'https://dl-cdn.alpinelinux.org/alpine/v3.19/releases/aarch64/alpine-standard-3.19.1-aarch64.iso'
'x86_64': 'https://dl-cdn.alpinelinux.org/alpine/v3.19/releases/x86_64/alpine-standard-3.19.1-x86_64.iso'
}
}
pub fn new_alpine_loader(args AlpineLoaderArgs) AlpineLoader {
mut al := AlpineLoader{
alpine_url: args.alpine_url
}
return al
}
//```
// enum CPUType {
// unknown
// intel
// arm
// intel32
// arm32
// }
//```
@[params]
pub struct AlpineLaunchArgs {
pub mut:
name string
hostname string = 'herodev'
cputype osal.CPUType = .intel
reset bool
}
pub fn (mut self AlpineLoader) start(args_ AlpineLaunchArgs) ! {
mut args := args_
mut cpu_name := 'x86_64'
if args.cputype == .arm {
cpu_name = 'aarch64'
}
if args.name == '' {
args.name = 'alpine_build_${cpu_name}'
}
mut url := self.alpine_url[cpu_name]
mut iso_path0 := osal.download(
reset: args.reset
url: url
minsize_kb: 90000
dest: '/tmp/alpine_std_${cpu_name}.iso'
)!
iso_path := iso_path0.path
mut vmdir := pathlib.get_dir(path: '/var/vms/${args.name}', create: true)!
hdd_path := '${vmdir.path}/hdd.qcow2'
console.print_debug(' - iso: ${iso_path}')
// Clean up previous QEMU instance
console.print_debug('[+] cleaning previous instance of qemu')
os.system('killall qemu-system-x86_64')
for os.system('pidof qemu-system-x86_64') == 0 {
time.sleep(100 * time.millisecond)
}
// Check if the ISO file exists
if !os.exists(iso_path) {
console.print_debug('ISO file not found: ${iso_path}')
return
}
// Create HDD image if it doesn't exist
if !os.exists(hdd_path) {
osal.exec(cmd: 'qemu-img create -f qcow2 ${hdd_path} 10G')!
}
// Start the virtual machine
console.print_debug('[+] starting virtual machine')
osal.exec(
cmd: '
rm -f /tmp/alpine.in
rm -f /tmp/alpine.out
mkfifo /tmp/alpine.in /tmp/alpine.out'
)!
// shared-folder values below mirror the reference shell script further down (SHARED_DIR=/tmp/shared, TAG=host); adjust as needed
shared_dir := '/tmp/shared'
tag := 'host'
osal.exec(
cmd: '
qemu-system-x86_64 -m 1024 -cdrom "${iso_path}" -drive file="${hdd_path}",index=0,media=disk,format=qcow2 -boot c -enable-kvm -smp cores=2,maxcpus=2 -net nic -net user,hostfwd=tcp::2225-:22 -virtfs local,path="${shared_dir}",mount_tag="${tag}",security_model=mapped,id=shared -serial pipe:/tmp/alpine -qmp unix:/tmp/qmp-sock,server,nowait -vnc :0,password=on -monitor stdio
'
debug: true
)!
// -serial file:/tmp/qemu_log.txt \
// -display none
// -daemonize
console.print_debug('[+] virtual machine started, waiting for console')
// Interact with the console
mut console_input := os.open('/tmp/alpine.in') or { panic(err) }
mut console_output := os.open('/tmp/alpine.out') or { panic(err) }
defer {
console_input.close()
console_output.close()
}
for {
mut line := []u8{len: 1024}
read_count := console_output.read(mut line) or { break }
line_str := line[..read_count].bytestr()
console.print_debug(line_str)
// Handle console output and send input
if line_str.contains('localhost login:') {
console_input.writeln('root')!
} else if line_str.contains('localhost:~#') {
console_input.writeln('setup-alpine')!
} else if line_str.contains(' [localhost]') {
console_input.writeln(hostname)!
} else if line_str.contains(' [eth0]') {
console_input.writeln('')!
} else if line_str.contains(' [dhcp]') {
console_input.writeln('')!
} else if line_str.contains('manual network configuration? (y/n) [n]') {
console_input.writeln('')!
console.print_debug('[+] waiting for network connectivity')
} else if line_str.contains('New password:') {
console_input.writeln('root')!
} else if line_str.contains('Retype password:') {
console_input.writeln('root')!
} else if line_str.contains('are you in? [UTC]') {
console_input.writeln('')!
} else if line_str.contains('HTTP/FTP proxy URL?') && line_str.contains(' [none]') {
console_input.writeln('')!
} else if line_str.contains('Enter mirror number or URL: [1]') {
console_input.writeln('')!
} else if line_str.contains('Setup a user?') && line_str.contains(' [no]') {
console_input.writeln('')!
} else if line_str.contains(' [openssh]') {
console_input.writeln('')!
} else if line_str.contains(' [prohibit-password]') {
console_input.writeln('yes')!
} else if line_str.contains('Enter ssh key') && line_str.contains(' [none]') {
console_input.writeln('')!
} else if line_str.contains('Which disk') && line_str.contains(' [none]') {
console_input.writeln('sda')!
} else if line_str.contains('How would you like to use it?') && line_str.contains(' [?]') {
console_input.writeln('sys')!
} else if line_str.contains('and continue? (y/n) [n]') {
console_input.writeln('y')!
} else if line_str.contains('Installation is complete.') {
console_input.writeln('reboot')!
} else if line_str.contains('${hostname} login:') {
console.print_debug('[+] ====================================================================')
console.print_debug('[+] virtual machine configured, up and running, root password: root')
console.print_debug('[+] you can ssh this machine with the local reverse port:')
console.print_debug('[+]')
console.print_debug('[+] ssh root@localhost -p 2225')
console.print_debug('[+]')
break
}
}
console.print_debug('[+] virtual machine initialized')
}

View File

@@ -1,14 +0,0 @@
## troubleshooting
```bash
qemu-system-x86_64 -monitor stdio
#on remote
ssh -L 5901:localhost:5901 root@65.21.132.119
#on new console in remote (osx)
open vnc://localhost:5901
```

View File

@@ -1,213 +0,0 @@
#!/bin/bash
echo "[+] cleaning previous instance of qemu"
killall qemu-system-x86_64
while pidof qemu-system-x86_64; do
sleep 0.1
done
killall qemu-system-aarch64
while pidof qemu-system-aarch64; do
sleep 0.1
done
set -ex
# Path to your ISO file
# ISO_PATH="/var/vm/alpine-standard-3.19.1-x86_64.iso"
ISO_PATH="/var/vm/alpine-virt-3.19.1-aarch64.iso"
SHARED_DIR="/tmp/shared"
TAG="host"
HDD_PATH="/var/vm/vm_alpine_arm.qcow2"
UEFI_CODE="/tmp/uefi_code.fd"
UEFI_VARS="/tmp/uefi_vars.fd"
# Check if the ISO file exists
if [ ! -f "$ISO_PATH" ]; then
echo "ISO file not found: $ISO_PATH"
exit 1
fi
rm -f $HDD_PATH
if [ ! -f "$HDD_PATH" ]; then
qemu-img create -f qcow2 $HDD_PATH 10G
fi
if [ ! -f "$UEFI_CODE" ]; then
cd /tmp
wget https://snapshots.linaro.org/components/kernel/leg-virt-tianocore-edk2-upstream/5181/QEMU-AARCH64/RELEASE_GCC/QEMU_EFI.img.gz
rm -f QEMU_EFI.img
gunzip QEMU_EFI.img.gz
cp QEMU_EFI.img $UEFI_CODE
#echo "UEFI_CODE file not found: $UEFI_CODE"
#exit 1
fi
#rm -f $UEFI_VARS
if [ ! -f "$UEFI_VARS" ]; then
qemu-img create -f qcow2 $UEFI_VARS 64M
fi
rm -f /tmp/alpine.in /tmp/alpine.out
mkfifo /tmp/alpine.in /tmp/alpine.out
echo "[+] starting virtual machine"
qemu-system-aarch64 \
-machine virt,gic-version=max \
-cpu max \
-drive if=pflash,format=raw,readonly=on,file=$UEFI_CODE \
-drive if=pflash,file=$UEFI_VARS \
-m 1024 \
-cdrom "$ISO_PATH" \
-drive file="$HDD_PATH",index=0,media=disk,format=qcow2 \
-boot c \
-smp cores=2,maxcpus=2 \
-net nic \
-net user,hostfwd=tcp::2225-:22 \
-qmp unix:/tmp/qmp-sock,server,nowait \
-serial pipe:/tmp/alpine \
-daemonize
echo "[+] virtual machine started, waiting for console"
set +ex
exec 3<> /tmp/alpine.out
#
# FIXME: automate that in vlang, this is a poc
#
hname="debug-alpine.vm"
while true; do
read -t 0.1 -u 3 line
if [ $? -gt 0 -a $? -lt 128 ]; then
echo "[-] read failed: $?"
exit 1
fi
if [ ${#line} -eq 0 ]; then
continue
fi
#echo "${#line}"
echo "${line}"
if [[ ${line} == *"localhost login:"* ]]; then
echo "[+] authenticating root user"
echo "root" > /tmp/alpine.in
fi
if [[ ${line} == *"localhost:~#"* ]]; then
echo "[+] running automated setup-alpine process"
echo "setup-alpine" > /tmp/alpine.in
fi
if [[ ${line} == *" [localhost]" ]]; then
echo "[+] define hostname: $hname"
echo $hname > /tmp/alpine.in
fi
if [[ ${line} == *" [eth0]"* ]]; then
echo "[+] configuring default interface eth0"
echo > /tmp/alpine.in
fi
if [[ ${line} == *" [dhcp]"* ]]; then
echo "[+] configuring eth0 to uses dhcp"
echo > /tmp/alpine.in
fi
if [[ ${line} == *"manual network configuration? (y/n) [n]"* ]]; then
echo > /tmp/alpine.in
echo "[+] waiting for network connectivity"
fi
if [[ ${line} == *"New password:"* ]]; then
echo "[+] setting up root password (root)"
echo "root" > /tmp/alpine.in
fi
if [[ ${line} == *"Retype password:"* ]]; then
echo "root" > /tmp/alpine.in
fi
if [[ ${line} == *"Which NTP client to run"* ]]; then
echo "[+] default ntp server"
echo "busybox" > /tmp/alpine.in
fi
if [[ ${line} == *"are you in? [UTC]"* ]]; then
echo "[+] keeping default utc timezone for now"
echo > /tmp/alpine.in
fi
if [[ ${line} == *"HTTP/FTP proxy URL?"* ]] && [[ ${line} == *" [none]"* ]]; then
echo "[+] skipping proxy configuration"
echo > /tmp/alpine.in
fi
if [[ ${line} == *"Enter mirror number or URL: [1]"* ]]; then
echo "[+] finding and using the fastest mirror"
echo "http://dl-cdn.alpinelinux.org/alpine/" > /tmp/alpine.in
fi
if [[ ${line} == *"Setup a user?"* ]] && [[ ${line} == *" [no]"* ]]; then
echo "[+] skipping additionnal user creation process"
echo > /tmp/alpine.in
fi
if [[ ${line} == *" [openssh]"* ]]; then
echo "[+] installing openssh server"
echo > /tmp/alpine.in
fi
if [[ ${line} == *" [prohibit-password]"* ]]; then
echo "[+] authorizing root login with password"
echo "yes" > /tmp/alpine.in
fi
if [[ ${line} == *"Enter ssh key"* ]] && [[ ${line} == *" [none]"* ]]; then
echo "[+] skipping ssh key for now"
echo > /tmp/alpine.in
fi
if [[ ${line} == *"Which disk"* ]] && [[ ${line} == *" [none]"* ]]; then
echo "[+] configuring root disk: vda"
echo "vdb" > /tmp/alpine.in
fi
if [[ ${line} == *"How would you like to use it?"* ]] && [[ ${line} == *" [?]"* ]]; then
echo "sys" > /tmp/alpine.in
fi
if [[ ${line} == *"and continue? (y/n) [n]"* ]]; then
echo "[+] cleaning up disks and installing operating system"
echo "y" > /tmp/alpine.in
fi
if [[ ${line} == *"Installation is complete."* ]]; then
echo "[+] setup completed, rebooting virtual machine..."
echo "reboot" > /tmp/alpine.in
fi
if [[ ${line} == *"${hname} login:"* ]]; then
echo "[+] ===================================================================="
echo "[+] virtual machine configured, up and running, root password: root"
echo "[+] you can ssh this machine with the local reverse port:"
echo "[+]"
echo "[+] ssh root@localhost -p 2225"
echo "[+]"
break
fi
done
echo "[+] virtual machine initialized"

View File

@@ -1,141 +0,0 @@
module publishing
import cli { Command, Flag }
import freeflowuniverse.herolib.ui.console
// path string // if location on filesystem; if it exists, this takes priority over git_url
// git_url string // location of where the hero scripts are
// git_pull bool // means when getting new repo will pull even when repo is already there
// git_pullreset bool // means we will force a pull and reset old content
// coderoot string //the location of coderoot if its another one
pub fn cmd_publisher(pre_func fn (Command) !) Command {
mut cmd_publisher := Command{
name: 'publisher'
usage: '
## Manage your publications
example:
hero publisher -u https://git.threefold.info/ourworld_holding/info_ourworld/src/branch/develop/heroscript
If you do -gp it will pull newest book content from git and give error if there are local changes.
If you do -gr it will pull newest book content from git and overwrite local changes (careful).
'
description: 'create, edit, show mdbooks'
required_args: 0
execute: cmd_publisher_execute
pre_execute: pre_func
}
// cmd_run_add_flags(mut cmd_publisher)
cmd_publisher.add_flag(Flag{
flag: .string
name: 'name'
abbrev: 'n'
description: 'name of the publication.'
})
cmd_publisher.add_flag(Flag{
flag: .bool
required: false
name: 'edit'
description: 'will open vscode for collections & summary.'
})
cmd_publisher.add_flag(Flag{
flag: .bool
required: false
name: 'open'
abbrev: 'o'
description: 'will open the generated book.'
})
mut cmd_list := Command{
sort_flags: true
name: 'list_books'
execute: cmd_publisher_list_books
description: 'will list existing mdbooks'
pre_execute: pre_func
}
mut cmd_open := Command{
name: 'open'
execute: cmd_publisher_open
description: 'will open the publication with the provided name'
pre_execute: pre_func
}
cmd_open.add_flag(Flag{
flag: .string
name: 'name'
abbrev: 'n'
description: 'name of the publication.'
})
cmd_publisher.add_command(cmd_list)
cmd_publisher.add_command(cmd_open)
// cmdroot.add_command(cmd_publisher)
return cmd_publisher
}
fn cmd_publisher_list_books(cmd Command) ! {
console.print_header('Books:')
books := publisher.list_books()!
for book in books {
console.print_stdout(book.str())
}
}
fn cmd_publisher_open(cmd Command) ! {
name := cmd.flags.get_string('name') or { '' }
publisher.open(name)!
}
fn cmd_publisher_execute(cmd Command) ! {
mut name := cmd.flags.get_string('name') or { '' }
// mut url := cmd.flags.get_string('url') or { '' }
// mut path := cmd.flags.get_string('path') or { '' }
// if path.len > 0 || url.len > 0 {
// // execute the attached playbook
// mut plbook, _ := herocmds.plbook_run(cmd)!
// play(mut plbook)!
// // get name from the book.generate action
// // if name == '' {
// // mut a := plbook.action_get(actor: 'mdbook', name: 'define')!
// // name = a.params.get('name') or { '' }
// // }
// } else {
// publisher_help(cmd)
// }
if name == '' {
console.print_debug('did not find name of book to generate, check in heroscript or specify with --name')
publisher_help(cmd)
exit(1)
}
edit := cmd.flags.get_bool('edit') or { false }
open := cmd.flags.get_bool('open') or { false }
if edit || open {
// mdbook.book_open(name)!
}
if edit {
// publisher.book_edit(name)!
}
}
// fn pre_func(cmd Command) ! {
// herocmds.plbook_run(cmd)!
// }
fn publisher_help(cmd Command) {
console.clear()
console.print_header('Instructions for publisher:')
console.print_lf(1)
console.print_stdout(cmd.help_message())
console.print_lf(5)
}

View File

@@ -1,117 +0,0 @@
module publishing
import freeflowuniverse.herolib.core.playbook { Action }
import freeflowuniverse.herolib.data.paramsparser { Params }
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.core.pathlib
import os
pub fn play(mut plbook playbook.PlayBook) ! {
// first let's configure our publisher
if mut action := plbook.get(filter: 'publisher.configure') {
play_configure(mut action)!
}
// let's add all the collections
for mut action in plbook.find(filter: 'publisher:new_collection')! {
mut p := action.params
play_new_collection(mut p)!
action.done = true
}
// then let's export the doctree with all its collections
publisher.export_tree()!
// now we can start defining books
for mut action in plbook.find(filter: 'book:define')! {
mut p := action.params
play_book_define(mut p)!
action.done = true
}
// finally let's publish the defined books
for mut action in plbook.find(filter: 'book:publish')! {
p := action.params
spawn play_book_publish(p)
action.done = true
}
}
fn play_configure(mut action Action) ! {
mut p := action.params
// Variables removed as they were unused
if p.exists('buildroot') {
_ = p.get('buildroot')!
}
if p.exists('coderoot') {
_ = p.get('coderoot')!
}
if p.exists('publishroot') {
_ = p.get('publishroot')!
}
if p.exists('reset') {
_ = p.get_default_false('reset')
}
action.done = true
}
fn play_new_collection(mut p Params) ! {
url := p.get_default('url', '')!
path := p.get_default('path', '')!
// name removed as unused
reset := p.get_default_false('reset')
pull := p.get_default_false('pull')
mut tree := publisher.tree
tree.scan_concurrent(
path: path
git_url: url
git_reset: reset
git_pull: pull
)!
publisher.tree = tree
}
fn play_book_define(mut params Params) ! {
summary_url := params.get_default('summary_url', '')!
summary_path := if summary_url == '' {
params.get('summary_path') or {
return error('both summary url and summary path cannot be empty')
}
} else {
get_summary_path(summary_url)!
}
name := params.get('name')!
publisher.new_book(
name: name
title: params.get_default('title', name)!
collections: params.get_list('collections')!
summary_path: summary_path
)!
}
fn play_book_publish(p Params) ! {
name := p.get('name')!
params := p.decode[PublishParams]()!
// production removed as unused
publisher.publish(name, params)!
}
fn get_summary_path(summary_url string) !string {
mut gs := gittools.get()!
mut repo := gs.get_repo(url: summary_url, reset: false, pull: false)!
// get the path corresponding to the summary_url dir/file
summary_path := repo.get_path_of_url(summary_url)!
mut summary_dir := pathlib.get_dir(path: os.dir(summary_path))!
summary_file := summary_dir.file_get_ignorecase('summary.md') or {
summary_dir = summary_dir.parent()!
summary_dir.file_get_ignorecase('summary.md') or {
return error('summary from git needs to be dir or file: ${err}')
}
}
return summary_file.path
}

View File

@@ -1,118 +0,0 @@
module publishing
import os
import freeflowuniverse.herolib.core.pathlib { Path }
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.data.doctree { Tree }
import freeflowuniverse.herolib.web.mdbook
__global (
publisher Publisher
)
pub struct Publisher {
pub mut:
tree Tree
books map[string]Book
root_path string = os.join_path(os.home_dir(), 'hero/publisher')
}
// returns the directory of a given collection
fn (p Publisher) collection_directory(name string) ?Path {
mut cols_dir := p.collections_directory()
return cols_dir.dir_get(name) or { return none }
}
pub fn (p Publisher) collections_directory() Path {
collections_path := '${p.root_path}/collections'
return pathlib.get_dir(path: collections_path) or { panic('this should never happen ${err}') }
}
pub fn (p Publisher) build_directory() Path {
build_path := '${p.root_path}/build'
return pathlib.get_dir(path: build_path) or { panic('this should never happen ${err}') }
}
pub fn (p Publisher) publish_directory() Path {
publish_path := '${p.root_path}/publish'
return pathlib.get_dir(path: publish_path) or { panic('this should never happen ${err}') }
}
@[params]
pub struct PublishParams {
production bool
}
pub fn (p Publisher) publish(name string, params PublishParams) ! {
if name !in p.books {
return error('book ${name} does not exist')
}
p.books[name].publish(p.publish_directory().path, params)!
}
pub struct Book {
name string
title string
description string
path string
}
pub fn (book Book) publish(path string, params PublishParams) ! {
os.execute_opt('
cd ${book.path}
mdbook build --dest-dir ${path}/${book.name}')!
}
pub struct NewBook {
name string
title string
description string
summary_path string
collections []string
}
pub fn (p Publisher) new_book(book NewBook) ! {
mut mdbooks := mdbook.get()!
mut cfg := mdbooks
cfg.path_build = p.build_directory().path
cfg.path_publish = p.publish_directory().path
mut col_paths := []string{}
for col in book.collections {
col_dir := p.collection_directory(col) or {
return error('Collection ${col} not found in publisher tree')
}
col_paths << col_dir.path
}
_ := mdbooks.generate(
name: book.name
title: book.title
summary_path: book.summary_path
collections: col_paths
)!
publisher.books[book.name] = Book{
name: book.name
title: book.title
description: book.description
path: '${p.build_directory().path}/${book.name}'
}
}
pub fn (book Book) print() {
println('Book: ${book.name}\n- title: ${book.title}\n- description: ${book.description}\n- path: ${book.path}')
}
pub fn (p Publisher) open(name string) ! {
p.publish(name)!
cmd := 'open \'${p.publish_directory().path}/${name}/index.html\''
osal.exec(cmd: cmd)!
}
pub fn (p Publisher) export_tree() ! {
publisher.tree.export(destination: '${publisher.root_path}/collections')!
}
pub fn (p Publisher) list_books() ![]Book {
return p.books.values()
}
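
A minimal usage sketch of the Publisher API above, with hypothetical book and collection names, run from inside this module once the global `publisher` and its doctree have been populated (for example via the play actions):

```v
// assumes the exported tree already contains a collection named 'docs'
publisher.new_book(
	name: 'handbook'
	title: 'Team Handbook'
	collections: ['docs']
	summary_path: '/tmp/handbook/summary.md'
)!
publisher.publish('handbook', production: false)!
for book in publisher.list_books()! {
	book.print()
}
```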

View File

@@ -0,0 +1,38 @@
module biz
import freeflowuniverse.herolib.hero.models.core
// Company represents a business entity with all necessary details
pub struct Company {
core.Base
pub mut:
name string // Company legal name @[index: 'company_name_idx']
registration_number string // Official registration number @[index: 'company_reg_idx']
incorporation_date u64 // Unix timestamp
fiscal_year_end string // Format: MM-DD
email string
phone string
website string
address string
business_type BusinessType
industry string // Industry classification
description string // Company description
status CompanyStatus
}
// CompanyStatus tracks the operational state of a company
pub enum CompanyStatus {
pending_payment
active
suspended
inactive
}
// BusinessType categorizes the company structure
pub enum BusinessType {
coop
single
twin
starter
global
}

View File

@@ -0,0 +1,28 @@
module biz
import freeflowuniverse.herolib.hero.models.core
// Payment handles financial transactions for companies
pub struct Payment {
core.Base
pub mut:
payment_intent_id string // Stripe payment intent ID @[index: 'payment_intent_idx']
company_id u32 // Associated company @[index: 'payment_company_idx']
payment_plan string // monthly/yearly/two_year
setup_fee f64
monthly_fee f64
total_amount f64
currency string // Default: usd
status PaymentStatus
stripe_customer_id string
completed_at u64 // Unix timestamp
}
// PaymentStatus tracks the lifecycle of a payment
pub enum PaymentStatus {
pending
processing
completed
failed
refunded
}

View File

@@ -0,0 +1,39 @@
module biz
import freeflowuniverse.herolib.hero.models.core
// Product represents goods or services offered by a company
pub struct Product {
core.Base
pub mut:
name string
description string
price f64
type_ ProductType
category string
status ProductStatus
max_amount u16
purchase_till u64 // Unix timestamp
active_till u64 // Unix timestamp
components []ProductComponent
}
// ProductComponent represents sub-parts of a complex product
pub struct ProductComponent {
pub mut:
name string
description string
quantity u32
}
// ProductType differentiates between products and services
pub enum ProductType {
product
service
}
// ProductStatus indicates availability
pub enum ProductStatus {
available
unavailable
}

View File

@@ -0,0 +1,35 @@
module biz
import freeflowuniverse.herolib.hero.models.core
// Sale represents a transaction linking buyers to products
pub struct Sale {
core.Base
pub mut:
company_id u32
buyer_id u32
transaction_id u32
total_amount f64
status SaleStatus
sale_date u64 // Unix timestamp
items []SaleItem
notes string
}
// SaleItem captures product details at time of sale
pub struct SaleItem {
pub mut:
product_id u32
name string // Product name snapshot
quantity i32
unit_price f64
subtotal f64
service_active_until u64 // Optional service expiry
}
// SaleStatus tracks transaction state
pub enum SaleStatus {
pending
completed
cancelled
}

View File

@@ -0,0 +1,22 @@
module biz
import freeflowuniverse.herolib.hero.models.core
// Shareholder tracks company ownership details
pub struct Shareholder {
core.Base
pub mut:
company_id u32
user_id u32
name string
shares f64
percentage f64
type_ ShareholderType
since u64 // Unix timestamp
}
// ShareholderType distinguishes between individual and corporate owners
pub enum ShareholderType {
individual
corporate
}

View File

@@ -0,0 +1,15 @@
module calendar
import freeflowuniverse.herolib.hero.models.core
// Calendar represents a calendar with events and scheduling capabilities
pub struct Calendar {
core.Base
pub mut:
name string @[index]
description string
color string // hex color code
timezone string
owner_id u32 @[index]
is_public bool
}

View File

@@ -0,0 +1,36 @@
module calendar
import freeflowuniverse.herolib.hero.models.core
// Contact represents a contact or address book entry
pub struct Contact {
core.Base
pub mut:
name string @[index]
email string @[index]
phone string
address string
company string
job_title string
notes string
tags []string
birthday u64
is_favorite bool
}
// ContactGroup represents a group of contacts
pub struct ContactGroup {
core.Base
pub mut:
name string @[index]
description string
color string
}
// ContactGroupMembership links contacts to groups
pub struct ContactGroupMembership {
core.Base
pub mut:
contact_id u32 @[index]
group_id u32 @[index]
}

View File

@@ -0,0 +1,53 @@
module calendar
import freeflowuniverse.herolib.hero.models.core
// EventStatus represents the current status of an event
pub enum EventStatus {
scheduled
ongoing
completed
cancelled
postponed
}
// EventType categorizes different types of events
pub enum EventType {
meeting
appointment
reminder
task
call
conference
}
// Event represents a calendar event
pub struct Event {
core.Base
pub mut:
calendar_id u32 @[index]
title string @[index]
description string
start_time u64 @[index]
end_time u64 @[index]
location string
status EventStatus
event_type EventType
priority u8 // 1-5 scale
is_all_day bool
recurrence_rule string // RFC 5545 recurrence rule
parent_event_id u32 // for recurring events
}
// EventParticipant represents a participant in an event
pub struct EventParticipant {
core.Base
pub mut:
event_id u32 @[index]
user_id u32 @[index]
email string @[index]
name string
role string // attendee, organizer, optional
status string // accepted, declined, tentative, pending
response_time u64
}

View File

@@ -0,0 +1,49 @@
module calendar
import freeflowuniverse.herolib.hero.models.core
// MessageStatus represents the delivery status of a message
pub enum MessageStatus {
draft
sent
delivered
read
failed
}
// MessageType categorizes different types of messages
pub enum MessageType {
email
sms
notification
reminder
}
// Message represents a communication message
pub struct Message {
core.Base
pub mut:
sender_id u32 @[index]
recipient_id u32 @[index]
subject string
body string
message_type MessageType
status MessageStatus
scheduled_at u64
sent_at u64
read_at u64
priority u8 // 1-5 scale
attachments []string // file paths or URLs
tags []string
}
// Reminder represents a scheduled reminder
pub struct Reminder {
core.Base
pub mut:
event_id u32 @[index]
message string
reminder_time u64 @[index]
is_sent bool
snooze_count u8
}

View File

@@ -0,0 +1,32 @@
module circle
import freeflowuniverse.herolib.hero.models.core
// Circle represents a circle entity with configuration and metadata
@[heap]
pub struct Circle {
core.Base
pub mut:
name string // Human-readable name of the circle
description string // Detailed description of the circle's purpose
domain string // Primary domain name for the circle @[index]
config CircleConfig // Configuration settings for the circle
status CircleStatus // Current operational status
}
// CircleConfig holds configuration settings for a circle
pub struct CircleConfig {
pub mut:
max_members u32 // Maximum number of members allowed
allow_guests bool // Whether to allow guest access
auto_approve bool // Whether new members are auto-approved
theme string // Visual theme identifier
}
// CircleStatus represents the operational status of a circle
pub enum CircleStatus {
active
inactive
suspended
archived
}

View File

@@ -0,0 +1,33 @@
module circle
import freeflowuniverse.herolib.hero.models.core
// Member represents a member within a circle
pub struct Member {
core.Base
pub mut:
circle_id u32 // Reference to the circle this member belongs to @[index]
user_id u32 // Reference to the user entity @[index]
role MemberRole // Member's role within the circle
status MemberStatus // Current membership status
joined_at u64 // Unix timestamp when member joined
invited_by u32 // User ID of who invited this member
permissions []string // List of custom permissions
}
// MemberRole defines the possible roles a member can have
pub enum MemberRole {
owner
admin
moderator
member
guest
}
// MemberStatus represents the current status of membership
pub enum MemberStatus {
active
pending
suspended
removed
}

View File

@@ -0,0 +1,27 @@
module circle
import freeflowuniverse.herolib.hero.models.core
// Name represents a domain name configuration for a circle
pub struct Name {
core.Base
pub mut:
circle_id u32 // Reference to the circle this name belongs to @[index]
domain string // The actual domain name @[index]
subdomain string // Optional subdomain
record_type NameType // Type of DNS record
value string // DNS record value/target
priority u32 // Priority for MX records
ttl u32 // Time to live in seconds
is_active bool // Whether this record is currently active
}
// NameType defines the supported DNS record types
pub enum NameType {
a
aaaa
cname
mx
txt
srv
}

View File

@@ -0,0 +1,33 @@
module circle
import freeflowuniverse.herolib.hero.models.core
// Wallet represents a wallet associated with a circle for financial operations
pub struct Wallet {
core.Base
pub mut:
circle_id u32 // Reference to the circle this wallet belongs to @[index]
address string // Blockchain address for this wallet @[index]
wallet_type WalletType // Type of wallet (custodial/non-custodial)
balance f64 // Current balance in the wallet
currency string // Currency type (e.g., "USD", "BTC", "ETH")
is_primary bool // Whether this is the primary wallet for the circle
status WalletStatus // Current wallet status
last_activity u64 // Unix timestamp of last transaction
}
// WalletType defines the types of wallets supported
pub enum WalletType {
custodial
non_custodial
hardware
software
}
// WalletStatus represents the operational status of a wallet
pub enum WalletStatus {
active
inactive
frozen
archived
}

View File

@@ -0,0 +1,12 @@
module core
// Base provides common fields for all models
pub struct Base {
pub mut:
id u32
created u64 // Unix timestamp of creation
updated u64 // Unix timestamp of last update
deleted bool
version u32
comments []Comment
}

View File

@@ -0,0 +1,20 @@
module core
// Comment represents a generic comment that can be associated with any model
// It supports threaded conversations with parent/child relationships
pub struct Comment {
pub mut:
id u32 // Unique identifier for the comment @[index]
created_at u64 // Unix timestamp when the comment was created
updated_at u64 // Unix timestamp when the comment was last updated
user_id u32 // ID of the user who posted this comment @[index]
content string // The actual text content of the comment
// Optional ID of the parent comment for threaded conversations; 0 indicates a top-level comment
parent_comment_id u32
}

View File

@@ -0,0 +1,27 @@
module finance
import freeflowuniverse.herolib.hero.models.core
// Account represents a financial account for tracking balances and transactions
// Supports multiple account types (checking, savings, investment, etc.)
pub struct Account {
core.Base
pub mut:
name string // User-friendly account name
account_type AccountType
balance f64 // Current balance in the account's currency
currency string // Currency code (USD, EUR, etc.)
description string // Optional description of the account
is_active bool // Whether the account is currently active
}
// AccountType defines the different types of financial accounts
pub enum AccountType {
checking
savings
investment
credit
loan
crypto
other
}

View File

@@ -0,0 +1,34 @@
module finance
import freeflowuniverse.herolib.hero.models.core
// Asset represents any valuable resource owned by an entity
// Can be financial (stocks, bonds) or physical (real estate, commodities)
pub struct Asset {
core.Base
pub mut:
name string // Asset name or identifier
symbol string // Trading symbol or identifier @[index]
asset_type AssetType
quantity f64 // Amount of the asset held
unit_price f64 // Price per unit in the asset's currency
total_value f64 // total_value = quantity * unit_price
currency string // Currency for pricing (USD, EUR, etc.)
category string // Asset category (stocks, bonds, crypto, etc.)
exchange string // Exchange where asset is traded
description string // Detailed description of the asset
is_active bool // Whether the asset is currently tracked
purchase_date u64 // Unix timestamp of purchase/acquisition
}
// AssetType defines the classification of assets
pub enum AssetType {
stock
bond
crypto
commodity
real_estate
currency
nft
other
}

View File

@@ -0,0 +1,29 @@
module finance
import freeflowuniverse.herolib.hero.models.core
// Marketplace represents a platform for buying and selling goods/services
// Can be internal or external marketplace configurations
pub struct Marketplace {
core.Base
pub mut:
name string // Marketplace name (e.g., "Amazon", "eBay") @[index]
marketplace_type MarketplaceType
api_endpoint string // API endpoint for marketplace integration
api_key string // Authentication key for API access
currency string // Default currency for transactions
fee_percentage f64 // Marketplace fee as percentage (0.0-100.0)
is_active bool // Whether marketplace is currently enabled
description string // Detailed marketplace description
support_email string // Contact email for support issues
}
// MarketplaceType defines the type of marketplace platform
pub enum MarketplaceType {
centralized
decentralized
peer_to_peer
auction
classified
other
}

View File

@@ -0,0 +1,24 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// CommitteeMember represents a member of a committee
pub struct CommitteeMember {
core.Base
pub mut:
user_id u32
name string
role CommitteeRole
joined_date u64 // Unix timestamp
notes string
}
// Committee represents a committee in the governance system
pub struct Committee {
core.Base
pub mut:
company_id u32 @[index]
name string @[index]
description string
members []CommitteeMember
}

View File

@@ -0,0 +1,28 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// BusinessType represents the type of a business
pub struct BusinessType {
pub mut:
type_name string
description string
}
// Company represents a company in the governance system
pub struct Company {
core.Base
pub mut:
name string @[index]
registration_number string @[index]
incorporation_date u64 // Unix timestamp
fiscal_year_end string
email string
phone string
website string
address string
business_type BusinessType
industry string
description string
status CompanyStatus
}

View File

@@ -0,0 +1,30 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// Attendee represents an attendee of a meeting
pub struct Attendee {
pub mut:
user_id u32
name string
role string
status AttendanceStatus
notes string
}
// Meeting represents a meeting in the governance system
pub struct Meeting {
core.Base
pub mut:
company_id u32 @[index]
title string @[index]
description string
meeting_type MeetingType
status MeetingStatus
start_time u64 // Unix timestamp
end_time u64 // Unix timestamp
location string
agenda string
minutes string
attendees []Attendee
}

View File

@@ -0,0 +1,18 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// Resolution represents a resolution in the governance system
pub struct Resolution {
core.Base
pub mut:
company_id u32 @[index]
title string @[index]
description string
resolution_type ResolutionType
status ResolutionStatus
proposed_date u64 // Unix timestamp
effective_date ?u64 // Unix timestamp
expiry_date ?u64 // Unix timestamp
approvals []string
}

View File

@@ -0,0 +1,15 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// Shareholder represents a shareholder in the governance system
pub struct Shareholder {
core.Base
pub mut:
company_id u32 @[index]
name string @[index]
shareholder_type ShareholderType
contact_info string @[index]
shares u32
percentage f64
}

View File

@@ -0,0 +1,82 @@
module gov
pub enum CompanyStatus {
active
inactive
dissolved
suspended
pending
}
pub enum ShareholderType {
individual
corporate
trust
partnership
government
other
}
pub enum CommitteeRole {
chair
vice_chair
secretary
treasurer
member
observer
advisor
}
pub enum MeetingStatus {
scheduled
in_progress
completed
cancelled
}
pub enum MeetingType {
board_meeting
committee_meeting
general_assembly
annual_general_meeting
extraordinary_general_meeting
other
}
pub enum AttendanceStatus {
invited
confirmed
declined
attended
absent
}
pub enum ResolutionStatus {
draft
proposed
approved
rejected
expired
}
pub enum ResolutionType {
ordinary
special
unanimous
written
other
}
pub enum VoteStatus {
draft
open
closed
cancelled
}
pub enum VoteOption {
yes
no
abstain
custom
}

View File

@@ -0,0 +1,12 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// User represents a user in the governance system
pub struct User {
core.Base
pub mut:
name string @[index]
email string @[index]
role string
}

View File

@@ -0,0 +1,27 @@
module gov
import freeflowuniverse.herolib.hero.models.core
// Ballot represents a ballot cast in a vote
pub struct Ballot {
pub mut:
user_id u32
option VoteOption
weight f64
cast_at u64 // Unix timestamp
notes string
}
// Vote represents a vote in the governance system
pub struct Vote {
core.Base
pub mut:
company_id u32 @[index]
resolution_id u32 @[index]
title string @[index]
description string
status VoteStatus
start_date u64 // Unix timestamp
end_date u64 // Unix timestamp
ballots []Ballot
}

View File

@@ -0,0 +1,25 @@
module governance
import freeflowuniverse.herolib.hero.models.core
pub struct GovernanceActivity {
core.Base
pub mut:
company_id u32 // Reference to company @[index]
activity_type string // Type of activity (proposal, vote, meeting, etc.) @[index]
description string // Detailed description
initiator_id u32 // User who initiated @[index]
target_id u32 // Target entity ID
target_type string // Type of target (user, proposal, etc.)
metadata string // JSON metadata
}
// Activity types
pub enum ActivityType {
proposal_created
proposal_updated
vote_cast
meeting_scheduled
resolution_passed
shareholder_added
}

View File

@@ -0,0 +1,17 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// AttachedFile represents files attached to governance entities
pub struct AttachedFile {
core.Base
pub mut:
entity_id u32 // ID of entity this file is attached to @[index]
entity_type string // Type of entity (proposal, meeting, etc.) @[index]
filename string // Original filename
content_type string // MIME type
size u64 // File size in bytes
path string // Storage path
description string // Optional description
uploaded_by u32 // User who uploaded @[index]
}

View File

@@ -0,0 +1,39 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// CommitteeType defines committee categories
pub enum CommitteeType {
board
executive
audit
compensation
nomination
governance
finance
risk
other
}
// CommitteeStatus tracks committee state
pub enum CommitteeStatus {
active
inactive
dissolved
}
// Committee represents a governance committee
pub struct Committee {
core.Base
pub mut:
company_id u32 // Reference to company @[index]
name string // Committee name @[index]
committee_type CommitteeType // Type of committee
description string // Detailed description
status CommitteeStatus // Current state
chairman_id u32 // Committee chair @[index]
term_start u64 // Start of term
term_end u64 // End of term
meeting_frequency string // e.g., "monthly", "quarterly"
quorum_size u32 // Minimum members for quorum
}

View File

@@ -0,0 +1,45 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// CompanyType categorizes companies
pub enum CompanyType {
corporation
llc
partnership
cooperative
nonprofit
}
// CompanyStatus tracks company state
pub enum CompanyStatus {
active
inactive
dissolved
merged
acquired
}
// Company represents a governance entity
pub struct Company {
core.Base
pub mut:
name string // Company name @[index]
legal_name string // Legal entity name @[index]
company_type CompanyType // Type of company
status CompanyStatus // Current state
incorporation_date u64 // Unix timestamp
jurisdiction string // Country/state of incorporation
registration_number string // Government registration @[index]
tax_id string // Tax identification
address string // Primary address
headquarters string // City/country of HQ
website string // Company website
phone string // Contact phone
email string // Contact email
shares_authorized u64 // Total authorized shares
shares_issued u64 // Currently issued shares
par_value f64 // Par value per share
currency string // Currency code
fiscal_year_end string // "MM-DD" format
}

View File

@@ -0,0 +1,44 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// MeetingType defines meeting categories
pub enum MeetingType {
annual_general
extraordinary_general
board
committee
special
}
// MeetingStatus tracks meeting state
pub enum MeetingStatus {
scheduled
in_progress
completed
cancelled
postponed
}
// Meeting represents a governance meeting
pub struct Meeting {
core.Base
pub mut:
company_id u32 // Reference to company @[index]
committee_id u32 // Reference to committee @[index]
meeting_type MeetingType // Type of meeting
title string // Meeting title @[index]
description string // Detailed description
status MeetingStatus // Current state
scheduled_start u64 // Scheduled start time
scheduled_end u64 // Scheduled end time
actual_start u64 // Actual start time
actual_end u64 // Actual end time
location string // Physical/virtual location
meeting_url string // Video conference link
agenda string // Meeting agenda
minutes string // Meeting minutes
quorum_required u32 // Members required for quorum
quorum_present bool // Whether quorum was achieved
created_by u32 // User who scheduled @[index]
}

View File

@@ -0,0 +1,47 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// ProposalStatus tracks the state of a governance proposal
pub enum ProposalStatus {
draft
pending_review
active
voting
passed
rejected
implemented
cancelled
}
// ProposalType categorizes proposals
pub enum ProposalType {
constitutional
policy
budget
election
merger
dissolution
other
}
// Proposal represents a governance proposal
pub struct Proposal {
core.Base
pub mut:
company_id u32 // Reference to company @[index]
title string // Proposal title @[index]
description string // Detailed description
proposal_type ProposalType // Category of proposal
status ProposalStatus // Current state
proposer_id u32 // User who created @[index]
target_committee_id u32 // Target committee @[index]
voting_start u64 // Start timestamp
voting_end u64 // End timestamp
quorum_required f64 // Percentage required
approval_threshold f64 // Percentage for approval
votes_for u32 // Votes in favor
votes_against u32 // Votes against
votes_abstain u32 // Abstention votes
implementation_notes string // Post-implementation notes
}

View File

@@ -0,0 +1,40 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// ResolutionStatus tracks resolution state
pub enum ResolutionStatus {
proposed
voting
passed
failed
implemented
withdrawn
}
// ResolutionType categorizes resolutions
pub enum ResolutionType {
ordinary
special
unanimous
}
// Resolution represents a formal resolution
pub struct Resolution {
core.Base
pub mut:
company_id u32 // Reference to company @[index]
meeting_id u32 // Reference to meeting @[index]
proposal_id u32 // Reference to proposal @[index]
resolution_number string // Unique resolution number @[index]
title string // Resolution title @[index]
description string // Detailed description
resolution_type ResolutionType // Category
status ResolutionStatus // Current state
mover_id u32 // Person who moved @[index]
seconder_id u32 // Person who seconded @[index]
votes_for u32 // Votes in favor
votes_against u32 // Votes against
votes_abstain u32 // Abstention votes
effective_date u64 // When resolution takes effect
}

View File

@@ -0,0 +1,48 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// UserType defines user categories
pub enum UserType {
individual
corporate
system
}
// UserStatus tracks user state
pub enum UserStatus {
active
inactive
suspended
pending
}
// UserRole defines governance roles
pub enum UserRole {
shareholder
director
officer
employee
auditor
consultant
administrator
}
// User represents a governance participant
pub struct User {
core.Base
pub mut:
username string // Unique username @[index]
email string // Email address @[index]
first_name string // First name
last_name string // Last name
display_name string // Preferred display name
user_type UserType // Type of user
status UserStatus // Current state
roles []UserRole // Governance roles
company_id u32 // Primary company @[index]
phone string // Contact phone
address string // Contact address
profile_picture string // Profile picture URL
last_login u64 // Last login timestamp
}

View File

@@ -0,0 +1,34 @@
module governance
import freeflowuniverse.herolib.hero.models.core
// VoteValue represents voting choices
pub enum VoteValue {
yes
no
abstain
}
// VoteStatus tracks vote state
pub enum VoteStatus {
pending
cast
changed
retracted
}
// Vote represents a governance vote
pub struct Vote {
core.Base
pub mut:
proposal_id u32 // Reference to proposal @[index]
resolution_id u32 // Reference to resolution @[index]
voter_id u32 // User who voted @[index]
company_id u32 // Reference to company @[index]
vote_value VoteValue // Voting choice
status VoteStatus // Current state
weight u32 // Vote weight (for weighted voting)
comments string // Optional comments
proxy_voter_id u32 // If voting by proxy @[index]
ip_address string // IP address for verification
}

View File

@@ -0,0 +1,55 @@
module legal
import freeflowuniverse.herolib.hero.models.core
// ContractStatus represents the current state of a legal contract
pub enum ContractStatus {
draft
pending
active
expired
terminated
cancelled
}
// ContractType categorizes the type of legal agreement
pub enum ContractType {
service
sales
lease
employment
partnership
nda
other
}
// Contract represents a legal agreement between parties
// This model stores essential information about contracts including parties, terms, and status
pub struct Contract {
core.Base
pub mut:
title string // Human-readable title of the contract @[index]
contract_type ContractType // Type/category of the contract
status ContractStatus // Current status of the contract
party_a string // First party identifier (company, individual, etc.) @[index]
party_b string // Second party identifier @[index]
effective_date u64 // Unix timestamp when contract becomes effective
expiration_date u64 // Unix timestamp when contract expires
total_value f64 // Monetary value of the contract
currency string // Currency code (USD, EUR, etc.)
terms string // Full text of the contract terms
signature_date u64 // Unix timestamp when contract was signed
version string // Version identifier for contract revisions
parent_contract_id ?u32 // Optional reference to parent contract for amendments @[index]
attachment_urls []string // URLs or paths to attached documents
notes string // Additional notes and comments
}

View File

@@ -0,0 +1,30 @@
module library
import freeflowuniverse.herolib.hero.models.core
// TocEntry represents a table of contents entry for a book
pub struct TocEntry {
pub mut:
// Title of the chapter/section
title string
// Page number (index in the pages array)
page u32
// Optional subsections
subsections []TocEntry
}
// Book represents a Book library item (collection of markdown pages with TOC)
pub struct Book {
core.Base
pub mut:
// Title of the book
title string @[index]
// Optional description of the book
description ?string
// Table of contents
table_of_contents []TocEntry
// Pages content (markdown strings)
pages []string
}

View File

@@ -0,0 +1,27 @@
module library
import freeflowuniverse.herolib.hero.models.core
// Collection represents a collection of library items
pub struct Collection {
core.Base
pub mut:
// Title of the collection
title string @[index]
// Optional description of the collection
description ?string
// List of image item IDs belonging to this collection
images []u32
// List of PDF item IDs belonging to this collection
pdfs []u32
// List of Markdown item IDs belonging to this collection
markdowns []u32
// List of Book item IDs belonging to this collection
books []u32
// List of Slides item IDs belonging to this collection
slides []u32
}

View File

@@ -0,0 +1,21 @@
module library
import freeflowuniverse.herolib.hero.models.core
// Image represents an Image library item
pub struct Image {
core.Base
pub mut:
// Title of the image
title string @[index]
// Optional description of the image
description ?string
// URL of the image
url string
// Width of the image in pixels
width u32
// Height of the image in pixels
height u32
}

View File

@@ -0,0 +1,15 @@
module library
import freeflowuniverse.herolib.hero.models.core
// Markdown represents a Markdown document library item
pub struct Markdown {
core.Base
pub mut:
// Title of the document
title string @[index]
// Optional description of the document
description ?string
// The markdown content
content string
}

View File

@@ -0,0 +1,18 @@
module library
import freeflowuniverse.herolib.hero.models.core
// Pdf represents a PDF document library item
pub struct Pdf {
core.Base
pub mut:
// Title of the PDF
title string @[index]
// Optional description of the PDF
description ?string
// URL of the PDF file
url string
// Number of pages in the PDF
page_count u32
}

Some files were not shown because too many files have changed in this diff.