diff --git a/aiprompts/instructions/herodb_base.md b/aiprompts/instructions/herodb_base.md
new file mode 100644
index 00000000..eff9fac3
--- /dev/null
+++ b/aiprompts/instructions/herodb_base.md
@@ -0,0 +1,1440 @@
+
+/Users/despiegk/code/github/incubaid/herolib
+├── .github
+│ └── workflows
+├── .zed
+├── aiprompts
+│ ├── .openhands
+│ ├── bizmodel
+│ ├── documentor
+│ ├── docusaurus
+│ ├── herolib_advanced
+│ ├── herolib_core
+│ ├── instructions_archive
+│ │ ├── models_from_v
+│ │ └── processing
+│ ├── v_advanced
+│ ├── v_core
+│ │ ├── array
+│ │ ├── benchmark
+│ │ ├── builtin
+│ │ ├── crypto
+│ │ ├── encoding
+│ │ ├── io
+│ │ ├── json
+│ │ ├── json2
+│ │ ├── maps
+│ │ ├── net
+│ │ ├── orm
+│ │ ├── regex
+│ │ ├── string
+│ │ ├── time
+│ │ ├── toml
+│ │ └── veb
+│ └── v_veb_webserver
+├── cli
+├── docker
+│ ├── herolib
+│ │ └── scripts
+│ └── postgresql
+├── examples
+│ ├── aiexamples
+│ ├── biztools
+│ │ ├── _archive
+│ │ ├── bizmodel_docusaurus
+│ │ │ └── archive
+│ │ │ └── img
+│ │ └── examples
+│ │ └── full
+│ ├── builder
+│ │ └── remote_executor
+│ ├── clients
+│ ├── core
+│ │ ├── base
+│ │ ├── db
+│ │ ├── logger
+│ │ ├── openapi
+│ │ │ └── gitea
+│ │ ├── openrpc
+│ │ │ └── examples
+│ │ │ ├── openrpc_client
+│ │ │ ├── openrpc_docs
+│ │ │ └── petstore_client
+│ │ └── pathlib
+│ │ └── examples
+│ │ ├── list
+│ │ ├── md5
+│ │ ├── scanner
+│ │ └── sha256
+│ ├── data
+│ │ ├── location
+│ │ ├── ourdb_syncer
+│ │ ├── params
+│ │ │ ├── args
+│ │ │ │ └── data
+│ │ │ └── paramsfilter
+│ │ └── resp
+│ ├── develop
+│ │ ├── codewalker
+│ │ ├── gittools
+│ │ ├── heroprompt
+│ │ ├── ipapi
+│ │ ├── juggler
+│ │ │ └── hero
+│ │ │ └── playbook
+│ │ ├── luadns
+│ │ ├── openai
+│ │ ├── runpod
+│ │ ├── vastai
+│ │ └── wireguard
+│ ├── hero
+│ │ ├── db
+│ │ ├── generation
+│ │ │ ├── blank_generation
+│ │ │ └── openapi_generation
+│ │ │ └── example_actor
+│ │ │ └── specs
+│ │ ├── herofs
+│ │ ├── heromodels
+│ │ └── openapi
+│ │ └── data
+│ ├── installers
+│ │ ├── db
+│ │ ├── infra
+│ │ ├── lang
+│ │ ├── net
+│ │ ├── sysadmintools
+│ │ ├── threefold
+│ │ └── virt
+│ ├── installers_remote
+│ ├── jobs
+│ ├── lang
+│ │ └── python
+│ ├── mcp
+│ │ ├── http_demo
+│ │ ├── http_server
+│ │ ├── inspector
+│ │ └── simple_http
+│ ├── osal
+│ │ ├── coredns
+│ │ ├── download
+│ │ ├── ping
+│ │ ├── process
+│ │ │ ├── process_bash
+│ │ │ └── process_python
+│ │ ├── rsync
+│ │ ├── sandbox
+│ │ │ └── examples
+│ │ ├── sshagent
+│ │ ├── tmux
+│ │ │ └── heroscripts
+│ │ ├── ubuntu
+│ │ └── zinit
+│ │ ├── rpc
+│ │ └── simple
+│ ├── schemas
+│ │ ├── example
+│ │ │ └── testdata
+│ │ ├── openapi
+│ │ │ └── codegen
+│ │ └── openrpc
+│ ├── sshagent
+│ ├── threefold
+│ │ ├── grid
+│ │ │ ├── deploy
+│ │ │ └── utils
+│ │ ├── gridproxy
+│ │ ├── holochain
+│ │ ├── incatokens
+│ │ │ └── data
+│ │ ├── solana
+│ │ └── tfgrid3deployer
+│ │ ├── gw_over_wireguard
+│ │ ├── heroscript
+│ │ ├── hetzner
+│ │ ├── open_webui_gw
+│ │ └── vm_gw_caddy
+│ ├── tools
+│ │ └── imagemagick
+│ │ └── .backup
+│ ├── ui
+│ │ ├── console
+│ │ │ ├── console2
+│ │ │ └── flow1
+│ │ └── telegram
+│ ├── vfs
+│ │ └── vfs_db
+│ ├── virt
+│ │ ├── daguserver
+│ │ ├── docker
+│ │ │ └── ai_web_ui
+│ │ ├── heropods
+│ │ ├── hetzner
+│ │ ├── lima
+│ │ ├── podman
+│ │ └── windows
+│ ├── web
+│ │ ├── doctree
+│ │ │ └── content
+│ │ └── markdown_renderer
+│ └── webdav
+├── lib
+│ ├── ai
+│ │ ├── escalayer
+│ │ ├── mcp
+│ │ │ ├── baobab
+│ │ │ ├── cmd
+│ │ │ ├── mcpgen
+│ │ │ │ ├── schemas
+│ │ │ │ └── templates
+│ │ │ ├── pugconvert
+│ │ │ │ ├── cmd
+│ │ │ │ ├── logic
+│ │ │ │ │ └── templates
+│ │ │ │ └── mcp
+│ │ │ ├── rhai
+│ │ │ │ ├── cmd
+│ │ │ │ ├── example
+│ │ │ │ ├── logic
+│ │ │ │ │ ├── prompts
+│ │ │ │ │ └── templates
+│ │ │ │ └── mcp
+│ │ │ ├── rust
+│ │ │ └── vcode
+│ │ │ ├── cmd
+│ │ │ ├── logic
+│ │ │ └── mcp
+│ │ └── utils
+│ ├── biz
+│ │ ├── bizmodel
+│ │ │ ├── docu
+│ │ │ ├── exampledata
+│ │ │ └── templates
+│ │ ├── investortool
+│ │ │ └── simulator
+│ │ │ └── templates
+│ │ ├── planner
+│ │ │ ├── examples
+│ │ │ └── models
+│ │ └── spreadsheet
+│ │ └── docu
+│ ├── builder
+│ ├── clients
+│ │ ├── giteaclient
+│ │ ├── ipapi
+│ │ ├── jina
+│ │ │ └── py_specs
+│ │ ├── livekit
+│ │ ├── mailclient
+│ │ ├── meilisearch
+│ │ ├── mycelium
+│ │ ├── mycelium_rpc
+│ │ ├── openai
+│ │ │ ├── audio
+│ │ │ ├── embeddings
+│ │ │ ├── files
+│ │ │ ├── finetune
+│ │ │ ├── images
+│ │ │ └── moderation
+│ │ ├── postgresql_client
+│ │ ├── qdrant
+│ │ ├── rclone
+│ │ ├── runpod
+│ │ ├── sendgrid
+│ │ ├── traefik
+│ │ ├── vastai
+│ │ ├── wireguard
+│ │ ├── zerodb_client
+│ │ └── zinit
+│ ├── conversiontools
+│ │ ├── docsorter
+│ │ │ └── pythonscripts
+│ │ ├── imagemagick
+│ │ ├── pdftotext
+│ │ └── text_extractor
+│ ├── core
+│ │ ├── base
+│ │ ├── code
+│ │ │ └── templates
+│ │ │ ├── comment
+│ │ │ ├── function
+│ │ │ ├── interface
+│ │ │ └── struct
+│ │ ├── generator
+│ │ │ └── generic
+│ │ │ └── templates
+│ │ ├── herocmds
+│ │ ├── httpconnection
+│ │ ├── logger
+│ │ ├── openrpc_remove
+│ │ │ ├── examples
+│ │ │ └── specs
+│ │ ├── pathlib
+│ │ ├── playbook
+│ │ ├── playcmds
+│ │ ├── playmacros
+│ │ ├── redisclient
+│ │ ├── rootpath
+│ │ ├── smartid
+│ │ ├── texttools
+│ │ │ └── regext
+│ │ │ └── testdata
+│ │ └── vexecutor
+│ ├── crypt
+│ │ ├── aes_symmetric
+│ │ ├── crpgp
+│ │ ├── ed25519
+│ │ ├── keychain
+│ │ ├── keysafe
+│ │ ├── openssl
+│ │ ├── pgp
+│ │ └── secrets
+│ ├── data
+│ │ ├── cache
+│ │ ├── countries
+│ │ │ └── data
+│ │ ├── currency
+│ │ ├── dbfs
+│ │ ├── dedupestor
+│ │ │ └── dedupe_ourdb
+│ │ ├── doctree
+│ │ │ ├── collection
+│ │ │ │ ├── data
+│ │ │ │ ├── template
+│ │ │ │ └── testdata
+│ │ │ │ └── export_test
+│ │ │ │ ├── export_expected
+│ │ │ │ │ └── src
+│ │ │ │ │ └── col1
+│ │ │ │ │ └── img
+│ │ │ │ └── mytree
+│ │ │ │ └── dir1
+│ │ │ │ └── dir2
+│ │ │ ├── pointer
+│ │ │ └── testdata
+│ │ │ ├── actions
+│ │ │ │ └── functionality
+│ │ │ ├── export_test
+│ │ │ │ ├── export_expected
+│ │ │ │ │ ├── col1
+│ │ │ │ │ │ └── img
+│ │ │ │ │ └── col2
+│ │ │ │ └── mytree
+│ │ │ │ ├── dir1
+│ │ │ │ │ └── dir2
+│ │ │ │ └── dir3
+│ │ │ ├── process_defs_test
+│ │ │ │ ├── col1
+│ │ │ │ └── col2
+│ │ │ ├── process_includes_test
+│ │ │ │ ├── col1
+│ │ │ │ └── col2
+│ │ │ ├── rpc
+│ │ │ └── tree_test
+│ │ │ ├── fruits
+│ │ │ │ └── berries
+│ │ │ │ └── img
+│ │ │ └── vegetables
+│ │ │ └── cruciferous
+│ │ ├── encoder
+│ │ ├── encoderhero
+│ │ ├── flist
+│ │ ├── gid
+│ │ ├── graphdb
+│ │ ├── ipaddress
+│ │ ├── location
+│ │ ├── markdown
+│ │ │ ├── elements
+│ │ │ ├── parsers
+│ │ │ ├── testdata
+│ │ │ └── tools
+│ │ ├── markdownparser2
+│ │ ├── markdownrenderer
+│ │ ├── mnemonic
+│ │ ├── models
+│ │ │ └── hr
+│ │ ├── ourdb
+│ │ ├── ourdb_syncer
+│ │ │ ├── http
+│ │ │ └── streamer
+│ │ ├── ourjson
+│ │ ├── ourtime
+│ │ ├── paramsparser
+│ │ ├── radixtree
+│ │ ├── resp
+│ │ ├── serializers
+│ │ ├── tst
+│ │ ├── verasure
+│ │ └── vstor
+│ ├── dav
+│ │ └── webdav
+│ │ ├── bin
+│ │ ├── specs
+│ │ └── templates
+│ ├── develop
+│ │ ├── codewalker
+│ │ ├── gittools
+│ │ │ └── tests
+│ │ ├── heroprompt
+│ │ │ └── templates
+│ │ ├── luadns
+│ │ ├── performance
+│ │ │ └── cmd
+│ │ ├── sourcetree
+│ │ ├── vscode
+│ │ └── vscode_extensions
+│ │ └── ourdb
+│ │ └── templates
+│ ├── hero
+│ │ ├── db
+│ │ ├── herocluster
+│ │ │ └── example
+│ │ ├── herofs
+│ │ ├── herohandlers
+│ │ ├── heromodels
+│ │ └── heromodels copy
+│ │ └── examples
+│ ├── installers
+│ │ ├── base
+│ │ │ └── templates
+│ │ ├── db
+│ │ │ ├── cometbft
+│ │ │ │ └── templates
+│ │ │ ├── meilisearch_installer
+│ │ │ ├── postgresql
+│ │ │ │ └── templates
+│ │ │ ├── qdrant_installer
+│ │ │ │ └── templates
+│ │ │ ├── zerodb
+│ │ │ └── zerofs
+│ │ ├── develapps
+│ │ │ ├── chrome
+│ │ │ └── vscode
+│ │ ├── infra
+│ │ │ ├── coredns
+│ │ │ │ └── templates
+│ │ │ ├── gitea
+│ │ │ │ └── templates
+│ │ │ ├── livekit
+│ │ │ │ └── templates
+│ │ │ └── zinit_installer
+│ │ ├── lang
+│ │ │ ├── golang
+│ │ │ ├── herolib
+│ │ │ ├── nodejs
+│ │ │ ├── python
+│ │ │ ├── rust
+│ │ │ └── vlang
+│ │ ├── net
+│ │ │ ├── mycelium_installer
+│ │ │ ├── wireguard_installer
+│ │ │ └── yggdrasil
+│ │ ├── sysadmintools
+│ │ │ ├── actrunner
+│ │ │ │ └── templates
+│ │ │ ├── b2
+│ │ │ ├── fungistor
+│ │ │ ├── garage_s3
+│ │ │ │ └── templates
+│ │ │ ├── grafana
+│ │ │ ├── prometheus
+│ │ │ │ └── templates
+│ │ │ ├── rclone
+│ │ │ │ └── templates
+│ │ │ ├── restic
+│ │ │ └── s3
+│ │ ├── threefold
+│ │ │ ├── griddriver
+│ │ │ └── tfrobot
+│ │ ├── ulist
+│ │ ├── virt
+│ │ │ ├── cloudhypervisor
+│ │ │ ├── docker
+│ │ │ ├── herorunner
+│ │ │ ├── lima
+│ │ │ │ └── templates
+│ │ │ ├── pacman
+│ │ │ │ └── templates
+│ │ │ ├── podman
+│ │ │ ├── qemu
+│ │ │ └── youki
+│ │ └── web
+│ │ ├── bun
+│ │ ├── imagemagick
+│ │ ├── lighttpd
+│ │ │ └── templates
+│ │ ├── tailwind
+│ │ ├── tailwind4
+│ │ ├── traefik
+│ │ │ └── templates
+│ │ └── zola
+│ ├── lang
+│ │ ├── python
+│ │ │ └── templates
+│ │ └── rust
+│ ├── mcp
+│ │ ├── baobab
+│ │ ├── cmd
+│ │ ├── mcpgen
+│ │ │ ├── schemas
+│ │ │ └── templates
+│ │ ├── pugconvert
+│ │ │ ├── cmd
+│ │ │ ├── logic
+│ │ │ │ └── templates
+│ │ │ └── mcp
+│ │ ├── rhai
+│ │ │ ├── cmd
+│ │ │ ├── example
+│ │ │ ├── logic
+│ │ │ │ ├── prompts
+│ │ │ │ └── templates
+│ │ │ └── mcp
+│ │ ├── transport
+│ │ └── vcode
+│ │ ├── cmd
+│ │ ├── logic
+│ │ └── mcp
+│ ├── osal
+│ │ ├── core
+│ │ ├── coredns
+│ │ ├── hostsfile
+│ │ ├── linux
+│ │ │ └── templates
+│ │ ├── netns
+│ │ ├── notifier
+│ │ ├── osinstaller
+│ │ ├── rsync
+│ │ │ └── templates
+│ │ ├── screen
+│ │ ├── sshagent
+│ │ ├── startupmanager
+│ │ ├── systemd
+│ │ │ └── templates
+│ │ ├── tmux
+│ │ │ └── bin
+│ │ ├── traefik
+│ │ │ └── specs
+│ │ ├── tun
+│ │ ├── ubuntu
+│ │ └── ufw
+│ ├── schemas
+│ │ ├── jsonrpc
+│ │ │ ├── reflection
+│ │ │ └── testdata
+│ │ │ ├── testmodule
+│ │ │ └── testserver
+│ │ ├── jsonschema
+│ │ │ ├── codegen
+│ │ │ │ └── templates
+│ │ │ └── testdata
+│ │ ├── openapi
+│ │ │ ├── codegen
+│ │ │ ├── templates
+│ │ │ └── testdata
+│ │ └── openrpc
+│ │ ├── codegen
+│ │ │ ├── templates
+│ │ │ └── testdata
+│ │ ├── server
+│ │ └── testdata
+│ │ └── petstore_client
+│ ├── security
+│ │ ├── authentication
+│ │ │ └── templates
+│ │ └── jwt
+│ ├── threefold
+│ │ ├── grid3
+│ │ │ ├── deploy_tosort
+│ │ │ ├── deployer
+│ │ │ ├── deployer2_sort
+│ │ │ ├── griddriver
+│ │ │ ├── gridproxy
+│ │ │ │ └── model
+│ │ │ ├── models
+│ │ │ ├── rmb
+│ │ │ ├── tfrobot
+│ │ │ │ └── templates
+│ │ │ ├── tokens
+│ │ │ └── zerohub
+│ │ ├── grid4
+│ │ │ ├── datamodel
+│ │ │ ├── datamodelsimulator
+│ │ │ ├── farmingsimulator
+│ │ │ │ └── templates
+│ │ │ └── gridsimulator
+│ │ │ └── manual
+│ │ ├── incatokens
+│ │ │ └── templates
+│ │ └── models
+│ │ ├── business
+│ │ ├── core
+│ │ ├── finance
+│ │ ├── flow
+│ │ ├── identity
+│ │ ├── legal
+│ │ ├── library
+│ │ ├── location
+│ │ └── payment
+│ ├── ui
+│ │ ├── console
+│ │ ├── generic
+│ │ ├── logger
+│ │ ├── telegram
+│ │ │ └── client
+│ │ ├── template
+│ │ └── uimodel
+│ ├── vfs
+│ │ ├── vfs_calendar
+│ │ ├── vfs_contacts
+│ │ ├── vfs_db
+│ │ ├── vfs_local
+│ │ ├── vfs_mail
+│ │ └── vfs_nested
+│ ├── virt
+│ │ ├── cloudhypervisor
+│ │ ├── crun
+│ │ ├── docker
+│ │ ├── heropods
+│ │ ├── herorun
+│ │ ├── herorun2
+│ │ ├── hetznermanager
+│ │ ├── lima
+│ │ │ ├── raw
+│ │ │ └── templates
+│ │ ├── podman
+│ │ └── qemu
+│ │ └── templates
+│ └── web
+│ ├── doctreeclient
+│ ├── docusaurus
+│ │ └── example
+│ ├── echarts
+│ ├── site
+│ │ └── example
+│ └── ui
+│ ├── static
+│ │ ├── css
+│ │ └── js
+│ └── templates
+│ └── admin
+├── libarchive
+│ ├── baobab
+│ │ ├── actor
+│ │ ├── generator
+│ │ │ ├── _archive
+│ │ │ ├── templates
+│ │ │ └── testdata
+│ │ ├── osis
+│ │ ├── specification
+│ │ └── stage
+│ │ └── interfaces
+│ ├── buildah
+│ ├── daguserver
+│ │ └── templates
+│ ├── dify
+│ │ └── templates
+│ ├── examples
+│ │ └── baobab
+│ │ ├── generator
+│ │ │ ├── basic
+│ │ │ ├── geomind_poc
+│ │ │ └── openapi_e2e
+│ │ └── specification
+│ ├── installers
+│ │ └── web
+│ │ └── caddy2
+│ │ └── templates
+│ ├── rhai
+│ │ ├── prompts
+│ │ ├── templates
+│ │ └── testdata
+│ ├── starlight
+│ │ └── templates
+│ └── zinit
+│ └── zinit
+├── manual
+│ ├── best_practices
+│ │ ├── osal
+│ │ └── scripts
+│ ├── core
+│ │ └── concepts
+│ ├── documentation
+│ └── playcmds
+├── research
+│ ├── globals
+│ └── openrpc
+├── tests
+│ └── data
+└── vscodeplugin
+ └── heroscrypt-syntax
+ └── syntaxes
+
+
+
+
+File: /Users/despiegk/code/github/incubaid/herolib/lib/hero/db/ai_instructions.md
+```md
+# HeroDB Model Creation Instructions for AI
+
+## Overview
+
+This document provides clear instructions for AI agents to create new HeroDB models similar to `comment.v`. These models are used to store structured data in Redis using the HeroDB system.
+
+## Key Concepts
+
+- Each model represents a data type stored in Redis hash sets
+- Models must implement serialization/deserialization using the `encoder` module
+- Models inherit from the `Base` struct which provides common fields
+- The database uses a factory pattern for model access
+
+## File Structure
+
+Create a new file in `lib/hero/heromodels/` with the model name (e.g., `calendar.v`).
+
+## Required Components
+
+### 1. Model Struct Definition
+
+Define your model struct with the following pattern:
+
+```v
+@[heap]
+pub struct Calendar {
+ db.Base // Inherit from Base struct
+pub mut:
+ // Add your specific fields here
+ title string
+ start_time i64
+ end_time i64
+ location string
+ attendees []string
+}
+```
+
+### 2. Type Name Method
+
+Implement a method to return the model's type name:
+
+```v
+pub fn (self Calendar) type_name() string {
+ return 'calendar'
+}
+```
+
+### 3. Serialization (dump) Method
+
+Implement the `dump` method to serialize your struct's fields using the encoder:
+
+```v
+pub fn (self Calendar) dump(mut e &encoder.Encoder) ! {
+ e.add_string(self.title)
+ e.add_i64(self.start_time)
+ e.add_i64(self.end_time)
+ e.add_string(self.location)
+ e.add_list_string(self.attendees)
+}
+```
+
+### 4. Deserialization (load) Method
+
+Implement the `load` method to deserialize your struct's fields:
+
+```v
+fn (mut self DBCalendar) load(mut o Calendar, mut e &encoder.Decoder) ! {
+ o.title = e.get_string()!
+ o.start_time = e.get_i64()!
+ o.end_time = e.get_i64()!
+ o.location = e.get_string()!
+ o.attendees = e.get_list_string()!
+}
+```
+
+### 5. Model Arguments Struct
+
+Define a struct for creating new instances of your model:
+
+```v
+@[params]
+pub struct CalendarArg {
+pub mut:
+ title string @[required]
+ start_time i64
+ end_time i64
+ location string
+ attendees []string
+}
+```
+
+### 6. Database Wrapper Struct
+
+Create a database wrapper struct for your model:
+
+```v
+pub struct DBCalendar {
+pub mut:
+ db &db.DB @[skip; str: skip]
+}
+```
+
+### 7. Factory Integration
+
+Add your model to the ModelsFactory struct in `factory.v`:
+
+```v
+pub struct ModelsFactory {
+pub mut:
+calendar DBCalendar
+// ... other models
+}
+```
+
+And initialize it in the `new()` function:
+
+```v
+pub fn new() !ModelsFactory {
+mut mydb := db.new()!
+return ModelsFactory{
+calendar: DBCalendar{
+ db: &mydb
+ }
+ // ... initialize other models
+ }
+}
+```
+
+## Encoder Methods Reference
+
+Use these methods for serialization/deserialization:
+
+### Encoder (Serialization)
+- `e.add_bool(val bool)`
+- `e.add_u8(val u8)`
+- `e.add_u16(val u16)`
+- `e.add_u32(val u32)`
+- `e.add_u64(val u64)`
+- `e.add_i8(val i8)`
+- `e.add_i16(val i16)`
+- `e.add_i32(val i32)`
+- `e.add_i64(val i64)`
+- `e.add_f32(val f32)`
+- `e.add_f64(val f64)`
+- `e.add_string(val string)`
+- `e.add_list_bool(val []bool)`
+- `e.add_list_u8(val []u8)`
+- `e.add_list_u16(val []u16)`
+- `e.add_list_u32(val []u32)`
+- `e.add_list_u64(val []u64)`
+- `e.add_list_i8(val []i8)`
+- `e.add_list_i16(val []i16)`
+- `e.add_list_i32(val []i32)`
+- `e.add_list_i64(val []i64)`
+- `e.add_list_f32(val []f32)`
+- `e.add_list_f64(val []f64)`
+- `e.add_list_string(val []string)`
+
+### Decoder (Deserialization)
+- `e.get_bool()!`
+- `e.get_u8()!`
+- `e.get_u16()!`
+- `e.get_u32()!`
+- `e.get_u64()!`
+- `e.get_i8()!`
+- `e.get_i16()!`
+- `e.get_i32()!`
+- `e.get_i64()!`
+- `e.get_f32()!`
+- `e.get_f64()!`
+- `e.get_string()!`
+- `e.get_list_bool()!`
+- `e.get_list_u8()!`
+- `e.get_list_u16()!`
+- `e.get_list_u32()!`
+- `e.get_list_u64()!`
+- `e.get_list_i8()!`
+- `e.get_list_i16()!`
+- `e.get_list_i32()!`
+- `e.get_list_i64()!`
+- `e.get_list_f32()!`
+- `e.get_list_f64()!`
+- `e.get_list_string()!`
+
+## CRUD Methods Implementation
+
+### Create New Instance
+```v
+pub fn (mut self DBCalendar) new(args CalendarArg) !Calendar {
+ mut o := Calendar{
+ title: args.title
+ start_time: args.start_time
+ end_time: args.end_time
+ location: args.location
+ attendees: args.attendees
+ updated_at: ourtime.now().unix()
+ }
+ return o
+}
+```
+
+### Save to Database
+```v
+pub fn (mut self DBCalendar) set(o Calendar) !u32 {
+ return self.db.set[Calendar](o)!
+}
+```
+
+### Retrieve from Database
+```v
+pub fn (mut self DBCalendar) get(id u32) !Calendar {
+ mut o, data := self.db.get_data[Calendar](id)!
+ mut e_decoder := encoder.decoder_new(data)
+ self.load(mut o, mut e_decoder)!
+ return o
+}
+```
+
+### Delete from Database
+```v
+pub fn (mut self DBCalendar) delete(id u32) ! {
+ self.db.delete[Calendar](id)!
+}
+```
+
+### Check Existence
+```v
+pub fn (mut self DBCalendar) exist(id u32) !bool {
+ return self.db.exists[Calendar](id)!
+}
+```
+
+### List All Objects
+```v
+pub fn (mut self DBCalendar) list() ![]Calendar {
+ return self.db.list[Calendar]()!.map(self.get(it)!)
+}
+```
+
+## Example Usage Script
+
+Create a `.vsh` script in `examples/hero/heromodels/` to demonstrate usage:
+
+```v
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
+
+import freeflowuniverse.herolib.core.redisclient
+import freeflowuniverse.herolib.hero.heromodels
+
+mut mydb := heromodels.new()!
+
+// Create a new object
+mut o := mydb.calendar.new(
+ title: 'Meeting'
+ start_time: 1672531200
+ end_time: 1672534800
+ location: 'Conference Room'
+ attendees: ['john@example.com', 'jane@example.com']
+)!
+
+// Save to database
+oid := mydb.calendar.set(o)!
+println('Created object with ID: ${oid}')
+
+// Retrieve from database
+mut o2 := mydb.calendar.get(oid)!
+println('Retrieved object: ${o2}')
+
+// List all objects
+mut objects := mydb.calendar.list()!
+println('All objects: ${objects}')
+```
+
+## Best Practices
+
+1. Always inherit from `db.Base` struct
+2. Implement all required methods (`type_name`, `dump`, `load`)
+3. Use the encoder methods for consistent serialization
+4. Handle errors appropriately with `!` or `or` blocks
+5. Keep field ordering consistent between `dump` and `load` methods
+6. Use snake_case for field names
+7. Add `@[required]` attribute to mandatory fields in argument structs
+8. Initialize timestamps using `ourtime.now().unix()`
+
+## Implementation Steps Summary
+
+1. Create model struct inheriting from `db.Base`
+2. Implement `type_name()` method
+3. Implement `dump()` method using encoder
+4. Implement `load()` method using decoder
+5. Create argument struct with `@[params]` attribute
+6. Create database wrapper struct
+7. Add model to `ModelsFactory` in `factory.v`
+8. Implement CRUD methods
+9. Create example usage script
+10. Test the implementation with the example script
+```
+
+File: /Users/despiegk/code/github/incubaid/herolib/lib/hero/db/core_methods.v
+```v
+module db
+
+import freeflowuniverse.herolib.data.ourtime
+import freeflowuniverse.herolib.data.encoder
+
+pub fn (mut self DB) set[T](obj_ T) !u32 {
+ // Get the next ID
+ mut obj := obj_
+ if obj.id == 0 {
+ obj.id = self.new_id()!
+ }
+ mut t := ourtime.now().unix()
+ if obj.created_at == 0 {
+ obj.created_at = t
+ }
+ obj.updated_at = t
+
+ // id u32
+ // name string
+ // description string
+ // created_at i64
+ // updated_at i64
+ // securitypolicy u32
+// tags u32 // when we set/get we always do as []string but this can then be sorted and md5ed; this gives the unique id of tags
+ // comments []u32
+ mut e := encoder.new()
+ e.add_u8(1)
+ e.add_u32(obj.id)
+ e.add_string(obj.name)
+ e.add_string(obj.description)
+ e.add_i64(obj.created_at)
+ e.add_i64(obj.updated_at)
+ e.add_u32(obj.securitypolicy)
+ e.add_u32(obj.tags)
+ e.add_u16(u16(obj.comments.len))
+ for comment in obj.comments {
+ e.add_u32(comment)
+ }
+ // println('set: before dump, e.data.len: ${e.data.len}')
+ obj.dump(mut e)!
+ // println('set: after dump, e.data.len: ${e.data.len}')
+ self.redis.hset(self.db_name[T](), obj.id.str(), e.data.bytestr())!
+ return obj.id
+}
+
+// return the data, cannot return the object as we do not know the type
+pub fn (mut self DB) get_data[T](id u32) !(T, []u8) {
+ data := self.redis.hget(self.db_name[T](), id.str())!
+
+ if data.len == 0 {
+ return error('herodb:${self.db_name[T]()} not found for ${id}')
+ }
+
+ // println('get_data: data.len: ${data.len}')
+ mut e := encoder.decoder_new(data.bytes())
+ version := e.get_u8()!
+ if version != 1 {
+ panic('wrong version in base load')
+ }
+ mut base := T{}
+ base.id = e.get_u32()!
+ base.name = e.get_string()!
+ base.description = e.get_string()!
+ base.created_at = e.get_i64()!
+ base.updated_at = e.get_i64()!
+ base.securitypolicy = e.get_u32()!
+ base.tags = e.get_u32()!
+ for _ in 0 .. e.get_u16()! {
+ base.comments << e.get_u32()!
+ }
+ return base, e.data
+}
+
+pub fn (mut self DB) exists[T](id u32) !bool {
+ return self.redis.hexists(self.db_name[T](), id.str())!
+}
+
+pub fn (mut self DB) delete[T](id u32) ! {
+ self.redis.hdel(self.db_name[T](), id.str())!
+}
+
+pub fn (mut self DB) list[T]() ![]u32 {
+ ids := self.redis.hkeys(self.db_name[T]())!
+ return ids.map(it.u32())
+}
+
+// make it easy to get a base object
+pub fn (mut self DB) new_from_base[T](args BaseArgs) !Base {
+ return T{
+ Base: new_base(args)!
+ }
+}
+
+fn (mut self DB) db_name[T]() string {
+ // get the name of the type T
+ mut name := T.name.to_lower_ascii().split('.').last()
+ // println("db_name rediskey: '${name}'")
+ return 'db:${name}'
+}
+
+pub fn (mut self DB) new_id() !u32 {
+ return u32(self.redis.incr('db:id')!)
+}
+
+```
+
+File: /Users/despiegk/code/github/incubaid/herolib/lib/hero/db/core_models.v
+```v
+module db
+
+import crypto.md5
+import freeflowuniverse.herolib.core.redisclient
+import freeflowuniverse.herolib.data.ourtime
+
+// Group represents a collection of users with roles and permissions
+@[heap]
+pub struct Base {
+pub mut:
+ id u32
+ name string
+ description string
+ created_at i64
+ updated_at i64
+ securitypolicy u32
+tags u32 // when we set/get we always do as []string but this can then be sorted and md5ed; this gives the unique id of tags
+ comments []u32
+}
+
+@[heap]
+pub struct SecurityPolicy {
+pub mut:
+ id u32
+ read []u32 // links to users & groups
+ write []u32 // links to users & groups
+ delete []u32 // links to users & groups
+ public bool
+ md5 string // this sorts read, write and delete u32 + hash, then do md5 hash, this allows to go from a random read/write/delete/public config to a hash
+}
+
+@[heap]
+pub struct Tags {
+pub mut:
+ id u32
+ names []string // unique per id
+ md5 string // of sorted names, to make easy to find unique id, each name lowercased and made ascii
+}
+
+```
+
+File: /Users/despiegk/code/github/incubaid/herolib/lib/hero/db/helpers_comments.v
+```v
+module db
+
+import crypto.md5
+
+
+@[params]
+pub struct CommentArg {
+pub mut:
+ comment string
+ parent u32
+ author u32
+}
+
+pub fn (mut self DB) comments_get(args []CommentArg) ![]u32 {
+ return args.map(self.comment_get(it.comment)!)
+}
+
+pub fn (mut self DB) comment_get(comment string) !u32 {
+ comment_fixed := comment.to_lower_ascii().trim_space()
+ return if comment_fixed.len > 0 {
+ hash := md5.hexhash(comment_fixed)
+ comment_found := self.redis.hget('db:comments', hash)!
+ if comment_found == '' {
+ id := self.new_id()!
+ self.redis.hset('db:comments', hash, id.str())!
+ self.redis.hset('db:comments', id.str(), comment_fixed)!
+ id
+ } else {
+ comment_found.u32()
+ }
+ } else {
+ 0
+ }
+}
+
+```
+
+File: /Users/despiegk/code/github/incubaid/herolib/lib/hero/db/helpers_tags.v
+```v
+module db
+
+import crypto.md5
+
+pub fn (mut self DB) tags_get(tags []string) !u32 {
+ return if tags.len > 0 {
+ mut tags_fixed := tags.map(it.to_lower_ascii().trim_space()).filter(it != '')
+ tags_fixed.sort_ignore_case()
+ hash := md5.hexhash(tags_fixed.join(','))
+ tags_found := self.redis.hget('db:tags', hash)!
+ return if tags_found == '' {
+ println('tags_get: new tags: ${tags_fixed.join(",")}')
+ id := self.new_id()!
+ self.redis.hset('db:tags', hash, id.str())!
+ self.redis.hset('db:tags', id.str(), tags_fixed.join(','))!
+ id
+ } else {
+ tags_found.u32()
+ }
+ } else {
+ 0
+ }
+}
+
+```
+
+File: /Users/despiegk/code/github/incubaid/herolib/lib/hero/heromodels/calendar_event.v
+```v
+
+module heromodels
+
+import freeflowuniverse.herolib.data.encoder
+import freeflowuniverse.herolib.data.ourtime
+import freeflowuniverse.herolib.hero.db
+
+// CalendarEvent represents a single event in a calendar
+@[heap]
+pub struct CalendarEvent {
+ db.Base
+pub mut:
+ title string
+ start_time i64 // Unix timestamp
+ end_time i64 // Unix timestamp
+ location string
+ attendees []u32 // IDs of user groups
+ fs_items []u32 // IDs of linked files or dirs
+ calendar_id u32 // Associated calendar
+ status EventStatus
+ is_all_day bool
+ is_recurring bool
+ recurrence []RecurrenceRule // normally empty
+ reminder_mins []int // Minutes before event for reminders
+ color string // Hex color code
+ timezone string
+}
+
+pub struct Attendee {
+pub mut:
+ user_id u32
+ status AttendanceStatus
+ role AttendeeRole
+}
+
+pub enum AttendanceStatus {
+ no_response
+ accepted
+ declined
+ tentative
+}
+
+pub enum AttendeeRole {
+ required
+ optional
+ organizer
+}
+
+pub enum EventStatus {
+ draft
+ published
+ cancelled
+ completed
+}
+
+pub struct RecurrenceRule {
+pub mut:
+ frequency RecurrenceFreq
+ interval int // Every N frequencies
+ until i64 // End date (Unix timestamp)
+ count int // Number of occurrences
+ by_weekday []int // Days of week (0=Sunday)
+ by_monthday []int // Days of month
+}
+
+pub enum RecurrenceFreq {
+ none
+ daily
+ weekly
+ monthly
+ yearly
+}
+
+pub struct DBCalendarEvent {
+pub mut:
+ db &db.DB @[skip; str: skip]
+}
+
+pub fn (self CalendarEvent) type_name() string {
+ return 'calendar_event'
+}
+
+pub fn (self CalendarEvent) dump(mut e &encoder.Encoder) ! {
+ e.add_string(self.title)
+ e.add_i64(self.start_time)
+ e.add_i64(self.end_time)
+ e.add_string(self.location)
+ e.add_list_u32(self.attendees)
+ e.add_list_u32(self.fs_items)
+ e.add_u32(self.calendar_id)
+ e.add_u8(u8(self.status))
+ e.add_bool(self.is_all_day)
+ e.add_bool(self.is_recurring)
+
+ // Encode recurrence array
+ e.add_u16(u16(self.recurrence.len))
+ for rule in self.recurrence {
+ e.add_u8(u8(rule.frequency))
+ e.add_int(rule.interval)
+ e.add_i64(rule.until)
+ e.add_int(rule.count)
+ e.add_list_int(rule.by_weekday)
+ e.add_list_int(rule.by_monthday)
+ }
+
+ e.add_list_int(self.reminder_mins)
+ e.add_string(self.color)
+ e.add_string(self.timezone)
+}
+
+fn (mut self DBCalendarEvent) load(mut o CalendarEvent, mut e &encoder.Decoder) ! {
+ o.title = e.get_string()!
+ o.start_time = e.get_i64()!
+ o.end_time = e.get_i64()!
+ o.location = e.get_string()!
+ o.attendees = e.get_list_u32()!
+ o.fs_items = e.get_list_u32()!
+ o.calendar_id = e.get_u32()!
+ o.status = unsafe { EventStatus(e.get_u8()!) } //TODO: is there no better way?
+ o.is_all_day = e.get_bool()!
+ o.is_recurring = e.get_bool()!
+
+ // Decode recurrence array
+ recurrence_len := e.get_u16()!
+ mut recurrence := []RecurrenceRule{}
+ for _ in 0 .. recurrence_len {
+ frequency := unsafe { RecurrenceFreq(e.get_u8()!) }
+ interval := e.get_int()!
+ until := e.get_i64()!
+ count := e.get_int()!
+ by_weekday := e.get_list_int()!
+ by_monthday := e.get_list_int()!
+
+ recurrence << RecurrenceRule{
+ frequency: frequency
+ interval: interval
+ until: until
+ count: count
+ by_weekday: by_weekday
+ by_monthday: by_monthday
+ }
+ }
+ o.recurrence = recurrence
+
+ o.reminder_mins = e.get_list_int()!
+ o.color = e.get_string()!
+ o.timezone = e.get_string()!
+}
+
+@[params]
+pub struct CalendarEventArg {
+pub mut:
+ name string
+ description string
+ title string
+ start_time string // use ourtime module to go from string to epoch
+ end_time string // use ourtime module to go from string to epoch
+ location string
+ attendees []u32 // IDs of user groups
+ fs_items []u32 // IDs of linked files or dirs
+ calendar_id u32 // Associated calendar
+ status EventStatus
+ is_all_day bool
+ is_recurring bool
+ recurrence []RecurrenceRule
+ reminder_mins []int // Minutes before event for reminders
+ color string // Hex color code
+ timezone string
+ securitypolicy u32
+ tags []string
+ comments []db.CommentArg
+}
+
+// get new calendar event, not from the DB
+pub fn (mut self DBCalendarEvent) new(args CalendarEventArg) !CalendarEvent {
+ mut o := CalendarEvent{
+ title: args.title
+ location: args.location
+ attendees: args.attendees
+ fs_items: args.fs_items
+ calendar_id: args.calendar_id
+ status: args.status
+ is_all_day: args.is_all_day
+ is_recurring: args.is_recurring
+ recurrence: args.recurrence
+ reminder_mins: args.reminder_mins
+ color: args.color
+ timezone: args.timezone
+ }
+
+ // Set base fields
+ o.name = args.name
+ o.description = args.description
+ o.securitypolicy = args.securitypolicy
+ o.tags = self.db.tags_get(args.tags)!
+ o.comments = self.db.comments_get(args.comments)!
+ o.updated_at = ourtime.now().unix()
+
+ // Convert string times to Unix timestamps
+ mut start_time_obj := ourtime.new(args.start_time)!
+ o.start_time = start_time_obj.unix()
+
+ mut end_time_obj := ourtime.new(args.end_time)!
+ o.end_time = end_time_obj.unix()
+
+ return o
+}
+
+pub fn (mut self DBCalendarEvent) set(o CalendarEvent) !u32 {
+ // Use db set function which now returns the ID
+ return self.db.set[CalendarEvent](o)!
+}
+
+pub fn (mut self DBCalendarEvent) delete(id u32) ! {
+ self.db.delete[CalendarEvent](id)!
+}
+
+pub fn (mut self DBCalendarEvent) exist(id u32) !bool {
+ return self.db.exists[CalendarEvent](id)!
+}
+
+pub fn (mut self DBCalendarEvent) get(id u32) !CalendarEvent {
+ mut o, data := self.db.get_data[CalendarEvent](id)!
+ mut e_decoder := encoder.decoder_new(data)
+ self.load(mut o, mut e_decoder)!
+ return o
+}
+
+pub fn (mut self DBCalendarEvent) list() ![]CalendarEvent {
+ return self.db.list[CalendarEvent]()!.map(self.get(it)!)
+}
+
+```
+
+
+Make the CRUD methods and an example script for all files in lib/hero/herofs.
+
+Think about which additional hsets we need to make lookups efficient.
+
+Check the implementation.
+
+Do the implementation.
+
+
+
diff --git a/examples/hero/alpine_example.vsh b/examples/alpine_example.vsh
similarity index 100%
rename from examples/hero/alpine_example.vsh
rename to examples/alpine_example.vsh
diff --git a/examples/hero/generation/blank_generation/.gitignore b/examples/hero/generation/blank_generation/.gitignore
deleted file mode 100644
index 29e2ed58..00000000
--- a/examples/hero/generation/blank_generation/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-.example_1_actor
-.example_2_actor
\ No newline at end of file
diff --git a/examples/hero/generation/blank_generation/README.md b/examples/hero/generation/blank_generation/README.md
deleted file mode 100644
index e49b06c0..00000000
--- a/examples/hero/generation/blank_generation/README.md
+++ /dev/null
@@ -1,19 +0,0 @@
-## Blank Actor Generation Example
-
-This example shows how to generate a blank actor (unspecified, except for name). The generated actor module contains all the boilerplate code of an actor that can be compiled but lacks ant state or methods.
-
-Simply run:
-```
-chmod +x *.vsh
-example_1.vsh
-example_2.vsh
-```
-
-### Examples
-
-There are two examples of blank actor generation.
-- `example_1.vsh` generates the actor from a blank specification structure.
-- `example_2.vsh` generates the actor from a blank OpenAPI Specification.
-
-
-Read []() to learn how actor's are generated from specifications, and how the two example's differ.
\ No newline at end of file
diff --git a/examples/hero/generation/blank_generation/example_1.vsh b/examples/hero/generation/blank_generation/example_1.vsh
deleted file mode 100644
index 451a73e0..00000000
--- a/examples/hero/generation/blank_generation/example_1.vsh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
-
-import freeflowuniverse.herolib.hero.generation
-
-generation.generate_actor(
- name: 'Example'
-)
diff --git a/examples/hero/generation/blank_generation/example_2.vsh b/examples/hero/generation/blank_generation/example_2.vsh
deleted file mode 100644
index bef29f7a..00000000
--- a/examples/hero/generation/blank_generation/example_2.vsh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
-
-import freeflowuniverse.herolib.hero.generation
-
-generation.generate_actor(
- name: 'Example'
- interfaces: []
-)
diff --git a/examples/hero/generation/openapi_generation/README.md b/examples/hero/generation/openapi_generation/README.md
deleted file mode 100644
index 21be1e63..00000000
--- a/examples/hero/generation/openapi_generation/README.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# Hero Generation Example
-
-## Getting started
-
-### Step 1: Generate specification
-
-### Step 2: Generate actor from specification
-
-The script below generates the actor's OpenAPI handler from a given OpenAPI Specification. The generated code is written to `handler.v` in the example actor's module.
-
-`generate_actor.vsh`
-
-### Step 3: Run actor
-
-The script below runs the actor's Redis RPC Queue Interface and uses the generated handler function to handle incoming RPCs. The Redis Interface listens to the RPC Queue assigned to the actor.
-
-`run_interface_procedure.vsh`
-
-### Step 3: Run server
-
-The script below runs the actor's RPC Queue Listener and uses the generated handler function to handle incoming RPCs.
-
-`run_interface_openapi.vsh`
\ No newline at end of file
diff --git a/examples/hero/generation/openapi_generation/example_actor/README.md b/examples/hero/generation/openapi_generation/example_actor/README.md
deleted file mode 100644
index 13cc32a8..00000000
--- a/examples/hero/generation/openapi_generation/example_actor/README.md
+++ /dev/null
@@ -1 +0,0 @@
-# Example Actor
\ No newline at end of file
diff --git a/examples/hero/generation/openapi_generation/example_actor/actor.v b/examples/hero/generation/openapi_generation/example_actor/actor.v
deleted file mode 100644
index 084db005..00000000
--- a/examples/hero/generation/openapi_generation/example_actor/actor.v
+++ /dev/null
@@ -1,34 +0,0 @@
-module example_actor
-
-import os
-import freeflowuniverse.herolib.hero.baobab.stage { IActor, RunParams }
-import freeflowuniverse.herolib.web.openapi
-import time
-
-const openapi_spec_path = '${os.dir(@FILE)}/specs/openapi.json'
-const openapi_spec_json = os.read_file(openapi_spec_path) or { panic(err) }
-const openapi_specification = openapi.json_decode(openapi_spec_json)!
-
-struct ExampleActor {
- stage.Actor
-}
-
-fn new() !ExampleActor {
- return ExampleActor{stage.new_actor('example')}
-}
-
-pub fn run() ! {
- mut a_ := new()!
- mut a := IActor(a_)
- a.run()!
-}
-
-pub fn run_server(params RunParams) ! {
- mut a := new()!
- mut server := actor.new_server(
- redis_url: 'localhost:6379'
- redis_queue: a.name
- openapi_spec: openapi_specification
- )!
- server.run(params)
-}
diff --git a/examples/hero/generation/openapi_generation/example_actor/actor_test.v b/examples/hero/generation/openapi_generation/example_actor/actor_test.v
deleted file mode 100644
index d07f5d25..00000000
--- a/examples/hero/generation/openapi_generation/example_actor/actor_test.v
+++ /dev/null
@@ -1,15 +0,0 @@
-module example_actor
-
-const test_port = 8101
-
-pub fn test_new() ! {
- new() or { return error('Failed to create actor:\n${err}') }
-}
-
-pub fn test_run() ! {
- spawn run()
-}
-
-pub fn test_run_server() ! {
- spawn run_server(port: test_port)
-}
diff --git a/examples/hero/generation/openapi_generation/example_actor/handle.v b/examples/hero/generation/openapi_generation/example_actor/handle.v
deleted file mode 100644
index 889fce52..00000000
--- a/examples/hero/generation/openapi_generation/example_actor/handle.v
+++ /dev/null
@@ -1,5 +0,0 @@
-module example_actor
-
-pub fn (mut a ExampleActor) handle(method string, data string) !string {
- return data
-}
diff --git a/examples/hero/generation/openapi_generation/example_actor/specs/openapi.json b/examples/hero/generation/openapi_generation/example_actor/specs/openapi.json
deleted file mode 100644
index 77de98a3..00000000
--- a/examples/hero/generation/openapi_generation/example_actor/specs/openapi.json
+++ /dev/null
@@ -1,346 +0,0 @@
-{
- "openapi": "3.0.3",
- "info": {
- "title": "Pet Store API",
- "description": "A sample API for a pet store",
- "version": "1.0.0"
- },
- "servers": [
- {
- "url": "https://api.petstore.example.com/v1",
- "description": "Production server"
- },
- {
- "url": "https://staging.petstore.example.com/v1",
- "description": "Staging server"
- }
- ],
- "paths": {
- "/pets": {
- "get": {
- "summary": "List all pets",
- "operationId": "listPets",
- "parameters": [
- {
- "name": "limit",
- "in": "query",
- "description": "Maximum number of pets to return",
- "required": false,
- "schema": {
- "type": "integer",
- "format": "int32"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "A paginated list of pets",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Pets"
- }
- }
- }
- },
- "400": {
- "description": "Invalid request"
- }
- }
- },
- "post": {
- "summary": "Create a new pet",
- "operationId": "createPet",
- "requestBody": {
- "required": true,
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/NewPet"
- }
- }
- }
- },
- "responses": {
- "201": {
- "description": "Pet created",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Pet"
- }
- }
- }
- },
- "400": {
- "description": "Invalid input"
- }
- }
- }
- },
- "/pets/{petId}": {
- "get": {
- "summary": "Get a pet by ID",
- "operationId": "getPet",
- "parameters": [
- {
- "name": "petId",
- "in": "path",
- "description": "ID of the pet to retrieve",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "A pet",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Pet"
- }
- }
- }
- },
- "404": {
- "description": "Pet not found"
- }
- }
- },
- "delete": {
- "summary": "Delete a pet by ID",
- "operationId": "deletePet",
- "parameters": [
- {
- "name": "petId",
- "in": "path",
- "description": "ID of the pet to delete",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "204": {
- "description": "Pet deleted"
- },
- "404": {
- "description": "Pet not found"
- }
- }
- }
- },
- "/orders": {
- "get": {
- "summary": "List all orders",
- "operationId": "listOrders",
- "responses": {
- "200": {
- "description": "A list of orders",
- "content": {
- "application/json": {
- "schema": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/Order"
- }
- }
- }
- }
- }
- }
- }
- },
- "/orders/{orderId}": {
- "get": {
- "summary": "Get an order by ID",
- "operationId": "getOrder",
- "parameters": [
- {
- "name": "orderId",
- "in": "path",
- "description": "ID of the order to retrieve",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "An order",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Order"
- }
- }
- }
- },
- "404": {
- "description": "Order not found"
- }
- }
- },
- "delete": {
- "summary": "Delete an order by ID",
- "operationId": "deleteOrder",
- "parameters": [
- {
- "name": "orderId",
- "in": "path",
- "description": "ID of the order to delete",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "204": {
- "description": "Order deleted"
- },
- "404": {
- "description": "Order not found"
- }
- }
- }
- },
- "/users": {
- "post": {
- "summary": "Create a user",
- "operationId": "createUser",
- "requestBody": {
- "required": true,
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/NewUser"
- }
- }
- }
- },
- "responses": {
- "201": {
- "description": "User created",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/User"
- }
- }
- }
- }
- }
- }
- }
- },
- "components": {
- "schemas": {
- "Pet": {
- "type": "object",
- "required": ["id", "name"],
- "properties": {
- "id": {
- "type": "integer",
- "format": "int64"
- },
- "name": {
- "type": "string"
- },
- "tag": {
- "type": "string"
- }
- }
- },
- "NewPet": {
- "type": "object",
- "required": ["name"],
- "properties": {
- "name": {
- "type": "string"
- },
- "tag": {
- "type": "string"
- }
- }
- },
- "Pets": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/Pet"
- }
- },
- "Order": {
- "type": "object",
- "required": ["id", "petId", "quantity", "shipDate"],
- "properties": {
- "id": {
- "type": "integer",
- "format": "int64"
- },
- "petId": {
- "type": "integer",
- "format": "int64"
- },
- "quantity": {
- "type": "integer",
- "format": "int32"
- },
- "shipDate": {
- "type": "string",
- "format": "date-time"
- },
- "status": {
- "type": "string",
- "enum": ["placed", "approved", "delivered"]
- },
- "complete": {
- "type": "boolean"
- }
- }
- },
- "User": {
- "type": "object",
- "required": ["id", "username"],
- "properties": {
- "id": {
- "type": "integer",
- "format": "int64"
- },
- "username": {
- "type": "string"
- },
- "email": {
- "type": "string"
- },
- "phone": {
- "type": "string"
- }
- }
- },
- "NewUser": {
- "type": "object",
- "required": ["username"],
- "properties": {
- "username": {
- "type": "string"
- },
- "email": {
- "type": "string"
- },
- "phone": {
- "type": "string"
- }
- }
- }
- }
- }
- }
\ No newline at end of file
diff --git a/examples/hero/generation/openapi_generation/run_interface_procedure.vsh b/examples/hero/generation/openapi_generation/run_interface_procedure.vsh
deleted file mode 100755
index d6a19968..00000000
--- a/examples/hero/generation/openapi_generation/run_interface_procedure.vsh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
-
-// import example_actor
-
-// example_actor.run_interface_procedure()
diff --git a/examples/hero/herofs/herofs_advanced.vsh b/examples/hero/herofs/herofs_advanced.vsh
old mode 100644
new mode 100755
index b3c39a15..29876d33
--- a/examples/hero/herofs/herofs_advanced.vsh
+++ b/examples/hero/herofs/herofs_advanced.vsh
@@ -194,11 +194,13 @@ fn main() {
// 1. Move a file to multiple directories (hard link-like behavior)
println('Moving logo.png to both images and docs directories...')
image_file = fs_factory.fs_file.get(image_file_id)!
- image_file = fs_factory.fs_file.move(image_file_id, [images_dir_id, docs_dir_id])!
+ fs_factory.fs_file.move(image_file_id, [images_dir_id, docs_dir_id])!
+ image_file = fs_factory.fs_file.get(image_file_id)!
// 2. Rename a file
println('Renaming main.v to app.v...')
- code_file = fs_factory.fs_file.rename(code_file_id, 'app.v')!
+ fs_factory.fs_file.rename(code_file_id, 'app.v')!
+ code_file = fs_factory.fs_file.get(code_file_id)!
// 3. Update file metadata
println('Updating file metadata...')
diff --git a/examples/hero/herorpc/.gitignore b/examples/hero/herorpc/.gitignore
new file mode 100644
index 00000000..eb74fc10
--- /dev/null
+++ b/examples/hero/herorpc/.gitignore
@@ -0,0 +1 @@
+herorpc_example
\ No newline at end of file
diff --git a/examples/hero/herorpc/herorpc_example.vsh b/examples/hero/herorpc/herorpc_example.vsh
new file mode 100755
index 00000000..54352e04
--- /dev/null
+++ b/examples/hero/herorpc/herorpc_example.vsh
@@ -0,0 +1,22 @@
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
+
+import freeflowuniverse.herolib.hero.heromodels.rpc
+
+println('
+#to test the discover function:
+echo \'\{"jsonrpc":"2.0","method":"rpc.discover","params":[],"id":1\}\' \\
+ | nc -U /tmp/heromodels
+\'
+#to test interactively:
+
+nc -U /tmp/heromodels
+
+then e.g. do
+
+\{"jsonrpc":"2.0","method":"comment_set","params":{"comment":"Hello world!","parent":0,"author":42},"id":1\}
+
+needs to be on one line for openrpc to work
+
+')
+
+rpc.start()!
diff --git a/examples/hero/openapi/.gitignore b/examples/hero/openapi/.gitignore
deleted file mode 100644
index ada38f1b..00000000
--- a/examples/hero/openapi/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-actor
-server
diff --git a/examples/hero/openapi/README.md b/examples/hero/openapi/README.md
deleted file mode 100644
index 86387d37..00000000
--- a/examples/hero/openapi/README.md
+++ /dev/null
@@ -1,103 +0,0 @@
-# OpenAPI Server with Redis-Based RPC and Actor
-
-This project demonstrates how to implement a system consisting of:
- 1. An OpenAPI Server: Handles HTTP requests and translates them into procedure calls.
- 2. A Redis-Based RPC Processor: Acts as the communication layer between the server and the actor.
- 3. An Actor: Listens for RPC requests on a Redis queue and executes predefined procedures.
-
-## Features
- • OpenAPI server to manage HTTP requests.
- • Redis-based RPC mechanism for message passing.
- • Actor pattern for executing and responding to RPC tasks.
-
-## Setup Instructions
-
-Prerequisites
- • Redis installed and running on localhost:6379.
- • V programming language installed.
-
-Steps to Run
-
-1. OpenAPI Specification
-
-Place the OpenAPI JSON specification file at:
-
-`data/openapi.json`
-
-This file defines the API endpoints and their parameters.
-
-2. Start the Redis Server
-
-Ensure Redis is running locally:
-
-redis-server
-
-3. Start the OpenAPI Server
-
-Run the OpenAPI server:
-
-`server.vsh`
-
-The server listens on port 8080 by default.
-
-4. Start the Actor
-
-Run the actor service:
-
-`actor.vsh`
-
-The actor listens to the procedure_queue for RPC messages.
-
-Usage
-
-API Endpoints
-
-The API supports operations like:
- • Create a Pet: Adds a new pet.
- • List Pets: Lists all pets or limits results.
- • Get Pet by ID: Fetches a specific pet by ID.
- • Delete Pet: Removes a pet by ID.
- • Similar operations for users and orders.
-
-Use tools like curl, Postman, or a browser to interact with the endpoints.
-
-Example Requests
-
-Create a Pet
-
-curl -X POST http://localhost:8080/pets -d '{"name": "Buddy", "tag": "dog"}' -H "Content-Type: application/json"
-
-List Pets
-
-curl http://localhost:8080/pets
-
-## Code Overview
-
-1. OpenAPI Server
- • Reads the OpenAPI JSON file.
- • Maps HTTP requests to procedure calls using the operation ID.
- • Sends procedure calls to the Redis RPC queue.
-
-2. Redis-Based RPC
- • Implements a simple message queue using Redis.
- • Encodes requests as JSON strings for transport.
-
-3. Actor
- • Listens to the procedure_queue Redis queue.
- • Executes tasks like managing pets, orders, and users.
- • Responds with JSON-encoded results or errors.
-
-## Extending the System
-
-Add New Procedures
- 1. Define new methods in the Actor to handle tasks.
- 2. Add corresponding logic in the DataStore for storage operations.
- 3. Update the OpenAPI JSON file to expose new endpoints.
-
-Modify Data Models
- 1. Update the Pet, Order, and User structs as needed.
- 2. Adjust the DataStore methods to handle the changes.
-
-Troubleshooting
- • Redis Connection Issues: Ensure Redis is running and accessible on localhost:6379.
- • JSON Parsing Errors: Validate the input JSON against the OpenAPI specification.
diff --git a/examples/hero/openapi/actor b/examples/hero/openapi/actor
deleted file mode 100755
index 1197e6ec..00000000
Binary files a/examples/hero/openapi/actor and /dev/null differ
diff --git a/examples/hero/openapi/actor.vsh b/examples/hero/openapi/actor.vsh
deleted file mode 100755
index 15486bc0..00000000
--- a/examples/hero/openapi/actor.vsh
+++ /dev/null
@@ -1,233 +0,0 @@
-#!/usr/bin/env -S v -w -n -enable-globals run
-
-import os
-import time
-import veb
-import json
-import x.json2
-import net.http
-import freeflowuniverse.herolib.web.openapi
-import freeflowuniverse.herolib.hero.processor
-import freeflowuniverse.herolib.core.redisclient
-
-@[heap]
-struct Actor {
-mut:
- rpc redisclient.RedisRpc
- data_store DataStore
-}
-
-pub struct DataStore {
-mut:
- pets map[int]Pet
- orders map[int]Order
- users map[int]User
-}
-
-struct Pet {
- id int
- name string
- tag string
-}
-
-struct Order {
- id int
- pet_id int
- quantity int
- ship_date string
- status string
- complete bool
-}
-
-struct User {
- id int
- username string
- email string
- phone string
-}
-
-// Entry point for the actor
-fn main() {
- mut redis := redisclient.new('localhost:6379') or { panic(err) }
- mut rpc := redis.rpc_get('procedure_queue')
-
- mut actor := Actor{
- rpc: rpc
- data_store: DataStore{}
- }
-
- actor.listen() or { panic(err) }
-}
-
-// Actor listens to the Redis queue for method invocations
-fn (mut actor Actor) listen() ! {
- println('Actor started and listening for tasks...')
- for {
- actor.rpc.process(actor.handle_method)!
- time.sleep(time.millisecond * 100) // Prevent CPU spinning
- }
-}
-
-// Handle method invocations
-fn (mut actor Actor) handle_method(cmd string, data string) !string {
- param_anys := json2.raw_decode(data)!.arr()
- match cmd {
- 'listPets' {
- pets := if param_anys.len == 0 {
- actor.data_store.list_pets()
- } else {
- params := json.decode(ListPetParams, param_anys[0].str())!
- actor.data_store.list_pets(params)
- }
- return json.encode(pets)
- }
- 'createPet' {
- response := if param_anys.len == 0 {
- return error('at least data expected')
- } else if param_anys.len == 1 {
- payload := json.decode(NewPet, param_anys[0].str())!
- actor.data_store.create_pet(payload)
- } else {
- return error('expected 1 param, found too many')
- }
- // data := json.decode(NewPet, data) or { return error('Invalid pet data: $err') }
- // created_pet := actor.data_store.create_pet(pet)
- return json.encode(response)
- }
- 'getPet' {
- response := if param_anys.len == 0 {
- return error('at least data expected')
- } else if param_anys.len == 1 {
- payload := param_anys[0].int()
- actor.data_store.get_pet(payload)!
- } else {
- return error('expected 1 param, found too many')
- }
-
- return json.encode(response)
- }
- 'deletePet' {
- params := json.decode(map[string]int, data) or {
- return error('Invalid params: ${err}')
- }
- actor.data_store.delete_pet(params['petId']) or {
- return error('Pet not found: ${err}')
- }
- return json.encode({
- 'message': 'Pet deleted'
- })
- }
- 'listOrders' {
- orders := actor.data_store.list_orders()
- return json.encode(orders)
- }
- 'getOrder' {
- params := json.decode(map[string]int, data) or {
- return error('Invalid params: ${err}')
- }
- order := actor.data_store.get_order(params['orderId']) or {
- return error('Order not found: ${err}')
- }
- return json.encode(order)
- }
- 'deleteOrder' {
- params := json.decode(map[string]int, data) or {
- return error('Invalid params: ${err}')
- }
- actor.data_store.delete_order(params['orderId']) or {
- return error('Order not found: ${err}')
- }
- return json.encode({
- 'message': 'Order deleted'
- })
- }
- 'createUser' {
- user := json.decode(NewUser, data) or { return error('Invalid user data: ${err}') }
- created_user := actor.data_store.create_user(user)
- return json.encode(created_user)
- }
- else {
- return error('Unknown method: ${cmd}')
- }
- }
-}
-
-@[params]
-pub struct ListPetParams {
- limit u32
-}
-
-// DataStore methods for managing data
-fn (mut store DataStore) list_pets(params ListPetParams) []Pet {
- if params.limit > 0 {
- if params.limit >= store.pets.values().len {
- return store.pets.values()
- }
- return store.pets.values()[..params.limit]
- }
- return store.pets.values()
-}
-
-fn (mut store DataStore) create_pet(new_pet NewPet) Pet {
- id := store.pets.keys().len + 1
- pet := Pet{
- id: id
- name: new_pet.name
- tag: new_pet.tag
- }
- store.pets[id] = pet
- return pet
-}
-
-fn (mut store DataStore) get_pet(id int) !Pet {
- return store.pets[id] or { return error('Pet with id ${id} not found.') }
-}
-
-fn (mut store DataStore) delete_pet(id int) ! {
- if id in store.pets {
- store.pets.delete(id)
- return
- }
- return error('Pet not found')
-}
-
-fn (mut store DataStore) list_orders() []Order {
- return store.orders.values()
-}
-
-fn (mut store DataStore) get_order(id int) !Order {
- return store.orders[id] or { none }
-}
-
-fn (mut store DataStore) delete_order(id int) ! {
- if id in store.orders {
- store.orders.delete(id)
- return
- }
- return error('Order not found')
-}
-
-fn (mut store DataStore) create_user(new_user NewUser) User {
- id := store.users.keys().len + 1
- user := User{
- id: id
- username: new_user.username
- email: new_user.email
- phone: new_user.phone
- }
- store.users[id] = user
- return user
-}
-
-// NewPet struct for creating a pet
-struct NewPet {
- name string
- tag string
-}
-
-// NewUser struct for creating a user
-struct NewUser {
- username string
- email string
- phone string
-}
diff --git a/examples/hero/openapi/data/openapi.json b/examples/hero/openapi/data/openapi.json
deleted file mode 100644
index c5ab2d9d..00000000
--- a/examples/hero/openapi/data/openapi.json
+++ /dev/null
@@ -1,346 +0,0 @@
-{
- "openapi": "3.0.3",
- "info": {
- "title": "Pet Store API",
- "description": "A sample API for a pet store",
- "version": "1.0.0"
- },
- "servers": [
- {
- "url": "https://api.petstore.example.com/v1",
- "description": "Production server"
- },
- {
- "url": "https://staging.petstore.example.com/v1",
- "description": "Staging server"
- }
- ],
- "paths": {
- "/pets": {
- "get": {
- "summary": "List all pets",
- "operationId": "listPets",
- "parameters": [
- {
- "name": "limit",
- "in": "query",
- "description": "Maximum number of pets to return",
- "required": false,
- "schema": {
- "type": "integer",
- "format": "int32"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "A paginated list of pets",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Pets"
- }
- }
- }
- },
- "400": {
- "description": "Invalid request"
- }
- }
- },
- "post": {
- "summary": "Create a new pet",
- "operationId": "createPet",
- "requestBody": {
- "required": true,
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/NewPet"
- }
- }
- }
- },
- "responses": {
- "201": {
- "description": "Pet created",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Pet"
- }
- }
- }
- },
- "400": {
- "description": "Invalid input"
- }
- }
- }
- },
- "/pets/{petId}": {
- "get": {
- "summary": "Get a pet by ID",
- "operationId": "getPet",
- "parameters": [
- {
- "name": "petId",
- "in": "path",
- "description": "ID of the pet to retrieve",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "A pet",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Pet"
- }
- }
- }
- },
- "404": {
- "description": "Pet not found"
- }
- }
- },
- "delete": {
- "summary": "Delete a pet by ID",
- "operationId": "deletePet",
- "parameters": [
- {
- "name": "petId",
- "in": "path",
- "description": "ID of the pet to delete",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "204": {
- "description": "Pet deleted"
- },
- "404": {
- "description": "Pet not found"
- }
- }
- }
- },
- "/orders": {
- "get": {
- "summary": "List all orders",
- "operationId": "listOrders",
- "responses": {
- "200": {
- "description": "A list of orders",
- "content": {
- "application/json": {
- "schema": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/Order"
- }
- }
- }
- }
- }
- }
- }
- },
- "/orders/{orderId}": {
- "get": {
- "summary": "Get an order by ID",
- "operationId": "getOrder",
- "parameters": [
- {
- "name": "orderId",
- "in": "path",
- "description": "ID of the order to retrieve",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "An order",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/Order"
- }
- }
- }
- },
- "404": {
- "description": "Order not found"
- }
- }
- },
- "delete": {
- "summary": "Delete an order by ID",
- "operationId": "deleteOrder",
- "parameters": [
- {
- "name": "orderId",
- "in": "path",
- "description": "ID of the order to delete",
- "required": true,
- "schema": {
- "type": "integer",
- "format": "int64"
- }
- }
- ],
- "responses": {
- "204": {
- "description": "Order deleted"
- },
- "404": {
- "description": "Order not found"
- }
- }
- }
- },
- "/users": {
- "post": {
- "summary": "Create a user",
- "operationId": "createUser",
- "requestBody": {
- "required": true,
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/NewUser"
- }
- }
- }
- },
- "responses": {
- "201": {
- "description": "User created",
- "content": {
- "application/json": {
- "schema": {
- "$ref": "#/components/schemas/User"
- }
- }
- }
- }
- }
- }
- }
- },
- "components": {
- "schemas": {
- "Pet": {
- "type": "object",
- "required": ["id", "name"],
- "properties": {
- "id": {
- "type": "integer",
- "format": "int64"
- },
- "name": {
- "type": "string"
- },
- "tag": {
- "type": "string"
- }
- }
- },
- "NewPet": {
- "type": "object",
- "required": ["name"],
- "properties": {
- "name": {
- "type": "string"
- },
- "tag": {
- "type": "string"
- }
- }
- },
- "Pets": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/Pet"
- }
- },
- "Order": {
- "type": "object",
- "required": ["id", "petId", "quantity", "shipDate"],
- "properties": {
- "id": {
- "type": "integer",
- "format": "int64"
- },
- "petId": {
- "type": "integer",
- "format": "int64"
- },
- "quantity": {
- "type": "integer",
- "format": "int32"
- },
- "shipDate": {
- "type": "string",
- "format": "date-time"
- },
- "status": {
- "type": "string",
- "enum": ["placed", "approved", "delivered"]
- },
- "complete": {
- "type": "boolean"
- }
- }
- },
- "User": {
- "type": "object",
- "required": ["id", "username"],
- "properties": {
- "id": {
- "type": "integer",
- "format": "int64"
- },
- "username": {
- "type": "string"
- },
- "email": {
- "type": "string"
- },
- "phone": {
- "type": "string"
- }
- }
- },
- "NewUser": {
- "type": "object",
- "required": ["username"],
- "properties": {
- "username": {
- "type": "string"
- },
- "email": {
- "type": "string"
- },
- "phone": {
- "type": "string"
- }
- }
- }
- }
- }
-}
\ No newline at end of file
diff --git a/examples/hero/openapi/server b/examples/hero/openapi/server
deleted file mode 100755
index 86bac0a1..00000000
Binary files a/examples/hero/openapi/server and /dev/null differ
diff --git a/examples/hero/openapi/server.vsh b/examples/hero/openapi/server.vsh
deleted file mode 100755
index deff0734..00000000
--- a/examples/hero/openapi/server.vsh
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/usr/bin/env -S v -w -n -enable-globals run
-
-import os
-import time
-import veb
-import json
-import x.json2 { Any }
-import net.http
-import freeflowuniverse.herolib.data.jsonschema { Schema }
-import freeflowuniverse.herolib.web.openapi { Context, Request, Response, Server }
-import freeflowuniverse.herolib.hero.processor { ProcedureCall, ProcessParams, Processor }
-import freeflowuniverse.herolib.core.redisclient
-
-const spec_path = '${os.dir(@FILE)}/data/openapi.json'
-const spec_json = os.read_file(spec_path) or { panic(err) }
-
-// Main function to start the server
-fn main() {
- // Initialize the Redis client and RPC mechanism
- mut redis := redisclient.new('localhost:6379')!
- mut rpc := redis.rpc_get('procedure_queue')
-
- // Initialize the server
- mut server := &Server{
- specification: openapi.json_decode(spec_json)!
- handler: Handler{
- processor: Processor{
- rpc: rpc
- }
- }
- }
-
- // Start the server
- veb.run[Server, Context](mut server, 8080)
-}
-
-pub struct Handler {
-mut:
- processor Processor
-}
-
-fn (mut handler Handler) handle(request Request) !Response {
- // Convert incoming OpenAPI request to a procedure call
- mut params := []string{}
-
- if request.arguments.len > 0 {
- params = request.arguments.values().map(it.str()).clone()
- }
-
- if request.body != '' {
- params << request.body
- }
-
- if request.parameters.len != 0 {
- mut param_map := map[string]Any{} // Store parameters with correct types
-
- for param_name, param_value in request.parameters {
- operation_param := request.operation.parameters.filter(it.name == param_name)
- if operation_param.len > 0 {
- param_schema := operation_param[0].schema as Schema
- param_type := param_schema.typ
- param_format := param_schema.format
-
- // Convert parameter value to corresponding type
- match param_type {
- 'integer' {
- match param_format {
- 'int32' {
- param_map[param_name] = param_value.int() // Convert to int
- }
- 'int64' {
- param_map[param_name] = param_value.i64() // Convert to i64
- }
- else {
- param_map[param_name] = param_value.int() // Default to int
- }
- }
- }
- 'string' {
- param_map[param_name] = param_value // Already a string
- }
- 'boolean' {
- param_map[param_name] = param_value.bool() // Convert to bool
- }
- 'number' {
- match param_format {
- 'float' {
- param_map[param_name] = param_value.f32() // Convert to float
- }
- 'double' {
- param_map[param_name] = param_value.f64() // Convert to double
- }
- else {
- param_map[param_name] = param_value.f64() // Default to double
- }
- }
- }
- else {
- param_map[param_name] = param_value // Leave as string for unknown types
- }
- }
- } else {
- // If the parameter is not defined in the OpenAPI operation, skip or log it
- println('Unknown parameter: ${param_name}')
- }
- }
-
- // Encode the parameter map to JSON if needed
- params << json.encode(param_map.str())
- }
-
- call := ProcedureCall{
- method: request.operation.operation_id
- params: '[${params.join(',')}]' // Keep as a string since ProcedureCall expects a string
- }
-
- // Process the procedure call
- procedure_response := handler.processor.process(call, ProcessParams{
- timeout: 30 // Set timeout in seconds
- }) or {
- // Handle ProcedureError
- if err is processor.ProcedureError {
- return Response{
- status: http.status_from_int(err.code()) // Map ProcedureError reason to HTTP status code
- body: json.encode({
- 'error': err.msg()
- })
- }
- }
- return error('Unexpected error: ${err}')
- }
-
- // Convert returned procedure response to OpenAPI response
- return Response{
- status: http.Status.ok // Assuming success if no error
- body: procedure_response.result
- }
-}
diff --git a/examples/virt/heropods/heropods.vsh b/examples/virt/heropods/heropods.vsh
index 7fc57890..aafbdd12 100755
--- a/examples/virt/heropods/heropods.vsh
+++ b/examples/virt/heropods/heropods.vsh
@@ -13,7 +13,7 @@ println('=== HeroPods Refactored API Demo ===')
// Step 1: factory.new() now only creates a container definition/handle
// It does NOT create the actual container in the backend yet
mut container := factory.new(
- name: 'myalpine'
+ name: 'demo_alpine'
image: .custom
custom_image_name: 'alpine_3_20'
docker_url: 'docker.io/library/alpine:3.20'
diff --git a/examples/virt/heropods/runcommands.vsh b/examples/virt/heropods/runcommands.vsh
index f54fc43b..9352cc80 100644
--- a/examples/virt/heropods/runcommands.vsh
+++ b/examples/virt/heropods/runcommands.vsh
@@ -8,7 +8,7 @@ mut factory := heropods.new(
) or { panic('Failed to init ContainerFactory: ${err}') }
mut container := factory.new(
- name: 'myalpine'
+ name: 'alpine_demo'
image: .custom
custom_image_name: 'alpine_3_20'
docker_url: 'docker.io/library/alpine:3.20'
diff --git a/lib/builder/executor_crun.v b/lib/builder/executor_crun.v
index 990c5d0a..7d38504b 100644
--- a/lib/builder/executor_crun.v
+++ b/lib/builder/executor_crun.v
@@ -11,13 +11,22 @@ import freeflowuniverse.herolib.core.texttools
pub struct ExecutorCrun {
pub mut:
container_id string // container ID for crun
+ crun_root string // custom crun root directory
retry int = 1
debug bool = true
}
+// Helper method to get crun command with custom root
+fn (executor ExecutorCrun) crun_cmd(cmd string) string {
+ if executor.crun_root != '' {
+ return 'crun --root ${executor.crun_root} ${cmd}'
+ }
+ return 'crun ${cmd}'
+}
+
pub fn (mut executor ExecutorCrun) init() ! {
// Verify container exists and is running
- result := osal.exec(cmd: 'crun state ${executor.container_id}', stdout: false) or {
+ result := osal.exec(cmd: executor.crun_cmd('state ${executor.container_id}'), stdout: false) or {
return error('Container ${executor.container_id} not found or not accessible')
}
@@ -41,7 +50,7 @@ pub fn (mut executor ExecutorCrun) exec(args_ ExecArgs) !string {
console.print_debug('execute in container ${executor.container_id}: ${args.cmd}')
}
- mut cmd := 'crun exec ${executor.container_id} ${args.cmd}'
+ mut cmd := executor.crun_cmd('exec ${executor.container_id} ${args.cmd}')
if args.cmd.contains('\n') {
// For multiline commands, write to temp file first
temp_script := '/tmp/crun_script_${rand.uuid_v4()}.sh'
@@ -50,7 +59,7 @@ pub fn (mut executor ExecutorCrun) exec(args_ ExecArgs) !string {
// Copy script into container and execute
executor.file_write('/tmp/exec_script.sh', script_content)!
- cmd = 'crun exec ${executor.container_id} bash /tmp/exec_script.sh'
+ cmd = executor.crun_cmd('exec ${executor.container_id} bash /tmp/exec_script.sh')
}
res := osal.exec(cmd: cmd, stdout: args.stdout, debug: executor.debug)!
@@ -66,7 +75,7 @@ pub fn (mut executor ExecutorCrun) exec_interactive(args_ ExecArgs) ! {
args.cmd = 'bash /tmp/interactive_script.sh'
}
- cmd := 'crun exec -t ${executor.container_id} ${args.cmd}'
+ cmd := executor.crun_cmd('exec -t ${executor.container_id} ${args.cmd}')
console.print_debug(cmd)
osal.execute_interactive(cmd)!
}
@@ -82,7 +91,8 @@ pub fn (mut executor ExecutorCrun) file_write(path string, text string) ! {
defer { os.rm(temp_file) or {} }
// Use crun exec to copy file content
- cmd := 'cat ${temp_file} | crun exec -i ${executor.container_id} tee ${path} > /dev/null'
+ sbcmd := executor.crun_cmd('exec -i ${executor.container_id} tee ${path}')
+ cmd := 'cat ${temp_file} | ${sbcmd} > /dev/null'
osal.exec(cmd: cmd, stdout: false)!
}
diff --git a/lib/hero/herofs/README.md b/lib/hero/herofs/README.md
new file mode 100644
index 00000000..9e0d9f64
--- /dev/null
+++ b/lib/hero/herofs/README.md
@@ -0,0 +1,115 @@
+# HeroFS - Distributed Filesystem for HeroLib
+
+HeroFS is a distributed filesystem implementation built on top of HeroDB (Redis-based storage). It provides a virtual filesystem with support for files, directories, symbolic links, and binary data blobs.
+
+## Overview
+
+HeroFS implements a filesystem structure where:
+- **Fs**: Represents a filesystem as a top-level container
+- **FsDir**: Represents directories within a filesystem
+- **FsFile**: Represents files with support for multiple directory associations
+- **FsSymlink**: Represents symbolic links pointing to files or directories
+- **FsBlob**: Represents binary data chunks (up to 1MB) used as file content
+
+## Features
+
+- Distributed storage using Redis
+- Support for files, directories, and symbolic links
+- Blob-based file content storage with integrity verification
+- Multiple directory associations for files (similar to hard links)
+- Filesystem quotas and usage tracking
+- Metadata support for files
+- Efficient lookup mechanisms using Redis hashes
+
+## Installation
+
+HeroFS is part of HeroLib and is automatically available when using HeroLib.
+
+## Usage
+
+To use HeroFS, you need to create a filesystem factory:
+
+```v
+import freeflowuniverse.herolib.hero.herofs
+
+mut fs_factory := herofs.new()!
+```
+
+### Creating a Filesystem
+
+```v
+fs_id := fs_factory.fs.set(fs_factory.fs.new(
+ name: 'my_filesystem'
+ quota_bytes: 1000000000 // 1GB quota
+)!)!
+```
+
+### Working with Directories
+
+```v
+// Create root directory
+root_dir_id := fs_factory.fs_dir.set(fs_factory.fs_dir.new(
+ name: 'root'
+ fs_id: fs_id
+ parent_id: 0
+)!)!
+
+// Create subdirectory
+sub_dir_id := fs_factory.fs_dir.set(fs_factory.fs_dir.new(
+ name: 'documents'
+ fs_id: fs_id
+ parent_id: root_dir_id
+)!)!
+```
+
+### Working with Blobs
+
+```v
+// Create a blob with binary data
+blob_id := fs_factory.fs_blob.set(fs_factory.fs_blob.new(
+ data: content_bytes
+ mime_type: 'text/plain'
+)!)!
+```
+
+### Working with Files
+
+```v
+// Create a file
+file_id := fs_factory.fs_file.set(fs_factory.fs_file.new(
+ name: 'example.txt'
+ fs_id: fs_id
+ directories: [root_dir_id]
+ blobs: [blob_id]
+)!)!
+```
+
+### Working with Symbolic Links
+
+```v
+// Create a symbolic link to a file
+symlink_id := fs_factory.fs_symlink.set(fs_factory.fs_symlink.new(
+ name: 'example_link.txt'
+ fs_id: fs_id
+ parent_id: root_dir_id
+ target_id: file_id
+ target_type: .file
+)!)!
+```
+
+## API Reference
+
+The HeroFS module provides the following main components:
+
+- `FsFactory` - Main factory for accessing all filesystem components
+- `DBFs` - Filesystem operations
+- `DBFsDir` - Directory operations
+- `DBFsFile` - File operations
+- `DBFsSymlink` - Symbolic link operations
+- `DBFsBlob` - Binary data blob operations
+
+Each component provides CRUD operations and specialized methods for filesystem management.
+
+## Examples
+
+Check the `examples/hero/herofs/` directory for detailed usage examples.
diff --git a/lib/hero/herofs/fs.v b/lib/hero/herofs/fs.v
index eebe52ca..1faea93e 100644
--- a/lib/hero/herofs/fs.v
+++ b/lib/hero/herofs/fs.v
@@ -79,10 +79,22 @@ pub fn (mut self DBFs) new(args FsArg) !Fs {
}
pub fn (mut self DBFs) set(o Fs) !u32 {
- return self.db.set[Fs](o)!
+ id := self.db.set[Fs](o)!
+
+ // Store name -> id mapping for lookups
+ self.db.redis.hset('fs:names', o.name, id.str())!
+
+ return id
}
pub fn (mut self DBFs) delete(id u32) ! {
+ // Get the filesystem to retrieve its name
+ fs := self.get(id)!
+
+ // Remove name -> id mapping
+ self.db.redis.hdel('fs:names', fs.name)!
+
+ // Delete the filesystem
self.db.delete[Fs](id)!
}
diff --git a/lib/hero/herofs/fs_dir.v b/lib/hero/herofs/fs_dir.v
index 5eaa0d73..760517be 100644
--- a/lib/hero/herofs/fs_dir.v
+++ b/lib/hero/herofs/fs_dir.v
@@ -75,12 +75,12 @@ pub fn (mut self DBFsDir) set(o FsDir) !u32 {
path_key := '${o.fs_id}:${o.parent_id}:${o.name}'
self.db.redis.hset('fsdir:paths', path_key, id.str())!
- // Store in filesystem's directory list
- self.db.redis.sadd('fsdir:fs:${o.fs_id}', id.str())!
+ // Store in filesystem's directory list using hset
+ self.db.redis.hset('fsdir:fs:${o.fs_id}', id.str(), id.str())!
- // Store in parent's children list
+ // Store in parent's children list using hset
if o.parent_id > 0 {
- self.db.redis.sadd('fsdir:children:${o.parent_id}', id.str())!
+ self.db.redis.hset('fsdir:children:${o.parent_id}', id.str(), id.str())!
}
return id
@@ -90,8 +90,8 @@ pub fn (mut self DBFsDir) delete(id u32) ! {
// Get the directory info before deleting
dir := self.get(id)!
- // Check if directory has children
- children := self.db.redis.smembers('fsdir:children:${id}')!
+ // Check if directory has children using hkeys
+ children := self.db.redis.hkeys('fsdir:children:${id}')!
if children.len > 0 {
return error('Cannot delete directory ${dir.name} (ID: ${id}) because it has ${children.len} children')
}
@@ -100,12 +100,12 @@ pub fn (mut self DBFsDir) delete(id u32) ! {
path_key := '${dir.fs_id}:${dir.parent_id}:${dir.name}'
self.db.redis.hdel('fsdir:paths', path_key)!
- // Remove from filesystem's directory list
- self.db.redis.srem('fsdir:fs:${dir.fs_id}', id.str())!
+ // Remove from filesystem's directory list using hdel
+ self.db.redis.hdel('fsdir:fs:${dir.fs_id}', id.str())!
- // Remove from parent's children list
+ // Remove from parent's children list using hdel
if dir.parent_id > 0 {
- self.db.redis.srem('fsdir:children:${dir.parent_id}', id.str())!
+ self.db.redis.hdel('fsdir:children:${dir.parent_id}', id.str())!
}
// Delete the directory itself
@@ -139,7 +139,7 @@ pub fn (mut self DBFsDir) get_by_path(fs_id u32, parent_id u32, name string) !Fs
// Get all directories in a filesystem
pub fn (mut self DBFsDir) list_by_filesystem(fs_id u32) ![]FsDir {
- dir_ids := self.db.redis.smembers('fsdir:fs:${fs_id}')!
+ dir_ids := self.db.redis.hkeys('fsdir:fs:${fs_id}')!
mut dirs := []FsDir{}
for id_str in dir_ids {
dirs << self.get(id_str.u32())!
@@ -149,7 +149,7 @@ pub fn (mut self DBFsDir) list_by_filesystem(fs_id u32) ![]FsDir {
// Get children of a directory
pub fn (mut self DBFsDir) list_children(dir_id u32) ![]FsDir {
- child_ids := self.db.redis.smembers('fsdir:children:${dir_id}')!
+ child_ids := self.db.redis.hkeys('fsdir:children:${dir_id}')!
mut dirs := []FsDir{}
for id_str in child_ids {
dirs << self.get(id_str.u32())!
@@ -159,8 +159,8 @@ pub fn (mut self DBFsDir) list_children(dir_id u32) ![]FsDir {
// Check if a directory has children
pub fn (mut self DBFsDir) has_children(dir_id u32) !bool {
- count := self.db.redis.scard('fsdir:children:${dir_id}')!
- return count > 0
+ keys := self.db.redis.hkeys('fsdir:children:${dir_id}')!
+ return keys.len > 0
}
// Rename a directory
@@ -196,7 +196,7 @@ pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) !u32 {
// Remove from old parent's children list
if dir.parent_id > 0 {
- self.db.redis.srem('fsdir:children:${dir.parent_id}', id.str())!
+ self.db.redis.hdel('fsdir:children:${dir.parent_id}', id.str())!
}
// Update parent
diff --git a/lib/hero/herofs/fs_file.v b/lib/hero/herofs/fs_file.v
index 24ad238c..cdfb1ee5 100644
--- a/lib/hero/herofs/fs_file.v
+++ b/lib/hero/herofs/fs_file.v
@@ -181,16 +181,16 @@ pub fn (mut self DBFsFile) set(o FsFile) !u32 {
path_key := '${dir_id}:${o.name}'
self.db.redis.hset('fsfile:paths', path_key, id.str())!
- // Add to directory's file list
- self.db.redis.sadd('fsfile:dir:${dir_id}', id.str())!
+ // Add to directory's file list using hset
+ self.db.redis.hset('fsfile:dir:${dir_id}', id.str(), id.str())!
}
- // Store in filesystem's file list
- self.db.redis.sadd('fsfile:fs:${o.fs_id}', id.str())!
+ // Store in filesystem's file list using hset
+ self.db.redis.hset('fsfile:fs:${o.fs_id}', id.str(), id.str())!
- // Store by mimetype
+ // Store by mimetype using hset
if o.mime_type != '' {
- self.db.redis.sadd('fsfile:mime:${o.mime_type}', id.str())!
+ self.db.redis.hset('fsfile:mime:${o.mime_type}', id.str(), id.str())!
}
return id
@@ -206,16 +206,16 @@ pub fn (mut self DBFsFile) delete(id u32) ! {
path_key := '${dir_id}:${file.name}'
self.db.redis.hdel('fsfile:paths', path_key)!
- // Remove from directory's file list
- self.db.redis.srem('fsfile:dir:${dir_id}', id.str())!
+ // Remove from directory's file list using hdel
+ self.db.redis.hdel('fsfile:dir:${dir_id}', id.str())!
}
- // Remove from filesystem's file list
- self.db.redis.srem('fsfile:fs:${file.fs_id}', id.str())!
+ // Remove from filesystem's file list using hdel
+ self.db.redis.hdel('fsfile:fs:${file.fs_id}', id.str())!
- // Remove from mimetype index
+ // Remove from mimetype index using hdel
if file.mime_type != '' {
- self.db.redis.srem('fsfile:mime:${file.mime_type}', id.str())!
+ self.db.redis.hdel('fsfile:mime:${file.mime_type}', id.str())!
}
// Delete the file itself
@@ -249,7 +249,7 @@ pub fn (mut self DBFsFile) get_by_path(dir_id u32, name string) !FsFile {
// List files in a directory
pub fn (mut self DBFsFile) list_by_directory(dir_id u32) ![]FsFile {
- file_ids := self.db.redis.smembers('fsfile:dir:${dir_id}')!
+ file_ids := self.db.redis.hkeys('fsfile:dir:${dir_id}')!
mut files := []FsFile{}
for id_str in file_ids {
files << self.get(id_str.u32())!
@@ -259,7 +259,7 @@ pub fn (mut self DBFsFile) list_by_directory(dir_id u32) ![]FsFile {
// List files in a filesystem
pub fn (mut self DBFsFile) list_by_filesystem(fs_id u32) ![]FsFile {
- file_ids := self.db.redis.smembers('fsfile:fs:${fs_id}')!
+ file_ids := self.db.redis.hkeys('fsfile:fs:${fs_id}')!
mut files := []FsFile{}
for id_str in file_ids {
files << self.get(id_str.u32())!
@@ -269,7 +269,7 @@ pub fn (mut self DBFsFile) list_by_filesystem(fs_id u32) ![]FsFile {
// List files by mime type
pub fn (mut self DBFsFile) list_by_mime_type(mime_type string) ![]FsFile {
- file_ids := self.db.redis.smembers('fsfile:mime:${mime_type}')!
+ file_ids := self.db.redis.hkeys('fsfile:mime:${mime_type}')!
mut files := []FsFile{}
for id_str in file_ids {
files << self.get(id_str.u32())!
@@ -358,7 +358,7 @@ pub fn (mut self DBFsFile) move(id u32, new_directories []u32) !u32 {
for dir_id in file.directories {
path_key := '${dir_id}:${file.name}'
self.db.redis.hdel('fsfile:paths', path_key)!
- self.db.redis.srem('fsfile:dir:${dir_id}', id.str())!
+ self.db.redis.hdel('fsfile:dir:${dir_id}', id.str())!
}
// Update directories
diff --git a/lib/hero/herofs/fs_symlink.v b/lib/hero/herofs/fs_symlink.v
index c4142371..fdd17a57 100644
--- a/lib/hero/herofs/fs_symlink.v
+++ b/lib/hero/herofs/fs_symlink.v
@@ -109,15 +109,15 @@ pub fn (mut self DBFsSymlink) set(o FsSymlink) !u32 {
path_key := '${o.parent_id}:${o.name}'
self.db.redis.hset('fssymlink:paths', path_key, id.str())!
- // Add to parent's symlinks list
- self.db.redis.sadd('fssymlink:parent:${o.parent_id}', id.str())!
+ // Add to parent's symlinks list using hset
+ self.db.redis.hset('fssymlink:parent:${o.parent_id}', id.str(), id.str())!
- // Store in filesystem's symlink list
- self.db.redis.sadd('fssymlink:fs:${o.fs_id}', id.str())!
+ // Store in filesystem's symlink list using hset
+ self.db.redis.hset('fssymlink:fs:${o.fs_id}', id.str(), id.str())!
- // Store in target's referrers list
+ // Store in target's referrers list using hset
target_key := '${o.target_type}:${o.target_id}'
- self.db.redis.sadd('fssymlink:target:${target_key}', id.str())!
+ self.db.redis.hset('fssymlink:target:${target_key}', id.str(), id.str())!
return id
}
@@ -130,15 +130,15 @@ pub fn (mut self DBFsSymlink) delete(id u32) ! {
path_key := '${symlink.parent_id}:${symlink.name}'
self.db.redis.hdel('fssymlink:paths', path_key)!
- // Remove from parent's symlinks list
- self.db.redis.srem('fssymlink:parent:${symlink.parent_id}', id.str())!
+ // Remove from parent's symlinks list using hdel
+ self.db.redis.hdel('fssymlink:parent:${symlink.parent_id}', id.str())!
- // Remove from filesystem's symlink list
- self.db.redis.srem('fssymlink:fs:${symlink.fs_id}', id.str())!
+ // Remove from filesystem's symlink list using hdel
+ self.db.redis.hdel('fssymlink:fs:${symlink.fs_id}', id.str())!
- // Remove from target's referrers list
+ // Remove from target's referrers list using hdel
target_key := '${symlink.target_type}:${symlink.target_id}'
- self.db.redis.srem('fssymlink:target:${target_key}', id.str())!
+ self.db.redis.hdel('fssymlink:target:${target_key}', id.str())!
// Delete the symlink itself
self.db.delete[FsSymlink](id)!
@@ -171,7 +171,7 @@ pub fn (mut self DBFsSymlink) get_by_path(parent_id u32, name string) !FsSymlink
// List symlinks in a parent directory
pub fn (mut self DBFsSymlink) list_by_parent(parent_id u32) ![]FsSymlink {
- symlink_ids := self.db.redis.smembers('fssymlink:parent:${parent_id}')!
+ symlink_ids := self.db.redis.hkeys('fssymlink:parent:${parent_id}')!
mut symlinks := []FsSymlink{}
for id_str in symlink_ids {
symlinks << self.get(id_str.u32())!
@@ -181,7 +181,7 @@ pub fn (mut self DBFsSymlink) list_by_parent(parent_id u32) ![]FsSymlink {
// List symlinks in a filesystem
pub fn (mut self DBFsSymlink) list_by_filesystem(fs_id u32) ![]FsSymlink {
- symlink_ids := self.db.redis.smembers('fssymlink:fs:${fs_id}')!
+ symlink_ids := self.db.redis.hkeys('fssymlink:fs:${fs_id}')!
mut symlinks := []FsSymlink{}
for id_str in symlink_ids {
symlinks << self.get(id_str.u32())!
@@ -192,7 +192,7 @@ pub fn (mut self DBFsSymlink) list_by_filesystem(fs_id u32) ![]FsSymlink {
// List symlinks pointing to a target
pub fn (mut self DBFsSymlink) list_by_target(target_type SymlinkTargetType, target_id u32) ![]FsSymlink {
target_key := '${target_type}:${target_id}'
- symlink_ids := self.db.redis.smembers('fssymlink:target:${target_key}')!
+ symlink_ids := self.db.redis.hkeys('fssymlink:target:${target_key}')!
mut symlinks := []FsSymlink{}
for id_str in symlink_ids {
symlinks << self.get(id_str.u32())!
@@ -231,8 +231,8 @@ pub fn (mut self DBFsSymlink) move(id u32, new_parent_id u32) !u32 {
old_path_key := '${symlink.parent_id}:${symlink.name}'
self.db.redis.hdel('fssymlink:paths', old_path_key)!
- // Remove from old parent's symlinks list
- self.db.redis.srem('fssymlink:parent:${symlink.parent_id}', id.str())!
+ // Remove from old parent's symlinks list using hdel
+ self.db.redis.hdel('fssymlink:parent:${symlink.parent_id}', id.str())!
// Update parent
symlink.parent_id = new_parent_id
@@ -260,7 +260,7 @@ pub fn (mut self DBFsSymlink) redirect(id u32, new_target_id u32, new_target_typ
// Remove from old target's referrers list
old_target_key := '${symlink.target_type}:${symlink.target_id}'
- self.db.redis.srem('fssymlink:target:${old_target_key}', id.str())!
+ self.db.redis.hdel('fssymlink:target:${old_target_key}', id.str())!
// Update target
symlink.target_id = new_target_id
diff --git a/lib/hero/herofs/specs.md b/lib/hero/herofs/specs.md
new file mode 100644
index 00000000..0aad6d0c
--- /dev/null
+++ b/lib/hero/herofs/specs.md
@@ -0,0 +1,289 @@
+# HeroFS Specifications
+
+This document provides detailed specifications for the HeroFS distributed filesystem implementation.
+
+## Architecture Overview
+
+HeroFS is built on top of HeroDB, which uses Redis as its storage backend. The filesystem is implemented as a collection of interconnected data structures that represent the various components of a filesystem:
+
+1. **Fs** - Filesystem container
+2. **FsDir** - Directories
+3. **FsFile** - Files
+4. **FsSymlink** - Symbolic links
+5. **FsBlob** - Binary data chunks
+
+All components inherit from the `Base` struct, which provides common fields like ID, name, description, timestamps, security policies, tags, and comments.
+
+## Filesystem (Fs)
+
+The `Fs` struct represents a filesystem as a top-level container:
+
+```v
+@[heap]
+pub struct Fs {
+ db.Base
+pub mut:
+ name string
+ group_id u32 // Associated group for permissions
+ root_dir_id u32 // ID of root directory
+ quota_bytes u64 // Storage quota in bytes
+ used_bytes u64 // Current usage in bytes
+}
+```
+
+### Key Features
+
+- **Name-based identification**: Filesystems can be retrieved by name via an efficient Redis hash lookup
+- **Quota management**: Each filesystem has a storage quota and tracks current usage
+- **Root directory**: Each filesystem has a root directory ID that serves as the entry point
+- **Group association**: Filesystems can be associated with groups for permission management
+
+### Methods
+
+- `new()`: Create a new filesystem instance
+- `set()`: Save filesystem to database
+- `get()`: Retrieve filesystem by ID
+- `get_by_name()`: Retrieve filesystem by name
+- `delete()`: Remove filesystem from database
+- `exist()`: Check if filesystem exists
+- `list()`: List all filesystems
+- `increase_usage()`: Increase used bytes counter
+- `decrease_usage()`: Decrease used bytes counter
+- `check_quota()`: Verify if additional bytes would exceed quota
+
+## Directory (FsDir)
+
+The `FsDir` struct represents a directory in a filesystem:
+
+```v
+@[heap]
+pub struct FsDir {
+ db.Base
+pub mut:
+ name string
+ fs_id u32 // Associated filesystem
+ parent_id u32 // Parent directory ID (0 for root)
+}
+```
+
+### Key Features
+
+- **Hierarchical structure**: Directories form a tree structure with parent-child relationships
+- **Path-based identification**: Efficient lookup by filesystem ID, parent ID, and name
+- **Children management**: Directories automatically track their children through Redis hashes
+- **Cross-filesystem isolation**: Directories are bound to a specific filesystem
+
+### Methods
+
+- `new()`: Create a new directory instance
+- `set()`: Save directory to database and update indices
+- `get()`: Retrieve directory by ID
+- `delete()`: Remove directory (fails if it has children)
+- `exist()`: Check if directory exists
+- `list()`: List all directories
+- `get_by_path()`: Retrieve directory by path components
+- `list_by_filesystem()`: List directories in a filesystem
+- `list_children()`: List child directories
+- `has_children()`: Check if directory has children
+- `rename()`: Rename directory
+- `move()`: Move directory to a new parent
+
+## File (FsFile)
+
+The `FsFile` struct represents a file in a filesystem:
+
+```v
+@[heap]
+pub struct FsFile {
+ db.Base
+pub mut:
+ name string
+ fs_id u32 // Associated filesystem
+ directories []u32 // Directory IDs where this file exists
+ blobs []u32 // IDs of file content blobs
+ size_bytes u64
+ mime_type string // e.g., "image/png"
+ checksum string // e.g., SHA256 checksum of the file
+ accessed_at i64
+ metadata map[string]string // Custom metadata
+}
+```
+
+### Key Features
+
+- **Multiple directory associations**: Files can exist in multiple directories (similar to hard links in Linux)
+- **Blob-based content**: File content is stored as references to FsBlob objects
+- **Size tracking**: Files track their total size in bytes
+- **MIME type support**: Files store their MIME type for content identification
+- **Checksum verification**: Files can store checksums for integrity verification
+- **Access timestamp**: Tracks when the file was last accessed
+- **Custom metadata**: Files support custom key-value metadata
+
+### Methods
+
+- `new()`: Create a new file instance
+- `set()`: Save file to database and update indices
+- `get()`: Retrieve file by ID
+- `delete()`: Remove file and update all indices
+- `exist()`: Check if file exists
+- `list()`: List all files
+- `get_by_path()`: Retrieve file by directory and name
+- `list_by_directory()`: List files in a directory
+- `list_by_filesystem()`: List files in a filesystem
+- `list_by_mime_type()`: List files by MIME type
+- `append_blob()`: Add a new blob to the file
+- `update_accessed()`: Update accessed timestamp
+- `update_metadata()`: Update file metadata
+- `rename()`: Rename file (affects all directories)
+- `move()`: Move file to different directories
+
+## Symbolic Link (FsSymlink)
+
+The `FsSymlink` struct represents a symbolic link in a filesystem:
+
+```v
+@[heap]
+pub struct FsSymlink {
+ db.Base
+pub mut:
+ name string
+ fs_id u32 // Associated filesystem
+ parent_id u32 // Parent directory ID
+ target_id u32 // ID of target file or directory
+ target_type SymlinkTargetType
+}
+
+pub enum SymlinkTargetType {
+ file
+ directory
+}
+```
+
+### Key Features
+
+- **Target type specification**: Symlinks can point to either files or directories
+- **Cross-filesystem protection**: Symlinks cannot point to targets in different filesystems
+- **Referrer tracking**: Targets know which symlinks point to them
+- **Broken link detection**: Symlinks can be checked for validity
+
+### Methods
+
+- `new()`: Create a new symbolic link instance
+- `set()`: Save symlink to database and update indices
+- `get()`: Retrieve symlink by ID
+- `delete()`: Remove symlink and update all indices
+- `exist()`: Check if symlink exists
+- `list()`: List all symlinks
+- `get_by_path()`: Retrieve symlink by parent directory and name
+- `list_by_parent()`: List symlinks in a parent directory
+- `list_by_filesystem()`: List symlinks in a filesystem
+- `list_by_target()`: List symlinks pointing to a target
+- `rename()`: Rename symlink
+- `move()`: Move symlink to a new parent directory
+- `redirect()`: Change symlink target
+- `resolve()`: Get the target ID of a symlink
+- `is_broken()`: Check if symlink target exists
+
+## Binary Data Blob (FsBlob)
+
+The `FsBlob` struct represents binary data chunks:
+
+```v
+@[heap]
+pub struct FsBlob {
+ db.Base
+pub mut:
+	hash string // BLAKE3 hash of content, truncated to 192 bits
+ data []u8 // Binary data (max 1MB)
+ size_bytes int // Size in bytes
+ created_at i64
+ mime_type string // MIME type
+ encoding string // Encoding type
+}
+```
+
+### Key Features
+
+- **Content-based addressing**: Blobs are identified by their BLAKE3 hash (first 192 bits)
+- **Size limit**: Blobs are limited to 1MB to ensure efficient storage and retrieval
+- **Integrity verification**: Built-in hash verification for data integrity
+- **MIME type and encoding**: Blobs store their content type information
+- **Deduplication**: Identical content blobs are automatically deduplicated
+
+### Methods
+
+- `new()`: Create a new blob instance
+- `set()`: Save blob to database (returns existing ID if content already exists)
+- `get()`: Retrieve blob by ID
+- `delete()`: Remove blob from database
+- `exist()`: Check if blob exists
+- `list()`: List all blobs
+- `get_by_hash()`: Retrieve blob by content hash
+- `exists_by_hash()`: Check if blob exists by content hash
+- `verify_integrity()`: Verify blob data integrity against stored hash
+- `calculate_hash()`: Calculate BLAKE3 hash of blob data
+
+## Storage Mechanisms
+
+HeroFS uses Redis hashes (HSET/HDEL/HKEYS) extensively for efficient indexing and lookup:
+
+### Filesystem Indices
+- `fs:names` - Maps filesystem names to IDs
+- `fsdir:paths` - Maps directory path components to IDs
+- `fsdir:fs:${fs_id}` - Lists directories in a filesystem
+- `fsdir:children:${dir_id}` - Lists children of a directory
+- `fsfile:paths` - Maps file paths (directory:name) to IDs
+- `fsfile:dir:${dir_id}` - Lists files in a directory
+- `fsfile:fs:${fs_id}` - Lists files in a filesystem
+- `fsfile:mime:${mime_type}` - Lists files by MIME type
+- `fssymlink:paths` - Maps symlink paths (parent:name) to IDs
+- `fssymlink:parent:${parent_id}` - Lists symlinks in a parent directory
+- `fssymlink:fs:${fs_id}` - Lists symlinks in a filesystem
+- `fssymlink:target:${target_type}:${target_id}` - Lists symlinks pointing to a target
+- `fsblob:hashes` - Maps content hashes to blob IDs
+
+### Data Serialization
+
+All HeroFS components use the HeroLib encoder for serialization:
+
+- Version tag (u8) is stored first
+- All fields are serialized in a consistent order
+- Deserialization follows the exact same order
+- Type safety is maintained through V's type system
+
+## Special Features
+
+### Hard Links
+Files can be associated with multiple directories through the `directories` field, allowing for hard link-like behavior.
+
+### Deduplication
+Blobs are automatically deduplicated based on their content hash. When creating a new blob with identical content to an existing one, the existing ID is returned.
+
+### Quota Management
+Filesystems track their storage usage and can enforce quotas to prevent overconsumption.
+
+### Metadata Support
+Files support custom metadata as key-value pairs, allowing for flexible attribute storage.
+
+### Cross-Component Validation
+When creating or modifying components, HeroFS validates references to other components:
+- Directory parent must exist
+- File directories must exist
+- File blobs must exist
+- Symlink parent must exist
+- Symlink target must exist and match target type
+
+## Security Model
+
+HeroFS inherits the security model from HeroDB:
+- Each component has a `securitypolicy` field referencing a SecurityPolicy object
+- Components can have associated tags for categorization
+- Components can have associated comments for documentation
+
+## Performance Considerations
+
+- All indices are stored as Redis hashes for O(1) field lookup performance
+- Blob deduplication reduces storage requirements
+- Multiple directory associations allow efficient file organization
+- Content-based addressing enables easy integrity verification
+- Factory pattern provides easy access to all filesystem components
diff --git a/lib/hero/herohandlers/README.md b/lib/hero/herohandlers/README.md
deleted file mode 100644
index 57153d7c..00000000
--- a/lib/hero/herohandlers/README.md
+++ /dev/null
@@ -1,130 +0,0 @@
-# HeroModels OpenRPC Server
-
-This module provides an OpenRPC server for HeroModels that runs over Unix domain sockets. It exposes comment management functionality through a JSON-RPC 2.0 interface.
-
-## Features
-
-- **Unix Socket Communication**: Efficient local communication via Unix domain sockets
-- **JSON-RPC 2.0 Protocol**: Standard JSON-RPC 2.0 implementation
-- **Comment Management**: Full CRUD operations for comments
-- **OpenRPC Specification**: Auto-generated OpenRPC spec via `discover` method
-- **Concurrent Handling**: Multiple client connections supported
-
-## API Methods
-
-### comment_get
-Retrieve comments by ID, author, or parent.
-
-**Parameters:**
-- `id` (optional): Comment ID to retrieve
-- `author` (optional): Author ID to filter by
-- `parent` (optional): Parent comment ID to filter by
-
-**Returns:** Comment object or array of comments
-
-### comment_set
-Create a new comment.
-
-**Parameters:**
-- `comment`: Comment text content
-- `parent`: Parent comment ID (0 for top-level)
-- `author`: Author user ID
-
-**Returns:** Object with created comment ID
-
-### comment_delete
-Delete a comment by ID.
-
-**Parameters:**
-- `id`: Comment ID to delete
-
-**Returns:** Success status and deleted comment ID
-
-### comment_list
-List all comment IDs.
-
-**Parameters:** None
-
-**Returns:** Array of all comment IDs
-
-### discover
-Get the OpenRPC specification for this service.
-
-**Parameters:** None
-
-**Returns:** Complete OpenRPC specification object
-
-## Usage
-
-### Starting the Server
-
-```v
-import freeflowuniverse.herolib.hero.heromodels.openrpc
-
-mut server := openrpc.new_rpc_server(socket_path: '/tmp/heromodels')!
-server.start()! // Blocks and serves requests
-```
-
-### Example Client
-
-```v
-import net.unix
-import json
-import freeflowuniverse.herolib.hero.heromodels.openrpc
-
-// Connect to server
-mut conn := unix.connect_stream('/tmp/heromodels')!
-
-// Create a comment
-request := openrpc.JsonRpcRequest{
- jsonrpc: '2.0'
- method: 'comment_set'
- params: json.encode({
- 'comment': 'Hello World'
- 'parent': 0
- 'author': 1
- })
- id: 1
-}
-
-// Send request
-conn.write_string(json.encode(request))!
-
-// Read response
-mut buffer := []u8{len: 4096}
-bytes_read := conn.read(mut buffer)!
-response := buffer[..bytes_read].bytestr()
-```
-
-## Files
-
-- `server.v` - Main RPC server implementation
-- `types.v` - JSON-RPC and parameter type definitions
-- `comment.v` - Comment-specific RPC method implementations
-- `discover.v` - OpenRPC specification generation
-- `example.vsh` - Server example script
-- `client_example.vsh` - Client example script
-
-## Running Examples
-
-Start the server:
-```bash
-vrun lib/hero/heromodels/openrpc/example.vsh
-```
-
-Test with client (in another terminal):
-```bash
-vrun lib/hero/heromodels/openrpc/client_example.vsh
-```
-
-## Dependencies
-
-- Redis (for data storage via heromodels)
-- Unix domain socket support
-- JSON encoding/decoding
-
-## Socket Path
-
-Default socket path: `/tmp/heromodels`
-
-The socket file is automatically cleaned up when the server starts and stops.
\ No newline at end of file
diff --git a/lib/hero/herohandlers/handler.v b/lib/hero/herohandlers/handler.v
deleted file mode 100644
index fc5ee34d..00000000
--- a/lib/hero/herohandlers/handler.v
+++ /dev/null
@@ -1,75 +0,0 @@
-module openrpc
-
-import json
-import freeflowuniverse.herolib.schemas.openrpc
-import freeflowuniverse.herolib.hero.heromodels
-import freeflowuniverse.herolib.schemas.jsonrpc
-import os
-
-const openrpc_path = os.join_path(os.dir(@FILE), 'openrpc.json')
-
-pub fn new_heromodels_handler() !openrpc.Handler {
- mut openrpc_handler := openrpc.Handler {
- specification: openrpc.new(path: openrpc_path)!
- }
-
- openrpc_handler.register_procedure_handle('comment_get', comment_get)
- openrpc_handler.register_procedure_handle('comment_set', comment_set)
- openrpc_handler.register_procedure_handle('comment_delete', comment_delete)
- openrpc_handler.register_procedure_handle('comment_list', comment_list)
-
- openrpc_handler.register_procedure_handle('calendar_get', calendar_get)
- openrpc_handler.register_procedure_handle('calendar_set', calendar_set)
- openrpc_handler.register_procedure_handle('calendar_delete', calendar_delete)
- openrpc_handler.register_procedure_handle('calendar_list', calendar_list)
-
- return openrpc_handler
-}
-
-pub fn comment_get(request jsonrpc.Request) !jsonrpc.Response {
- payload := jsonrpc.decode_payload[u32](request.params) or { return jsonrpc.invalid_params }
- result := heromodels.comment_get(payload) or { return jsonrpc.internal_error }
- return jsonrpc.new_response(request.id, json.encode(result))
-}
-
-pub fn comment_set(request jsonrpc.Request) !jsonrpc.Response{
- payload := jsonrpc.decode_payload[heromodels.CommentArg](request.params) or { return jsonrpc.invalid_params }
- return jsonrpc.new_response(request.id, heromodels.comment_set(payload)!.str())
-}
-
-pub fn comment_delete(request jsonrpc.Request) !jsonrpc.Response {
- payload := jsonrpc.decode_payload[u32](request.params) or { return jsonrpc.invalid_params }
- return jsonrpc.new_response(request.id, '')
-}
-
-pub fn comment_list(request jsonrpc.Request) !jsonrpc.Response {
- result := heromodels.list[heromodels.Comment]() or { return jsonrpc.internal_error }
- return jsonrpc.new_response(request.id, json.encode(result))
-}
-
-pub fn calendar_get(request jsonrpc.Request) !jsonrpc.Response {
- payload := jsonrpc.decode_payload[u32](request.params) or { return jsonrpc.invalid_params }
- result := heromodels.get[heromodels.Calendar](payload) or { return jsonrpc.internal_error }
- return jsonrpc.new_response(request.id, json.encode(result))
-}
-
-pub fn calendar_set(request jsonrpc.Request) !jsonrpc.Response{
- mut payload := json.decode(heromodels.Calendar, request.params) or {
- return jsonrpc.invalid_params }
- id := heromodels.set[heromodels.Calendar](mut payload) or {
- println('error setting calendar $err')
- return jsonrpc.internal_error
- }
- return jsonrpc.new_response(request.id, id.str())
-}
-
-pub fn calendar_delete(request jsonrpc.Request) !jsonrpc.Response {
- payload := jsonrpc.decode_payload[u32](request.params) or { return jsonrpc.invalid_params }
- heromodels.delete[heromodels.Calendar](payload) or { return jsonrpc.internal_error }
- return jsonrpc.new_response(request.id, '')
-}
-
-pub fn calendar_list(request jsonrpc.Request) !jsonrpc.Response {
- result := heromodels.list[heromodels.Calendar]() or { return jsonrpc.internal_error }
- return jsonrpc.new_response(request.id, json.encode(result))
-}
\ No newline at end of file
diff --git a/lib/hero/herohandlers/handler_comment.v b/lib/hero/herohandlers/handler_comment.v
deleted file mode 100644
index 52f90072..00000000
--- a/lib/hero/herohandlers/handler_comment.v
+++ /dev/null
@@ -1,110 +0,0 @@
-module openrpc
-
-import json
-import freeflowuniverse.herolib.hero.heromodels
-
-// Comment-specific argument structures
-@[params]
-pub struct CommentGetArgs {
-pub mut:
- id ?u32
- author ?u32
- parent ?u32
-}
-
-@[params]
-pub struct CommentDeleteArgs {
-pub mut:
- id u32
-}
-
-// // comment_get retrieves comments based on the provided arguments
-// pub fn comment_get(params string) !string {
-// // Handle empty params
-// if params == 'null' || params == '{}' {
-// return error('No valid search criteria provided. Please specify id, author, or parent.')
-// }
-
-// args := json.decode(CommentGetArgs, params)!
-
-// // If ID is provided, get specific comment
-// if id := args.id {
-// comment := heromodels.comment_get(id)!
-// return json.encode(comment)
-// }
-
-// // If author is provided, find comments by author
-// if author := args.author {
-// return get_comments_by_author(author)!
-// }
-
-// // If parent is provided, find child comments
-// if parent := args.parent {
-// return get_comments_by_parent(parent)!
-// }
-
-// return error('No valid search criteria provided. Please specify id, author, or parent.')
-// }
-
-// // comment_set creates or updates a comment
-// pub fn comment_set(params string) !string {
-// comment_arg := json.decode(heromodels.CommentArgExtended, params)!
-// id := heromodels.comment_set(comment_arg)!
-// return json.encode({'id': id})
-// }
-
-// // comment_delete removes a comment by ID
-// pub fn comment_delete(params string) !string {
-// args := json.decode(CommentDeleteArgs, params)!
-
-// // Check if comment exists
-// if !heromodels.exists[heromodels.Comment](args.id)! {
-// return error('Comment with id ${args.id} does not exist')
-// }
-
-// // Delete using core method
-// heromodels.delete[heromodels.Comment](args.id)!
-
-// result_json := '{"success": true, "id": ${args.id}}'
-// return result_json
-// }
-
-// // comment_list returns all comment IDs
-// pub fn comment_list() !string {
-// comments := heromodels.list[heromodels.Comment]()!
-// mut ids := []u32{}
-
-// for comment in comments {
-// ids << comment.id
-// }
-
-// return json.encode(ids)
-// }
-
-// // Helper function to get comments by author
-// fn get_comments_by_author(author u32) !string {
-// all_comments := heromodels.list[heromodels.Comment]()!
-// mut matching_comments := []heromodels.Comment{}
-
-// for comment in all_comments {
-// if comment.author == author {
-// matching_comments << comment
-// }
-// }
-
-// return json.encode(matching_comments)
-// }
-
-// // Helper function to get comments by parent
-// fn get_comments_by_parent(parent u32) !string {
-// all_comments := heromodels.list[heromodels.Comment]()!
-// mut matching_comments := []heromodels.Comment{}
-
-// for comment in all_comments {
-// if comment.parent == parent {
-// matching_comments << comment
-// }
-// }
-
-// return json.encode(matching_comments)
-// }
\ No newline at end of file
diff --git a/lib/hero/herohandlers/handler_test.v b/lib/hero/herohandlers/handler_test.v
deleted file mode 100644
index 422535cf..00000000
--- a/lib/hero/herohandlers/handler_test.v
+++ /dev/null
@@ -1,9 +0,0 @@
-module openrpc
-
-import freeflowuniverse.herolib.schemas.openrpc
-import freeflowuniverse.herolib.hero.heromodels
-
-// new_heromodels_server creates a new HeroModels RPC server
-pub fn test_new_heromodels_handler() ! {
- handler := new_heromodels_handler()!
-}
\ No newline at end of file
diff --git a/lib/hero/herohandlers/openrpc.json b/lib/hero/herohandlers/openrpc.json
deleted file mode 100644
index 011dbe16..00000000
--- a/lib/hero/herohandlers/openrpc.json
+++ /dev/null
@@ -1,213 +0,0 @@
-{
- "openrpc": "1.0.0-rc1",
- "info": {
- "version": "1.0.0",
- "title": "HeroModels OpenRPC API",
- "description": "OpenRPC API for HeroModels comment management over Unix socket",
- "contact": {
- "name": "HeroLib Team",
- "url": "https://github.com/freeflowuniverse/herolib"
- }
- },
- "servers": [
- {
- "name": "Unix Socket Server",
- "url": "${server.socket_path}",
- "description": "Unix domain socket server for HeroModels"
- }
- ],
- "methods": [
- {
- "name": "comment_get",
- "description": "Retrieve comments by ID, author, or parent",
- "params": [
- {
- "name": "args",
- "description": "Comment search arguments",
- "required": true,
- "schema": {
- "type": "object",
- "properties": {
- "id": {
- "type": "integer",
- "description": "Comment ID to retrieve"
- },
- "author": {
- "type": "integer",
- "description": "Author ID to filter by"
- },
- "parent": {
- "type": "integer",
- "description": "Parent comment ID to filter by"
- }
- }
- }
- }
- ],
- "result": {
- "name": "comments",
- "description": "Comment(s) matching the criteria",
- "schema": {
- "oneOf": [
- {
- "$$ref": "#/components/schemas/Comment"
- },
- {
- "type": "array",
- "items": {
- "$$ref": "#/components/schemas/Comment"
- }
- }
- ]
- }
- }
- },
- {
- "name": "comment_set",
- "description": "Create a new comment",
- "params": [
- {
- "name": "comment",
- "description": "Comment data to create",
- "required": true,
- "schema": {
- "$$ref": "#/components/schemas/CommentArg"
- }
- }
- ],
- "result": {
- "name": "result",
- "description": "Created comment ID",
- "schema": {
- "type": "object",
- "properties": {
- "id": {
- "type": "integer",
- "description": "ID of the created comment"
- }
- }
- }
- }
- },
- {
- "name": "comment_delete",
- "description": "Delete a comment by ID",
- "params": [
- {
- "name": "args",
- "description": "Comment deletion arguments",
- "required": true,
- "schema": {
- "type": "object",
- "properties": {
- "id": {
- "type": "integer",
- "description": "ID of comment to delete"
- }
- },
- "required": ["id"]
- }
- }
- ],
- "result": {
- "name": "result",
- "description": "Deletion result",
- "schema": {
- "type": "object",
- "properties": {
- "success": {
- "type": "boolean"
- },
- "id": {
- "type": "integer"
- }
- }
- }
- }
- },
- {
- "name": "comment_list",
- "description": "List all comment IDs",
- "params": [],
- "result": {
- "name": "ids",
- "description": "Array of all comment IDs",
- "schema": {
- "type": "array",
- "items": {
- "type": "integer"
- }
- }
- }
- },
- {
- "name": "discover",
- "description": "Get the OpenRPC specification for this service",
- "params": [],
- "result": {
- "name": "spec",
- "description": "OpenRPC specification",
- "schema": {
- "type": "object"
- }
- }
- }
- ],
- "components": {
- "schemas": {
- "Comment": {
- "type": "object",
- "properties": {
- "id": {
- "type": "integer",
- "description": "Unique comment identifier"
- },
- "comment": {
- "type": "string",
- "description": "Comment text content"
- },
- "parent": {
- "type": "integer",
- "description": "Parent comment ID (0 if top-level)"
- },
- "updated_at": {
- "type": "integer",
- "description": "Unix timestamp of last update"
- },
- "author": {
- "type": "integer",
- "description": "Author user ID"
- }
- },
- "required": [
- "id",
- "comment",
- "parent",
- "updated_at",
- "author"
- ]
- },
- "CommentArg": {
- "type": "object",
- "properties": {
- "comment": {
- "type": "string",
- "description": "Comment text content"
- },
- "parent": {
- "type": "integer",
- "description": "Parent comment ID (0 if top-level)"
- },
- "author": {
- "type": "integer",
- "description": "Author user ID"
- }
- },
- "required": [
- "comment",
- "author"
- ]
- }
- }
- }
-}
\ No newline at end of file
diff --git a/lib/hero/herohandlers/server.v b/lib/hero/herohandlers/server.v
deleted file mode 100644
index 23cd96fb..00000000
--- a/lib/hero/herohandlers/server.v
+++ /dev/null
@@ -1,26 +0,0 @@
-module openrpc
-
-import freeflowuniverse.herolib.schemas.openrpc
-
-// HeroModelsServer extends the base openrpcserver.RPCServer with heromodels-specific functionality
-pub struct HeroModelsServer {
- openrpc.UNIXServer
-}
-
-@[params]
-pub struct HeroModelsServerArgs {
-pub mut:
- socket_path string = '/tmp/heromodels'
-}
-
-// new_heromodels_server creates a new HeroModels RPC server
-pub fn new_heromodels_server(args HeroModelsServerArgs) !&HeroModelsServer {
- mut base_server := openrpc.new_unix_server(
- new_heromodels_handler()!,
- socket_path: args.socket_path
- )!
-
- return &HeroModelsServer{
- UNIXServer: *base_server
- }
-}
\ No newline at end of file
diff --git a/lib/hero/heromodels copy/instructions.md b/lib/hero/heromodels copy/instructions.md
deleted file mode 100644
index 4a7dbf12..00000000
--- a/lib/hero/heromodels copy/instructions.md
+++ /dev/null
@@ -1,31 +0,0 @@
-distill vlang objects out of the calendr/contact/circle and create the missing parts
-
-organze per root object which are @[heap] and in separate file with name.v
-
-the rootobjects are
-
-- user
-- group (which users are members and in which role can be admin, writer, reader, can be linked to subgroups)
-- calendar (references to event, group)
-- calendar_event (everything related to an event on calendar, link to one or more fs_file)
-- project (grouping per project, defines swimlanes and milestones this allows us to visualize as kanban, link to group, link to one or more fs_file )
-- project_issue (and issue is specific type, e.g. task, story, bug, question,…), issue is linked to project by id, also defined priority…, on which swimlane, deadline, assignees, … ,,,, has tags, link to one or more fs_file
-- chat_group (link to group, name/description/tags)
-- chat_message (link to chat_group, link to parent_chat_messages and what type of link e.g. reply or reference or? , status, … link to one or more fs_file)
-- fs = filesystem (link to group)
-- fs_dir = directory in filesystem, link to parent, link to group
-- fs_file (link to one or more fs_dir, list of references to blobs as blake192)
-- fs_symlink (can be link to dir or file)
-- fs_blob (the data itself, max size 1 MB, binary data, id = blake192)
-
-the group’s define how people can interact with the parts e.g. calendar linked to group, so readers of that group can read and have copy of the info linked to that group
-
-all the objects are identified by their blake192 (based on the content)
-
-there is a special table which has link between blake192 and their previous & next version, so we can always walk the three, both parts are indexed (this is independent of type of object)
-
-
-
-
-
-
diff --git a/lib/hero/heromodels copy/version_history.v b/lib/hero/heromodels/beta/version_history.v
similarity index 100%
rename from lib/hero/heromodels copy/version_history.v
rename to lib/hero/heromodels/beta/version_history.v
diff --git a/lib/hero/heromodels/readme.md b/lib/hero/heromodels/readme.md
new file mode 100644
index 00000000..1f0e89e2
--- /dev/null
+++ b/lib/hero/heromodels/readme.md
@@ -0,0 +1,34 @@
+
+
+
+## unix socket based RPC server
+
+see lib/hero/heromodels/rpc/rpc_comment.v for an example of how to implement RPC methods.
+
+```v
+import freeflowuniverse.herolib.hero.heromodels.rpc
+
+// starts the rpc server
+rpc.start()!
+
+```
+
+```bash
+# to test the discover function, this returns the openrpc specification:
+echo '{"jsonrpc":"2.0","method":"rpc.discover","params":[],"id":1}' | nc -U /tmp/heromodels
+
+# to test interactively:
+
+nc -U /tmp/heromodels
+
+# then e.g. paste following in
+
+{"jsonrpc":"2.0","method":"comment_set","params":{"comment":"Hello world!","parent":0,"author":42},"id":1}
+
+# needs to be on one line for openrpc to work
+
+```
+
+see lib/hero/heromodels/rpc/openrpc.json for the full openrpc specification
+
+
diff --git a/lib/hero/heromodels/rpc/factory.v b/lib/hero/heromodels/rpc/factory.v
new file mode 100644
index 00000000..9d1592c9
--- /dev/null
+++ b/lib/hero/heromodels/rpc/factory.v
@@ -0,0 +1,28 @@
+module rpc
+
+import freeflowuniverse.herolib.schemas.openrpc
+import os
+
+const openrpc_path = os.join_path(os.dir(@FILE), 'openrpc.json')
+
+@[params]
+pub struct ServerArgs {
+pub mut:
+ socket_path string = '/tmp/heromodels'
+}
+
+pub fn start(args ServerArgs) ! {
+ mut openrpc_handler := openrpc.new_handler(openrpc_path)!
+
+ openrpc_handler.register_procedure_handle('comment_get', comment_get)
+ openrpc_handler.register_procedure_handle('comment_set', comment_set)
+ openrpc_handler.register_procedure_handle('comment_delete', comment_delete)
+ openrpc_handler.register_procedure_handle('comment_list', comment_list)
+
+ openrpc_handler.register_procedure_handle('calendar_get', calendar_get)
+ openrpc_handler.register_procedure_handle('calendar_set', calendar_set)
+ openrpc_handler.register_procedure_handle('calendar_delete', calendar_delete)
+ openrpc_handler.register_procedure_handle('calendar_list', calendar_list)
+
+ openrpc.start_unix_server(openrpc_handler, socket_path: args.socket_path)!
+}
diff --git a/lib/hero/heromodels/rpc/openrpc.json b/lib/hero/heromodels/rpc/openrpc.json
new file mode 100644
index 00000000..3e9b3a87
--- /dev/null
+++ b/lib/hero/heromodels/rpc/openrpc.json
@@ -0,0 +1,315 @@
+{
+ "openrpc": "1.0.0",
+ "info": {
+ "title": "Hero Models API",
+ "version": "1.0.0"
+ },
+ "methods": [
+ {
+ "name": "comment_get",
+ "summary": "Get a comment by ID",
+ "params": [
+ {
+ "name": "id",
+ "description": "ID of comment to fetch",
+ "required": true,
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ ],
+ "result": {
+ "name": "comment",
+ "description": "Comment object",
+ "schema": {
+ "$ref": "#/components/schemas/Comment"
+ }
+ }
+ },
+ {
+ "name": "comment_set",
+ "summary": "Create or update a comment",
+ "params": [
+ {
+ "name": "comment",
+ "description": "Comment text",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "parent",
+ "description": "ID of parent comment if any, 0 means none",
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ },
+ {
+ "name": "author",
+ "description": "ID of the author user",
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ ],
+ "result": {
+ "name": "id",
+ "description": "ID of the created/updated comment",
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "name": "comment_delete",
+ "summary": "Delete a comment by ID",
+ "params": [
+ {
+ "name": "id",
+ "description": "ID of comment to delete",
+ "required": true,
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ ],
+ "result": {
+ "name": "result",
+ "description": "Success result",
+ "schema": {
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "name": "comment_list",
+ "summary": "List all comments",
+ "params": [],
+ "result": {
+ "name": "comments",
+ "description": "List of all comment objects",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Comment"
+ }
+ }
+ }
+ },
+ {
+ "name": "calendar_get",
+ "summary": "Get a calendar by ID",
+ "params": [
+ {
+ "name": "id",
+ "description": "ID of calendar to fetch",
+ "required": true,
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ ],
+ "result": {
+ "name": "calendar",
+ "description": "Calendar object",
+ "schema": {
+ "$ref": "#/components/schemas/Calendar"
+ }
+ }
+ },
+ {
+ "name": "calendar_set",
+ "summary": "Create or update a calendar",
+ "params": [
+ {
+ "name": "name",
+ "description": "Name of the calendar",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "description",
+ "description": "Description of the calendar",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "color",
+ "description": "Hex color code for the calendar",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "timezone",
+ "description": "Timezone of the calendar",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "is_public",
+ "description": "Whether the calendar is public",
+ "schema": {
+ "type": "boolean"
+ }
+ },
+ {
+ "name": "events",
+ "description": "IDs of calendar events",
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ }
+ ],
+ "result": {
+ "name": "id",
+ "description": "ID of the created/updated calendar",
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ {
+ "name": "calendar_delete",
+ "summary": "Delete a calendar by ID",
+ "params": [
+ {
+ "name": "id",
+ "description": "ID of calendar to delete",
+ "required": true,
+ "schema": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ ],
+ "result": {
+ "name": "result",
+ "description": "Success result",
+ "schema": {
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "name": "calendar_list",
+ "summary": "List all calendars",
+ "params": [],
+ "result": {
+ "name": "calendars",
+ "description": "List of all calendar objects",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Calendar"
+ }
+ }
+ }
+ }
+ ],
+ "components": {
+ "schemas": {
+ "Base": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "name": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "created_at": {
+ "type": "integer"
+ },
+ "updated_at": {
+ "type": "integer"
+ },
+ "securitypolicy": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "tags": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "comments": {
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ }
+ },
+ "Comment": {
+ "title": "Comment",
+ "description": "A comment object",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/Base"
+ }
+ ],
+ "properties": {
+ "comment": {
+ "type": "string"
+ },
+ "parent": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "author": {
+ "type": "integer",
+ "minimum": 0
+ }
+ }
+ },
+ "Calendar": {
+ "title": "Calendar",
+ "description": "A calendar object",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/Base"
+ }
+ ],
+ "properties": {
+ "events": {
+ "type": "array",
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ }
+ },
+ "color": {
+ "type": "string"
+ },
+ "timezone": {
+ "type": "string"
+ },
+ "is_public": {
+ "type": "boolean"
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/lib/hero/heromodels/rpc/rpc_calendar.v b/lib/hero/heromodels/rpc/rpc_calendar.v
new file mode 100644
index 00000000..0a9580b8
--- /dev/null
+++ b/lib/hero/heromodels/rpc/rpc_calendar.v
@@ -0,0 +1,79 @@
+module rpc
+
+import json
+import freeflowuniverse.herolib.schemas.jsonrpc { Request, Response, new_response_true, new_response_u32 }
+import freeflowuniverse.herolib.hero.heromodels
+
+// Calendar-specific argument structures
+@[params]
+pub struct CalendarGetArgs {
+pub mut:
+ id u32 @[required]
+}
+
+@[params]
+pub struct CalendarSetArgs {
+pub mut:
+ name string @[required]
+ description string
+ color string
+ timezone string
+ is_public bool
+ events []u32
+}
+
+@[params]
+pub struct CalendarDeleteArgs {
+pub mut:
+ id u32 @[required]
+}
+
+pub fn calendar_get(request Request) !Response {
+ payload := jsonrpc.decode_payload[CalendarGetArgs](request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ mut mydb := heromodels.new()!
+ calendar := mydb.calendar.get(payload.id)!
+
+ return jsonrpc.new_response(request.id, json.encode(calendar))
+}
+
+pub fn calendar_set(request Request) !Response {
+ payload := jsonrpc.decode_payload[CalendarSetArgs](request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ mut mydb := heromodels.new()!
+ mut calendar_obj := mydb.calendar.new(
+ name: payload.name
+ description: payload.description
+ color: payload.color
+ timezone: payload.timezone
+ is_public: payload.is_public
+ events: payload.events
+ )!
+
+ id := mydb.calendar.set(calendar_obj)!
+
+ return new_response_u32(request.id, id)
+}
+
+pub fn calendar_delete(request Request) !Response {
+ payload := jsonrpc.decode_payload[CalendarDeleteArgs](request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ mut mydb := heromodels.new()!
+ mydb.calendar.delete(payload.id)!
+
+ // returns
+ return new_response_true(request.id) // return true as jsonrpc (bool)
+}
+
+pub fn calendar_list(request Request) !Response {
+ mut mydb := heromodels.new()!
+ calendars := mydb.calendar.list()!
+
+ return jsonrpc.new_response(request.id, json.encode(calendars))
+}
diff --git a/lib/hero/heromodels/rpc/rpc_comment.v b/lib/hero/heromodels/rpc/rpc_comment.v
new file mode 100644
index 00000000..e887302b
--- /dev/null
+++ b/lib/hero/heromodels/rpc/rpc_comment.v
@@ -0,0 +1,72 @@
+module rpc
+
+import json
+import freeflowuniverse.herolib.schemas.jsonrpc { Request, Response, new_response_true, new_response_u32 }
+import freeflowuniverse.herolib.hero.heromodels
+
+// Comment-specific argument structures
+@[params]
+pub struct CommentGetArgs {
+pub mut:
+ id u32 @[required]
+}
+
+@[params]
+pub struct CommentSetArgs {
+pub mut:
+ comment string @[required]
+ parent u32
+ author u32
+}
+
+@[params]
+pub struct CommentDeleteArgs {
+pub mut:
+ id u32 @[required]
+}
+
+pub fn comment_get(request Request) !Response {
+ payload := jsonrpc.decode_payload[CommentGetArgs](request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ mut mydb := heromodels.new()!
+ comment := mydb.comments.get(payload.id)!
+
+ return jsonrpc.new_response(request.id, json.encode(comment))
+}
+
+pub fn comment_set(request Request) !Response {
+ payload := jsonrpc.decode_payload[CommentSetArgs](request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ mut mydb := heromodels.new()!
+ mut comment_obj := mydb.comments.new(
+ comment: payload.comment
+ parent: payload.parent
+ author: payload.author
+ )!
+
+ id := mydb.comments.set(comment_obj)!
+
+ return new_response_u32(request.id, id)
+}
+
+pub fn comment_delete(request Request) !Response {
+ payload := jsonrpc.decode_payload[CommentDeleteArgs](request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ mut mydb := heromodels.new()!
+ mydb.comments.delete(payload.id)!
+
+ return new_response_true(request.id) // return true as jsonrpc (bool)
+}
+
+pub fn comment_list(request Request) !Response {
+ mut mydb := heromodels.new()!
+ comments := mydb.comments.list()!
+
+ return jsonrpc.new_response(request.id, json.encode(comments))
+}
diff --git a/lib/schemas/jsonrpc/model_response.v b/lib/schemas/jsonrpc/model_response.v
index f5119753..42ac7f39 100644
--- a/lib/schemas/jsonrpc/model_response.v
+++ b/lib/schemas/jsonrpc/model_response.v
@@ -40,6 +40,38 @@ pub fn new_response(id int, result string) Response {
}
}
+pub fn new_response_true(id int) Response {
+ return Response{
+ jsonrpc: jsonrpc_version
+ result: 'true'
+ id: id
+ }
+}
+
+pub fn new_response_false(id int) Response {
+ return Response{
+ jsonrpc: jsonrpc_version
+ result: 'false'
+ id: id
+ }
+}
+
+pub fn new_response_int(id int, result int) Response {
+ return Response{
+ jsonrpc: jsonrpc_version
+ result: result.str()
+ id: id
+ }
+}
+
+pub fn new_response_u32(id int, result u32) Response {
+ return Response{
+ jsonrpc: jsonrpc_version
+ result: result.str()
+ id: id
+ }
+}
+
// new_error_response creates an error JSON-RPC response with the given error object.
//
// Parameters:
@@ -65,7 +97,9 @@ pub fn new_error_response(id int, error RPCError) Response {
// Returns:
// - A Response object or an error if parsing fails or the response is invalid
pub fn decode_response(data string) !Response {
- raw := json2.raw_decode(data) or { return error('Failed to decode JSONRPC response ${data}\n${err}') }
+ raw := json2.raw_decode(data) or {
+ return error('Failed to decode JSONRPC response ${data}\n${err}')
+ }
raw_map := raw.as_map()
// Validate that the response contains either result or error, but not both or neither
diff --git a/lib/schemas/openrpc/README.md b/lib/schemas/openrpc/_archive/README.md
similarity index 100%
rename from lib/schemas/openrpc/README.md
rename to lib/schemas/openrpc/_archive/README.md
diff --git a/lib/schemas/openrpc/codegen/docgen.v b/lib/schemas/openrpc/_archive/codegen/docgen.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/docgen.v
rename to lib/schemas/openrpc/_archive/codegen/docgen.v
diff --git a/lib/schemas/openrpc/codegen/generate.v b/lib/schemas/openrpc/_archive/codegen/generate.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate.v
rename to lib/schemas/openrpc/_archive/codegen/generate.v
diff --git a/lib/schemas/openrpc/codegen/generate_client.v b/lib/schemas/openrpc/_archive/codegen/generate_client.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate_client.v
rename to lib/schemas/openrpc/_archive/codegen/generate_client.v
diff --git a/lib/schemas/openrpc/codegen/generate_handler.v b/lib/schemas/openrpc/_archive/codegen/generate_handler.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate_handler.v
rename to lib/schemas/openrpc/_archive/codegen/generate_handler.v
diff --git a/lib/schemas/openrpc/codegen/generate_interface.v b/lib/schemas/openrpc/_archive/codegen/generate_interface.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate_interface.v
rename to lib/schemas/openrpc/_archive/codegen/generate_interface.v
diff --git a/lib/schemas/openrpc/codegen/generate_model.v b/lib/schemas/openrpc/_archive/codegen/generate_model.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate_model.v
rename to lib/schemas/openrpc/_archive/codegen/generate_model.v
diff --git a/lib/schemas/openrpc/codegen/generate_model_test.v b/lib/schemas/openrpc/_archive/codegen/generate_model_test.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate_model_test.v
rename to lib/schemas/openrpc/_archive/codegen/generate_model_test.v
diff --git a/lib/schemas/openrpc/codegen/generate_openrpc.v b/lib/schemas/openrpc/_archive/codegen/generate_openrpc.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/generate_openrpc.v
rename to lib/schemas/openrpc/_archive/codegen/generate_openrpc.v
diff --git a/lib/schemas/openrpc/codegen/templates/client.v.template b/lib/schemas/openrpc/_archive/codegen/templates/client.v.template
similarity index 100%
rename from lib/schemas/openrpc/codegen/templates/client.v.template
rename to lib/schemas/openrpc/_archive/codegen/templates/client.v.template
diff --git a/lib/schemas/openrpc/codegen/testdata/openrpc.json b/lib/schemas/openrpc/_archive/codegen/testdata/openrpc.json
similarity index 100%
rename from lib/schemas/openrpc/codegen/testdata/openrpc.json
rename to lib/schemas/openrpc/_archive/codegen/testdata/openrpc.json
diff --git a/lib/schemas/openrpc/codegen/to_code.v b/lib/schemas/openrpc/_archive/codegen/to_code.v
similarity index 100%
rename from lib/schemas/openrpc/codegen/to_code.v
rename to lib/schemas/openrpc/_archive/codegen/to_code.v
diff --git a/lib/schemas/openrpc/parse_example.v b/lib/schemas/openrpc/_archive/parse_example.v
similarity index 100%
rename from lib/schemas/openrpc/parse_example.v
rename to lib/schemas/openrpc/_archive/parse_example.v
diff --git a/lib/schemas/openrpc/parse_example_test.v b/lib/schemas/openrpc/_archive/parse_example_test.v
similarity index 100%
rename from lib/schemas/openrpc/parse_example_test.v
rename to lib/schemas/openrpc/_archive/parse_example_test.v
diff --git a/lib/schemas/openrpc/playground.v b/lib/schemas/openrpc/_archive/playground.v
similarity index 100%
rename from lib/schemas/openrpc/playground.v
rename to lib/schemas/openrpc/_archive/playground.v
diff --git a/lib/schemas/openrpc/server/comment.v b/lib/schemas/openrpc/_archive/server/comment.v
similarity index 100%
rename from lib/schemas/openrpc/server/comment.v
rename to lib/schemas/openrpc/_archive/server/comment.v
diff --git a/lib/schemas/openrpc/server/core_methods.v b/lib/schemas/openrpc/_archive/server/core_methods.v
similarity index 100%
rename from lib/schemas/openrpc/server/core_methods.v
rename to lib/schemas/openrpc/_archive/server/core_methods.v
diff --git a/lib/schemas/openrpc/server/core_models.v b/lib/schemas/openrpc/_archive/server/core_models.v
similarity index 100%
rename from lib/schemas/openrpc/server/core_models.v
rename to lib/schemas/openrpc/_archive/server/core_models.v
diff --git a/lib/schemas/openrpc/testdata/method_plain.v b/lib/schemas/openrpc/_archive/testdata/method_plain.v
similarity index 100%
rename from lib/schemas/openrpc/testdata/method_plain.v
rename to lib/schemas/openrpc/_archive/testdata/method_plain.v
diff --git a/lib/schemas/openrpc/testdata/method_with_description.v b/lib/schemas/openrpc/_archive/testdata/method_with_description.v
similarity index 100%
rename from lib/schemas/openrpc/testdata/method_with_description.v
rename to lib/schemas/openrpc/_archive/testdata/method_with_description.v
diff --git a/lib/schemas/openrpc/testdata/methods.v b/lib/schemas/openrpc/_archive/testdata/methods.v
similarity index 100%
rename from lib/schemas/openrpc/testdata/methods.v
rename to lib/schemas/openrpc/_archive/testdata/methods.v
diff --git a/lib/schemas/openrpc/_archive/testdata/openrpc.json b/lib/schemas/openrpc/_archive/testdata/openrpc.json
new file mode 100644
index 00000000..b80ac25c
--- /dev/null
+++ b/lib/schemas/openrpc/_archive/testdata/openrpc.json
@@ -0,0 +1,302 @@
+{
+ "openrpc": "1.0.0-rc1",
+ "info": {
+ "version": "1.0.0",
+ "title": "Petstore",
+ "license": {
+ "name": "MIT"
+ }
+ },
+ "servers": [
+ {
+ "name": "localhost",
+ "url": "http://localhost:8080"
+ }
+ ],
+ "methods": [
+ {
+ "name": "list_pets",
+ "summary": "List all pets",
+ "tags": [
+ {
+ "name": "pets"
+ }
+ ],
+ "params": [
+ {
+ "name": "limit",
+ "description": "How many items to return at one time (max 100)",
+ "required": false,
+ "schema": {
+ "type": "integer",
+ "minimum": 1
+ }
+ }
+ ],
+ "result": {
+ "name": "pets",
+ "description": "A paged array of pets",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Pet"
+ }
+ }
+ },
+ "errors": [
+ {
+ "code": 100,
+ "message": "pets busy"
+ }
+ ],
+ "examples": [
+ {
+ "name": "listPetExample",
+ "description": "List pet example",
+ "params": [
+ {
+ "name": "limit",
+ "value": 1
+ }
+ ],
+ "result": {
+ "name": "listPetResultExample",
+ "value": [
+ {
+ "id": 7,
+ "name": "fluffy",
+ "tag": "poodle"
+ }
+ ]
+ }
+ }
+ ]
+ },
+ {
+ "name": "create_pet",
+ "summary": "Create a pet",
+ "tags": [
+ {
+ "name": "pets"
+ }
+ ],
+ "params": [
+ {
+ "name": "newPetName",
+ "description": "Name of pet to create",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "newPetTag",
+ "description": "Pet tag to create",
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "examples": [
+ {
+ "name": "createPetExample",
+ "description": "Create pet example",
+ "params": [
+ {
+ "name": "newPetName",
+ "value": "fluffy"
+ },
+ {
+ "name": "newPetTag",
+ "value": "poodle"
+ }
+ ],
+ "result": {
+ "value": 7
+ }
+ }
+ ],
+ "result": {
+ "$ref": "#/components/contentDescriptors/PetId"
+ }
+ },
+ {
+ "name": "get_pet",
+ "summary": "Info for a specific pet",
+ "tags": [
+ {
+ "name": "pets"
+ }
+ ],
+ "params": [
+ {
+ "$ref": "#/components/contentDescriptors/PetId"
+ }
+ ],
+ "result": {
+ "name": "pet",
+ "description": "Expected response to a valid request",
+ "schema": {
+ "$ref": "#/components/schemas/Pet"
+ }
+ },
+ "examples": [
+ {
+ "name": "getPetExample",
+ "description": "Get pet example",
+ "params": [
+ {
+ "name": "petId",
+ "value": 7
+ }
+ ],
+ "result": {
+ "name": "getPetExampleResult",
+ "value": {
+ "name": "fluffy",
+ "tag": "poodle",
+ "id": 7
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "update_pet",
+ "summary": "Update a pet",
+ "tags": [
+ {
+ "name": "pets"
+ }
+ ],
+ "params": [
+ {
+ "$ref": "#/components/contentDescriptors/PetId"
+ },
+ {
+ "name": "updatedPetName",
+ "description": "New name for the pet",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "updatedPetTag",
+ "description": "New tag for the pet",
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "result": {
+ "name": "pet",
+ "description": "Updated pet object",
+ "schema": {
+ "$ref": "#/components/schemas/Pet"
+ }
+ },
+ "examples": [
+ {
+ "name": "updatePetExample",
+ "description": "Update pet example",
+ "params": [
+ {
+ "name": "petId",
+ "value": 7
+ },
+ {
+ "name": "updatedPetName",
+ "value": "fluffy updated"
+ },
+ {
+ "name": "updatedPetTag",
+ "value": "golden retriever"
+ }
+ ],
+ "result": {
+ "name": "updatePetExampleResult",
+ "value": {
+ "name": "fluffy updated",
+ "tag": "golden retriever",
+ "id": 7
+ }
+ }
+ }
+ ]
+ },
+ {
+ "name": "delete_pet",
+ "summary": "Delete a pet",
+ "tags": [
+ {
+ "name": "pets"
+ }
+ ],
+ "params": [
+ {
+ "$ref": "#/components/contentDescriptors/PetId"
+ }
+ ],
+ "result": {
+ "name": "success",
+ "description": "Boolean indicating success",
+ "schema": {
+ "type": "boolean"
+ }
+ },
+ "examples": [
+ {
+ "name": "deletePetExample",
+ "description": "Delete pet example",
+ "params": [
+ {
+ "name": "petId",
+ "value": 7
+ }
+ ],
+ "result": {
+ "name": "deletePetExampleResult",
+ "value": true
+ }
+ }
+ ]
+ }
+ ],
+ "components": {
+ "contentDescriptors": {
+ "PetId": {
+ "name": "petId",
+ "required": true,
+ "description": "The ID of the pet",
+ "schema": {
+ "$ref": "#/components/schemas/PetId"
+ }
+ }
+ },
+ "schemas": {
+ "PetId": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "Pet": {
+ "type": "object",
+ "required": [
+ "id",
+ "name"
+ ],
+ "properties": {
+ "id": {
+ "$ref": "#/components/schemas/PetId"
+ },
+ "name": {
+ "type": "string"
+ },
+ "tag": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/lib/schemas/openrpc/testdata/petstore_client/README.md b/lib/schemas/openrpc/_archive/testdata/petstore_client/README.md
similarity index 100%
rename from lib/schemas/openrpc/testdata/petstore_client/README.md
rename to lib/schemas/openrpc/_archive/testdata/petstore_client/README.md
diff --git a/lib/schemas/openrpc/testdata/petstore_client/client.v b/lib/schemas/openrpc/_archive/testdata/petstore_client/client.v
similarity index 100%
rename from lib/schemas/openrpc/testdata/petstore_client/client.v
rename to lib/schemas/openrpc/_archive/testdata/petstore_client/client.v
diff --git a/lib/schemas/openrpc/testdata/petstore_client/methods.v b/lib/schemas/openrpc/_archive/testdata/petstore_client/methods.v
similarity index 100%
rename from lib/schemas/openrpc/testdata/petstore_client/methods.v
rename to lib/schemas/openrpc/_archive/testdata/petstore_client/methods.v
diff --git a/lib/schemas/openrpc/testdata/petstore_client/model.v b/lib/schemas/openrpc/_archive/testdata/petstore_client/model.v
similarity index 100%
rename from lib/schemas/openrpc/testdata/petstore_client/model.v
rename to lib/schemas/openrpc/_archive/testdata/petstore_client/model.v
diff --git a/lib/schemas/openrpc/testdata/petstore_client/openrpc.json b/lib/schemas/openrpc/_archive/testdata/petstore_client/openrpc.json
similarity index 100%
rename from lib/schemas/openrpc/testdata/petstore_client/openrpc.json
rename to lib/schemas/openrpc/_archive/testdata/petstore_client/openrpc.json
diff --git a/lib/schemas/openrpc/factory.v b/lib/schemas/openrpc/factory.v
index 6715b56a..68ff36f8 100644
--- a/lib/schemas/openrpc/factory.v
+++ b/lib/schemas/openrpc/factory.v
@@ -20,7 +20,7 @@ pub fn new(params Params) !OpenRPC {
}
text := if params.path != '' {
- os.read_file(params.path)!
+ os.read_file(params.path) or { return error('Could not read openrpc spec file at ${params.path}: ${err}') }
} else {
params.text
}
diff --git a/lib/schemas/openrpc/handler_factory.v b/lib/schemas/openrpc/handler_factory.v
new file mode 100644
index 00000000..a9fac983
--- /dev/null
+++ b/lib/schemas/openrpc/handler_factory.v
@@ -0,0 +1,16 @@
+module openrpc
+
+import os
+import json
+
+
+//path to openrpc.json file
+pub fn new_handler(openrpc_path string) !Handler {
+
+ mut openrpc_handler := openrpc.Handler {
+ specification: new(path: openrpc_path)!
+ }
+
+ return openrpc_handler
+
+}
diff --git a/lib/schemas/openrpc/model.v b/lib/schemas/openrpc/model.v
index 5fdf4f12..35758435 100644
--- a/lib/schemas/openrpc/model.v
+++ b/lib/schemas/openrpc/model.v
@@ -9,10 +9,10 @@ import freeflowuniverse.herolib.schemas.jsonschema { Reference, SchemaRef }
pub struct OpenRPC {
pub mut:
openrpc string = '1.0.0' // This string MUST be the semantic version number of the OpenRPC Specification version that the OpenRPC document uses.
- info Info @[omitempty] // Provides metadata about the API.
- servers []Server @[omitempty]// An array of Server Objects, which provide connectivity information to a target server.
- methods []Method @[omitempty]// The available methods for the API.
- components Components @[omitempty] // An element to hold various schemas for the specification.
+ info Info @[omitempty] // Provides metadata about the API.
+ servers []Server @[omitempty] // An array of Server Objects, which provide connectivity information to a target server.
+ methods []Method @[omitempty] // The available methods for the API.
+ components Components @[omitempty] // An element to hold various schemas for the specification.
external_docs []ExternalDocs @[json: externalDocs; omitempty] // Additional external documentation.
}
@@ -20,12 +20,12 @@ pub mut:
// The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience.
pub struct Info {
pub:
- title string @[omitempty] // The title of the application.
- description string @[omitempty] // A verbose description of the application.
+ title string @[omitempty] // The title of the application.
+ description string @[omitempty] // A verbose description of the application.
terms_of_service string @[json: termsOfService; omitempty] // A URL to the Terms of Service for the API. MUST be in the format of a URL.
- contact Contact @[omitempty] // The contact information for the exposed API.
- license License @[omitempty] // The license information for the exposed API.
- version string @[omitempty] // The version of the OpenRPC document (which is distinct from the OpenRPC Specification version or the API implementation version).
+ contact Contact @[omitempty] // The contact information for the exposed API.
+ license License @[omitempty] // The license information for the exposed API.
+ version string @[omitempty] // The version of the OpenRPC document (which is distinct from the OpenRPC Specification version or the API implementation version).
}
// Contact information for the exposed API.
@@ -168,11 +168,11 @@ pub:
pub struct Components {
pub mut:
content_descriptors map[string]ContentDescriptorRef @[json: contentDescriptors; omitempty] // An object to hold reusable Content Descriptor Objects.
- schemas map[string]SchemaRef @[omitempty] // An object to hold reusable Schema Objects.
- examples map[string]Example @[omitempty] // An object to hold reusable Example Objects.
- links map[string]Link @[omitempty] // An object to hold reusable Link Objects.
- error map[string]Error @[omitempty] // An object to hold reusable Error Objects.
- example_pairing_objects map[string]ExamplePairing @[json: examplePairingObjects; omitempty] // An object to hold reusable Example Pairing Objects.
+ schemas map[string]SchemaRef @[omitempty] // An object to hold reusable Schema Objects.
+ examples map[string]Example @[omitempty] // An object to hold reusable Example Objects.
+ links map[string]Link @[omitempty] // An object to hold reusable Link Objects.
+ error map[string]Error @[omitempty] // An object to hold reusable Error Objects.
+ example_pairing_objects map[string]ExamplePairing @[json: examplePairingObjects; omitempty] // An object to hold reusable Example Pairing Objects.
tags map[string]Tag // An object to hold reusable Tag Objects.
}
diff --git a/lib/schemas/openrpc/readme.md b/lib/schemas/openrpc/readme.md
new file mode 100644
index 00000000..84a5f0e7
--- /dev/null
+++ b/lib/schemas/openrpc/readme.md
@@ -0,0 +1,65 @@
+
+# OpenRPC Module
+
+This module provides a complete implementation of the [OpenRPC specification](https://open-rpc.org) for V, enabling structured JSON-RPC 2.0 API development with schema-based validation and automatic documentation.
+
+## Purpose
+
+- Define and validate JSON-RPC APIs using OpenRPC schema definitions
+- Handle JSON-RPC requests/responses over HTTP or Unix sockets
+- Automatic discovery endpoint (`rpc.discover`) for API documentation
+- Type-safe request/response handling
+- Support for reusable components (schemas, parameters, errors, examples)
+
+## Usage
+
+### 1. Create an OpenRPC Handler
+
+Create a handler with your OpenRPC specification:
+
+```v
+import freeflowuniverse.herolib.schemas.openrpc
+
+// From file path
+mut handler := openrpc.new_handler('path/to/openrpc.json')!
+
+// From specification text
+mut handler := openrpc.new(text: spec_json)!
+```
+
+### 2. Register Methods
+
+Register your method handlers to process incoming JSON-RPC requests:
+
+```v
+fn my_method(request jsonrpc.Request) !jsonrpc.Response {
+ // Decode parameters
+ mut params := json.decode(MyParams, request.params) or {
+ return jsonrpc.invalid_params
+ }
+
+ // Process logic
+ result := process_my_method(params)
+
+ // Return response
+ return jsonrpc.new_response(request.id, json.encode(result))
+}
+
+// Register the method
+handler.register_procedure_handle('my.method', my_method)
+```
+
+### 3. Start Server
+
+Launch the server using either HTTP or Unix socket transport:
+
+```v
+// HTTP server
+mut controller := openrpc.new_http_controller(handler)
+controller.run(port: 8080)
+
+// Unix socket server
+mut server := openrpc.new_unix_server(handler)!
+server.start()
+```
+
diff --git a/lib/schemas/openrpc/server_unix.v b/lib/schemas/openrpc/server_unix.v
index 41f1d4b2..7249ebeb 100644
--- a/lib/schemas/openrpc/server_unix.v
+++ b/lib/schemas/openrpc/server_unix.v
@@ -20,6 +20,11 @@ pub mut:
socket_path string = '/tmp/heromodels'
}
+pub fn start_unix_server(handler Handler, params UNIXServerParams) ! {
+ mut server := new_unix_server(handler, params)!
+ server.start()!
+}
+
pub fn new_unix_server(handler Handler, params UNIXServerParams) !&UNIXServer {
// Remove existing socket file if it exists
if os.exists(params.socket_path) {
diff --git a/lib/schemas/openrpc/server_unix_test.v b/lib/schemas/openrpc/server_unix_test.v
index b5417422..806bdc9e 100644
--- a/lib/schemas/openrpc/server_unix_test.v
+++ b/lib/schemas/openrpc/server_unix_test.v
@@ -27,21 +27,21 @@ pub fn test_new_unix_server() ! {
// client()
}
-pub fn test_unix_server_start() ! {
- mut spec := OpenRPC{}
- handler := Handler{
- specification: new(path: openrpc_path)!
- }
- mut server := new_unix_server(handler)!
+// pub fn test_unix_server_start() ! {
+// mut spec := OpenRPC{}
+// handler := Handler{
+// specification: new(path: openrpc_path)!
+// }
+// mut server := new_unix_server(handler)!
- defer {
- server.close() or {panic(err)}
- }
+// defer {
+// server.close() or {panic(err)}
+// }
- spawn server.start()
+// spawn server.start()
- // client()
-}
+// // client()
+// }
pub fn test_unix_server_handle_connection() ! {
mut spec := OpenRPC{}
@@ -54,7 +54,7 @@ pub fn test_unix_server_handle_connection() ! {
spawn server.start()
// Give server time to start
- // time.sleep(50 * time.millisecond)
+ time.sleep(50 * time.millisecond)
// Connect to the server
mut conn := unix.connect_stream(server.socket_path)!
@@ -63,6 +63,7 @@ pub fn test_unix_server_handle_connection() ! {
conn.close() or {panic(err)}
server.close() or {panic(err)}
}
+ println('Connected to server at ${server.socket_path}')
// Test 1: Send rpc.discover request
discover_request := jsonrpc.new_request('rpc.discover', '')
diff --git a/lib/virt/crun/example.v b/lib/virt/crun/example.v
index 8be266a9..e6ab984a 100644
--- a/lib/virt/crun/example.v
+++ b/lib/virt/crun/example.v
@@ -1,31 +1,31 @@
module crun
-
pub fn example_heropods_compatible() ! {
mut configs := map[string]&CrunConfig{}
// Create a container configuration compatible with heropods template
mut config := new(mut configs, name: 'heropods-example')!
-
- // Configure to match the template
+
+ // Configure to match the template - disable terminal for background containers
+ config.set_terminal(false)
config.set_command(['/bin/sh'])
- .set_working_dir('/')
- .set_user(0, 0, [])
- .add_env('PATH', '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin')
- .add_env('TERM', 'xterm')
- .set_rootfs('${rootfs_path}', false) // This will be replaced by the actual path
- .set_hostname('container')
- .set_no_new_privileges(true)
-
+ config.set_working_dir('/')
+ config.set_user(0, 0, [])
+ config.add_env('PATH', '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin')
+ config.add_env('TERM', 'xterm')
+ config.set_rootfs('/tmp/rootfs', false) // This will be replaced by the actual path
+ config.set_hostname('container')
+ config.set_no_new_privileges(true)
+
// Add the specific rlimit from template
config.add_rlimit(.rlimit_nofile, 1024, 1024)
-
+
// Validate the configuration
config.validate()!
-
+
// Generate and print JSON
json_output := config.to_json()!
println(json_output)
-
+
// Save to file
config.save_to_file('/tmp/heropods_config.json')!
println('Heropods-compatible configuration saved to /tmp/heropods_config.json')
@@ -35,33 +35,34 @@ pub fn example_custom() ! {
mut configs := map[string]&CrunConfig{}
// Create a more complex container configuration
mut config := new(mut configs, name: 'custom-container')!
-
+
config.set_command(['/usr/bin/my-app', '--config', '/etc/myapp/config.yaml'])
- .set_working_dir('/app')
- .set_user(1000, 1000, [1001, 1002])
- .add_env('MY_VAR', 'my_value')
- .add_env('ANOTHER_VAR', 'another_value')
- .set_rootfs('/path/to/rootfs', false)
- .set_hostname('my-custom-container')
- .set_memory_limit(1024 * 1024 * 1024) // 1GB
- .set_cpu_limits(100000, 50000, 1024) // period, quota, shares
- .set_pids_limit(500)
- .add_mount('/host/path', '/container/path', .bind, [.rw])
- .add_mount('/tmp/cache', '/app/cache', .tmpfs, [.rw, .noexec])
- .add_capability(.cap_sys_admin)
- .remove_capability(.cap_net_raw)
- .add_rlimit(.rlimit_nproc, 100, 50)
- .set_no_new_privileges(true)
-
+ config.set_working_dir('/app')
+ config.set_user(1000, 1000, [1001, 1002])
+ config.add_env('MY_VAR', 'my_value')
+ config.add_env('ANOTHER_VAR', 'another_value')
+ config.set_rootfs('/path/to/rootfs', false)
+ config.set_hostname('my-custom-container')
+ config.set_memory_limit(1024 * 1024 * 1024) // 1GB
+ config.set_cpu_limits(100000, 50000, 1024) // period, quota, shares
+ config.set_pids_limit(500)
+ config.add_mount('/host/path', '/container/path', .bind, [.rw])
+ config.add_mount('/tmp/cache', '/app/cache', .tmpfs, [.rw, .noexec])
+ config.add_capability(.cap_sys_admin)
+ config.remove_capability(.cap_net_raw)
+ config.add_rlimit(.rlimit_nproc, 100, 50)
+ config.set_no_new_privileges(true)
+
// Add some additional security hardening
+
config.add_masked_path('/proc/kcore')
- .add_readonly_path('/proc/sys')
-
+ config.add_readonly_path('/proc/sys')
+
// Validate before use
config.validate()!
-
+
// Get the JSON
json_str := config.to_json()!
println('Custom container config:')
println(json_str)
-}
\ No newline at end of file
+}
diff --git a/lib/virt/crun/factory.v b/lib/virt/crun/factory.v
index e93b3fde..8fdc99b3 100644
--- a/lib/virt/crun/factory.v
+++ b/lib/virt/crun/factory.v
@@ -2,11 +2,10 @@ module crun
import freeflowuniverse.herolib.core.texttools
-
@[params]
pub struct FactoryArgs {
pub mut:
- name string = "default"
+ name string = 'default'
}
pub struct CrunConfig {
@@ -23,6 +22,8 @@ pub fn (mount_type MountType) to_string() string {
.proc { 'proc' }
.sysfs { 'sysfs' }
.devpts { 'devpts' }
+ .mqueue { 'mqueue' }
+ .cgroup { 'cgroup' }
.nfs { 'nfs' }
.overlay { 'overlay' }
}
@@ -120,21 +121,23 @@ pub fn (mut config CrunConfig) set_working_dir(cwd string) &CrunConfig {
pub fn (mut config CrunConfig) set_user(uid u32, gid u32, additional_gids []u32) &CrunConfig {
config.spec.process.user = User{
- uid: uid
- gid: gid
+ uid: uid
+ gid: gid
additional_gids: additional_gids.clone()
}
return config
}
pub fn (mut config CrunConfig) add_env(key string, value string) &CrunConfig {
+ // Remove existing env var with same key to avoid duplicates
+ config.spec.process.env = config.spec.process.env.filter(!it.starts_with('${key}='))
config.spec.process.env << '${key}=${value}'
return config
}
pub fn (mut config CrunConfig) set_rootfs(path string, readonly bool) &CrunConfig {
config.spec.root = Root{
- path: path
+ path: path
readonly: readonly
}
return config
@@ -165,16 +168,16 @@ pub fn (mut config CrunConfig) set_pids_limit(limit i64) &CrunConfig {
pub fn (mut config CrunConfig) add_mount(destination string, source string, typ MountType, options []MountOption) &CrunConfig {
config.spec.mounts << Mount{
destination: destination
- typ: typ.to_string()
- source: source
- options: options.map(it.to_string())
+ typ: typ.to_string()
+ source: source
+ options: options.map(it.to_string())
}
return config
}
pub fn (mut config CrunConfig) add_capability(cap Capability) &CrunConfig {
cap_str := cap.to_string()
-
+
if cap_str !in config.spec.process.capabilities.bounding {
config.spec.process.capabilities.bounding << cap_str
}
@@ -189,7 +192,7 @@ pub fn (mut config CrunConfig) add_capability(cap Capability) &CrunConfig {
pub fn (mut config CrunConfig) remove_capability(cap Capability) &CrunConfig {
cap_str := cap.to_string()
-
+
config.spec.process.capabilities.bounding = config.spec.process.capabilities.bounding.filter(it != cap_str)
config.spec.process.capabilities.effective = config.spec.process.capabilities.effective.filter(it != cap_str)
config.spec.process.capabilities.permitted = config.spec.process.capabilities.permitted.filter(it != cap_str)
@@ -197,8 +200,11 @@ pub fn (mut config CrunConfig) remove_capability(cap Capability) &CrunConfig {
}
pub fn (mut config CrunConfig) add_rlimit(typ RlimitType, hard u64, soft u64) &CrunConfig {
+ // Remove existing rlimit with same type to avoid duplicates
+ typ_str := typ.to_string()
+ config.spec.process.rlimits = config.spec.process.rlimits.filter(it.typ != typ_str)
config.spec.process.rlimits << Rlimit{
- typ: typ.to_string()
+ typ: typ_str
hard: hard
soft: soft
}
@@ -210,6 +216,11 @@ pub fn (mut config CrunConfig) set_no_new_privileges(value bool) &CrunConfig {
return config
}
+pub fn (mut config CrunConfig) set_terminal(value bool) &CrunConfig {
+ config.spec.process.terminal = value
+ return config
+}
+
pub fn (mut config CrunConfig) add_masked_path(path string) &CrunConfig {
if path !in config.spec.linux.masked_paths {
config.spec.linux.masked_paths << path
@@ -226,67 +237,65 @@ pub fn (mut config CrunConfig) add_readonly_path(path string) &CrunConfig {
pub fn new(mut configs map[string]&CrunConfig, args FactoryArgs) !&CrunConfig {
name := texttools.name_fix(args.name)
-
+
mut config := &CrunConfig{
name: name
spec: create_default_spec()
}
-
+
configs[name] = config
return config
}
pub fn get(configs map[string]&CrunConfig, args FactoryArgs) !&CrunConfig {
name := texttools.name_fix(args.name)
- return configs[name] or {
- return error('crun config with name "${name}" does not exist')
- }
+ return configs[name] or { return error('crun config with name "${name}" does not exist') }
}
fn create_default_spec() Spec {
// Create default spec that matches the heropods template
mut spec := Spec{
oci_version: '1.0.2' // Set default here
- platform: Platform{
- os: 'linux'
+ platform: Platform{
+ os: 'linux'
arch: 'amd64'
}
- process: Process{
- terminal: true
- user: User{
+ process: Process{
+ terminal: true
+ user: User{
uid: 0
gid: 0
}
- args: ['/bin/sh']
- env: [
+ args: ['/bin/sh']
+ env: [
'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin',
- 'TERM=xterm'
+ 'TERM=xterm',
]
- cwd: '/'
- capabilities: Capabilities{
- bounding: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
- effective: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
+ cwd: '/'
+ capabilities: Capabilities{
+ bounding: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
+ effective: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
inheritable: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
- permitted: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
+ permitted: ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
}
- rlimits: [
+ rlimits: [
Rlimit{
- typ: 'RLIMIT_NOFILE'
+ typ: 'RLIMIT_NOFILE'
hard: 1024
soft: 1024
- }
+ },
]
no_new_privileges: true // No JSON annotation needed here
}
- root: Root{
- path: 'rootfs'
+ root: Root{
+ path: 'rootfs'
readonly: false
}
- hostname: 'container'
- mounts: create_default_mounts()
- linux: Linux{
- namespaces: create_default_namespaces()
- masked_paths: [
+ hostname: 'container'
+ mounts: create_default_mounts()
+ linux: Linux{
+ namespaces: create_default_namespaces()
+ masked_paths: [
'/proc/acpi',
'/proc/kcore',
'/proc/keys',
@@ -295,7 +304,7 @@ fn create_default_spec() Spec {
'/proc/timer_stats',
'/proc/sched_debug',
'/proc/scsi',
- '/sys/firmware'
+ '/sys/firmware',
]
readonly_paths: [
'/proc/asound',
@@ -303,21 +312,34 @@ fn create_default_spec() Spec {
'/proc/fs',
'/proc/irq',
'/proc/sys',
- '/proc/sysrq-trigger'
+ '/proc/sysrq-trigger',
]
}
}
-
+
return spec
}
fn create_default_namespaces() []LinuxNamespace {
return [
- LinuxNamespace{typ: 'pid'},
- LinuxNamespace{typ: 'network'},
- LinuxNamespace{typ: 'ipc'},
- LinuxNamespace{typ: 'uts'},
- LinuxNamespace{typ: 'mount'},
+ LinuxNamespace{
+ typ: 'pid'
+ },
+ LinuxNamespace{
+ typ: 'network'
+ },
+ LinuxNamespace{
+ typ: 'ipc'
+ },
+ LinuxNamespace{
+ typ: 'uts'
+ },
+ LinuxNamespace{
+ typ: 'cgroup'
+ },
+ LinuxNamespace{
+ typ: 'mount'
+ },
]
}
@@ -325,20 +347,44 @@ fn create_default_mounts() []Mount {
return [
Mount{
destination: '/proc'
- typ: 'proc'
- source: 'proc'
+ typ: 'proc'
+ source: 'proc'
},
Mount{
destination: '/dev'
- typ: 'tmpfs'
- source: 'tmpfs'
- options: ['nosuid', 'strictatime', 'mode=755', 'size=65536k']
+ typ: 'tmpfs'
+ source: 'tmpfs'
+ options: ['nosuid', 'strictatime', 'mode=755', 'size=65536k']
+ },
+ Mount{
+ destination: '/dev/pts'
+ typ: 'devpts'
+ source: 'devpts'
+ options: ['nosuid', 'noexec', 'newinstance', 'ptmxmode=0666', 'mode=0620', 'gid=5']
+ },
+ Mount{
+ destination: '/dev/shm'
+ typ: 'tmpfs'
+ source: 'shm'
+ options: ['nosuid', 'noexec', 'nodev', 'mode=1777', 'size=65536k']
+ },
+ Mount{
+ destination: '/dev/mqueue'
+ typ: 'mqueue'
+ source: 'mqueue'
+ options: ['nosuid', 'noexec', 'nodev']
},
Mount{
destination: '/sys'
- typ: 'sysfs'
- source: 'sysfs'
- options: ['nosuid', 'noexec', 'nodev', 'ro']
+ typ: 'sysfs'
+ source: 'sysfs'
+ options: ['nosuid', 'noexec', 'nodev', 'ro']
+ },
+ Mount{
+ destination: '/sys/fs/cgroup'
+ typ: 'cgroup'
+ source: 'cgroup'
+ options: ['nosuid', 'noexec', 'nodev', 'relatime', 'ro']
},
]
-}
\ No newline at end of file
+}
diff --git a/lib/virt/crun/model.v b/lib/virt/crun/model.v
index f099a408..2eb7400c 100644
--- a/lib/virt/crun/model.v
+++ b/lib/virt/crun/model.v
@@ -3,7 +3,7 @@ module crun
// OCI Runtime Spec structures that can be directly encoded to JSON
pub struct Spec {
pub mut:
- oci_version string
+ oci_version string @[json: 'ociVersion']
platform Platform
process Process
root Root
@@ -21,21 +21,21 @@ pub mut:
pub struct Process {
pub mut:
- terminal bool = true
- user User
- args []string
- env []string
- cwd string = '/'
- capabilities Capabilities
- rlimits []Rlimit
- no_new_privileges bool
+ terminal bool = true
+ user User
+ args []string
+ env []string
+ cwd string = '/'
+ capabilities Capabilities
+ rlimits []Rlimit
+ no_new_privileges bool @[json: 'noNewPrivileges']
}
pub struct User {
pub mut:
uid u32
gid u32
- additional_gids []u32
+ additional_gids []u32 @[json: 'additionalGids']
}
pub struct Capabilities {
@@ -49,7 +49,7 @@ pub mut:
pub struct Rlimit {
pub mut:
- typ string
+ typ string @[json: 'type']
hard u64
soft u64
}
@@ -63,26 +63,26 @@ pub mut:
pub struct Mount {
pub mut:
destination string
- typ string
+ typ string @[json: 'type']
source string
options []string
}
pub struct Linux {
pub mut:
- namespaces []LinuxNamespace
- resources LinuxResources
- devices []LinuxDevice
- masked_paths []string
- readonly_paths []string
- uid_mappings []LinuxIDMapping
- gid_mappings []LinuxIDMapping
+ namespaces []LinuxNamespace
+ resources LinuxResources
+ devices []LinuxDevice
+ masked_paths []string @[json: 'maskedPaths']
+ readonly_paths []string @[json: 'readonlyPaths']
+ uid_mappings []LinuxIDMapping @[json: 'uidMappings']
+ gid_mappings []LinuxIDMapping @[json: 'gidMappings']
}
pub struct LinuxNamespace {
pub mut:
- typ string
- path string
+ typ string @[json: 'type']
+ path string @[omitempty]
}
pub struct LinuxResources {
@@ -95,47 +95,47 @@ pub mut:
pub struct Memory {
pub mut:
- limit u64
- reservation u64
- swap u64
- kernel u64
- swappiness i64
+ limit u64 @[omitempty]
+ reservation u64 @[omitempty]
+ swap u64 @[omitempty]
+ kernel u64 @[omitempty]
+ swappiness i64 @[omitempty]
}
pub struct CPU {
pub mut:
- shares u64
- quota i64
- period u64
- cpus string
- mems string
+ shares u64 @[omitempty]
+ quota i64 @[omitempty]
+ period u64 @[omitempty]
+ cpus string @[omitempty]
+ mems string @[omitempty]
}
pub struct Pids {
pub mut:
- limit i64
+ limit i64 @[omitempty]
}
pub struct BlockIO {
pub mut:
- weight u16
+ weight u16 @[omitempty]
}
pub struct LinuxDevice {
pub mut:
- path string
- typ string
- major i64
- minor i64
- file_mode u32
- uid u32
- gid u32
+ path string
+ typ string @[json: 'type']
+ major i64
+ minor i64
+ file_mode u32 @[json: 'fileMode']
+ uid u32
+ gid u32
}
pub struct LinuxIDMapping {
pub mut:
- container_id u32
- host_id u32
+ container_id u32 @[json: 'containerID']
+ host_id u32 @[json: 'hostID']
size u32
}
@@ -160,6 +160,8 @@ pub enum MountType {
proc
sysfs
devpts
+ mqueue
+ cgroup
nfs
overlay
}
@@ -235,4 +237,4 @@ pub enum RlimitType {
rlimit_nice
rlimit_rtprio
rlimit_rttime
-}
\ No newline at end of file
+}
diff --git a/lib/virt/heropods/config_template.json b/lib/virt/heropods/config_template.json
deleted file mode 100644
index 51cd699a..00000000
--- a/lib/virt/heropods/config_template.json
+++ /dev/null
@@ -1,121 +0,0 @@
-{
- "ociVersion": "1.0.2",
- "process": {
- "terminal": true,
- "user": {
- "uid": 0,
- "gid": 0
- },
- "args": [
- "/bin/sh",
- "-c",
- "while true; do sleep 30; done"
- ],
- "env": [
- "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
- "TERM=xterm"
- ],
- "cwd": "/",
- "capabilities": {
- "bounding": [
- "CAP_AUDIT_WRITE",
- "CAP_KILL",
- "CAP_NET_BIND_SERVICE"
- ],
- "effective": [
- "CAP_AUDIT_WRITE",
- "CAP_KILL",
- "CAP_NET_BIND_SERVICE"
- ],
- "inheritable": [
- "CAP_AUDIT_WRITE",
- "CAP_KILL",
- "CAP_NET_BIND_SERVICE"
- ],
- "permitted": [
- "CAP_AUDIT_WRITE",
- "CAP_KILL",
- "CAP_NET_BIND_SERVICE"
- ]
- },
- "rlimits": [
- {
- "type": "RLIMIT_NOFILE",
- "hard": 1024,
- "soft": 1024
- }
- ],
- "noNewPrivileges": true
- },
- "root": {
- "path": "${rootfs_path}",
- "readonly": false
- },
- "mounts": [
- {
- "destination": "/proc",
- "type": "proc",
- "source": "proc"
- },
- {
- "destination": "/dev",
- "type": "tmpfs",
- "source": "tmpfs",
- "options": [
- "nosuid",
- "strictatime",
- "mode=755",
- "size=65536k"
- ]
- },
- {
- "destination": "/sys",
- "type": "sysfs",
- "source": "sysfs",
- "options": [
- "nosuid",
- "noexec",
- "nodev",
- "ro"
- ]
- }
- ],
- "linux": {
- "namespaces": [
- {
- "type": "pid"
- },
- {
- "type": "network"
- },
- {
- "type": "ipc"
- },
- {
- "type": "uts"
- },
- {
- "type": "mount"
- }
- ],
- "maskedPaths": [
- "/proc/acpi",
- "/proc/kcore",
- "/proc/keys",
- "/proc/latency_stats",
- "/proc/timer_list",
- "/proc/timer_stats",
- "/proc/sched_debug",
- "/proc/scsi",
- "/sys/firmware"
- ],
- "readonlyPaths": [
- "/proc/asound",
- "/proc/bus",
- "/proc/fs",
- "/proc/irq",
- "/proc/sys",
- "/proc/sysrq-trigger"
- ]
- }
-}
\ No newline at end of file
diff --git a/lib/virt/heropods/container.v b/lib/virt/heropods/container.v
index bb2e1289..a01009c3 100644
--- a/lib/virt/heropods/container.v
+++ b/lib/virt/heropods/container.v
@@ -3,16 +3,19 @@ module heropods
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal.tmux
import freeflowuniverse.herolib.osal.core as osal
+import freeflowuniverse.herolib.virt.crun
import time
import freeflowuniverse.herolib.builder
import json
+@[heap]
pub struct Container {
pub mut:
- name string
- node ?&builder.Node
- tmux_pane ?&tmux.Pane
- factory &ContainerFactory
+ name string
+ node ?&builder.Node
+ tmux_pane ?&tmux.Pane
+ crun_config ?&crun.CrunConfig
+ factory &ContainerFactory
}
// Struct to parse JSON output of `crun state`
@@ -31,10 +34,32 @@ pub fn (mut self Container) start() ! {
if !container_exists {
// Container doesn't exist, create it first
console.print_debug('Container ${self.name} does not exist, creating it...')
- osal.exec(
- cmd: 'crun create --bundle ${self.factory.base_dir}/configs/${self.name} ${self.name}'
+ // Try to create the container, if it fails with "File exists" error,
+ // try to force delete any leftover state and retry
+ crun_root := '${self.factory.base_dir}/runtime'
+ create_result := osal.exec(
+ cmd: 'crun --root ${crun_root} create --bundle ${self.factory.base_dir}/configs/${self.name} ${self.name}'
stdout: true
- )!
+ ) or {
+ if err.msg().contains('File exists') {
+ console.print_debug('Container creation failed with "File exists", attempting to clean up leftover state...')
+ // Force delete any leftover state - try multiple cleanup approaches
+ osal.exec(cmd: 'crun --root ${crun_root} delete ${self.name}', stdout: false) or {}
+ osal.exec(cmd: 'crun delete ${self.name}', stdout: false) or {} // Also try default root
+ // Clean up any leftover runtime directories
+ osal.exec(cmd: 'rm -rf ${crun_root}/${self.name}', stdout: false) or {}
+ osal.exec(cmd: 'rm -rf /run/crun/${self.name}', stdout: false) or {}
+ // Wait a moment for cleanup to complete
+ time.sleep(500 * time.millisecond)
+ // Retry creation
+ osal.exec(
+ cmd: 'crun --root ${crun_root} create --bundle ${self.factory.base_dir}/configs/${self.name} ${self.name}'
+ stdout: true
+ )!
+ } else {
+ return err
+ }
+ }
console.print_debug('Container ${self.name} created')
}
@@ -48,16 +73,18 @@ pub fn (mut self Container) start() ! {
// because crun doesn't allow restarting a stopped container
if container_exists && status != .running {
console.print_debug('Container ${self.name} exists but is stopped, recreating...')
- osal.exec(cmd: 'crun delete ${self.name}', stdout: false) or {}
+ crun_root := '${self.factory.base_dir}/runtime'
+ osal.exec(cmd: 'crun --root ${crun_root} delete ${self.name}', stdout: false) or {}
osal.exec(
- cmd: 'crun create --bundle ${self.factory.base_dir}/configs/${self.name} ${self.name}'
+ cmd: 'crun --root ${crun_root} create --bundle ${self.factory.base_dir}/configs/${self.name} ${self.name}'
stdout: true
)!
console.print_debug('Container ${self.name} recreated')
}
// start the container (crun start doesn't have --detach flag)
- osal.exec(cmd: 'crun start ${self.name}', stdout: true)!
+ crun_root := '${self.factory.base_dir}/runtime'
+ osal.exec(cmd: 'crun --root ${crun_root} start ${self.name}', stdout: true)!
console.print_green('Container ${self.name} started')
}
@@ -68,12 +95,13 @@ pub fn (mut self Container) stop() ! {
return
}
- osal.exec(cmd: 'crun kill ${self.name} SIGTERM', stdout: false) or {}
+ crun_root := '${self.factory.base_dir}/runtime'
+ osal.exec(cmd: 'crun --root ${crun_root} kill ${self.name} SIGTERM', stdout: false) or {}
time.sleep(2 * time.second)
// Force kill if still running
if self.status()! == .running {
- osal.exec(cmd: 'crun kill ${self.name} SIGKILL', stdout: false) or {}
+ osal.exec(cmd: 'crun --root ${crun_root} kill ${self.name} SIGKILL', stdout: false) or {}
}
console.print_green('Container ${self.name} stopped')
}
@@ -86,7 +114,8 @@ pub fn (mut self Container) delete() ! {
}
self.stop()!
- osal.exec(cmd: 'crun delete ${self.name}', stdout: false) or {}
+ crun_root := '${self.factory.base_dir}/runtime'
+ osal.exec(cmd: 'crun --root ${crun_root} delete ${self.name}', stdout: false) or {}
// Remove from factory's container cache
if self.name in self.factory.containers {
@@ -110,7 +139,10 @@ pub fn (mut self Container) exec(cmd_ osal.Command) !string {
}
pub fn (self Container) status() !ContainerStatus {
- result := osal.exec(cmd: 'crun state ${self.name}', stdout: false) or { return .unknown }
+ crun_root := '${self.factory.base_dir}/runtime'
+ result := osal.exec(cmd: 'crun --root ${crun_root} state ${self.name}', stdout: false) or {
+ return .unknown
+ }
// Parse JSON output from crun state
state := json.decode(CrunState, result.output) or { return .unknown }
@@ -126,7 +158,10 @@ pub fn (self Container) status() !ContainerStatus {
// Check if container exists in crun (regardless of its state)
fn (self Container) container_exists_in_crun() !bool {
// Try to get container state - if it fails, container doesn't exist
- result := osal.exec(cmd: 'crun state ${self.name}', stdout: false) or { return false }
+ crun_root := '${self.factory.base_dir}/runtime'
+ result := osal.exec(cmd: 'crun --root ${crun_root} state ${self.name}', stdout: false) or {
+ return false
+ }
// If we get here, the container exists (even if stopped/paused)
return result.exit_code == 0
@@ -206,7 +241,8 @@ pub fn (mut self Container) tmux_pane(args TmuxPaneArgs) !&tmux.Pane {
// Execute command if provided
if args.cmd != '' {
- pane.send_keys('crun exec ${self.name} ${args.cmd}')!
+ crun_root := '${self.factory.base_dir}/runtime'
+ pane.send_keys('crun --root ${crun_root} exec ${self.name} ${args.cmd}')!
}
self.tmux_pane = pane
@@ -223,6 +259,7 @@ pub fn (mut self Container) node() !&builder.Node {
mut exec := builder.ExecutorCrun{
container_id: self.name
+ crun_root: '${self.factory.base_dir}/runtime'
debug: false
}
@@ -242,3 +279,58 @@ pub fn (mut self Container) node() !&builder.Node {
self.node = node
return node
}
+
+// Get the crun configuration for this container
+pub fn (self Container) config() !&crun.CrunConfig {
+ return self.crun_config or { return error('Container ${self.name} has no crun configuration') }
+}
+
+// Container configuration customization methods
+pub fn (mut self Container) set_memory_limit(limit_mb u64) !&Container {
+ mut config := self.config()!
+ config.set_memory_limit(limit_mb * 1024 * 1024) // Convert MB to bytes
+ return &self
+}
+
+pub fn (mut self Container) set_cpu_limits(period u64, quota i64, shares u64) !&Container {
+ mut config := self.config()!
+ config.set_cpu_limits(period, quota, shares)
+ return &self
+}
+
+pub fn (mut self Container) add_mount(source string, destination string, mount_type crun.MountType, options []crun.MountOption) !&Container {
+ mut config := self.config()!
+ config.add_mount(source, destination, mount_type, options)
+ return &self
+}
+
+pub fn (mut self Container) add_capability(cap crun.Capability) !&Container {
+ mut config := self.config()!
+ config.add_capability(cap)
+ return &self
+}
+
+pub fn (mut self Container) remove_capability(cap crun.Capability) !&Container {
+ mut config := self.config()!
+ config.remove_capability(cap)
+ return &self
+}
+
+pub fn (mut self Container) add_env(key string, value string) !&Container {
+ mut config := self.config()!
+ config.add_env(key, value)
+ return &self
+}
+
+pub fn (mut self Container) set_working_dir(dir string) !&Container {
+ mut config := self.config()!
+ config.set_working_dir(dir)
+ return &self
+}
+
+// Save the current configuration to disk
+pub fn (self Container) save_config() ! {
+ config := self.config()!
+ config_path := '${self.factory.base_dir}/configs/${self.name}/config.json'
+ config.save_to_file(config_path)!
+}
diff --git a/lib/virt/heropods/container_create.v b/lib/virt/heropods/container_create.v
index 80ae4d80..66fec4fc 100644
--- a/lib/virt/heropods/container_create.v
+++ b/lib/virt/heropods/container_create.v
@@ -2,10 +2,9 @@ module heropods
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal.core as osal
-import freeflowuniverse.herolib.core.pathlib
+import freeflowuniverse.herolib.virt.crun
import freeflowuniverse.herolib.installers.virt.herorunner as herorunner_installer
import os
-import x.json2
// Updated enum to be more flexible
pub enum ContainerImageType {
@@ -27,7 +26,7 @@ pub:
pub fn (mut self ContainerFactory) new(args ContainerNewArgs) !&Container {
if args.name in self.containers && !args.reset {
- return self.containers[args.name]
+ return self.containers[args.name] or { panic('bug: container should exist') }
}
// Determine image to use
@@ -67,8 +66,8 @@ pub fn (mut self ContainerFactory) new(args ContainerNewArgs) !&Container {
return error('Image rootfs not found: ${rootfs_path}. Please ensure the image is available.')
}
- // Create container config (with terminal disabled) but don't create the container yet
- self.create_container_config(args.name, rootfs_path)!
+ // Create crun configuration using the crun module
+ mut crun_config := self.create_crun_config(args.name, rootfs_path)!
// Ensure crun is installed on host
if !osal.cmd_exists('crun') {
@@ -79,41 +78,45 @@ pub fn (mut self ContainerFactory) new(args ContainerNewArgs) !&Container {
// Create container struct but don't create the actual container in crun yet
// The actual container creation will happen in container.start()
mut container := &Container{
- name: args.name
- factory: &self
+ name: args.name
+ crun_config: crun_config
+ factory: &self
}
self.containers[args.name] = container
return container
}
-// Create OCI config.json from template
-fn (self ContainerFactory) create_container_config(container_name string, rootfs_path string) ! {
+// Create crun configuration using the crun module
+fn (mut self ContainerFactory) create_crun_config(container_name string, rootfs_path string) !&crun.CrunConfig {
+ // Create crun configuration using the factory pattern
+ mut config := crun.new(mut self.crun_configs, name: container_name)!
+
+ // Configure for heropods use case - disable terminal for background containers
+ config.set_terminal(false)
+ config.set_command(['/bin/sh', '-c', 'while true; do sleep 30; done'])
+ config.set_working_dir('/')
+ config.set_user(0, 0, [])
+ config.add_env('PATH', '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin')
+ config.add_env('TERM', 'xterm')
+ config.set_rootfs(rootfs_path, false)
+ config.set_hostname('container')
+ config.set_no_new_privileges(true)
+
+ // Add the specific rlimit for file descriptors
+ config.add_rlimit(.rlimit_nofile, 1024, 1024)
+
+ // Validate the configuration
+ config.validate()!
+
+ // Create config directory and save JSON
config_dir := '${self.base_dir}/configs/${container_name}'
osal.exec(cmd: 'mkdir -p ${config_dir}', stdout: false)!
- // Load template
- mut config_content := $tmpl('config_template.json')
-
- // Parse JSON with json2
- mut root := json2.raw_decode(config_content)!
- mut config := root.as_map()
-
- // Get or create process map
- mut process := if 'process' in config {
- config['process'].as_map()
- } else {
- map[string]json2.Any{}
- }
-
- // Force disable terminal
- process['terminal'] = json2.Any(false)
- config['process'] = json2.Any(process)
-
- // Write back to config.json
config_path := '${config_dir}/config.json'
- mut p := pathlib.get_file(path: config_path, create: true)!
- p.write(json2.encode_pretty(json2.Any(config)))!
+ config.save_to_file(config_path)!
+
+ return config
}
// Use podman to pull image and extract rootfs
diff --git a/lib/virt/heropods/factory.v b/lib/virt/heropods/factory.v
index ef6516c1..bfd21019 100644
--- a/lib/virt/heropods/factory.v
+++ b/lib/virt/heropods/factory.v
@@ -2,7 +2,7 @@ module heropods
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal.core as osal
-import time
+import freeflowuniverse.herolib.virt.crun
import os
@[heap]
@@ -11,6 +11,7 @@ pub mut:
tmux_session string
containers map[string]&Container
images map[string]&ContainerImage
+ crun_configs map[string]&crun.CrunConfig
base_dir string
}
@@ -45,6 +46,11 @@ fn (mut self ContainerFactory) init(args FactoryInitArgs) ! {
}
}
+ // Clean up any leftover crun state if reset is requested
+ if args.reset {
+ self.cleanup_crun_state()!
+ }
+
// Load existing images into cache
self.load_existing_images()!
@@ -104,7 +110,7 @@ pub fn (mut self ContainerFactory) get(args ContainerNewArgs) !&Container {
if args.name !in self.containers {
return error('Container "${args.name}" does not exist. Use factory.new() to create it first.')
}
- return self.containers[args.name]
+ return self.containers[args.name] or { panic('bug: container should exist') }
}
// Get image by name
@@ -112,7 +118,7 @@ pub fn (mut self ContainerFactory) image_get(name string) !&ContainerImage {
if name !in self.images {
return error('Image "${name}" not found in cache. Try importing or downloading it.')
}
- return self.images[name]
+ return self.images[name] or { panic('bug: image should exist') }
}
// List all containers currently managed by crun
@@ -136,3 +142,34 @@ pub fn (self ContainerFactory) list() ![]Container {
}
return containers
}
+
+// Clean up any leftover crun state
+fn (mut self ContainerFactory) cleanup_crun_state() ! {
+ console.print_debug('Cleaning up leftover crun state...')
+ crun_root := '${self.base_dir}/runtime'
+
+ // Stop and delete all containers in our custom root
+ result := osal.exec(cmd: 'crun --root ${crun_root} list -q', stdout: false) or { return }
+
+ for container_name in result.output.split_into_lines() {
+ if container_name.trim_space() != '' {
+ console.print_debug('Cleaning up container: ${container_name}')
+ osal.exec(cmd: 'crun --root ${crun_root} kill ${container_name} SIGKILL', stdout: false) or {}
+ osal.exec(cmd: 'crun --root ${crun_root} delete ${container_name}', stdout: false) or {}
+ }
+ }
+
+ // Also clean up any containers in the default root that might be ours
+ result2 := osal.exec(cmd: 'crun list -q', stdout: false) or { return }
+ for container_name in result2.output.split_into_lines() {
+ if container_name.trim_space() != '' && container_name in self.containers {
+ console.print_debug('Cleaning up container from default root: ${container_name}')
+ osal.exec(cmd: 'crun kill ${container_name} SIGKILL', stdout: false) or {}
+ osal.exec(cmd: 'crun delete ${container_name}', stdout: false) or {}
+ }
+ }
+
+ // Clean up runtime directories
+ osal.exec(cmd: 'rm -rf ${crun_root}/*', stdout: false) or {}
+ osal.exec(cmd: 'find /run/crun -name "*" -type d -exec rm -rf {} + 2>/dev/null', stdout: false) or {}
+}