Merge branch 'development' into development_heropods

This commit is contained in:
Mahmoud-Emad
2025-11-17 12:05:53 +02:00
20 changed files with 1291 additions and 478 deletions

View File

@@ -46,9 +46,6 @@ jobs:
cd v
make
./v symlink
if [ "${{ runner.os }}" = "macOS" ]; then
sudo sed -i '' '618,631d' /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h
fi
cd -
mkdir -p ~/.vmodules/incubaid
@@ -92,7 +89,7 @@ jobs:
'
else
v -w -d use_openssl -enable-globals -gc none -cc tcc cli/hero.v -o cli/hero-${{ matrix.target }}
v -w -d use_openssl -enable-globals -cc clang cli/hero.v -o cli/hero-${{ matrix.target }}
fi
- name: Upload glibc binary

View File

@@ -2,11 +2,12 @@
## Overview
This document provides clear instructions for AI agents to create new HeroDB models similar to `message.v`. These models are used to store structured data in Redis using the HeroDB system.
This document provides clear instructions for AI agents to create new HeroDB models similar to `message.v`.
These models are used to store structured data in Redis using the HeroDB system.
The `message.v` file can be found in `lib/hero/heromodels/message.v`.
## Key Concepts
- Each model represents a data type stored in Redis hash sets
- Models must implement serialization/deserialization using the `encoder` module
- Models inherit from the `Base` struct which provides common fields
- The database uses a factory pattern for model access

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
// Example: build, store and query a ProductRequirementsDoc (PRD) in HeroDB.
// Demonstrates the full round-trip: new -> set -> get -> list -> exist.

import incubaid.herolib.hero.heromodels

// Initialize database (models factory backed by Redis/HeroDB)
mut mydb := heromodels.new()!

// Create goals (measurable outcomes; see Goal in lib/hero/heromodels/prd.v)
mut goals := [
	heromodels.Goal{
		id:          'G1'
		title:       'Faster Requirements'
		description: 'Reduce PRD creation time to under 1 day'
		gtype:       .product
	}
]

// Create use cases (actor + goal + steps + success/failure outcomes)
mut use_cases := [
	heromodels.UseCase{
		id:      'UC1'
		title:   'Generate PRD'
		actor:   'Product Manager'
		goal:    'Create validated PRD'
		steps:   ['Select template', 'Fill fields', 'Export to Markdown']
		success: 'Complete PRD generated'
		failure: 'Validation failed'
	}
]

// Create requirements, each carrying testable acceptance criteria
mut criterion := heromodels.AcceptanceCriterion{
	id:          'AC1'
	description: 'Display template list'
	condition:   'List contains >= 5 templates'
}
mut requirements := [
	heromodels.Requirement{
		id:           'R1'
		category:     'Editor'
		title:        'Template Selection'
		rtype:        .functional
		description:  'User can select from templates'
		priority:     .high
		criteria:     [criterion]
		dependencies: []
	}
]

// Create constraints
// NOTE(review): '.technica' matches the (likely misspelled) ConstraintType
// variant declared in prd.v — keep in sync if that enum is ever renamed.
mut constraints := [
	heromodels.Constraint{
		id:          'C1'
		title:       'ARM64 Support'
		description: 'Must run on ARM64 infrastructure'
		ctype:       .technica
	}
]

// Create risks (risk id -> description)
// NOTE(review): these strings appear to be missing a separator between the
// risk and its mitigation (prd.md uses '→'); confirm the intended wording.
mut risks := map[string]string{}
risks['RISK1'] = 'Templates too limited Add community contributions'
risks['RISK2'] = 'AI suggestions inaccurate Add review workflow'

// Create a new PRD object (in memory, not yet persisted)
mut prd := mydb.prd.new(
	product_name: 'Lumina PRD Builder'
	version:      'v1.0'
	overview:     'Tool to create structured PRDs quickly'
	vision:       'Enable teams to generate clear requirements in minutes'
	goals:        goals
	use_cases:    use_cases
	requirements: requirements
	constraints:  constraints
	risks:        risks
)!

// Save to database; set() returns the stored object (with its assigned id)
prd = mydb.prd.set(prd)!
println(' Created PRD with ID: ${prd.id}')

// Retrieve from database by id
mut retrieved := mydb.prd.get(prd.id)!
println(' Retrieved PRD: ${retrieved.product_name}')

// List all PRDs
mut all_prds := mydb.prd.list()!
println(' Total PRDs in database: ${all_prds.len}')

// Check if a PRD with this id exists
exists := mydb.prd.exist(prd.id)!
println(' PRD exists: ${exists}')

View File

@@ -1,8 +1,25 @@
#!/bin/bash
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
#==============================================================================
# GLOBAL VARIABLES
#==============================================================================
RESET=false
REMOVE=false
INSTALL_ANALYZER=false
HEROLIB=false
START_REDIS=false
export DIR_BASE="$HOME"
export DIR_BUILD="/tmp"
export DIR_CODE="$DIR_BASE/code"
export DIR_CODE_V="$DIR_BASE/_code"
export OSNAME=""
#==============================================================================
# FUNCTION DEFINITIONS
#==============================================================================
# Help function
print_help() {
@@ -16,6 +33,8 @@ print_help() {
echo " --remove Remove V installation and exit"
echo " --analyzer Install/update v-analyzer"
echo " --herolib Install our herolib"
echo " --herolib-version=VERSION Install specific herolib tag/branch (default: development)"
echo " --start-redis Start the Redis service if installed"
echo
echo "Examples:"
echo " $0"
@@ -27,38 +46,6 @@ print_help() {
echo
}
# Parse arguments
RESET=false
REMOVE=false
INSTALL_ANALYZER=false
HEROLIB=false
for arg in "$@"; do
case $arg in
-h|--help)
print_help
exit 0
;;
--reset)
RESET=true
;;
--remove)
REMOVE=true
;;
--herolib)
HEROLIB=true
;;
--analyzer)
INSTALL_ANALYZER=true
;;
*)
echo "Unknown option: $arg"
echo "Use -h or --help to see available options"
exit 1
;;
esac
done
# Function to check if command exists
command_exists() {
command -v "$1" >/dev/null 2>&1
@@ -80,11 +67,6 @@ function run_sudo() {
fi
}
export DIR_BASE="$HOME"
export DIR_BUILD="/tmp"
export DIR_CODE="$DIR_BASE/code"
export DIR_CODE_V="$DIR_BASE/_code"
check_release() {
if ! command -v lsb_release >/dev/null 2>&1; then
echo "❌ lsb_release command not found. Install 'lsb-release' package first."
@@ -119,16 +101,16 @@ ubuntu_sources_fix() {
if [ -f /etc/apt/sources.list ]; then
echo "📦 Backing up /etc/apt/sources.list -> /etc/apt/sources.list.backup.$TIMESTAMP"
sudo mv /etc/apt/sources.list /etc/apt/sources.list.backup.$TIMESTAMP
run_sudo mv /etc/apt/sources.list /etc/apt/sources.list.backup.$TIMESTAMP
fi
if [ -f /etc/apt/sources.list.d/ubuntu.sources ]; then
echo "📦 Backing up /etc/apt/sources.list.d/ubuntu.sources -> /etc/apt/sources.list.d/ubuntu.sources.backup.$TIMESTAMP"
sudo mv /etc/apt/sources.list.d/ubuntu.sources /etc/apt/sources.list.d/ubuntu.sources.backup.$TIMESTAMP
run_sudo mv /etc/apt/sources.list.d/ubuntu.sources /etc/apt/sources.list.d/ubuntu.sources.backup.$TIMESTAMP
fi
echo "📝 Writing new /etc/apt/sources.list.d/ubuntu.sources"
sudo tee /etc/apt/sources.list.d/ubuntu.sources >/dev/null <<EOF
run_sudo tee /etc/apt/sources.list.d/ubuntu.sources >/dev/null <<EOF
Types: deb
URIs: mirror://mirrors.ubuntu.com/mirrors.txt
Suites: $CODENAME $CODENAME-updates $CODENAME-backports $CODENAME-security
@@ -136,7 +118,7 @@ Components: main restricted universe multiverse
EOF
echo "🔄 Running apt update..."
sudo apt update -qq
run_sudo apt update -qq
echo "✅ Done! Your system now uses the rotating Ubuntu mirror list."
fi
@@ -159,34 +141,11 @@ function sshknownkeysadd {
}
function package_check_install {
local command_name="$1"
if command -v "$command_name" >/dev/null 2>&1; then
echo "command '$command_name' is already installed."
else
package_install '$command_name'
fi
}
function package_install {
local command_name="$1"
if [[ "${OSNAME}" == "ubuntu" ]]; then
if is_github_actions; then
run_sudo apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
else
apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
fi
elif [[ "${OSNAME}" == "darwin"* ]]; then
brew install $command_name
elif [[ "${OSNAME}" == "alpine"* ]]; then
apk add $command_name
elif [[ "${OSNAME}" == "arch"* ]]; then
pacman --noconfirm -Su $command_name
else
echo "platform : ${OSNAME} not supported"
exit 1
fi
# Performs a non-interactive, forceful apt installation.
# WARNING: This is designed for CI/automated environments. It can be dangerous
# on a personal machine as it may remove essential packages to resolve conflicts.
function apt_force_install {
run_sudo apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install "$@" -q -y --allow-downgrades --allow-remove-essential
}
is_github_actions() {
@@ -201,7 +160,6 @@ is_github_actions() {
fi
}
function myplatform {
if [[ "${OSTYPE}" == "darwin"* ]]; then
export OSNAME='darwin'
@@ -221,27 +179,14 @@ function myplatform {
echo "Unable to determine the operating system."
exit 1
fi
# if [ "$(uname -m)" == "x86_64" ]; then
# echo "This system is running a 64-bit processor."
# else
# echo "This system is not running a 64-bit processor."
# exit 1
# fi
}
myplatform
function os_update {
function update_system {
echo ' - System Update'
if [[ "${OSNAME}" == "ubuntu" ]]; then
ubuntu_sources_fix
fi
echo ' - os update'
if [[ "${OSNAME}" == "ubuntu" ]]; then
if is_github_actions; then
echo "github actions"
echo "github actions: preparing system"
else
rm -f /var/lib/apt/lists/lock
rm -f /var/cache/apt/archives/lock
@@ -252,38 +197,45 @@ function os_update {
run_sudo dpkg --configure -a
run_sudo apt update -y
if is_github_actions; then
echo "** IN GITHUB ACTIONS, DON'T DO UPDATE"
echo "** IN GITHUB ACTIONS, DON'T DO SYSTEM UPGRADE"
else
set +e
echo "** UPDATE"
echo "** System Upgrade"
apt-mark hold grub-efi-amd64-signed
set -e
apt upgrade -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes
apt autoremove -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes
fi
#apt install apt-transport-https ca-certificates curl software-properties-common -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes
package_install "apt-transport-https ca-certificates curl wget software-properties-common tmux make gcc"
package_install "rclone rsync mc redis-server screen net-tools git dnsutils htop ca-certificates screen lsb-release binutils pkg-config libssl-dev iproute2"
elif [[ "${OSNAME}" == "darwin"* ]]; then
if command -v brew >/dev/null 2>&1; then
echo ' - homebrew installed'
else
if ! command -v brew >/dev/null 2>&1; then
echo ' - Installing Homebrew'
export NONINTERACTIVE=1
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
unset NONINTERACTIVE
fi
elif [[ "${OSNAME}" == "alpine"* ]]; then
apk update
elif [[ "${OSNAME}" == "arch"* ]]; then
pacman -Syyu --noconfirm
fi
echo ' - System Update Done'
}
function install_packages {
echo ' - Installing Packages'
if [[ "${OSNAME}" == "ubuntu" ]]; then
apt_force_install apt-transport-https ca-certificates curl wget software-properties-common tmux make gcc rclone rsync mc redis-server screen net-tools git dnsutils htop lsb-release binutils pkg-config libssl-dev iproute2
elif [[ "${OSNAME}" == "darwin"* ]]; then
# The set +e is to prevent script failure if some packages are already installed.
set +e
brew install mc redis curl tmux screen htop wget rclone tcc
set -e
elif [[ "${OSNAME}" == "alpine"* ]]; then
apk update screen git htop tmux
apk add mc curl rsync htop redis bash bash-completion screen git rclone
apk add --no-cache screen git htop tmux mc curl rsync redis bash bash-completion rclone
# Switch default shell to bash for better interactive use
sed -i 's#/bin/ash#/bin/bash#g' /etc/passwd
elif [[ "${OSNAME}" == "arch"* ]]; then
pacman -Syy --noconfirm
pacman -Syu --noconfirm
pacman -Su --noconfirm arch-install-scripts gcc mc git tmux curl htop redis wget screen net-tools git sudo htop ca-certificates lsb-release screen rclone
pacman -Su --noconfirm arch-install-scripts gcc mc git tmux curl htop redis wget screen net-tools sudo lsb-release rclone
# Check if builduser exists, create if not
if ! id -u builduser > /dev/null 2>&1; then
@@ -291,15 +243,10 @@ function os_update {
echo "builduser:$(openssl rand -base64 32 | sha256sum | base64 | head -c 32)" | chpasswd
echo 'builduser ALL=(ALL) NOPASSWD: ALL' | tee /etc/sudoers.d/builduser
fi
# if [[ -n "${DEBUG}" ]]; then
# execute_with_marker "paru_install" paru_install
# fi
fi
echo ' - os update done'
echo ' - Package Installation Done'
}
function hero_lib_pull {
pushd $DIR_CODE/github/incubaid/herolib 2>&1 >> /dev/null
if [[ $(git status -s) ]]; then
@@ -321,46 +268,19 @@ function hero_lib_get {
git clone --depth 1 --no-single-branch https://github.com/incubaid/herolib.git
popd 2>&1 >> /dev/null
fi
# Checkout specific version if requested
if [ -n "${HEROLIB_VERSION:-}" ]; then
pushd $DIR_CODE/github/incubaid/herolib 2>&1 >> /dev/null
if ! git checkout "$HEROLIB_VERSION"; then
echo "Failed to checkout herolib version: $HEROLIB_VERSION"
popd 2>&1 >> /dev/null
return 1
fi
popd 2>&1 >> /dev/null
fi
}
# function install_secp256k1 {
# echo "Installing secp256k1..."
# if [[ "${OSNAME}" == "darwin"* ]]; then
# # Attempt installation only if not already found
# echo "Attempting secp256k1 installation via Homebrew..."
# brew install secp256k1
# elif [[ "${OSNAME}" == "ubuntu" ]]; then
# # Install build dependencies
# package_install "build-essential wget autoconf libtool"
# # Download and extract secp256k1
# cd "${DIR_BUILD}"
# wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
# tar -xvf v0.3.2.tar.gz
# # Build and install
# cd secp256k1-0.3.2/
# ./autogen.sh
# ./configure
# make -j 5
# if is_github_actions; then
# run_sudo make install
# else
# make install
# fi
# # Cleanup
# cd ..
# rm -rf secp256k1-0.3.2 v0.3.2.tar.gz
# else
# echo "secp256k1 installation not implemented for ${OSNAME}"
# exit 1
# fi
# echo "secp256k1 installation complete!"
# }
remove_all() {
echo "Removing V installation..."
# Set reset to true to use existing reset functionality
@@ -397,117 +317,44 @@ remove_all() {
echo "V removal complete"
}
# Function to check if a service is running and start it if needed
check_and_start_redis() {
# Normal service management for non-container environments
if [[ "${OSNAME}" == "ubuntu" ]] || [[ "${OSNAME}" == "debian" ]]; then
# Handle Redis installation for GitHub Actions environment
if is_github_actions; then
# Import Redis GPG key
curl -fsSL https://packages.redis.io/gpg | run_sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
# Add Redis repository
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | run_sudo tee /etc/apt/sources.list.d/redis.list
# Install Redis
run_sudo apt-get update
run_sudo apt-get install -y redis
# Start Redis
redis-server --daemonize yes
# Print versions
redis-cli --version
redis-server --version
return
# Starts the Redis service if it is not already running.
function start_redis_service() {
echo "Attempting to start Redis service..."
# Check if redis-server is even installed
if ! command_exists redis-server; then
echo "Warning: redis-server command not found. Skipping."
return 0
fi
# Check if running inside a container
if grep -q "/docker/" /proc/1/cgroup || [ ! -d "/run/systemd/system" ]; then
echo "Running inside a container. Starting redis directly."
if pgrep redis-server > /dev/null; then
echo "redis is already running."
else
echo "redis is not running. Starting it in the background..."
redis-server --daemonize yes
if pgrep redis-server > /dev/null; then
echo "redis started successfully."
else
echo "Failed to start redis. Please check logs for details."
exit 1
fi
fi
return
# Check if redis is already running by pinging it
if redis-cli ping > /dev/null 2>&1; then
echo "Redis is already running."
return 0
fi
if command_exists zinit; then
# Check if redis service is managed by zinit and is running
if zinit status redis | grep -q "state: Running"; then
echo "redis is already running and managed by zinit."
return
else
echo "zinit is installed, but redis is not running or not managed by zinit. Proceeding with other checks."
fi
fi
if systemctl is-active --quiet "redis"; then
echo "redis is already running."
else
echo "redis is not running. Starting it..."
run_sudo systemctl start "redis"
if systemctl is-active --quiet "redis"; then
echo "redis started successfully."
else
echo "Failed to start redis. Please check logs for details."
exit 1
fi
fi
echo "Redis is not running. Attempting to start it..."
if command_exists systemctl; then
run_sudo systemctl start redis
# For Alpine, use rc-service
elif command_exists rc-service; then
run_sudo rc-service redis start
elif [[ "${OSNAME}" == "darwin"* ]]; then
# Check if we're in GitHub Actions
if is_github_actions; then
echo "Running in GitHub Actions on macOS. Starting redis directly..."
if pgrep redis-server > /dev/null; then
echo "redis is already running."
else
echo "redis is not running. Starting it in the background..."
redis-server --daemonize yes
if pgrep redis-server > /dev/null; then
echo "redis started successfully."
else
echo "Failed to start redis. Please check logs for details."
exit 1
fi
fi
else
# For regular macOS environments, use brew services
if brew services list | grep -q "^redis.*started"; then
echo "redis is already running."
else
echo "redis is not running. Starting it..."
# For macOS, use brew services
if ! brew services list | grep -q "^redis.*started"; then
brew services start redis
fi
fi
elif [[ "${OSNAME}" == "alpine"* ]]; then
if rc-service "redis" status | grep -q "running"; then
echo "redis is already running."
else
echo "redis is not running. Starting it..."
rc-service "redis" start
echo "No service manager found, starting Redis manually..."
redis-server --daemonize yes
return 1
fi
elif [[ "${OSNAME}" == "arch"* ]]; then
if systemctl is-active --quiet "redis"; then
echo "redis is already running."
# Final check to see if it started
sleep 1 # Give it a second to start up
if redis-cli ping > /dev/null 2>&1; then
echo "Redis started successfully."
else
echo "redis is not running. Starting it..."
run_sudo systemctl start "redis"
fi
else
echo "Service management for redis is not implemented for platform: $OSNAME"
echo "Error: Failed to start Redis."
exit 1
fi
}
@@ -615,36 +462,82 @@ v-analyzer() {
}
#==============================================================================
# MAIN EXECUTION
#==============================================================================
main() {
# Make sure we're running in the directory where the script is
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# Handle remove if requested
if [ "$REMOVE" = true ]; then
# Parse arguments
for arg in "$@"; do
case $arg in
-h|--help)
print_help
exit 0
;;
--reset)
RESET=true
;;
--remove)
REMOVE=true
;;
--herolib)
HEROLIB=true
;;
--herolib-version=*)
HEROLIB_VERSION="${arg#*=}"
if [ -z "$HEROLIB_VERSION" ]; then
echo "Error: --herolib-version requires a version argument"
echo "Example: $0 --herolib-version=v1.0.0"
exit 1
fi
;;
--analyzer)
INSTALL_ANALYZER=true
;;
--start-redis)
START_REDIS=true
;;
*)
echo "Unknown option: $arg"
echo "Use -h or --help to see available options"
exit 1
;;
esac
done
myplatform
# Handle remove if requested
if [ "$REMOVE" = true ]; then
remove_all
exit 0
fi
fi
# Create code directory if it doesn't exist
mkdir -p ~/code
# Create code directory if it doesn't exist
mkdir -p ~/code
# Check if v needs to be installed
if [ "$RESET" = true ] || ! command_exists v; then
# Check if v needs to be installed
if [ "$RESET" = true ] || ! command_exists v; then
os_update
update_system
install_packages
sshknownkeysadd
# Install secp256k1
v-install
fi
if [ "$START_REDIS" = true ]; then
start_redis_service
fi
fi
# set -x
check_and_start_redis
if [ "$HEROLIB" = true ]; then
if [ "$HEROLIB" = true ]; then
echo "=== Herolib Installation ==="
echo "Current directory: $(pwd)"
echo "Checking for install_herolib.vsh: $([ -f "./install_herolib.vsh" ] && echo "found" || echo "not found")"
@@ -669,16 +562,19 @@ if [ "$HEROLIB" = true ]; then
echo "Installing herolib from: $HEROLIB_DIR"
"$HEROLIB_DIR/install_herolib.vsh"
fi
fi
if [ "$INSTALL_ANALYZER" = true ]; then
if [ "$INSTALL_ANALYZER" = true ]; then
# Only install v-analyzer if not in GitHub Actions environment
if ! is_github_actions; then
v-analyzer
fi
echo "Run 'source ~/.bashrc' or 'source ~/.zshrc' to update PATH for v-analyzer"
fi
fi
echo "Installation complete!"
echo "Installation complete!"
}
main "$@"

View File

@@ -1,11 +1,11 @@
# ipapi
# ip api (IP INFO SERVICE CLIENT)
To get started
```v
import incubaid.herolib.clients. ipapi
import incubaid.herolib.clients.ipapi
mut client:= ipapi.get()!

View File

@@ -205,8 +205,7 @@ pub fn create_heroscript(args ModuleMeta) ! {
'1'
} else {
'0'
}
}"
}}"
}
if !os.exists(args.path) {
os.mkdir(args.path)!

View File

@@ -25,6 +25,8 @@ import incubaid.herolib.installers.infra.coredns
import incubaid.herolib.installers.infra.gitea
import incubaid.herolib.installers.infra.livekit
import incubaid.herolib.installers.infra.zinit_installer
import incubaid.herolib.installers.k8s.cryptpad
import incubaid.herolib.installers.k8s.element_chat
import incubaid.herolib.installers.lang.golang
import incubaid.herolib.installers.lang.nodejs
import incubaid.herolib.installers.lang.python
@@ -37,6 +39,7 @@ import incubaid.herolib.installers.threefold.griddriver
import incubaid.herolib.installers.virt.cloudhypervisor
import incubaid.herolib.installers.virt.docker
import incubaid.herolib.installers.virt.herorunner
import incubaid.herolib.installers.virt.kubernetes_installer
import incubaid.herolib.installers.virt.lima
import incubaid.herolib.installers.virt.pacman
import incubaid.herolib.installers.virt.podman
@@ -80,6 +83,8 @@ pub fn run_all(args_ PlayArgs) ! {
gitea.play(mut plbook)!
livekit.play(mut plbook)!
zinit_installer.play(mut plbook)!
cryptpad.play(mut plbook)!
element_chat.play(mut plbook)!
golang.play(mut plbook)!
nodejs.play(mut plbook)!
python.play(mut plbook)!
@@ -92,6 +97,7 @@ pub fn run_all(args_ PlayArgs) ! {
cloudhypervisor.play(mut plbook)!
docker.play(mut plbook)!
herorunner.play(mut plbook)!
kubernetes_installer.play(mut plbook)!
lima.play(mut plbook)!
pacman.play(mut plbook)!
podman.play(mut plbook)!

View File

@@ -1,94 +0,0 @@
the main data is in key value stor:
- each object has u32 id
- each object has u16 version (version of same data)
- each object has u16 schemaid (if schema changes)
- each object has tags u32 (to tag table)
- each object has a created_at timestamp
- each object has a updated_at timestamp
- each object has binary content (the data)
- each object has link to who can read/write/delete (lists of u32 per read/write/delete to group or user), link to security policy u32
- each object has a signature of the data by the user who created/updated it
- there are users & groups
- groups can have other groups and users inside
- users & groups are unique u32 as well in the DB, so no collision
this database does not know what the data is about, its agnostic to schema
now make the 4 structs which represent above
- data
- user
- group ([]u32) each links to user or group, name, description
- tags ([]string which gets a unique id, so its shorter to link to data object)
- securitypolicy (see below)
and encoding scheme using lib/data/encoder, we need encode/decode on the structs, so we have densest possible encoding
now we need the implementation details for each struct, including the fields and their types, as well as the encoding/decoding logic.
the outside is a server over openrpc which has
- set (userid:u32, id:u32, data: Data, signature: string, tags:[]string) -> u32. (id can be 0 then its new, if existing we need to check if user can do it), tags will be recalculated based on []string (lower case, sorted list then md5 -> u32)
- get (userid:u32, id: u32, signedid: string) -> Data,Tags as []string
- exist (userid:u32, id: u32) -> bool //this we allow without signature
- delete (userid:u32, id: u32, signedid: string) -> bool
- list (userid:u32, signature: string, based on tags, schemaid, from creation/update and to creation/update), returns max 200 items -> u32
the interface is stateless, no previous connection known, based on signature the server can verify the user is allowed to perform the action
the backend database is redis (hsets and sets)
## signing implementation
the signing is in the same redis implemented, so no need to use vlang for that
```bash
# Generate an ephemeral signing keypair
redis-cli -p $PORT AGE GENSIGN
# Example output:
# 1) "<verify_pub_b64>"
# 2) "<sign_secret_b64>"
# Sign a message with the secret
redis-cli -p $PORT AGE SIGN "<sign_secret_b64>" "msg"
# → returns "<signature_b64>"
# Verify with the public key
redis-cli -p $PORT AGE VERIFY "<verify_pub_b64>" "msg" "<signature_b64>"
# → 1 (valid) or 0 (invalid)
```
versioning: when stored we don't have to worry about version the database will check if it exists, newest version and then update
## some of the base objects
```v
@[heap]
pub struct SecurityPolicy {
pub mut:
id u32
read []u32 //links to users & groups
write []u32 //links to users & groups
delete []u32 //links to users & groups
public bool
}
@[heap]
pub struct Tags {
pub mut:
id u32
names []string //unique per id
md5 string //of sorted names, to make easy to find unique id
}
```

View File

@@ -31,6 +31,7 @@ pub mut:
registration_desk DBRegistrationDesk
messages DBMessages
tags DBTags
prd DBPrd
rpc_handler &Handler
}
@@ -91,6 +92,9 @@ pub fn new(args NewArgs) !&ModelsFactory {
tags: DBTags{
db: &mydb
}
prd: DBPrd{
db: &mydb
}
rpc_handler: &h
}

View File

@@ -239,6 +239,7 @@ pub fn (mut self DBMessages) list(args MessageListArg) ![]Message {
return filtered_messages
}
// is how we implement the openrpc calls
pub fn message_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {

View File

@@ -1,6 +0,0 @@
## notes around how to do a calendly feature
- make an agenda for the planning and call it as such, this has the timeboxes available for planning
- create template for calendar_event
- create planning item and link to this template
- select the rules which work for recurrence

275
lib/hero/heromodels/prd.md Normal file
View File

@@ -0,0 +1,275 @@
# 📘 **PRD Manual**
# 1. **Product Overview**
### **What to Write**
A 2–4 sentence summary describing **what the product is**, **what problem it solves**, and **who it is for**.
### **Fields**
* `product_name`
* `version`
* `overview`
* `vision`
### **Example**
```
product_name: "Lumina PRD Builder"
version: "v1.0"
overview: "Lumina PRD Builder allows teams to generate structured, validated Product Requirements Documents using templates and AI guidance."
vision: "Enable any team to create clear requirements in minutes, improving alignment and execution speed."
```
---
# 2. **Goals**
### **What to Write**
A list of measurable outcomes that define success.
### **Fields**
* `id`
* `title`
* `description`
* `gtype` → product / business / operational
### **Example**
```
{
id: "G1"
title: "Faster Requirements Creation"
description: "Reduce PRD creation time from 2 weeks to under 1 day for all teams."
gtype: .product
},
{
id: "G2"
title: "Increase Adoption"
description: "Achieve 500 monthly active users within 90 days of launch."
gtype: .business
}
```
---
# 3. **Use Cases**
### **What to Write**
Realistic user interactions showing how the product will be used.
### **UseCase Fields**
* id
* title
* actor
* goal
* steps
* success
* failure
### **Example**
```
{
id: "UC1"
title: "Generate a PRD from Template"
actor: "Product Manager"
goal: "Create a validated PRD quickly"
steps: [
"User selects 'New PRD'",
"User chooses template type",
"User fills fields or uses AI suggestions",
"User exports PRD to Markdown"
]
success: "A complete PRD is generated without missing required fields."
failure: "Validation fails due to missing required data."
}
```
---
# 4. **Requirements**
### **What to Write**
Describe *what the system must do*, in clear, testable language.
### **Requirement Fields**
* id
* category
* title
* rtype
* description
* priority
* criteria
* dependencies
---
### **Example Requirement**
```
{
id: "R1"
category: "PRD Editor"
title: "Template Selection"
rtype: .functional
description: "The system must allow users to select from a list of predefined PRD templates."
priority: .high
criteria: [
{
id: "AC1"
description: "UI displays at least 5 templates"
condition: "List contains >= 5 template entries"
}
]
dependencies: []
}
```
---
### **Example Requirement with Dependency**
```
{
id: "R3"
category: "Export"
title: "Export PRD to Markdown"
rtype: .functional
description: "Users must be able to export the completed PRD to a Markdown file."
priority: .medium
criteria: [
{
id: "AC4"
description: "File saved in .md format"
condition: "Output file ends with '.md'"
}
]
dependencies: ["R1", "R2"]
}
```
---
# 5. **Constraints**
### **What to Write**
Non-negotiable boundaries the solution must respect.
### **Constraint Fields**
* id
* title
* description
* ctype
### **Example**
```
{
id: "C1"
title: "ARM64 Only"
description: "The system must run on ARM64 servers to match company infrastructure."
ctype: .technica
},
{
id: "C2"
title: "Q1 Deadline"
description: "The first release must be launched before March 31."
ctype: .business
},
{
id: "C3"
title: "GDPR Requirement"
description: "All user data must be deletable within 24 hours of a user request."
ctype: .compliance
}
```
---
# 6. **Risks**
### **What to Write**
Potential problems + mitigation strategies.
### **Example**
```
risks: {
"RISK1": "Template library may be too small → Mitigate by allowing community contributions"
"RISK2": "AI suggestions may be inaccurate → Add review/approve workflow"
"RISK3": "Export format inconsistencies → Create automated format tests"
}
```
---
# 🔧 7. **Minimum PRD Example (Compact)**
Here is a minimal but valid PRD instance:
```
ProductRequirementsDoc{
product_name: "Lumina PRD Builder"
version: "v1.0"
overview: "Tool to create structured PRDs."
vision: "Fast, accurate requirements for all teams."
goals: [
Goal{
id: "G1"
title: "Speed"
description: "Generate PRDs in under 10 minutes."
gtype: .product
}
]
use_cases: [
UseCase{
id: "UC1"
title: "Create PRD"
actor: "PM"
goal: "Produce PRD quickly"
steps: ["Click new", "Fill data", "Export"]
success: "Valid PRD generated"
failure: "Missing fields"
}
]
requirements: [
Requirement{
id: "R1"
category: "Editor"
title: "Input Fields"
rtype: .functional
description: "User can fill out PRD fields"
priority: .high
criteria: []
dependencies: []
}
]
constraints: [
Constraint{
id: "C1"
title: "Must Support Markdown"
description: "Export only in .md format"
ctype: .technica
}
]
risks: {
"R1": "User confusion → Add tooltips"
}
}
```

415
lib/hero/heromodels/prd.v Normal file
View File

@@ -0,0 +1,415 @@
module heromodels
import incubaid.herolib.data.encoder
import incubaid.herolib.data.ourtime
import incubaid.herolib.hero.db
import incubaid.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_true }
import incubaid.herolib.hero.user { UserRef }
import json
// Basic enums for clarity
// Core PRD type, this is the root object
// Root object of a Product Requirements Document (PRD).
// Embeds db.Base, which supplies the shared HeroDB model fields (id etc.)
// — presumably; confirm against lib/hero/db.
// NOTE(review): the example script and prd.md both describe a `risks` map
// on the PRD, but no risks field is declared here — verify it is handled
// elsewhere (e.g. further down this file or in the new() arguments).
@[heap]
pub struct ProductRequirementsDoc {
	db.Base
pub mut:
	product_name string        // human-readable product name
	version      string        // document version, e.g. 'v1.0'
	overview     string        // what the product is / what problem it solves
	vision       string        // longer-term vision statement
	goals        []Goal        // measurable outcomes (prd.md section 2)
	use_cases    []UseCase     // user interactions (prd.md section 3)
	requirements []Requirement // functional / non-functional (prd.md section 4)
	constraints  []Constraint  // non-negotiable boundaries (prd.md section 5)
}
// DBPrd is the factory/accessor used to store and retrieve PRD objects
// (mydb.prd.set/get/list/exist in the examples).
pub struct DBPrd {
pub mut:
	db &db.DB @[skip; str: skip] // backing DB handle; excluded from encoding and str()
}
// Priority level attached to a Requirement.
pub enum PRDPriority {
	low
	medium
	high
	critical
}
// Kind of requirement (used as Requirement.rtype).
pub enum RequirementType {
	functional
	non_functional
	performance
	reliability
}
// A reusable acceptance criterion: a testable condition that must hold
// for a requirement to be considered met.
pub struct AcceptanceCriterion {
pub:
	id          string
	description string // what is being checked, human readable
	condition   string // testable condition, e.g. 'List contains >= 5 templates'
}
// A generic requirement (functional or non-functional).
pub struct Requirement {
pub:
	id           string
	category     string // free-form grouping, e.g. 'Editor'
	title        string
	rtype        RequirementType
	description  string
	priority     PRDPriority
	criteria     []AcceptanceCriterion // acceptance criteria for this requirement
	dependencies []string // list of requirement IDs this one depends on
}
// A use case type: one actor-driven scenario with its happy path (steps +
// success) and its failure outcome.
pub struct UseCase {
pub:
	id      string
	title   string
	actor   string
	goal    string
	steps   []string
	success string
	failure string
}
// GoalType classifies a PRD goal (encoded as u8 on the wire).
pub enum GoalType {
	product
	business
	operational
}
// Goal is a single objective the product aims to achieve.
pub struct Goal {
pub:
	id          string
	title       string
	description string
	gtype       GoalType
}
// ConstraintType classifies a constraint (encoded as u8 on the wire).
// NOTE(review): `technica` looks like a typo for `technical`, but it is
// referenced as `.technica` by callers and examples, so renaming it would
// be a breaking change — fix in a coordinated pass if desired.
pub enum ConstraintType {
	technica
	business
	operational
	scale
	compliance
	design
}
// Constraint is a hard limitation the product must respect.
pub struct Constraint {
pub:
	id          string
	title       string
	description string
	ctype       ConstraintType
}
// type_name returns the HeroDB type key under which PRDs are stored.
pub fn (self ProductRequirementsDoc) type_name() string {
	return 'prd'
}
// description returns a human-readable summary of the given RPC method on
// the prd model; unknown methods get a generic fallback description.
pub fn (self ProductRequirementsDoc) description(methodname string) string {
	// Table-driven lookup: one entry per supported method.
	descriptions := {
		'set':    'Create or update a product requirements document. Returns the ID of the PRD.'
		'get':    'Retrieve a PRD by ID. Returns the complete PRD object.'
		'delete': 'Delete a PRD by ID. Returns true if successful.'
		'exist':  'Check if a PRD exists by ID. Returns true or false.'
		'list':   'List all PRDs. Returns an array of PRD objects.'
	}
	return descriptions[methodname] or { 'Generic method for PRD operations.' }
}
// example returns a (params, result) pair of example JSON payloads for the
// given RPC method; unknown methods get empty JSON objects.
pub fn (self ProductRequirementsDoc) example(methodname string) (string, string) {
	if methodname == 'set' {
		return '{"product_name": "Test Product", "version": "v1.0", "overview": "A test product", "vision": "To test the system", "goals": [], "use_cases": [], "requirements": [], "constraints": []}', '1'
	}
	if methodname == 'get' {
		return '{"id": 1}', '{"product_name": "Test Product", "version": "v1.0", "overview": "A test product", "vision": "To test the system", "goals": [], "use_cases": [], "requirements": [], "constraints": []}'
	}
	if methodname == 'delete' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'exist' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'list' {
		return '{}', '[{"product_name": "Test Product", "version": "v1.0"}]'
	}
	return '{}', '{}'
}
// dump serializes this PRD into the binary encoder.
// Field order here is the wire contract and MUST mirror DBPrd.load exactly:
// scalars first, then goals, use_cases, requirements (with nested criteria
// and dependencies), then constraints. Array lengths are written as u16,
// enums as u8.
pub fn (self ProductRequirementsDoc) dump(mut e encoder.Encoder) ! {
	e.add_string(self.product_name)
	e.add_string(self.version)
	e.add_string(self.overview)
	e.add_string(self.vision)
	// Encode goals array
	e.add_u16(u16(self.goals.len))
	for goal in self.goals {
		e.add_string(goal.id)
		e.add_string(goal.title)
		e.add_string(goal.description)
		e.add_u8(u8(goal.gtype))
	}
	// Encode use_cases array
	e.add_u16(u16(self.use_cases.len))
	for uc in self.use_cases {
		e.add_string(uc.id)
		e.add_string(uc.title)
		e.add_string(uc.actor)
		e.add_string(uc.goal)
		e.add_list_string(uc.steps)
		e.add_string(uc.success)
		e.add_string(uc.failure)
	}
	// Encode requirements array
	e.add_u16(u16(self.requirements.len))
	for req in self.requirements {
		e.add_string(req.id)
		e.add_string(req.category)
		e.add_string(req.title)
		e.add_u8(u8(req.rtype))
		e.add_string(req.description)
		e.add_u8(u8(req.priority))
		// Encode acceptance criteria (nested, length-prefixed like the outer arrays)
		e.add_u16(u16(req.criteria.len))
		for criterion in req.criteria {
			e.add_string(criterion.id)
			e.add_string(criterion.description)
			e.add_string(criterion.condition)
		}
		// Encode dependencies
		e.add_list_string(req.dependencies)
	}
	// Encode constraints array
	e.add_u16(u16(self.constraints.len))
	for constraint in self.constraints {
		e.add_string(constraint.id)
		e.add_string(constraint.title)
		e.add_string(constraint.description)
		e.add_u8(u8(constraint.ctype))
	}
}
// load deserializes a PRD from the binary decoder into o.
// Read order MUST mirror ProductRequirementsDoc.dump exactly; any drift
// between the two corrupts every record read after the divergent field.
// Enum bytes are cast back via unsafe{} — an out-of-range byte would yield
// an invalid enum value, which the decoder does not validate here.
pub fn (mut self DBPrd) load(mut o ProductRequirementsDoc, mut e encoder.Decoder) ! {
	o.product_name = e.get_string()!
	o.version = e.get_string()!
	o.overview = e.get_string()!
	o.vision = e.get_string()!
	// Decode goals
	goals_len := e.get_u16()!
	mut goals := []Goal{}
	for _ in 0 .. goals_len {
		goals << Goal{
			id:          e.get_string()!
			title:       e.get_string()!
			description: e.get_string()!
			gtype:       unsafe { GoalType(e.get_u8()!) }
		}
	}
	o.goals = goals
	// Decode use_cases
	use_cases_len := e.get_u16()!
	mut use_cases := []UseCase{}
	for _ in 0 .. use_cases_len {
		use_cases << UseCase{
			id:      e.get_string()!
			title:   e.get_string()!
			actor:   e.get_string()!
			goal:    e.get_string()!
			steps:   e.get_list_string()!
			success: e.get_string()!
			failure: e.get_string()!
		}
	}
	o.use_cases = use_cases
	// Decode requirements
	requirements_len := e.get_u16()!
	mut requirements := []Requirement{}
	for _ in 0 .. requirements_len {
		// Fields are read into locals first because the nested criteria loop
		// must run before the Requirement literal can be assembled.
		req_id := e.get_string()!
		req_category := e.get_string()!
		req_title := e.get_string()!
		req_rtype := unsafe { RequirementType(e.get_u8()!) }
		req_description := e.get_string()!
		req_priority := unsafe { PRDPriority(e.get_u8()!) }
		// Decode criteria
		criteria_len := e.get_u16()!
		mut criteria := []AcceptanceCriterion{}
		for _ in 0 .. criteria_len {
			criteria << AcceptanceCriterion{
				id:          e.get_string()!
				description: e.get_string()!
				condition:   e.get_string()!
			}
		}
		// Decode dependencies
		dependencies := e.get_list_string()!
		requirements << Requirement{
			id:           req_id
			category:     req_category
			title:        req_title
			rtype:        req_rtype
			description:  req_description
			priority:     req_priority
			criteria:     criteria
			dependencies: dependencies
		}
	}
	o.requirements = requirements
	// Decode constraints
	constraints_len := e.get_u16()!
	mut constraints := []Constraint{}
	for _ in 0 .. constraints_len {
		constraints << Constraint{
			id:          e.get_string()!
			title:       e.get_string()!
			description: e.get_string()!
			ctype:       unsafe { ConstraintType(e.get_u8()!) }
		}
	}
	o.constraints = constraints
}
@[params]
// PrdArg carries the caller-supplied fields for creating/updating a PRD.
// id != 0 signals an update of an existing record (see prd_handle 'set').
// NOTE(review): docs and tests pass a `risks` map, but no such field exists
// here — confirm whether it should be added.
pub struct PrdArg {
pub mut:
	id             u32
	product_name   string @[required]
	version        string
	overview       string
	vision         string
	goals          []Goal
	use_cases      []UseCase
	requirements   []Requirement
	constraints    []Constraint
	securitypolicy u32
	tags           []string
}
// new builds an in-memory PRD from args (stamping updated_at with the
// current unix time) and resolves tag names to tag ids via the DB.
// The object is NOT persisted — call set() to store it.
pub fn (mut self DBPrd) new(args PrdArg) !ProductRequirementsDoc {
	mut o := ProductRequirementsDoc{
		product_name: args.product_name
		version:      args.version
		overview:     args.overview
		vision:       args.vision
		goals:        args.goals
		use_cases:    args.use_cases
		requirements: args.requirements
		constraints:  args.constraints
		updated_at:   ourtime.now().unix()
	}
	// Base fields are assigned after the literal; tags_get may hit the DB.
	o.securitypolicy = args.securitypolicy
	o.tags = self.db.tags_get(args.tags)!
	return o
}
// set persists the PRD (insert or update) and returns the stored copy,
// whose id is assigned by the DB on first insert.
pub fn (mut self DBPrd) set(o ProductRequirementsDoc) !ProductRequirementsDoc {
	return self.db.set[ProductRequirementsDoc](o)!
}
// delete removes the PRD with the given id.
// Returns true when a record existed and was deleted, false when no such
// record was found (no error in that case).
pub fn (mut self DBPrd) delete(id u32) !bool {
	if self.db.exists[ProductRequirementsDoc](id)! {
		self.db.delete[ProductRequirementsDoc](id)!
		return true
	}
	return false
}
// exist reports whether a PRD with the given id is stored in the DB.
pub fn (mut self DBPrd) exist(id u32) !bool {
	return self.db.exists[ProductRequirementsDoc](id)!
}
// get fetches the PRD with the given id: the DB returns the base object plus
// the raw encoded payload, which load() then decodes into the full record.
pub fn (mut self DBPrd) get(id u32) !ProductRequirementsDoc {
	mut obj, raw := self.db.get_data[ProductRequirementsDoc](id)!
	mut dec := encoder.decoder_new(raw)
	self.load(mut obj, mut dec)!
	return obj
}
// list returns every stored PRD, fully decoded.
// db.list returns the ids; each is resolved through get(), so this does one
// DB read per record — fine for small sets, O(n) round-trips otherwise.
pub fn (mut self DBPrd) list() ![]ProductRequirementsDoc {
	return self.db.list[ProductRequirementsDoc]()!.map(self.get(it)!)
}
// prd_handle dispatches a JSON-RPC call targeting the 'prd' model to the
// matching DBPrd method and wraps the result in a jsonrpc Response.
//
// - rpcid: JSON-RPC request id, echoed back in every response
// - servercontext, userref: ambient call context (not used by this model yet)
// - method: one of 'get', 'set', 'delete', 'exist', 'list'
// - params: JSON-encoded parameters for the chosen method
pub fn prd_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.prd.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut args := db.decode_generic[PrdArg](params)!
			mut o := f.prd.new(args)!
			// A non-zero id means the caller is updating an existing PRD.
			if args.id != 0 {
				o.id = args.id
			}
			o = f.prd.set(o)!
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			deleted := f.prd.delete(id)!
			if deleted {
				return new_response_true(rpcid)
			} else {
				return new_error(rpcid,
					code:    404
					message: 'PRD with ID ${id} not found'
				)
			}
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.prd.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			res := f.prd.list()!
			return new_response(rpcid, json.encode(res))
		}
		else {
			// JSON-RPC 2.0 reserves -32601 for "Method not found"; the
			// previous positive 32601 was outside the spec's reserved range.
			return new_error(rpcid,
				code:    -32601
				message: 'Method ${method} not found on prd'
			)
		}
	}
}

View File

@@ -0,0 +1,226 @@
module heromodels
import incubaid.herolib.hero.db
// Verifies DBPrd.new populates every field from PrdArg and stamps updated_at.
// Fix: the original passed `risks: {}` and asserted `prd.risks.len`, but
// neither PrdArg nor ProductRequirementsDoc declares a `risks` field, so the
// test could not compile; the references were removed.
fn test_prd_new() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}
	mut args := PrdArg{
		product_name: 'Test Product'
		version:      'v1.0'
		overview:     'This is a test product.'
		vision:       'To revolutionize testing.'
		goals:        []
		use_cases:    []
		requirements: []
		constraints:  []
	}
	prd := db_prd.new(args)!
	assert prd.product_name == 'Test Product'
	assert prd.version == 'v1.0'
	assert prd.overview == 'This is a test product.'
	assert prd.vision == 'To revolutionize testing.'
	assert prd.goals.len == 0
	assert prd.use_cases.len == 0
	assert prd.requirements.len == 0
	assert prd.constraints.len == 0
	assert prd.updated_at > 0
	println(' PRD new test passed!')
}
// Exercises the full set/get/exist/delete cycle for a PRD.
// Fix: removed the `risks: {}` initializer — PrdArg declares no `risks`
// field, so the original could not compile.
fn test_prd_crud_operations() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}
	// Create a new PRD
	mut args := PrdArg{
		product_name: 'CRUD Test Product'
		version:      'v1.0'
		overview:     'This is a test product for CRUD.'
		vision:       'To test CRUD operations.'
		goals:        []
		use_cases:    []
		requirements: []
		constraints:  []
	}
	mut prd := db_prd.new(args)!
	prd = db_prd.set(prd)!
	original_id := prd.id
	// Test get
	retrieved_prd := db_prd.get(original_id)!
	assert retrieved_prd.product_name == 'CRUD Test Product'
	assert retrieved_prd.version == 'v1.0'
	assert retrieved_prd.id == original_id
	// Test exist
	exists := db_prd.exist(original_id)!
	assert exists == true
	// Test delete
	db_prd.delete(original_id)!
	exists_after_delete := db_prd.exist(original_id)!
	assert exists_after_delete == false
	println(' PRD CRUD operations test passed!')
}
// Round-trips a fully-populated PRD (goals, use cases, nested acceptance
// criteria, constraints) through encode/decode and verifies every field.
// Fix: removed the `risks` map, the `risks: risks` argument, and the two
// `prd.risks` assertions — no `risks` field exists on PrdArg or
// ProductRequirementsDoc, so the original could not compile.
fn test_prd_encoding_decoding_complex() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}
	mut goal := Goal{
		id:          'G1'
		title:       'Speed'
		description: 'Generate PRDs in minutes'
		gtype:       .product
	}
	mut use_case := UseCase{
		id:      'UC1'
		title:   'Create PRD'
		actor:   'Product Manager'
		goal:    'Produce PRD quickly'
		steps:   ['Click new', 'Fill data', 'Export']
		success: 'Valid PRD generated'
		failure: 'Missing fields'
	}
	mut criterion := AcceptanceCriterion{
		id:          'AC1'
		description: 'System displays template list'
		condition:   'List contains >= 5 templates'
	}
	mut requirement := Requirement{
		id:           'R1'
		category:     'Editor'
		title:        'Template Selection'
		rtype:        .functional
		description:  'User can select from predefined templates'
		priority:     .high
		criteria:     [criterion]
		dependencies: []
	}
	mut constraint := Constraint{
		id:          'C1'
		title:       'ARM64 Only'
		description: 'Must run on ARM64 servers'
		ctype:       .technica
	}
	mut args := PrdArg{
		product_name: 'Complex Test Product'
		version:      'v2.0'
		overview:     'Complete test with all fields'
		vision:       'Full feature test'
		goals:        [goal]
		use_cases:    [use_case]
		requirements: [requirement]
		constraints:  [constraint]
	}
	mut prd := db_prd.new(args)!
	prd = db_prd.set(prd)!
	prd_id := prd.id
	// Retrieve and verify
	retrieved_prd := db_prd.get(prd_id)!
	assert retrieved_prd.product_name == 'Complex Test Product'
	assert retrieved_prd.goals.len == 1
	assert retrieved_prd.goals[0].id == 'G1'
	assert retrieved_prd.goals[0].gtype == .product
	assert retrieved_prd.use_cases.len == 1
	assert retrieved_prd.use_cases[0].id == 'UC1'
	assert retrieved_prd.use_cases[0].steps.len == 3
	assert retrieved_prd.requirements.len == 1
	assert retrieved_prd.requirements[0].id == 'R1'
	assert retrieved_prd.requirements[0].criteria.len == 1
	assert retrieved_prd.requirements[0].priority == .high
	assert retrieved_prd.constraints.len == 1
	assert retrieved_prd.constraints[0].id == 'C1'
	assert retrieved_prd.constraints[0].ctype == .technica
	println(' PRD encoding/decoding complex test passed!')
}
// Verifies the HeroDB type key for PRDs is 'prd'.
// Fix: removed the `risks: {}` initializer — PrdArg declares no `risks`
// field, so the original could not compile.
fn test_prd_type_name() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}
	mut args := PrdArg{
		product_name: 'Type Name Test'
		version:      'v1.0'
		overview:     'Test'
		vision:       'Test'
		goals:        []
		use_cases:    []
		requirements: []
		constraints:  []
	}
	prd := db_prd.new(args)!
	type_name := prd.type_name()
	assert type_name == 'prd'
	println(' PRD type_name test passed!')
}
// Stores three PRDs and verifies list() returns all of them.
// Fix: removed the `risks: {}` initializer — PrdArg declares no `risks`
// field, so the original could not compile.
fn test_prd_list() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}
	// Create multiple PRDs
	for i in 0 .. 3 {
		mut args := PrdArg{
			product_name: 'Product ${i}'
			version:      'v1.0'
			overview:     'Overview ${i}'
			vision:       'Vision ${i}'
			goals:        []
			use_cases:    []
			requirements: []
			constraints:  []
		}
		mut prd := db_prd.new(args)!
		prd = db_prd.set(prd)!
	}
	// List all PRDs
	all_prds := db_prd.list()!
	assert all_prds.len == 3
	println(' PRD list test passed!')
}

View File

@@ -59,7 +59,7 @@ pub fn install(args_ InstallArgs) ! {
} else if pl == .alpine {
console.print_header(' - Alpine prepare')
osal.package_refresh()!
osal.package_install('git,curl,mc,tmux,screen,git-lfs,redis-server')!
osal.package_install('git,curl,mc,tmux,screen,git-lfs,redis')!
} else if pl == .arch {
console.print_header(' - Arch prepare')
osal.package_refresh()!

View File

@@ -70,7 +70,7 @@ pub fn decode_request(data string) !Request {
// Returns:
// - A JSON string representation of the Request
pub fn (req Request) encode() string {
	// Compact (non-prettified) JSON; callers needing pretty output must format it themselves.
	return json2.encode(req)
}
}
// validate checks if the Request object contains all required fields

View File

@@ -9,7 +9,7 @@ pub mut:
position int
hide_title bool
src string @[required] // always in format collection:page_name, can use the default collection if no : specified
path string @[required] //is without the page name, so just the path to the folder where the page is in
path string @[required] // is without the page name, so just the path to the folder where the page is in
section_name string
title_nr int
slug string