Merge branch 'development' into development_heropods
.github/workflows/hero_build.yml (vendored) | 5
@@ -46,9 +46,6 @@ jobs:
     cd v
     make
     ./v symlink
-    if [ "${{ runner.os }}" = "macOS" ]; then
-      sudo sed -i '' '618,631d' /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h
-    fi
     cd -

     mkdir -p ~/.vmodules/incubaid
@@ -92,7 +89,7 @@ jobs:
     '

     else
-      v -w -d use_openssl -enable-globals -gc none -cc tcc cli/hero.v -o cli/hero-${{ matrix.target }}
+      v -w -d use_openssl -enable-globals -cc clang cli/hero.v -o cli/hero-${{ matrix.target }}
     fi

 - name: Upload glibc binary
@@ -2,11 +2,12 @@

 ## Overview

-This document provides clear instructions for AI agents to create new HeroDB models similar to `message.v`. These models are used to store structured data in Redis using the HeroDB system.
+This document provides clear instructions for AI agents to create new HeroDB models similar to `message.v`.
+
+These models are used to store structured data in Redis using the HeroDB system.
+
+The message.v can be found in `lib/hero/heromodels/message.v`.

 ## Key Concepts

-- Each model represents a data type stored in Redis hash sets
 - Models must implement serialization/deserialization using the `encoder` module
 - Models inherit from the `Base` struct which provides common fields
 - The database uses a factory pattern for model access
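For reference, a minimal model following these conventions might look like the sketch below. This is an illustration only: the model name and fields are invented, and the encoder method names are assumptions about the `encoder` module's API, not copied from `message.v`.

```v
module heromodels

import incubaid.herolib.data.encoder
import incubaid.herolib.hero.db

// Minimal illustrative model: embeds Base for the common fields and
// round-trips through the encoder module for dense binary storage.
@[heap]
pub struct Note {
	db.Base
pub mut:
	title   string
	content string
}

// encode serializes the model with the encoder module (API shape assumed).
pub fn (n Note) encode() ![]u8 {
	mut e := encoder.new()
	e.add_string(n.title)
	e.add_string(n.content)
	return e.data
}

// note_decode restores a Note from its binary form, reading fields in the
// same order they were written.
pub fn note_decode(data []u8) !Note {
	mut d := encoder.decoder_new(data)
	return Note{
		title:   d.get_string()
		content: d.get_string()
	}
}
```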
examples/hero/heromodels/prd.vsh (new file) | 93
@@ -0,0 +1,93 @@
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
+
+import incubaid.herolib.hero.heromodels
+
+// Initialize database
+mut mydb := heromodels.new()!
+
+// Create goals
+mut goals := [
+	heromodels.Goal{
+		id: 'G1'
+		title: 'Faster Requirements'
+		description: 'Reduce PRD creation time to under 1 day'
+		gtype: .product
+	}
+]
+
+// Create use cases
+mut use_cases := [
+	heromodels.UseCase{
+		id: 'UC1'
+		title: 'Generate PRD'
+		actor: 'Product Manager'
+		goal: 'Create validated PRD'
+		steps: ['Select template', 'Fill fields', 'Export to Markdown']
+		success: 'Complete PRD generated'
+		failure: 'Validation failed'
+	}
+]
+
+// Create requirements
+mut criterion := heromodels.AcceptanceCriterion{
+	id: 'AC1'
+	description: 'Display template list'
+	condition: 'List contains >= 5 templates'
+}
+
+mut requirements := [
+	heromodels.Requirement{
+		id: 'R1'
+		category: 'Editor'
+		title: 'Template Selection'
+		rtype: .functional
+		description: 'User can select from templates'
+		priority: .high
+		criteria: [criterion]
+		dependencies: []
+	}
+]
+
+// Create constraints
+mut constraints := [
+	heromodels.Constraint{
+		id: 'C1'
+		title: 'ARM64 Support'
+		description: 'Must run on ARM64 infrastructure'
+		ctype: .technica
+	}
+]
+
+// Create risks
+mut risks := map[string]string{}
+risks['RISK1'] = 'Templates too limited → Add community contributions'
+risks['RISK2'] = 'AI suggestions inaccurate → Add review workflow'
+
+// Create a new PRD object
+mut prd := mydb.prd.new(
+	product_name: 'Lumina PRD Builder'
+	version: 'v1.0'
+	overview: 'Tool to create structured PRDs quickly'
+	vision: 'Enable teams to generate clear requirements in minutes'
+	goals: goals
+	use_cases: use_cases
+	requirements: requirements
+	constraints: constraints
+	risks: risks
+)!
+
+// Save to database
+prd = mydb.prd.set(prd)!
+println('✓ Created PRD with ID: ${prd.id}')
+
+// Retrieve from database
+mut retrieved := mydb.prd.get(prd.id)!
+println('✓ Retrieved PRD: ${retrieved.product_name}')
+
+// List all PRDs
+mut all_prds := mydb.prd.list()!
+println('✓ Total PRDs in database: ${all_prds.len}')
+
+// Check if exists
+exists := mydb.prd.exist(prd.id)!
+println('✓ PRD exists: ${exists}')
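Because of the `#!/usr/bin/env -S v ... run` shebang, the example can be executed directly once V is installed (this assumes a local Redis instance for HeroDB to talk to):

```bash
chmod +x examples/hero/heromodels/prd.vsh
./examples/hero/heromodels/prd.vsh
```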
install_v.sh | 572
@@ -1,8 +1,25 @@
 #!/bin/bash

 set -euo pipefail
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-cd "$SCRIPT_DIR"
-
+#==============================================================================
+# GLOBAL VARIABLES
+#==============================================================================
+RESET=false
+REMOVE=false
+INSTALL_ANALYZER=false
+HEROLIB=false
+START_REDIS=false
+
+export DIR_BASE="$HOME"
+export DIR_BUILD="/tmp"
+export DIR_CODE="$DIR_BASE/code"
+export DIR_CODE_V="$DIR_BASE/_code"
+export OSNAME=""
+
+
+#==============================================================================
+# FUNCTION DEFINITIONS
+#==============================================================================
+
 # Help function
 print_help() {
@@ -16,6 +33,8 @@ print_help() {
   echo " --remove                   Remove V installation and exit"
   echo " --analyzer                 Install/update v-analyzer"
   echo " --herolib                  Install our herolib"
+  echo " --herolib-version=VERSION  Install specific herolib tag/branch (default: development)"
+  echo " --start-redis              Start the Redis service if installed"
   echo
   echo "Examples:"
   echo " $0"
@@ -27,38 +46,6 @@ print_help() {
   echo
 }

-# Parse arguments
-RESET=false
-REMOVE=false
-INSTALL_ANALYZER=false
-HEROLIB=false
-
-for arg in "$@"; do
-  case $arg in
-    -h|--help)
-      print_help
-      exit 0
-      ;;
-    --reset)
-      RESET=true
-      ;;
-    --remove)
-      REMOVE=true
-      ;;
-    --herolib)
-      HEROLIB=true
-      ;;
-    --analyzer)
-      INSTALL_ANALYZER=true
-      ;;
-    *)
-      echo "Unknown option: $arg"
-      echo "Use -h or --help to see available options"
-      exit 1
-      ;;
-  esac
-done
-
 # Function to check if command exists
 command_exists() {
   command -v "$1" >/dev/null 2>&1
@@ -80,25 +67,20 @@ function run_sudo() {
   fi
 }

-export DIR_BASE="$HOME"
-export DIR_BUILD="/tmp"
-export DIR_CODE="$DIR_BASE/code"
-export DIR_CODE_V="$DIR_BASE/_code"
-
 check_release() {
   if ! command -v lsb_release >/dev/null 2>&1; then
     echo "❌ lsb_release command not found. Install 'lsb-release' package first."
     exit 1
   fi

   CODENAME=$(lsb_release -sc)
   RELEASE=$(lsb_release -rs)

   if dpkg --compare-versions "$RELEASE" lt "24.04"; then
     echo "ℹ️ Detected Ubuntu $RELEASE ($CODENAME). Skipping mirror fix (requires 24.04+)."
     return 1
   fi

   return 0
 }
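The `run_sudo` helper used throughout the script appears here only by its closing lines. A plausible definition, assuming it simply skips `sudo` when already running as root (the actual body is outside this diff's context), would be:

```bash
run_sudo() {
  # Run a command directly when root, through sudo otherwise.
  # (Assumed body; the real implementation is not shown in this diff.)
  if [ "$(id -u)" -eq 0 ]; then
    "$@"
  else
    sudo "$@"
  fi
}
```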
@@ -108,36 +90,36 @@ ubuntu_sources_fix() {
     echo "ℹ️ Not running on Ubuntu. Skipping mirror fix."
     return 1
   fi

   if check_release; then
     local CODENAME
     CODENAME=$(lsb_release -sc)
     local TIMESTAMP
     TIMESTAMP=$(date +%Y%m%d_%H%M%S)

     echo "🔎 Fixing apt mirror setup for Ubuntu $(lsb_release -rs) ($CODENAME)..."

     if [ -f /etc/apt/sources.list ]; then
       echo "📦 Backing up /etc/apt/sources.list -> /etc/apt/sources.list.backup.$TIMESTAMP"
-      sudo mv /etc/apt/sources.list /etc/apt/sources.list.backup.$TIMESTAMP
+      run_sudo mv /etc/apt/sources.list /etc/apt/sources.list.backup.$TIMESTAMP
     fi

     if [ -f /etc/apt/sources.list.d/ubuntu.sources ]; then
       echo "📦 Backing up /etc/apt/sources.list.d/ubuntu.sources -> /etc/apt/sources.list.d/ubuntu.sources.backup.$TIMESTAMP"
-      sudo mv /etc/apt/sources.list.d/ubuntu.sources /etc/apt/sources.list.d/ubuntu.sources.backup.$TIMESTAMP
+      run_sudo mv /etc/apt/sources.list.d/ubuntu.sources /etc/apt/sources.list.d/ubuntu.sources.backup.$TIMESTAMP
     fi

     echo "📝 Writing new /etc/apt/sources.list.d/ubuntu.sources"
-    sudo tee /etc/apt/sources.list.d/ubuntu.sources >/dev/null <<EOF
+    run_sudo tee /etc/apt/sources.list.d/ubuntu.sources >/dev/null <<EOF
 Types: deb
 URIs: mirror://mirrors.ubuntu.com/mirrors.txt
 Suites: $CODENAME $CODENAME-updates $CODENAME-backports $CODENAME-security
 Components: main restricted universe multiverse
 EOF

     echo "🔄 Running apt update..."
-    sudo apt update -qq
+    run_sudo apt update -qq

     echo "✅ Done! Your system now uses the rotating Ubuntu mirror list."
   fi
 }
@@ -156,37 +138,14 @@ function sshknownkeysadd {
     ssh-keyscan git.threefold.info >> ~/.ssh/known_hosts
   fi
   git config --global pull.rebase false

 }

-function package_check_install {
-  local command_name="$1"
-  if command -v "$command_name" >/dev/null 2>&1; then
-    echo "command '$command_name' is already installed."
-  else
-    package_install '$command_name'
-  fi
-}
-
-function package_install {
-  local command_name="$1"
-  if [[ "${OSNAME}" == "ubuntu" ]]; then
-    if is_github_actions; then
-      run_sudo apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
-    else
-      apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
-    fi
-  elif [[ "${OSNAME}" == "darwin"* ]]; then
-    brew install $command_name
-  elif [[ "${OSNAME}" == "alpine"* ]]; then
-    apk add $command_name
-  elif [[ "${OSNAME}" == "arch"* ]]; then
-    pacman --noconfirm -Su $command_name
-  else
-    echo "platform : ${OSNAME} not supported"
-    exit 1
-  fi
-}
+# Performs a non-interactive, forceful apt installation.
+# WARNING: This is designed for CI/automated environments. It can be dangerous
+# on a personal machine as it may remove essential packages to resolve conflicts.
+function apt_force_install {
+  run_sudo apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install "$@" -q -y --allow-downgrades --allow-remove-essential
+}

 is_github_actions() {
@@ -201,11 +160,10 @@ is_github_actions() {
   fi
 }

-
 function myplatform {
   if [[ "${OSTYPE}" == "darwin"* ]]; then
     export OSNAME='darwin'
   elif [ -e /etc/os-release ]; then
     # Read the ID field from the /etc/os-release file
     export OSNAME=$(grep '^ID=' /etc/os-release | cut -d= -f2)
     if [ "${OSNAME,,}" == "ubuntu" ]; then
@@ -221,27 +179,14 @@ function myplatform {
     echo "Unable to determine the operating system."
     exit 1
   fi

-
-  # if [ "$(uname -m)" == "x86_64" ]; then
-  #   echo "This system is running a 64-bit processor."
-  # else
-  #   echo "This system is not running a 64-bit processor."
-  #   exit 1
-  # fi
-
 }

-myplatform
-
-function os_update {
+function update_system {
+  echo ' - System Update'
   if [[ "${OSNAME}" == "ubuntu" ]]; then
     ubuntu_sources_fix
-  fi
-  echo ' - os update'
-  if [[ "${OSNAME}" == "ubuntu" ]]; then
     if is_github_actions; then
-      echo "github actions"
+      echo "github actions: preparing system"
     else
       rm -f /var/lib/apt/lists/lock
       rm -f /var/cache/apt/archives/lock
@@ -252,54 +197,56 @@ function os_update {
     run_sudo dpkg --configure -a
     run_sudo apt update -y
     if is_github_actions; then
-      echo "** IN GITHUB ACTIONS, DON'T DO UPDATE"
+      echo "** IN GITHUB ACTIONS, DON'T DO SYSTEM UPGRADE"
     else
       set +e
-      echo "** UPDATE"
+      echo "** System Upgrade"
       apt-mark hold grub-efi-amd64-signed
       set -e
       apt upgrade -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes
       apt autoremove -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes
     fi
-    #apt install apt-transport-https ca-certificates curl software-properties-common -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" --force-yes
-    package_install "apt-transport-https ca-certificates curl wget software-properties-common tmux make gcc"
-    package_install "rclone rsync mc redis-server screen net-tools git dnsutils htop ca-certificates screen lsb-release binutils pkg-config libssl-dev iproute2"
-
-  elif [[ "${OSNAME}" == "darwin"* ]]; then
-    if command -v brew >/dev/null 2>&1; then
-      echo ' - homebrew installed'
-    else
+  elif [[ "${OSNAME}" == "darwin"* ]]; then
+    if ! command -v brew >/dev/null 2>&1; then
+      echo ' - Installing Homebrew'
       export NONINTERACTIVE=1
       /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
       unset NONINTERACTIVE
     fi
+  elif [[ "${OSNAME}" == "alpine"* ]]; then
+    apk update
+  elif [[ "${OSNAME}" == "arch"* ]]; then
+    pacman -Syyu --noconfirm
+  fi
+  echo ' - System Update Done'
+}
+
+function install_packages {
+  echo ' - Installing Packages'
+  if [[ "${OSNAME}" == "ubuntu" ]]; then
+    apt_force_install apt-transport-https ca-certificates curl wget software-properties-common tmux make gcc rclone rsync mc redis-server screen net-tools git dnsutils htop lsb-release binutils pkg-config libssl-dev iproute2
+  elif [[ "${OSNAME}" == "darwin"* ]]; then
+    # The set +e is to prevent script failure if some packages are already installed.
     set +e
     brew install mc redis curl tmux screen htop wget rclone tcc
     set -e
   elif [[ "${OSNAME}" == "alpine"* ]]; then
-    apk update screen git htop tmux
-    apk add mc curl rsync htop redis bash bash-completion screen git rclone
+    apk add --no-cache screen git htop tmux mc curl rsync redis bash bash-completion rclone
+    # Switch default shell to bash for better interactive use
     sed -i 's#/bin/ash#/bin/bash#g' /etc/passwd
   elif [[ "${OSNAME}" == "arch"* ]]; then
-    pacman -Syy --noconfirm
-    pacman -Syu --noconfirm
-    pacman -Su --noconfirm arch-install-scripts gcc mc git tmux curl htop redis wget screen net-tools git sudo htop ca-certificates lsb-release screen rclone
+    pacman -Su --noconfirm arch-install-scripts gcc mc git tmux curl htop redis wget screen net-tools sudo lsb-release rclone

     # Check if builduser exists, create if not
     if ! id -u builduser > /dev/null 2>&1; then
       useradd -m builduser
       echo "builduser:$(openssl rand -base64 32 | sha256sum | base64 | head -c 32)" | chpasswd
       echo 'builduser ALL=(ALL) NOPASSWD: ALL' | tee /etc/sudoers.d/builduser
     fi
-
-    # if [[ -n "${DEBUG}" ]]; then
-    #   execute_with_marker "paru_install" paru_install
-    # fi
   fi
-  echo ' - os update done'
+  echo ' - Package Installation Done'
 }

 function hero_lib_pull {
   pushd $DIR_CODE/github/incubaid/herolib 2>&1 >> /dev/null
   if [[ $(git status -s) ]]; then
@@ -311,7 +258,7 @@ function hero_lib_pull {
 }

 function hero_lib_get {

   mkdir -p $DIR_CODE/github/incubaid
   if [[ -d "$DIR_CODE/github/incubaid/herolib" ]]
   then
@@ -321,46 +268,19 @@ function hero_lib_get {
     git clone --depth 1 --no-single-branch https://github.com/incubaid/herolib.git
     popd 2>&1 >> /dev/null
   fi

+  # Checkout specific version if requested
+  if [ -n "${HEROLIB_VERSION:-}" ]; then
+    pushd $DIR_CODE/github/incubaid/herolib 2>&1 >> /dev/null
+    if ! git checkout "$HEROLIB_VERSION"; then
+      echo "Failed to checkout herolib version: $HEROLIB_VERSION"
+      popd 2>&1 >> /dev/null
+      return 1
+    fi
+    popd 2>&1 >> /dev/null
+  fi
 }

-# function install_secp256k1 {
-#   echo "Installing secp256k1..."
-#   if [[ "${OSNAME}" == "darwin"* ]]; then
-#     # Attempt installation only if not already found
-#     echo "Attempting secp256k1 installation via Homebrew..."
-#     brew install secp256k1
-#   elif [[ "${OSNAME}" == "ubuntu" ]]; then
-#     # Install build dependencies
-#     package_install "build-essential wget autoconf libtool"
-#
-#     # Download and extract secp256k1
-#     cd "${DIR_BUILD}"
-#     wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
-#     tar -xvf v0.3.2.tar.gz
-#
-#     # Build and install
-#     cd secp256k1-0.3.2/
-#     ./autogen.sh
-#     ./configure
-#     make -j 5
-#     if is_github_actions; then
-#       run_sudo make install
-#     else
-#       make install
-#     fi
-#
-#     # Cleanup
-#     cd ..
-#     rm -rf secp256k1-0.3.2 v0.3.2.tar.gz
-#   else
-#     echo "secp256k1 installation not implemented for ${OSNAME}"
-#     exit 1
-#   fi
-#   echo "secp256k1 installation complete!"
-# }

 remove_all() {
   echo "Removing V installation..."
   # Set reset to true to use existing reset functionality
@@ -377,7 +297,7 @@ remove_all() {
     echo "Removing v-analyzer from system..."
     run_sudo rm -f $(which v-analyzer)
   fi

   # Remove v-analyzer path from rc files
   for RC_FILE in ~/.zshrc ~/.bashrc; do
     if [ -f "$RC_FILE" ]; then
@@ -393,127 +313,54 @@ remove_all() {
       echo "Cleaned up $RC_FILE"
     fi
   done

   echo "V removal complete"
 }

-# Function to check if a service is running and start it if needed
-check_and_start_redis() {
-  # Normal service management for non-container environments
-  if [[ "${OSNAME}" == "ubuntu" ]] || [[ "${OSNAME}" == "debian" ]]; then
-    # Handle Redis installation for GitHub Actions environment
-    if is_github_actions; then
-      # Import Redis GPG key
-      curl -fsSL https://packages.redis.io/gpg | run_sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
-      # Add Redis repository
-      echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | run_sudo tee /etc/apt/sources.list.d/redis.list
-      # Install Redis
-      run_sudo apt-get update
-      run_sudo apt-get install -y redis
-
-      # Start Redis
-      redis-server --daemonize yes
-
-      # Print versions
-      redis-cli --version
-      redis-server --version
-
-      return
-    fi
-
-    # Check if running inside a container
-    if grep -q "/docker/" /proc/1/cgroup || [ ! -d "/run/systemd/system" ]; then
-      echo "Running inside a container. Starting redis directly."
-
-      if pgrep redis-server > /dev/null; then
-        echo "redis is already running."
-      else
-        echo "redis is not running. Starting it in the background..."
-        redis-server --daemonize yes
-        if pgrep redis-server > /dev/null; then
-          echo "redis started successfully."
-        else
-          echo "Failed to start redis. Please check logs for details."
-          exit 1
-        fi
-      fi
-      return
-    fi
-
-    if command_exists zinit; then
-      # Check if redis service is managed by zinit and is running
-      if zinit status redis | grep -q "state: Running"; then
-        echo "redis is already running and managed by zinit."
-        return
-      else
-        echo "zinit is installed, but redis is not running or not managed by zinit. Proceeding with other checks."
-      fi
-    fi
-
-    if systemctl is-active --quiet "redis"; then
-      echo "redis is already running."
-    else
-      echo "redis is not running. Starting it..."
-      run_sudo systemctl start "redis"
-      if systemctl is-active --quiet "redis"; then
-        echo "redis started successfully."
-      else
-        echo "Failed to start redis. Please check logs for details."
-        exit 1
-      fi
-    fi
-  elif [[ "${OSNAME}" == "darwin"* ]]; then
-    # Check if we're in GitHub Actions
-    if is_github_actions; then
-      echo "Running in GitHub Actions on macOS. Starting redis directly..."
-      if pgrep redis-server > /dev/null; then
-        echo "redis is already running."
-      else
-        echo "redis is not running. Starting it in the background..."
-        redis-server --daemonize yes
-        if pgrep redis-server > /dev/null; then
-          echo "redis started successfully."
-        else
-          echo "Failed to start redis. Please check logs for details."
-          exit 1
-        fi
-      fi
-    else
-      # For regular macOS environments, use brew services
-      if brew services list | grep -q "^redis.*started"; then
-        echo "redis is already running."
-      else
-        echo "redis is not running. Starting it..."
-        brew services start redis
-      fi
-    fi
-  elif [[ "${OSNAME}" == "alpine"* ]]; then
-    if rc-service "redis" status | grep -q "running"; then
-      echo "redis is already running."
-    else
-      echo "redis is not running. Starting it..."
-      rc-service "redis" start
-    fi
-  elif [[ "${OSNAME}" == "arch"* ]]; then
-    if systemctl is-active --quiet "redis"; then
-      echo "redis is already running."
-    else
-      echo "redis is not running. Starting it..."
-      run_sudo systemctl start "redis"
-    fi
-  else
-    echo "Service management for redis is not implemented for platform: $OSNAME"
-    exit 1
-  fi
-}
+# Starts the Redis service if it is not already running.
+function start_redis_service() {
+  echo "Attempting to start Redis service..."
+  # Check if redis-server is even installed
+  if ! command_exists redis-server; then
+    echo "Warning: redis-server command not found. Skipping."
+    return 0
+  fi
+
+  # Check if redis is already running by pinging it
+  if redis-cli ping > /dev/null 2>&1; then
+    echo "Redis is already running."
+    return 0
+  fi
+
+  echo "Redis is not running. Attempting to start it..."
+  if command_exists systemctl; then
+    run_sudo systemctl start redis
+  # For Alpine, use rc-service
+  elif command_exists rc-service; then
+    run_sudo rc-service redis start
+  elif [[ "${OSNAME}" == "darwin"* ]]; then
+    # For macOS, use brew services
+    if ! brew services list | grep -q "^redis.*started"; then
+      brew services start redis
+    fi
+  else
+    echo "No service manager found, starting Redis manually..."
+    redis-server --daemonize yes
+    return 1
+  fi
+
+  # Final check to see if it started
+  sleep 1 # Give it a second to start up
+  if redis-cli ping > /dev/null 2>&1; then
+    echo "Redis started successfully."
+  else
+    echo "Error: Failed to start Redis."
+    exit 1
+  fi
+}

 v-install() {

   # Check if v is already installed and in PATH
   if command_exists v; then
     echo "V is already installed and in PATH."
@@ -521,8 +368,8 @@ v-install() {
     # For now, just exit the function assuming it's okay
     return 0
   fi


   # Only clone and install if directory doesn't exist
   # Note: The original check was for ~/code/v, but the installation happens in ~/_code/v.
   if [ ! -d ~/_code/v ]; then
@@ -535,8 +382,8 @@ v-install() {
       exit 1
     fi
   fi

   # Only clone and install if directory doesn't exist
   # Note: The original check was for ~/code/v, but the installation happens in ~/_code/v.
   # Adjusting the check to the actual installation directory.
@@ -555,43 +402,43 @@ v-install() {
   fi
   echo "V built successfully. Creating symlink..."
   run_sudo ./v symlink

   # Verify v is in path
   if ! command_exists v; then
     echo "Error: V installation failed or not in PATH"
     echo "Please ensure ~/code/v is in your PATH"
     exit 1
   fi

   echo "V installation successful!"
 }

 v-analyzer() {
   set -ex

   # Install v-analyzer if requested
   if [ "$INSTALL_ANALYZER" = true ]; then
     echo "Installing v-analyzer..."
     cd /tmp
     v download -RD https://raw.githubusercontent.com/vlang/v-analyzer/main/install.vsh

     # Check if v-analyzer bin directory exists
     if [ ! -d "$HOME/.config/v-analyzer/bin" ]; then
       echo "Error: v-analyzer bin directory not found at $HOME/.config/v-analyzer/bin"
       echo "Please ensure v-analyzer was installed correctly"
       exit 1
     fi

     echo "v-analyzer installation successful!"
   fi

   # Add v-analyzer to PATH if installed
   if [ -d "$HOME/.config/v-analyzer/bin" ]; then
     V_ANALYZER_PATH='export PATH="$PATH:$HOME/.config/v-analyzer/bin"'

     # Function to add path to rc file if not present
     add_to_rc() {
       local RC_FILE="$1"
@@ -605,7 +452,7 @@ v-analyzer() {
       fi
     }

     # Add to both .zshrc and .bashrc if they exist
     add_to_rc ~/.zshrc
     if [ "$(uname)" = "Darwin" ] && [ -f ~/.bashrc ]; then
@@ -615,70 +462,119 @@ v-analyzer() {
 }

+#==============================================================================
+# MAIN EXECUTION
+#==============================================================================
+main() {
+  # Make sure we're running in the directory where the script is
+  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+  cd "$SCRIPT_DIR"
+
+  # Parse arguments
+  for arg in "$@"; do
+    case $arg in
+      -h|--help)
+        print_help
+        exit 0
+        ;;
+      --reset)
+        RESET=true
+        ;;
+      --remove)
+        REMOVE=true
+        ;;
+      --herolib)
+        HEROLIB=true
+        ;;
+      --herolib-version=*)
+        HEROLIB_VERSION="${arg#*=}"
+        if [ -z "$HEROLIB_VERSION" ]; then
+          echo "Error: --herolib-version requires a version argument"
+          echo "Example: $0 --herolib-version=v1.0.0"
+          exit 1
+        fi
+        ;;
+      --analyzer)
+        INSTALL_ANALYZER=true
+        ;;
+      --start-redis)
+        START_REDIS=true
+        ;;
+      *)
+        echo "Unknown option: $arg"
+        echo "Use -h or --help to see available options"
+        exit 1
+        ;;
+    esac
+  done
+
+  myplatform
+
 # Handle remove if requested
 if [ "$REMOVE" = true ]; then
   remove_all
   exit 0
 fi

 # Create code directory if it doesn't exist
 mkdir -p ~/code

 # Check if v needs to be installed
 if [ "$RESET" = true ] || ! command_exists v; then
-  os_update
+  update_system
+  install_packages

   sshknownkeysadd

   # Install secp256k1
   v-install
 fi

-# set -x
-check_and_start_redis
+if [ "$START_REDIS" = true ]; then
+  start_redis_service
+fi

 if [ "$HEROLIB" = true ]; then
   echo "=== Herolib Installation ==="
   echo "Current directory: $(pwd)"
   echo "Checking for install_herolib.vsh: $([ -f "./install_herolib.vsh" ] && echo "found" || echo "not found")"
   echo "Checking for lib directory: $([ -d "./lib" ] && echo "found" || echo "not found")"

   # Check if we're in GitHub Actions and already in the herolib directory
   if is_github_actions; then
     # In GitHub Actions, check if we're already in a herolib checkout
     if [ -f "./install_herolib.vsh" ] && [ -d "./lib" ]; then
       echo "✓ Running in GitHub Actions, using current directory for herolib installation"
       HEROLIB_DIR="$(pwd)"
     else
       echo "⚠ Running in GitHub Actions, but not in herolib directory. Cloning..."
       hero_lib_get
       HEROLIB_DIR="$HOME/code/github/incubaid/herolib"
     fi
   else
     echo "Not in GitHub Actions, using standard installation path"
     hero_lib_get
     HEROLIB_DIR="$HOME/code/github/incubaid/herolib"
   fi

   echo "Installing herolib from: $HEROLIB_DIR"
   "$HEROLIB_DIR/install_herolib.vsh"
 fi

 if [ "$INSTALL_ANALYZER" = true ]; then
   # Only install v-analyzer if not in GitHub Actions environment
   if ! is_github_actions; then
     v-analyzer
   fi
   echo "Run 'source ~/.bashrc' or 'source ~/.zshrc' to update PATH for v-analyzer"
 fi

 echo "Installation complete!"
+}
+
+main "$@"
@@ -1,11 +1,11 @@
-# ipapi
+# ip api (IP INFO SERVICE CLIENT)

 To get started

 ```v

-import incubaid.herolib.clients. ipapi
+import incubaid.herolib.clients.ipapi

 mut client:= ipapi.get()!
@@ -205,8 +205,7 @@ pub fn create_heroscript(args ModuleMeta) ! {
       '1'
     } else {
       '0'
-    }
-    }"
+    }}"
   }
   if !os.exists(args.path) {
     os.mkdir(args.path)!
@@ -25,6 +25,8 @@ import incubaid.herolib.installers.infra.coredns
 import incubaid.herolib.installers.infra.gitea
 import incubaid.herolib.installers.infra.livekit
 import incubaid.herolib.installers.infra.zinit_installer
+import incubaid.herolib.installers.k8s.cryptpad
+import incubaid.herolib.installers.k8s.element_chat
 import incubaid.herolib.installers.lang.golang
 import incubaid.herolib.installers.lang.nodejs
 import incubaid.herolib.installers.lang.python
@@ -37,6 +39,7 @@ import incubaid.herolib.installers.threefold.griddriver
 import incubaid.herolib.installers.virt.cloudhypervisor
 import incubaid.herolib.installers.virt.docker
 import incubaid.herolib.installers.virt.herorunner
+import incubaid.herolib.installers.virt.kubernetes_installer
 import incubaid.herolib.installers.virt.lima
 import incubaid.herolib.installers.virt.pacman
 import incubaid.herolib.installers.virt.podman
@@ -80,6 +83,8 @@ pub fn run_all(args_ PlayArgs) ! {
 	gitea.play(mut plbook)!
 	livekit.play(mut plbook)!
 	zinit_installer.play(mut plbook)!
+	cryptpad.play(mut plbook)!
+	element_chat.play(mut plbook)!
 	golang.play(mut plbook)!
 	nodejs.play(mut plbook)!
 	python.play(mut plbook)!
@@ -92,6 +97,7 @@ pub fn run_all(args_ PlayArgs) ! {
 	cloudhypervisor.play(mut plbook)!
 	docker.play(mut plbook)!
 	herorunner.play(mut plbook)!
+	kubernetes_installer.play(mut plbook)!
 	lima.play(mut plbook)!
 	pacman.play(mut plbook)!
 	podman.play(mut plbook)!
@@ -313,7 +313,7 @@ pub fn (mut c AtlasClient) copy_files(collection_name string, page_name string,
 		}
 		if link.status == .external {
 			continue
 		}
 		// println(link)
 		// Get file path and copy
 		file_path := c.get_file_path(link.target_collection_name, link.target_item_name)!
@@ -1,34 +1,34 @@
 module atlas

 pub struct CollectionNotFound {
 	Error
 pub:
 	name string
 	msg  string
 }

 pub fn (err CollectionNotFound) msg() string {
 	return 'Collection ${err.name} not found: ${err.msg}'
 }

 pub struct PageNotFound {
 	Error
 pub:
 	collection string
 	page       string
 }

 pub fn (err PageNotFound) msg() string {
 	return 'Page ${err.page} not found in collection ${err.collection}'
 }

 pub struct FileNotFound {
 	Error
 pub:
 	collection string
 	file       string
 }

 pub fn (err FileNotFound) msg() string {
 	return 'File ${err.file} not found in collection ${err.collection}'
 }
@@ -1,94 +0,0 @@
-the main data is in the key value store:
-
-- each object has a u32 id
-- each object has a u16 version (version of the same data)
-- each object has a u16 schemaid (if the schema changes)
-- each object has tags u32 (to the tag table)
-- each object has a created_at timestamp
-- each object has an updated_at timestamp
-- each object has binary content (the data)
-- each object has a link to who can read/write/delete (lists of u32 per read/write/delete to group or user), and a link to a security policy u32
-- each object has a signature of the data by the user who created/updated it
-
-- there are users & groups
-- groups can have other groups and users inside
-- users & groups are unique u32 as well in the DB, so no collision
-
-this database does not know what the data is about; it is agnostic to schema
-
-now make the structs which represent the above:
-
-- data
-- user
-- group ([]u32, each links to a user or group; name, description)
-- tags ([]string which gets a unique id, so it is shorter to link to from a data object)
-- securitypolicy (see below)
-
-and an encoding scheme using lib/data/encoder: we need encode/decode on the structs, so we have the densest possible encoding
-
-now we need the implementation details for each struct, including the fields and their types, as well as the encoding/decoding logic.
-
-the outside is a server over openrpc which has
-
-- set (userid:u32, id:u32, data: Data, signature: string, tags:[]string) -> u32 (id can be 0, then it is new; if existing we need to check whether the user can do it); tags will be recalculated based on []string (lower case, sorted list, then md5 -> u32)
-- get (userid:u32, id: u32, signedid: string) -> Data, Tags as []string
-- exist (userid:u32, id: u32) -> bool // this we allow without signature
-- delete (userid:u32, id: u32, signedid: string) -> bool
-- list (userid:u32, signature: string, based on tags, schemaid, from creation/update and to creation/update), returns max 200 items -> []u32
-
-the interface is stateless, no previous connection known; based on the signature the server can verify the user is allowed to perform the action
-
-the backend database is redis (hsets and sets)
-
-## signing implementation
-
-the signing is implemented in redis itself, so no need to use vlang for that
-
-```bash
-# Generate an ephemeral signing keypair
-redis-cli -p $PORT AGE GENSIGN
-# Example output:
-# 1) "<verify_pub_b64>"
-# 2) "<sign_secret_b64>"
-
-# Sign a message with the secret
-redis-cli -p $PORT AGE SIGN "<sign_secret_b64>" "msg"
-# → returns "<signature_b64>"
-
-# Verify with the public key
-redis-cli -p $PORT AGE VERIFY "<verify_pub_b64>" "msg" "<signature_b64>"
-# → 1 (valid) or 0 (invalid)
-```
-
-versioning: when storing we don't have to worry about the version; the database will check if the object exists, find the newest version and then update
-
-## some of the base objects
-
-```v
-@[heap]
-pub struct SecurityPolicy {
-pub mut:
-	id     u32
-	read   []u32 // links to users & groups
-	write  []u32 // links to users & groups
-	delete []u32 // links to users & groups
-	public bool
-}
-
-@[heap]
-pub struct Tags {
-pub mut:
-	id    u32
-	names []string // unique per id
-	md5   string   // of sorted names, to make it easy to find the unique id
-}
-```
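For the `Tags` struct above, the dense encode/decode pair via `lib/data/encoder` could look like the following sketch. The encoder method names are assumptions about that module's API and are shown only to illustrate the convention of writing and reading fields in the same order:

```v
// Sketch only: encoder API names are assumptions, not verified.
pub fn (t Tags) encode() ![]u8 {
	mut e := encoder.new()
	e.add_u32(t.id)
	e.add_list_string(t.names)
	e.add_string(t.md5)
	return e.data
}

pub fn tags_decode(data []u8) !Tags {
	mut d := encoder.decoder_new(data)
	return Tags{
		id:    d.get_u32()
		names: d.get_list_string()
		md5:   d.get_string()
	}
}
```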
@@ -31,6 +31,7 @@ pub mut:
 	registration_desk DBRegistrationDesk
 	messages          DBMessages
 	tags              DBTags
+	prd               DBPrd
 	rpc_handler       &Handler
 }

@@ -91,6 +92,9 @@ pub fn new(args NewArgs) !&ModelsFactory {
 		tags: DBTags{
 			db: &mydb
 		}
+		prd: DBPrd{
+			db: &mydb
+		}
 		rpc_handler: &h
 	}
@@ -239,6 +239,7 @@ pub fn (mut self DBMessages) list(args MessageListArg) ![]Message {
 	return filtered_messages
 }

+// this is how we implement the openrpc calls
 pub fn message_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
 	match method {
 		'get' {
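Given the `prd` factory entry added above, a dispatcher for the PRD model would presumably follow the same shape as `message_handle`. The sketch below is hypothetical (only `message_handle` itself appears in this diff, and the exact `new_response` signature is assumed):

```v
// Hypothetical prd dispatcher mirroring message_handle's structure.
pub fn prd_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			prd := f.prd.get(params.u32())!
			return new_response(rpcid, json.encode(prd))
		}
		'exist' {
			if f.prd.exist(params.u32())! {
				return new_response_true(rpcid)
			}
			return new_response_false(rpcid)
		}
		else {
			return error('unknown method: ${method}')
		}
	}
}
```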
@@ -1,6 +0,0 @@
-## notes around how to do a calendly feature
-
-- make an agenda for the planning and call it as such; this has the timeboxes available for planning
-- create a template for calendar_event
-- create a planning item and link it to this template
-- select the rules which work for recurrence
275
lib/hero/heromodels/prd.md
Normal file
275
lib/hero/heromodels/prd.md
Normal file
@@ -0,0 +1,275 @@
|
|||||||
|
# 📘 **PRD Manual
|
||||||
|
|
||||||
|
# 1. **Product Overview**
|
||||||
|
|
||||||
|
### **What to Write**
|
||||||
|
|
||||||
|
A 2–4 sentence summary describing **what the product is**, **what problem it solves**, and **who it is for**.
|
||||||
|
|
||||||
|
### **Fields**
|
||||||
|
|
||||||
|
* `product_name`
|
||||||
|
* `version`
|
||||||
|
* `overview`
|
||||||
|
* `vision`
|
||||||
|
|
||||||
|
### **Example**
|
||||||
|
|
||||||
|
```
|
||||||
|
product_name: "Lumina PRD Builder"
|
||||||
|
version: "v1.0"
|
||||||
|
overview: "Lumina PRD Builder allows teams to generate structured, validated Product Requirements Documents using templates and AI guidance."
|
||||||
|
vision: "Enable any team to create clear requirements in minutes, improving alignment and execution speed."
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2. **Goals**
|
||||||
|
|
||||||
|
### **What to Write**
|
||||||
|
|
||||||
|
A list of measurable outcomes that define success.
|
||||||
|
|
||||||
|
### **Fields**
|
||||||
|
|
||||||
|
* `id`
|
||||||
|
* `title`
|
||||||
|
* `description`
|
||||||
|
* `gtype` → product / business / operational
|
||||||
|
|
||||||
|
### **Example**
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
id: "G1"
|
||||||
|
title: "Faster Requirements Creation"
|
||||||
|
description: "Reduce PRD creation time from 2 weeks to under 1 day for all teams."
|
||||||
|
gtype: .product
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "G2"
|
||||||
|
title: "Increase Adoption"
|
||||||
|
description: "Achieve 500 monthly active users within 90 days of launch."
|
||||||
|
gtype: .business
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# 3. **Use Cases**
|
||||||
|
|
||||||
|
### **What to Write**
|
||||||
|
|
||||||
|
Realistic user interactions showing how the product will be used.

### **UseCase Fields**

* id
* title
* actor
* goal
* steps
* success
* failure

### **Example**

```
{
  id: "UC1"
  title: "Generate a PRD from Template"
  actor: "Product Manager"
  goal: "Create a validated PRD quickly"
  steps: [
    "User selects 'New PRD'",
    "User chooses template type",
    "User fills fields or uses AI suggestions",
    "User exports PRD to Markdown"
  ]
  success: "A complete PRD is generated without missing required fields."
  failure: "Validation fails due to missing required data."
}
```

---

# 4. **Requirements**

### **What to Write**

Describe *what the system must do*, in clear, testable language.

### **Requirement Fields**

* id
* category
* title
* rtype
* description
* priority
* criteria
* dependencies

---

### **Example Requirement**

```
{
  id: "R1"
  category: "PRD Editor"
  title: "Template Selection"
  rtype: .functional
  description: "The system must allow users to select from a list of predefined PRD templates."
  priority: .high
  criteria: [
    {
      id: "AC1"
      description: "UI displays at least 5 templates"
      condition: "List contains >= 5 template entries"
    }
  ]
  dependencies: []
}
```

---

### **Example Requirement with Dependency**

```
{
  id: "R3"
  category: "Export"
  title: "Export PRD to Markdown"
  rtype: .functional
  description: "Users must be able to export the completed PRD to a Markdown file."
  priority: .medium
  criteria: [
    {
      id: "AC4"
      description: "File saved in .md format"
      condition: "Output file ends with '.md'"
    }
  ]
  dependencies: ["R1", "R2"]
}
```

---

# 5. **Constraints**

### **What to Write**

Non-negotiable boundaries the solution must respect.

### **Constraint Fields**

* id
* title
* description
* ctype

### **Example**

```
{
  id: "C1"
  title: "ARM64 Only"
  description: "The system must run on ARM64 servers to match company infrastructure."
  ctype: .technical
},
{
  id: "C2"
  title: "Q1 Deadline"
  description: "The first release must be launched before March 31."
  ctype: .business
},
{
  id: "C3"
  title: "GDPR Requirement"
  description: "All user data must be deletable within 24 hours of a user request."
  ctype: .compliance
}
```

---

# 6. **Risks**

### **What to Write**

Potential problems and their mitigation strategies, as a map of risk ID to description.

### **Example**

```
risks: {
  "RISK1": "Template library may be too small → Mitigate by allowing community contributions"
  "RISK2": "AI suggestions may be inaccurate → Add review/approve workflow"
  "RISK3": "Export format inconsistencies → Create automated format tests"
}
```

---

# 🔧 7. **Minimum PRD Example (Compact)**

Here is a minimal but valid PRD instance:

```
ProductRequirementsDoc{
  product_name: "Lumina PRD Builder"
  version: "v1.0"
  overview: "Tool to create structured PRDs."
  vision: "Fast, accurate requirements for all teams."

  goals: [
    Goal{
      id: "G1"
      title: "Speed"
      description: "Generate PRDs in under 10 minutes."
      gtype: .product
    }
  ]

  use_cases: [
    UseCase{
      id: "UC1"
      title: "Create PRD"
      actor: "PM"
      goal: "Produce PRD quickly"
      steps: ["Click new", "Fill data", "Export"]
      success: "Valid PRD generated"
      failure: "Missing fields"
    }
  ]

  requirements: [
    Requirement{
      id: "R1"
      category: "Editor"
      title: "Input Fields"
      rtype: .functional
      description: "User can fill out PRD fields"
      priority: .high
      criteria: []
      dependencies: []
    }
  ]

  constraints: [
    Constraint{
      id: "C1"
      title: "Must Support Markdown"
      description: "Export only in .md format"
      ctype: .technical
    }
  ]

  risks: {
    "R1": "User confusion → Add tooltips"
  }
}
```
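
To persist an instance like this through the model layer defined below, a minimal sketch (assuming the factory returned by `heromodels.new()!` exposes the `prd` accessor that `prd_handle` uses):

```
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.hero.heromodels

mut f := heromodels.new()!

// build the document via the DBPrd factory method
mut prd := f.prd.new(
	product_name: 'Lumina PRD Builder'
	version:      'v1.0'
	overview:     'Tool to create structured PRDs.'
	vision:       'Fast, accurate requirements for all teams.'
)!

prd = f.prd.set(prd)! // stores the PRD in Redis and assigns its id
loaded := f.prd.get(prd.id)! // decodes the stored bytes back into a struct
assert loaded.product_name == 'Lumina PRD Builder'
```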
415 lib/hero/heromodels/prd.v Normal file

@@ -0,0 +1,415 @@
module heromodels

import incubaid.herolib.data.encoder
import incubaid.herolib.data.ourtime
import incubaid.herolib.hero.db
import incubaid.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_true }
import incubaid.herolib.hero.user { UserRef }
import json

// Core PRD type, this is the root object
@[heap]
pub struct ProductRequirementsDoc {
	db.Base // provides the shared fields used below: id, updated_at, securitypolicy, tags
pub mut:
	product_name string
	version      string
	overview     string
	vision       string
	goals        []Goal
	use_cases    []UseCase
	requirements []Requirement
	constraints  []Constraint
	risks        map[string]string // risk ID -> description and mitigation
}

pub struct DBPrd {
pub mut:
	db &db.DB @[skip; str: skip]
}

// Basic enums for clarity

pub enum PRDPriority {
	low
	medium
	high
	critical
}

pub enum RequirementType {
	functional
	non_functional
	performance
	reliability
}

// A reusable acceptance criterion type
pub struct AcceptanceCriterion {
pub:
	id          string
	description string
	condition   string // testable condition
}

// A generic requirement type (functional or NFR)
pub struct Requirement {
pub:
	id           string
	category     string // to group requirements
	title        string
	rtype        RequirementType
	description  string
	priority     PRDPriority
	criteria     []AcceptanceCriterion
	dependencies []string // list of requirement IDs this one depends on
}

// A use case type
pub struct UseCase {
pub:
	id      string
	title   string
	actor   string
	goal    string
	steps   []string
	success string
	failure string
}

pub enum GoalType {
	product
	business
	operational
}

pub struct Goal {
pub:
	id          string
	title       string
	description string
	gtype       GoalType
}

pub enum ConstraintType {
	technical
	business
	operational
	scale
	compliance
	design
}

pub struct Constraint {
pub:
	id          string
	title       string
	description string
	ctype       ConstraintType
}

pub fn (self ProductRequirementsDoc) type_name() string {
	return 'prd'
}

pub fn (self ProductRequirementsDoc) description(methodname string) string {
	match methodname {
		'set' {
			return 'Create or update a product requirements document. Returns the ID of the PRD.'
		}
		'get' {
			return 'Retrieve a PRD by ID. Returns the complete PRD object.'
		}
		'delete' {
			return 'Delete a PRD by ID. Returns true if successful.'
		}
		'exist' {
			return 'Check if a PRD exists by ID. Returns true or false.'
		}
		'list' {
			return 'List all PRDs. Returns an array of PRD objects.'
		}
		else {
			return 'Generic method for PRD operations.'
		}
	}
}

pub fn (self ProductRequirementsDoc) example(methodname string) (string, string) {
	match methodname {
		'set' {
			return '{"product_name": "Test Product", "version": "v1.0", "overview": "A test product", "vision": "To test the system", "goals": [], "use_cases": [], "requirements": [], "constraints": []}', '1'
		}
		'get' {
			return '{"id": 1}', '{"product_name": "Test Product", "version": "v1.0", "overview": "A test product", "vision": "To test the system", "goals": [], "use_cases": [], "requirements": [], "constraints": []}'
		}
		'delete' {
			return '{"id": 1}', 'true'
		}
		'exist' {
			return '{"id": 1}', 'true'
		}
		'list' {
			return '{}', '[{"product_name": "Test Product", "version": "v1.0"}]'
		}
		else {
			return '{}', '{}'
		}
	}
}

pub fn (self ProductRequirementsDoc) dump(mut e encoder.Encoder) ! {
	e.add_string(self.product_name)
	e.add_string(self.version)
	e.add_string(self.overview)
	e.add_string(self.vision)

	// Encode goals array
	e.add_u16(u16(self.goals.len))
	for goal in self.goals {
		e.add_string(goal.id)
		e.add_string(goal.title)
		e.add_string(goal.description)
		e.add_u8(u8(goal.gtype))
	}

	// Encode use_cases array
	e.add_u16(u16(self.use_cases.len))
	for uc in self.use_cases {
		e.add_string(uc.id)
		e.add_string(uc.title)
		e.add_string(uc.actor)
		e.add_string(uc.goal)
		e.add_list_string(uc.steps)
		e.add_string(uc.success)
		e.add_string(uc.failure)
	}

	// Encode requirements array
	e.add_u16(u16(self.requirements.len))
	for req in self.requirements {
		e.add_string(req.id)
		e.add_string(req.category)
		e.add_string(req.title)
		e.add_u8(u8(req.rtype))
		e.add_string(req.description)
		e.add_u8(u8(req.priority))

		// Encode acceptance criteria
		e.add_u16(u16(req.criteria.len))
		for criterion in req.criteria {
			e.add_string(criterion.id)
			e.add_string(criterion.description)
			e.add_string(criterion.condition)
		}

		// Encode dependencies
		e.add_list_string(req.dependencies)
	}

	// Encode constraints array
	e.add_u16(u16(self.constraints.len))
	for constraint in self.constraints {
		e.add_string(constraint.id)
		e.add_string(constraint.title)
		e.add_string(constraint.description)
		e.add_u8(u8(constraint.ctype))
	}

	// Encode risks map as a u16 count followed by key/value string pairs
	e.add_u16(u16(self.risks.len))
	for k, v in self.risks {
		e.add_string(k)
		e.add_string(v)
	}
}
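
// NOTE: the binary layout is positional — strings are length-prefixed by the
// encoder, every slice is preceded by a u16 count, and enums travel as a u8.
// load() below must read fields back in exactly the order dump() writes them;
// any new field has to be appended to both functions together.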
pub fn (mut self DBPrd) load(mut o ProductRequirementsDoc, mut e encoder.Decoder) ! {
	o.product_name = e.get_string()!
	o.version = e.get_string()!
	o.overview = e.get_string()!
	o.vision = e.get_string()!

	// Decode goals
	goals_len := e.get_u16()!
	mut goals := []Goal{}
	for _ in 0 .. goals_len {
		goals << Goal{
			id:          e.get_string()!
			title:       e.get_string()!
			description: e.get_string()!
			// unsafe cast: an out-of-range u8 is not validated here
			gtype: unsafe { GoalType(e.get_u8()!) }
		}
	}
	o.goals = goals

	// Decode use_cases
	use_cases_len := e.get_u16()!
	mut use_cases := []UseCase{}
	for _ in 0 .. use_cases_len {
		use_cases << UseCase{
			id:      e.get_string()!
			title:   e.get_string()!
			actor:   e.get_string()!
			goal:    e.get_string()!
			steps:   e.get_list_string()!
			success: e.get_string()!
			failure: e.get_string()!
		}
	}
	o.use_cases = use_cases

	// Decode requirements
	requirements_len := e.get_u16()!
	mut requirements := []Requirement{}
	for _ in 0 .. requirements_len {
		req_id := e.get_string()!
		req_category := e.get_string()!
		req_title := e.get_string()!
		req_rtype := unsafe { RequirementType(e.get_u8()!) }
		req_description := e.get_string()!
		req_priority := unsafe { PRDPriority(e.get_u8()!) }

		// Decode criteria
		criteria_len := e.get_u16()!
		mut criteria := []AcceptanceCriterion{}
		for _ in 0 .. criteria_len {
			criteria << AcceptanceCriterion{
				id:          e.get_string()!
				description: e.get_string()!
				condition:   e.get_string()!
			}
		}

		// Decode dependencies
		dependencies := e.get_list_string()!

		requirements << Requirement{
			id:           req_id
			category:     req_category
			title:        req_title
			rtype:        req_rtype
			description:  req_description
			priority:     req_priority
			criteria:     criteria
			dependencies: dependencies
		}
	}
	o.requirements = requirements

	// Decode constraints
	constraints_len := e.get_u16()!
	mut constraints := []Constraint{}
	for _ in 0 .. constraints_len {
		constraints << Constraint{
			id:          e.get_string()!
			title:       e.get_string()!
			description: e.get_string()!
			ctype:       unsafe { ConstraintType(e.get_u8()!) }
		}
	}
	o.constraints = constraints

	// Decode risks map (count, then key/value pairs, mirroring dump)
	mut risks := map[string]string{}
	risks_len := e.get_u16()!
	for _ in 0 .. risks_len {
		k := e.get_string()!
		risks[k] = e.get_string()!
	}
	o.risks = risks
}

@[params]
pub struct PrdArg {
pub mut:
	id             u32
	product_name   string @[required]
	version        string
	overview       string
	vision         string
	goals          []Goal
	use_cases      []UseCase
	requirements   []Requirement
	constraints    []Constraint
	risks          map[string]string
	securitypolicy u32
	tags           []string
}

pub fn (mut self DBPrd) new(args PrdArg) !ProductRequirementsDoc {
	mut o := ProductRequirementsDoc{
		product_name: args.product_name
		version:      args.version
		overview:     args.overview
		vision:       args.vision
		goals:        args.goals
		use_cases:    args.use_cases
		requirements: args.requirements
		constraints:  args.constraints
		risks:        args.risks
		updated_at:   ourtime.now().unix()
	}

	o.securitypolicy = args.securitypolicy
	o.tags = self.db.tags_get(args.tags)!

	return o
}

pub fn (mut self DBPrd) set(o ProductRequirementsDoc) !ProductRequirementsDoc {
	return self.db.set[ProductRequirementsDoc](o)!
}

pub fn (mut self DBPrd) delete(id u32) !bool {
	if !self.db.exists[ProductRequirementsDoc](id)! {
		return false
	}
	self.db.delete[ProductRequirementsDoc](id)!
	return true
}

pub fn (mut self DBPrd) exist(id u32) !bool {
	return self.db.exists[ProductRequirementsDoc](id)!
}

pub fn (mut self DBPrd) get(id u32) !ProductRequirementsDoc {
	mut o, data := self.db.get_data[ProductRequirementsDoc](id)!
	mut e_decoder := encoder.decoder_new(data)
	self.load(mut o, mut e_decoder)!
	return o
}

pub fn (mut self DBPrd) list() ![]ProductRequirementsDoc {
	// list() returns the stored IDs; fetch and decode each one
	return self.db.list[ProductRequirementsDoc]()!.map(self.get(it)!)
}
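
// A direct round-trip through the encoder, without touching Redis
// (a sketch, assuming the usual herolib encoder.new() constructor):
//
//	mut e := encoder.new()
//	prd.dump(mut e)!
//	mut d := encoder.decoder_new(e.data)
//	mut copy := ProductRequirementsDoc{}
//	db_prd.load(mut copy, mut d)!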

pub fn prd_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.prd.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut args := db.decode_generic[PrdArg](params)!
			mut o := f.prd.new(args)!
			if args.id != 0 {
				o.id = args.id // an explicit id means update of an existing PRD
			}
			o = f.prd.set(o)!
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			deleted := f.prd.delete(id)!
			if deleted {
				return new_response_true(rpcid)
			} else {
				return new_error(rpcid,
					code:    404
					message: 'PRD with ID ${id} not found'
				)
			}
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.prd.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			res := f.prd.list()!
			return new_response(rpcid, json.encode(res))
		}
		else {
			return new_error(rpcid,
				code:    -32601 // JSON-RPC "method not found"
				message: 'Method ${method} not found on prd'
			)
		}
	}
}
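
// Dispatch summary for prd_handle (method, params payload, result), matching
// the description()/example() pairs above:
//
//	'set'    '{"product_name": "Test Product", ...}' -> new id, e.g. 1
//	'get'    '{"id": 1}'                             -> full PRD as JSON
//	'delete' '{"id": 1}'                             -> true (or a 404 error)
//	'exist'  '{"id": 1}'                             -> true / false
//	'list'   '{}'                                    -> array of PRDs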
226 lib/hero/heromodels/prd_test.v Normal file

@@ -0,0 +1,226 @@
module heromodels

import incubaid.herolib.hero.db

fn test_prd_new() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}

	mut args := PrdArg{
		product_name: 'Test Product'
		version:      'v1.0'
		overview:     'This is a test product.'
		vision:       'To revolutionize testing.'
		goals:        []
		use_cases:    []
		requirements: []
		constraints:  []
		risks:        {}
	}

	prd := db_prd.new(args)!

	assert prd.product_name == 'Test Product'
	assert prd.version == 'v1.0'
	assert prd.overview == 'This is a test product.'
	assert prd.vision == 'To revolutionize testing.'
	assert prd.goals.len == 0
	assert prd.use_cases.len == 0
	assert prd.requirements.len == 0
	assert prd.constraints.len == 0
	assert prd.risks.len == 0
	assert prd.updated_at > 0

	println('✓ PRD new test passed!')
}

fn test_prd_crud_operations() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}

	// Create a new PRD
	mut args := PrdArg{
		product_name: 'CRUD Test Product'
		version:      'v1.0'
		overview:     'This is a test product for CRUD.'
		vision:       'To test CRUD operations.'
		goals:        []
		use_cases:    []
		requirements: []
		constraints:  []
		risks:        {}
	}

	mut prd := db_prd.new(args)!
	prd = db_prd.set(prd)!
	original_id := prd.id

	// Test get
	retrieved_prd := db_prd.get(original_id)!
	assert retrieved_prd.product_name == 'CRUD Test Product'
	assert retrieved_prd.version == 'v1.0'
	assert retrieved_prd.id == original_id

	// Test exist
	exists := db_prd.exist(original_id)!
	assert exists == true

	// Test delete
	db_prd.delete(original_id)!
	exists_after_delete := db_prd.exist(original_id)!
	assert exists_after_delete == false

	println('✓ PRD CRUD operations test passed!')
}

fn test_prd_encoding_decoding_complex() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}

	mut goal := Goal{
		id:          'G1'
		title:       'Speed'
		description: 'Generate PRDs in minutes'
		gtype:       .product
	}

	mut use_case := UseCase{
		id:      'UC1'
		title:   'Create PRD'
		actor:   'Product Manager'
		goal:    'Produce PRD quickly'
		steps:   ['Click new', 'Fill data', 'Export']
		success: 'Valid PRD generated'
		failure: 'Missing fields'
	}

	mut criterion := AcceptanceCriterion{
		id:          'AC1'
		description: 'System displays template list'
		condition:   'List contains >= 5 templates'
	}

	mut requirement := Requirement{
		id:           'R1'
		category:     'Editor'
		title:        'Template Selection'
		rtype:        .functional
		description:  'User can select from predefined templates'
		priority:     .high
		criteria:     [criterion]
		dependencies: []
	}

	mut constraint := Constraint{
		id:          'C1'
		title:       'ARM64 Only'
		description: 'Must run on ARM64 servers'
		ctype:       .technical
	}

	mut risks := map[string]string{}
	risks['RISK1'] = 'Mitigation strategy here'

	mut args := PrdArg{
		product_name: 'Complex Test Product'
		version:      'v2.0'
		overview:     'Complete test with all fields'
		vision:       'Full feature test'
		goals:        [goal]
		use_cases:    [use_case]
		requirements: [requirement]
		constraints:  [constraint]
		risks:        risks
	}

	mut prd := db_prd.new(args)!
	prd = db_prd.set(prd)!
	prd_id := prd.id

	// Retrieve and verify
	retrieved_prd := db_prd.get(prd_id)!

	assert retrieved_prd.product_name == 'Complex Test Product'
	assert retrieved_prd.goals.len == 1
	assert retrieved_prd.goals[0].id == 'G1'
	assert retrieved_prd.goals[0].gtype == .product

	assert retrieved_prd.use_cases.len == 1
	assert retrieved_prd.use_cases[0].id == 'UC1'
	assert retrieved_prd.use_cases[0].steps.len == 3

	assert retrieved_prd.requirements.len == 1
	assert retrieved_prd.requirements[0].id == 'R1'
	assert retrieved_prd.requirements[0].criteria.len == 1
	assert retrieved_prd.requirements[0].priority == .high

	assert retrieved_prd.constraints.len == 1
	assert retrieved_prd.constraints[0].id == 'C1'
	assert retrieved_prd.constraints[0].ctype == .technical

	assert retrieved_prd.risks.len == 1
	assert retrieved_prd.risks['RISK1'] == 'Mitigation strategy here'

	println('✓ PRD encoding/decoding complex test passed!')
}

fn test_prd_type_name() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}

	mut args := PrdArg{
		product_name: 'Type Name Test'
		version:      'v1.0'
		overview:     'Test'
		vision:       'Test'
		goals:        []
		use_cases:    []
		requirements: []
		constraints:  []
		risks:        {}
	}

	prd := db_prd.new(args)!
	type_name := prd.type_name()
	assert type_name == 'prd'

	println('✓ PRD type_name test passed!')
}

fn test_prd_list() ! {
	mut mydb := db.new_test()!
	mut db_prd := DBPrd{
		db: &mydb
	}

	// Create multiple PRDs
	for i in 0 .. 3 {
		mut args := PrdArg{
			product_name: 'Product ${i}'
			version:      'v1.0'
			overview:     'Overview ${i}'
			vision:       'Vision ${i}'
			goals:        []
			use_cases:    []
			requirements: []
			constraints:  []
			risks:        {}
		}
		mut prd := db_prd.new(args)!
		prd = db_prd.set(prd)!
	}

	// List all PRDs
	all_prds := db_prd.list()!
	assert all_prds.len == 3

	println('✓ PRD list test passed!')
}
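
// The tests above run against the isolated test database from db.new_test(),
// so they can typically be executed standalone, e.g. with
// `v test lib/hero/heromodels/prd_test.v`.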
@@ -59,7 +59,7 @@ pub fn install(args_ InstallArgs) ! {
 	} else if pl == .alpine {
 		console.print_header(' - Alpine prepare')
 		osal.package_refresh()!
-		osal.package_install('git,curl,mc,tmux,screen,git-lfs,redis-server')!
+		osal.package_install('git,curl,mc,tmux,screen,git-lfs,redis')!
 	} else if pl == .arch {
 		console.print_header(' - Arch prepare')
 		osal.package_refresh()!
@@ -70,7 +70,7 @@ pub fn decode_request(data string) !Request {
 // Returns:
 // - A JSON string representation of the Request
 pub fn (req Request) encode() string {
-	return json2.encode(req, prettify: true)
+	return json2.encode(req)
 }

 // validate checks if the Request object contains all required fields
@@ -18,8 +18,8 @@ pub mut:
 	template_update bool
 	coderoot        string
 	// Client configuration
 	use_atlas bool // true = atlas_client, false = doctreeclient
 	atlas_dir string // Required when use_atlas = true
 }

@@ -32,8 +32,8 @@ pub mut:
 	template_update bool
 	coderoot        string
 	// Client configuration
 	use_atlas bool // true = atlas_client, false = doctreeclient
 	atlas_dir string // Required when use_atlas = true
 }

 // return the last known config
@@ -2,15 +2,15 @@ module site

 pub struct Page {
 pub mut:
 	name         string
 	title        string
 	description  string
 	draft        bool
 	position     int
 	hide_title   bool
 	src          string @[required] // always in format collection:page_name, can use the default collection if no : specified
-	path         string @[required] //is without the page name, so just the path to the folder where the page is in
+	path         string @[required] // is without the page name, so just the path to the folder where the page is in
 	section_name string
 	title_nr     int
 	slug         string
 }