Merge branch 'development' of https://github.com/freeflowuniverse/herolib into development

timurgordon committed on 2025-02-09 17:55:25 +00:00
516 changed files with 20551 additions and 4424 deletions

View File

@@ -2,9 +2,9 @@ name: Deploy Documentation to Pages
on:
push:
branches: ["main"]
branches: ["development"]
workflow_dispatch:
branches: ["main"]
branches: ["development"]
permissions:
contents: read
@@ -17,16 +17,14 @@ concurrency:
jobs:
deploy-documentation:
#if: startsWith(github.ref, 'refs/tags/')
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Install Vlang dependencies
run: sudo apt update && sudo apt install -y libgc-dev
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Vlang
run: ./install_v.sh
@@ -34,17 +32,16 @@ jobs:
- name: Generate documentation
run: |
./doc.vsh
# ls /home/runner/work/herolib/docs
find .
- name: Setup Pages
uses: actions/configure-pages@v3
uses: actions/configure-pages@v4
- name: Upload artifact
uses: actions/upload-pages-artifact@v1
uses: actions/upload-pages-artifact@v3
with:
path: "/home/runner/work/herolib/herolib/docs"
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v1
uses: actions/deploy-pages@v4

88
.github/workflows/hero_build.yml vendored Normal file
View File

@@ -0,0 +1,88 @@
name: Release Hero
permissions:
contents: write
on:
push:
workflow_dispatch:
jobs:
build:
timeout-minutes: 60
if: startsWith(github.ref, 'refs/tags/')
strategy:
fail-fast: false
matrix:
include:
- target: x86_64-unknown-linux-musl
os: ubuntu-latest
short-name: linux-i64
- target: aarch64-unknown-linux-musl
os: ubuntu-latest
short-name: linux-arm64
- target: aarch64-apple-darwin
os: macos-latest
short-name: macos-arm64
- target: x86_64-apple-darwin
os: macos-13
short-name: macos-i64
runs-on: ${{ matrix.os }}
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
- run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."
- name: Check out repository code
uses: actions/checkout@v4
- name: Setup V & Herolib
id: setup
run: ./install_v.sh --herolib
timeout-minutes: 10
- name: Do all the basic tests
timeout-minutes: 25
run: ./test_basic.vsh
- name: Build Hero
timeout-minutes: 15
run: |
set -e
v -w -d use_openssl -enable-globals cli/hero.v -o cli/hero-${{ matrix.target }}
- name: Upload
uses: actions/upload-artifact@v4
with:
name: hero-${{ matrix.target }}
path: cli/hero-${{ matrix.target }}
release_hero:
needs: build
runs-on: ubuntu-latest
permissions:
contents: write
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Check out repository code
uses: actions/checkout@v4
- name: Download Artifacts
uses: actions/download-artifact@v4
with:
path: cli/bins
merge-multiple: true
- name: Release
uses: softprops/action-gh-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref_name }}
name: Release ${{ github.ref_name }}
draft: false
fail_on_unmatched_files: true
generate_release_notes: true
files: cli/bins/*

View File

@@ -1,70 +0,0 @@
name: Build Hero on Macos & Run tests
permissions:
contents: write
on:
push:
workflow_dispatch:
jobs:
build:
strategy:
matrix:
include:
- target: aarch64-apple-darwin
os: macos-latest
short-name: macos-arm64
- target: x86_64-apple-darwin
os: macos-13
short-name: macos-i64
runs-on: ${{ matrix.os }}
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
- run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."
- name: Check out repository code
uses: actions/checkout@v3
- name: Setup Vlang
run: |
git clone --depth=1 https://github.com/vlang/v
cd v
make
sudo ./v symlink
cd ..
- name: Setup Herolib
run: |
mkdir -p ~/.vmodules/freeflowuniverse
ln -s $GITHUB_WORKSPACE/lib ~/.vmodules/freeflowuniverse/herolib
echo "Installing secp256k1..."
brew install secp256k1
echo "secp256k1 installation complete!"
- name: Install and Start Redis
run: |
brew update
brew install redis
# Start Redis
redis-server --daemonize yes
# Print versions
redis-cli --version
redis-server --version
- name: Build Hero
run: |
v -w -cg -gc none -no-retry-compilation -d use_openssl -enable-globals cli/hero.v
- name: Do all the basic tests
run: |
./test_basic.vsh
env:
LIVEKIT_API_KEY: ${{secrets.LIVEKIT_API_KEY}}
LIVEKIT_API_SECRET: ${{secrets.LIVEKIT_API_SECRET}}
LIVEKIT_URL: ${{secrets.LIVEKIT_URL}}

32
.github/workflows/test.yml vendored Normal file
View File

@@ -0,0 +1,32 @@
name: Build on Linux & Run tests
permissions:
contents: write
on:
push:
workflow_dispatch:
jobs:
build:
strategy:
matrix:
include:
- target: x86_64-unknown-linux-musl
os: ubuntu-latest
short-name: linux-i64
runs-on: ${{ matrix.os }}
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
- run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."
- name: Check out repository code
uses: actions/checkout@v3
- name: Setup V & Herolib
run: ./install_v.sh --herolib
- name: Do all the basic tests
run: ./test_basic.vsh

2
.gitignore vendored
View File

@@ -7,6 +7,7 @@ vls.*
vls.log
node_modules/
docs/
vdocs/
photonwrapper.so
x
.env
@@ -25,7 +26,6 @@ dump.rdb
output/
*.db
.stellar
vdocs/
data.ms/
test_basic
cli/hero

View File

@@ -1,13 +1,22 @@
# herolib
A smaller version of herolib with only the items we need for hero.
> [documentation here](https://freeflowuniverse.github.io/herolib/)
> [documentation of the library](https://freeflowuniverse.github.io/herolib/)
## automated install
## hero install for users
```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/main/install_v.sh' > /tmp/install_v.sh
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh > /tmp/install_hero.sh
bash /tmp/install_hero.sh
```
This tool can be used to work with git, build books, play with hero AI, ...
## automated install for developers
```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
bash /tmp/install_v.sh --analyzer --herolib
# DON'T FORGET TO START A NEW SHELL (otherwise the paths will not be set)
```
@@ -16,7 +25,7 @@ bash /tmp/install_v.sh --analyzer --herolib
```bash
#~/code/github/freeflowuniverse/herolib/install_v.sh --help
~/code/github/freeflowuniverse/herolib/install_v.sh --help
V & HeroLib Installer Script

187
aiprompts/reflection.md Normal file
View File

@@ -0,0 +1,187 @@
## Compile time reflection
`$` is used as a prefix for compile-time (also referred to as 'comptime') operations.

Having built-in JSON support is nice, but V also allows you to create efficient serializers for any data format. V has compile-time `if` and `for` constructs:
### .fields

You can iterate over struct fields using `.fields`. It also works with generic types (e.g. `T.fields`) and generic arguments (e.g. `param.fields` where `fn gen[T](param T) {`).

```v
struct User {
	name string
	age  int
}

fn main() {
	$for field in User.fields {
		$if field.typ is string {
			println('${field.name} is of type string')
		}
	}
}

// Output:
// name is of type string
```
### .values

You can read Enum values and their attributes.

```v
enum Color {
	red  @[RED]  // first attribute
	blue @[BLUE] // second attribute
}

fn main() {
	$for e in Color.values {
		println(e.name)
		println(e.attrs)
	}
}

// Output:
// red
// ['RED']
// blue
// ['BLUE']
```
### .attributes

You can read Struct attributes.

```v
@[COLOR]
struct Foo {
	a int
}

fn main() {
	$for e in Foo.attributes {
		println(e)
	}
}

// Output:
// StructAttribute{
//     name: 'COLOR'
//     has_arg: false
//     arg: ''
//     kind: plain
// }
```
### .variants

You can read the variant types of a sum type.

```v
type MySum = int | string

fn main() {
	$for v in MySum.variants {
		$if v.typ is int {
			println('has int type')
		} $else $if v.typ is string {
			println('has string type')
		}
	}
}

// Output:
// has int type
// has string type
```
### .methods

You can retrieve information about struct methods.

```v
struct Foo {
}

fn (f Foo) test() int {
	return 123
}

fn (f Foo) test2() string {
	return 'foo'
}

fn main() {
	foo := Foo{}
	$for m in Foo.methods {
		$if m.return_type is int {
			print('${m.name} returns int: ')
			println(foo.$method())
		} $else $if m.return_type is string {
			print('${m.name} returns string: ')
			println(foo.$method())
		}
	}
}

// Output:
// test returns int: 123
// test2 returns string: foo
```
### .params

You can retrieve information about struct method params.

```v
struct Test {
}

fn (t Test) foo(arg1 int, arg2 string) {
}

fn main() {
	$for m in Test.methods {
		$for param in m.params {
			println('${typeof(param.typ).name}: ${param.name}')
		}
	}
}

// Output:
// int: arg1
// string: arg2
```
## Example
```v
// An example deserializer implementation

struct User {
	name string
	age  int
}

fn main() {
	data := 'name=Alice\nage=18'
	user := decode[User](data)
	println(user)
}

fn decode[T](data string) T {
	mut result := T{}
	// compile-time `for` loop
	// T.fields gives an array of a field metadata type
	$for field in T.fields {
		$if field.typ is string {
			// $(string_expr) produces an identifier
			result.$(field.name) = get_string(data, field.name)
		} $else $if field.typ is int {
			result.$(field.name) = get_int(data, field.name)
		}
	}
	return result
}

fn get_string(data string, field_name string) string {
	for line in data.split_into_lines() {
		key_val := line.split('=')
		if key_val[0] == field_name {
			return key_val[1]
		}
	}
	return ''
}

fn get_int(data string, field string) int {
	return get_string(data, field).int()
}

// `decode<User>` generates:
// fn decode_User(data string) User {
//     mut result := User{}
//     result.name = get_string(data, 'name')
//     result.age = get_int(data, 'age')
//     return result
// }
```

View File

@@ -11,7 +11,7 @@
when I generate vlang scripts I will always use the .vsh extension and use the following as the first line:
```
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
```
- a .vsh is a v shell script and can be executed as is, no need to use v ...
@@ -21,7 +21,7 @@ when I generate vlang scripts I will always use .vsh extension and use following
## to do argument parsing use the following examples
```v
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import os
import flag
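
// The lines below are an illustrative sketch only (the rest of this example is
// truncated in the diff context); they show typical usage of V's `flag` module.
mut fp := flag.new_flag_parser(os.args)
fp.application('example')
fp.description('demonstrates basic argument parsing in a .vsh script')
fp.skip_executable()
name := fp.string('name', `n`, 'world', 'name to greet')
verbose := fp.bool('verbose', `v`, false, 'enable verbose output')
fp.finalize() or {
	eprintln(err)
	println(fp.usage())
	exit(1)
}
if verbose {
	println('greeting ${name} verbosely')
}
println('hello ${name}')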

View File

@@ -2238,7 +2238,7 @@ be faster, since there is no need for a re-compilation of a script, that has not
An example `deploy.vsh`:
```v oksyntax
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
// Note: The shebang line above, associates the .vsh file to V on Unix-like systems,
// so it can be run just by specifying the path to the .vsh file, once it's made
@@ -2300,11 +2300,11 @@ Whilst V does normally not allow vsh scripts without the designated file extensi
to circumvent this rule and have a file with a fully custom name and shebang. Whilst this feature
exists it is only recommended for specific usecases like scripts that will be put in the path and
should **not** be used for things like build or deploy scripts. To access this feature start the
file with `#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
file with `#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
the built executable. This will run in crun mode so it will only rebuild if changes to the script
were made and keep the binary as `tmp.<scriptfilename>`. **Caution**: if this filename already
exists the file will be overridden. If you want to rebuild each time and not keep this binary
instead use `#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
instead use `#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
# Appendices

View File

@@ -3,7 +3,7 @@
this is how we want example scripts to be, see the first line
```vlang
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.sysadmintools.daguserver

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env -S v -n -w -parallel-cc -enable-globals run
// #!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import flag
@@ -45,7 +45,7 @@ compile_cmd := if os.user_os() == 'macos' {
if prod_mode {
'v -enable-globals -w -n -prod hero.v'
} else {
'v -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals hero.v'
'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
}
} else {
if prod_mode {
@@ -66,7 +66,7 @@ os.chmod('hero', 0o755) or { panic('Failed to make hero binary executable: ${err
// Ensure destination directory exists
os.mkdir_all(os.dir(heropath)) or { panic('Failed to create directory ${os.dir(heropath)}: ${err}') }
println(heropath)
// Copy to destination paths
os.cp('hero', heropath) or { panic('Failed to copy hero binary to ${heropath}: ${err}') }
os.cp('hero', '/tmp/hero') or { panic('Failed to copy hero binary to /tmp/hero: ${err}') }

View File

@@ -89,5 +89,9 @@ fn hero_upload() ! {
}
fn main() {
//os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
println("compile hero can take 60 sec+ on osx.")
os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
println( "upload:")
hero_upload() or { eprintln(err) exit(1) }
}

View File

@@ -19,6 +19,26 @@ fn playcmds_do(path string) ! {
}
fn do() ! {
if ! core.is_osx()! {
if os.getenv('SUDO_COMMAND') != '' || os.getenv('SUDO_USER') != '' {
println('Error: Please do not run this program with sudo!')
exit(1) // Exit with error code
}
}
if os.getuid() == 0 {
if core.is_osx()! {
eprintln("please do not run hero as root in osx.")
exit(1)
}
} else {
if ! core.is_osx()! {
eprintln("please do run hero as root, don't use sudo.")
exit(1)
}
}
if os.args.len == 2 {
mypath := os.args[1]
if mypath.to_lower().ends_with('.hero') {
@@ -31,7 +51,7 @@ fn do() ! {
mut cmd := Command{
name: 'hero'
description: 'Your HERO toolset.'
version: '2.0.0'
version: '1.0.8'
}
// herocmds.cmd_run_add_flags(mut cmd)
@@ -81,6 +101,7 @@ fn do() ! {
// herocmds.cmd_zola(mut cmd)
// herocmds.cmd_juggler(mut cmd)
herocmds.cmd_generator(mut cmd)
herocmds.cmd_docusaurus(mut cmd)
// herocmds.cmd_docsorter(mut cmd)
// cmd.add_command(publishing.cmd_publisher(pre_func))
cmd.setup()

40
doc.vsh
View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
@@ -26,9 +26,9 @@ os.chdir(herolib_path) or {
panic('Failed to change directory to herolib: ${err}')
}
os.rmdir_all('_docs') or {}
os.rmdir_all('docs') or {}
os.rmdir_all('vdocs') or {}
os.mkdir_all('_docs') or {}
os.mkdir_all('docs') or {}
os.mkdir_all('vdocs') or {}
// Generate HTML documentation
println('Generating HTML documentation...')
@@ -42,13 +42,12 @@ os.chdir(abs_dir_of_script) or {
// Generate Markdown documentation
println('Generating Markdown documentation...')
os.rmdir_all('vdocs') or {}
// if os.system('v doc -m -no-color -f md -o ../vdocs/v/') != 0 {
// panic('Failed to generate V markdown documentation')
// }
if os.system('v doc -m -no-color -f md -o vdocs/herolib/') != 0 {
if os.system('v doc -m -no-color -f md -o vdocs/') != 0 {
panic('Failed to generate Hero markdown documentation')
}
@@ -62,4 +61,33 @@ $if !linux {
}
}
// Create Jekyll required files
println('Creating Jekyll files...')
os.mkdir_all('docs/assets/css') or {}
// Create style.scss
style_content := '---\n---\n\n@import "{{ site.theme }}";'
os.write_file('docs/assets/css/style.scss', style_content) or {
panic('Failed to create style.scss: ${err}')
}
// Create _config.yml
config_content := 'title: HeroLib Documentation
description: Documentation for the HeroLib project
theme: jekyll-theme-primer
baseurl: /herolib
exclude:
- Gemfile
- Gemfile.lock
- node_modules
- vendor/bundle/
- vendor/cache/
- vendor/gems/
- vendor/ruby/'
os.write_file('docs/_config.yml', config_content) or {
panic('Failed to create _config.yml: ${err}')
}
println('Documentation generation completed successfully!')

View File

@@ -1,3 +0,0 @@
.bash_history
.openvscode-server/
.cache/

View File

@@ -1,48 +0,0 @@
# Use Ubuntu 24.04 as the base image
FROM ubuntu:24.04
# Set the working directory
WORKDIR /root
# Copy local installation scripts into the container
COPY scripts/install_v.sh /tmp/install_v.sh
COPY scripts/install_herolib.vsh /tmp/install_herolib.vsh
COPY scripts/install_vscode.sh /tmp/install_vscode.sh
COPY scripts/ourinit.sh /usr/local/bin/
# Make the scripts executable
RUN chmod +x /tmp/install_v.sh /tmp/install_herolib.vsh
RUN apt-get update && apt-get install -y \
curl bash sudo mc wget tmux htop openssh-server
RUN bash /tmp/install_v.sh
RUN yes y | bash /tmp/install_v.sh --analyzer
RUN bash /tmp/install_vscode.sh
RUN /tmp/install_herolib.vsh && \
mkdir -p /var/run/sshd && \
echo 'PermitRootLogin yes' >> /etc/ssh/sshd_config && \
echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config && \
chown -R root:root /root/.ssh && \
chmod -R 700 /root/.ssh/ && \
chmod 600 /root/.ssh/authorized_keys && \
service ssh start && \
apt-get clean && \
echo "PS1='HERO: \w \$ '" >> ~/.bashrc \
rm -rf /var/lib/apt/lists/*
#SSH
RUN mkdir -p /var/run/sshd && \
echo 'PermitRootLogin yes' >> /etc/ssh/sshd_config && \
echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config && \
chown -R root:root /root/.ssh && \
chmod -R 700 /root/.ssh/ && \
chmod 600 /root/.ssh/authorized_keys && \
service ssh start
ENTRYPOINT ["/bin/bash"]
CMD ["/bin/bash"]

View File

@@ -1,36 +0,0 @@
#!/bin/bash -e
# Get the directory where the script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"
# Docker image and container names
DOCKER_IMAGE_NAME="docusaurus"
DEBUG_CONTAINER_NAME="herolib"
function cleanup {
if docker ps -aq -f name="$DEBUG_CONTAINER_NAME" &>/dev/null; then
echo "Cleaning up leftover debug container..."
docker rm -f "$DEBUG_CONTAINER_NAME" &>/dev/null || true
fi
}
trap cleanup EXIT
# Attempt to build the Docker image
BUILD_LOG=$(mktemp)
set +e
docker build --name herolib --progress=plain -t "$DOCKER_IMAGE_NAME" .
BUILD_EXIT_CODE=$?
set -e
# Handle build failure
if [ $BUILD_EXIT_CODE -ne 0 ]; then
echo -e "\\n[ERROR] Docker build failed.\n"
echo -e "remove the part which didn't build in the Dockerfile, the run again and to debug do:"
echo docker run --name herolib -it --entrypoint=/bin/bash "herolib"
exit $BUILD_EXIT_CODE
else
echo -e "\\n[INFO] Docker build completed successfully."
fi

View File

@@ -1,19 +0,0 @@
#!/bin/bash -ex
# Get the directory where the script is located
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"
# Remove any existing container named 'debug' (ignore errors)
docker rm -f herolib > /dev/null 2>&1
docker run --name herolib -it \
--entrypoint="/usr/local/bin/ourinit.sh" \
-v "${SCRIPT_DIR}/scripts:/scripts" \
-v "$HOME/code:/root/code" \
-p 4100:8100 \
-p 4101:8101 \
-p 4102:8102 \
-p 4379:6379 \
-p 4022:22 \
-p 4000:3000 herolib

View File

@@ -1,34 +0,0 @@
services:
postgres:
image: postgres:latest
container_name: postgres_service
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: planetfirst
POSTGRES_DB: mydb
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
herolib:
build:
context: .
dockerfile: Dockerfile
image: herolib:latest
container_name: herolib
volumes:
- ~/code:/root/code
stdin_open: true
tty: true
ports:
- "4100:8100"
- "4101:8101"
- "4102:8102"
- "4379:6379"
- "4000:3000"
- "4022:22"
command: ["/usr/local/bin/ourinit.sh"]
volumes:
postgres_data:

View File

@@ -1,98 +0,0 @@
#!/bin/bash -e
# Set version and file variables
OPENVSCODE_SERVER_VERSION="1.97.0"
TMP_DIR="/tmp"
FILENAME="openvscode.tar.gz"
FILE_PATH="$TMP_DIR/$FILENAME"
INSTALL_DIR="/opt/openvscode"
BIN_PATH="/usr/local/bin/openvscode-server"
TMUX_SESSION="openvscode-server"
# Function to detect architecture
get_architecture() {
ARCH=$(uname -m)
case "$ARCH" in
x86_64)
echo "x64"
;;
aarch64)
echo "arm64"
;;
*)
echo "Unsupported architecture: $ARCH" >&2
exit 1
;;
esac
}
# Check if OpenVSCode Server is already installed
if [ -d "$INSTALL_DIR" ] && [ -x "$BIN_PATH" ]; then
echo "OpenVSCode Server is already installed at $INSTALL_DIR. Skipping download and installation."
else
# Determine architecture-specific URL
ARCH=$(get_architecture)
if [ "$ARCH" == "x64" ]; then
DOWNLOAD_URL="https://github.com/gitpod-io/openvscode-server/releases/download/openvscode-server-insiders-v${OPENVSCODE_SERVER_VERSION}/openvscode-server-insiders-v${OPENVSCODE_SERVER_VERSION}-linux-x64.tar.gz"
elif [ "$ARCH" == "arm64" ]; then
DOWNLOAD_URL="https://github.com/gitpod-io/openvscode-server/releases/download/openvscode-server-insiders-v${OPENVSCODE_SERVER_VERSION}/openvscode-server-insiders-v${OPENVSCODE_SERVER_VERSION}-linux-arm64.tar.gz"
fi
# Navigate to temporary directory
cd "$TMP_DIR"
# Remove existing file if it exists
if [ -f "$FILE_PATH" ]; then
rm -f "$FILE_PATH"
fi
# Download file using curl
curl -L "$DOWNLOAD_URL" -o "$FILE_PATH"
# Verify file size is greater than 40 MB (40 * 1024 * 1024 bytes)
FILE_SIZE=$(stat -c%s "$FILE_PATH")
if [ "$FILE_SIZE" -le $((40 * 1024 * 1024)) ]; then
echo "Error: Downloaded file size is less than 40 MB." >&2
exit 1
fi
# Extract the tar.gz file
EXTRACT_DIR="openvscode-server-insiders-v${OPENVSCODE_SERVER_VERSION}-linux-${ARCH}"
tar -xzf "$FILE_PATH"
# Move the extracted directory to the install location
if [ -d "$INSTALL_DIR" ]; then
rm -rf "$INSTALL_DIR"
fi
mv "$EXTRACT_DIR" "$INSTALL_DIR"
# Create a symlink for easy access
ln -sf "$INSTALL_DIR/bin/openvscode-server" "$BIN_PATH"
# Verify installation
if ! command -v openvscode-server >/dev/null 2>&1; then
echo "Error: Failed to create symlink for openvscode-server." >&2
exit 1
fi
# Install default plugins
PLUGINS=("ms-python.python" "esbenp.prettier-vscode" "saoudrizwan.claude-dev" "yzhang.markdown-all-in-one" "ms-vscode-remote.remote-ssh" "ms-vscode.remote-explorer" "charliermarsh.ruff" "qwtel.sqlite-viewer" "vosca.vscode-v-analyzer" "tomoki1207.pdf")
for PLUGIN in "${PLUGINS[@]}"; do
"$INSTALL_DIR/bin/openvscode-server" --install-extension "$PLUGIN"
done
echo "Default plugins installed: ${PLUGINS[*]}"
# Clean up temporary directory
if [ -d "$TMP_DIR" ]; then
find "$TMP_DIR" -maxdepth 1 -type f -name "openvscode*" -exec rm -f {} \;
fi
fi
# Start OpenVSCode Server in a tmux session
if tmux has-session -t "$TMUX_SESSION" 2>/dev/null; then
tmux kill-session -t "$TMUX_SESSION"
fi
tmux new-session -d -s "$TMUX_SESSION" "$INSTALL_DIR/bin/openvscode-server"
echo "OpenVSCode Server is running in a tmux session named '$TMUX_SESSION'."

View File

@@ -1,14 +0,0 @@
#!/bin/bash -e
# redis-server --daemonize yes
# TMUX_SESSION="vscode"
# # Start OpenVSCode Server in a tmux session
# if tmux has-session -t "$TMUX_SESSION" 2>/dev/null; then
# tmux kill-session -t "$TMUX_SESSION"
# fi
# tmux new-session -d -s "$TMUX_SESSION" "/usr/local/bin/openvscode-server --host 0.0.0.0 --without-connection-token"
# service ssh start
exec /bin/bash

View File

@@ -1,61 +0,0 @@
#!/bin/bash -e
# Get the directory where the script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"
CONTAINER_NAME="herolib"
TARGET_PORT=4000
# Function to check if a container is running
is_container_running() {
docker ps --filter "name=$CONTAINER_NAME" --filter "status=running" -q
}
# Function to check if a port is accessible
is_port_accessible() {
nc -zv 127.0.0.1 "$1" &>/dev/null
}
# Check if the container exists and is running
if ! is_container_running; then
echo "Container $CONTAINER_NAME is not running."
# Check if the container exists but is stopped
if docker ps -a --filter "name=$CONTAINER_NAME" -q | grep -q .; then
echo "Starting existing container $CONTAINER_NAME..."
docker start "$CONTAINER_NAME"
else
echo "Container $CONTAINER_NAME does not exist. Attempting to start with start.sh..."
if [[ -f "$SCRIPT_DIR/start.sh" ]]; then
bash "$SCRIPT_DIR/start.sh"
else
echo "Error: start.sh not found in $SCRIPT_DIR."
exit 1
fi
fi
# Wait for the container to be fully up
sleep 5
fi
# Verify the container is running
if ! is_container_running; then
echo "Error: Failed to start container $CONTAINER_NAME."
exit 1
fi
echo "Container $CONTAINER_NAME is running."
# Check if the target port is accessible
if is_port_accessible "$TARGET_PORT"; then
echo "Port $TARGET_PORT is accessible."
else
echo "Port $TARGET_PORT is not accessible. Please check the service inside the container."
fi
# Enter the container
echo
echo " ** WE NOW LOGIN TO THE CONTAINER ** "
echo
docker exec -it herolib bash

View File

@@ -1,3 +0,0 @@
#!/bin/bash -e
ssh root@localhost -p 4022

View File

@@ -1,63 +0,0 @@
#!/bin/bash -e
# Get the directory where the script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"
# Define variables
CONTAINER_NAME="herolib"
CONTAINER_SSH_DIR="/root/.ssh"
AUTHORIZED_KEYS="authorized_keys"
TEMP_AUTH_KEYS="/tmp/authorized_keys"
# Step 1: Create a temporary file to store public keys
> $TEMP_AUTH_KEYS # Clear the file if it exists
# Step 2: Add public keys from ~/.ssh/ if they exist
if ls ~/.ssh/*.pub 1>/dev/null 2>&1; then
cat ~/.ssh/*.pub >> $TEMP_AUTH_KEYS
fi
# Step 3: Check if ssh-agent is running and get public keys from it
if pgrep ssh-agent >/dev/null; then
echo "ssh-agent is running. Fetching keys..."
ssh-add -L >> $TEMP_AUTH_KEYS 2>/dev/null
else
echo "ssh-agent is not running or no keys loaded."
fi
# Step 4: Ensure the temporary file is not empty
if [ ! -s $TEMP_AUTH_KEYS ]; then
echo "No public keys found. Exiting."
exit 1
fi
# Step 5: Ensure the container's SSH directory exists
docker exec -it $CONTAINER_NAME mkdir -p $CONTAINER_SSH_DIR
docker exec -it $CONTAINER_NAME chmod 700 $CONTAINER_SSH_DIR
# Step 6: Copy the public keys into the container's authorized_keys file
docker cp $TEMP_AUTH_KEYS $CONTAINER_NAME:$CONTAINER_SSH_DIR/$AUTHORIZED_KEYS
# Step 7: Set proper permissions for authorized_keys
docker exec -it $CONTAINER_NAME chmod 600 $CONTAINER_SSH_DIR/$AUTHORIZED_KEYS
# Step 8: Install and start the SSH server inside the container
docker exec -it $CONTAINER_NAME bash -c "
apt-get update &&
apt-get install -y openssh-server &&
mkdir -p /var/run/sshd &&
echo 'PermitRootLogin yes' >> /etc/ssh/sshd_config &&
echo 'PasswordAuthentication no' >> /etc/ssh/sshd_config &&
chown -R root:root /root/.ssh &&
chmod -R 700 /root/.ssh/ &&
chmod 600 /root/.ssh/authorized_keys &&
service ssh start
"
# Step 9: Clean up temporary file on the host
rm $TEMP_AUTH_KEYS
echo "SSH keys added and SSH server configured. You can now SSH into the container."
ssh root@localhost -p 4022

View File

@@ -1,8 +0,0 @@
#!/bin/bash -e
# Get the directory where the script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import flag
@@ -64,7 +64,7 @@ os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowunive
println('Herolib installation completed successfully!')
// Add vtest alias
addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -no-retry-compilation -cc tcc test\' ') or {
addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -cc tcc test\' ') or {
eprintln('Failed to add vtest alias: ${err}')
}

View File

@@ -0,0 +1,22 @@
version: '3.9'
services:
db:
image: 'postgres:17.2-alpine3.21'
restart: always
ports:
- 5432:5432
environment:
POSTGRES_PASSWORD: 1234
networks:
- my_network
adminer:
image: adminer
restart: always
ports:
- 8080:8080
networks:
- my_network
networks:
my_network:

View File

@@ -0,0 +1,6 @@
- Server (Host): `db` (Docker Compose creates an internal network and uses service names as hostnames)
- Username: `postgres` (default PostgreSQL username)
- Password: `1234` (as set in your POSTGRES_PASSWORD environment variable)
- Database: leave it empty or enter `postgres` (the default database)
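
The same credentials can also be used from herolib itself. A minimal sketch, assuming the compose stack above is running and reachable on localhost:5432 from the host; the instance name `adminer_demo` is just an illustrative choice, and the calls mirror the psql.vsh example elsewhere in this commit:

```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.postgresql_client

// configure a client with the credentials from docker-compose.yml
// (from the host we connect to localhost, not the internal hostname `db`)
postgresql_client.play(
	heroscript: "
	!!postgresql_client.configure
	    name:'adminer_demo'
	    user: 'postgres'
	    port: 5432
	    host: 'localhost'
	    password: '1234'
	    dbname: 'postgres'
	"
)!

mut db_client := postgresql_client.get(name: 'adminer_demo')!
exists := db_client.db_exists('postgres')!
println('postgres reachable: ${exists}')
```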

13
docker/postgresql/start.sh Executable file
View File

@@ -0,0 +1,13 @@
#!/bin/bash -e
# Get the directory where the script is located
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$SCRIPT_DIR"
# Stop any existing containers and remove them
docker compose down
# Start the services in detached mode
docker compose up -d
echo "PostgreSQL is ready"

View File

@@ -34,7 +34,7 @@ The examples directory demonstrates various capabilities of HeroLib:
When creating V scripts (.vsh files), always use the following shebang:
```bash
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
```
This shebang ensures:

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.builder
import freeflowuniverse.herolib.core.pathlib

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.builder
import freeflowuniverse.herolib.core.pathlib

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.builder
import freeflowuniverse.herolib.core.pathlib

25
examples/clients/mail.vsh Executable file
View File

@@ -0,0 +1,25 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.mailclient
// remove the previous one, otherwise the env variables are not read
mailclient.config_delete(name: 'test')!
// env variables which need to be set are:
// - MAIL_FROM=...
// - MAIL_PASSWORD=...
// - MAIL_PORT=465
// - MAIL_SERVER=...
// - MAIL_USERNAME=...
mut client := mailclient.get(name: 'test')!
println(client)
client.send(
subject: 'this is a test'
to: 'kristof@incubaid.com'
body: '
this is my email content
'
)!

108
examples/clients/mycelium.vsh Executable file
View File

@@ -0,0 +1,108 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.mycelium
import freeflowuniverse.herolib.installers.net.mycelium as mycelium_installer
import freeflowuniverse.herolib.osal
import time
import os
import encoding.base64
const server1_port = 9001
const server2_port = 9002
fn terminate(port int) ! {
// Step 1: Run lsof to get process details
res := os.execute('lsof -i:${port}')
if res.exit_code != 0 {
return error('no service running at port ${port} due to: ${res.output}')
}
// Step 2: Parse the output to extract the PID
lines := res.output.split('\n')
if lines.len < 2 {
return error('no process found running on port ${port}')
}
// The PID is the second column in the output
fields := lines[1].split(' ')
if fields.len < 2 {
return error('failed to parse lsof output')
}
pid := fields[1]
// Step 3: Kill the process using the PID
kill_res := os.execute('kill ${pid}')
if kill_res.exit_code != 0 {
return error('failed to kill process ${pid}: ${kill_res.output}')
}
println('Successfully terminated process ${pid} running on port ${port}')
}
// Install mycelium if it is not already installed.
mut installer := mycelium_installer.get()!
installer.install()!
mycelium.delete()!
spawn fn () {
os.execute('mkdir -p /tmp/mycelium_server1 && cd /tmp/mycelium_server1 && mycelium --peers tcp://188.40.132.242:9651 quic://[2a01:4f8:212:fa6::2]:9651 tcp://185.69.166.7:9651 quic://[2a02:1802:5e:0:ec4:7aff:fe51:e36b]:9651 tcp://65.21.231.58:9651 quic://[2a01:4f9:5a:1042::2]:9651 tcp://[2604:a00:50:17b:9e6b:ff:fe1f:e054]:9651 quic://5.78.122.16:9651 tcp://[2a01:4ff:2f0:3621::1]:9651 quic://142.93.217.194:9651 --tun-name tun2 --tcp-listen-port 9652 --quic-listen-port 9653 --api-addr 127.0.0.1:${server1_port}')
}()
spawn fn () {
os.execute('mkdir -p /tmp/mycelium_server2 && cd /tmp/mycelium_server2 && mycelium --peers tcp://188.40.132.242:9651 quic://[2a01:4f8:212:fa6::2]:9651 tcp://185.69.166.7:9651 quic://[2a02:1802:5e:0:ec4:7aff:fe51:e36b]:9651 tcp://65.21.231.58:9651 quic://[2a01:4f9:5a:1042::2]:9651 tcp://[2604:a00:50:17b:9e6b:ff:fe1f:e054]:9651 quic://5.78.122.16:9651 tcp://[2a01:4ff:2f0:3621::1]:9651 quic://142.93.217.194:9651 --tun-name tun3 --tcp-listen-port 9654 --quic-listen-port 9655 --api-addr 127.0.0.1:${server2_port}')
}()
defer {
terminate(server1_port) or {}
terminate(server2_port) or {}
}
time.sleep(2 * time.second)
mut client1 := mycelium.get()!
client1.server_url = 'http://localhost:${server1_port}'
client1.name = 'client1'
println(client1)
mut client2 := mycelium.get()!
client2.server_url = 'http://localhost:${server2_port}'
client2.name = 'client2'
println(client2)
inspect1 := mycelium.inspect(key_file_path: '/tmp/mycelium_server1/priv_key.bin')!
inspect2 := mycelium.inspect(key_file_path: '/tmp/mycelium_server2/priv_key.bin')!
println('Server 1 public key: ${inspect1.public_key}')
println('Server 2 public key: ${inspect2.public_key}')
// Send a message to a node by public key
// Parameters: public_key, payload, topic, wait_for_reply
msg := client1.send_msg(
public_key: inspect2.public_key // destination public key
payload: 'Sending a message from the client 1 to the client 2' // message payload
topic: 'testing' // optional topic
)!
println('Sent message ID: ${msg.id}')
println('send succeeded')
// Receive messages
// Parameters: wait_for_message, peek_only, topic_filter
received := client2.receive_msg(wait: true, peek: false, topic: 'testing')!
println('Received message from: ${received.src_pk}')
println('Message payload: ${base64.decode_str(received.payload)}')
// Reply to a message
// client1.reply_msg(
// id: received.id
// public_key: received.src_pk
// payload: 'Got your message!'
// topic: 'greetings'
// )!
// // // Check message status
// // status := client.get_msg_status(msg.id)!
// // println('Message status: ${status.state}')
// // println('Created at: ${status.created}')
// // println('Expires at: ${status.deadline}')

43
examples/clients/psql.vsh Executable file
View File

@@ -0,0 +1,43 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.clients.postgresql_client
// Configure PostgreSQL client
heroscript := "
!!postgresql_client.configure
name:'test'
user: 'postgres'
port: 5432
host: 'localhost'
password: '1234'
dbname: 'postgres'
"
// Process the heroscript configuration
postgresql_client.play(heroscript: heroscript)!
// Get the configured client
mut db_client := postgresql_client.get(name: 'test')!
// Check if test database exists, create if not
if !db_client.db_exists('test')! {
println('Creating database test...')
db_client.db_create('test')!
}
// Switch to test database
db_client.dbname = 'test'
// Create table if not exists
create_table_sql := 'CREATE TABLE IF NOT EXISTS users (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL,
email VARCHAR(255) UNIQUE NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)'
println('Creating table users if not exists...')
db_client.exec(create_table_sql)!
println('Database and table setup completed successfully!')
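
// As a possible next step (an illustrative sketch only, not part of the original
// example), reuse db_client.exec from above to insert a sample row:
insert_sql := "INSERT INTO users (name, email)
	VALUES ('Alice', 'alice@example.com')
	ON CONFLICT (email) DO NOTHING"
db_client.exec(insert_sql)!
println('Inserted sample user (if the email was not already present)')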

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.base

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.base

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.develop.gittools

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import freeflowuniverse.herolib.core.codeparser

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
import time
import freeflowuniverse.herolib.core.smartid

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.dbfs
import time

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.generator.installer

View File

@@ -1,7 +1,7 @@
module dagu
// import os
import freeflowuniverse.herolib.clients.httpconnection
import freeflowuniverse.herolib.core.httpconnection
import os
struct GiteaClient[T] {

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import json

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import os

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import os

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.data.paramsparser

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import os

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.crypt.secrets

1
examples/data/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
cache

139
examples/data/cache.vsh Executable file
View File

@@ -0,0 +1,139 @@
#!/usr/bin/env -S v run
// Example struct to cache
import freeflowuniverse.herolib.data.cache
import time
@[heap]
struct User {
id u32
name string
age int
}
fn main() {
// Create a cache with custom configuration
config := cache.CacheConfig{
max_entries: 1000 // Maximum number of entries
max_size_mb: 10.0 // Maximum cache size in MB
ttl_seconds: 300 // Items expire after 5 minutes
eviction_ratio: 0.2 // Evict 20% of entries when full
}
mut user_cache := cache.new_cache[User](config)
// Create some example users
user1 := &User{
id: 1
name: 'Alice'
age: 30
}
user2 := &User{
id: 2
name: 'Bob'
age: 25
}
// Add users to cache
println('Adding users to cache...')
user_cache.set(user1.id, user1)
user_cache.set(user2.id, user2)
// Retrieve users from cache
println('\nRetrieving users from cache:')
if cached_user1 := user_cache.get(1) {
println('Found user 1: ${cached_user1.name}, age ${cached_user1.age}')
}
if cached_user2 := user_cache.get(2) {
println('Found user 2: ${cached_user2.name}, age ${cached_user2.age}')
}
// Try to get non-existent user
println('\nTrying to get non-existent user:')
if user := user_cache.get(999) {
println('Found user: ${user.name}')
} else {
println('User not found in cache')
}
// Demonstrate cache stats
println('\nCache statistics:')
println('Number of entries: ${user_cache.len()}')
// Clear the cache
println('\nClearing cache...')
user_cache.clear()
println('Cache entries after clear: ${user_cache.len()}')
// Demonstrate max entries limit
println('\nDemonstrating max entries limit (adding 2000 entries):')
println('Initial cache size: ${user_cache.len()}')
for i := u32(0); i < 2000; i++ {
user := &User{
id: i
name: 'User${i}'
age: 20 + int(i % 50)
}
user_cache.set(i, user)
if i % 200 == 0 {
println('After adding ${i} entries:')
println(' Cache size: ${user_cache.len()}')
// Check some entries to verify LRU behavior
if i >= 500 {
old_id := if i < 1000 { u32(0) } else { i - 1000 }
recent_id := i - 1
println(' Entry ${old_id} (old): ${if _ := user_cache.get(old_id) {
'found'
} else {
'evicted'
}}')
println(' Entry ${recent_id} (recent): ${if _ := user_cache.get(recent_id) {
'found'
} else {
'evicted'
}}')
}
println('')
}
}
println('Final statistics:')
println('Cache size: ${user_cache.len()} (should be max 1000)')
// Verify we can only access recent entries
println('\nVerifying LRU behavior:')
println('First entry (0): ${if _ := user_cache.get(0) { 'found' } else { 'evicted' }}')
println('Middle entry (1000): ${if _ := user_cache.get(1000) { 'found' } else { 'evicted' }}')
println('Recent entry (1900): ${if _ := user_cache.get(1900) { 'found' } else { 'evicted' }}')
println('Last entry (1999): ${if _ := user_cache.get(1999) { 'found' } else { 'evicted' }}')
// Demonstrate TTL expiration
println('\nDemonstrating TTL expiration:')
quick_config := cache.CacheConfig{
ttl_seconds: 2 // Set short TTL for demo
}
mut quick_cache := cache.new_cache[User](quick_config)
// Add a user
quick_cache.set(user1.id, user1)
println('Added user to cache with 2 second TTL')
if cached := quick_cache.get(user1.id) {
println('User found immediately: ${cached.name}')
}
// Wait for TTL to expire
println('Waiting for TTL to expire...')
time.sleep(3 * time.second)
if _ := quick_cache.get(user1.id) {
println('User still in cache')
} else {
println('User expired from cache as expected')
}
}

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.encoder
import crypto.ed25519

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.crypt.aes_symmetric { decrypt, encrypt }
import freeflowuniverse.herolib.ui.console

175
examples/data/graphdb.vsh Executable file
View File

@@ -0,0 +1,175 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
// Example demonstrating GraphDB usage in a social network context
import freeflowuniverse.herolib.data.graphdb
fn main() {
// Initialize a new graph database with default cache settings
mut gdb := graphdb.new(
path: '/tmp/social_network_example'
reset: true // Start fresh each time
)!
println('=== Social Network Graph Example ===\n')
// 1. Creating User Nodes
println('Creating users...')
mut alice_id := gdb.create_node({
'type': 'user'
'name': 'Alice Chen'
'age': '28'
'location': 'San Francisco'
'occupation': 'Software Engineer'
})!
println('Created user: ${gdb.debug_node(alice_id)!}')
mut bob_id := gdb.create_node({
'type': 'user'
'name': 'Bob Smith'
'age': '32'
'location': 'New York'
'occupation': 'Product Manager'
})!
println('Created user: ${gdb.debug_node(bob_id)!}')
mut carol_id := gdb.create_node({
'type': 'user'
'name': 'Carol Davis'
'age': '27'
'location': 'San Francisco'
'occupation': 'Data Scientist'
})!
println('Created user: ${gdb.debug_node(carol_id)!}')
// 2. Creating Organization Nodes
println('\nCreating organizations...')
mut techcorp_id := gdb.create_node({
'type': 'organization'
'name': 'TechCorp'
'industry': 'Technology'
'location': 'San Francisco'
'size': '500+'
})!
println('Created organization: ${gdb.debug_node(techcorp_id)!}')
mut datacorp_id := gdb.create_node({
'type': 'organization'
'name': 'DataCorp'
'industry': 'Data Analytics'
'location': 'New York'
'size': '100-500'
})!
println('Created organization: ${gdb.debug_node(datacorp_id)!}')
// 3. Creating Interest Nodes
println('\nCreating interest groups...')
mut ai_group_id := gdb.create_node({
'type': 'group'
'name': 'AI Enthusiasts'
'category': 'Technology'
'members': '0'
})!
println('Created group: ${gdb.debug_node(ai_group_id)!}')
// 4. Establishing Relationships
println('\nCreating relationships...')
// Friendship relationships
gdb.create_edge(alice_id, bob_id, 'FRIENDS', {
'since': '2022'
'strength': 'close'
})!
gdb.create_edge(alice_id, carol_id, 'FRIENDS', {
'since': '2023'
'strength': 'close'
})!
// Employment relationships
gdb.create_edge(alice_id, techcorp_id, 'WORKS_AT', {
'role': 'Senior Engineer'
'since': '2021'
'department': 'Engineering'
})!
gdb.create_edge(bob_id, datacorp_id, 'WORKS_AT', {
'role': 'Product Lead'
'since': '2020'
'department': 'Product'
})!
gdb.create_edge(carol_id, techcorp_id, 'WORKS_AT', {
'role': 'Data Scientist'
'since': '2022'
'department': 'Analytics'
})!
// Group memberships
gdb.create_edge(alice_id, ai_group_id, 'MEMBER_OF', {
'joined': '2023'
'status': 'active'
})!
gdb.create_edge(carol_id, ai_group_id, 'MEMBER_OF', {
'joined': '2023'
'status': 'active'
})!
// 5. Querying the Graph
println('\nPerforming queries...')
// Find users in San Francisco
println('\nUsers in San Francisco:')
sf_users := gdb.query_nodes_by_property('location', 'San Francisco')!
for user in sf_users {
if user.properties['type'] == 'user' {
println('- ${user.properties['name']} (${user.properties['occupation']})')
}
}
// Find Alice's friends
println("\nAlice's friends:")
alice_friends := gdb.get_connected_nodes(alice_id, 'FRIENDS', 'out')!
for friend in alice_friends {
println('- ${friend.properties['name']} in ${friend.properties['location']}')
}
// Find where Alice works
println("\nAlice's workplace:")
alice_workplaces := gdb.get_connected_nodes(alice_id, 'WORKS_AT', 'out')!
for workplace in alice_workplaces {
println('- ${workplace.properties['name']} (${workplace.properties['industry']})')
}
// Find TechCorp employees
println('\nTechCorp employees:')
techcorp_employees := gdb.get_connected_nodes(techcorp_id, 'WORKS_AT', 'in')!
for employee in techcorp_employees {
println('- ${employee.properties['name']} as ${employee.properties['occupation']}')
}
// Find AI group members
println('\nAI Enthusiasts group members:')
ai_members := gdb.get_connected_nodes(ai_group_id, 'MEMBER_OF', 'in')!
for member in ai_members {
println('- ${member.properties['name']}')
}
// 6. Updating Data
println('\nUpdating data...')
// Promote Alice
println('\nPromoting Alice...')
mut alice := gdb.get_node(alice_id)!
alice.properties['occupation'] = 'Lead Software Engineer'
gdb.update_node(alice_id, alice.properties)!
// Update Alice's work relationship
mut edges := gdb.get_edges_between(alice_id, techcorp_id)!
if edges.len > 0 {
gdb.update_edge(edges[0].id, {
'role': 'Engineering Team Lead'
'since': '2021'
'department': 'Engineering'
})!
}
println('\nFinal graph structure:')
gdb.print_graph()!
}

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.core.base

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.core.base
import time
struct Person {
mut:
name string
age int = 20
birthday time.Time
}
mut person := Person{
name: 'Bob'
birthday: time.now()
}
heroscript := encoderhero.encode[Person](person)!
println(heroscript)
person2 := encoderhero.decode[Person](heroscript)!
println(person2)
// show that it doesn't matter which actor & action name is used
heroscript2 := "!!a.b name:Bob age:20 birthday:'2025-02-06 09:57:30'"
person3 := encoderhero.decode[Person](heroscript2)!
println(person3)

35
examples/data/jsonexample.vsh Executable file
View File

@@ -0,0 +1,35 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import json
enum JobTitle {
manager
executive
worker
}
struct Employee {
mut:
name string
family string @[json: '-'] // this field will be skipped
age int
salary f32
title JobTitle @[json: 'ETitle'] // the key for this field will be 'ETitle', not 'title'
notes string @[omitempty] // the JSON property is not created if the string is equal to '' (an empty string).
// TODO: document @[raw]
}
x := Employee{'Peter', 'Begins', 28, 95000.5, .worker, ''}
println(x)
s := json.encode(x)
println('JSON encoding of employee x: ${s}')
assert s == '{"name":"Peter","age":28,"salary":95000.5,"ETitle":"worker"}'
mut y := json.decode(Employee, s)!
assert y != x
assert y.family == ''
y.family = 'Begins'
assert y == x
println(y)
ss := json.encode(y)
println('JSON encoding of employee y: ${ss}')
assert ss == s

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env -S v -n -w -cg -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.postgresql_client
import freeflowuniverse.herolib.data.location
// Configure PostgreSQL client
heroscript := "
!!postgresql_client.configure
name:'test'
user: 'postgres'
port: 5432
host: 'localhost'
password: '1234'
dbname: 'postgres'
"
// Process the heroscript configuration
postgresql_client.play(heroscript: heroscript)!
// Get the configured client
mut db_client := postgresql_client.get(name: 'test')!
// Create a new location instance
mut loc := location.new(mut db_client, false) or { panic(err) }
println('Location database initialized')
// Initialize the database (downloads and imports data)
// This only needs to be done once or when updating data
println('Downloading and importing location data (this may take a few minutes)...')
// the arg is if we redownload
loc.download_and_import(false) or { panic(err) }
println('Data import complete')
// // Example 1: Search for a city
// println('\nSearching for London...')
// results := loc.search('London', 'GB', 5, true) or { panic(err) }
// for result in results {
// println('${result.city.name}, ${result.country.name} (${result.country.iso2})')
// println('Coordinates: ${result.city.latitude}, ${result.city.longitude}')
// println('Population: ${result.city.population}')
// println('Timezone: ${result.city.timezone}')
// println('---')
// }
// // Example 2: Search near coordinates (10km radius from London)
// println('\nSearching for cities within 10km of London...')
// nearby := loc.search_near(51.5074, -0.1278, 10.0, 5) or { panic(err) }
// for result in nearby {
// println('${result.city.name}, ${result.country.name}')
// println('Distance from center: Approx ${result.similarity:.1f}km')
// println('---')
// }
// // Example 3: Fuzzy search in a specific country
// println('\nFuzzy searching for "New" in United States...')
// us_cities := loc.search('New', 'US', 5, true) or { panic(err) }
// for result in us_cities {
// println('${result.city.name}, ${result.country.name}')
// println('State: ${result.city.state_name} (${result.city.state_code})')
// println('Population: ${result.city.population}')
// println('---')
// }

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.postgresql_client
import freeflowuniverse.herolib.data.location
// Configure PostgreSQL client
heroscript := "
!!postgresql_client.configure
name:'test'
user: 'postgres'
port: 5432
host: 'localhost'
password: '1234'
dbname: 'postgres'
"
// Process the heroscript configuration
postgresql_client.play(heroscript: heroscript)!
// Get the configured client
mut db_client := postgresql_client.get(name: 'test')!
// Create a new location instance
mut loc := location.new(mut db_client, false) or { panic(err) }
println('Location database initialized')
// Initialize the database (downloads and imports data)
// This only needs to be done once or when updating data
println('Downloading and importing location data (this may take a few minutes)...')
// the arg is if we redownload
loc.download_and_import(false) or { panic(err) }
println('Data import complete')
// // Example 1: Search for a city
// println('\nSearching for London...')
// results := loc.search('London', 'GB', 5, true) or { panic(err) }
// for result in results {
// println('${result.city.name}, ${result.country.name} (${result.country.iso2})')
// println('Coordinates: ${result.city.latitude}, ${result.city.longitude}')
// println('Population: ${result.city.population}')
// println('Timezone: ${result.city.timezone}')
// println('---')
// }
// // Example 2: Search near coordinates (10km radius from London)
// println('\nSearching for cities within 10km of London...')
// nearby := loc.search_near(51.5074, -0.1278, 10.0, 5) or { panic(err) }
// for result in nearby {
// println('${result.city.name}, ${result.country.name}')
// println('Distance from center: Approx ${result.similarity:.1f}km')
// println('---')
// }
// // Example 3: Fuzzy search in a specific country
// println('\nFuzzy searching for "New" in United States...')
// us_cities := loc.search('New', 'US', 5, true) or { panic(err) }
// for result in us_cities {
// println('${result.city.name}, ${result.country.name}')
// println('State: ${result.city.state_name} (${result.city.state_code})')
// println('Population: ${result.city.population}')
// println('---')
// }

40
examples/data/ourdb_example.vsh Executable file
View File

@@ -0,0 +1,40 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.ourdb
const test_dir = '/tmp/ourdb'
mut db := ourdb.new(
record_nr_max:   16777216 - 1 // maximum number of records
record_size_max: 1024         // maximum size of one record
path: test_dir
reset: true
)!
defer {
db.destroy() or { panic('failed to destroy db: ${err}') }
}
// Test set and get
test_data := 'Hello, World!'.bytes()
id := db.set(data: test_data)!
retrieved := db.get(id)!
assert retrieved == test_data
assert id == 0
// Test overwrite
new_data := 'Updated data'.bytes()
id2 := db.set(id: 0, data: new_data)!
assert id2 == 0
// // Verify lookup table has the correct location
// location := db.lookup.get(id2)!
// println('Location after update - file_nr: ${location.file_nr}, position: ${location.position}')
// Get and verify the updated data
retrieved2 := db.get(id2)!
println('Retrieved data: ${retrieved2}')
println('Expected data: ${new_data}')
assert retrieved2 == new_data

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.data.paramsparser

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.paramsparser { Params, parse }
import time

33
examples/data/radixtree.vsh Executable file
View File

@@ -0,0 +1,33 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.radixtree
mut rt := radixtree.new(path: '/tmp/radixtree_test', reset: true)!
// Show initial state
println('\nInitial state:')
rt.debug_db()!
// Test insert
println('\nInserting key "test" with value "value1"')
rt.insert('test', 'value1'.bytes())!
// Show state after insert
println('\nState after insert:')
rt.debug_db()!
// Print tree structure
rt.print_tree()!
// Test search
if value := rt.search('test') {
println('\nFound value: ${value.bytestr()}')
} else {
println('\nError: ${err}')
}
println('\nInserting key "test2" with value "value2"')
rt.insert('test2', 'value2'.bytes())!
// Print tree structure
rt.print_tree()!

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.resp
import crypto.ed25519

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env -S v -n -w -no-retry-compilation -d use_openssl -enable-globals run
//#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
//-parallel-cc
import os
import freeflowuniverse.herolib.develop.gittools
// import freeflowuniverse.herolib.develop.performance
mut silent := false
coderoot := if 'CODEROOT' in os.environ() {
os.environ()['CODEROOT']
} else {
os.join_path(os.home_dir(), 'code')
}
// timer := performance.new('gittools')
mut gs := gittools.get()!
if coderoot.len > 0 {
// is a hack for now
gs = gittools.new(coderoot: coderoot)!
}
mypath := gs.do(
recursive: true
cmd: 'list'
)!
// timer.timeline()

View File

@@ -1,18 +0,0 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal
import time
mut gs_default := gittools.new()!
println(gs_default)
// // Initializes the Git structure with the coderoot path.
// coderoot := '/tmp/code'
// mut gs_tmo := gittools.new(coderoot: coderoot)!
// // Retrieve the specified repository.
// mut repo := gs_default.get_repo(name: 'herolib')!
// println(repo)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal
import time
mut gs := gittools.new()!
mydocs_path := gs.get_path(
pull: true
reset: false
url: 'https://git.ourworld.tf/tfgrid/info_docs_depin/src/branch/main/docs'
)!
println(mydocs_path)

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
// #!/usr/bin/env -S v -n -w -cg -d use_openssl -enable-globals run
//-parallel-cc
import os
import freeflowuniverse.herolib.develop.gittools
mut gs := gittools.get(reload: true)!
gs.repos_print()!

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.ipapi
import os
mut ip_api_client := ipapi.get()!
info := ip_api_client.get_ip_info('37.27.132.46')!
println('info: ${info}')
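get_ip_info takes one address at a time; a trivial sketch of querying several addresses in a loop (the addresses are arbitrary examples):

for addr in ['1.1.1.1', '8.8.8.8'] {
	details := ip_api_client.get_ip_info(addr)!
	println('${addr}: ${details}')
}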

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import freeflowuniverse.herolib.osal

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.sysadmin.startupmanager
import os

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.develop.luadns

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.openai as op

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
// import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.clients.runpod
import json
import x.json2
// Create client with direct API key
// This uses RUNPOD_API_KEY from environment
mut rp := runpod.get()!
// Create a new on-demand pod
on_demand_pod_response := rp.create_on_demand_pod(
name: 'RunPod Tensorflow'
image_name: 'runpod/tensorflow'
cloud_type: 'ALL'
gpu_count: 1
volume_in_gb: 5
container_disk_in_gb: 5
min_memory_in_gb: 4
min_vcpu_count: 1
gpu_type_id: 'NVIDIA RTX A4000'
ports: '8888/http'
volume_mount_path: '/workspace'
env: [
runpod.EnvironmentVariableInput{
key: 'JUPYTER_PASSWORD'
value: 'rn51hunbpgtltcpac3ol'
},
]
)!
println('Created pod with ID: ${on_demand_pod_response.id}')
// create a spot pod
spot_pod_response := rp.create_spot_pod(
port: 1826
bid_per_gpu: 0.2
cloud_type: 'SECURE'
gpu_count: 1
volume_in_gb: 5
container_disk_in_gb: 5
min_vcpu_count: 1
min_memory_in_gb: 4
gpu_type_id: 'NVIDIA RTX A4000'
name: 'RunPod Pytorch'
image_name: 'runpod/pytorch'
docker_args: ''
ports: '8888/http'
volume_mount_path: '/workspace'
env: [
runpod.EnvironmentVariableInput{
key: 'JUPYTER_PASSWORD'
value: 'rn51hunbpgtltcpac3ol'
},
]
)!
println('Created spot pod with ID: ${spot_pod_response.id}')
// stop on-demand pod
stop_on_demand_pod := rp.stop_pod(
pod_id: '${on_demand_pod_response.id}'
)!
println('Stopped on-demand pod with ID: ${stop_on_demand_pod.id}')
// stop spot pod
stop_spot_pod := rp.stop_pod(
pod_id: '${spot_pod_response.id}'
)!
println('Stopped spot pod with ID: ${stop_spot_pod.id}')
// start on-demand pod
start_on_demand_pod := rp.start_on_demand_pod(pod_id: '${on_demand_pod_response.id}', gpu_count: 1)!
println('Started on-demand pod with ID: ${on_demand_pod_response.id}')
// start spot pod
start_spot_pod := rp.start_spot_pod(
pod_id: '${spot_pod_response.id}'
gpu_count: 1
bid_per_gpu: 0.2
)!
println('Started spot pod with ID: ${spot_pod_response.id}')
get_pod := rp.get_pod(
pod_id: '${spot_pod_response.id}'
)!
println('Get pod result: ${get_pod}')
rp.terminate_pod(pod_id: '${spot_pod_response.id}')!
println('pod with id ${spot_pod_response.id} is terminated')
rp.terminate_pod(pod_id: '${on_demand_pod_response.id}')!
println('pod with id ${on_demand_pod_response.id} is terminated')
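A defensive variant worth noting (a sketch, not part of this diff): register cleanup with defer right after the pod is created, mirroring the defer/destroy pattern in the ourdb example above, so the pod is still terminated if a later call fails; the explicit terminate_pod calls at the end would then be dropped:

// registered immediately after create_on_demand_pod succeeds
defer {
	rp.terminate_pod(pod_id: '${on_demand_pod_response.id}') or {
		eprintln('failed to terminate pod ${on_demand_pod_response.id}: ${err}')
	}
}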

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.vastai
import json
import x.json2
// Create client with direct API key
// This uses VASTAI_API_KEY from environment
mut va := vastai.get()!
offers := va.search_offers()!
println('offers: ${offers}')
top_offers := va.get_top_offers(5)!
println('top offers: ${top_offers}')
create_instance_res := va.create_instance(
id: top_offers[0].id
config: vastai.CreateInstanceConfig{
image: 'pytorch/pytorch:2.5.1-cuda12.4-cudnn9-runtime'
disk: 10
}
)!
println('create instance res: ${create_instance_res}')
attach_sshkey_to_instance_res := va.attach_sshkey_to_instance(
id: 1
ssh_key: 'ssh-rsa AAAA...'
)!
println('attach sshkey to instance res: ${attach_sshkey_to_instance_res}')
stop_instance_res := va.stop_instance(
id: 1
state: 'stopped'
)!
println('stop instance res: ${stop_instance_res}')
destroy_instance_res := va.destroy_instance(
id: 1
)!
println('destroy instance res: ${destroy_instance_res}')
// Note: at the time of writing, this call fails server-side with HTTP 500:
// (request failed with code 500: {"error":"server_error","msg":"Something went wrong on the server"})
launch_instance_res := va.launch_instance(
// Required
num_gpus: 1
gpu_name: 'RTX_3090'
image: 'vastai/tensorflow'
disk: 10
region: 'us-west'
// Optional
env: 'user=7amada, home=/home/7amada'
)!
println('launch instance res: ${launch_instance_res}')
start_instances_res := va.start_instances(
ids: [1, 2, 3]
)!
println('start instances res: ${start_instances_res}')
start_instance_res := va.start_instance(
id: 1
)!
println('start instance res: ${start_instance_res}')

View File

@@ -0,0 +1,8 @@
[Interface]
Address = 10.10.3.0/24
PrivateKey = wDewSiri8jlaGnUDN6SwK7QhN082U7gfX27YMGILvVA=
[Peer]
PublicKey = 2JEGJQ8FbajdFk0fFs/881H/D3FRjwlUxvNDZFxDeWQ=
AllowedIPs = 10.10.0.0/16, 100.64.0.0/16
PersistentKeepalive = 25
Endpoint = 185.206.122.31:3241

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.wireguard
import freeflowuniverse.herolib.installers.net.wireguard as wireguard_installer
import time
import os
mut wg_installer := wireguard_installer.get()!
wg_installer.install()!
// Create Wireguard client
mut wg := wireguard.get()!
config_file_path := '${os.dir(@FILE)}/wg0.conf'
wg.start(config_file_path: config_file_path)!
println('${config_file_path} is started')
time.sleep(time.second * 2)
info := wg.show()!
println('info: ${info}')
config := wg.show_config(interface_name: 'wg0')!
println('config: ${config}')
private_key := wg.generate_private_key()!
println('private_key: ${private_key}')
public_key := wg.get_public_key(private_key: private_key)!
println('public_key: ${public_key}')
wg.down(config_file_path: config_file_path)!
println('${config_file_path} is down')
wg_installer.destroy()!
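The generated keys map directly onto the wg0.conf fields shown earlier; a sketch of writing a minimal [Interface] section from them (the address is a placeholder, and public_key is what the remote side would put in its own [Peer] section for this node):

my_conf := '[Interface]
Address = 10.10.4.1/24
PrivateKey = ${private_key}
'
os.write_file('${os.dir(@FILE)}/wg1.conf', my_conf)!
println('wrote wg1.conf; share ${public_key} with the remote peer')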

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.hero.bootstrap

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.hero.generation

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.hero.generation

View File

@@ -1 +1 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

View File

@@ -1 +1 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import example_actor

View File

@@ -1 +1 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.sysadmintools.actrunner
import freeflowuniverse.herolib.installers.virt.herocontainers

11
examples/installers/buildah.vsh Executable file
View File

@@ -0,0 +1,11 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.virt.buildah as buildah_installer
mut buildah := buildah_installer.get()!
// To install
buildah.install()!
// To remove
buildah.destroy()!

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.fediverse.conduit

View File

@@ -1,5 +1,8 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.infra.coredns as coredns_installer
import freeflowuniverse.herolib.osal
coredns_installer.install()!
// coredns_installer.delete()!
mut installer := coredns_installer.get()!
installer.build()!

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.sysadmintools.daguserver
import freeflowuniverse.herolib.installers.infra.zinit

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.sysadmintools.daguserver

11
examples/installers/docker.vsh Executable file
View File

@@ -0,0 +1,11 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.virt.docker as docker_installer
mut docker := docker_installer.get()!
// To install
docker.install()!
// To remove
docker.destroy()!

View File

@@ -1,34 +1,16 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.infra.gitea as gitea_installer
// First of all, we need to set the gitea configuration
// heroscript := "
// !!gitea.configure
// name:'default'
// version:'1.22.6'
// path: '/var/lib/git'
// passwd: '12345678'
// postgresql_name: 'default'
// mail_from: 'git@meet.tf'
// smtp_addr: 'smtp-relay.brevo.com'
// smtp_login: 'admin'
// smtp_port: 587
// smtp_passwd: '12345678'
// domain: 'meet.tf'
// jwt_secret: ''
// lfs_jwt_secret: ''
// internal_token: ''
// secret_key: ''
// "
mut installer := gitea_installer.get(name: 'test')!
// gitea_installer.play(
// name: 'default'
// heroscript: heroscript
// )!
// If you want to configure using heroscript:
gitea_installer.play(
heroscript: "
!!gitea.configure name:test
passwd:'something'
domain: 'docs.info.com'
"
)!
// Then we get an instance of the installer and call install()
mut gitea := gitea_installer.get()!
// println('gitea configs: ${gitea}')
gitea.install()!
gitea.start()!
installer.start()!

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.threefold.griddriver

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.lang.vlang
import freeflowuniverse.herolib.installers.sysadmintools.daguserver

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.installers.lang.golang

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.lang.rust
import freeflowuniverse.herolib.installers.lang.python

View File

@@ -1,5 +1,40 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.net.mycelium as mycelium_installer
import freeflowuniverse.herolib.clients.mycelium
mycelium_installer.start()!
mut installer := mycelium_installer.get()!
installer.start()!
mut r := mycelium.inspect()!
println(r)
mut client := mycelium.get()!
// Send a message to a node by public key
// Parameters: public_key, payload, topic, wait_for_reply
msg := client.send_msg('abc123...', // destination public key
'Hello World', // message payload
'greetings', // optional topic
true // wait for reply
)!
println('Sent message ID: ${msg.id}')
// Receive messages
// Parameters: wait_for_message, peek_only, topic_filter
received := client.receive_msg(true, false, 'greetings')!
println('Received message from: ${received.src_pk}')
println('Message payload: ${received.payload}')
// Reply to a message
client.reply_msg(received.id, // original message ID
received.src_pk, // sender's public key
'Got your message!', // reply payload
'greetings' // topic
)!
// Check message status
status := client.get_msg_status(msg.id)!
println('Message status: ${status.state}')
println('Created at: ${status.created}')
println('Expires at: ${status.deadline}')
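The parameter comment above also names a peek mode that the example never exercises; a small sketch using the same receive_msg(wait_for_message, peek_only, topic_filter) signature, assuming it returns an error when nothing is pending:

// non-blocking peek: do not wait, do not consume, same topic filter
if pending := client.receive_msg(false, true, 'greetings') {
	println('Pending message from ${pending.src_pk}: ${pending.payload}')
} else {
	println('No pending message: ${err}')
}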

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.virt.podman as podman_installer

Some files were not shown because too many files have changed in this diff.