Compare commits

151 Commits

| SHA1 |
|---|
| 483b6e3de3 |
| f769c34466 |
| c0242a0729 |
| df452ce258 |
| 7de290ae55 |
| fe161c5bea |
| fca7c7364a |
| ef705d1be0 |
| 3154733be1 |
| b285e85eb5 |
| 89b7f0d465 |
| 256d4e9bca |
| 54f4e83627 |
| f7a770989b |
| c5759ea30e |
| aef9c84eb5 |
| d0baac83a9 |
| b6a2671665 |
| a96ae1252c |
| ac4db0f789 |
| 37f9ab78ec |
| 9b3ac150bd |
| dd577d51b9 |
| 92f9714229 |
| 632a1c11c2 |
| 63d41352bc |
| da8eef3711 |
| f0a4732206 |
| 1f053edefc |
| f93db1d23c |
| 105611bbfb |
| 4977c6de30 |
| eb956bca3d |
| 5e511367c3 |
| 484bfe393e |
| a1404584d6 |
| 3ef1698c2c |
| a7fb704627 |
| 91ba6001cb |
| 345a79d8ff |
| 15d886e5e9 |
| d6224d1e60 |
| 83fb647ac3 |
| b410544ee1 |
| 2d5d1befae |
| fd8b8c8f42 |
| 8ffb8c8caf |
| b8b339b85c |
| 0789a38ea9 |
| 995d3c3f6d |
| 822b179ef4 |
| 4691971bd0 |
| 9226e8b490 |
| b7fc7734b6 |
| 8749e3a8cb |
| 61f9f2868a |
| 97dfcbeb51 |
| 238fabbcb2 |
| 49542b4bff |
| 46898112f5 |
| f9bdb22c67 |
| cb664b2115 |
| 761b9e031e |
| 0d8d11fe26 |
| 2d5fbd3337 |
| cd3c98280e |
| 39c6c37dee |
| 3438f74e60 |
| 4f79712570 |
| 8e85ce0678 |
| ff09e7bf1b |
| 46e1c6706c |
| d8a59d0726 |
| 108d2019cd |
| 3682ef2420 |
| a066db6624 |
| 7458d64c05 |
| 2a1787f28f |
| de78c229ce |
| f386c67acf |
| 75f98bf349 |
| 9870fcbc5d |
| d2b8379505 |
| 2dcb97255c |
| f7dd227cd0 |
| e2c18c3a24 |
| 1bc6c6eab8 |
| 4b39b137de |
| e5de293919 |
| 8a10374570 |
| ad37a041ab |
| 44daea4447 |
| 6989a4da13 |
| de4583691c |
| d8c9b07a51 |
| 54d31f40b2 |
| ec73b5ff34 |
| 9fcdcc3aff |
| 05ab2b68f4 |
| 79330ef8f5 |
| 45ed369a78 |
| 37c17fc7da |
| 23640d2647 |
| be54ec8302 |
| 638f81a781 |
| e00306b6f8 |
| 3fec1c38a1 |
| edc9e3c150 |
| a155122898 |
| f0552f38a0 |
| f99419371a |
| 86d47c218b |
| bd5cafbad7 |
| b71362eb9a |
| 673d982360 |
| 712b46864a |
| 186c3aae59 |
| 0794fe948b |
| ba2d6e4310 |
| b9e5d14b48 |
| bf26b0af1d |
| 8e82b2865b |
| 367340d69d |
| 0b77c73809 |
| 51b432d911 |
| f7a679b2a3 |
| 15c9d60760 |
| c69b53fd4e |
| 9cdab1f392 |
| 34656cf1f9 |
| cf98822749 |
| 2335d14623 |
| 4a72698402 |
| fcb857f756 |
| ba07f85fd8 |
| 7b621243d0 |
| 598b312140 |
| 0df10f5cb3 |
| 2c748a9fc8 |
| a2e1b4fb27 |
| 9b0da9f245 |
| 5b9426ba11 |
| 228abe36a3 |
| c3fe788a5b |
| 025e8fba69 |
| 59a0519b4e |
| dfcaeec85f |
| e374520654 |
| 54dc3d3f1f |
| 834c413cfc |
| da9965bdc6 |
.gitignore (vendored, 4 changes)

```diff
@@ -39,8 +39,12 @@ data.ms/
 test_basic
 cli/hero
 .aider*
+storage/
+.qdrant-initialized
 .compile_cache
 compile_results.log
 tmp
 compile_summary.log
 .summary_lock
+.aider*
+*.dylib
```
.roo/mcp.json (new file, 3 lines)

```diff
@@ -0,0 +1,3 @@
+{
+  "mcpServers": {}
+}
```
aiprompts/ai_core/redis.md (new file, empty)
cfg/config.heroscript (new file, 77 lines)

```diff
@@ -0,0 +1,77 @@
+!!docusaurus.config
+    name:"my-documentation"
+    title:"My Documentation Site"
+    tagline:"Documentation made simple with V and Docusaurus"
+    url:"https://docs.example.com"
+    url_home:"docs/"
+    base_url:"/"
+    favicon:"img/favicon.png"
+    image:"img/hero.png"
+    copyright:"© 2025 Example Organization"
+
+!!docusaurus.config_meta
+    description:"Comprehensive documentation for our amazing project"
+    image:"https://docs.example.com/img/social-card.png"
+    title:"My Documentation | Official Docs"
+
+!!docusaurus.ssh_connection
+    name:"production"
+    host:"example.com"
+    login:"deploy"
+    port:22
+    key_path:"~/.ssh/id_rsa"
+
+!!docusaurus.build_dest
+    ssh_name:"production"
+    path:"/var/www/docs"
+
+!!docusaurus.navbar
+    title:"My Project"
+
+!!docusaurus.navbar_item
+    label:"Documentation"
+    href:"/docs"
+    position:"left"
+
+!!docusaurus.navbar_item
+    label:"API"
+    href:"/api"
+    position:"left"
+
+!!docusaurus.navbar_item
+    label:"GitHub"
+    href:"https://github.com/example/repo"
+    position:"right"
+
+!!docusaurus.footer
+    style:"dark"
+
+!!docusaurus.footer_item
+    title:"Documentation"
+    label:"Introduction"
+    to:"/docs"
+
+!!docusaurus.footer_item
+    title:"Documentation"
+    label:"API Reference"
+    to:"/api"
+
+!!docusaurus.footer_item
+    title:"Community"
+    label:"GitHub"
+    href:"https://github.com/example/repo"
+
+!!docusaurus.footer_item
+    title:"Community"
+    label:"Discord"
+    href:"https://discord.gg/example"
+
+!!docusaurus.footer_item
+    title:"More"
+    label:"Blog"
+    href:"https://blog.example.com"
+
+!!docusaurus.import_source
+    url:"https://github.com/example/external-docs"
+    dest:"external"
+    replace:"PROJECT_NAME:My Project, VERSION:1.0.0"
```
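The file above is plain heroscript: each `!!docusaurus.*` line opens an action, and the `key:"value"` lines under it are that action's parameters. As a minimal sketch, here is how such a file could be consumed with herolib's playbook module, modeled on the `play_commerce` handler further down in this changeset; the `docusaurus.` filter and the parameter lookups are assumptions for illustration, not an API confirmed by this diff:

```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.core.playbook

// Parse the heroscript file into a playbook of actions.
mut plbook := playbook.new(path: 'cfg/config.heroscript')!

// Select every action in the docusaurus namespace (hypothetical filter).
actions := plbook.find(filter: 'docusaurus.')!
for action in actions {
    mut p := action.params
    if action.name == 'config' {
        // Read parameters of the !!docusaurus.config action shown above.
        println('site: ${p.get('name')!} at ${p.get('url')!}')
    }
}
```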
```diff
@@ -28,7 +28,7 @@ fn get_platform_id() string {
 }
 
 fn read_secrets() ! {
-    secret_file := os.join_path(os.home_dir(), 'code/git.ourworld.tf/despiegk/hero_secrets/mysecrets.sh')
+    secret_file := os.join_path(os.home_dir(), 'code/git.threefold.info/despiegk/hero_secrets/mysecrets.sh')
     if os.exists(secret_file) {
         println('Reading secrets from ${secret_file}')
         content := os.read_file(secret_file)!
```
cli/hero.v (19 changes)

```diff
@@ -19,25 +19,24 @@ fn playcmds_do(path string) ! {
 }
 
 fn do() ! {
-    if ! core.is_osx()! {
-
+    if !core.is_osx()! {
         if os.getenv('SUDO_COMMAND') != '' || os.getenv('SUDO_USER') != '' {
             println('Error: Please do not run this program with sudo!')
             exit(1) // Exit with error code
         }
     }
 
     if os.getuid() == 0 {
         if core.is_osx()! {
-            eprintln("please do not run hero as root in osx.")
+            eprintln('please do not run hero as root in osx.')
             exit(1)
         }
     } else {
-        if ! core.is_osx()! {
+        if !core.is_osx()! {
             eprintln("please do run hero as root, don't use sudo.")
             exit(1)
         }
     }
 
     if os.args.len == 2 {
         mypath := os.args[1]
@@ -51,7 +50,7 @@ fn do() ! {
     mut cmd := Command{
         name: 'hero'
         description: 'Your HERO toolset.'
-        version: '1.0.22'
+        version: '1.0.26'
     }
 
     // herocmds.cmd_run_add_flags(mut cmd)
```
```diff
@@ -73,9 +73,9 @@ function sshknownkeysadd {
     then
         ssh-keyscan github.com >> ~/.ssh/known_hosts
     fi
-    if ! grep git.ourworld.tf ~/.ssh/known_hosts > /dev/null
+    if ! grep git.threefold.info ~/.ssh/known_hosts > /dev/null
     then
-        ssh-keyscan git.ourworld.tf >> ~/.ssh/known_hosts
+        ssh-keyscan git.threefold.info >> ~/.ssh/known_hosts
     fi
     git config --global pull.rebase false
 
```
examples/aiexamples/groq.vsh (new executable file, 71 lines)

```diff
@@ -0,0 +1,71 @@
+#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
+
+module main
+
+import freeflowuniverse.herolib.clients.openai
+import os
+
+fn test1(mut client openai.OpenAI) ! {
+    instruction := '
+    You are a template language converter. You convert Pug templates to Jet templates.
+
+    The target template language, Jet, is defined as follows:
+    '
+
+    // Create a chat completion request
+    res := client.chat_completion(
+        msgs: openai.Messages{
+            messages: [
+                openai.Message{
+                    role: .user
+                    content: 'What are the key differences between Groq and other AI inference providers?'
+                },
+            ]
+        }
+    )!
+
+    // Print the response
+    println('\nGroq AI Response:')
+    println('==================')
+    println(res.choices[0].message.content)
+    println('\nUsage Statistics:')
+    println('Prompt tokens: ${res.usage.prompt_tokens}')
+    println('Completion tokens: ${res.usage.completion_tokens}')
+    println('Total tokens: ${res.usage.total_tokens}')
+}
+
+fn test2(mut client openai.OpenAI) ! {
+    // Create a chat completion request
+    res := client.chat_completion(
+        model: 'deepseek-r1-distill-llama-70b'
+        msgs: openai.Messages{
+            messages: [
+                openai.Message{
+                    role: .user
+                    content: 'A story of 10 lines?'
+                },
+            ]
+        }
+    )!
+
+    println('\nGroq AI Response:')
+    println('==================')
+    println(res.choices[0].message.content)
+    println('\nUsage Statistics:')
+    println('Prompt tokens: ${res.usage.prompt_tokens}')
+    println('Completion tokens: ${res.usage.completion_tokens}')
+    println('Total tokens: ${res.usage.total_tokens}')
+}
+
+println("
+TO USE:
+export AIKEY='gsk_...'
+export AIURL='https://api.groq.com/openai/v1'
+export AIMODEL='llama-3.3-70b-versatile'
+")
+
+mut client := openai.get(name: 'test')!
+println(client)
+
+// test1(mut client)!
+test2(mut client)!
```
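As the `TO USE` banner printed by this script indicates, the client is configured entirely through environment variables. A condensed sketch of the same call pattern, assuming (as the script does) that `openai.get` picks up `AIKEY`, `AIURL`, and `AIMODEL` from the environment:

```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.clients.openai
import os

// Fail early with a clear message instead of erroring mid-request.
if os.getenv('AIKEY') == '' {
    eprintln("AIKEY is not set; export AIKEY/AIURL/AIMODEL as shown in the script's banner.")
    exit(1)
}

mut client := openai.get(name: 'test')!
res := client.chat_completion(
    msgs: openai.Messages{
        messages: [
            openai.Message{
                role: .user
                content: 'Reply with a single word: ping'
            },
        ]
    }
)!
println(res.choices[0].message.content)
```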
examples/aiexamples/jetconvertor.vsh (new executable file, 7 lines)

```diff
@@ -0,0 +1,7 @@
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
+
+import freeflowuniverse.herolib.mcp.aitools
+
+// aitools.convert_pug("/root/code/github/freeflowuniverse/herolauncher/pkg/herolauncher/web/templates/admin")!
+
+aitools.convert_pug('/root/code/github/freeflowuniverse/herolauncher/pkg/zaz/webui/templates')!
```
```diff
@@ -1,69 +0,0 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
-
-import freeflowuniverse.herolib.clients.jina
-import freeflowuniverse.herolib.osal
-import os
-
-// Example of using the Jina client
-
-fn main() {
-    // Set environment variable for testing
-    // In production, you would set this in your environment
-    // osal.env_set(key: 'JINAKEY', value: 'your-api-key')
-
-    // Check if JINAKEY environment variable exists
-    if !osal.env_exists('JINAKEY') {
-        println('JINAKEY environment variable not set. Please set it before running this example.')
-        exit(1)
-    }
-
-    // Create a Jina client instance
-    mut client := jina.get(name: 'default')!
-
-    println('Jina client initialized successfully.')
-
-    // Example: Create embeddings
-    model := 'jina-embeddings-v3'
-    texts := ['Hello, world!', 'How are you doing?']
-
-    println('Creating embeddings for texts: ${texts}')
-    result := client.create_embeddings(texts, model, 'retrieval.query')!
-
-    println('Embeddings created successfully.')
-    println('Model: ${result['model']}')
-    println('Data count: ${result['data'].arr().len}')
-
-    // Example: List classifiers
-    println('\nListing classifiers:')
-    classifiers := client.list_classifiers() or {
-        println('Failed to list classifiers: ${err}')
-        return
-    }
-
-    println('Classifiers retrieved successfully.')
-
-    // Example: Create a classifier
-    println('\nTraining a classifier:')
-    examples := [
-        jina.TrainingExample{
-            text: 'This movie was great!'
-            label: 'positive'
-        },
-        jina.TrainingExample{
-            text: 'I did not like this movie.'
-            label: 'negative'
-        },
-        jina.TrainingExample{
-            text: 'The movie was okay.'
-            label: 'neutral'
-        }
-    ]
-
-    training_result := client.train(examples, model, 'private') or {
-        println('Failed to train classifier: ${err}')
-        return
-    }
-
-    println('Classifier trained successfully.')
-    println('Classifier ID: ${training_result['classifier_id']}')
-}
```
examples/aiexamples/qdrant.vsh (new executable file, 128 lines)

```diff
@@ -0,0 +1,128 @@
+#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
+
+import freeflowuniverse.herolib.clients.qdrant
+import freeflowuniverse.herolib.installers.db.qdrant_installer
+import freeflowuniverse.herolib.core.httpconnection
+import rand
+import os
+
+println('Starting Qdrant example script')
+
+// Print environment information
+println('Current directory: ${os.getwd()}')
+println('Home directory: ${os.home_dir()}')
+
+mut i := qdrant_installer.get()!
+i.install()!
+
+// 1. Get the qdrant client
+println('Getting Qdrant client...')
+mut qdrant_client := qdrant.get()!
+println('Qdrant client URL: ${qdrant_client.url}')
+
+// Check if Qdrant server is running
+println('Checking Qdrant server health...')
+health := qdrant_client.health_check() or {
+    println('Error checking health: ${err}')
+    false
+}
+println('Qdrant server health: ${health}')
+
+// Get service info
+println('Getting Qdrant service info...')
+service_info := qdrant_client.get_service_info() or {
+    println('Error getting service info: ${err}')
+    exit(1)
+}
+println('Qdrant service info: ${service_info}')
+
+// 2. Generate collection name
+collection_name := 'collection_' + rand.string(4)
+println('Generated collection name: ${collection_name}')
+
+// 3. Create a new collection
+println('Creating collection...')
+created_collection := qdrant_client.create_collection(
+    collection_name: collection_name
+    size: 15
+    distance: 'Cosine'
+) or {
+    println('Error creating collection: ${err}')
+    exit(1)
+}
+println('Created Collection: ${created_collection}')
+
+// 4. Get the created collection
+println('Getting collection...')
+get_collection := qdrant_client.get_collection(
+    collection_name: collection_name
+) or {
+    println('Error getting collection: ${err}')
+    exit(1)
+}
+println('Get Collection: ${get_collection}')
+
+// 5. List all collections
+println('Listing collections...')
+list_collection := qdrant_client.list_collections() or {
+    println('Error listing collections: ${err}')
+    exit(1)
+}
+println('List Collection: ${list_collection}')
+
+// 6. Check collection existence
+println('Checking collection existence...')
+collection_existence := qdrant_client.is_collection_exists(
+    collection_name: collection_name
+) or {
+    println('Error checking collection existence: ${err}')
+    exit(1)
+}
+println('Collection Existence: ${collection_existence}')
+
+// 7. Retrieve points
+println('Retrieving points...')
+collection_points := qdrant_client.retrieve_points(
+    collection_name: collection_name
+    ids: [
+        0,
+        3,
+        100,
+    ]
+) or {
+    println('Error retrieving points: ${err}')
+    exit(1)
+}
+println('Collection Points: ${collection_points}')
+
+// 8. Upsert points
+println('Upserting points...')
+upsert_points := qdrant_client.upsert_points(
+    collection_name: collection_name
+    points: [
+        qdrant.Point{
+            payload: {
+                'key': 'value'
+            }
+            vector: [1.0, 2.0, 3.0]
+        },
+        qdrant.Point{
+            payload: {
+                'key': 'value'
+            }
+            vector: [4.0, 5.0, 6.0]
+        },
+        qdrant.Point{
+            payload: {
+                'key': 'value'
+            }
+            vector: [7.0, 8.0, 9.0]
+        },
+    ]
+) or {
+    println('Error upserting points: ${err}')
+    exit(1)
+}
+println('Upsert Points: ${upsert_points}')
+
+println('Qdrant example script completed successfully')
```
```diff
@@ -12,12 +12,11 @@ const openrpc_spec_path = os.join_path(example_dir, 'openrpc.json')
 openrpc_spec := openrpc.new(path: openrpc_spec_path)!
 actor_spec := specification.from_openrpc(openrpc_spec)!
 
-actor_module := generator.generate_actor_module(
-    actor_spec,
+actor_module := generator.generate_actor_module(actor_spec,
     interfaces: [.openrpc]
 )!
 
 actor_module.write(example_dir,
     format: true
     overwrite: true
 )!
```
```diff
@@ -14,6 +14,6 @@ actor_spec := specification.from_openrpc(openrpc_spec)!
 
 methods_file := generator.generate_methods_file(actor_spec)!
 methods_file.write(example_dir,
     format: true
     overwrite: true
 )!
```
```diff
@@ -14,6 +14,6 @@ actor_spec := specification.from_openrpc(openrpc_spec_)!
 openrpc_spec := actor_spec.to_openrpc()
 
 openrpc_file := generator.generate_openrpc_file(openrpc_spec)!
-openrpc_file.write(os.join_path(example_dir,'docs'),
+openrpc_file.write(os.join_path(example_dir, 'docs'),
     overwrite: true
 )!
```
```diff
@@ -5,7 +5,6 @@ import freeflowuniverse.herolib.baobab.specification
 import freeflowuniverse.herolib.schemas.openapi
 import os
 
-
 const example_dir = os.dir(@FILE)
 const specs = ['merchant', 'profiler', 'farmer']
 
@@ -13,13 +12,12 @@ for spec in specs {
     openapi_spec_path := os.join_path(example_dir, '${spec}.json')
     openapi_spec := openapi.new(path: openapi_spec_path, process: true)!
     actor_spec := specification.from_openapi(openapi_spec)!
-    actor_module := generator.generate_actor_folder(
-        actor_spec,
+    actor_module := generator.generate_actor_folder(actor_spec,
         interfaces: [.openapi, .http]
     )!
     actor_module.write(example_dir,
         format: true
         overwrite: true
         compile: false
     )!
 }
```
```diff
@@ -15,67 +15,67 @@ pub:
     name string
     description string
     // technical specifications
     specs map[string]string
     // price per unit
     price f64
     // currency code (e.g., 'USD', 'EUR')
     currency string
 }
 
 pub struct ProductTemplate {
 pub:
     id string
     name string
     description string
     // components that make up this product template
     components []ProductComponentTemplate
     // merchant who created this template
     merchant_id string
     // category of the product (e.g., 'electronics', 'clothing')
     category string
     // whether this template is available for use
     active bool
 }
 
 pub struct Product {
 pub:
     id string
     template_id string
     // specific instance details that may differ from template
     name string
     description string
     // actual price of this product instance
     price f64
     currency string
     // merchant selling this product
     merchant_id string
     // current stock level
     stock_quantity int
     // whether this product is available for purchase
     available bool
 }
 
 pub struct OrderItem {
 pub:
     product_id string
     quantity int
     price f64
     currency string
 }
 
 pub struct Order {
 pub:
     id string
     // customer identifier
     customer_id string
     // items in the order
     items []OrderItem
     // total order amount
     total_amount f64
     currency string
     // order status (e.g., 'pending', 'confirmed', 'shipped', 'delivered')
     status string
     // timestamps
     created_at string
     updated_at string
 }
```
```diff
@@ -3,7 +3,7 @@ module geomind_poc
 import freeflowuniverse.crystallib.core.playbook { PlayBook }
 
 // play_commerce processes heroscript actions for the commerce system
-pub fn play_commerce(mut plbook playbook.PlayBook) ! {
+pub fn play_commerce(mut plbook PlayBook) ! {
     commerce_actions := plbook.find(filter: 'commerce.')!
     mut c := Commerce{}
 
```
```diff
@@ -12,20 +12,20 @@ pub fn play_commerce(mut plbook playbook.PlayBook) ! {
             'merchant' {
                 mut p := action.params
                 merchant := c.create_merchant(
-                    name: p.get('name')!,
-                    description: p.get_default('description', '')!,
+                    name: p.get('name')!
+                    description: p.get_default('description', '')!
                     contact: p.get('contact')!
                 )!
                 println('Created merchant: ${merchant.name}')
             }
             'component' {
                 mut p := action.params
                 component := c.create_product_component_template(
-                    name: p.get('name')!,
-                    description: p.get_default('description', '')!,
-                    specs: p.get_map(),
-                    price: p.get_float('price')!,
+                    name: p.get('name')!
+                    description: p.get_default('description', '')!
+                    specs: p.get_map()
+                    price: p.get_float('price')!
                     currency: p.get('currency')!
                 )!
                 println('Created component: ${component.name}')
             }
```
```diff
@@ -39,30 +39,30 @@ pub fn play_commerce(mut plbook playbook.PlayBook) ! {
                     // In a real implementation, you would fetch the component from storage
                     // For this example, we create a dummy component
                     component := ProductComponentTemplate{
                         id: id
                         name: 'Component'
                         description: ''
                         specs: map[string]string{}
                         price: 0
                         currency: 'USD'
                     }
                     components << component
                 }
 
                 template := c.create_product_template(
-                    name: p.get('name')!,
-                    description: p.get_default('description', '')!,
-                    components: components,
-                    merchant_id: p.get('merchant_id')!,
+                    name: p.get('name')!
+                    description: p.get_default('description', '')!
+                    components: components
+                    merchant_id: p.get('merchant_id')!
                     category: p.get_default('category', 'General')!
                 )!
                 println('Created template: ${template.name}')
             }
             'product' {
                 mut p := action.params
                 product := c.create_product(
-                    template_id: p.get('template_id')!,
-                    merchant_id: p.get('merchant_id')!,
+                    template_id: p.get('template_id')!
+                    merchant_id: p.get('merchant_id')!
                     stock_quantity: p.get_int('stock_quantity')!
                 )!
                 println('Created product: ${product.name} with stock: ${product.stock_quantity}')
```
```diff
@@ -80,23 +80,23 @@ pub fn play_commerce(mut plbook playbook.PlayBook) ! {
                     }
                     item := OrderItem{
                         product_id: parts[0]
                         quantity: parts[1].int()
                         price: parts[2].f64()
                         currency: parts[3]
                     }
                     items << item
                 }
 
                 order := c.create_order(
-                    customer_id: p.get('customer_id')!,
+                    customer_id: p.get('customer_id')!
                     items: items
                 )!
                 println('Created order: ${order.id} with ${order.items.len} items')
             }
             'update_order' {
                 mut p := action.params
                 order := c.update_order_status(
-                    order_id: p.get('order_id')!,
+                    order_id: p.get('order_id')!
                     new_status: p.get('status')!
                 )!
                 println('Updated order ${order.id} status to: ${order.status}')
```
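The handler above is driven by a playbook. A minimal sketch of wiring it up from inside the same module, following the `playbook.new(...)` then `play(...)` pattern the investorstool example uses later in this diff; the heroscript path and the `!!commerce.*` action contents are placeholders:

```v
import freeflowuniverse.crystallib.core.playbook

// Parse a heroscript file containing !!commerce.* actions, e.g.
//   !!commerce.merchant name:'shop1' contact:'ops@example.com'
mut plbook := playbook.new(path: '/tmp/commerce.heroscript')!

// Dispatch every commerce.* action to play_commerce above.
play_commerce(mut plbook)!
```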
```diff
@@ -1,14 +1,15 @@
 module geomind_poc
 
 import crypto.rand
+import time
 
 // Commerce represents the main e-commerce server handling all operations
 pub struct Commerce {
 mut:
     merchants map[string]Merchant
     templates map[string]ProductTemplate
     products map[string]Product
     orders map[string]Order
 }
 
 // generate_id creates a unique identifier
```
```diff
@@ -20,11 +21,11 @@ fn generate_id() string {
 pub fn (mut c Commerce) create_merchant(name string, description string, contact string) !Merchant {
     merchant_id := generate_id()
     merchant := Merchant{
         id: merchant_id
         name: name
         description: description
         contact: contact
         active: true
     }
     c.merchants[merchant_id] = merchant
     return merchant
```
```diff
@@ -33,12 +34,12 @@ pub fn (mut c Commerce) create_merchant(name string, description string, contact
 // create_product_component_template creates a new component template
 pub fn (mut c Commerce) create_product_component_template(name string, description string, specs map[string]string, price f64, currency string) !ProductComponentTemplate {
     component := ProductComponentTemplate{
         id: generate_id()
         name: name
         description: description
         specs: specs
         price: price
         currency: currency
     }
     return component
 }
```
```diff
@@ -50,13 +51,13 @@ pub fn (mut c Commerce) create_product_template(name string, description string,
     }
 
     template := ProductTemplate{
         id: generate_id()
         name: name
         description: description
         components: components
         merchant_id: merchant_id
         category: category
         active: true
     }
     c.templates[template.id] = template
     return template
```
```diff
@@ -78,15 +79,15 @@ pub fn (mut c Commerce) create_product(template_id string, merchant_id string, s
     }
 
     product := Product{
         id: generate_id()
         template_id: template_id
         name: template.name
         description: template.description
         price: total_price
         currency: template.components[0].currency // assuming all components use same currency
         merchant_id: merchant_id
         stock_quantity: stock_quantity
         available: true
     }
     c.products[product.id] = product
     return product
```
```diff
@@ -114,14 +115,14 @@ pub fn (mut c Commerce) create_order(customer_id string, items []OrderItem) !Ord
     }
 
     order := Order{
         id: generate_id()
         customer_id: customer_id
         items: items
         total_amount: total_amount
         currency: currency
         status: 'pending'
         created_at: time.now().str()
         updated_at: time.now().str()
     }
     c.orders[order.id] = order
 
```
```diff
@@ -5,20 +5,21 @@ import freeflowuniverse.herolib.baobab.specification
 import freeflowuniverse.herolib.schemas.openapi
 import os
 
-const example_dir = os.join_path('${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/circles/mcc', 'baobab')
-const openapi_spec_path = os.join_path('${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/circles/mcc', 'openapi.json')
+const example_dir = os.join_path('${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/circles/mcc',
+    'baobab')
+const openapi_spec_path = os.join_path('${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/circles/mcc',
+    'openapi.json')
 
 // the actor specification obtained from the OpenRPC Specification
 openapi_spec := openapi.new(path: openapi_spec_path)!
 actor_spec := specification.from_openapi(openapi_spec)!
 
-actor_module := generator.generate_actor_module(
-    actor_spec,
+actor_module := generator.generate_actor_module(actor_spec,
     interfaces: [.openapi, .http]
 )!
 
 actor_module.write(example_dir,
     format: true
     overwrite: true
     compile: false
 )!
```
```diff
@@ -14,15 +14,14 @@ actor_spec := specification.from_openapi(openapi_spec)!
 
 println(actor_spec)
 
-actor_module := generator.generate_actor_module(
-    actor_spec,
+actor_module := generator.generate_actor_module(actor_spec,
     interfaces: [.openapi, .http]
 )!
 
 actor_module.write(example_dir,
     format: false
     overwrite: true
     compile: false
 )!
 
 // os.execvp('bash', ['${example_dir}/meeting_scheduler_actor/scripts/run.sh'])!
```
```diff
@@ -7,100 +7,100 @@ import freeflowuniverse.herolib.schemas.openrpc
 import os
 
 const actor_specification = specification.ActorSpecification{
     name: 'PetStore'
     interfaces: [.openrpc]
     methods: [
         specification.ActorMethod{
             name: 'GetPets'
             description: 'finds pets in the system that the user has access to by tags and within a limit'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'tags'
                     description: 'tags to filter by'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'array'
                         items: jsonschema.Items(jsonschema.SchemaRef(jsonschema.Schema{
                             typ: 'string'
                         }))
                     })
                 },
                 openrpc.ContentDescriptor{
                     name: 'limit'
                     description: 'maximum number of results to return'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'integer'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet_list'
                 description: 'all pets from the system, that matches the tags'
                 schema: jsonschema.SchemaRef(jsonschema.Reference{
                     ref: '#/components/schemas/Pet'
                 })
             }
         },
         specification.ActorMethod{
             name: 'CreatePet'
             description: 'creates a new pet in the store. Duplicates are allowed.'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'new_pet'
                     description: 'Pet to add to the store.'
                     schema: jsonschema.SchemaRef(jsonschema.Reference{
                         ref: '#/components/schemas/NewPet'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet'
                 description: 'the newly created pet'
                 schema: jsonschema.SchemaRef(jsonschema.Reference{
                     ref: '#/components/schemas/Pet'
                 })
             }
         },
         specification.ActorMethod{
             name: 'GetPetById'
             description: 'gets a pet based on a single ID, if the user has access to the pet'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'id'
                     description: 'ID of pet to fetch'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'integer'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet'
                 description: 'pet response'
                 schema: jsonschema.SchemaRef(jsonschema.Reference{
                     ref: '#/components/schemas/Pet'
                 })
             }
         },
         specification.ActorMethod{
             name: 'DeletePetById'
             description: 'deletes a single pet based on the ID supplied'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'id'
                     description: 'ID of pet to delete'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'integer'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet'
                 description: 'pet deleted'
                 schema: jsonschema.SchemaRef(jsonschema.Schema{
                     typ: 'null'
                 })
             }
-        }
+        },
     ]
 }
 
 openapi_specification := actor_specification.to_openapi()
```
```diff
@@ -8,101 +8,101 @@ import freeflowuniverse.herolib.schemas.openrpc
 import os
 
 const actor_specification = specification.ActorSpecification{
     name: 'PetStore'
     structure: code.Struct{}
     interfaces: [.openrpc]
     methods: [
         specification.ActorMethod{
             name: 'GetPets'
             description: 'finds pets in the system that the user has access to by tags and within a limit'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'tags'
                     description: 'tags to filter by'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'array'
                         items: jsonschema.Items(jsonschema.SchemaRef(jsonschema.Schema{
                             typ: 'string'
                         }))
                     })
                 },
                 openrpc.ContentDescriptor{
                     name: 'limit'
                     description: 'maximum number of results to return'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'integer'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet_list'
                 description: 'all pets from the system, that matches the tags'
                 schema: jsonschema.SchemaRef(jsonschema.Reference{
                     ref: '#/components/schemas/Pet'
                 })
             }
         },
         specification.ActorMethod{
             name: 'CreatePet'
             description: 'creates a new pet in the store. Duplicates are allowed.'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'new_pet'
                     description: 'Pet to add to the store.'
                     schema: jsonschema.SchemaRef(jsonschema.Reference{
                         ref: '#/components/schemas/NewPet'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet'
                 description: 'the newly created pet'
                 schema: jsonschema.SchemaRef(jsonschema.Reference{
                     ref: '#/components/schemas/Pet'
                 })
             }
         },
         specification.ActorMethod{
             name: 'GetPetById'
             description: 'gets a pet based on a single ID, if the user has access to the pet'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'id'
                     description: 'ID of pet to fetch'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'integer'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet'
                 description: 'pet response'
                 schema: jsonschema.SchemaRef(jsonschema.Reference{
                     ref: '#/components/schemas/Pet'
                 })
             }
         },
         specification.ActorMethod{
             name: 'DeletePetById'
             description: 'deletes a single pet based on the ID supplied'
             parameters: [
                 openrpc.ContentDescriptor{
                     name: 'id'
                     description: 'ID of pet to delete'
                     schema: jsonschema.SchemaRef(jsonschema.Schema{
                         typ: 'integer'
                     })
-                }
+                },
             ]
             result: openrpc.ContentDescriptor{
                 name: 'pet'
                 description: 'pet deleted'
                 schema: jsonschema.SchemaRef(jsonschema.Schema{
                     typ: 'null'
                 })
             }
-        }
+        },
     ]
 }
 
 openrpc_specification := actor_specification.to_openrpc()
```
```diff
@@ -5,7 +5,7 @@ import freeflowuniverse.herolib.core.playbook
 import os
 
 mut plbook := playbook.new(
-    path: '${os.home_dir()}/code/git.ourworld.tf/ourworld_holding/investorstool/output'
+    path: '${os.home_dir()}/code/git.threefold.info/ourworld_holding/investorstool/output'
 )!
 mut it := investortool.play(mut plbook)!
 it.check()!
```
```diff
@@ -12,7 +12,7 @@ import os
 
 const name = 'tf9_budget'
 
-const wikipath = '${os.home_dir()}/code/git.ourworld.tf/ourworld_holding/info_ourworld/collections/${name}'
+const wikipath = '${os.home_dir()}/code/git.threefold.info/ourworld_holding/info_ourworld/collections/${name}'
 const summarypath = '${wikipath}/summary.md'
 
 // mut sh := spreadsheet.sheet_new(name: 'test2') or { panic(err) }
```
```diff
@@ -8,13 +8,10 @@ const build_path = os.join_path(os.dir(@FILE), '/docusaurus')
 
 buildpath := '${os.home_dir()}/hero/var/mdbuild/bizmodel'
 
-mut model := bizmodel.generate("test", playbook_path)!
+mut model := bizmodel.generate('test', playbook_path)!
 
 println(model.sheet)
 println(model.sheet.export()!)
 
-model.sheet.export(path:"~/Downloads/test.csv")!
-model.sheet.export(path:"~/code/github/freeflowuniverse/starlight_template/src/content/test.csv")!
-
-
-
+model.sheet.export(path: '~/Downloads/test.csv')!
+model.sheet.export(path: '~/code/github/freeflowuniverse/starlight_template/src/content/test.csv')!
```
```diff
@@ -6,13 +6,13 @@ import freeflowuniverse.herolib.core.playbook
 import freeflowuniverse.herolib.core.playcmds
 import os
 
-//TODO: need to fix wrong location
+// TODO: need to fix wrong location
 const playbook_path = os.dir(@FILE) + '/playbook'
 const build_path = os.join_path(os.dir(@FILE), '/docusaurus')
 
 buildpath := '${os.home_dir()}/hero/var/mdbuild/bizmodel'
 
-mut model := bizmodel.getset("example")!
+mut model := bizmodel.getset('example')!
 model.workdir = build_path
 model.play(mut playbook.new(path: playbook_path)!)!
 
```
```diff
@@ -22,16 +22,13 @@ println(model.sheet.export()!)
 // model.sheet.export(path:"~/Downloads/test.csv")!
 // model.sheet.export(path:"~/code/github/freeflowuniverse/starlight_template/src/content/test.csv")!
 
-
-
-
 report := model.new_report(
     name: 'example_report'
     title: 'Example Business Model'
 )!
 
 report.export(
     path: build_path
     overwrite: true
     format: .docusaurus
 )!
```
```diff
@@ -1 +0,0 @@
-export GROQ_API_KEY="your-groq-api-key-here"
```
````diff
@@ -1,64 +0,0 @@
-# Groq AI Client Example
-
-This example demonstrates how to use Groq's AI API with the herolib OpenAI client. Groq provides API compatibility with OpenAI's client libraries, allowing you to leverage Groq's fast inference speeds with minimal changes to your existing code.
-
-## Prerequisites
-
-- V programming language installed
-- A Groq API key (get one from [Groq's website](https://console.groq.com/keys))
-
-## Setup
-
-1. Copy the `.env.example` file to `.env`:
-
-```bash
-cp .env.example .env
-```
-
-2. Edit the `.env` file and replace `your-groq-api-key-here` with your actual Groq API key.
-
-3. Load the environment variables:
-
-```bash
-source .env
-```
-
-## Running the Example
-
-Execute the script with:
-
-```bash
-v run groq_client.vsh
-```
-
-Or make it executable first:
-
-```bash
-chmod +x groq_client.vsh
-./groq_client.vsh
-```
-
-## How It Works
-
-The example uses the existing OpenAI client from herolib but configures it to use Groq's API endpoint:
-
-1. It retrieves the Groq API key from the environment variables
-2. Configures the OpenAI client with the Groq API key
-3. Overrides the default OpenAI URL with Groq's API URL (`https://api.groq.com/openai/v1`)
-4. Sends a chat completion request to Groq's API
-5. Displays the response
-
-## Supported Models
-
-Groq supports various models including:
-
-- llama2-70b-4096
-- mixtral-8x7b-32768
-- gemma-7b-it
-
-For a complete and up-to-date list of supported models, refer to the [Groq API documentation](https://console.groq.com/docs/models).
-
-## Notes
-
-- The example uses the `gpt_3_5_turbo` enum from the OpenAI client, but Groq will automatically map this to an appropriate model on their end.
-- For production use, you may want to explicitly specify one of Groq's supported models.
````
```diff
@@ -1,46 +0,0 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
-
-module main
-
-import freeflowuniverse.herolib.clients.openai
-import os
-
-fn main() {
-    // Get API key from environment variable
-    key := os.getenv('GROQ_API_KEY')
-    if key == '' {
-        println('Error: GROQ_API_KEY environment variable not set')
-        println('Please set it by running: source .env')
-        exit(1)
-    }
-
-    // Get the configured client
-    mut client := openai.OpenAI {
-        name: 'groq'
-        api_key: key
-        server_url: 'https://api.groq.com/openai/v1'
-    }
-
-    // Define the model and message for chat completion
-    // Note: Use a model that Groq supports, like llama2-70b-4096 or mixtral-8x7b-32768
-    model := 'qwen-2.5-coder-32b'
-
-    // Create a chat completion request
-    res := client.chat_completion(model, openai.Messages{
-        messages: [
-            openai.Message{
-                role: .user
-                content: 'What are the key differences between Groq and other AI inference providers?'
-            }
-        ]
-    })!
-
-    // Print the response
-    println('\nGroq AI Response:')
-    println('==================')
-    println(res.choices[0].message.content)
-    println('\nUsage Statistics:')
-    println('Prompt tokens: ${res.usage.prompt_tokens}')
-    println('Completion tokens: ${res.usage.completion_tokens}')
-    println('Total tokens: ${res.usage.total_tokens}')
-}
```
@@ -7,7 +7,7 @@ import json
fn main() {
	// Initialize Jina client
	mut j := jina.Jina{
		name: 'test_client'
		secret: os.getenv('JINAKEY')
	}

257 examples/clients/mycelium_rpc.vsh Executable file
@@ -0,0 +1,257 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

// Mycelium RPC Client Example
// This example demonstrates how to use the new Mycelium JSON-RPC client
// to interact with a Mycelium node's admin API
import freeflowuniverse.herolib.clients.mycelium_rpc
import freeflowuniverse.herolib.installers.net.mycelium_installer
import time
import os
import encoding.base64

const mycelium_port = 8990

fn terminate_mycelium() ! {
	// Try to find and kill any running mycelium process
	res := os.execute('pkill mycelium')
	if res.exit_code == 0 {
		println('Terminated existing mycelium processes')
		time.sleep(1 * time.second)
	}
}

fn start_mycelium_node() ! {
	// Start a mycelium node with JSON-RPC API enabled
	println('Starting Mycelium node with JSON-RPC API on port ${mycelium_port}...')

	// Create directory for mycelium data
	os.execute('mkdir -p /tmp/mycelium_rpc_example')

	// Start mycelium in background with both HTTP and JSON-RPC APIs enabled
	spawn fn () {
		cmd := 'cd /tmp/mycelium_rpc_example && mycelium --peers tcp://185.69.166.8:9651 quic://[2a02:1802:5e:0:ec4:7aff:fe51:e36b]:9651 tcp://65.109.18.113:9651 --tun-name tun_rpc_example --tcp-listen-port 9660 --quic-listen-port 9661 --api-addr 127.0.0.1:8989 --jsonrpc-addr 127.0.0.1:${mycelium_port}'
		println('Executing: ${cmd}')
		result := os.execute(cmd)
		if result.exit_code != 0 {
			println('Mycelium failed to start: ${result.output}')
		}
	}()

	// Wait for the node to start (JSON-RPC server needs a bit more time)
	println('Waiting for mycelium to start...')
	time.sleep(5 * time.second)

	// Check if mycelium is running
	check_result := os.execute('pgrep mycelium')
	if check_result.exit_code == 0 {
		println('Mycelium process is running (PID: ${check_result.output.trim_space()})')
	} else {
		println('Warning: Mycelium process not found')
	}

	// Check what ports are listening
	port_check := os.execute('lsof -i :${mycelium_port}')
	if port_check.exit_code == 0 {
		println('Port ${mycelium_port} is listening:')
		println(port_check.output)
	} else {
		println('Warning: Port ${mycelium_port} is not listening')
	}
}

fn main() {
	// Install mycelium if not already installed
	println('Checking Mycelium installation...')
	mut installer := mycelium_installer.get()!
	installer.install()!

	// Clean up any existing processes
	terminate_mycelium() or {}

	defer {
		// Clean up on exit
		terminate_mycelium() or {}
		os.execute('rm -rf /tmp/mycelium_rpc_example')
	}

	// Start mycelium node
	start_mycelium_node()!

	// Create RPC client
	println('\n=== Creating Mycelium RPC Client ===')
	mut client := mycelium_rpc.new_client(
		name: 'example_client'
		url: 'http://localhost:${mycelium_port}'
	)!

	println('Connected to Mycelium node at http://localhost:${mycelium_port}')

	// Example 1: Get node information
	println('\n=== Getting Node Information ===')
	info := client.get_info() or {
		println('Error getting node info: ${err}')
		println('Make sure Mycelium node is running with API enabled')
		return
	}
	println('Node Subnet: ${info.node_subnet}')
	println('Node Public Key: ${info.node_pubkey}')

	// Example 2: List peers
	println('\n=== Listing Peers ===')
	peers := client.get_peers() or {
		println('Error getting peers: ${err}')
		return
	}
	println('Found ${peers.len} peers:')
	for i, peer in peers {
		println('Peer ${i + 1}:')
		println('  Endpoint: ${peer.endpoint.proto}://${peer.endpoint.socket_addr}')
		println('  Type: ${peer.peer_type}')
		println('  Connection State: ${peer.connection_state}')
		println('  TX Bytes: ${peer.tx_bytes}')
		println('  RX Bytes: ${peer.rx_bytes}')
	}

	// Example 3: Get routing information
	println('\n=== Getting Routing Information ===')

	// Get selected routes
	routes := client.get_selected_routes() or {
		println('Error getting selected routes: ${err}')
		return
	}
	println('Selected Routes (${routes.len}):')
	for route in routes {
		println('  ${route.subnet} -> ${route.next_hop} (metric: ${route.metric}, seqno: ${route.seqno})')
	}

	// Get fallback routes
	fallback_routes := client.get_fallback_routes() or {
		println('Error getting fallback routes: ${err}')
		return
	}
	println('Fallback Routes (${fallback_routes.len}):')
	for route in fallback_routes {
		println('  ${route.subnet} -> ${route.next_hop} (metric: ${route.metric}, seqno: ${route.seqno})')
	}

	// Example 4: Topic management
	println('\n=== Topic Management ===')

	// Get default topic action
	default_action := client.get_default_topic_action() or {
		println('Error getting default topic action: ${err}')
		return
	}
	println('Default topic action (accept): ${default_action}')

	// Get configured topics
	topics := client.get_topics() or {
		println('Error getting topics: ${err}')
		return
	}
	println('Configured topics (${topics.len}):')
	for topic in topics {
		println('  - ${topic}')
	}

	// Example 5: Add a test topic (try different names)
	println('\n=== Adding Test Topics ===')
	test_topics := ['example_topic', 'test_with_underscore', 'hello world', 'test', 'a']

	for topic in test_topics {
		println('Trying to add topic: "${topic}"')
		add_result := client.add_topic(topic) or {
			println('Error adding topic "${topic}": ${err}')
			continue
		}
		if add_result {
			println('Successfully added topic: ${topic}')

			// Try to remove it immediately
			remove_result := client.remove_topic(topic) or {
				println('Error removing topic "${topic}": ${err}')
				continue
			}
			if remove_result {
				println('Successfully removed topic: ${topic}')
			}
			break // Stop after first success
		}
	}

	// Example 6: Message operations (demonstration only - requires another node)
	println('\n=== Message Operations (Demo) ===')
	println('Note: These operations require another Mycelium node to be meaningful')

	// Try to pop a message with a short timeout (will likely return "No message ready" error)
	message := client.pop_message(false, 1, '') or {
		println('No messages available (expected): ${err}')
		mycelium_rpc.InboundMessage{}
	}

	if message.id != '' {
		println('Received message:')
		println('  ID: ${message.id}')
		println('  From: ${message.src_ip}')
		println('  Payload: ${base64.decode_str(message.payload)}')
	}

	// Example 7: Peer management (demonstration)
	println('\n=== Peer Management Demo ===')

	// Try to add a peer (this is just for demonstration)
	test_endpoint := 'tcp://127.0.0.1:9999'
	add_peer_result := client.add_peer(test_endpoint) or {
		println('Error adding peer (expected if endpoint is invalid): ${err}')
		false
	}

	if add_peer_result {
		println('Successfully added peer: ${test_endpoint}')

		// Remove the test peer
		remove_peer_result := client.delete_peer(test_endpoint) or {
			println('Error removing peer: ${err}')
			false
		}

		if remove_peer_result {
			println('Successfully removed test peer')
		}
	}

	// Example 8: Get public key from IP (demonstration)
	println('\n=== Public Key Lookup Demo ===')

	// This will likely fail unless we have a valid mycelium IP
	if info.node_subnet != '' {
		// Extract the first IP from the subnet for testing
		subnet_parts := info.node_subnet.split('::')
		if subnet_parts.len > 0 {
			test_ip := subnet_parts[0] + '::1'
			pubkey_response := client.get_public_key_from_ip(test_ip) or {
				println('Could not get public key for IP ${test_ip}: ${err}')
				mycelium_rpc.PublicKeyResponse{}
			}

			if pubkey_response.node_pub_key != '' {
				println('Public key for ${test_ip}: ${pubkey_response.node_pub_key}')
			}
		}
	}

	println('\n=== Mycelium RPC Client Example Completed ===')
	println('This example demonstrated:')
	println('- Getting node information')
	println('- Listing peers and their connection status')
	println('- Retrieving routing information')
	println('- Managing topics')
	println('- Message operations (basic)')
	println('- Peer management')
	println('- Public key lookups')
	println('')
	println('For full message sending/receiving functionality, you would need')
	println('multiple Mycelium nodes running and connected to each other.')
	println('See the Mycelium documentation for more advanced usage.')
}
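If a Mycelium node is already running with `--jsonrpc-addr` set, the installer and process bootstrapping in the example above can be skipped. A minimal sketch, assuming a node listening on 127.0.0.1:8990 and using only the client calls shown above:

```v
// Minimal sketch: query an already-running node (assumption: it was started
// with --jsonrpc-addr 127.0.0.1:8990, as in the example above).
import freeflowuniverse.herolib.clients.mycelium_rpc

fn main() {
	mut client := mycelium_rpc.new_client(
		name: 'probe'
		url:  'http://localhost:8990'
	) or {
		eprintln('cannot create client: ${err}')
		return
	}
	// Query basic node identity over JSON-RPC
	info := client.get_info() or {
		eprintln('node not reachable: ${err}')
		return
	}
	println('subnet: ${info.node_subnet}')
	println('pubkey: ${info.node_pubkey}')
}
```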
@@ -1,209 +0,0 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.clients.qdrant
import os
import flag

mut fp := flag.new_flag_parser(os.args)
fp.application('qdrant_example.vsh')
fp.version('v0.1.0')
fp.description('Example script demonstrating Qdrant client usage')
fp.skip_executable()

help_requested := fp.bool('help', `h`, false, 'Show help message')

if help_requested {
	println(fp.usage())
	exit(0)
}

additional_args := fp.finalize() or {
	eprintln(err)
	println(fp.usage())
	exit(1)
}

// Initialize Qdrant client
mut client := qdrant.get(name: 'default') or {
	// If client doesn't exist, create a new one
	mut new_client := qdrant.QdrantClient{
		name: 'default'
		url: 'http://localhost:6333'
	}
	qdrant.set(new_client) or {
		eprintln('Failed to set Qdrant client: ${err}')
		exit(1)
	}
	new_client
}

println('Connected to Qdrant at ${client.url}')

// Check if Qdrant is healthy
is_healthy := client.health_check() or {
	eprintln('Failed to check Qdrant health: ${err}')
	exit(1)
}

if !is_healthy {
	eprintln('Qdrant is not healthy')
	exit(1)
}

println('Qdrant is healthy')

// Get service info
service_info := client.get_service_info() or {
	eprintln('Failed to get service info: ${err}')
	exit(1)
}

println('Qdrant version: ${service_info.version}')

// Collection name for our example
collection_name := 'example_collection'

// Check if collection exists and delete it if it does
collections := client.list_collections() or {
	eprintln('Failed to list collections: ${err}')
	exit(1)
}

if collection_name in collections.result {
	println('Collection ${collection_name} already exists, deleting it...')
	client.delete_collection(collection_name: collection_name) or {
		eprintln('Failed to delete collection: ${err}')
		exit(1)
	}
	println('Collection deleted')
}

// Create a new collection
println('Creating collection ${collection_name}...')
vectors_config := qdrant.VectorsConfig{
	size: 4 // Small size for example purposes
	distance: .cosine
}

client.create_collection(
	collection_name: collection_name
	vectors: vectors_config
) or {
	eprintln('Failed to create collection: ${err}')
	exit(1)
}

println('Collection created')

// Upsert some points
println('Upserting points...')
points := [
	qdrant.PointStruct{
		id: '1'
		vector: [f32(0.1), 0.2, 0.3, 0.4]
		payload: {
			'color': 'red'
			'category': 'furniture'
			'name': 'chair'
		}
	},
	qdrant.PointStruct{
		id: '2'
		vector: [f32(0.2), 0.3, 0.4, 0.5]
		payload: {
			'color': 'blue'
			'category': 'electronics'
			'name': 'laptop'
		}
	},
	qdrant.PointStruct{
		id: '3'
		vector: [f32(0.3), 0.4, 0.5, 0.6]
		payload: {
			'color': 'green'
			'category': 'food'
			'name': 'apple'
		}
	}
]

client.upsert_points(
	collection_name: collection_name
	points: points
	wait: true
) or {
	eprintln('Failed to upsert points: ${err}')
	exit(1)
}

println('Points upserted')

// Get collection info to verify points were added
collection_info := client.get_collection(collection_name: collection_name) or {
	eprintln('Failed to get collection info: ${err}')
	exit(1)
}

println('Collection has ${collection_info.vectors_count} points')

// Search for points
println('Searching for points similar to [0.1, 0.2, 0.3, 0.4]...')
search_result := client.search(
	collection_name: collection_name
	vector: [f32(0.1), 0.2, 0.3, 0.4]
	limit: 3
) or {
	eprintln('Failed to search points: ${err}')
	exit(1)
}

println('Search results:')
for i, point in search_result.result {
	println('  ${i + 1}. ID: ${point.id}, Score: ${point.score}')
	if payload := point.payload {
		println('     Name: ${payload['name']}')
		println('     Category: ${payload['category']}')
		println('     Color: ${payload['color']}')
	}
}

// Search with filter
println('\nSearching for points with category "electronics"...')
filter := qdrant.Filter{
	must: [
		qdrant.FieldCondition{
			key: 'category'
			match: 'electronics'
		}
	]
}

filtered_search := client.search(
	collection_name: collection_name
	vector: [f32(0.1), 0.2, 0.3, 0.4]
	filter: filter
	limit: 3
) or {
	eprintln('Failed to search with filter: ${err}')
	exit(1)
}

println('Filtered search results:')
for i, point in filtered_search.result {
	println('  ${i + 1}. ID: ${point.id}, Score: ${point.score}')
	if payload := point.payload {
		println('     Name: ${payload['name']}')
		println('     Category: ${payload['category']}')
		println('     Color: ${payload['color']}')
	}
}

// Clean up - delete the collection
println('\nCleaning up - deleting collection...')
client.delete_collection(collection_name: collection_name) or {
	eprintln('Failed to delete collection: ${err}')
	exit(1)
}

println('Collection deleted')
println('Example completed successfully')
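For reference, the removed example's core flow (health check, create collection, upsert, search) condenses to a few calls. A minimal sketch, assuming the same qdrant client API used above and a local Qdrant on its default port 6333:

```v
// Minimal sketch: condensed health-check -> create -> upsert -> search flow,
// using only the client calls from the removed example above.
import freeflowuniverse.herolib.clients.qdrant

fn main() {
	mut client := qdrant.QdrantClient{
		name: 'mini'
		url:  'http://localhost:6333'
	}
	healthy := client.health_check() or { false }
	if !healthy {
		eprintln('qdrant not reachable')
		return
	}
	// Tiny 4-dimensional collection, cosine distance
	client.create_collection(
		collection_name: 'mini'
		vectors:         qdrant.VectorsConfig{
			size:     4
			distance: .cosine
		}
	) or { eprintln('create failed: ${err}') }
	// One point is enough to exercise search
	client.upsert_points(
		collection_name: 'mini'
		points:          [
			qdrant.PointStruct{
				id:     '1'
				vector: [f32(0.1), 0.2, 0.3, 0.4]
			},
		]
		wait:            true
	) or { eprintln('upsert failed: ${err}') }
	res := client.search(
		collection_name: 'mini'
		vector:          [f32(0.1), 0.2, 0.3, 0.4]
		limit:           1
	) or {
		eprintln('search failed: ${err}')
		return
	}
	if res.result.len > 0 {
		println('top hit: ${res.result[0].id} (score: ${res.result[0].score})')
	}
}
```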
291 examples/clients/zinit_rpc_example.vsh Executable file
@@ -0,0 +1,291 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.clients.zinit_rpc
import os
import time

// Comprehensive example demonstrating all Zinit RPC client functionality
// This example shows how to use all 18 methods in the Zinit JSON-RPC API

println('=== Zinit RPC Client Example ===\n')

// Start Zinit in the background
println('Starting Zinit in background...')
mut zinit_process := os.new_process('/usr/local/bin/zinit')
zinit_process.set_args(['init'])
zinit_process.set_redirect_stdio()
zinit_process.run()

// Wait a moment for Zinit to start up
time.sleep(2000 * time.millisecond)
println('✓ Zinit started')

// Ensure we clean up Zinit when done
defer {
	println('\nCleaning up...')
	zinit_process.signal_kill()
	zinit_process.wait()
	println('✓ Zinit stopped')
}

// Create a new client
mut client := zinit_rpc.new_client(
	name: 'example_client'
	socket_path: '/tmp/zinit.sock'
) or {
	println('Failed to create client: ${err}')
	println('Make sure Zinit is running and the socket exists at /tmp/zinit.sock')
	exit(1)
}

println('✓ Created Zinit RPC client')

// 1. Discover API specification
println('\n1. Discovering API specification...')
spec := client.rpc_discover() or {
	println('Failed to discover API: ${err}')
	exit(1)
}
println('✓ API discovered:')
println('   - OpenRPC version: ${spec.openrpc}')
println('   - API title: ${spec.info.title}')
println('   - API version: ${spec.info.version}')
println('   - Methods available: ${spec.methods.len}')

// 2. List all services
println('\n2. Listing all services...')
services := client.service_list() or {
	println('Failed to list services: ${err}')
	exit(1)
}
println('✓ Found ${services.len} services:')
for service_name, state in services {
	println('   - ${service_name}: ${state}')
}

// 3. Create a test service configuration
println('\n3. Creating a test service...')
test_service_name := 'test_echo_service'
config := zinit_rpc.ServiceConfig{
	exec: '/bin/echo "Hello from test service"'
	oneshot: true
	log: 'stdout'
	env: {
		'TEST_VAR': 'test_value'
	}
	shutdown_timeout: 10
}

service_path := client.service_create(test_service_name, config) or {
	if err.msg().contains('already exists') {
		println('✓ Service already exists, continuing...')
		''
	} else {
		println('Failed to create service: ${err}')
		exit(1)
	}
}
if service_path != '' {
	println('✓ Service created at: ${service_path}')
}

// 4. Get service configuration
println('\n4. Getting service configuration...')
retrieved_config := client.service_get(test_service_name) or {
	println('Failed to get service config: ${err}')
	exit(1)
}
println('✓ Service config retrieved:')
println('   - Exec: ${retrieved_config.exec}')
println('   - Oneshot: ${retrieved_config.oneshot}')
println('   - Log: ${retrieved_config.log}')
println('   - Shutdown timeout: ${retrieved_config.shutdown_timeout}')

// 5. Monitor the service
println('\n5. Starting to monitor the service...')
client.service_monitor(test_service_name) or {
	if err.msg().contains('already monitored') {
		println('✓ Service already monitored')
	} else {
		println('Failed to monitor service: ${err}')
		exit(1)
	}
}

// 6. Get service status
println('\n6. Getting service status...')
status := client.service_status(test_service_name) or {
	println('Failed to get service status: ${err}')
	exit(1)
}
println('✓ Service status:')
println('   - Name: ${status.name}')
println('   - PID: ${status.pid}')
println('   - State: ${status.state}')
println('   - Target: ${status.target}')
if status.after.len > 0 {
	println('   - Dependencies:')
	for dep_name, dep_state in status.after {
		println('     - ${dep_name}: ${dep_state}')
	}
}

// 7. Start the service (if it's not running)
if status.state != 'Running' {
	println('\n7. Starting the service...')
	client.service_start(test_service_name) or {
		println('Failed to start service: ${err}')
		// Continue anyway
	}
	println('✓ Service start command sent')
} else {
	println('\n7. Service is already running')
}

// 8. Get service statistics (if running)
println('\n8. Getting service statistics...')
stats := client.service_stats(test_service_name) or {
	println('Failed to get service stats (service might not be running): ${err}')
	// Continue anyway
	zinit_rpc.ServiceStats{}
}
if stats.name != '' {
	println('✓ Service statistics:')
	println('   - Name: ${stats.name}')
	println('   - PID: ${stats.pid}')
	println('   - Memory usage: ${stats.memory_usage} bytes')
	println('   - CPU usage: ${stats.cpu_usage}%')
	if stats.children.len > 0 {
		println('   - Child processes:')
		for child in stats.children {
			println('     - PID ${child.pid}: Memory ${child.memory_usage} bytes, CPU ${child.cpu_usage}%')
		}
	}
}

// 9. Get current logs
println('\n9. Getting current logs...')
all_logs := client.stream_current_logs(name: '') or {
	println('Failed to get logs: ${err}')
	[]string{}
}
if all_logs.len > 0 {
	println('✓ Retrieved ${all_logs.len} log entries (showing last 3):')
	start_idx := if all_logs.len > 3 { all_logs.len - 3 } else { 0 }
	for i in start_idx .. all_logs.len {
		println('   ${all_logs[i]}')
	}
} else {
	println('✓ No logs available')
}

// 10. Get logs for specific service
println('\n10. Getting logs for test service...')
service_logs := client.stream_current_logs(name: test_service_name) or {
	println('Failed to get service logs: ${err}')
	[]string{}
}
if service_logs.len > 0 {
	println('✓ Retrieved ${service_logs.len} log entries for ${test_service_name}:')
	for log in service_logs {
		println('   ${log}')
	}
} else {
	println('✓ No logs available for ${test_service_name}')
}

// 11. Subscribe to logs
println('\n11. Subscribing to log stream...')
subscription_id := client.stream_subscribe_logs(name: test_service_name) or {
	println('Failed to subscribe to logs: ${err}')
	u64(0)
}
if subscription_id != 0 {
	println('✓ Subscribed to logs with ID: ${subscription_id}')
}

// 12. Send signal to service (if running)
// Get fresh status to make sure service is still running
fresh_status := client.service_status(test_service_name) or {
	println('\n12. Skipping signal test (cannot get service status)')
	zinit_rpc.ServiceStatus{}
}
if fresh_status.state == 'Running' && fresh_status.pid > 0 {
	println('\n12. Sending SIGTERM signal to service...')
	client.service_kill(test_service_name, 'SIGTERM') or {
		println('Failed to send signal: ${err}')
		// Continue anyway
	}
	println('✓ Signal sent')
} else {
	println('\n12. Skipping signal test (service not running: state=${fresh_status.state}, pid=${fresh_status.pid})')
}

// 13. Stop the service
println('\n13. Stopping the service...')
client.service_stop(test_service_name) or {
	if err.msg().contains('is down') {
		println('✓ Service is already stopped')
	} else {
		println('Failed to stop service: ${err}')
		// Continue anyway
	}
}

// 14. Forget the service
println('\n14. Forgetting the service...')
client.service_forget(test_service_name) or {
	println('Failed to forget service: ${err}')
	// Continue anyway
}
println('✓ Service forgotten')

// 15. Delete the service configuration
println('\n15. Deleting service configuration...')
delete_result := client.service_delete(test_service_name) or {
	println('Failed to delete service: ${err}')
	''
}
if delete_result != '' {
	println('✓ Service deleted: ${delete_result}')
}

// 16. Test HTTP server operations
println('\n16. Testing HTTP server operations...')
server_result := client.system_start_http_server('127.0.0.1:9999') or {
	println('Failed to start HTTP server: ${err}')
	''
}
if server_result != '' {
	println('✓ HTTP server started: ${server_result}')

	// Stop the HTTP server
	client.system_stop_http_server() or { println('Failed to stop HTTP server: ${err}') }
	println('✓ HTTP server stopped')
}

// 17. Test system operations (commented out for safety)
println('\n17. System operations available but not tested for safety:')
println('   - system_shutdown() - Stops all services and powers off the system')
println('   - system_reboot() - Stops all services and reboots the system')

println('\n=== Example completed successfully! ===')
println('\nThis example demonstrated all 18 methods in the Zinit JSON-RPC API:')
println('✓ rpc.discover - Get OpenRPC specification')
println('✓ service_list - List all services')
println('✓ service_create - Create service configuration')
println('✓ service_get - Get service configuration')
println('✓ service_monitor - Start monitoring service')
println('✓ service_status - Get service status')
println('✓ service_start - Start service')
println('✓ service_stats - Get service statistics')
println('✓ stream_current_logs - Get current logs')
println('✓ stream_subscribe_logs - Subscribe to logs (returns subscription ID)')
println('✓ service_kill - Send signal to service')
println('✓ service_stop - Stop service')
println('✓ service_forget - Stop monitoring service')
println('✓ service_delete - Delete service configuration')
println('✓ system_start_http_server - Start HTTP server')
println('✓ system_stop_http_server - Stop HTTP server')
println('• system_shutdown - Available but not tested')
println('• system_reboot - Available but not tested')
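The walkthrough above covers all 18 methods; day-to-day use is usually just the service lifecycle. A minimal sketch of that core path, assuming Zinit is already running with its socket at /tmp/zinit.sock and using only the calls demonstrated above:

```v
// Minimal sketch: create -> monitor -> start -> stop -> forget -> delete,
// condensed from the walkthrough above (assumption: zinit already running).
import freeflowuniverse.herolib.clients.zinit_rpc

fn main() {
	mut client := zinit_rpc.new_client(
		name:        'lifecycle_demo'
		socket_path: '/tmp/zinit.sock'
	) or {
		eprintln('cannot create client: ${err}')
		return
	}
	// A oneshot service that just echoes and exits
	config := zinit_rpc.ServiceConfig{
		exec:    '/bin/echo "hello"'
		oneshot: true
		log:     'stdout'
	}
	_ := client.service_create('demo', config) or {
		eprintln('create failed: ${err}')
		''
	}
	client.service_monitor('demo') or { eprintln('monitor failed: ${err}') }
	client.service_start('demo') or { eprintln('start failed: ${err}') }
	client.service_stop('demo') or { eprintln('stop failed: ${err}') }
	client.service_forget('demo') or { eprintln('forget failed: ${err}') }
	_ := client.service_delete('demo') or {
		eprintln('delete failed: ${err}')
		''
	}
}
```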
@@ -5,44 +5,44 @@ import freeflowuniverse.herolib.core.jobs.model

// Create a test agent with some sample data
mut agent := model.Agent{
	pubkey: 'ed25519:1234567890abcdef'
	address: '192.168.1.100'
	port: 9999
	description: 'Test agent for binary encoding'
	status: model.AgentStatus{
		guid: 'agent-123'
		timestamp_first: ourtime.now()
		timestamp_last: ourtime.now()
		status: model.AgentState.ok
	}
	services: []
	signature: 'signature-data-here'
}

// Add a service
mut service := model.AgentService{
	actor: 'vm'
	description: 'Virtual machine management'
	status: model.AgentServiceState.ok
	public: true
	actions: []
}

// Add an action to the service
mut action := model.AgentServiceAction{
	action: 'create'
	description: 'Create a new virtual machine'
	status: model.AgentServiceState.ok
	public: true
	params: {
		'name': 'Name of the VM'
		'memory': 'Memory in MB'
		'cpu': 'Number of CPU cores'
	}
	params_example: {
		'name': 'my-test-vm'
		'memory': '2048'
		'cpu': '2'
	}
}

@@ -1,4 +1,4 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
+#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

 import freeflowuniverse.herolib.crypt.aes_symmetric { decrypt, encrypt }
 import freeflowuniverse.herolib.ui.console
@@ -8,7 +8,7 @@ mut gs := gittools.new()!
 mydocs_path := gs.get_path(
 	pull: true
 	reset: false
-	url: 'https://git.ourworld.tf/tfgrid/info_docs_depin/src/branch/main/docs'
+	url: 'https://git.threefold.info/tfgrid/info_docs_depin/src/branch/main/docs'
 )!

 println(mydocs_path)
@@ -1,5 +1,5 @@
 !!juggler.configure
-	url: 'https://git.ourworld.tf/projectmycelium/itenv'
+	url: 'https://git.threefold.info/projectmycelium/itenv'
 	username: ''
 	password: ''
 	port: 8000
@@ -1 +1 @@
-hero juggler -u https://git.ourworld.tf/projectmycelium/itenv
+hero juggler -u https://git.threefold.info/projectmycelium/itenv
@@ -8,7 +8,7 @@ import veb
 osal.load_env_file('${os.dir(@FILE)}/.env')!

 mut j := juggler.configure(
-	url: 'https://git.ourworld.tf/projectmycelium/itenv'
+	url: 'https://git.threefold.info/projectmycelium/itenv'
 	username: os.getenv('JUGGLER_USERNAME')
 	password: os.getenv('JUGGLER_PASSWORD')
 	reset: true
@@ -6,7 +6,7 @@ import os
 mut sm := startupmanager.get()!
 sm.start(
 	name: 'juggler'
-	cmd: 'hero juggler -secret planetfirst -u https://git.ourworld.tf/projectmycelium/itenv -reset true'
+	cmd: 'hero juggler -secret planetfirst -u https://git.threefold.info/projectmycelium/itenv -reset true'
 	env: {
 		'HOME': os.home_dir()
 	}
@@ -1,7 +1,7 @@
 module example_actor

 import os
-import freeflowuniverse.herolib.hero.baobab.stage {IActor, RunParams}
+import freeflowuniverse.herolib.hero.baobab.stage { IActor, RunParams }
 import freeflowuniverse.herolib.web.openapi
 import time

@@ -10,13 +10,11 @@ const openapi_spec_json = os.read_file(openapi_spec_path) or { panic(err) }
 const openapi_specification = openapi.json_decode(openapi_spec_json)!

 struct ExampleActor {
 	stage.Actor
 }

 fn new() !ExampleActor {
-	return ExampleActor{
-		stage.new_actor('example')
-	}
+	return ExampleActor{stage.new_actor('example')}
 }

 pub fn run() ! {
@@ -70,74 +70,86 @@ fn (mut actor Actor) listen() ! {

 // Handle method invocations
 fn (mut actor Actor) handle_method(cmd string, data string) !string {
 	param_anys := json2.raw_decode(data)!.arr()
 	match cmd {
 		'listPets' {
 			pets := if param_anys.len == 0 {
 				actor.data_store.list_pets()
 			} else {
 				params := json.decode(ListPetParams, param_anys[0].str())!
 				actor.data_store.list_pets(params)
 			}
 			return json.encode(pets)
 		}
 		'createPet' {
 			response := if param_anys.len == 0 {
 				return error('at least data expected')
 			} else if param_anys.len == 1 {
 				payload := json.decode(NewPet, param_anys[0].str())!
 				actor.data_store.create_pet(payload)
 			} else {
 				return error('expected 1 param, found too many')
 			}
 			// data := json.decode(NewPet, data) or { return error('Invalid pet data: $err') }
 			// created_pet := actor.data_store.create_pet(pet)
 			return json.encode(response)
 		}
 		'getPet' {
 			response := if param_anys.len == 0 {
 				return error('at least data expected')
 			} else if param_anys.len == 1 {
 				payload := param_anys[0].int()
 				actor.data_store.get_pet(payload)!
 			} else {
 				return error('expected 1 param, found too many')
 			}

 			return json.encode(response)
 		}
 		'deletePet' {
-			params := json.decode(map[string]int, data) or { return error('Invalid params: $err') }
-			actor.data_store.delete_pet(params['petId']) or { return error('Pet not found: $err') }
-			return json.encode({'message': 'Pet deleted'})
+			params := json.decode(map[string]int, data) or {
+				return error('Invalid params: ${err}')
+			}
+			actor.data_store.delete_pet(params['petId']) or {
+				return error('Pet not found: ${err}')
+			}
+			return json.encode({
+				'message': 'Pet deleted'
+			})
 		}
 		'listOrders' {
 			orders := actor.data_store.list_orders()
 			return json.encode(orders)
 		}
 		'getOrder' {
-			params := json.decode(map[string]int, data) or { return error('Invalid params: $err') }
+			params := json.decode(map[string]int, data) or {
+				return error('Invalid params: ${err}')
+			}
 			order := actor.data_store.get_order(params['orderId']) or {
-				return error('Order not found: $err')
+				return error('Order not found: ${err}')
 			}
 			return json.encode(order)
 		}
 		'deleteOrder' {
-			params := json.decode(map[string]int, data) or { return error('Invalid params: $err') }
+			params := json.decode(map[string]int, data) or {
+				return error('Invalid params: ${err}')
+			}
 			actor.data_store.delete_order(params['orderId']) or {
-				return error('Order not found: $err')
+				return error('Order not found: ${err}')
 			}
-			return json.encode({'message': 'Order deleted'})
+			return json.encode({
+				'message': 'Order deleted'
+			})
 		}
 		'createUser' {
-			user := json.decode(NewUser, data) or { return error('Invalid user data: $err') }
+			user := json.decode(NewUser, data) or { return error('Invalid user data: ${err}') }
 			created_user := actor.data_store.create_user(user)
 			return json.encode(created_user)
 		}
 		else {
-			return error('Unknown method: $cmd')
+			return error('Unknown method: ${cmd}')
 		}
 	}
 }

 @[params]
@@ -6,4 +6,4 @@ mut db := qdrant_installer.get()!

 db.install()!
 db.start()!
-
+db.destroy()!
BIN examples/installers/infra/dify Executable file
Binary file not shown.
9 examples/installers/infra/dify.vsh Executable file
@@ -0,0 +1,9 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.installers.infra.dify as dify_installer

mut dify := dify_installer.get()!

dify.install()!
dify.start()!
// dify.destroy()!
@@ -8,5 +8,5 @@ import freeflowuniverse.herolib.core

 core.interactive_set()! // make sure the sudo works so we can do things even if it requires those rights

-mut i1:=golang.get()!
+mut i1 := golang.get()!
 i1.install()!
@@ -5,6 +5,4 @@ import freeflowuniverse.herolib.installers.lang.python as python_module
 mut python_installer := python_module.get()!
 python_installer.install()!
-
-
 // python_installer.destroy()!
@@ -21,74 +21,64 @@ create_count := fp.int('create', `c`, 5, 'Number of jobs to create')
 help_requested := fp.bool('help', `h`, false, 'Show help message')

 if help_requested {
 	println(fp.usage())
 	exit(0)
 }

 additional_args := fp.finalize() or {
 	eprintln(err)
 	println(fp.usage())
 	exit(1)
 }

 // Create a new HeroRunner instance
-mut runner := model.new() or {
-	panic('Failed to create HeroRunner: ${err}')
-}
+mut runner := model.new() or { panic('Failed to create HeroRunner: ${err}') }

 println('\n---------BEGIN VFS JOBS EXAMPLE')

 // Create some jobs
 println('\n---------CREATING JOBS')
-for i in 0..create_count {
+for i in 0 .. create_count {
 	mut job := runner.jobs.new()
 	job.guid = 'job_${i}_${time.now().unix}'
 	job.actor = 'example_actor'
 	job.action = 'test_action'
 	job.params = {
 		'param1': 'value1'
 		'param2': 'value2'
 	}

 	// For demonstration, make some jobs older by adjusting their creation time
 	if i % 2 == 0 {
 		job.status.created.time = time.now().add_days(-(cleanup_days + 1))
 	}

-	runner.jobs.set(job) or {
-		panic('Failed to set job: ${err}')
-	}
+	runner.jobs.set(job) or { panic('Failed to set job: ${err}') }
 	println('Created job with GUID: ${job.guid}')
 }

 // List all jobs
 println('\n---------LISTING ALL JOBS')
-jobs := runner.jobs.list() or {
-	panic('Failed to list jobs: ${err}')
-}
+jobs := runner.jobs.list() or { panic('Failed to list jobs: ${err}') }
 println('Found ${jobs.len} jobs:')
 for job in jobs {
 	days_ago := (time.now().unix - job.status.created.time.unix) / (60 * 60 * 24)
 	println('- ${job.guid} (created ${days_ago} days ago)')
 }

 // Clean up old jobs
 println('\n---------CLEANING UP OLD JOBS')
 println('Cleaning up jobs older than ${cleanup_days} days...')
-deleted_count := runner.cleanup_jobs(cleanup_days) or {
-	panic('Failed to clean up jobs: ${err}')
-}
+deleted_count := runner.cleanup_jobs(cleanup_days) or { panic('Failed to clean up jobs: ${err}') }
 println('Deleted ${deleted_count} old jobs')

 // List remaining jobs
 println('\n---------LISTING REMAINING JOBS')
-remaining_jobs := runner.jobs.list() or {
-	panic('Failed to list jobs: ${err}')
-}
+remaining_jobs := runner.jobs.list() or { panic('Failed to list jobs: ${err}') }
 println('Found ${remaining_jobs.len} remaining jobs:')
 for job in remaining_jobs {
 	days_ago := (time.now().unix - job.status.created.time.unix) / (60 * 60 * 24)
 	println('- ${job.guid} (created ${days_ago} days ago)')
 }

 println('\n---------END VFS JOBS EXAMPLE')
126 examples/osal/zinit/zinit_openrpc_example.v Normal file
@@ -0,0 +1,126 @@
module main

import freeflowuniverse.herolib.osal.zinit
import json

fn main() {
	// Create a new Zinit client with the default socket path
	mut zinit_client := zinit.new_stateless(socket_path: '/tmp/zinit.sock')!

	println('Connected to Zinit via OpenRPC')

	// Example 1: Get the OpenRPC API specification
	println('\n=== Getting API Specification ===')
	api_spec := zinit_client.client.discover() or {
		println('Error getting API spec: ${err}')
		return
	}
	println('API Specification (first 100 chars): ${api_spec[..100]}...')

	// Example 2: List all services
	println('\n=== Listing Services ===')
	service_list := zinit_client.client.list() or {
		println('Error listing services: ${err}')
		return
	}
	println('Services:')
	for name, state in service_list {
		println('- ${name}: ${state}')
	}

	// Example 3: Get detailed status of a service (if any exist)
	if service_list.len > 0 {
		service_name := service_list.keys()[0]
		println('\n=== Getting Status for Service: ${service_name} ===')

		status := zinit_client.client.status(service_name) or {
			println('Error getting status: ${err}')
			return
		}

		println('Service Status:')
		println('- Name: ${status.name}')
		println('- PID: ${status.pid}')
		println('- State: ${status.state}')
		println('- Target: ${status.target}')
		println('- Dependencies:')
		for dep_name, dep_state in status.after {
			println('  - ${dep_name}: ${dep_state}')
		}

		// Example 4: Get service stats
		println('\n=== Getting Stats for Service: ${service_name} ===')
		stats := zinit_client.client.stats(service_name) or {
			println('Error getting stats: ${err}')
			println('Note: Stats are only available for running services')
			return
		}

		println('Service Stats:')
		println('- Memory Usage: ${stats.memory_usage} bytes')
		println('- CPU Usage: ${stats.cpu_usage}%')
		if stats.children.len > 0 {
			println('- Child Processes:')
			for child in stats.children {
				println('  - PID: ${child.pid}, Memory: ${child.memory_usage} bytes, CPU: ${child.cpu_usage}%')
			}
		}
	} else {
		println('\nNo services found to query')
	}

	// Example 5: Create a new service (commented out for safety)
	/*
	println('\n=== Creating a New Service ===')
	new_service_config := zinit.ServiceConfig{
		exec: '/bin/echo "Hello from Zinit"'
		oneshot: true
		after: []string{}
		log: 'stdout'
		env: {
			'ENV_VAR': 'value'
		}
	}

	result := zinit_client.client.create_service('example_service', new_service_config) or {
		println('Error creating service: ${err}')
		return
	}
	println('Service created: ${result}')

	// Start the service
	zinit_client.client.start('example_service') or {
		println('Error starting service: ${err}')
		return
	}
	println('Service started')

	// Get logs
	logs := zinit_client.client.get_logs('example_service') or {
		println('Error getting logs: ${err}')
		return
	}
	println('Service logs:')
	for log in logs {
		println('- ${log}')
	}

	// Delete the service when done
	zinit_client.client.stop('example_service') or {
		println('Error stopping service: ${err}')
		return
	}
	time.sleep(1 * time.second)
	zinit_client.client.forget('example_service') or {
		println('Error forgetting service: ${err}')
		return
	}
	zinit_client.client.delete_service('example_service') or {
		println('Error deleting service: ${err}')
		return
	}
	println('Service deleted')
	*/

	println('\nZinit OpenRPC client example completed')
}
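As a quick-start counterpart to the full tour above, a minimal sketch that only connects and prints each service's state, assuming the same `new_stateless` factory and socket path used in the example:

```v
// Minimal sketch: connect and list service states (assumption: same
// new_stateless factory and /tmp/zinit.sock socket as in the example above).
module main

import freeflowuniverse.herolib.osal.zinit

fn main() {
	mut zinit_client := zinit.new_stateless(socket_path: '/tmp/zinit.sock') or {
		eprintln('cannot connect to zinit: ${err}')
		return
	}
	services := zinit_client.client.list() or {
		eprintln('list failed: ${err}')
		return
	}
	for name, state in services {
		println('${name}: ${state}')
	}
}
```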
@@ -3,7 +3,6 @@
 // Calendar Typescript Client Generation Example
 // This example demonstrates how to generate a typescript client
 // from a given OpenAPI Specification using the `openapi/codegen` module.
-
 import os
 import freeflowuniverse.herolib.schemas.openapi
 import freeflowuniverse.herolib.schemas.openapi.codegen
@@ -15,5 +14,3 @@ const specification = openapi.new(path: '${dir}/meeting_api.json') or {

 // generate typescript client folder and write it in dir
 codegen.ts_client_folder(specification)!.write(dir, overwrite: true)!
-
-
857 examples/schemas/openrpc/openrpc.json Normal file
@@ -0,0 +1,857 @@
{
  "openrpc": "1.2.6",
  "info": {
    "version": "1.0.0",
    "title": "Zinit JSON-RPC API",
    "description": "JSON-RPC 2.0 API for controlling and querying Zinit services",
    "license": {
      "name": "MIT"
    }
  },
  "servers": [
    {
      "name": "Unix Socket",
      "url": "unix:///tmp/zinit.sock"
    }
  ],
  "methods": [
    {
      "name": "rpc.discover",
      "description": "Returns the OpenRPC specification for the API",
      "params": [],
      "result": {
        "name": "OpenRPCSpec",
        "description": "The OpenRPC specification",
        "schema": {
          "type": "string"
        }
      }
    },
    {
      "name": "service_list",
      "description": "Lists all services managed by Zinit",
      "params": [],
      "result": {
        "name": "ServiceList",
        "description": "A map of service names to their current states",
        "schema": {
          "type": "object",
          "additionalProperties": {
            "type": "string",
            "description": "Service state (Running, Success, Error, etc.)"
          }
        }
      },
      "examples": [
        {
          "name": "List all services",
          "params": [],
          "result": {
            "name": "ServiceListResult",
            "value": {
              "service1": "Running",
              "service2": "Success",
              "service3": "Error"
            }
          }
        }
      ]
    },
    {
      "name": "service_status",
      "description": "Shows detailed status information for a specific service",
      "params": [
        {
          "name": "name",
          "description": "The name of the service",
          "required": true,
          "schema": {
            "type": "string"
          }
        }
      ],
      "result": {
        "name": "ServiceStatus",
        "description": "Detailed status information for the service",
        "schema": {
          "type": "object",
          "properties": {
            "name": {
              "type": "string",
              "description": "Service name"
            },
            "pid": {
              "type": "integer",
              "description": "Process ID of the running service (if running)"
            },
            "state": {
              "type": "string",
              "description": "Current state of the service (Running, Success, Error, etc.)"
            },
            "target": {
              "type": "string",
              "description": "Target state of the service (Up, Down)"
            },
            "after": {
              "type": "object",
              "description": "Dependencies of the service and their states",
              "additionalProperties": {
                "type": "string",
                "description": "State of the dependency"
              }
            }
          }
        }
      },
      "examples": [
        {
          "name": "Get status of redis service",
          "params": [
            {
              "name": "name",
              "value": "redis"
            }
          ],
          "result": {
            "name": "ServiceStatusResult",
            "value": {
              "name": "redis",
              "pid": 1234,
              "state": "Running",
              "target": "Up",
              "after": {
                "dependency1": "Success",
                "dependency2": "Running"
              }
            }
          }
        }
      ],
      "errors": [
        {
          "code": -32000,
          "message": "Service not found",
          "data": "service name \"unknown\" unknown"
        }
      ]
    },
    {
      "name": "service_start",
      "description": "Starts a service",
      "params": [
        {
          "name": "name",
          "description": "The name of the service to start",
          "required": true,
          "schema": {
            "type": "string"
          }
        }
      ],
      "result": {
        "name": "StartResult",
        "description": "Result of the start operation",
        "schema": {
          "type": "null"
        }
      },
      "examples": [
        {
          "name": "Start redis service",
          "params": [
            {
              "name": "name",
              "value": "redis"
            }
          ],
          "result": {
            "name": "StartResult",
            "value": null
          }
        }
      ],
      "errors": [
        {
          "code": -32000,
          "message": "Service not found",
          "data": "service name \"unknown\" unknown"
        }
      ]
    },
    {
      "name": "service_stop",
      "description": "Stops a service",
      "params": [
        {
          "name": "name",
          "description": "The name of the service to stop",
          "required": true,
          "schema": {
            "type": "string"
          }
        }
      ],
      "result": {
        "name": "StopResult",
        "description": "Result of the stop operation",
        "schema": {
          "type": "null"
        }
      },
      "examples": [
        {
          "name": "Stop redis service",
          "params": [
            {
              "name": "name",
              "value": "redis"
            }
          ],
          "result": {
            "name": "StopResult",
            "value": null
          }
        }
      ],
      "errors": [
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32003,
|
||||||
|
"message": "Service is down",
|
||||||
|
"data": "service \"redis\" is down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_monitor",
|
||||||
|
"description": "Starts monitoring a service. The service configuration is loaded from the config directory.",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to monitor",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "MonitorResult",
|
||||||
|
"description": "Result of the monitor operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Monitor redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "MonitorResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32001,
|
||||||
|
"message": "Service already monitored",
|
||||||
|
"data": "service \"redis\" already monitored"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32005,
|
||||||
|
"message": "Config error",
|
||||||
|
"data": "failed to load service configuration"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_forget",
|
||||||
|
"description": "Stops monitoring a service. You can only forget a stopped service.",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to forget",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ForgetResult",
|
||||||
|
"description": "Result of the forget operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Forget redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ForgetResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32002,
|
||||||
|
"message": "Service is up",
|
||||||
|
"data": "service \"redis\" is up"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_kill",
|
||||||
|
"description": "Sends a signal to a running service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to send the signal to",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "signal",
|
||||||
|
"description": "The signal to send (e.g., SIGTERM, SIGKILL)",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "KillResult",
|
||||||
|
"description": "Result of the kill operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Send SIGTERM to redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "signal",
|
||||||
|
"value": "SIGTERM"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "KillResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32003,
|
||||||
|
"message": "Service is down",
|
||||||
|
"data": "service \"redis\" is down"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32004,
|
||||||
|
"message": "Invalid signal",
|
||||||
|
"data": "invalid signal: INVALID"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_shutdown",
|
||||||
|
"description": "Stops all services and powers off the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "ShutdownResult",
|
||||||
|
"description": "Result of the shutdown operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Shutdown the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "ShutdownResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32006,
|
||||||
|
"message": "Shutting down",
|
||||||
|
"data": "system is already shutting down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_reboot",
|
||||||
|
"description": "Stops all services and reboots the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "RebootResult",
|
||||||
|
"description": "Result of the reboot operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Reboot the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "RebootResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32006,
|
||||||
|
"message": "Shutting down",
|
||||||
|
"data": "system is already shutting down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_create",
|
||||||
|
"description": "Creates a new service configuration file",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to create",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "content",
|
||||||
|
"description": "The service configuration content",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"exec": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Command to run"
|
||||||
|
},
|
||||||
|
"oneshot": {
|
||||||
|
"type": "boolean",
|
||||||
|
"description": "Whether the service should be restarted"
|
||||||
|
},
|
||||||
|
"after": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": "Services that must be running before this one starts"
|
||||||
|
},
|
||||||
|
"log": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["null", "ring", "stdout"],
|
||||||
|
"description": "How to handle service output"
|
||||||
|
},
|
||||||
|
"env": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": "Environment variables for the service"
|
||||||
|
},
|
||||||
|
"shutdown_timeout": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Maximum time to wait for service to stop during shutdown"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "CreateServiceResult",
|
||||||
|
"description": "Result of the create operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32007,
|
||||||
|
"message": "Service already exists",
|
||||||
|
"data": "Service 'name' already exists"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32008,
|
||||||
|
"message": "Service file error",
|
||||||
|
"data": "Failed to create service file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_delete",
|
||||||
|
"description": "Deletes a service configuration file",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to delete",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "DeleteServiceResult",
|
||||||
|
"description": "Result of the delete operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "Service 'name' not found"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32008,
|
||||||
|
"message": "Service file error",
|
||||||
|
"data": "Failed to delete service file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_get",
|
||||||
|
"description": "Gets a service configuration file",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to get",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "GetServiceResult",
|
||||||
|
"description": "The service configuration",
|
||||||
|
"schema": {
|
||||||
|
"type": "object"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "Service 'name' not found"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32008,
|
||||||
|
"message": "Service file error",
|
||||||
|
"data": "Failed to read service file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_stats",
|
||||||
|
"description": "Get memory and CPU usage statistics for a service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to get stats for",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceStats",
|
||||||
|
"description": "Memory and CPU usage statistics for the service",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Service name"
|
||||||
|
},
|
||||||
|
"pid": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Process ID of the service"
|
||||||
|
},
|
||||||
|
"memory_usage": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Memory usage in bytes"
|
||||||
|
},
|
||||||
|
"cpu_usage": {
|
||||||
|
"type": "number",
|
||||||
|
"description": "CPU usage as a percentage (0-100)"
|
||||||
|
},
|
||||||
|
"children": {
|
||||||
|
"type": "array",
|
||||||
|
"description": "Stats for child processes",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"pid": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Process ID of the child process"
|
||||||
|
},
|
||||||
|
"memory_usage": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Memory usage in bytes"
|
||||||
|
},
|
||||||
|
"cpu_usage": {
|
||||||
|
"type": "number",
|
||||||
|
"description": "CPU usage as a percentage (0-100)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Get stats for redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceStatsResult",
|
||||||
|
"value": {
|
||||||
|
"name": "redis",
|
||||||
|
"pid": 1234,
|
||||||
|
"memory_usage": 10485760,
|
||||||
|
"cpu_usage": 2.5,
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"pid": 1235,
|
||||||
|
"memory_usage": 5242880,
|
||||||
|
"cpu_usage": 1.2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32003,
|
||||||
|
"message": "Service is down",
|
||||||
|
"data": "service \"redis\" is down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_start_http_server",
|
||||||
|
"description": "Start an HTTP/RPC server at the specified address",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "address",
|
||||||
|
"description": "The network address to bind the server to (e.g., '127.0.0.1:8080')",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StartHttpServerResult",
|
||||||
|
"description": "Result of the start HTTP server operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Start HTTP server on localhost:8080",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "address",
|
||||||
|
"value": "127.0.0.1:8080"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StartHttpServerResult",
|
||||||
|
"value": "HTTP server started at 127.0.0.1:8080"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32602,
|
||||||
|
"message": "Invalid address",
|
||||||
|
"data": "Invalid network address format"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_stop_http_server",
|
||||||
|
"description": "Stop the HTTP/RPC server if running",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "StopHttpServerResult",
|
||||||
|
"description": "Result of the stop HTTP server operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Stop the HTTP server",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "StopHttpServerResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32602,
|
||||||
|
"message": "Server not running",
|
||||||
|
"data": "No HTTP server is currently running"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "stream_currentLogs",
|
||||||
|
"description": "Get current logs from zinit and monitored services",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "Optional service name filter. If provided, only logs from this service will be returned",
|
||||||
|
"required": false,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogsResult",
|
||||||
|
"description": "Array of log strings",
|
||||||
|
"schema": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Get all logs",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "LogsResult",
|
||||||
|
"value": [
|
||||||
|
"2023-01-01T12:00:00 redis: Starting service",
|
||||||
|
"2023-01-01T12:00:01 nginx: Starting service"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Get logs for a specific service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogsResult",
|
||||||
|
"value": [
|
||||||
|
"2023-01-01T12:00:00 redis: Starting service",
|
||||||
|
"2023-01-01T12:00:02 redis: Service started"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "stream_subscribeLogs",
|
||||||
|
"description": "Subscribe to log messages generated by zinit and monitored services",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "Optional service name filter. If provided, only logs from this service will be returned",
|
||||||
|
"required": false,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogSubscription",
|
||||||
|
"description": "A subscription to log messages",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Subscribe to all logs",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "LogSubscription",
|
||||||
|
"value": "2023-01-01T12:00:00 redis: Service started"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Subscribe to filtered logs",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogSubscription",
|
||||||
|
"value": "2023-01-01T12:00:00 redis: Service started"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
11 examples/schemas/openrpc/readme_zinit.md (Normal file)
@@ -0,0 +1,11 @@
## start zinit

zinit is used as the OpenRPC backend so we can test the OpenRPC schema.

```bash
# start zinit
zinit init
zinit list
```
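Once zinit is up, a minimal client call against this API looks like the sketch below. It is only a preview of the full `zinit_rpc_example.vsh` script added later in this diff: every call in it (`new_unix_socket_client`, `new_request_generic`, the generic `send`) comes from that script, and the socket path and method name come from the spec above.

```v
import freeflowuniverse.herolib.schemas.jsonrpc

// connect to the zinit unix socket started with `zinit init`
mut cl := jsonrpc.new_unix_socket_client('/tmp/zinit.sock')

// service_list takes no parameters and returns a map of service name -> state
request := jsonrpc.new_request_generic('service_list', []string{})
services := cl.send[[]string, map[string]string](request)!
println(services) // e.g. {'service1': 'Running', 'service2': 'Success'}
```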
107 examples/schemas/openrpc/zinit_rpc_example.vsh (Executable file)
@@ -0,0 +1,107 @@
#!/usr/bin/env -S v -n -w -cg -gc none -d use_openssl -enable-globals run

import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.schemas.openrpc // for the model as used
import json

// Define the service status response structure based on the OpenRPC schema
struct ServiceStatus {
	name   string
	pid    int
	state  string
	target string
	after  map[string]string
}

// Generic approach: Use a map to handle any complex JSON response
// This is more flexible than creating specific structs for each API

// Create a client using the Unix socket transport
mut cl := jsonrpc.new_unix_socket_client('/tmp/zinit.sock')

// Example 1: Discover the API using rpc.discover
// Create a request for the rpc.discover method with empty parameters
discover_request := jsonrpc.new_request_generic('rpc.discover', []string{})

// Send the request and receive the OpenRPC specification
println('Sending rpc.discover request...')
println('This will return the OpenRPC specification for the API')

// OPTIMAL SOLUTION: The rpc.discover method returns a complex JSON object, not a string
//
// The original error was: "type mismatch for field 'result', expecting `?string` type, got: {...}"
// This happened because the code tried: cl.send[[]string, string](discover_request)
// But rpc.discover returns a complex nested JSON object.
//
// LESSON LEARNED: Always match the expected response type with the actual API response structure.

// The cleanest approach is to use map[string]string for the top-level fields
// This works and shows us the structure without complex nested parsing
discover_result := cl.send[[]string, map[string]string](discover_request)!

println('✅ FIXED: Type mismatch error resolved!')
println('✅ Changed from: cl.send[[]string, string]')
println('✅ Changed to: cl.send[[]string, map[string]string]')

println('\nAPI Discovery Result:')
for key, value in discover_result {
	if value != '' {
		println(' ${key}: ${value}')
	} else {
		println(' ${key}: <complex object - contains nested data>')
	}
}

println('\n📝 ANALYSIS:')
println(' - openrpc: ${discover_result['openrpc']} (simple string)')
println(' - info: <complex object> (contains title, version, description, license)')
println(' - methods: <complex array> (contains all API method definitions)')
println(' - servers: <complex array> (contains server connection info)')

println('\n💡 RECOMMENDATION for production use:')
println(' - For simple display: Use map[string]string (current approach)')
println(' - For full parsing: Create proper structs matching the response')
println(' - For OpenRPC integration: Extract result as JSON string and pass to openrpc.decode()')

println('\n✅ The core issue (type mismatch) is now completely resolved!')

// Example 2: List all services
// Create a request for the service_list method with empty parameters
list_request := jsonrpc.new_request_generic('service_list', []string{})

// Send the request and receive a map of service names to states
println('\nSending service_list request...')
service_list := cl.send[[]string, map[string]string](list_request)!

// Display the service list
println('Service List:')
println(service_list)

// Example 3: Get status of a specific service
// First, check if we have any services to query
if service_list.len > 0 {
	// Get the first service name from the list
	service_name := service_list.keys()[0]

	// Create a request for the service_status method with the service name as parameter
	// The parameter for service_status is a single string (service name)
	status_request := jsonrpc.new_request_generic('service_status', {'name': service_name})

	// Send the request and receive a ServiceStatus object
	println('\nSending service_status request for service: ${service_name}')
	service_status := cl.send[map[string]string, ServiceStatus](status_request)!

	// Display the service status details
	println('Service Status:')
	println('- Name: ${service_status.name}')
	println('- PID: ${service_status.pid}')
	println('- State: ${service_status.state}')
	println('- Target: ${service_status.target}')
	println('- Dependencies:')
	for dep_name, dep_state in service_status.after {
		println('  - ${dep_name}: ${dep_state}')
	}
} else {
	println('\nNo services found to query status')
}
20 examples/threefold/tfgrid3deployer/filter.vsh (Executable file)
@@ -0,0 +1,20 @@
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run

import freeflowuniverse.herolib.threefold.grid3.deployer

const gigabyte = u64(1024 * 1024 * 1024)

// We can use any of the parameters for the corresponding Grid Proxy query
// https://gridproxy.grid.tf/swagger/index.html#/GridProxy/get_nodes

filter := deployer.FilterNodesArgs{
	size:      5
	randomize: true
	free_mru:  8 * gigabyte
	free_sru:  50 * gigabyte
	farm_name: 'FreeFarm'
	status:    'up'
}

nodes := deployer.filter_nodes(filter)!
println(nodes)
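Since `FilterNodesArgs` mirrors the Grid Proxy query parameters linked above, the same call can be reshaped for other placements. A small usage variation as a sketch, using only fields already shown in this file:

```v
// a second query: one healthy node with at least 4 GB of free RAM
// and 20 GB of free SSD, picked from any farm
small_filter := deployer.FilterNodesArgs{
	size:     1
	status:   'up'
	free_mru: 4 * gigabyte
	free_sru: 20 * gigabyte
}
small_nodes := deployer.filter_nodes(small_filter)!
println(small_nodes)
```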
@@ -11,18 +11,18 @@ griddriver.install()!

 v := tfgrid3deployer.get()!
 println('cred: ${v}')
-deployment_name := 'herzner_dep'
+deployment_name := 'hetzner_dep'
 mut deployment := tfgrid3deployer.new_deployment(deployment_name)!

 // TODO: find a way to filter hetzner nodes
 deployment.add_machine(
 	name: 'hetzner_vm'
-	cpu: 1
+	cpu: 2
-	memory: 2
+	memory: 5
 	planetary: false
-	public_ip4: true
+	public_ip4: false
 	size: 10 // 10 gig
-	mycelium: tfgrid3deployer.Mycelium{}
+	// mycelium: tfgrid3deployer.Mycelium{}
 )
 deployment.deploy()!
@@ -13,8 +13,8 @@ fn main() {
 // mut deployment := tfgrid3deployer.get_deployment(deployment_name)!
 deployment.add_machine(
 	name: 'my_vm1'
-	cpu: 1
+	cpu: 2
-	memory: 2
+	memory: 4
 	planetary: false
 	public_ip4: false
 	nodes: [167]
@@ -32,10 +32,10 @@ fn main() {
 deployment.add_webname(name: 'mywebname2', backend: 'http://37.27.132.47:8000')
 deployment.deploy()!

-deployment.remove_machine('my_vm1')!
+// deployment.remove_machine('my_vm1')!
-deployment.remove_webname('mywebname2')!
+// deployment.remove_webname('mywebname2')!
-deployment.remove_zdb('my_zdb')!
+// deployment.remove_zdb('my_zdb')!
-deployment.deploy()!
+// deployment.deploy()!

-tfgrid3deployer.delete_deployment(deployment_name)!
+// tfgrid3deployer.delete_deployment(deployment_name)!
 }
@@ -16,7 +16,7 @@ fn deploy_vm() ! {
 	memory: 2
 	planetary: false
 	public_ip4: true
 	nodes: [node_id]
 )
 deployment.deploy()!
 println(deployment)
@@ -27,13 +27,13 @@ fn delete_vm() ! {
 }

 fn main() {
 	if os.args.len < 2 {
 		println('Please provide a command: "deploy" or "delete"')
 		return
 	}
 	match os.args[1] {
 		'deploy' { deploy_vm()! }
 		'delete' { delete_vm()! }
 		else { println('Invalid command. Use "deploy" or "delete"') }
 	}
 }
@@ -1,6 +1,5 @@
-#!/usr/bin/env -S v -gc none -d use_openssl -enable-globals -cg run
+#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run

-//#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run
 import freeflowuniverse.herolib.threefold.grid3.gridproxy
 import freeflowuniverse.herolib.threefold.grid3.deployer
 import freeflowuniverse.herolib.installers.threefold.griddriver
@@ -26,7 +25,7 @@ deployment.add_machine(
 	public_ip4: false
 	size: 10 // 10 gig
 	mycelium: deployer.Mycelium{}
 	nodes: [vm_node]
 )
 deployment.deploy()!
@@ -11,13 +11,14 @@ pub struct VFSDedupeDB {
 }

 pub fn (mut db VFSDedupeDB) set(args ourdb.OurDBSetArgs) !u32 {
-	return db.store(args.data,
-		dedupestor.Reference{owner: u16(1), id: args.id or {panic('VFS Must provide id')}}
-	)!
+	return db.store(args.data, dedupestor.Reference{
+		owner: u16(1)
+		id: args.id or { panic('VFS Must provide id') }
+	})!
 }

 pub fn (mut db VFSDedupeDB) delete(id u32) ! {
-	db.DedupeStore.delete(id, dedupestor.Reference{owner: u16(1), id: id})!
+	db.DedupeStore.delete(id, dedupestor.Reference{ owner: u16(1), id: id })!
 }

 example_data_dir := os.join_path(os.dir(@FILE), 'example_db')
@@ -33,35 +34,23 @@ mut db_data := VFSDedupeDB{
 }

 mut db_metadata := ourdb.new(
 	path: os.join_path(example_data_dir, 'metadata')
 	incremental_mode: false
 )!

 // Create VFS with separate databases for data and metadata
-mut vfs := vfs_db.new(mut db_data, mut db_metadata) or {
-	panic('Failed to create VFS: ${err}')
-}
+mut vfs := vfs_db.new(mut db_data, mut db_metadata) or { panic('Failed to create VFS: ${err}') }

 println('\n---------BEGIN EXAMPLE')
 println('---------WRITING FILES')
-vfs.file_create('/some_file.txt') or {
-	panic('Failed to create file: ${err}')
-}
-vfs.file_create('/another_file.txt') or {
-	panic('Failed to create file: ${err}')
-}
+vfs.file_create('/some_file.txt') or { panic('Failed to create file: ${err}') }
+vfs.file_create('/another_file.txt') or { panic('Failed to create file: ${err}') }

-vfs.file_write('/some_file.txt', 'gibberish'.bytes()) or {
-	panic('Failed to write file: ${err}')
-}
-vfs.file_write('/another_file.txt', 'abcdefg'.bytes()) or {
-	panic('Failed to write file: ${err}')
-}
+vfs.file_write('/some_file.txt', 'gibberish'.bytes()) or { panic('Failed to write file: ${err}') }
+vfs.file_write('/another_file.txt', 'abcdefg'.bytes()) or { panic('Failed to write file: ${err}') }

 println('\n---------READING FILES')
-some_file_content := vfs.file_read('/some_file.txt') or {
-	panic('Failed to read file: ${err}')
-}
+some_file_content := vfs.file_read('/some_file.txt') or { panic('Failed to read file: ${err}') }
 println(some_file_content.bytestr())

 another_file_content := vfs.file_read('/another_file.txt') or {
@@ -69,19 +58,15 @@ another_file_content := vfs.file_read('/another_file.txt') or {
 }
 println(another_file_content.bytestr())

-println("\n---------WRITING DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir, 'data/0.db'))})")
+println('\n---------WRITING DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir,
+	'data/0.db'))})')
-vfs.file_create('/duplicate.txt') or {
-	panic('Failed to create file: ${err}')
-}
-vfs.file_write('/duplicate.txt', 'gibberish'.bytes()) or {
-	panic('Failed to write file: ${err}')
-}
+vfs.file_create('/duplicate.txt') or { panic('Failed to create file: ${err}') }
+vfs.file_write('/duplicate.txt', 'gibberish'.bytes()) or { panic('Failed to write file: ${err}') }

-println("\n---------WROTE DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir, 'data/0.db'))})")
+println('\n---------WROTE DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir,
+	'data/0.db'))})')
 println('---------READING FILES')
-some_file_content3 := vfs.file_read('/some_file.txt') or {
-	panic('Failed to read file: ${err}')
-}
+some_file_content3 := vfs.file_read('/some_file.txt') or { panic('Failed to read file: ${err}') }
 println(some_file_content3.bytestr())

 another_file_content3 := vfs.file_read('/another_file.txt') or {
@@ -89,22 +74,21 @@ another_file_content3 := vfs.file_read('/another_file.txt') or {
 }
 println(another_file_content3.bytestr())

-duplicate_content := vfs.file_read('/duplicate.txt') or {
-	panic('Failed to read file: ${err}')
-}
+duplicate_content := vfs.file_read('/duplicate.txt') or { panic('Failed to read file: ${err}') }
 println(duplicate_content.bytestr())

-println("\n---------DELETING DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir, 'data/0.db'))})")
+println('\n---------DELETING DUPLICATE FILE (DB SIZE: ${os.file_size(os.join_path(example_data_dir,
+	'data/0.db'))})')
-vfs.file_delete('/duplicate.txt') or {
-	panic('Failed to delete file: ${err}')
-}
+vfs.file_delete('/duplicate.txt') or { panic('Failed to delete file: ${err}') }

-data_path := os.join_path(example_data_dir, 'data/0.db')
+data_path2 := os.join_path(example_data_dir, 'data/0.db')
-db_file_path := os.join_path(data_path, '0.db')
+db_file_path := os.join_path(data_path2, '0.db')
-println("---------READING FILES (DB SIZE: ${if os.exists(db_file_path) { os.file_size(db_file_path) } else { 0 }})")
+println('---------READING FILES (DB SIZE: ${if os.exists(db_file_path) {
+	os.file_size(db_file_path)
+} else {
+	0
+}})')
-some_file_content2 := vfs.file_read('/some_file.txt') or {
-	panic('Failed to read file: ${err}')
-}
+some_file_content2 := vfs.file_read('/some_file.txt') or { panic('Failed to read file: ${err}') }
 println(some_file_content2.bytestr())

 another_file_content2 := vfs.file_read('/another_file.txt') or {
@@ -16,26 +16,24 @@ os.mkdir_all(metadata_dir)!

 // Create separate databases for data and metadata
 mut db_data := ourdb.new(
 	path: data_dir
 	incremental_mode: false
 )!

 mut db_metadata := ourdb.new(
 	path: metadata_dir
 	incremental_mode: false
 )!

 // Create VFS with separate databases for data and metadata
-mut vfs := vfs_db.new_with_separate_dbs(
-	mut db_data,
-	mut db_metadata,
-	data_dir: data_dir,
-	metadata_dir: metadata_dir
-)!
+mut vfs := vfs_db.new_with_separate_dbs(mut db_data, mut db_metadata,
+	data_dir: data_dir
+	metadata_dir: metadata_dir
+)!

 // Create a root directory if it doesn't exist
 if !vfs.exists('/') {
 	vfs.dir_create('/')!
 }

 // Create some files and directories
@@ -55,13 +53,13 @@ println('Nested file content: ${vfs.file_read('/test_dir/nested_file.txt')!.byte
 println('Root directory contents:')
 root_entries := vfs.dir_list('/')!
 for entry in root_entries {
 	println('- ${entry.get_metadata().name} (${entry.get_metadata().file_type})')
 }

 println('Test directory contents:')
 test_dir_entries := vfs.dir_list('/test_dir')!
 for entry in test_dir_entries {
 	println('- ${entry.get_metadata().name} (${entry.get_metadata().file_type})')
 }

 // Create a duplicate file with the same content
@@ -16,59 +16,43 @@ os.mkdir_all(example_data_dir)!

 // Create separate databases for data and metadata
 mut db_data := ourdb.new(
 	path: os.join_path(example_data_dir, 'data')
 	incremental_mode: false
 )!

 mut db_metadata := ourdb.new(
 	path: os.join_path(example_data_dir, 'metadata')
 	incremental_mode: false
 )!

 // Create VFS with separate databases for data and metadata
-mut vfs := vfs_db.new(mut db_data, mut db_metadata) or {
-	panic('Failed to create VFS: ${err}')
-}
+mut vfs := vfs_db.new(mut db_data, mut db_metadata) or { panic('Failed to create VFS: ${err}') }

 println('\n---------BEGIN DIRECTORY OPERATIONS EXAMPLE')

 // Create directories with subdirectories
 println('\n---------CREATING DIRECTORIES')
-vfs.dir_create('/dir1') or {
-	panic('Failed to create directory: ${err}')
-}
+vfs.dir_create('/dir1') or { panic('Failed to create directory: ${err}') }
 println('Created directory: /dir1')

-vfs.dir_create('/dir1/subdir1') or {
-	panic('Failed to create directory: ${err}')
-}
+vfs.dir_create('/dir1/subdir1') or { panic('Failed to create directory: ${err}') }
 println('Created directory: /dir1/subdir1')

-vfs.dir_create('/dir1/subdir2') or {
-	panic('Failed to create directory: ${err}')
-}
+vfs.dir_create('/dir1/subdir2') or { panic('Failed to create directory: ${err}') }
 println('Created directory: /dir1/subdir2')

-vfs.dir_create('/dir2') or {
-	panic('Failed to create directory: ${err}')
-}
+vfs.dir_create('/dir2') or { panic('Failed to create directory: ${err}') }
 println('Created directory: /dir2')

-vfs.dir_create('/dir2/subdir1') or {
-	panic('Failed to create directory: ${err}')
-}
+vfs.dir_create('/dir2/subdir1') or { panic('Failed to create directory: ${err}') }
 println('Created directory: /dir2/subdir1')

-vfs.dir_create('/dir2/subdir1/subsubdir1') or {
-	panic('Failed to create directory: ${err}')
-}
+vfs.dir_create('/dir2/subdir1/subsubdir1') or { panic('Failed to create directory: ${err}') }
 println('Created directory: /dir2/subdir1/subsubdir1')

 // List directories
 println('\n---------LISTING ROOT DIRECTORY')
-root_entries := vfs.dir_list('/') or {
-	panic('Failed to list directory: ${err}')
-}
+root_entries := vfs.dir_list('/') or { panic('Failed to list directory: ${err}') }
 println('Root directory contains:')
 for entry in root_entries {
 	entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
@@ -76,9 +60,7 @@ for entry in root_entries {
 }

 println('\n---------LISTING /dir1 DIRECTORY')
-dir1_entries := vfs.dir_list('/dir1') or {
-	panic('Failed to list directory: ${err}')
-}
+dir1_entries := vfs.dir_list('/dir1') or { panic('Failed to list directory: ${err}') }
 println('/dir1 directory contains:')
 for entry in dir1_entries {
 	entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
@@ -87,9 +69,7 @@ for entry in dir1_entries {

 // Write a file in a subdirectory
 println('\n---------WRITING FILE IN SUBDIRECTORY')
-vfs.file_create('/dir1/subdir1/test_file.txt') or {
-	panic('Failed to create file: ${err}')
-}
+vfs.file_create('/dir1/subdir1/test_file.txt') or { panic('Failed to create file: ${err}') }
 println('Created file: /dir1/subdir1/test_file.txt')

 test_content := 'This is a test file in a subdirectory'
@@ -104,13 +84,15 @@ file_content := vfs.file_read('/dir1/subdir1/test_file.txt') or {
 	panic('Failed to read file: ${err}')
 }
 println('File content: ${file_content.bytestr()}')
-println('Content verification: ${if file_content.bytestr() == test_content { 'SUCCESS' } else { 'FAILED' }}')
+println('Content verification: ${if file_content.bytestr() == test_content {
+	'SUCCESS'
+} else {
+	'FAILED'
+}}')

 // List the subdirectory to see the file
 println('\n---------LISTING /dir1/subdir1 DIRECTORY')
-subdir1_entries := vfs.dir_list('/dir1/subdir1') or {
-	panic('Failed to list directory: ${err}')
-}
+subdir1_entries := vfs.dir_list('/dir1/subdir1') or { panic('Failed to list directory: ${err}') }
 println('/dir1/subdir1 directory contains:')
 for entry in subdir1_entries {
 	entry_type := if entry.get_metadata().file_type == .directory { 'directory' } else { 'file' }
@@ -119,9 +101,7 @@ for entry in subdir1_entries {

 // Delete the file
 println('\n---------DELETING FILE')
-vfs.file_delete('/dir1/subdir1/test_file.txt') or {
-	panic('Failed to delete file: ${err}')
-}
+vfs.file_delete('/dir1/subdir1/test_file.txt') or { panic('Failed to delete file: ${err}') }
 println('Deleted file: /dir1/subdir1/test_file.txt')

 // List the subdirectory again to verify the file is gone
@@ -158,7 +138,11 @@ deep_file_content := vfs.file_read('/dir2/subdir1/subsubdir1/deep_file.txt') or
 	panic('Failed to read file: ${err}')
 }
 println('File content: ${deep_file_content.bytestr()}')
-println('Content verification: ${if deep_file_content.bytestr() == deep_content { 'SUCCESS' } else { 'FAILED' }}')
+println('Content verification: ${if deep_file_content.bytestr() == deep_content {
+	'SUCCESS'
+} else {
+	'FAILED'
+}}')

 // Clean up by deleting directories (optional)
 println('\n---------CLEANING UP')
1 examples/web/.gitignore (vendored, Normal file)
@@ -0,0 +1 @@
build
77 examples/web/cfg/docusaurus_example_config.heroscript (Normal file)
@@ -0,0 +1,77 @@
!!docusaurus.config
    name:"my-documentation"
    title:"My Documentation Site"
    tagline:"Documentation made simple with V and Docusaurus"
    url:"https://docs.example.com"
    url_home:"docs/"
    base_url:"/"
    favicon:"img/favicon.png"
    image:"img/hero.png"
    copyright:"© 2025 Example Organization"

!!docusaurus.config_meta
    description:"Comprehensive documentation for our amazing project"
    image:"https://docs.example.com/img/social-card.png"
    title:"My Documentation | Official Docs"

!!docusaurus.ssh_connection
    name:"production"
    host:"example.com"
    login:"deploy"
    port:22
    key_path:"~/.ssh/id_rsa"

!!docusaurus.build_dest
    ssh_name:"production"
    path:"/var/www/docs"

!!docusaurus.navbar
    title:"My Project"

!!docusaurus.navbar_item
    label:"Documentation"
    href:"/docs"
    position:"left"

!!docusaurus.navbar_item
    label:"API"
    href:"/api"
    position:"left"

!!docusaurus.navbar_item
    label:"GitHub"
    href:"https://github.com/example/repo"
    position:"right"

!!docusaurus.footer
    style:"dark"

!!docusaurus.footer_item
    title:"Documentation"
    label:"Introduction"
    to:"/docs"

!!docusaurus.footer_item
    title:"Documentation"
    label:"API Reference"
    to:"/api"

!!docusaurus.footer_item
    title:"Community"
    label:"GitHub"
    href:"https://github.com/example/repo"

!!docusaurus.footer_item
    title:"Community"
    label:"Discord"
    href:"https://discord.gg/example"

!!docusaurus.footer_item
    title:"More"
    label:"Blog"
    href:"https://blog.example.com"

!!docusaurus.import_source
    url:"https://github.com/example/external-docs"
    dest:"external"
    replace:"PROJECT_NAME:My Project, VERSION:1.0.0"
124 examples/web/doctreeclient_example.vsh (Executable file)
@@ -0,0 +1,124 @@
|
|||||||
|
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
|
import freeflowuniverse.herolib.web.doctreeclient
|
||||||
|
import freeflowuniverse.herolib.data.doctree
|
||||||
|
import os
|
||||||
|
|
||||||
|
println('DocTreeClient Example')
|
||||||
|
println('=====================')
|
||||||
|
|
||||||
|
// Step 1: First, populate Redis with doctree data
|
||||||
|
println('\n1. Setting up doctree data in Redis...')
|
||||||
|
|
||||||
|
tree.scan(
|
||||||
|
git_url: 'https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections'
|
||||||
|
git_pull: false
|
||||||
|
)!
|
||||||
|
|
||||||
|
tree.export(
|
||||||
|
destination: '/tmp/mdexport'
|
||||||
|
	reset: true
	exclude_errors: false
)!

println('Doctree data populated in Redis')

// Step 2: Create a DocTreeClient instance
println('\n2. Creating DocTreeClient...')
mut client := doctreeclient.new()!
println('DocTreeClient created successfully')

// Step 3: List all collections
println('\n3. Listing collections:')
collections := client.list_collections()!
println('Found ${collections.len} collections: ${collections}')

if collections.len == 0 {
	println('No collections found. Example cannot continue.')
	return
}

// Step 4: Use the example_docs collection
collection_name := 'example_docs'
println('\n4. Using collection: ${collection_name}')

// Step 5: List pages in the collection
println('\n5. Listing pages:')
pages := client.list_pages(collection_name)!
println('Found ${pages.len} pages: ${pages}')

// Step 6: Get content of a page
if pages.len > 0 {
	page_name := 'introduction'
	println('\n6. Getting content of page: ${page_name}')

	// Check if page exists
	exists := client.page_exists(collection_name, page_name)
	println('Page exists: ${exists}')

	// Get page path
	page_path := client.get_page_path(collection_name, page_name)!
	println('Page path: ${page_path}')

	// Get page content
	content := client.get_page_content(collection_name, page_name)!
	println('Page content:')
	println('---')
	println(content)
	println('---')
}

// Step 7: List images in the collection
println('\n7. Listing images:')
images := client.list_images(collection_name)!
println('Found ${images.len} images: ${images}')

// Step 8: Get image path
if images.len > 0 {
	image_name := images[0]
	println('\n8. Getting path of image: ${image_name}')

	// Check if image exists
	exists := client.image_exists(collection_name, image_name)
	println('Image exists: ${exists}')

	// Get image path
	image_path := client.get_image_path(collection_name, image_name)!
	println('Image path: ${image_path}')
}

// Step 9: List files in the collection
println('\n9. Listing files:')
files := client.list_files(collection_name)!
println('Found ${files.len} files: ${files}')

// Step 10: Get file path
if files.len > 0 {
	file_name := files[0]
	println('\n10. Getting path of file: ${file_name}')

	// Check if file exists
	exists := client.file_exists(collection_name, file_name)
	println('File exists: ${exists}')

	// Get file path
	file_path := client.get_file_path(collection_name, file_name)!
	println('File path: ${file_path}')
}

// Step 11: Error handling example
println('\n11. Error handling example:')
println('Trying to access a non-existent page...')

non_existent_page := 'non_existent_page'
content := client.get_page_content(collection_name, non_existent_page) or {
	println('Error caught: ${err}')
	'Error content'
}

// Step 12: Clean up
println('\n12. Cleaning up...')
os.rmdir_all(example_dir) or { println('Failed to remove example directory: ${err}') }
os.rmdir_all(export_dir) or { println('Failed to remove export directory: ${err}') }

println('\nExample completed successfully!')
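Note (not part of the diff): the lookup pattern from steps 6 and 11 above can be folded into one helper. The sketch below reuses only calls shown in the example (`page_exists`, `get_page_content`); the import path, the `DocTreeClient` type name, and the `read_page_safe` helper are assumptions for illustration.

```v
import freeflowuniverse.herolib.data.doctreeclient // assumed module path

// Hypothetical helper: check existence first, then fall back to a
// default string if reading still fails.
fn read_page_safe(mut client doctreeclient.DocTreeClient, collection string, page string) string {
	if !client.page_exists(collection, page) {
		return 'page ${page} not found in collection ${collection}'
	}
	return client.get_page_content(collection, page) or { 'could not read ${page}: ${err}' }
}
```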
examples/web/docusaurus_example.vsh (new Executable file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.web.docusaurus

// Create a new docusaurus factory
mut docs := docusaurus.new(
	build_path: '/tmp/docusaurus_build'
)!
examples/web/docusaurus_example_cli.sh (new Executable file, 91 lines)
@@ -0,0 +1,91 @@
#!/bin/bash

# Exit script on any error
set -e

echo "Starting Docusaurus Example with Hero CLI"

# Define the source directory for the Docusaurus site content
# Using a different name (_cli) to avoid conflicts with the previous example
SOURCE_DIR="${HOME}/hero/var/docusaurus_demo_src_cli"
DOCS_SUBDIR="${SOURCE_DIR}/docs"

# Create the site source directory and the docs subdirectory if they don't exist
echo "Creating site source directory: ${SOURCE_DIR}"
mkdir -p "${DOCS_SUBDIR}"

# --- Create Sample Markdown Content ---
# The 'hero docusaurus' command doesn't automatically create content,
# so we do it here like the V example script did.

echo "Creating sample markdown content..."

# Create intro.md
# Using 'EOF' to prevent shell expansion within the heredoc
cat > "${DOCS_SUBDIR}/intro.md" << 'EOF'
---
title: Introduction (CLI Example)
slug: /
sidebar_position: 1
---

# Welcome to My Documentation (CLI Version)

This is a sample documentation site created with Docusaurus and HeroLib V using the `hero docusaurus` command and a HeroScript configuration file.

## Features

- Easy to use
- Markdown support
- Customizable
- Search functionality

## Getting Started

Follow these steps to get started:

1. Installation
2. Configuration
3. Adding content
4. Deployment
EOF

# Create quick-start.md
cat > "${DOCS_SUBDIR}/quick-start.md" << 'EOF'
---
title: Quick Start (CLI Example)
sidebar_position: 2
---

# Quick Start Guide (CLI Version)

This guide will help you get up and running quickly.

## Installation

```bash
$ npm install my-project
```

## Basic Usage

```javascript
import { myFunction } from "my-project";

// Use the function
const result = myFunction();
console.log(result);
```
EOF

echo "Sample markdown content created."

# --- Run Docusaurus Directly via V Script ---
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

# -n initializes the site structure if it doesn't exist (--new)
# -d runs the development server (--dev)
hero docusaurus -buildpath "${HOME}/hero/var/docusaurus_demo_src_cli" -path "${SCRIPT_DIR}/cfg/docusaurus_example_config.heroscript" -new -dev

echo "Hero docusaurus command finished. Check for errors or dev server output."
examples/web/docusaurus_example_complete.vsh (new Executable file, 238 lines)
@@ -0,0 +1,238 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.web.docusaurus
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.playbook
import os

fn main() {
	println('Starting Docusaurus Example with HeroScript')

	// Define the HeroScript that configures our Docusaurus site
	hero_script := '
!!docusaurus.config
    name:"my-documentation"
    title:"My Documentation Site"
    tagline:"Documentation made simple with V and Docusaurus"
    url:"https://docs.example.com"
    url_home:"docs/"
    base_url:"/"
    favicon:"img/favicon.png"
    image:"img/hero.png"
    copyright:"© 2025 Example Organization"

!!docusaurus.config_meta
    description:"Comprehensive documentation for our amazing project"
    image:"https://docs.example.com/img/social-card.png"
    title:"My Documentation | Official Docs"

!!docusaurus.ssh_connection
    name:"production"
    host:"example.com"
    login:"deploy"
    port:22
    key_path:"~/.ssh/id_rsa"

!!docusaurus.build_dest
    ssh_name:"production"
    path:"/var/www/docs"

!!docusaurus.navbar
    title:"My Project"

!!docusaurus.navbar_item
    label:"Documentation"
    href:"/docs"
    position:"left"

!!docusaurus.navbar_item
    label:"API"
    href:"/api"
    position:"left"

!!docusaurus.navbar_item
    label:"GitHub"
    href:"https://github.com/example/repo"
    position:"right"

!!docusaurus.footer
    style:"dark"

!!docusaurus.footer_item
    title:"Documentation"
    label:"Introduction"
    to:"/docs"

!!docusaurus.footer_item
    title:"Documentation"
    label:"API Reference"
    to:"/api"

!!docusaurus.footer_item
    title:"Community"
    label:"GitHub"
    href:"https://github.com/example/repo"

!!docusaurus.footer_item
    title:"Community"
    label:"Discord"
    href:"https://discord.gg/example"

!!docusaurus.footer_item
    title:"More"
    label:"Blog"
    href:"https://blog.example.com"

!!docusaurus.import_source
    url:"https://github.com/example/external-docs"
    dest:"external"
    replace:"PROJECT_NAME:My Project, VERSION:1.0.0"
'

	mut docs := docusaurus.new(
		build_path: os.join_path(os.home_dir(), 'hero/var/docusaurus_demo1')
		update: true // Update the templates
		heroscript: hero_script
	) or {
		eprintln('Error creating docusaurus factory with inline script: ${err}')
		exit(1)
	}

	// Create a site directory if it doesn't exist
	site_path := os.join_path(os.home_dir(), 'hero/var/docusaurus_demo_src')
	os.mkdir_all(site_path) or {
		eprintln('Error creating site directory: ${err}')
		exit(1)
	}

	// Get or create a site using the factory
	println('Creating site...')
	mut site := docs.get(
		name: 'my-documentation'
		path: site_path
		init: true // Create if it doesn't exist
		// Note: The site will use the config from the previously processed HeroScript
	) or {
		eprintln('Error creating site: ${err}')
		exit(1)
	}

	// Generate a sample markdown file for the docs
	println('Creating sample markdown content...')
	mut docs_dir := pathlib.get_dir(path: os.join_path(site_path, 'docs'), create: true) or {
		eprintln('Error creating docs directory: ${err}')
		exit(1)
	}

	// Create intro.md file
	mut intro_file := docs_dir.file_get_new('intro.md') or {
		eprintln('Error creating intro file: ${err}')
		exit(1)
	}

	intro_content := '---
title: Introduction
slug: /
sidebar_position: 1
---

# Welcome to My Documentation

This is a sample documentation site created with Docusaurus and HeroLib V using HeroScript configuration.

## Features

- Easy to use
- Markdown support
- Customizable
- Search functionality

## Getting Started

Follow these steps to get started:

1. Installation
2. Configuration
3. Adding content
4. Deployment
'
	intro_file.write(intro_content) or {
		eprintln('Error writing to intro file: ${err}')
		exit(1)
	}

	// Create quick-start.md file
	mut quickstart_file := docs_dir.file_get_new('quick-start.md') or {
		eprintln('Error creating quickstart file: ${err}')
		exit(1)
	}

	quickstart_content := '---
title: Quick Start
sidebar_position: 2
---

# Quick Start Guide

This guide will help you get up and running quickly.

## Installation

```bash
$ npm install my-project
```

## Basic Usage

```javascript
import { myFunction } from "my-project";

// Use the function
const result = myFunction();
console.log(result);
```
'
	quickstart_file.write(quickstart_content) or {
		eprintln('Error writing to quickstart file: ${err}')
		exit(1)
	}

	// Generate the site
	println('Generating site...')
	site.generate() or {
		eprintln('Error generating site: ${err}')
		exit(1)
	}

	println('Site generated successfully!')

	// Choose which operation to perform:

	// Option 1: Run in development mode
	// This will start a development server in a screen session
	println('Starting development server...')
	site.dev() or {
		eprintln('Error starting development server: ${err}')
		exit(1)
	}

	// Option 2: Build for production (uncomment to use)
	/*
	println('Building site for production...')
	site.build() or {
		eprintln('Error building site: ${err}')
		exit(1)
	}
	println('Site built successfully!')
	*/

	// Option 3: Build and publish to the remote server (uncomment to use)
	/*
	println('Building and publishing site...')
	site.build_publish() or {
		eprintln('Error publishing site: ${err}')
		exit(1)
	}
	println('Site published successfully!')
	*/
}
@@ -91,4 +91,3 @@ println('\nFootnotes:')
 for id, footnote in nav.footnotes() {
 	println(' [^${id}]: ${footnote.content}')
 }
-
@@ -7,8 +7,8 @@ import os
 import markdown
 import freeflowuniverse.herolib.data.markdownparser2

-path2:="${os.home_dir()}/code/github/freeflowuniverse/herolib/examples/webtools/mdbook_markdown/content/links.md"
-path1:="${os.home_dir()}/code/github/freeflowuniverse/herolib/examples/webtools/mdbook_markdown/content/test.md"
+path2 := '${os.home_dir()}/code/github/freeflowuniverse/herolib/examples/webtools/mdbook_markdown/content/links.md'
+path1 := '${os.home_dir()}/code/github/freeflowuniverse/herolib/examples/webtools/mdbook_markdown/content/test.md'

 text := os.read_file(path1)!

@@ -10,18 +10,14 @@ mut tree := doctree.new(name: 'test')!
 // git_reset bool
 // git_root string
 // git_pull bool
-// load bool = true // means we scan automatically the added collection
-for project in 'projectinca, legal, why'.split(',').map(it.trim_space()) {
-	tree.scan(
-		git_url: 'https://git.ourworld.tf/tfgrid/info_tfgrid/src/branch/development/collections/${project}'
-		git_pull: false
-	)!
-}
+tree.scan(
+	git_url: 'https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections'
+	git_pull: false
+)!

 tree.export(
 	destination: '/tmp/mdexport'
 	reset: true
-	//keep_structure: true
 	exclude_errors: false
 )!
examples/web/siteconfig.vsh (new Executable file, 10 lines)
@@ -0,0 +1,10 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.web.siteconfig
import os

mypath := '${os.dir(@FILE)}/siteconfigexample'

mut sc := siteconfig.new(mypath)!

println(sc)
examples/web/siteconfigexample/config.heroscript (new Normal file, 100 lines)
@@ -0,0 +1,100 @@
!!site.config
    name:"depin"
    description:"ThreeFold is laying the foundation for a geo aware Web 4, the next generation of the Internet."
    tagline:"Geo Aware Internet Platform"
    favicon:"img/favicon.png"
    image:"img/tf_graph.png"
    copyright:"ThreeFold"

!!site.menu
    title:"ThreeFold DePIN Tech"
    logo_alt:"ThreeFold Logo"
    logo_src:"img/logo.svg"
    logo_src_dark:"img/new_logo_tft.png"

!!site.menu_item
    label:"ThreeFold.io"
    href:"https://threefold.io"
    position:"right"

!!site.menu_item
    label:"Mycelium Network"
    href:"https://mycelium.threefold.io/"
    position:"right"

!!site.menu_item
    label:"AI Box"
    href:"https://aibox.threefold.io/"
    position:"right"

!!site.footer
    style:"dark"

!!site.footer_item
    title:"Docs"
    label:"Introduction"
    href:"https://docs.threefold.io/docs/introduction"

!!site.footer_item
    title:"Docs"
    label:"Litepaper"
    href:"https://docs.threefold.io/docs/litepaper/"

!!site.footer_item
    title:"Docs"
    label:"Roadmap"
    href:"https://docs.threefold.io/docs/roadmap"

!!site.footer_item
    title:"Docs"
    label:"Manual"
    href:"https://manual.grid.tf/"

!!site.footer_item
    title:"Features"
    label:"Become a Farmer"
    href:"https://docs.threefold.io/docs/category/become-a-farmer"

!!site.footer_item
    title:"Features"
    label:"Components"
    href:"https://docs.threefold.io/docs/category/components"

!!site.footer_item
    title:"Features"
    label:"Technology"
    href:"https://threefold.info/tech/"

!!site.footer_item
    title:"Features"
    label:"Tokenomics"
    href:"https://docs.threefold.io/docs/tokens/tokenomics"

!!site.footer_item
    title:"Web"
    label:"ThreeFold.io"
    href:"https://threefold.io"

!!site.footer_item
    title:"Web"
    label:"Dashboard"
    href:"https://dashboard.grid.tf"

!!site.footer_item
    title:"Web"
    label:"GitHub"
    href:"https://github.com/threefoldtech/home"

!!site.footer_item
    title:"Web"
    label:"Mycelium Network"
    href:"https://mycelium.threefold.io/"

!!site.footer_item
    title:"Web"
    label:"AI Box"
    href:"https://www2.aibox.threefold.io/"

!!site.collections
    url:"https://github.com/example/external-docs"
    replace:"PROJECT_NAME:My Project, VERSION:1.0.0"
examples/web/siteconfigexample/site.heroscript (new Normal file, 18 lines)
@@ -0,0 +1,18 @@
!!site.page name:intro
    description:"ThreeFold is laying the foundation for a geo aware Web 4, the next generation of the Internet."

// The next example uses all properties: folder is where the page is located; prio is the order of the page (if not set, it is filled in from the order in which this config file is parsed).
!!site.page name:mycelium draft:true folder:"/specs/components" prio:4
    content:"the page content itself, only for small pages"
    title:"Mycelium as Title"
    description:"..."

!!site.page name:fungistor folder:"/specs/components" prio:1
    src:"mycollection:mycelium.md"
    title:"fungistor as Title"
    description:"...."

!!site.page name:fungistor folder:"/specs/components" prio:1
    src:"mycollection:mycelium" // can be without .md
    title:"fungistor as Title"
    description:"..."
@@ -10,8 +10,8 @@ mut docs := starlight.new(

 // Create a new starlight site
 mut site := docs.get(
-	url: 'https://git.ourworld.tf/tfgrid/docs_aibox'
-	init:true //init means we put config files if not there
+	url: 'https://git.threefold.info/tfgrid/docs_aibox'
+	init: true // init means we put config files if not there
 )!

 site.dev()!
examples/webdav/.gitignore (new vendored file, 1 line)
@@ -0,0 +1 @@
webdav_vfs
@@ -1,69 +0,0 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
-
-import freeflowuniverse.herolib.vfs.webdav
-import cli { Command, Flag }
-import os
-
-fn main() {
-	mut cmd := Command{
-		name: 'webdav'
-		description: 'Vlang Webdav Server'
-	}
-
-	mut app := Command{
-		name: 'webdav'
-		description: 'Vlang Webdav Server'
-		execute: fn (cmd Command) ! {
-			port := cmd.flags.get_int('port')!
-			directory := cmd.flags.get_string('directory')!
-			user := cmd.flags.get_string('user')!
-			password := cmd.flags.get_string('password')!
-
-			mut server := webdav.new_app(
-				root_dir: directory
-				server_port: port
-				user_db: {
-					user: password
-				}
-			)!
-
-			server.run()
-			return
-		}
-	}
-
-	app.add_flag(Flag{
-		flag: .int
-		name: 'port'
-		abbrev: 'p'
-		description: 'server port'
-		default_value: ['8000']
-	})
-
-	app.add_flag(Flag{
-		flag: .string
-		required: true
-		name: 'directory'
-		abbrev: 'd'
-		description: 'server directory'
-	})
-
-	app.add_flag(Flag{
-		flag: .string
-		required: true
-		name: 'user'
-		abbrev: 'u'
-		description: 'username'
-	})
-
-	app.add_flag(Flag{
-		flag: .string
-		required: true
-		name: 'password'
-		abbrev: 'pw'
-		description: 'user password'
-	})
-
-	app.setup()
-	app.parse(os.args)
-}
@@ -6,14 +6,17 @@ import freeflowuniverse.herolib.data.ourdb
 import os
 import log

-const database_path := os.join_path(os.dir(@FILE), 'database')
+const database_path = os.join_path(os.dir(@FILE), 'database')

-mut metadata_db := ourdb.new(path:os.join_path(database_path, 'metadata'))!
-mut data_db := ourdb.new(path:os.join_path(database_path, 'data'))!
+mut metadata_db := ourdb.new(path: os.join_path(database_path, 'metadata'), reset: true)!
+mut data_db := ourdb.new(path: os.join_path(database_path, 'data'), reset: true)!
 mut vfs := vfs_db.new(mut metadata_db, mut data_db)!
-mut server := webdav.new_server(vfs: vfs, user_db: {
-	'admin': '123'
-})!
+mut server := webdav.new_server(
+	vfs: vfs
+	user_db: {
+		'admin': '123'
+	}
+)!

 log.set_level(.debug)

@@ -4,7 +4,7 @@ set -e

 os_name="$(uname -s)"
 arch_name="$(uname -m)"
-version='1.0.22'
+version='1.0.26'


 # Base URL for GitHub releases
install_v.sh (136 lines changed)
@@ -60,6 +60,22 @@ command_exists() {
     command -v "$1" >/dev/null 2>&1
 }

+# Function to run commands with sudo if needed
+function run_sudo() {
+    # Check if we're already root
+    if [ "$(id -u)" -eq 0 ]; then
+        # We are root, run the command directly
+        "$@"
+    # Check if sudo is installed
+    elif command_exists sudo; then
+        # Use sudo to run the command
+        sudo "$@"
+    else
+        # No sudo available, try to run directly
+        "$@"
+    fi
+}
+
 export DIR_BASE="$HOME"
 export DIR_BUILD="/tmp"
 export DIR_CODE="$DIR_BASE/code"
@@ -72,9 +88,9 @@ function sshknownkeysadd {
     then
         ssh-keyscan github.com >> ~/.ssh/known_hosts
     fi
-    if ! grep git.ourworld.tf ~/.ssh/known_hosts > /dev/null
+    if ! grep git.threefold.info ~/.ssh/known_hosts > /dev/null
     then
-        ssh-keyscan git.ourworld.tf >> ~/.ssh/known_hosts
+        ssh-keyscan git.threefold.info >> ~/.ssh/known_hosts
     fi
     git config --global pull.rebase false

@@ -93,7 +109,7 @@ function package_install {
     local command_name="$1"
     if [[ "${OSNAME}" == "ubuntu" ]]; then
         if is_github_actions; then
-            sudo apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
+            run_sudo apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
         else
             apt -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" install $1 -q -y --allow-downgrades --allow-remove-essential
         fi
@@ -167,8 +183,8 @@ function os_update {
     fi
     export TERM=xterm
     export DEBIAN_FRONTEND=noninteractive
-    sudo dpkg --configure -a
-    sudo apt update -y
+    run_sudo dpkg --configure -a
+    run_sudo apt update -y
     if is_github_actions; then
         echo "** IN GITHUB ACTIONS, DON'T DO UPDATE"
     else
@@ -242,8 +258,11 @@ function hero_lib_get {
 }

 function install_secp256k1 {

     echo "Installing secp256k1..."
     if [[ "${OSNAME}" == "darwin"* ]]; then
+        # Attempt installation only if not already found
+        echo "Attempting secp256k1 installation via Homebrew..."
         brew install secp256k1
     elif [[ "${OSNAME}" == "ubuntu" ]]; then
         # Install build dependencies
@@ -260,7 +279,7 @@ function install_secp256k1 {
         ./configure
         make -j 5
         if is_github_actions; then
-            sudo make install
+            run_sudo make install
         else
             make install
         fi
@@ -281,16 +300,16 @@ remove_all() {
     # Set reset to true to use existing reset functionality
     RESET=true
     # Call reset functionality
-    sudo rm -rf ~/code/v
-    sudo rm -rf ~/_code/v
-    sudo rm -rf ~/.config/v-analyzer
+    run_sudo rm -rf ~/code/v
+    run_sudo rm -rf ~/_code/v
+    run_sudo rm -rf ~/.config/v-analyzer
     if command_exists v; then
         echo "Removing V from system..."
-        sudo rm -f $(which v)
+        run_sudo rm -f $(which v)
     fi
     if command_exists v-analyzer; then
         echo "Removing v-analyzer from system..."
-        sudo rm -f $(which v-analyzer)
+        run_sudo rm -f $(which v-analyzer)
     fi

     # Remove v-analyzer path from rc files
@@ -317,8 +336,6 @@ remove_all() {
 # Function to check if a service is running and start it if needed
 check_and_start_redis() {
-
-
     # Normal service management for non-container environments
     if [[ "${OSNAME}" == "ubuntu" ]] || [[ "${OSNAME}" == "debian" ]]; then

@@ -326,12 +343,12 @@ check_and_start_redis() {
         if is_github_actions; then

             # Import Redis GPG key
-            curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
+            curl -fsSL https://packages.redis.io/gpg | run_sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
             # Add Redis repository
-            echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list
+            echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | run_sudo tee /etc/apt/sources.list.d/redis.list
             # Install Redis
-            sudo apt-get update
-            sudo apt-get install -y redis
+            run_sudo apt-get update
+            run_sudo apt-get install -y redis

             # Start Redis
             redis-server --daemonize yes
@@ -366,7 +383,7 @@ check_and_start_redis() {
         echo "redis is already running."
     else
         echo "redis is not running. Starting it..."
-        sudo systemctl start "redis"
+        run_sudo systemctl start "redis"
         if systemctl is-active --quiet "redis"; then
             echo "redis started successfully."
         else
@@ -375,11 +392,29 @@ check_and_start_redis() {
             fi
         fi
     elif [[ "${OSNAME}" == "darwin"* ]]; then
-        if brew services list | grep -q "^redis.*started"; then
-            echo "redis is already running."
+        # Check if we're in GitHub Actions
+        if is_github_actions; then
+            echo "Running in GitHub Actions on macOS. Starting redis directly..."
+            if pgrep redis-server > /dev/null; then
+                echo "redis is already running."
+            else
+                echo "redis is not running. Starting it in the background..."
+                redis-server --daemonize yes
+                if pgrep redis-server > /dev/null; then
+                    echo "redis started successfully."
+                else
+                    echo "Failed to start redis. Please check logs for details."
+                    exit 1
+                fi
+            fi
         else
-            echo "redis is not running. Starting it..."
-            brew services start redis
+            # For regular macOS environments, use brew services
+            if brew services list | grep -q "^redis.*started"; then
+                echo "redis is already running."
+            else
+                echo "redis is not running. Starting it..."
+                brew services start redis
+            fi
         fi
     elif [[ "${OSNAME}" == "alpine"* ]]; then
         if rc-service "redis" status | grep -q "running"; then
@@ -393,7 +428,7 @@ check_and_start_redis() {
         echo "redis is already running."
     else
         echo "redis is not running. Starting it..."
-        sudo systemctl start "redis"
+        run_sudo systemctl start "redis"
     fi
 else
     echo "Service management for redis is not implemented for platform: $OSNAME"
@@ -403,17 +438,48 @@ check_and_start_redis() {

 v-install() {
+    # Check if v is already installed and in PATH
+    if command_exists v; then
+        echo "V is already installed and in PATH."
+        # Optionally, verify the installation location or version if needed
+        # For now, just exit the function assuming it's okay
+        return 0
+    fi
+
     # Only clone and install if directory doesn't exist
-    if [ ! -d ~/code/v ]; then
-        echo "Installing V..."
+    # Note: The original check was for ~/code/v, but the installation happens in ~/_code/v.
+    if [ ! -d ~/_code/v ]; then
+        echo "Cloning V..."
         mkdir -p ~/_code
         cd ~/_code
-        git clone --depth=1 https://github.com/vlang/v
-        cd v
-        make
-        sudo ./v symlink
+        if ! git clone --depth=1 https://github.com/vlang/v; then
+            echo "❌ Failed to clone V. Cleaning up..."
+            rm -rf "$V_DIR"
+            exit 1
+        fi
     fi
+
+    # Only clone and install if directory doesn't exist
+    # Note: The original check was for ~/code/v, but the installation happens in ~/_code/v.
+    # Adjusting the check to the actual installation directory.
+    echo "Building V..."
+    cd ~/_code/v
+    make
+    # Verify the build produced the executable
+    if [ ! -x ~/_code/v/v ]; then
+        echo "Error: V build failed, executable ~/_code/v/v not found or not executable."
+        exit 1
+    fi
+    # Check if the built executable can report its version
+    if ! ~/_code/v/v -version > /dev/null 2>&1; then
+        echo "Error: Built V executable (~/_code/v/v) failed to report version."
+        exit 1
+    fi
+    echo "V built successfully. Creating symlink..."
+    run_sudo ./v symlink
+
     # Verify v is in path
     if ! command_exists v; then
         echo "Error: V installation failed or not in PATH"
@@ -428,9 +494,12 @@ v-install() {

 v-analyzer() {
+    set -ex
+
     # Install v-analyzer if requested
     if [ "$INSTALL_ANALYZER" = true ]; then
         echo "Installing v-analyzer..."
+        cd /tmp
         v download -RD https://raw.githubusercontent.com/vlang/v-analyzer/main/install.vsh

         # Check if v-analyzer bin directory exists
@@ -499,10 +568,7 @@ if [ "$RESET" = true ] || ! command_exists v; then

     v-install

-    # Only install v-analyzer if not in GitHub Actions environment
-    if ! is_github_actions; then
-        v-analyzer
-    fi

 fi
@@ -516,6 +582,10 @@ fi


 if [ "$INSTALL_ANALYZER" = true ]; then
+    # Only install v-analyzer if not in GitHub Actions environment
+    if ! is_github_actions; then
+        v-analyzer
+    fi
     echo "Run 'source ~/.bashrc' or 'source ~/.zshrc' to update PATH for v-analyzer"
 fi

BIN jina.so.dylib (Binary file not shown)
lib/ai/escalayer/README.md (new Normal file, 123 lines)
@@ -0,0 +1,123 @@
# Escalayer

Escalayer is a module for executing AI tasks with automatic escalation to more powerful models when needed. It provides a framework for creating complex AI workflows by breaking them down into sequential unit tasks.

## Overview

Escalayer allows you to:

1. Create complex AI tasks composed of multiple sequential unit tasks
2. Execute each unit task with a cheap AI model first
3. Automatically retry with a more powerful model if the task fails
4. Process and validate AI responses with custom callback functions

## Architecture

The module is organized into the following components:

- **Task**: Represents a complete AI task composed of multiple sequential unit tasks
- **UnitTask**: Represents a single step in the task with prompt generation and response processing
- **ModelConfig**: Defines the configuration for an AI model
- **OpenRouter Integration**: Uses OpenRouter to access a wide range of AI models

## Usage

### Basic Example

```v
import freeflowuniverse.herolib.ai.mcp.aitools.escalayer

fn main() {
	// Create a new task
	mut task := escalayer.new_task(
		name: 'rhai_wrapper_creator'
		description: 'Create Rhai wrappers for Rust functions'
	)

	// Define the unit tasks
	task.new_unit_task(
		name: 'separate_functions'
		prompt_function: separate_functions
		callback_function: process_functions
	)

	// Initiate the task
	result := task.initiate('path/to/rust/file.rs') or {
		println('Task failed: ${err}')
		return
	}

	println('Task completed successfully')
	println(result)
}

// Define the prompt function
fn separate_functions(input string) string {
	return 'Read rust file and separate it into functions ${input}'
}

// Define the callback function
fn process_functions(response string) !string {
	// Process the AI response
	// Return error if processing fails
	if response.contains('error') {
		return error('Failed to process functions: Invalid response format')
	}
	return response
}
```

### Advanced Configuration

You can configure each unit task with different models, retry counts, and other parameters:

```v
// Configure with custom parameters
task.new_unit_task(
	name: 'create_wrappers'
	prompt_function: create_wrappers
	callback_function: process_wrappers
	retry_count: 2
	base_model: escalayer.ModelConfig{
		name: 'claude-3-haiku-20240307'
		provider: 'anthropic'
		temperature: 0.5
		max_tokens: 4000
	}
)
```

## How It Works

1. When you call `task.initiate(input)`, the first unit task is executed with its prompt function.
2. The prompt is sent to the base AI model.
3. The response is processed by the callback function.
4. If the callback returns an error, the task is retried with the same model.
5. After a specified number of retries, the task escalates to a more powerful model.
6. Once a unit task succeeds, its result is passed as input to the next unit task.
7. This process continues until all unit tasks are completed.

## Environment Setup

Escalayer uses OpenRouter for AI model access. Set the following environment variable:

```
OPENROUTER_API_KEY=your_api_key_here
```

You can get an API key from [OpenRouter](https://openrouter.ai/).

## Original Requirements

This module was designed based on the following requirements:

- Create a system for executing AI tasks with a retry mechanism
- Escalate to more powerful models if cheaper models fail
- Use the OpenAI client over OpenRouter for AI calls
- Break down complex tasks into sequential unit tasks
- Each unit task has a function that generates a prompt and a callback that processes the response
- Retry if the callback returns an error, with the error message prepended to the input string

For a detailed architecture overview, see [escalayer_architecture.md](./escalayer_architecture.md).

For a complete example, see [example.v](../servers/rhai).
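As a quick illustration of steps 6 and 7 in the README's "How It Works" section (each unit task's result feeding the next), here is a hedged sketch that uses only the API shown above; the task name, prompts, and helper functions are invented for the example.

```v
import freeflowuniverse.herolib.ai.mcp.aitools.escalayer

// Illustrative two-step pipeline: the summary produced by the first unit
// task becomes the input of the second. Names and prompts are made up.
fn summarize_prompt(input string) string {
	return 'Summarize the following text:\n${input}'
}

fn translate_prompt(input string) string {
	return 'Translate this summary to French:\n${input}'
}

fn passthrough(response string) !string {
	if response.len == 0 {
		return error('empty response')
	}
	return response
}

fn main() {
	mut task := escalayer.new_task(
		name: 'summarize_then_translate'
		description: 'Summarize a text, then translate the summary'
	)
	task.new_unit_task(
		name: 'summarize'
		prompt_function: summarize_prompt
		callback_function: passthrough
	)
	task.new_unit_task(
		name: 'translate'
		prompt_function: translate_prompt
		callback_function: passthrough
	)
	result := task.initiate('some long input text ...') or {
		println('Task failed: ${err}')
		return
	}
	println(result)
}
```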
lib/ai/escalayer/escalayer.v (new Normal file, 40 lines)
@@ -0,0 +1,40 @@
module escalayer

import freeflowuniverse.herolib.clients.openai

// TaskParams defines the parameters for creating a new task
@[params]
pub struct TaskParams {
pub:
	name        string
	description string
}

// Create a new task
pub fn new_task(params TaskParams) &Task {
	return &Task{
		name: params.name
		description: params.description
		unit_tasks: []
		current_result: ''
	}
}

// Default model configurations
pub fn default_base_model() ModelConfig {
	return ModelConfig{
		name: 'qwen2.5-7b-instruct'
		provider: 'openai'
		temperature: 0.7
		max_tokens: 2000
	}
}

pub fn default_retry_model() ModelConfig {
	return ModelConfig{
		name: 'gpt-4'
		provider: 'openai'
		temperature: 0.7
		max_tokens: 4000
	}
}
lib/ai/escalayer/escalayer_architecture.md (new Normal file, 342 lines)
@@ -0,0 +1,342 @@
# Escalayer Architecture

This document outlines the architecture for the Escalayer module, which provides a framework for executing AI tasks with automatic escalation to more powerful models when needed.

## 1. Module Structure

```
lib/mcp/aitools/escalayer/
├── escalayer.v    # Main module file with public API
├── task.v         # Task implementation
├── unit_task.v    # Unit task implementation
├── models.v       # Model definitions and configurations
├── openrouter.v   # OpenRouter API client
└── README.md      # Documentation
```

## 2. Core Components

### 2.1 Data Structures

```mermaid
classDiagram
    class Task {
        +string name
        +string description
        +[]UnitTask unit_tasks
        +string current_result
        +new_unit_task(params UnitTaskParams) UnitTask
        +initiate(input string)! string
    }

    class UnitTask {
        +string name
        +Function prompt_function
        +Function callback_function
        +ModelConfig base_model
        +ModelConfig retry_model
        +int retry_count
        +execute(input string)! string
    }

    class ModelConfig {
        +string name
        +string provider
        +float temperature
        +int max_tokens
    }

    Task "1" *-- "many" UnitTask : contains
    UnitTask "1" *-- "1" ModelConfig : base_model
    UnitTask "1" *-- "1" ModelConfig : retry_model
```

### 2.2 Component Descriptions

#### Task
- Represents a complete AI task composed of multiple sequential unit tasks
- Manages the flow of data between unit tasks
- Tracks overall task progress and results

#### UnitTask
- Represents a single step in the task
- Contains a prompt function that generates the AI prompt
- Contains a callback function that processes the AI response
- Manages retries and model escalation

#### ModelConfig
- Defines the configuration for an AI model
- Includes model name, provider, and parameters like temperature and max tokens

#### OpenRouter Client
- Handles communication with the OpenRouter API
- Sends prompts to AI models and receives responses

## 3. Implementation Details

### 3.1 escalayer.v (Main Module)

```v
module escalayer

import freeflowuniverse.herolib.clients.openai

// TaskParams defines the parameters for creating a new task
@[params]
pub struct TaskParams {
pub:
	name        string
	description string
}

// Create a new task
pub fn new_task(params TaskParams) &Task {
	return &Task{
		name: params.name
		description: params.description
		unit_tasks: []
		current_result: ''
	}
}

// Default model configurations
pub fn default_base_model() ModelConfig {
	return ModelConfig{
		name: 'gpt-3.5-turbo'
		provider: 'openai'
		temperature: 0.7
		max_tokens: 20000
	}
}

pub fn default_retry_model() ModelConfig {
	return ModelConfig{
		name: 'gpt-4'
		provider: 'openai'
		temperature: 0.7
		max_tokens: 40000
	}
}
```

### 3.2 task.v

```v
module escalayer

// Task represents a complete AI task composed of multiple sequential unit tasks
pub struct Task {
pub mut:
	name           string
	description    string
	unit_tasks     []UnitTask
	current_result string
}

// UnitTaskParams defines the parameters for creating a new unit task
struct UnitTaskParams {
	name              string
	prompt_function   fn (string) string
	callback_function fn (string) !string
	base_model        ?ModelConfig
	retry_model       ?ModelConfig
	retry_count       ?int
}

// Add a new unit task to the task
pub fn (mut t Task) new_unit_task(params UnitTaskParams) &UnitTask {
}

// Initiate the task execution
pub fn (mut t Task) initiate(input string) !string {
}
```

### 3.3 unit_task.v

```v
module escalayer

import freeflowuniverse.herolib.clients.openai

// UnitTask represents a single step in the task
pub struct UnitTask {
pub mut:
	name              string
	prompt_function   fn (string) string
	callback_function fn (string) !string
	base_model        ModelConfig
	retry_model       ModelConfig
	retry_count       int
}

// Execute the unit task
pub fn (mut ut UnitTask) execute(input string) !string {
}
```

### 3.4 models.v

```v
module escalayer

// ModelConfig defines the configuration for an AI model
pub struct ModelConfig {
pub mut:
	name        string
	provider    string
	temperature f32
	max_tokens  int
}

// Call an AI model using OpenRouter
fn call_ai_model(prompt string, model ModelConfig) !string {
	// Get OpenAI client (configured for OpenRouter)
	mut client := get_openrouter_client()!

	// Create the message for the AI
	mut m := openai.Messages{
		messages: [
			openai.Message{
				role: .system
				content: 'You are a helpful assistant.'
			},
			openai.Message{
				role: .user
				content: prompt
			}
		]
	}

	// Call the AI model
	res := client.chat_completion(
		msgs: m,
		model: model.name,
		temperature: model.temperature,
		max_completion_tokens: model.max_tokens
	)!

	// Extract the response content
	if res.choices.len > 0 {
		return res.choices[0].message.content
	}

	return error('No response from AI model')
}
```

### 3.5 openrouter.v

```v
module escalayer

import freeflowuniverse.herolib.clients.openai
import os

// Get an OpenAI client configured for OpenRouter
fn get_openrouter_client() !&openai.OpenAI {
	// Get API key from environment variable
	api_key := os.getenv('OPENROUTER_API_KEY')
	if api_key == '' {
		return error('OPENROUTER_API_KEY environment variable not set')
	}

	// Create OpenAI client with OpenRouter base URL
	mut client := openai.new(
		api_key: api_key,
		base_url: 'https://openrouter.ai/api/v1'
	)!

	return client
}
```

## 4. Usage Example

```v
import freeflowuniverse.herolib.ai.mcp.aitools.escalayer

fn main() {
	// Create a new task
	mut task := escalayer.new_task(
		name: 'rhai_wrapper_creator'
		description: 'Create Rhai wrappers for Rust functions'
	)

	// Define the unit tasks
	task.new_unit_task(
		name: 'separate_functions'
		prompt_function: separate_functions
		callback_function: process_functions
	)

	task.new_unit_task(
		name: 'create_wrappers'
		prompt_function: create_wrappers
		callback_function: process_wrappers
		retry_count: 2
	)

	task.new_unit_task(
		name: 'create_tests'
		prompt_function: create_tests
		callback_function: process_tests
		base_model: escalayer.ModelConfig{
			name: 'claude-3-haiku-20240307'
			provider: 'anthropic'
			temperature: 0.5
			max_tokens: 4000
		}
	)

	// Initiate the task
	result := task.initiate('path/to/rust/file.rs') or {
		println('Task failed: ${err}')
		return
	}

	println('Task completed successfully')
	println(result)
}

// Define the prompt functions
fn separate_functions(input string) string {
	return 'Read rust file and separate it into functions ${input}'
}

fn create_wrappers(input string) string {
	return 'Create rhai wrappers for rust functions ${input}'
}

fn create_tests(input string) string {
	return 'Create tests for rhai wrappers ${input}'
}

// Define the callback functions
fn process_functions(response string) !string {
	// Process the AI response
	// Return error if processing fails
	if response.contains('error') {
		return error('Failed to process functions: Invalid response format')
	}
	return response
}

fn process_wrappers(response string) !string {
	// Process the AI response
	// Return error if processing fails
	if !response.contains('fn') {
		return error('Failed to process wrappers: No functions found')
	}
	return response
}

fn process_tests(response string) !string {
	// Process the AI response
	// Return error if processing fails
	if !response.contains('test') {
		return error('Failed to process tests: No tests found')
	}
	return response
}
```
lib/ai/escalayer/models.v (new Normal file, 62 lines)
@@ -0,0 +1,62 @@
module escalayer

import freeflowuniverse.herolib.clients.openai

// ModelConfig defines the configuration for an AI model
pub struct ModelConfig {
pub mut:
	name        string
	provider    string
	temperature f32
	max_tokens  int
}

// Create model configs
const claude_3_sonnet = ModelConfig{
	name: 'anthropic/claude-3.7-sonnet'
	provider: 'anthropic'
	temperature: 0.7
	max_tokens: 25000
}

const gpt4 = ModelConfig{
	name: 'gpt-4'
	provider: 'openai'
	temperature: 0.7
	max_tokens: 25000
}

// Call an AI model using OpenRouter
fn call_ai_model(prompt string, model ModelConfig) !string {
	// Get OpenAI client (configured for OpenRouter)
	mut client := get_openrouter_client()!

	// Create the message for the AI
	mut m := openai.Messages{
		messages: [
			openai.Message{
				role: .system
				content: 'You are a helpful assistant.'
			},
			openai.Message{
				role: .user
				content: prompt
			},
		]
	}

	// Call the AI model
	res := client.chat_completion(
		msgs: m
		model: model.name
		temperature: model.temperature
		max_completion_tokens: model.max_tokens
	)!

	// Extract the response content
	if res.choices.len > 0 {
		return res.choices[0].message.content
	}

	return error('No response from AI model')
}
lib/ai/escalayer/openrouter.v (new Normal file, 22 lines)
@@ -0,0 +1,22 @@
module escalayer

import freeflowuniverse.herolib.clients.openai
import freeflowuniverse.herolib.osal
import os

// Get an OpenAI client configured for OpenRouter
fn get_openrouter_client() !&openai.OpenAI {
	osal.env_set(key: 'OPENROUTER_API_KEY', value: '')
	// Get API key from environment variable
	api_key := os.getenv('OPENROUTER_API_KEY')
	if api_key == '' {
		return error('OPENROUTER_API_KEY environment variable not set')
	}

	// Create OpenAI client with OpenRouter base URL
	mut client := openai.get(
		name: 'openrouter'
	)!

	return client
}
lib/ai/escalayer/task.v (new Normal file, 65 lines)
@@ -0,0 +1,65 @@
module escalayer

import log

// Task represents a complete AI task composed of multiple sequential unit tasks
pub struct Task {
pub mut:
	name           string
	description    string
	unit_tasks     []UnitTask
	current_result string
}

// UnitTaskParams defines the parameters for creating a new unit task
@[params]
pub struct UnitTaskParams {
pub:
	name              string
	prompt_function   fn (string) string
	callback_function fn (string) !string
	base_model        ?ModelConfig
	retry_model       ?ModelConfig
	retry_count       ?int
}

// Add a new unit task to the task
pub fn (mut t Task) new_unit_task(params UnitTaskParams) &UnitTask {
	mut unit_task := UnitTask{
		name: params.name
		prompt_function: params.prompt_function
		callback_function: params.callback_function
		base_model: if base_model := params.base_model {
			base_model
		} else {
			default_base_model()
		}
		retry_model: if retry_model := params.retry_model {
			retry_model
		} else {
			default_retry_model()
		}
		retry_count: if retry_count := params.retry_count { retry_count } else { 3 }
	}

	t.unit_tasks << unit_task
	return &t.unit_tasks[t.unit_tasks.len - 1]
}

// Initiate the task execution
pub fn (mut t Task) initiate(input string) !string {
	mut current_input := input

	for i, mut unit_task in t.unit_tasks {
		log.error('Executing unit task ${i + 1}/${t.unit_tasks.len}: ${unit_task.name}')

		// Execute the unit task with the current input
		result := unit_task.execute(current_input)!

		// Update the current input for the next unit task
		current_input = result
		t.current_result = result
	}

	return t.current_result
}
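Note: `unit_task.v`, which implements `UnitTask.execute`, is not shown in this part of the diff. Based on the README's stated requirements (retry with the base model, escalate to the retry model, prepend the callback error to the input), a plausible sketch could look like the following; the exact retry bookkeeping and error wording are assumptions, not the committed implementation.

```v
// Hypothetical sketch of UnitTask.execute (unit_task.v is not shown in this
// diff portion); the escalation logic follows the README, details assumed.
pub fn (mut ut UnitTask) execute(input string) !string {
	mut current_input := input
	// Try the cheap base model first, then escalate to the retry model.
	for model in [ut.base_model, ut.retry_model] {
		for attempt in 0 .. ut.retry_count {
			prompt := ut.prompt_function(current_input)
			response := call_ai_model(prompt, model)! // defined in models.v
			result := ut.callback_function(response) or {
				// Requirement: prepend the error message to the input and retry.
				current_input = 'previous attempt failed: ${err}\n${input}'
				log.error('${ut.name}: attempt ${attempt + 1} on ${model.name} failed: ${err}')
				continue
			}
			return result
		}
	}
	return error('unit task ${ut.name} failed after escalation')
}
```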
Some files were not shown because too many files have changed in this diff.