diff --git a/examples/develop/heroprompt/heroprompt_example.vsh b/examples/develop/heroprompt/heroprompt_example.vsh
new file mode 100755
index 00000000..bee22d7c
--- /dev/null
+++ b/examples/develop/heroprompt/heroprompt_example.vsh
@@ -0,0 +1,37 @@
+#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
+
+import freeflowuniverse.herolib.develop.heroprompt
+
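+// Create a session with one workspace, add a few dirs under docker, select files, then build a prompt over the selection.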
+mut session := heroprompt.new_session()
+
+mut workspace1 := session.add_workspace()!
+// TODO: check the bug when passing an explicit workspace name
+// mut workspace2 := session.add_workspace(name: 'withname')!
+
+mut dir1 := workspace1.add_dir(path: '/Users/mahmoud/code/github/freeflowuniverse/herolib/docker')!
+dir1.select_file(name: 'docker_ubuntu_install.sh')!
+
+mut dir2 := workspace1.add_dir(
+ path: '/Users/mahmoud/code/github/freeflowuniverse/herolib/docker/herolib'
+)!
+
+dir2.select_file(name: '.gitignore')!
+dir2.select_file(name: 'build.sh')!
+dir2.select_file(name: 'debug.sh')!
+
+mut dir3 := workspace1.add_dir(
+ path: '/Users/mahmoud/code/github/freeflowuniverse/herolib/docker/postgresql'
+ select_all_dirs: true
+ select_all_files: false
+ select_all: false
+)!
+
+dir3.select_file(name: 'docker-compose.yml')!
+
+selected := workspace1.get_selected()
+
+prompt := workspace1.prompt(
+	text: 'Using the selected files, I want you to get all print statements'
+)
+
+println(prompt)
diff --git a/lib/develop/heroprompt/heroprompt_dir.v b/lib/develop/heroprompt/heroprompt_dir.v
new file mode 100644
index 00000000..0ce43742
--- /dev/null
+++ b/lib/develop/heroprompt/heroprompt_dir.v
@@ -0,0 +1,40 @@
+module heroprompt
+
+import os
+import freeflowuniverse.herolib.core.pathlib
+
+@[params]
+pub struct AddFileParams {
+pub mut:
+ name string
+}
+
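+// select_file marks a file inside this dir as selected: it reads the file content and appends a HeropromptFile to dir.files.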
+pub fn (mut dir HeropromptDir) select_file(args AddFileParams) !&HeropromptFile {
+ mut full_path := dir.path.path + '/' + args.name
+ if dir.path.path.ends_with('/') {
+ full_path = dir.path.path + args.name
+ }
+
+ if !os.exists(full_path) {
+		return error('File ${full_path} does not exist')
+ }
+
+ if !os.is_file(full_path) {
+ return error('Provided path ${full_path} is not a file')
+ }
+
+ file_content := os.read_file(full_path)!
+
+ file := &HeropromptFile{
+ path: pathlib.Path{
+ path: full_path
+ cat: .file
+ exist: .yes
+ }
+ name: args.name
+ content: file_content
+ }
+
+ dir.files << file
+ return file
+}
diff --git a/lib/develop/heroprompt/heroprompt_file.v b/lib/develop/heroprompt/heroprompt_file.v
new file mode 100644
index 00000000..b8da43fe
--- /dev/null
+++ b/lib/develop/heroprompt/heroprompt_file.v
@@ -0,0 +1 @@
+module heroprompt
diff --git a/lib/develop/heroprompt/heroprompt_session.v b/lib/develop/heroprompt/heroprompt_session.v
new file mode 100644
index 00000000..32d4a817
--- /dev/null
+++ b/lib/develop/heroprompt/heroprompt_session.v
@@ -0,0 +1,23 @@
+module heroprompt
+
+import rand
+
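+// HeropromptSession groups one or more workspaces under a randomly generated session id.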
+pub struct HeropromptSession {
+pub mut:
+ id string
+ workspaces []&HeropromptWorkspace
+}
+
+pub fn new_session() HeropromptSession {
+ return HeropromptSession{
+ id: rand.uuid_v4()
+ workspaces: []
+ }
+}
+
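+// add_workspace creates a workspace (a random name is generated when none is passed) and registers it on the session.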
+pub fn (mut self HeropromptSession) add_workspace(args_ NewWorkspaceParams) !&HeropromptWorkspace {
+ mut wsp := &HeropromptWorkspace{}
+ wsp = wsp.new(args_)!
+ self.workspaces << wsp
+ return wsp
+}
diff --git a/lib/develop/heroprompt/heroprompt_workspace.v b/lib/develop/heroprompt/heroprompt_workspace.v
new file mode 100644
index 00000000..2e7c5677
--- /dev/null
+++ b/lib/develop/heroprompt/heroprompt_workspace.v
@@ -0,0 +1,219 @@
+module heroprompt
+
+import rand
+import time
+import os
+import freeflowuniverse.herolib.core.pathlib
+
+@[heap]
+pub struct HeropromptWorkspace {
+pub mut:
+ name string = 'default'
+ dirs []&HeropromptDir
+}
+
+@[params]
+pub struct NewWorkspaceParams {
+pub mut:
+ name string
+}
+
+/// Create a new workspace
+/// If the name is not passed, we will generate a random one
+fn (wsp HeropromptWorkspace) new(args_ NewWorkspaceParams) !&HeropromptWorkspace {
+ mut args := args_
+ if args.name.len == 0 {
+ args.name = generate_random_workspace_name()
+ }
+
+ workspace := get(name: args.name)!
+ return workspace
+}
+
+@[params]
+pub struct AddDirParams {
+pub mut:
+ path string @[required]
+}
+
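+// add_dir registers an existing directory on the workspace, normalizing the path to an absolute one first.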
+pub fn (mut wsp HeropromptWorkspace) add_dir(args_ AddDirParams) !&HeropromptDir {
+ if args_.path.len == 0 {
+ return error('The dir path is required')
+ }
+
+ if !os.exists(args_.path) {
+		return error('The provided path ${args_.path} does not exist')
+ }
+
+ // Normalize absolute path
+ abs_path := os.real_path(args_.path)
+
+ parts := abs_path.split(os.path_separator)
+ dir_name := parts[parts.len - 1]
+
+ added_dir := &HeropromptDir{
+ path: pathlib.Path{
+ path: abs_path
+ cat: .dir
+ exist: .yes
+ }
+ name: dir_name
+ }
+
+ wsp.dirs << added_dir
+ return added_dir
+}
+
+pub struct SelectedFilesMetadata {
+ content_length int
+ extension string
+ name string
+ path string
+}
+
+pub struct SelectedDirsMetadata {
+ name string
+ selected_files []SelectedFilesMetadata
+}
+
+pub struct HeropromptWorkspaceGetSelected {
+pub mut:
+ dirs []SelectedDirsMetadata
+}
+
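+// get_selected returns metadata (name, path, extension, content length) for every selected file, grouped per dir.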
+pub fn (wsp HeropromptWorkspace) get_selected() HeropromptWorkspaceGetSelected {
+ mut result := HeropromptWorkspaceGetSelected{}
+
+ for dir in wsp.dirs {
+ mut files := []SelectedFilesMetadata{}
+ for file in dir.files {
+ files << SelectedFilesMetadata{
+ content_length: file.content.len
+ extension: get_file_extension(file.name)
+ name: file.name
+ path: file.path.path
+ }
+ }
+
+ result.dirs << SelectedDirsMetadata{
+ name: dir.name
+ selected_files: files
+ }
+ }
+
+ return result
+}
+
+pub struct HeropromptWorkspacePrompt {
+pub mut:
+ text string
+}
+
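+// prompt builds the prompt text for this workspace from the user text and the currently selected files.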
+pub fn (wsp HeropromptWorkspace) prompt(args HeropromptWorkspacePrompt) string {
+ prompt := wsp.build_prompt(args.text)
+ return prompt
+}
+
+// Placeholder for future needs, in case we need to wrap the user_instructions block with additional messages
+fn (wsp HeropromptWorkspace) build_user_instructions(text string) string {
+ return text
+}
+
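+// build_file_map renders the selected dirs and files as an ASCII tree, marking selected files with a trailing '*'.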
+fn build_file_map(dirs []&HeropromptDir, prefix string) string {
+ mut out := ''
+
+ for i, dir in dirs {
+ // Determine the correct tree connector
+ connector := if i == dirs.len - 1 { '└── ' } else { '├── ' }
+
+ // Directory name
+ out += '${prefix}${connector}${dir.name}\n'
+
+ // Files in this directory
+ for j, file in dir.files {
+ file_connector := if j == dir.files.len - 1 && dir.dirs.len == 0 {
+ '└── '
+ } else {
+ '├── '
+ }
+ out += '${prefix} ${file_connector}${file.name} *\n'
+ }
+
+ // Recurse into subdirectories
+ if dir.dirs.len > 0 {
+ new_prefix := if i == dirs.len - 1 { prefix + ' ' } else { prefix + '│ ' }
+ out += build_file_map(dir.dirs, new_prefix)
+ }
+ }
+
+ return out
+}
+
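+// Placeholder: will return the content of the selected files; currently returns an empty string.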
+fn (wsp HeropromptWorkspace) build_file_content() string {
+ return ''
+}
+
+fn (wsp HeropromptWorkspace) build_prompt(text string) string {
+ user_instructions := wsp.build_user_instructions(text)
+ file_map := build_file_map(wsp.dirs, '')
+ file_contents := wsp.build_file_content()
+
+	// TODO: read the prompt template and merge user_instructions, file_map and file_contents into it
+ return file_map
+}
+
+/// Generate a random name for the workspace
+fn generate_random_workspace_name() string {
+ adjectives := [
+ 'brave',
+ 'bright',
+ 'clever',
+ 'swift',
+ 'noble',
+ 'mighty',
+ 'fearless',
+ 'bold',
+ 'wise',
+ 'epic',
+ 'valiant',
+ 'fierce',
+ 'legendary',
+ 'heroic',
+ 'dynamic',
+ ]
+ nouns := [
+ 'forge',
+ 'script',
+ 'ocean',
+ 'phoenix',
+ 'atlas',
+ 'quest',
+ 'shield',
+ 'dragon',
+ 'code',
+ 'summit',
+ 'path',
+ 'realm',
+ 'spark',
+ 'anvil',
+ 'saga',
+ ]
+
+ // Seed randomness with time
+ rand.seed([u32(time.now().unix()), u32(time.now().nanosecond)])
+
+ adj := adjectives[rand.intn(adjectives.len) or { 0 }]
+ noun := nouns[rand.intn(nouns.len) or { 0 }]
+ number := rand.intn(100) or { 0 } // 0–99
+
+ return '${adj}_${noun}_${number}'
+}
+
+fn get_file_extension(filename string) string {
+ parts := filename.split('.')
+ if parts.len < 2 {
+		// Handle files with no extension, such as Dockerfile or LICENSE
+ return ''
+ }
+ return parts[parts.len - 1]
+}
diff --git a/lib/develop/heroprompt/reprompt_do.v b/lib/develop/heroprompt/reprompt_actions.v
similarity index 87%
rename from lib/develop/heroprompt/reprompt_do.v
rename to lib/develop/heroprompt/reprompt_actions.v
index 323c058d..4d4f357b 100644
--- a/lib/develop/heroprompt/reprompt_do.v
+++ b/lib/develop/heroprompt/reprompt_actions.v
@@ -10,3 +10,7 @@ fn (mut ws HeropromptWorkspace) heroprompt() !string {
// TODO: fill in template based on selection
return ''
}
+
+pub fn get_tree() {}
+
+pub fn format_prompt() {}
diff --git a/lib/develop/heroprompt/reprompt_model.v b/lib/develop/heroprompt/reprompt_model.v
index 8556a2b3..474d33c1 100644
--- a/lib/develop/heroprompt/reprompt_model.v
+++ b/lib/develop/heroprompt/reprompt_model.v
@@ -1,7 +1,7 @@
module heroprompt
import freeflowuniverse.herolib.data.paramsparser
-import freeflowuniverse.herolib.data.encoderhero
+// import freeflowuniverse.herolib.data.encoderhero // temporarily commented out
import freeflowuniverse.herolib.core.pathlib
import os
@@ -11,38 +11,60 @@ const default = true
// THIS THE THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
-@[heap]
-pub struct HeropromptWorkspace {
+pub struct HeropromptFile {
pub mut:
- name string = 'default'
- dirs []HeropromptDir
+ content string
+ path pathlib.Path
+ name string
}
pub struct HeropromptDir {
pub mut:
- path pathlib.Path
- selections []string // paths selected in the HeropromptDir
+ name string
+ path pathlib.Path
+ files []&HeropromptFile
+ dirs []&HeropromptDir
}
+// pub fn (wsp HeropromptWorkspace) to_tag() {
+// tag := HeropromptTags.file_map
+// // We need to pass it to the template
+// }
+
+// // pub fn (dir HeropromptDir) to_tag() {
+// // tag := HeropromptTags.file_content
+// // // We need to pass it to the template
+// // }
+
+// pub fn (fil HeropromptFile) to_tag() {
+// tag := HeropromptTags.file_content
+// // We need to pass it to the template
+// }
+
+// pub enum HeropromptTags {
+// file_map
+// file_content
+// user_instructions
+// }
+
// your checking & initialization code if needed
fn obj_init(mycfg_ HeropromptWorkspace) !HeropromptWorkspace {
mut mycfg := mycfg_
- if mycfg.password == '' && mycfg.secret == '' {
- return error('password or secret needs to be filled in for ${mycfg.name}')
- }
return mycfg
}
/////////////NORMALLY NO NEED TO TOUCH
+// TODO: Check the compiler issue with the encode/decode
pub fn heroscript_dumps(obj HeropromptWorkspace) !string {
- // create heroscript following template
- // check for our homedir on our machine and replace in the heroscript to @HOME in path
- return encoderhero.encode[HeropromptWorkspace](obj)!
+ // return encoderhero.encode[HeropromptWorkspace](obj)! // temporarily commented out
+ return 'name: "${obj.name}"'
}
pub fn heroscript_loads(heroscript string) !HeropromptWorkspace {
- // TODO: parse heroscript populate HeropromptWorkspace
- mut obj := encoderhero.decode[HeropromptWorkspace](heroscript)!
+ // mut obj := encoderhero.decode[HeropromptWorkspace](heroscript)! // temporarily commented out
+ obj := HeropromptWorkspace{
+ name: 'default'
+ }
return obj
}
diff --git a/lib/develop/heroprompt/templates/prompt.heroprompt b/lib/develop/heroprompt/templates/prompt.heroprompt
new file mode 100644
index 00000000..251c06c6
--- /dev/null
+++ b/lib/develop/heroprompt/templates/prompt.heroprompt
@@ -0,0 +1,11 @@
+
+{{text}}
+
+
+
+{{map}}
+
+
+
+{{content}}
+
\ No newline at end of file
diff --git a/lib/develop/heroprompt/templates/prompt_example.md b/lib/develop/heroprompt/templates/prompt_example.md
index 94615eef..a335cffd 100644
--- a/lib/develop/heroprompt/templates/prompt_example.md
+++ b/lib/develop/heroprompt/templates/prompt_example.md
@@ -1,1607 +1,171 @@
-/Users/despiegk/code/github/freeflowuniverse/herolib
-├── .gitignore
-├── .vdocignore
-├── compile.sh
-├── CONTRIBUTING.md
-├── doc.vsh
-├── generate.vsh
-├── herolib.code-workspace
-├── install_hero.sh
-├── install_herolib.vsh
-├── install_v.sh
-├── LICENSE
-├── README.md
-├── release_OLD.sh
-├── release.vsh
-├── specs.md
-├── test_basic.vsh
-└── test_runner.vsh
-├── .github
-│ └── workflows
-│ ├── documentation.yml
-│ ├── hero_build.yml
-│ └── test.yml
-├── aiprompts
-│ └── herolib_start_here.md
-│ ├── .openhands
-│ │ └── setup.sh
-│ ├── ai_instruct
-│ │ ├── documentation_from_v_md.md
-│ │ ├── documentation_from_v.md
-│ │ ├── prompt_processing_instructions.md
-│ │ ├── prompt_processing_openrpc_like.md
-│ │ └── what_is_a_hero_twin.md
-│ │ ├── models_from_v
-│ ├── bizmodel
-│ │ ├── bizmodel_cost.md
-│ │ ├── bizmodel_funding.md
-│ │ ├── bizmodel_generation_prompt.md
-│ │ ├── bizmodel_hr.md
-│ │ ├── bizmodel_revenue.md
-│ │ └── costs.heroscript
-│ ├── documentor
-│ │ └── generate_v_doc_readable_md_files.md
-│ ├── docusaurus
-│ │ └── docusaurus_ebook_manual.md
-│ ├── herolib_advanced
-│ │ ├── advanced_paths.md
-│ │ ├── builder.md
-│ │ ├── cmdline_argument_parsing_example.vsh
-│ │ ├── datatypes.md
-│ │ ├── osal.md
-│ │ ├── ourdb.md
-│ │ ├── redis.md
-│ │ ├── spreadsheet.md
-│ │ └── ui console chalk.md
-│ ├── herolib_core
-│ │ ├── core_curdir_example.md
-│ │ ├── core_globals.md
-│ │ ├── core_heroscript_basics.md
-│ │ ├── core_heroscript_playbook.md
-│ │ ├── core_http_client.md
-│ │ ├── core_osal.md
-│ │ ├── core_ourtime.md
-│ │ ├── core_params.md
-│ │ ├── core_paths.md
-│ │ ├── core_text.md
-│ │ ├── core_ui_console.md
-│ │ └── core_vshell.md
-│ ├── v_advanced
-│ │ ├── advanced_topics.md
-│ │ ├── compress.md
-│ │ ├── generics.md
-│ │ ├── html_parser.md
-│ │ ├── io.md
-│ │ ├── net.md
-│ │ ├── reflection.md
-│ │ ├── regex.md
-│ │ ├── smtp.md
-│ │ └── time instructions.md
-│ ├── v_core
-│ │ └── v_manual.md
-│ ├── v_veb_webserver
-│ │ ├── veb_assets.md
-│ │ ├── veb_auth.md
-│ │ ├── veb_csrf.md
-│ │ ├── veb_sse.md
-│ │ ├── veb.md
-│ │ └── vtemplates.md
-├── cli
-│ ├── .gitignore
-│ ├── compile
-│ ├── compile_upload.vsh
-│ ├── compile_vdo.vsh
-│ ├── compile.vsh
-│ ├── hero.v
-│ └── vdo.v
-├── docker
-│ └── docker_ubuntu_install.sh
-│ ├── herolib
-│ │ ├── .gitignore
-│ │ ├── build.sh
-│ │ ├── debug.sh
-│ │ ├── docker-compose.yml
-│ │ ├── Dockerfile
-│ │ ├── export.sh
-│ │ ├── README.md
-│ │ ├── shell.sh
-│ │ ├── ssh_init.sh
-│ │ ├── ssh.sh
-│ │ └── start.sh
-│ │ ├── scripts
-│ ├── postgresql
-│ │ ├── docker-compose.yml
-│ │ ├── readme.md
-│ │ └── start.sh
-├── examples
-│ ├── README.md
-│ └── sync_do.sh
-│ ├── aiexamples
-│ │ ├── groq.vsh
-│ │ ├── jetconvertor.vsh
-│ │ ├── jina.vsh
-│ │ └── qdrant.vsh
-│ ├── baobab
-│ │ ├── generator
-│ │ └── specification
-│ ├── biztools
-│ │ ├── bizmodel_complete.vsh
-│ │ ├── bizmodel_export.vsh
-│ │ ├── bizmodel_full.vsh
-│ │ ├── bizmodel.vsh
-│ │ ├── bizmodel1.vsh
-│ │ ├── bizmodel2.vsh
-│ │ ├── costs.vsh
-│ │ ├── funding.vsh
-│ │ ├── hr.vsh
-│ │ └── notworking.md
-│ │ ├── _archive
-│ │ ├── bizmodel_docusaurus
-│ │ ├── examples
-│ ├── builder
-│ │ ├── simple_ip4.vsh
-│ │ ├── simple_ip6.vsh
-│ │ └── simple.vsh
-│ │ ├── remote_executor
-│ ├── clients
-│ │ ├── aiclient_example.vsh
-│ │ ├── jina_example.vsh
-│ │ ├── mail.vsh
-│ │ ├── mycelium_rpc.vsh
-│ │ ├── mycelium.vsh
-│ │ ├── psql.vsh
-│ │ └── zinit_rpc_example.vsh
-│ ├── core
-│ │ ├── agent_encoding.vsh
-│ │ ├── generate.vsh
-│ │ └── secrets_example.vsh
-│ │ ├── base
-│ │ ├── db
-│ │ ├── dbfs
-│ │ ├── openapi
-│ │ ├── openrpc
-│ │ ├── pathlib
-│ ├── data
-│ │ ├── .gitignore
-│ │ ├── cache.vsh
-│ │ ├── compress_gzip_example.vsh
-│ │ ├── deduped_mycelium_master.vsh
-│ │ ├── deduped_mycelium_worker.vsh
-│ │ ├── encoder.vsh
-│ │ ├── encrypt_decrypt.vsh
-│ │ ├── graphdb.vsh
-│ │ ├── heroencoder_example.vsh
-│ │ ├── heroencoder_simple.vsh
-│ │ ├── jsonexample.vsh
-│ │ ├── ourdb_client.vsh
-│ │ ├── ourdb_example.vsh
-│ │ ├── ourdb_server.vsh
-│ │ ├── radixtree.vsh
-│ │ └── regex_example.vsh
-│ │ ├── location
-│ │ ├── ourdb_syncer
-│ │ ├── params
-│ │ ├── resp
-│ ├── develop
-│ │ ├── gittools
-│ │ ├── ipapi
-│ │ ├── juggler
-│ │ ├── luadns
-│ │ ├── openai
-│ │ ├── runpod
-│ │ ├── vastai
-│ │ └── wireguard
-│ ├── hero
-│ │ └── alpine_example.vsh
-│ │ ├── db
-│ │ ├── generation
-│ │ ├── openapi
-│ ├── installers
-│ │ ├── .gitignore
-│ │ ├── cometbft.vsh
-│ │ ├── conduit.vsh
-│ │ ├── coredns.vsh
-│ │ ├── hero_install.vsh
-│ │ ├── installers.vsh
-│ │ ├── traefik.vsh
-│ │ └── youki.vsh
-│ │ ├── db
-│ │ ├── infra
-│ │ ├── lang
-│ │ ├── net
-│ │ ├── sysadmintools
-│ │ ├── threefold
-│ │ ├── virt
-│ ├── jobs
-│ │ └── vfs_jobs_example.vsh
-│ ├── lang
-│ │ └── python
-│ ├── mcp
-│ │ ├── http_demo
-│ │ ├── http_server
-│ │ ├── inspector
-│ │ └── simple_http
-│ ├── osal
-│ │ ├── .gitignore
-│ │ ├── notifier.vsh
-│ │ ├── startup_manager.vsh
-│ │ ├── systemd.vsh
-│ │ ├── tun.vsh
-│ │ ├── ufw_play.vsh
-│ │ └── ufw.vsh
-│ │ ├── coredns
-│ │ ├── download
-│ │ ├── ping
-│ │ ├── process
-│ │ ├── rsync
-│ │ ├── sandbox
-│ │ ├── sshagent
-│ │ ├── zinit
-│ ├── schemas
-│ │ ├── openapi
-│ │ └── openrpc
-│ ├── threefold
-│ │ └── .gitignore
-│ │ ├── grid
-│ │ ├── gridproxy
-│ │ ├── holochain
-│ │ ├── solana
-│ │ ├── tfgrid3deployer
-│ ├── tools
-│ │ ├── imagemagick
-│ │ ├── tmux
-│ │ └── vault
-│ ├── ui
-│ │ ├── flow1.v
-│ │ └── silence.vsh
-│ │ ├── console
-│ │ ├── telegram
-│ ├── vfs
-│ │ └── vfs_db
-│ ├── virt
-│ │ ├── daguserver
-│ │ ├── docker
-│ │ ├── hetzner
-│ │ ├── lima
-│ │ ├── podman_buildah
-│ │ ├── runc
-│ │ └── windows
-│ ├── web
-│ │ ├── .gitignore
-│ │ ├── docusaurus_example.vsh
-│ │ ├── starllight_example.vsh
-│ │ └── ui_demo.vsh
-│ │ ├── doctree
-│ │ ├── markdown_renderer
-│ ├── webdav
-│ │ ├── .gitignore
-│ │ └── webdav_vfs.vsh
-├── lib
-│ ├── readme.md
-│ └── v.mod
-│ ├── ai
-│ │ ├── escalayer
-│ │ ├── mcp
-│ │ └── utils
-│ ├── baobab
-│ │ └── README.md
-│ │ ├── actor
-│ │ ├── generator
-│ │ ├── osis
-│ │ ├── specification
-│ │ ├── stage
-│ ├── biz
-│ │ ├── bizmodel
-│ │ ├── investortool
-│ │ ├── planner
-│ │ └── spreadsheet
-│ ├── builder
-│ │ ├── bootstrapper.v
-│ │ ├── builder_factory.v
-│ │ ├── done.v
-│ │ ├── executor_local_test.v
-│ │ ├── executor_local.v
-│ │ ├── executor_ssh_test.v
-│ │ ├── executor_ssh.v
-│ │ ├── executor.v
-│ │ ├── model_package.v
-│ │ ├── node_commands.v
-│ │ ├── node_executor.v
-│ │ ├── node_factory.v
-│ │ ├── node.v
-│ │ ├── nodedb_test.v
-│ │ ├── portforward_lib.v
-│ │ ├── readme.md
-│ │ └── this_remote.v
-│ ├── clients
-│ │ ├── ipapi
-│ │ ├── jina
-│ │ ├── livekit
-│ │ ├── mailclient
-│ │ ├── meilisearch
-│ │ ├── mycelium
-│ │ ├── mycelium_rpc
-│ │ ├── openai
-│ │ ├── postgresql_client
-│ │ ├── qdrant
-│ │ ├── rclone
-│ │ ├── runpod
-│ │ ├── sendgrid
-│ │ ├── vastai
-│ │ ├── wireguard
-│ │ ├── zerodb_client
-│ │ ├── zinit
-│ │ └── zinit_rpc
-│ ├── code
-│ │ └── generator
-│ ├── conversiontools
-│ │ └── tools.v
-│ │ ├── docsorter
-│ │ ├── imagemagick
-│ │ ├── pdftotext
-│ │ ├── text_extractor
-│ ├── core
-│ │ ├── interactive.v
-│ │ ├── memdb_test.v
-│ │ ├── memdb.v
-│ │ ├── platform_test.v
-│ │ ├── platform.v
-│ │ ├── readme.md
-│ │ ├── sudo_test.v
-│ │ └── sudo.v
-│ │ ├── base
-│ │ ├── code
-│ │ ├── generator
-│ │ ├── herocmds
-│ │ ├── httpconnection
-│ │ ├── logger
-│ │ ├── openrpc_remove
-│ │ ├── pathlib
-│ │ ├── playbook
-│ │ ├── playcmds
-│ │ ├── playmacros
-│ │ ├── redisclient
-│ │ ├── rootpath
-│ │ ├── smartid
-│ │ ├── texttools
-│ │ ├── vexecutor
-│ ├── crypt
-│ │ └── crypt.v
-│ │ ├── aes_symmetric
-│ │ ├── crpgp
-│ │ ├── ed25519
-│ │ ├── keychain
-│ │ ├── keysafe
-│ │ ├── openssl
-│ │ ├── pgp
-│ │ ├── secp256k1
-│ │ ├── secrets
-│ ├── data
-│ │ ├── cache
-│ │ ├── currency
-│ │ ├── dbfs
-│ │ ├── dedupestor
-│ │ ├── doctree
-│ │ ├── encoder
-│ │ ├── encoderhero
-│ │ ├── flist
-│ │ ├── gid
-│ │ ├── graphdb
-│ │ ├── ipaddress
-│ │ ├── location
-│ │ ├── markdown
-│ │ ├── markdownparser2
-│ │ ├── markdownrenderer
-│ │ ├── mnemonic
-│ │ ├── models
-│ │ ├── ourdb
-│ │ ├── ourdb_syncer
-│ │ ├── ourjson
-│ │ ├── ourtime
-│ │ ├── paramsparser
-│ │ ├── radixtree
-│ │ ├── resp
-│ │ ├── serializers
-│ │ ├── tst
-│ │ ├── verasure
-│ │ └── vstor
-│ ├── dav
-│ │ └── webdav
-│ ├── develop
-│ │ ├── gittools
-│ │ ├── luadns
-│ │ ├── performance
-│ │ ├── sourcetree
-│ │ ├── vscode
-│ │ └── vscode_extensions
-│ ├── hero
-│ │ └── models
-│ │ ├── db
-│ ├── installers
-│ │ ├── install_multi.v
-│ │ └── upload.v
-│ │ ├── base
-│ │ ├── db
-│ │ ├── develapps
-│ │ ├── infra
-│ │ ├── lang
-│ │ ├── net
-│ │ ├── sysadmintools
-│ │ ├── threefold
-│ │ ├── ulist
-│ │ ├── virt
-│ │ ├── web
-│ ├── lang
-│ │ ├── python
-│ │ └── rust
-│ ├── mcp
-│ │ ├── backend_interface.v
-│ │ ├── backend_memory.v
-│ │ ├── factory.v
-│ │ ├── generics.v
-│ │ ├── handler_initialize_test.v
-│ │ ├── handler_initialize.v
-│ │ ├── handler_prompts.v
-│ │ ├── handler_resources.v
-│ │ ├── handler_tools.v
-│ │ ├── model_configuration_test.v
-│ │ ├── model_configuration.v
-│ │ ├── model_error.v
-│ │ ├── README.md
-│ │ └── server.v
-│ │ ├── baobab
-│ │ ├── cmd
-│ │ ├── mcpgen
-│ │ ├── pugconvert
-│ │ ├── rhai
-│ │ ├── transport
-│ │ ├── vcode
-│ ├── osal
-│ │ ├── core
-│ │ ├── coredns
-│ │ ├── hostsfile
-│ │ ├── notifier
-│ │ ├── osinstaller
-│ │ ├── rsync
-│ │ ├── screen
-│ │ ├── sshagent
-│ │ ├── startupmanager
-│ │ ├── systemd
-│ │ ├── tmux
-│ │ ├── traefik
-│ │ ├── tun
-│ │ ├── ufw
-│ │ └── zinit
-│ ├── schemas
-│ │ ├── jsonrpc
-│ │ ├── jsonschema
-│ │ ├── openapi
-│ │ └── openrpc
-│ ├── security
-│ │ ├── authentication
-│ │ └── jwt
-│ ├── threefold
-│ │ ├── grid3
-│ │ └── grid4
-│ ├── ui
-│ │ ├── factory.v
-│ │ └── readme.md
-│ │ ├── console
-│ │ ├── generic
-│ │ ├── logger
-│ │ ├── telegram
-│ │ ├── template
-│ │ ├── uimodel
-│ ├── vfs
-│ │ ├── interface.v
-│ │ ├── metadata.v
-│ │ └── README.md
-│ │ ├── vfs_calendar
-│ │ ├── vfs_contacts
-│ │ ├── vfs_db
-│ │ ├── vfs_local
-│ │ ├── vfs_mail
-│ │ ├── vfs_nested
-│ ├── virt
-│ │ ├── cloudhypervisor
-│ │ ├── docker
-│ │ ├── herocontainers
-│ │ ├── hetzner
-│ │ ├── lima
-│ │ ├── qemu
-│ │ ├── runc
-│ │ └── utils
-│ ├── web
-│ │ ├── doctreeclient
-│ │ ├── docusaurus
-│ │ ├── echarts
-│ │ ├── site
-│ │ └── ui
-├── libarchive
-│ ├── installers
-│ │ └── web
-│ ├── rhai
-│ │ ├── generate_rhai_example.v
-│ │ ├── generate_wrapper_module.v
-│ │ ├── register_functions.v
-│ │ ├── register_types_test.v
-│ │ ├── register_types.v
-│ │ ├── rhai_test.v
-│ │ ├── rhai.v
-│ │ └── verify.v
-│ │ ├── prompts
-│ │ ├── templates
-│ │ ├── testdata
-│ └── starlight
-│ ├── clean.v
-│ ├── config.v
-│ ├── factory.v
-│ ├── model.v
-│ ├── site_get.v
-│ ├── site.v
-│ ├── template.v
-│ └── watcher.v
-│ ├── templates
-├── manual
-│ ├── config.json
-│ ├── create_tag.md
-│ └── serve_wiki.sh
-│ ├── best_practices
-│ │ └── using_args_in_function.md
-│ │ ├── osal
-│ │ ├── scripts
-│ ├── core
-│ │ ├── base.md
-│ │ ├── context_session_job.md
-│ │ ├── context.md
-│ │ ├── play.md
-│ │ └── session.md
-│ │ ├── concepts
-│ ├── documentation
-│ │ └── docextractor.md
-├── research
-│ └── globals
-│ ├── globals_example_inplace.vsh
-│ ├── globals_example_reference.vsh
-│ ├── globals_example.vsh
-│ └── ubuntu_partition.sh
-├── vscodeplugin
-│ ├── install_ubuntu.sh
-│ ├── package.sh
-│ └── readme.md
-│ ├── heroscrypt-syntax
-│ │ ├── heroscript-syntax-0.0.1.vsix
-│ │ ├── language-configuration.json
-│ │ └── package.json
-│ │ ├── syntaxes
+/Users/mahmoud/code/github/freeflowuniverse/herolib
+└── docker
+ └── docker_ubuntu_install.sh *
+ ├── herolib
+ │ ├── .gitignore*
+ │ ├── build.sh *
+ │ └── debug.sh*
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/bizmodel.v
-```v
-module playcmds
+File: /Users/mahmoud/code/github/freeflowuniverse/herolib/docker/docker_ubuntu_install.sh
-// import freeflowuniverse.herolib.core.playbook
+```sh
+#!/bin/bash
-// fn git(mut actions playbook.Actions, action playbook.Action) ! {
-// if action.name == 'init' {
-// // means we support initialization afterwards
-// c.bizmodel_init(mut actions, action)!
-// }
+# Exit immediately if a command exits with a non-zero status
+set -e
-// // if action.name == 'get' {
-// // mut gs := gittools.get()!
-// // url := action.params.get('url')!
-// // branch := action.params.get_default('branch', '')!
-// // reset := action.params.get_default_false('reset')!
-// // pull := action.params.get_default_false('pull')!
-// // mut gr := gs.repo_get_from_url(url: url, branch: branch, pull: pull, reset: reset)!
-// // }
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/currency.v
-```v
-module playcmds
-
-// fn currency_actions(actions_ []playbook.Action) ! {
-// mut actions2 := actions.filtersort(actions: actions_, actor: 'currency', book: '*')!
-// if actions2.len == 0 {
-// return
-// }
-
-// mut cs := currency.new()!
-
-// for action in actions2 {
-// // TODO: set the currencies
-// if action.name == 'default_set' {
-// cur := action.params.get('cur')!
-// usdval := action.params.get_int('usdval')!
-// cs.default_set(cur, usdval)!
-// }
-// }
-
-// // TODO: add the currency metainfo, do a test
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/dagu.v
-```v
-module playcmds
-
-// import freeflowuniverse.herolib.installers.sysadmintools.daguserver
-
-// pub fn scheduler(heroscript string) ! {
-// daguserver.play(
-// heroscript: heroscript
-// )!
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/downloader.v
-```v
-module playcmds
-
-// import freeflowuniverse.herolib.core.playbook
-// import freeflowuniverse.herolib.sysadmin.downloader
-
-// can start with sal, dal, ... the 2nd name is typicall the actor (or topic)
-// do this function public and then it breaches out to detail functionality
-
-// pub fn sal_downloader(action playbook.Action) ! {
-// match action.actor {
-// 'downloader' {
-// match action.name {
-// 'get' {
-// downloader_get(action: action)!
-// }
-// else {
-// return error('actions not supported yet')
-// }
-// }
-// }
-// else {
-// return error('actor not supported yet')
-// }
-// }
-// }
-
-// fn downloader_get(args ActionExecArgs) ! {
-// action := args.action
-// // session:=args.action or {panic("no context")} //if we need it here
-// mut name := action.params.get_default('name', '')!
-// mut downloadpath := action.params.get_default('downloadpath', '')!
-// mut url := action.params.get_default('url', '')!
-// mut reset := action.params.get_default_false('reset')
-// mut gitpull := action.params.get_default_false('gitpull')
-
-// mut minsize_kb := action.params.get_u32_default('minsize_kb', 0)!
-// mut maxsize_kb := action.params.get_u32_default('maxsize_kb', 0)!
-
-// mut destlink := action.params.get_default_false('destlink')
-
-// mut dest := action.params.get_default('dest', '')!
-// mut hash := action.params.get_default('hash', '')!
-// mut metapath := action.params.get_default('metapath', '')!
-
-// mut meta := downloader.download(
-// name: name
-// downloadpath: downloadpath
-// url: url
-// reset: reset
-// gitpull: gitpull
-// minsize_kb: minsize_kb
-// maxsize_kb: maxsize_kb
-// destlink: destlink
-// dest: dest
-// hash: hash
-// metapath: metapath
-// // session:session // TODO IMPLEMENT (also optional)
-// )!
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_caddy.v
-```v
-module playcmds
-
-// import freeflowuniverse.herolib.installers.web.caddy as caddy_installer
-// import freeflowuniverse.herolib.servers.caddy { CaddyFile }
-// import freeflowuniverse.herolib.core.playbook
-// import os
-// // import net.urllib
-
-// pub fn play_caddy(mut plbook playbook.PlayBook) ! {
-// play_caddy_basic(mut plbook)!
-// play_caddy_configure(mut plbook)!
-// }
-
-// pub fn play_caddy_configure(mut plbook playbook.PlayBook) ! {
-// mut caddy_actions := plbook.find(filter: 'caddy_configure')!
-// if caddy_actions.len == 0 {
-// return
-// }
-// }
-
-// pub fn play_caddy_basic(mut plbook playbook.PlayBook) ! {
-// caddy_actions := plbook.find(filter: 'caddy.')!
-// if caddy_actions.len == 0 {
-// return
-// }
-
-// mut install_actions := plbook.find(filter: 'caddy.install')!
-
-// if install_actions.len > 0 {
-// for install_action in install_actions {
-// mut p := install_action.params
-// xcaddy := p.get_default_false('xcaddy')
-// file_path := p.get_default('file_path', '/etc/caddy')!
-// file_url := p.get_default('file_url', '')!
-// reset := p.get_default_false('reset')
-// start := p.get_default_false('start')
-// restart := p.get_default_false('restart')
-// stop := p.get_default_false('stop')
-// homedir := p.get_default('file_url', '')!
-// plugins := p.get_list_default('plugins', []string{})!
-
-// caddy_installer.install(
-// xcaddy: xcaddy
-// file_path: file_path
-// file_url: file_url
-// reset: reset
-// start: start
-// restart: restart
-// stop: stop
-// homedir: homedir
-// plugins: plugins
-// )!
-// }
-// }
-
-// mut config_actions := plbook.find(filter: 'caddy.configure')!
-// if config_actions.len > 0 {
-// mut coderoot := ''
-// mut reset := false
-// mut pull := false
-
-// mut public_ip := ''
-
-// mut c := caddy.get('')!
-// // that to me seems to be wrong, not generic enough
-// if config_actions.len > 1 {
-// return error('can only have 1 config action for books')
-// } else if config_actions.len == 1 {
-// mut p := config_actions[0].params
-// path := p.get_default('path', '/etc/caddy')!
-// url := p.get_default('url', '')!
-// public_ip = p.get_default('public_ip', '')!
-// c = caddy.configure('', homedir: path)!
-// config_actions[0].done = true
-// }
-
-// mut caddyfile := CaddyFile{}
-// for mut action in plbook.find(filter: 'caddy.add_reverse_proxy')! {
-// mut p := action.params
-// mut from := p.get_default('from', '')!
-// mut to := p.get_default('to', '')!
-
-// if from == '' || to == '' {
-// return error('from & to cannot be empty')
-// }
-
-// caddyfile.add_reverse_proxy(
-// from: from
-// to: to
-// )!
-// action.done = true
-// }
-
-// for mut action in plbook.find(filter: 'caddy.add_file_server')! {
-// mut p := action.params
-// mut domain := p.get_default('domain', '')!
-// mut root := p.get_default('root', '')!
-
-// if root.starts_with('~') {
-// root = '${os.home_dir()}${root.trim_string_left('~')}'
-// }
-
-// if domain == '' || root == '' {
-// return error('domain & root cannot be empty')
-// }
-
-// caddyfile.add_file_server(
-// domain: domain
-// root: root
-// )!
-// action.done = true
-// }
-
-// for mut action in plbook.find(filter: 'caddy.add_basic_auth')! {
-// mut p := action.params
-// mut domain := p.get_default('domain', '')!
-// mut username := p.get_default('username', '')!
-// mut password := p.get_default('password', '')!
-
-// if domain == '' || username == '' || password == '' {
-// return error('domain & root cannot be empty')
-// }
-
-// caddyfile.add_basic_auth(
-// domain: domain
-// username: username
-// password: password
-// )!
-// action.done = true
-// }
-
-// for mut action in plbook.find(filter: 'caddy.generate')! {
-// c.set_caddyfile(caddyfile)!
-// action.done = true
-// }
-
-// for mut action in plbook.find(filter: 'caddy.start')! {
-// c.start()!
-// action.done = true
-// }
-// c.reload()!
-// }
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_dagu_test.v
-```v
-module playcmds
-
-import freeflowuniverse.herolib.core.playbook
-
-const dagu_script = "
-!!dagu.configure
- instance: 'test'
- username: 'admin'
- password: 'testpassword'
-
-!!dagu.new_dag
- name: 'test_dag'
-
-!!dagu.add_step
- dag: 'test_dag'
- name: 'hello_world'
- command: 'echo hello world'
-
-!!dagu.add_step
- dag: 'test_dag'
- name: 'last_step'
- command: 'echo last step'
-
-
-"
-
-fn test_play_dagu() ! {
- mut plbook := playbook.new(text: dagu_script)!
- play_dagu(mut plbook)!
- // panic('s')
+# Function to display an error message and exit
+error_exit() {
+ echo "Error: $1" >&2
+ exit 1
}
-```
+# Update package index and upgrade system
+echo "Updating system packages..."
+sudo apt update && sudo apt upgrade -y || error_exit "Failed to update system packages."
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_dagu.v
-```v
-module playcmds
+# Install required packages for repository setup
+echo "Installing prerequisites..."
+sudo apt install -y ca-certificates curl gnupg || error_exit "Failed to install prerequisites."
-// import freeflowuniverse.herolib.clients.daguclient
-// import freeflowuniverse.herolib.installers.sysadmintools.daguserver
-// import freeflowuniverse.herolib.installers.sysadmintools.daguserver
-import freeflowuniverse.herolib.core.playbook
-import freeflowuniverse.herolib.ui.console
-import os
+# Create directory for Docker GPG key
+echo "Setting up GPG keyring..."
+sudo mkdir -p /etc/apt/keyrings || error_exit "Failed to create keyring directory."
-// pub fn play_dagu(mut plbook playbook.PlayBook) ! {
-// // dagu_actions := plbook.find(filter: 'dagu.')!
-// // if dagu_actions.len == 0 {
-// // return
-// // }
+# Add Docker's official GPG key
+DOCKER_GPG_KEY=/etc/apt/keyrings/docker.gpg
+echo "Adding Docker GPG key..."
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o $DOCKER_GPG_KEY || error_exit "Failed to add Docker GPG key."
+sudo chmod a+r $DOCKER_GPG_KEY
-// // play_dagu_basic(mut plbook)!
-// // play_dagu_configure(mut plbook)!
-// }
+# Set up Docker repository
+echo "Adding Docker repository..."
+REPO_ENTRY="deb [arch=$(dpkg --print-architecture) signed-by=$DOCKER_GPG_KEY] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
+if ! grep -Fxq "$REPO_ENTRY" /etc/apt/sources.list.d/docker.list; then
+ echo "$REPO_ENTRY" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null || error_exit "Failed to add Docker repository."
+fi
-// // play_dagu plays the dagu play commands
-// pub fn play_dagu_basic(mut plbook playbook.PlayBook) ! {
-// // mut install_actions := plbook.find(filter: 'daguserver.configure')!
+# Update package index
+echo "Updating package index..."
+sudo apt update || error_exit "Failed to update package index."
-// // if install_actions.len > 0 {
-// // for install_action in install_actions {
-// // mut p := install_action.params
-// // panic("daguinstall play")
-// // }
-// // }
+# Install Docker Engine, CLI, and dependencies
+echo "Installing Docker Engine and dependencies..."
+sudo apt install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin || error_exit "Failed to install Docker packages."
-// // dagu_actions := plbook.find(filter: 'daguserver.install')!
-// // if dagu_actions.len > 0 {
-// // panic("daguinstall play")
-// // return
-// // }
+# Verify Docker installation
+echo "Verifying Docker installation..."
+if ! docker --version; then
+ error_exit "Docker installation verification failed."
+fi
-// // mut config_actions := plbook.find(filter: 'dagu.configure')!
-// // mut d := if config_actions.len > 1 {
-// // return error('can only have 1 config action for dagu')
-// // } else if config_actions.len == 1 {
-// // mut p := config_actions[0].params
-// // instance := p.get_default('instance', 'default')!
-// // port := p.get_int_default('port', 8888)!
-// // username := p.get_default('username', '')!
-// // password := p.get_default('password', '')!
-// // config_actions[0].done = true
-// // mut server := daguserver.configure(instance,
-// // port: port
-// // username: username
-// // password: password
-// // )!
-// // server.start()!
-// // console.print_debug('Dagu server is running at http://localhost:${port}')
-// // console.print_debug('Username: ${username} password: ${password}')
+# Run a test container
+echo "Running Docker test container..."
+if ! sudo docker run --rm hello-world; then
+ error_exit "Docker test container failed to run."
+fi
-// // // configure dagu client with server url and api secret
-// // server_cfg := server.config()!
-// // daguclient.get(instance,
-// // url: 'http://localhost:${port}'
-// // apisecret: server_cfg.secret
-// // )!
-// // } else {
-// // mut server := daguserver.get('')!
-// // server.start()!
-// // daguclient.get('')!
-// // }
+# Add current user to Docker group (if not already added)
+echo "Configuring Docker group..."
+if ! groups $USER | grep -q '\bdocker\b'; then
+ sudo usermod -aG docker $USER || error_exit "Failed to add user to Docker group."
+ echo "User added to Docker group. Please log out and back in for this change to take effect."
+else
+ echo "User is already in the Docker group."
+fi
-// // mut dags := map[string]DAG{}
+# Enable Docker service on boot
+echo "Enabling Docker service on boot..."
+sudo systemctl enable docker || error_exit "Failed to enable Docker service."
-// // for mut action in plbook.find(filter: 'dagu.new_dag')! {
-// // mut p := action.params
-// // name := p.get_default('name', '')!
-// // dags[name] = DAG{}
-// // action.done = true
-// // }
-
-// // for mut action in plbook.find(filter: 'dagu.add_step')! {
-// // mut p := action.params
-// // dag := p.get_default('dag', 'default')!
-// // name := p.get_default('name', 'default')!
-// // command := p.get_default('command', '')!
-// // dags[dag].step_add(
-// // nr: dags.len
-// // name: name
-// // command: command
-// // )!
-// // }
-
-// // for mut action in plbook.find(filter: 'dagu.run')! {
-// // mut p := action.params
-// // dag := p.get_default('dag', 'default')!
-// // // d.new_dag(dags[dag])!
-// // panic('to implement')
-// // }
-// }
+# Success message
+echo "Docker installation completed successfully!"
```
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_juggler.v
-```v
-module playcmds
+File: /Users/mahmoud/code/github/freeflowuniverse/herolib/docker/herolib/build.sh
-import freeflowuniverse.herolib.data.doctree
-import freeflowuniverse.herolib.ui.console
-import freeflowuniverse.herolib.core.playbook
-import freeflowuniverse.herolib.develop.juggler
-import os
+```sh
+#!/bin/bash -e
-pub fn play_juggler(mut plbook playbook.PlayBook) ! {
- mut coderoot := ''
- // mut install := false
- mut reset := false
- mut pull := false
+# Get the directory where the script is located
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd "$SCRIPT_DIR"
- mut config_actions := plbook.find(filter: 'juggler.configure')!
+# Copy installation files
+cp ../../install_v.sh ./scripts/install_v.sh
+cp ../../install_herolib.vsh ./scripts/install_herolib.vsh
- mut j := juggler.Juggler{}
+# Docker image and container names
+DOCKER_IMAGE_NAME="herolib"
+DEBUG_CONTAINER_NAME="herolib"
- if config_actions.len > 1 {
- return error('can only have 1 config action for juggler')
- } else if config_actions.len == 1 {
- mut p := config_actions[0].params
- path := p.get_default('path', '/etc/juggler')!
- url := p.get_default('url', '')!
- username := p.get_default('username', '')!
- password := p.get_default('password', '')!
- port := p.get_int_default('port', 8000)!
-
- j = juggler.configure(
- url: 'https://git.threefold.info/projectmycelium/itenv'
- username: username
- password: password
- reset: true
- )!
- config_actions[0].done = true
- }
-
- for mut action in plbook.find(filter: 'juggler.start')! {
- j.start()!
- action.done = true
- }
-
- for mut action in plbook.find(filter: 'juggler.restart')! {
- j.restart()!
- action.done = true
- }
+function cleanup {
+ if docker ps -aq -f name="$DEBUG_CONTAINER_NAME" &>/dev/null; then
+ echo "Cleaning up leftover debug container..."
+ docker rm -f "$DEBUG_CONTAINER_NAME" &>/dev/null || true
+ fi
}
+trap cleanup EXIT
+
+# Attempt to build the Docker image
+BUILD_LOG=$(mktemp)
+set +e
+docker build --name herolib --progress=plain -t "$DOCKER_IMAGE_NAME" .
+BUILD_EXIT_CODE=$?
+set -e
+
+# Handle build failure
+if [ $BUILD_EXIT_CODE -ne 0 ]; then
+ echo -e "\\n[ERROR] Docker build failed.\n"
+ echo -e "remove the part which didn't build in the Dockerfile, the run again and to debug do:"
+ echo docker run --name herolib -it --entrypoint=/bin/bash "herolib"
+ exit $BUILD_EXIT_CODE
+else
+ echo -e "\\n[INFO] Docker build completed successfully."
+fi
+
+
```
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_publisher_test.v
-```v
-module playcmds
+File: /Users/mahmoud/code/github/freeflowuniverse/herolib/docker/herolib/.gitignore
-import freeflowuniverse.herolib.core.playbook
-import freeflowuniverse.herolib.core.playcmds
-import freeflowuniverse.herolib.core.pathlib
-import os
+```
+.bash_history
+.openvscode-server/
+.cache/
+```
-fn test_play_publisher() {
- mut p := pathlib.get_file(path: '/tmp/heroscript/do.hero', create: true)!
+File: /Users/mahmoud/code/github/freeflowuniverse/herolib/docker/herolib/debug.sh
- s2 := "
+```sh
+#!/bin/bash -ex
-!!publisher.new_collection
- url:'https://git.threefold.info/tfgrid/info_tfgrid/src/branch/main/collections'
- reset: false
- pull: true
+# Get the directory where the script is located
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+cd "$SCRIPT_DIR"
+# Remove any existing container named 'debug' (ignore errors)
+docker rm -f herolib > /dev/null 2>&1
-!!book.define
- name:'info_tfgrid'
- summary_url:'https://git.threefold.info/tfgrid/info_tfgrid/src/branch/development/books/tech/SUMMARY.md'
- title:'ThreeFold Technology'
- collections: 'about,dashboard,farmers,library,partners_utilization,tech,p2p'
-
-
-!!book.publish
- name:'tech'
- production: false
-"
- p.write(s2)!
-
- mut plbook := playbook.new(path: '/tmp/heroscript')!
- playcmds.play_publisher(mut plbook)!
-}
+docker run --name herolib -it \
+ --entrypoint="/usr/local/bin/ourinit.sh" \
+ -v "${SCRIPT_DIR}/scripts:/scripts" \
+ -v "$HOME/code:/root/code" \
+ -p 4100:8100 \
+ -p 4101:8101 \
+ -p 4102:8102 \
+ -p 4379:6379 \
+ -p 4022:22 \
+ -p 4000:3000 herolib
```
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_publisher.v
-```v
-module playcmds
-
-import freeflowuniverse.herolib.core.playbook
-// import freeflowuniverse.herolib.hero.publishing
-
-// pub fn play_publisher(mut plbook playbook.PlayBook) ! {
-// publishing.play(mut plbook)!
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_threefold.v
-```v
-module playcmds
-
-import freeflowuniverse.herolib.core.playbook
-// import freeflowuniverse.herolib.threefold.grid
-// import freeflowuniverse.herolib.threefold.tfrobot
-// import os
-
-// pub fn play_threefold(mut plbook playbook.PlayBook) ! {
-// panic('fix tfrobot module')
-// // mut config_actions := plbook.find(filter: 'threefold.configure')!
-
-// // mnemonics_ := os.getenv_opt('TFGRID_MNEMONIC') or { '' }
-// // mut ssh_key := os.getenv_opt('SSH_KEY') or { '' }
-
-// // tfrobot.configure('play', network: 'main', mnemonics: mnemonics_)!
-
-// // mut robot := tfrobot.get('play')!
-
-// // if config_actions.len > 1 {
-// // return error('can only have 1 config action for threefold')
-// // } else if config_actions.len == 1 {
-// // mut a := config_actions[0]
-// // mut p := a.params
-// // mut network := p.get_default('network', 'main')!
-// // mnemonics := p.get_default('mnemonics', '')!
-// // ssh_key = p.get_default('ssh_key', '')!
-
-// // network = network.to_lower()
-
-// // // mnemonics string
-// // // network string = 'main'
-// // tfrobot.configure('play', network: network, mnemonics: mnemonics)!
-
-// // robot = tfrobot.get('play')!
-
-// // config_actions[0].done = true
-// // }
-// // cfg := robot.config()!
-// // if cfg.mnemonics == '' {
-// // return error('TFGRID_MNEMONIC should be specified as env variable')
-// // }
-
-// // if ssh_key == '' {
-// // return error('SSHKey should be specified as env variable')
-// // }
-
-// // panic('implement')
-
-// // for mut action in plbook.find(filter: 'threefold.deploy_vm')! {
-// // mut p := action.params
-// // deployment_name := p.get_default('deployment_name', 'deployment')!
-// // name := p.get_default('name', 'vm')!
-// // ssh_key := p.get_default('ssh_key', '')!
-// // cores := p.get_int_default('cores', 1)!
-// // memory := p.get_int_default('memory', 20)!
-// // panic("implement")
-// // action.done = true
-// // }
-
-// // for mut action in plbook.find(filter: 'threefold.deploy_zdb')! {
-// // panic("implement")
-// // action.done = true
-// // }
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/_archive/play_zola.v
-```v
-module playcmds
-
-// import freeflowuniverse.herolib.ui.console
-// import freeflowuniverse.herolib.web.zola
-// import freeflowuniverse.herolib.core.playbook
-
-// struct WebsiteItem {
-// mut:
-// name string
-// site ?&zola.ZolaSite
-// }
-
-// pub fn play_zola(mut plbook playbook.PlayBook) ! {
-// // mut coderoot := ''
-// mut buildroot := ''
-// mut publishroot := ''
-// mut install := true
-// mut reset := false
-
-// wsactions := plbook.find(filter: 'website.')!
-// if wsactions.len == 0 {
-// return
-// }
-
-// mut config_actions := plbook.find(filter: 'websites:configure')!
-// if config_actions.len > 1 {
-// return error('can only have 1 config action for websites')
-// } else if config_actions.len == 1 {
-// mut p := config_actions[0].params
-// buildroot = p.get_default('buildroot', '')!
-// publishroot = p.get_default('publishroot', '')!
-// // coderoot = p.get_default('coderoot', '')!
-// install = p.get_default_true('install')
-// reset = p.get_default_false('reset')
-// config_actions[0].done = true
-// }
-// mut websites := zola.new(
-// path_build: buildroot
-// path_publish: publishroot
-// install: install
-// reset: reset
-// )!
-
-// mut ws := WebsiteItem{}
-
-// for mut action in plbook.find(filter: 'website.')! {
-// if action.name == 'define' {
-// console.print_debug('website.define')
-// mut p := action.params
-// ws.name = p.get('name')!
-// title := p.get_default('title', '')!
-// description := p.get_default('description', '')!
-// ws.site = websites.new(name: ws.name, title: title, description: description)!
-// } else if action.name == 'template_add' {
-// console.print_debug('website.template_add')
-// mut p := action.params
-// url := p.get_default('url', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for template_add, should have been defined before with !!website.define")
-// }
-
-// site_.template_add(url: url)!
-// } else if action.name == 'content_add' {
-// console.print_debug('website.content_add')
-// mut p := action.params
-// url := p.get_default('url', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for content_add, should have been defined before with !!website.define")
-// }
-
-// site_.content_add(url: url)!
-// } else if action.name == 'doctree_add' {
-// console.print_debug('website.doctree_add')
-// mut p := action.params
-// url := p.get_default('url', '')!
-// pull := p.get_default_false('pull')
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.doctree_add(url: url, pull: pull)!
-// } else if action.name == 'post_add' {
-// console.print_debug('website.post_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// collection := p.get_default('collection', '')!
-// file := p.get_default('file', '')!
-// page := p.get_default('page', '')!
-// pointer := p.get_default('pointer', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.post_add(name: name, collection: collection, file: file, pointer: pointer)!
-// } else if action.name == 'blog_add' {
-// console.print_debug('website.blog_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// collection := p.get_default('collection', '')!
-// file := p.get_default('file', '')!
-// page := p.get_default('page', '')!
-// pointer := p.get_default('pointer', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.blog_add(name: name)!
-// } else if action.name == 'person_add' {
-// console.print_debug('website.person_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// page := p.get_default('page', '')!
-// collection := p.get_default('collection', '')!
-// file := p.get_default('file', '')!
-// pointer := p.get_default('pointer', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.person_add(
-// name: name
-// collection: collection
-// file: file
-// page: page
-// pointer: pointer
-// )!
-// } else if action.name == 'people_add' {
-// console.print_debug('website.people_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// description := p.get_default('description', '')!
-// sort_by_ := p.get_default('sort_by', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for people_add, should have been defined before with !!website.define")
-// }
-
-// sort_by := zola.SortBy.from(sort_by_)!
-// site_.people_add(
-// name: name
-// title: p.get_default('title', '')!
-// sort_by: sort_by
-// description: description
-// )!
-// } else if action.name == 'blog_add' {
-// console.print_debug('website.blog_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// description := p.get_default('description', '')!
-// sort_by_ := p.get_default('sort_by', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for people_add, should have been defined before with !!website.define")
-// }
-
-// sort_by := zola.SortBy.from(sort_by_)!
-// site_.blog_add(
-// name: name
-// title: p.get_default('title', '')!
-// sort_by: sort_by
-// description: description
-// )!
-// } else if action.name == 'news_add' {
-// console.print_debug('website.news_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// collection := p.get_default('collection', '')!
-// pointer := p.get_default('pointer', '')!
-// file := p.get_default('file', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for news_add, should have been defined before with !!website.define")
-// }
-
-// site_.article_add(name: name, collection: collection, file: file, pointer: pointer)!
-// } else if action.name == 'header_add' {
-// console.print_debug('website.header_add')
-// mut p := action.params
-// template := p.get_default('template', '')!
-// logo := p.get_default('logo', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.header_add(template: template, logo: logo)!
-// } else if action.name == 'header_link_add' {
-// console.print_debug('website.header_link_add')
-// mut p := action.params
-// page := p.get_default('page', '')!
-// label := p.get_default('label', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for header_link_add, should have been defined before with !!website.define")
-// }
-
-// site_.header_link_add(page: page, label: label)!
-// } else if action.name == 'footer_add' {
-// console.print_debug('website.footer_add')
-// mut p := action.params
-// template := p.get_default('template', '')!
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.footer_add(template: template)!
-// } else if action.name == 'page_add' {
-// console.print_debug('website.page_add')
-// mut p := action.params
-// name := p.get_default('name', '')!
-// collection := p.get_default('collection', '')!
-// file := p.get_default('file', '')!
-// homepage := p.get_default_false('homepage')
-// mut site_ := ws.site or {
-// return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// }
-
-// site_.page_add(name: name, collection: collection, file: file, homepage: homepage)!
-
-// // }else if action.name=="pull"{
-// // mut site_:=ws.site or { return error("can't find website for pull, should have been defined before with !!website.define")}
-// // site_.pull()!
-// } else if action.name == 'section_add' {
-// console.print_debug('website.section_add')
-// // mut p := action.params
-// // name := p.get_default('name', '')!
-// // // collection := p.get_default('collection', '')!
-// // // file := p.get_default('file', '')!
-// // // homepage := p.get_default_false('homepage')
-// // mut site_ := ws.site or {
-// // return error("can't find website for doctree_add, should have been defined before with !!website.define")
-// // }
-
-// // site_.add_section(name: name)!
-
-// // }else if action.name=="pull"{
-// // mut site_:=ws.site or { return error("can't find website for pull, should have been defined before with !!website.define")}
-// // site_.pull()!
-// } else if action.name == 'generate' {
-// mut site_ := ws.site or {
-// return error("can't find website for generate, should have been defined before with !!website.define")
-// }
-
-// site_.generate()!
-// // site_.serve()!
-// } else {
-// return error("Cannot find right action for website. Found '${action.name}' which is a non understood action for !!website.")
-// }
-// action.done = true
-// }
-// }
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/factory.v
-```v
-module playcmds
-
-import freeflowuniverse.herolib.core.playbook { PlayBook }
-import freeflowuniverse.herolib.data.doctree
-import freeflowuniverse.herolib.biz.bizmodel
-import freeflowuniverse.herolib.web.site
-import freeflowuniverse.herolib.web.docusaurus
-import freeflowuniverse.herolib.clients.openai
-
-// -------------------------------------------------------------------
-// run – entry point for all HeroScript play‑commands
-// -------------------------------------------------------------------
-
-@[params]
-pub struct PlayArgs {
-pub mut:
- heroscript string
- heroscript_path string
- plbook ?PlayBook
- reset bool
-}
-
-pub fn run(args_ PlayArgs) ! {
- mut args := args_
- mut plbook := args.plbook or {
- playbook.new(text: args.heroscript, path: args.heroscript_path)!
- }
-
- // Core actions
- play_core(mut plbook)!
- // Git actions
- play_git(mut plbook)!
-
- // Business model (e.g. currency, bizmodel)
- bizmodel.play(mut plbook)!
-
- // OpenAI client
- openai.play(mut plbook)!
-
- // Website / docs
- site.play(mut plbook)!
- doctree.play(mut plbook)!
- docusaurus.play(mut plbook)!
-
- // Ensure we did not leave any actions un‑processed
- plbook.empty_check()!
-}
-
-```
-
-File: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/core/playcmds/play_core.v
-```v
-module playcmds
-
-import freeflowuniverse.herolib.develop.gittools
-import freeflowuniverse.herolib.core.playbook { PlayBook }
-import freeflowuniverse.herolib.ui.console
-import freeflowuniverse.herolib.core.texttools
-
-// -------------------------------------------------------------------
-// Core play‑command processing (context, session, env‑subst, etc)
-// -------------------------------------------------------------------
-
-fn play_core(mut plbook PlayBook) ! {
- // ----------------------------------------------------------------
- // 1. Include handling (play include / echo)
- // ----------------------------------------------------------------
- // Track included paths to prevent infinite recursion
- mut included_paths := map[string]bool{}
-
- for action_ in plbook.find(filter: 'play.*')! {
- if action_.name == 'include' {
- console.print_debug('play run:${action_}')
- mut action := *action_
- mut playrunpath := action.params.get_default('path', '')!
- if playrunpath.len == 0 {
- action.name = 'pull'
- playrunpath = gittools.get_repo_path(
- path: action.params.get_default('path', '')!
- git_url: action.params.get_default('git_url', '')!
- git_reset: action.params.get_default_false('git_reset')
- git_pull: action.params.get_default_false('git_pull')
- )!
- }
- if playrunpath.len == 0 {
- return error("can't run a heroscript didn't find url or path.")
- }
-
- // Check for cycle detection
- if playrunpath in included_paths {
- console.print_debug('Skipping already included path: ${playrunpath}')
- continue
- }
-
- console.print_debug('play run path:${playrunpath}')
- included_paths[playrunpath] = true
- plbook.add(path: playrunpath)!
- }
- if action_.name == 'echo' {
- content := action_.params.get_default('content', "didn't find content")!
- console.print_header(content)
- }
- }
-
- // ----------------------------------------------------------------
- // 2. Session environment handling
- // ----------------------------------------------------------------
- // Guard – make sure a session exists
- mut session := plbook.session
-
- // !!session.env_set / env_set_once
- for mut action in plbook.find(filter: 'session.')! {
- mut p := action.params
- match action.name {
- 'env_set' {
- key := p.get('key')!
- val := p.get('val') or { p.get('value')! }
- session.env_set(key, val)!
- }
- 'env_set_once' {
- key := p.get('key')!
- val := p.get('val') or { p.get('value')! }
- // Use the dedicated “set‑once” method
- session.env_set_once(key, val)!
- }
- else { /* ignore unknown sub‑action */ }
- }
- action.done = true
- }
-
- // ----------------------------------------------------------------
- // 3. Template replacement in action parameters
- // ----------------------------------------------------------------
- // Apply template replacement from session environment variables
- if session.env.len > 0 {
- // Create a map with name_fix applied to keys for template replacement
- mut env_fixed := map[string]string{}
- for key, value in session.env {
- env_fixed[texttools.name_fix(key)] = value
- }
-
- for mut action in plbook.actions {
- if !action.done {
- action.params.replace(env_fixed)
- }
- }
- }
-
- for mut action in plbook.find(filter: 'core.coderoot_set')! {
- mut p := action.params
- if p.exists('coderoot') {
- coderoot := p.get_path_create('coderoot')!
- if session.context.config.coderoot != coderoot {
- session.context.config.coderoot = coderoot
- session.context.save()!
- }
- } else {
- return error('coderoot needs to be specified')
- }
- action.done = true
- }
-
- for mut action in plbook.find(filter: 'core.params_context_set')! {
- mut p := action.params
- mut context_params := session.context.params()!
- for param in p.params {
- context_params.set(param.key, param.value)
- }
- session.context.save()!
- action.done = true
- }
-
- for mut action in plbook.find(filter: 'core.params_session_set')! {
- mut p := action.params
- for param in p.params {
- session.params.set(param.key, param.value)
- }
- session.save()!
- action.done = true
- }
-}
-
-```
-these are my instructions what needs to be done with the attached code
-
-TODO…
+This is a small repo prompt example