diff --git a/.github/workflows/hero_build_macos.yml b/.github/workflows/hero_build_macos.yml index ec0d22dd..616a76ae 100644 --- a/.github/workflows/hero_build_macos.yml +++ b/.github/workflows/hero_build_macos.yml @@ -59,7 +59,7 @@ jobs: - name: Build Hero run: | - v -w -cg -gc none -no-retry-compilation -d use_openssl -enable-globals cli/hero.v + v -w -cg -gc none -d use_openssl -enable-globals cli/hero.v - name: Do all the basic tests run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 210c8574..b59b790b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,7 +70,7 @@ jobs: - name: Build Hero run: | - v -w -cg -gc none -no-retry-compilation -d use_openssl -enable-globals cli/hero.v -o cli/hero-${{ matrix.target }} + v -w -cg -gc none -d use_openssl -enable-globals cli/hero.v -o cli/hero-${{ matrix.target }} - name: Upload uses: actions/upload-artifact@v4 diff --git a/aiprompts/starter/0_start_here.md b/aiprompts/starter/0_start_here.md index d1032ad8..f3c811b1 100644 --- a/aiprompts/starter/0_start_here.md +++ b/aiprompts/starter/0_start_here.md @@ -11,7 +11,7 @@ when I generate vlang scripts I will always use .vsh extension and use following as first line: ``` -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run ``` - a .vsh is a v shell script and can be executed as is, no need to use v ... 
@@ -21,7 +21,7 @@ when I generate vlang scripts I will always use .vsh extension and use following ## to do argument parsing use following examples ```v -#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run import os import flag diff --git a/aiprompts/v_manual_advanced.md b/aiprompts/v_manual_advanced.md index 84b214a3..f091d19b 100644 --- a/aiprompts/v_manual_advanced.md +++ b/aiprompts/v_manual_advanced.md @@ -2238,7 +2238,7 @@ be faster, since there is no need for a re-compilation of a script, that has not An example `deploy.vsh`: ```v oksyntax -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run // Note: The shebang line above, associates the .vsh file to V on Unix-like systems, // so it can be run just by specifying the path to the .vsh file, once it's made @@ -2300,11 +2300,11 @@ Whilst V does normally not allow vsh scripts without the designated file extensi to circumvent this rule and have a file with a fully custom name and shebang. Whilst this feature exists it is only recommended for specific usecases like scripts that will be put in the path and should **not** be used for things like build or deploy scripts. To access this feature start the -file with `#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +file with `#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run the built executable. This will run in crun mode so it will only rebuild if changes to the script were made and keep the binary as `tmp.`. **Caution**: if this filename already exists the file will be overridden. 
If you want to rebuild each time and not keep this binary -instead use `#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +instead use `#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run # Appendices diff --git a/aiprompts/vshell example script instructions.md b/aiprompts/vshell example script instructions.md index cfd2956b..a45e7c41 100644 --- a/aiprompts/vshell example script instructions.md +++ b/aiprompts/vshell example script instructions.md @@ -3,7 +3,7 @@ this is how we want example scripts to be, see the first line ```vlang -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.sysadmintools.daguserver diff --git a/cli/compile.vsh b/cli/compile.vsh index 4dedb63e..cf66f026 100755 --- a/cli/compile.vsh +++ b/cli/compile.vsh @@ -1,5 +1,5 @@ #!/usr/bin/env -S v -n -w -parallel-cc -enable-globals run -// #!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import flag @@ -45,7 +45,7 @@ compile_cmd := if os.user_os() == 'macos' { if prod_mode { 'v -enable-globals -w -n -prod hero.v' } else { - 'v -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals hero.v' + 'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v' } } else { if prod_mode { diff --git a/doc.vsh b/doc.vsh index 010c9a03..b16ad984 100755 --- a/doc.vsh +++ b/doc.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os diff --git a/docker/herolib/scripts/install_herolib.vsh b/docker/herolib/scripts/install_herolib.vsh index d6cbbf3d..53524905 100755 --- 
a/docker/herolib/scripts/install_herolib.vsh +++ b/docker/herolib/scripts/install_herolib.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import flag @@ -64,7 +64,7 @@ os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowunive println('Herolib installation completed successfully!') // Add vtest alias -addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -no-retry-compilation -cc tcc test\' ') or { +addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -cc tcc test\' ') or { eprintln('Failed to add vtest alias: ${err}') } diff --git a/examples/README.md b/examples/README.md index 0411a911..08842e67 100644 --- a/examples/README.md +++ b/examples/README.md @@ -34,7 +34,7 @@ The examples directory demonstrates various capabilities of HeroLib: When creating V scripts (.vsh files), always use the following shebang: ```bash -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run ``` This shebang ensures: diff --git a/examples/builder/simple.vsh b/examples/builder/simple.vsh index 72c6eb92..fe78455a 100755 --- a/examples/builder/simple.vsh +++ b/examples/builder/simple.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.builder import freeflowuniverse.herolib.core.pathlib diff --git a/examples/builder/simple_ip4.vsh b/examples/builder/simple_ip4.vsh index f4da05d8..a3a22627 100755 --- a/examples/builder/simple_ip4.vsh +++ b/examples/builder/simple_ip4.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation 
-cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.builder import freeflowuniverse.herolib.core.pathlib diff --git a/examples/builder/simple_ip6.vsh b/examples/builder/simple_ip6.vsh index 10d38683..ee8c118a 100755 --- a/examples/builder/simple_ip6.vsh +++ b/examples/builder/simple_ip6.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.builder import freeflowuniverse.herolib.core.pathlib diff --git a/examples/core/base/config_basic.vsh b/examples/core/base/config_basic.vsh index 0400babe..e19cd557 100755 --- a/examples/core/base/config_basic.vsh +++ b/examples/core/base/config_basic.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.base diff --git a/examples/core/base/config_with_template_replacement.vsh b/examples/core/base/config_with_template_replacement.vsh index 0a7de62d..bdae2f47 100755 --- a/examples/core/base/config_with_template_replacement.vsh +++ b/examples/core/base/config_with_template_replacement.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.pathlib import freeflowuniverse.herolib.core.base diff --git a/examples/core/base/core_1.vsh b/examples/core/base/core_1.vsh index fec88efc..b39face2 100755 --- a/examples/core/base/core_1.vsh +++ b/examples/core/base/core_1.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v 
-n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.base import freeflowuniverse.herolib.develop.gittools diff --git a/examples/core/codeparser/parse_embedded/example.vsh b/examples/core/codeparser/parse_embedded/example.vsh index 2d0a48a4..35ba80e7 100755 --- a/examples/core/codeparser/parse_embedded/example.vsh +++ b/examples/core/codeparser/parse_embedded/example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import freeflowuniverse.herolib.core.codeparser diff --git a/examples/core/db/db_do.v b/examples/core/db/db_do.v index 4088cd57..01caf274 100755 --- a/examples/core/db/db_do.v +++ b/examples/core/db/db_do.v @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import time import freeflowuniverse.herolib.core.smartid diff --git a/examples/core/dbfs/dbfs1.vsh b/examples/core/dbfs/dbfs1.vsh index 74bdf542..045d7feb 100755 --- a/examples/core/dbfs/dbfs1.vsh +++ b/examples/core/dbfs/dbfs1.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.dbfs import time diff --git a/examples/core/generate.vsh b/examples/core/generate.vsh index dddb7e04..17b7a11d 100755 --- a/examples/core/generate.vsh +++ b/examples/core/generate.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.generator.installer diff --git a/examples/core/openapi/gitea/gitea_openapi.vsh 
b/examples/core/openapi/gitea/gitea_openapi.vsh index 0526e556..abb99d9e 100644 --- a/examples/core/openapi/gitea/gitea_openapi.vsh +++ b/examples/core/openapi/gitea/gitea_openapi.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import json diff --git a/examples/core/pathlib/examples/list/path_list.vsh b/examples/core/pathlib/examples/list/path_list.vsh index 7e635a9d..17f8b09f 100755 --- a/examples/core/pathlib/examples/list/path_list.vsh +++ b/examples/core/pathlib/examples/list/path_list.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.pathlib import os diff --git a/examples/core/pathlib/examples/md5/paths_md5.vsh b/examples/core/pathlib/examples/md5/paths_md5.vsh index 34433802..b6c59ca7 100755 --- a/examples/core/pathlib/examples/md5/paths_md5.vsh +++ b/examples/core/pathlib/examples/md5/paths_md5.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.pathlib import os diff --git a/examples/core/pathlib/examples/scanner/path_scanner.vsh b/examples/core/pathlib/examples/scanner/path_scanner.vsh index 4e6a31b9..26fcdb10 100755 --- a/examples/core/pathlib/examples/scanner/path_scanner.vsh +++ b/examples/core/pathlib/examples/scanner/path_scanner.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.pathlib import freeflowuniverse.herolib.data.paramsparser diff 
--git a/examples/core/pathlib/examples/sha256/paths_sha256.vsh b/examples/core/pathlib/examples/sha256/paths_sha256.vsh index 07e8de5f..ef9a1726 100755 --- a/examples/core/pathlib/examples/sha256/paths_sha256.vsh +++ b/examples/core/pathlib/examples/sha256/paths_sha256.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.pathlib import os diff --git a/examples/core/secrets_example.vsh b/examples/core/secrets_example.vsh index 0d0e8ff1..1521bd3c 100755 --- a/examples/core/secrets_example.vsh +++ b/examples/core/secrets_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.crypt.secrets diff --git a/examples/data/cache b/examples/data/cache new file mode 100755 index 00000000..32e26bb3 Binary files /dev/null and b/examples/data/cache differ diff --git a/examples/data/cache.vsh b/examples/data/cache.vsh new file mode 100755 index 00000000..2e81cacc --- /dev/null +++ b/examples/data/cache.vsh @@ -0,0 +1,139 @@ +#!/usr/bin/env -S v run + +// Example struct to cache +import freeflowuniverse.herolib.data.cache +import time + +@[heap] +struct User { + id u32 + name string + age int +} + +fn main() { + // Create a cache with custom configuration + config := cache.CacheConfig{ + max_entries: 1000 // Maximum number of entries + max_size_mb: 10.0 // Maximum cache size in MB + ttl_seconds: 300 // Items expire after 5 minutes + eviction_ratio: 0.2 // Evict 20% of entries when full + } + + mut user_cache := cache.new_cache[User](config) + + // Create some example users + user1 := &User{ + id: 1 + name: 'Alice' + age: 30 + } + + user2 := &User{ + id: 2 + name: 'Bob' + age: 25 + } + + // Add users to cache + 
println('Adding users to cache...') + user_cache.set(user1.id, user1) + user_cache.set(user2.id, user2) + + // Retrieve users from cache + println('\nRetrieving users from cache:') + if cached_user1 := user_cache.get(1) { + println('Found user 1: ${cached_user1.name}, age ${cached_user1.age}') + } + + if cached_user2 := user_cache.get(2) { + println('Found user 2: ${cached_user2.name}, age ${cached_user2.age}') + } + + // Try to get non-existent user + println('\nTrying to get non-existent user:') + if user := user_cache.get(999) { + println('Found user: ${user.name}') + } else { + println('User not found in cache') + } + + // Demonstrate cache stats + println('\nCache statistics:') + println('Number of entries: ${user_cache.len()}') + + // Clear the cache + println('\nClearing cache...') + user_cache.clear() + println('Cache entries after clear: ${user_cache.len()}') + + // Demonstrate max entries limit + println('\nDemonstrating max entries limit (adding 2000 entries):') + println('Initial cache size: ${user_cache.len()}') + + for i := u32(0); i < 2000; i++ { + user := &User{ + id: i + name: 'User${i}' + age: 20 + int(i % 50) + } + user_cache.set(i, user) + + if i % 200 == 0 { + println('After adding ${i} entries:') + println(' Cache size: ${user_cache.len()}') + + // Check some entries to verify LRU behavior + if i >= 500 { + old_id := if i < 1000 { u32(0) } else { i - 1000 } + recent_id := i - 1 + println(' Entry ${old_id} (old): ${if _ := user_cache.get(old_id) { + 'found' + } else { + 'evicted' + }}') + println(' Entry ${recent_id} (recent): ${if _ := user_cache.get(recent_id) { + 'found' + } else { + 'evicted' + }}') + } + println('') + } + } + + println('Final statistics:') + println('Cache size: ${user_cache.len()} (should be max 1000)') + + // Verify we can only access recent entries + println('\nVerifying LRU behavior:') + println('First entry (0): ${if _ := user_cache.get(0) { 'found' } else { 'evicted' }}') + println('Middle entry (1000): ${if _ := 
user_cache.get(1000) { 'found' } else { 'evicted' }}') + println('Recent entry (1900): ${if _ := user_cache.get(1900) { 'found' } else { 'evicted' }}') + println('Last entry (1999): ${if _ := user_cache.get(1999) { 'found' } else { 'evicted' }}') + + // Demonstrate TTL expiration + println('\nDemonstrating TTL expiration:') + quick_config := cache.CacheConfig{ + ttl_seconds: 2 // Set short TTL for demo + } + mut quick_cache := cache.new_cache[User](quick_config) + + // Add a user + quick_cache.set(user1.id, user1) + println('Added user to cache with 2 second TTL') + + if cached := quick_cache.get(user1.id) { + println('User found immediately: ${cached.name}') + } + + // Wait for TTL to expire + println('Waiting for TTL to expire...') + time.sleep(3 * time.second) + + if _ := quick_cache.get(user1.id) { + println('User still in cache') + } else { + println('User expired from cache as expected') + } +} diff --git a/examples/data/encoder.vsh b/examples/data/encoder.vsh index 4ebb9771..88db1ea7 100755 --- a/examples/data/encoder.vsh +++ b/examples/data/encoder.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.encoder import crypto.ed25519 diff --git a/examples/data/encrypt_decrypt.vsh b/examples/data/encrypt_decrypt.vsh index 742858a8..74a38407 100755 --- a/examples/data/encrypt_decrypt.vsh +++ b/examples/data/encrypt_decrypt.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.crypt.aes_symmetric { decrypt, encrypt } import freeflowuniverse.herolib.ui.console diff --git a/examples/data/graphdb.vsh b/examples/data/graphdb.vsh index 98d7ab88..4813de52 100755 --- a/examples/data/graphdb.vsh +++ 
b/examples/data/graphdb.vsh @@ -1,94 +1,175 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run +// Example demonstrating GraphDB usage in a social network context import freeflowuniverse.herolib.data.graphdb fn main() { - // Create a new graph database - mut gdb := graphdb.new(path: '/tmp/graphdb_example', reset: true)! + // Initialize a new graph database with default cache settings + mut gdb := graphdb.new( + path: '/tmp/social_network_example' + reset: true // Start fresh each time + )! - // Create some nodes - println('\nCreating nodes...') + println('=== Social Network Graph Example ===\n') + + // 1. Creating User Nodes + println('Creating users...') mut alice_id := gdb.create_node({ - 'name': 'Alice', - 'age': '30', - 'city': 'New York' + 'type': 'user' + 'name': 'Alice Chen' + 'age': '28' + 'location': 'San Francisco' + 'occupation': 'Software Engineer' })! - println(gdb.debug_node(alice_id)!) + println('Created user: ${gdb.debug_node(alice_id)!}') mut bob_id := gdb.create_node({ - 'name': 'Bob', - 'age': '25', - 'city': 'Boston' + 'type': 'user' + 'name': 'Bob Smith' + 'age': '32' + 'location': 'New York' + 'occupation': 'Product Manager' })! - println(gdb.debug_node(bob_id)!) + println('Created user: ${gdb.debug_node(bob_id)!}') + mut carol_id := gdb.create_node({ + 'type': 'user' + 'name': 'Carol Davis' + 'age': '27' + 'location': 'San Francisco' + 'occupation': 'Data Scientist' + })! + println('Created user: ${gdb.debug_node(carol_id)!}') + + // 2. Creating Organization Nodes + println('\nCreating organizations...') mut techcorp_id := gdb.create_node({ - 'name': 'TechCorp', - 'industry': 'Technology', + 'type': 'organization' + 'name': 'TechCorp' + 'industry': 'Technology' + 'location': 'San Francisco' + 'size': '500+' + })! 
+ println('Created organization: ${gdb.debug_node(techcorp_id)!}') + + mut datacorp_id := gdb.create_node({ + 'type': 'organization' + 'name': 'DataCorp' + 'industry': 'Data Analytics' 'location': 'New York' + 'size': '100-500' })! - println(gdb.debug_node(techcorp_id)!) + println('Created organization: ${gdb.debug_node(datacorp_id)!}') - // Create relationships + // 3. Creating Interest Nodes + println('\nCreating interest groups...') + mut ai_group_id := gdb.create_node({ + 'type': 'group' + 'name': 'AI Enthusiasts' + 'category': 'Technology' + 'members': '0' + })! + println('Created group: ${gdb.debug_node(ai_group_id)!}') + + // 4. Establishing Relationships println('\nCreating relationships...') - knows_edge_id := gdb.create_edge(alice_id, bob_id, 'KNOWS', { - 'since': '2020', - 'relationship': 'Colleague' + + // Friendship relationships + gdb.create_edge(alice_id, bob_id, 'FRIENDS', { + 'since': '2022' + 'strength': 'close' })! - println(gdb.debug_edge(knows_edge_id)!) - - works_at_id := gdb.create_edge(alice_id, techcorp_id, 'WORKS_AT', { - 'role': 'Software Engineer', - 'since': '2019' + gdb.create_edge(alice_id, carol_id, 'FRIENDS', { + 'since': '2023' + 'strength': 'close' })! - println(gdb.debug_edge(works_at_id)!) - // Show current database state - println('\nInitial database state:') - gdb.debug_db()! + // Employment relationships + gdb.create_edge(alice_id, techcorp_id, 'WORKS_AT', { + 'role': 'Senior Engineer' + 'since': '2021' + 'department': 'Engineering' + })! + gdb.create_edge(bob_id, datacorp_id, 'WORKS_AT', { + 'role': 'Product Lead' + 'since': '2020' + 'department': 'Product' + })! + gdb.create_edge(carol_id, techcorp_id, 'WORKS_AT', { + 'role': 'Data Scientist' + 'since': '2022' + 'department': 'Analytics' + })! - // Print graph structure - println('\nGraph structure:') + // Group memberships + gdb.create_edge(alice_id, ai_group_id, 'MEMBER_OF', { + 'joined': '2023' + 'status': 'active' + })! 
+ gdb.create_edge(carol_id, ai_group_id, 'MEMBER_OF', { + 'joined': '2023' + 'status': 'active' + })! + + // 5. Querying the Graph + println('\nPerforming queries...') + + // Find users in San Francisco + println('\nUsers in San Francisco:') + sf_users := gdb.query_nodes_by_property('location', 'San Francisco')! + for user in sf_users { + if user.properties['type'] == 'user' { + println('- ${user.properties['name']} (${user.properties['occupation']})') + } + } + + // Find Alice's friends + println("\nAlice's friends:") + alice_friends := gdb.get_connected_nodes(alice_id, 'FRIENDS', 'out')! + for friend in alice_friends { + println('- ${friend.properties['name']} in ${friend.properties['location']}') + } + + // Find where Alice works + println("\nAlice's workplace:") + alice_workplaces := gdb.get_connected_nodes(alice_id, 'WORKS_AT', 'out')! + for workplace in alice_workplaces { + println('- ${workplace.properties['name']} (${workplace.properties['industry']})') + } + + // Find TechCorp employees + println('\nTechCorp employees:') + techcorp_employees := gdb.get_connected_nodes(techcorp_id, 'WORKS_AT', 'in')! + for employee in techcorp_employees { + println('- ${employee.properties['name']} as ${employee.properties['occupation']}') + } + + // Find AI group members + println('\nAI Enthusiasts group members:') + ai_members := gdb.get_connected_nodes(ai_group_id, 'MEMBER_OF', 'in')! + for member in ai_members { + println('- ${member.properties['name']}') + } + + // 6. Updating Data + println('\nUpdating data...') + + // Promote Alice + println('\nPromoting Alice...') + mut alice := gdb.get_node(alice_id)! + alice.properties['occupation'] = 'Lead Software Engineer' + gdb.update_node(alice_id, alice.properties)! + + // Update Alice's work relationship + mut edges := gdb.get_edges_between(alice_id, techcorp_id)! + if edges.len > 0 { + gdb.update_edge(edges[0].id, { + 'role': 'Engineering Team Lead' + 'since': '2021' + 'department': 'Engineering' + })! 
+ } + + println('\nFinal graph structure:') gdb.print_graph()! - - // Query nodes by property - println('\nQuerying nodes in New York:') - ny_nodes := gdb.query_nodes_by_property('city', 'New York')! - for node in ny_nodes { - println('Found: ${node.properties['name']}') - } - - // Get connected nodes - println('\nPeople Alice knows:') - alice_knows := gdb.get_connected_nodes(alice_id, 'KNOWS', 'out')! - for node in alice_knows { - println('${node.properties['name']} (${node.properties['city']})') - } - - println('\nWhere Alice works:') - alice_works := gdb.get_connected_nodes(alice_id, 'WORKS_AT', 'out')! - for node in alice_works { - println('${node.properties['name']} (${node.properties['industry']})') - } - - // Update node properties - println('\nUpdating Alice\'s age...') - gdb.update_node(alice_id, { - 'name': 'Alice', - 'age': '31', - 'city': 'New York' - })! - println(gdb.debug_node(alice_id)!) - - // Update edge properties - println('\nUpdating work relationship...') - gdb.update_edge(works_at_id, { - 'role': 'Senior Software Engineer', - 'since': '2019' - })! - println(gdb.debug_edge(works_at_id)!) - - // Show final state - println('\nFinal database state:') - gdb.debug_db()! 
} diff --git a/examples/data/heroencoder_example.vsh b/examples/data/heroencoder_example.vsh index 7628722c..e25766f2 100755 --- a/examples/data/heroencoder_example.vsh +++ b/examples/data/heroencoder_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.encoderhero import freeflowuniverse.herolib.core.base diff --git a/examples/data/ourdb_example.vsh b/examples/data/ourdb_example.vsh index 8647b7ce..d274c6a8 100755 --- a/examples/data/ourdb_example.vsh +++ b/examples/data/ourdb_example.vsh @@ -1,20 +1,18 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.ourdb - const test_dir = '/tmp/ourdb' - mut db := ourdb.new( - record_nr_max: 16777216 - 1 // max size of records - record_size_max: 1024 - path: test_dir - reset: true + record_nr_max: 16777216 - 1 // max size of records + record_size_max: 1024 + path: test_dir + reset: true )! defer { - db.destroy() or { panic('failed to destroy db: ${err}') } + db.destroy() or { panic('failed to destroy db: ${err}') } } // Test set and get @@ -24,12 +22,12 @@ id := db.set(data: test_data)! retrieved := db.get(id)! assert retrieved == test_data -assert id==0 +assert id == 0 // Test overwrite new_data := 'Updated data'.bytes() -id2 := db.set(id:0, data: new_data)! -assert id2==0 +id2 := db.set(id: 0, data: new_data)! +assert id2 == 0 // // Verify lookup table has the correct location // location := db.lookup.get(id2)! @@ -39,4 +37,4 @@ assert id2==0 retrieved2 := db.get(id2)! 
println('Retrieved data: ${retrieved2}') println('Expected data: ${new_data}') -assert retrieved2 == new_data \ No newline at end of file +assert retrieved2 == new_data diff --git a/examples/data/params/args/args_example.vsh b/examples/data/params/args/args_example.vsh index 45aca2da..b5962bfd 100755 --- a/examples/data/params/args/args_example.vsh +++ b/examples/data/params/args/args_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.playbook import freeflowuniverse.herolib.data.paramsparser diff --git a/examples/data/params/paramsfilter/paramsfilter.vsh b/examples/data/params/paramsfilter/paramsfilter.vsh index 9136ff58..5b7c858c 100755 --- a/examples/data/params/paramsfilter/paramsfilter.vsh +++ b/examples/data/params/paramsfilter/paramsfilter.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.paramsparser { Params, parse } import time diff --git a/examples/data/radixtree.vsh b/examples/data/radixtree.vsh index b53f3d25..b439386a 100755 --- a/examples/data/radixtree.vsh +++ b/examples/data/radixtree.vsh @@ -1,8 +1,8 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.radixtree -mut rt := radixtree.new(path:'/tmp/radixtree_test',reset:true)! +mut rt := radixtree.new(path: '/tmp/radixtree_test', reset: true)! // Show initial state println('\nInitial state:') @@ -21,13 +21,11 @@ rt.print_tree()! 
// Test search if value := rt.search('test') { - println('\nFound value: ${value.bytestr()}') + println('\nFound value: ${value.bytestr()}') } else { - println('\nError: ${err}') + println('\nError: ${err}') } - - println('\nInserting key "test2" with value "value2"') rt.insert('test2', 'value2'.bytes())! diff --git a/examples/data/resp/resp_example.vsh b/examples/data/resp/resp_example.vsh index 2ead50a3..6a9a24e8 100755 --- a/examples/data/resp/resp_example.vsh +++ b/examples/data/resp/resp_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.resp import crypto.ed25519 diff --git a/examples/develop/gittools/example3.vsh b/examples/develop/gittools/example3.vsh index 5744b7f4..3befba73 100755 --- a/examples/develop/gittools/example3.vsh +++ b/examples/develop/gittools/example3.vsh @@ -1,9 +1,10 @@ -#!/usr/bin/env -S v -n -w -gc none -cg -no-retry-compilation -cc tcc -d use_openssl -enable-globals run -// #!/usr/bin/env -S v -n -w -cg -no-retry-compilation -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run + +// #!/usr/bin/env -S v -n -w -cg -d use_openssl -enable-globals run //-parallel-cc import os import freeflowuniverse.herolib.develop.gittools -mut gs := gittools.get(reload:true)! +mut gs := gittools.get(reload: true)! -gs.repos_print()! \ No newline at end of file +gs.repos_print()! 
diff --git a/examples/develop/gittools/gittools_example.vsh b/examples/develop/gittools/gittools_example.vsh index d9e39f27..f5817995 100755 --- a/examples/develop/gittools/gittools_example.vsh +++ b/examples/develop/gittools/gittools_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.develop.gittools import freeflowuniverse.herolib.osal diff --git a/examples/develop/gittools/gittools_example2.vsh b/examples/develop/gittools/gittools_example2.vsh index ece58866..c768b256 100755 --- a/examples/develop/gittools/gittools_example2.vsh +++ b/examples/develop/gittools/gittools_example2.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.develop.gittools import freeflowuniverse.herolib.osal diff --git a/examples/develop/juggler/v_example.vsh b/examples/develop/juggler/v_example.vsh index 23a6106a..b81ffd52 100755 --- a/examples/develop/juggler/v_example.vsh +++ b/examples/develop/juggler/v_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import freeflowuniverse.herolib.osal diff --git a/examples/develop/juggler/v_example2.vsh b/examples/develop/juggler/v_example2.vsh index 120868b4..54fe1acc 100755 --- a/examples/develop/juggler/v_example2.vsh +++ b/examples/develop/juggler/v_example2.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.sysadmin.startupmanager import 
os diff --git a/examples/develop/luadns/example.vsh b/examples/develop/luadns/example.vsh index 582225c7..cebd9bdf 100644 --- a/examples/develop/luadns/example.vsh +++ b/examples/develop/luadns/example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.develop.luadns diff --git a/examples/develop/openai/openai_example.vsh b/examples/develop/openai/openai_example.vsh index c48599c3..b65e00bf 100644 --- a/examples/develop/openai/openai_example.vsh +++ b/examples/develop/openai/openai_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.clients.openai as op diff --git a/examples/develop/runpod/runpod_example.vsh b/examples/develop/runpod/runpod_example.vsh index 30ddfc62..d0029a3f 100755 --- a/examples/develop/runpod/runpod_example.vsh +++ b/examples/develop/runpod/runpod_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run // import freeflowuniverse.herolib.core.base import freeflowuniverse.herolib.clients.runpod diff --git a/examples/develop/vastai/vastai_example.vsh b/examples/develop/vastai/vastai_example.vsh index a73255f7..5e1eaaf9 100755 --- a/examples/develop/vastai/vastai_example.vsh +++ b/examples/develop/vastai/vastai_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.clients.vastai import json @@ -24,19 +24,19 @@ create_instance_res := va.create_instance( 
println('create instance res: ${create_instance_res}') attach_sshkey_to_instance_res := va.attach_sshkey_to_instance( - id: 1 - ssh_key: "ssh-rsa AAAA..." + id: 1 + ssh_key: 'ssh-rsa AAAA...' )! println('attach sshkey to instance res: ${attach_sshkey_to_instance_res}') stop_instance_res := va.stop_instance( - id: 1 - state: "stopped" + id: 1 + state: 'stopped' )! println('stop instance res: ${stop_instance_res}') destroy_instance_res := va.destroy_instance( - id: 1 + id: 1 )! println('destroy instance res: ${destroy_instance_res}') @@ -44,23 +44,23 @@ println('destroy instance res: ${destroy_instance_res}') // (request failed with code 500: {"error":"server_error","msg":"Something went wrong on the server"}) launch_instance_res := va.launch_instance( // Required - num_gpus: 1, - gpu_name: "RTX_3090", - image: 'vastai/tensorflow', - disk: 10, - region: "us-west", + num_gpus: 1 + gpu_name: 'RTX_3090' + image: 'vastai/tensorflow' + disk: 10 + region: 'us-west' // Optional - env: "user=7amada, home=/home/7amada", + env: 'user=7amada, home=/home/7amada' )! println('destroy instance res: ${launch_instance_res}') start_instances_res := va.start_instances( - ids: [1, 2, 3] + ids: [1, 2, 3] )! println('start instances res: ${start_instances_res}') start_instance_res := va.start_instance( - id: 1 + id: 1 )! 
println('start instance res: ${start_instance_res}') diff --git a/examples/hero/alpine_example.vsh b/examples/hero/alpine_example.vsh index 994a1a7b..9be0f17d 100755 --- a/examples/hero/alpine_example.vsh +++ b/examples/hero/alpine_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.hero.bootstrap diff --git a/examples/hero/generation/blank_generation/example_1.vsh b/examples/hero/generation/blank_generation/example_1.vsh index 428b21c4..451a73e0 100644 --- a/examples/hero/generation/blank_generation/example_1.vsh +++ b/examples/hero/generation/blank_generation/example_1.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.hero.generation diff --git a/examples/hero/generation/blank_generation/example_2.vsh b/examples/hero/generation/blank_generation/example_2.vsh index dbdbed32..bef29f7a 100644 --- a/examples/hero/generation/blank_generation/example_2.vsh +++ b/examples/hero/generation/blank_generation/example_2.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.hero.generation diff --git a/examples/hero/generation/openapi_generation/generate_actor.vsh b/examples/hero/generation/openapi_generation/generate_actor.vsh index 158bb5c7..135e08cd 100644 --- a/examples/hero/generation/openapi_generation/generate_actor.vsh +++ b/examples/hero/generation/openapi_generation/generate_actor.vsh @@ -1 +1 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc 
none -cc tcc -d use_openssl -enable-globals run diff --git a/examples/hero/generation/openapi_generation/run_actor.vsh b/examples/hero/generation/openapi_generation/run_actor.vsh index 158bb5c7..53056db8 100644 --- a/examples/hero/generation/openapi_generation/run_actor.vsh +++ b/examples/hero/generation/openapi_generation/run_actor.vsh @@ -1 +1 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run diff --git a/examples/hero/generation/openapi_generation/run_interface_procedure.vsh b/examples/hero/generation/openapi_generation/run_interface_procedure.vsh index a09bffc0..5ee4e5c3 100755 --- a/examples/hero/generation/openapi_generation/run_interface_procedure.vsh +++ b/examples/hero/generation/openapi_generation/run_interface_procedure.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import example_actor diff --git a/examples/hero/generation/openapi_generation/run_server.vsh b/examples/hero/generation/openapi_generation/run_server.vsh index 158bb5c7..135e08cd 100644 --- a/examples/hero/generation/openapi_generation/run_server.vsh +++ b/examples/hero/generation/openapi_generation/run_server.vsh @@ -1 +1 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run diff --git a/examples/installers/actrunner.vsh b/examples/installers/actrunner.vsh index 0b718ca8..449736b1 100755 --- a/examples/installers/actrunner.vsh +++ b/examples/installers/actrunner.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import 
freeflowuniverse.herolib.installers.sysadmintools.actrunner import freeflowuniverse.herolib.installers.virt.herocontainers diff --git a/examples/installers/conduit.vsh b/examples/installers/conduit.vsh index 0605eb36..d4e1a69d 100755 --- a/examples/installers/conduit.vsh +++ b/examples/installers/conduit.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.fediverse.conduit diff --git a/examples/installers/coredns.vsh b/examples/installers/coredns.vsh index 7ead086d..dda45b90 100755 --- a/examples/installers/coredns.vsh +++ b/examples/installers/coredns.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.infra.coredns as coredns_installer diff --git a/examples/installers/dagu.vsh b/examples/installers/dagu.vsh index d586c380..39704612 100755 --- a/examples/installers/dagu.vsh +++ b/examples/installers/dagu.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.sysadmintools.daguserver import freeflowuniverse.herolib.installers.infra.zinit diff --git a/examples/installers/dagu_server.vsh b/examples/installers/dagu_server.vsh index f9604090..00a0156a 100755 --- a/examples/installers/dagu_server.vsh +++ b/examples/installers/dagu_server.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.sysadmintools.daguserver diff --git 
a/examples/installers/gitea.vsh b/examples/installers/gitea.vsh index c4afe912..b2ea3eb5 100755 --- a/examples/installers/gitea.vsh +++ b/examples/installers/gitea.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.infra.gitea as gitea_installer diff --git a/examples/installers/griddriver.vsh b/examples/installers/griddriver.vsh index 6535ec13..a001aa60 100755 --- a/examples/installers/griddriver.vsh +++ b/examples/installers/griddriver.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.threefold.griddriver diff --git a/examples/installers/hero_install.vsh b/examples/installers/hero_install.vsh index a2a483ed..e2d5365c 100755 --- a/examples/installers/hero_install.vsh +++ b/examples/installers/hero_install.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.lang.vlang import freeflowuniverse.herolib.installers.sysadmintools.daguserver diff --git a/examples/installers/herocontainers.vsh b/examples/installers/herocontainers.vsh index 531d1623..a42e0a6a 100755 --- a/examples/installers/herocontainers.vsh +++ b/examples/installers/herocontainers.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal import freeflowuniverse.herolib.installers.lang.golang diff --git a/examples/installers/installers.vsh 
b/examples/installers/installers.vsh index 0c901be9..d61cf087 100755 --- a/examples/installers/installers.vsh +++ b/examples/installers/installers.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.lang.rust import freeflowuniverse.herolib.installers.lang.python diff --git a/examples/installers/mycelium.vsh b/examples/installers/mycelium.vsh index 50791b0e..13cc0021 100755 --- a/examples/installers/mycelium.vsh +++ b/examples/installers/mycelium.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.net.mycelium as mycelium_installer diff --git a/examples/installers/podman.vsh b/examples/installers/podman.vsh index b512abc8..71f771df 100755 --- a/examples/installers/podman.vsh +++ b/examples/installers/podman.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.virt.podman as podman_installer diff --git a/examples/installers/postgresql.vsh b/examples/installers/postgresql.vsh index adabfdec..b826d972 100755 --- a/examples/installers/postgresql.vsh +++ b/examples/installers/postgresql.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import time import freeflowuniverse.herolib.installers.db.postgresql diff --git a/examples/installers/youki.vsh b/examples/installers/youki.vsh index d23fa428..0e00e757 100755 --- a/examples/installers/youki.vsh +++ 
b/examples/installers/youki.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.virt.youki diff --git a/examples/lang/python/pythonexample.vsh b/examples/lang/python/pythonexample.vsh index 40ee55ed..13015b52 100755 --- a/examples/lang/python/pythonexample.vsh +++ b/examples/lang/python/pythonexample.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.lang.python import json diff --git a/examples/osal/download/download_example.vsh b/examples/osal/download/download_example.vsh index 0ae2d2e1..f88006cf 100755 --- a/examples/osal/download/download_example.vsh +++ b/examples/osal/download/download_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal { download } diff --git a/examples/osal/ping/ping_example.vsh b/examples/osal/ping/ping_example.vsh index c5ac1b77..cd6a01c6 100755 --- a/examples/osal/ping/ping_example.vsh +++ b/examples/osal/ping/ping_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal { ping } diff --git a/examples/osal/ping/portforward.vsh b/examples/osal/ping/portforward.vsh index 9fb73241..83c3f953 100755 --- a/examples/osal/ping/portforward.vsh +++ b/examples/osal/ping/portforward.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run 
+#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.builder diff --git a/examples/osal/startup_manager.vsh b/examples/osal/startup_manager.vsh index 1d068238..0e5dca92 100755 --- a/examples/osal/startup_manager.vsh +++ b/examples/osal/startup_manager.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.infra.zinit as zinitinstaller import freeflowuniverse.herolib.sysadmin.startupmanager diff --git a/examples/osal/systemd.vsh b/examples/osal/systemd.vsh index becb9d1f..c7778014 100755 --- a/examples/osal/systemd.vsh +++ b/examples/osal/systemd.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal.systemd diff --git a/examples/osal/ufw.vsh b/examples/osal/ufw.vsh index ad602a78..592cf4dc 100755 --- a/examples/osal/ufw.vsh +++ b/examples/osal/ufw.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal.ufw diff --git a/examples/osal/ufw_play.vsh b/examples/osal/ufw_play.vsh index ebdc5822..02dda0ed 100755 --- a/examples/osal/ufw_play.vsh +++ b/examples/osal/ufw_play.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal.ufw import freeflowuniverse.herolib.core.playbook diff --git a/examples/osal/zinit/simple/zinit.vsh b/examples/osal/zinit/simple/zinit.vsh index 68abaa41..cb0d6c3a 100644 
--- a/examples/osal/zinit/simple/zinit.vsh +++ b/examples/osal/zinit/simple/zinit.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import time diff --git a/examples/threefold/grid/README.md b/examples/threefold/grid/README.md index a61e5025..33f353a8 100644 --- a/examples/threefold/grid/README.md +++ b/examples/threefold/grid/README.md @@ -7,7 +7,7 @@ To be able to run examples you need to install updated version of `griddriver`. Create some `griddriver_install.vsh` file containing following code: ```vlang -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.installers.tfgrid.griddriver as griddriverinstaller diff --git a/examples/threefold/grid/deploy/create_update_deployments.vsh b/examples/threefold/grid/deploy/create_update_deployments.vsh index cc1d41a8..d35693a0 100755 --- a/examples/threefold/grid/deploy/create_update_deployments.vsh +++ b/examples/threefold/grid/deploy/create_update_deployments.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/deploy/deploy_gw_fqdn.vsh b/examples/threefold/grid/deploy/deploy_gw_fqdn.vsh index d6b12e46..1c0994a8 100755 --- a/examples/threefold/grid/deploy/deploy_gw_fqdn.vsh +++ b/examples/threefold/grid/deploy/deploy_gw_fqdn.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import 
freeflowuniverse.herolib.threefold.grid as tfgrid import freeflowuniverse.herolib.threefold.grid.models diff --git a/examples/threefold/grid/deploy/deploy_gw_name.vsh b/examples/threefold/grid/deploy/deploy_gw_name.vsh index 86923384..64675c82 100755 --- a/examples/threefold/grid/deploy/deploy_gw_name.vsh +++ b/examples/threefold/grid/deploy/deploy_gw_name.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid as tfgrid import freeflowuniverse.herolib.threefold.grid.models diff --git a/examples/threefold/grid/deploy/deploy_vm.vsh b/examples/threefold/grid/deploy/deploy_vm.vsh index f6aa4e5e..93b37c0f 100755 --- a/examples/threefold/grid/deploy/deploy_vm.vsh +++ b/examples/threefold/grid/deploy/deploy_vm.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/deploy/deploy_vm_high_level.vsh b/examples/threefold/grid/deploy/deploy_vm_high_level.vsh index 1bdfb99d..df37cadf 100755 --- a/examples/threefold/grid/deploy/deploy_vm_high_level.vsh +++ b/examples/threefold/grid/deploy/deploy_vm_high_level.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/deploy/deploy_zdb.vsh b/examples/threefold/grid/deploy/deploy_zdb.vsh index c6c8d180..1a408407 100755 --- a/examples/threefold/grid/deploy/deploy_zdb.vsh 
+++ b/examples/threefold/grid/deploy/deploy_zdb.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/deploy/holochain_vm.vsh b/examples/threefold/grid/deploy/holochain_vm.vsh index 7288285a..b7472f61 100755 --- a/examples/threefold/grid/deploy/holochain_vm.vsh +++ b/examples/threefold/grid/deploy/holochain_vm.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/deploy/vm_with_gw_name.vsh b/examples/threefold/grid/deploy/vm_with_gw_name.vsh index a606479d..8fc2b7ed 100755 --- a/examples/threefold/grid/deploy/vm_with_gw_name.vsh +++ b/examples/threefold/grid/deploy/vm_with_gw_name.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/deployment_state.vsh b/examples/threefold/grid/deployment_state.vsh index dd44cc19..48cc93f1 100644 --- a/examples/threefold/grid/deployment_state.vsh +++ b/examples/threefold/grid/deployment_state.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run struct DeploymentStateDB { secret string // to encrypt symmetric diff --git 
a/examples/threefold/grid/utils/cancel_contract.vsh b/examples/threefold/grid/utils/cancel_contract.vsh index a4056524..5cdb450c 100755 --- a/examples/threefold/grid/utils/cancel_contract.vsh +++ b/examples/threefold/grid/utils/cancel_contract.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import log diff --git a/examples/threefold/grid/utils/cancel_contracts.vsh b/examples/threefold/grid/utils/cancel_contracts.vsh index bb506fb9..15a7aa84 100755 --- a/examples/threefold/grid/utils/cancel_contracts.vsh +++ b/examples/threefold/grid/utils/cancel_contracts.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid as tfgrid import log diff --git a/examples/threefold/grid/utils/tfgrid_config.vsh b/examples/threefold/grid/utils/tfgrid_config.vsh index 867f35fd..df8c7c13 100755 --- a/examples/threefold/grid/utils/tfgrid_config.vsh +++ b/examples/threefold/grid/utils/tfgrid_config.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/grid/utils/zos_version.vsh b/examples/threefold/grid/utils/zos_version.vsh index eadce80b..d4cbe23c 100755 --- a/examples/threefold/grid/utils/zos_version.vsh +++ b/examples/threefold/grid/utils/zos_version.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid as tfgrid import 
freeflowuniverse.herolib.threefold.griddriver diff --git a/examples/threefold/grid/vm_example.vsh b/examples/threefold/grid/vm_example.vsh index 2f158341..ae59f638 100644 --- a/examples/threefold/grid/vm_example.vsh +++ b/examples/threefold/grid/vm_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run struct VMSpecs { deployment_name string diff --git a/examples/threefold/grid/vm_query_example.vsh b/examples/threefold/grid/vm_query_example.vsh index 402330c4..5dd4fb6c 100644 --- a/examples/threefold/grid/vm_query_example.vsh +++ b/examples/threefold/grid/vm_query_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run struct NodeQuery { location string // how to define location diff --git a/examples/threefold/grid/webgw_example.vsh b/examples/threefold/grid/webgw_example.vsh index b2cfa376..42586015 100644 --- a/examples/threefold/grid/webgw_example.vsh +++ b/examples/threefold/grid/webgw_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run struct WebGWArgs { deployment_name string diff --git a/examples/threefold/grid/zdb_example.vsh b/examples/threefold/grid/zdb_example.vsh index 21b5df3d..ab1f2cb7 100644 --- a/examples/threefold/grid/zdb_example.vsh +++ b/examples/threefold/grid/zdb_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.redisclient { RedisClient } diff --git a/examples/threefold/gridproxy/bill.vsh 
b/examples/threefold/gridproxy/bill.vsh index 99de7f33..0fe36b9c 100755 --- a/examples/threefold/gridproxy/bill.vsh +++ b/examples/threefold/gridproxy/bill.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/gridproxy/contract.vsh b/examples/threefold/gridproxy/contract.vsh index 67b97da9..e4c15571 100755 --- a/examples/threefold/gridproxy/contract.vsh +++ b/examples/threefold/gridproxy/contract.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid as tfgrid import freeflowuniverse.herolib.threefold.gridproxy diff --git a/examples/threefold/gridproxy/farm.vsh b/examples/threefold/gridproxy/farm.vsh index a7b68d6b..1b96e0f8 100755 --- a/examples/threefold/gridproxy/farm.vsh +++ b/examples/threefold/gridproxy/farm.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/gridproxy/gateway.vsh b/examples/threefold/gridproxy/gateway.vsh index d7968147..485d5cde 100755 --- a/examples/threefold/gridproxy/gateway.vsh +++ b/examples/threefold/gridproxy/gateway.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.ui.console 
diff --git a/examples/threefold/gridproxy/grid.vsh b/examples/threefold/gridproxy/grid.vsh index 01c26f95..383cd103 100755 --- a/examples/threefold/gridproxy/grid.vsh +++ b/examples/threefold/gridproxy/grid.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/gridproxy/node.vsh b/examples/threefold/gridproxy/node.vsh index 4107464d..caa2ab3a 100755 --- a/examples/threefold/gridproxy/node.vsh +++ b/examples/threefold/gridproxy/node.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/gridproxy/stats.vsh b/examples/threefold/gridproxy/stats.vsh index f8b164d6..18432edd 100755 --- a/examples/threefold/gridproxy/stats.vsh +++ b/examples/threefold/gridproxy/stats.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.threefold.gridproxy.model { NodeStatus } diff --git a/examples/threefold/gridproxy/twin.vsh b/examples/threefold/gridproxy/twin.vsh index 4e74d5b4..2300da82 100755 --- a/examples/threefold/gridproxy/twin.vsh +++ b/examples/threefold/gridproxy/twin.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy 
import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/holochain/holochain_deployer.vsh b/examples/threefold/holochain/holochain_deployer.vsh index d3e6ebb8..f3e3ae1e 100755 --- a/examples/threefold/holochain/holochain_deployer.vsh +++ b/examples/threefold/holochain/holochain_deployer.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.tfrobot import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/holochain/holochain_vms.vsh b/examples/threefold/holochain/holochain_vms.vsh index 2fa435e9..bbcdd594 100755 --- a/examples/threefold/holochain/holochain_vms.vsh +++ b/examples/threefold/holochain/holochain_vms.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.tfrobot import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/holochain/tasker_example.vsh b/examples/threefold/holochain/tasker_example.vsh index e342b290..1ea20b37 100755 --- a/examples/threefold/holochain/tasker_example.vsh +++ b/examples/threefold/holochain/tasker_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.tfrobot import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/holochain/tasker_example2.vsh b/examples/threefold/holochain/tasker_example2.vsh index 9777bd0f..ab1c1bb5 100755 --- a/examples/threefold/holochain/tasker_example2.vsh +++ b/examples/threefold/holochain/tasker_example2.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none 
-no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.tfrobot import freeflowuniverse.herolib.ui.console diff --git a/examples/threefold/solana/seahorse_vm.vsh b/examples/threefold/solana/seahorse_vm.vsh index 6d1f0866..7efcc25e 100755 --- a/examples/threefold/solana/seahorse_vm.vsh +++ b/examples/threefold/solana/seahorse_vm.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.grid.models import freeflowuniverse.herolib.threefold.grid as tfgrid diff --git a/examples/threefold/tfgrid3deployer/hetzner/hetzner.vsh b/examples/threefold/tfgrid3deployer/hetzner/hetzner.vsh index 2260b4bc..b931274f 100644 --- a/examples/threefold/tfgrid3deployer/hetzner/hetzner.vsh +++ b/examples/threefold/tfgrid3deployer/hetzner/hetzner.vsh @@ -1,6 +1,6 @@ -#!/usr/bin/env -S v -gc none -no-retry-compilation -d use_openssl -enable-globals -cg run +#!/usr/bin/env -S v -gc none -d use_openssl -enable-globals -cg run -//#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals -cg run +//#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.threefold.tfgrid3deployer import freeflowuniverse.herolib.installers.threefold.griddriver diff --git a/examples/threefold/tfgrid3deployer/tfgrid3deployer_example.vsh b/examples/threefold/tfgrid3deployer/tfgrid3deployer_example.vsh index 9bd099bc..6aaa9956 100755 --- a/examples/threefold/tfgrid3deployer/tfgrid3deployer_example.vsh +++ b/examples/threefold/tfgrid3deployer/tfgrid3deployer_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl 
-enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.threefold.tfgrid3deployer diff --git a/examples/threefold/tfgrid3deployer/vm_gw_caddy/delete.vsh b/examples/threefold/tfgrid3deployer/vm_gw_caddy/delete.vsh index 24d6e759..a39fb870 100755 --- a/examples/threefold/tfgrid3deployer/vm_gw_caddy/delete.vsh +++ b/examples/threefold/tfgrid3deployer/vm_gw_caddy/delete.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals -cg run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.threefold.tfgrid3deployer diff --git a/examples/threefold/tfgrid3deployer/vm_gw_caddy/vm_gw_caddy.vsh b/examples/threefold/tfgrid3deployer/vm_gw_caddy/vm_gw_caddy.vsh index b499d1dc..05e5ebca 100755 --- a/examples/threefold/tfgrid3deployer/vm_gw_caddy/vm_gw_caddy.vsh +++ b/examples/threefold/tfgrid3deployer/vm_gw_caddy/vm_gw_caddy.vsh @@ -1,6 +1,6 @@ -#!/usr/bin/env -S v -gc none -no-retry-compilation -d use_openssl -enable-globals -cg run +#!/usr/bin/env -S v -gc none -d use_openssl -enable-globals -cg run -//#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals -cg run +//#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals -cg run import freeflowuniverse.herolib.threefold.gridproxy import freeflowuniverse.herolib.threefold.tfgrid3deployer import freeflowuniverse.herolib.installers.threefold.griddriver diff --git a/examples/ui/silence.vsh b/examples/ui/silence.vsh index c58733f2..84627974 100755 --- a/examples/ui/silence.vsh +++ b/examples/ui/silence.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run 
import freeflowuniverse.herolib.osal import freeflowuniverse.herolib.ui.console diff --git a/examples/virt/docker/docker_dev_tools.vsh b/examples/virt/docker/docker_dev_tools.vsh index ef309129..4f345343 100644 --- a/examples/virt/docker/docker_dev_tools.vsh +++ b/examples/virt/docker/docker_dev_tools.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.docker diff --git a/examples/virt/docker/docker_init.vsh b/examples/virt/docker/docker_init.vsh index 62d7b78f..a0b906a3 100755 --- a/examples/virt/docker/docker_init.vsh +++ b/examples/virt/docker/docker_init.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.docker diff --git a/examples/virt/docker/docker_registry.vsh b/examples/virt/docker/docker_registry.vsh index 7317a39e..5e3c7ee4 100644 --- a/examples/virt/docker/docker_registry.vsh +++ b/examples/virt/docker/docker_registry.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.docker diff --git a/examples/virt/docker/presearch_docker.vsh b/examples/virt/docker/presearch_docker.vsh index e56c2388..a5f57024 100644 --- a/examples/virt/docker/presearch_docker.vsh +++ b/examples/virt/docker/presearch_docker.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.docker import os diff --git a/examples/virt/docker/tf_dashboard.vsh 
b/examples/virt/docker/tf_dashboard.vsh index 01bbd0c1..326baa6b 100644 --- a/examples/virt/docker/tf_dashboard.vsh +++ b/examples/virt/docker/tf_dashboard.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.docker diff --git a/examples/virt/hetzner/hetzner_example.vsh b/examples/virt/hetzner/hetzner_example.vsh index adef3363..5035425c 100755 --- a/examples/virt/hetzner/hetzner_example.vsh +++ b/examples/virt/hetzner/hetzner_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.hetzner import freeflowuniverse.herolib.ui.console diff --git a/examples/virt/lima/lima_example.vsh b/examples/virt/lima/lima_example.vsh index ac2203d4..1214f0c9 100755 --- a/examples/virt/lima/lima_example.vsh +++ b/examples/virt/lima/lima_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.lima import freeflowuniverse.herolib.core.texttools diff --git a/examples/virt/podman_buildah/buildah_example.vsh b/examples/virt/podman_buildah/buildah_example.vsh index 3f6823aa..cfa37739 100755 --- a/examples/virt/podman_buildah/buildah_example.vsh +++ b/examples/virt/podman_buildah/buildah_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.herocontainers import freeflowuniverse.herolib.ui.console diff --git 
a/examples/virt/podman_buildah/buildah_run.vsh b/examples/virt/podman_buildah/buildah_run.vsh index c3c7f2f9..504b21f5 100755 --- a/examples/virt/podman_buildah/buildah_run.vsh +++ b/examples/virt/podman_buildah/buildah_run.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.herocontainers import freeflowuniverse.herolib.ui.console diff --git a/examples/virt/podman_buildah/buildah_run_clean.vsh b/examples/virt/podman_buildah/buildah_run_clean.vsh index 3eada9d3..e538ed05 100755 --- a/examples/virt/podman_buildah/buildah_run_clean.vsh +++ b/examples/virt/podman_buildah/buildah_run_clean.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.virt.herocontainers import freeflowuniverse.herolib.ui.console diff --git a/examples/virt/podman_buildah/buildah_run_mdbook.vsh b/examples/virt/podman_buildah/buildah_run_mdbook.vsh index 23ffca50..1cb931d1 100755 --- a/examples/virt/podman_buildah/buildah_run_mdbook.vsh +++ b/examples/virt/podman_buildah/buildah_run_mdbook.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import flag diff --git a/examples/virt/windows/cloudhypervisor.vsh b/examples/virt/windows/cloudhypervisor.vsh index ce457359..b9dd9574 100755 --- a/examples/virt/windows/cloudhypervisor.vsh +++ b/examples/virt/windows/cloudhypervisor.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import 
freeflowuniverse.herolib.installers.virt.cloudhypervisor as cloudhypervisor_installer import freeflowuniverse.herolib.virt.cloudhypervisor diff --git a/examples/webdav/webdav.vsh b/examples/webdav/webdav.vsh index 6ca1acb2..28471daf 100755 --- a/examples/webdav/webdav.vsh +++ b/examples/webdav/webdav.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.webdav import freeflowuniverse.herolib.core.pathlib diff --git a/examples/webtools/mdbook_markdown/doctree_export.vsh b/examples/webtools/mdbook_markdown/doctree_export.vsh index fc9a9bea..7d161f2f 100755 --- a/examples/webtools/mdbook_markdown/doctree_export.vsh +++ b/examples/webtools/mdbook_markdown/doctree_export.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.data.doctree diff --git a/examples/webtools/mdbook_markdown/markdown_example.vsh b/examples/webtools/mdbook_markdown/markdown_example.vsh index 2b34ece7..dbfd3675 100755 --- a/examples/webtools/mdbook_markdown/markdown_example.vsh +++ b/examples/webtools/mdbook_markdown/markdown_example.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run // import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.ui.console diff --git a/generate.vsh b/generate.vsh index b6df27e0..b2766679 100755 --- a/generate.vsh +++ b/generate.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run import os import 
flag diff --git a/install_herolib.vsh b/install_herolib.vsh index 051d705d..9097331c 100755 --- a/install_herolib.vsh +++ b/install_herolib.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import flag @@ -64,7 +64,7 @@ os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowunive println('Herolib installation completed successfully!') // Add vtest alias -addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -no-retry-compilation -cc tcc test\' ') or { +addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -cc tcc test\' ') or { eprintln('Failed to add vtest alias: ${err}') } diff --git a/lib/clients/livekit/client.v b/lib/clients/livekit/client.v index 44e16c98..79158ae7 100644 --- a/lib/clients/livekit/client.v +++ b/lib/clients/livekit/client.v @@ -3,7 +3,7 @@ module livekit // App struct with `livekit.Client`, API keys, and other shared data pub struct Client { pub: - url string @[required] - api_key string @[required] - api_secret string @[required] + url string @[required] + api_key string @[required] + api_secret string @[required] } diff --git a/lib/clients/livekit/factory.v b/lib/clients/livekit/factory.v index 033eb2f9..31f6efe6 100644 --- a/lib/clients/livekit/factory.v +++ b/lib/clients/livekit/factory.v @@ -1,6 +1,7 @@ - module livekit pub fn new(client Client) Client { - return Client{...client} -} \ No newline at end of file + return Client{ + ...client + } +} diff --git a/lib/clients/livekit/room.v b/lib/clients/livekit/room.v index cce17e32..5687ed3d 100644 --- a/lib/clients/livekit/room.v +++ b/lib/clients/livekit/room.v @@ -5,47 +5,46 @@ import json @[params] pub struct ListRoomsParams { - names []string + names []string } pub struct ListRoomsResponse { pub: - rooms []Room + rooms []Room } pub fn (c 
Client) list_rooms(params ListRoomsParams) !ListRoomsResponse { - // Prepare request body - request := params - request_json := json.encode(request) + // Prepare request body + request := params + request_json := json.encode(request) - - // create token and give grant to list rooms + // create token and give grant to list rooms mut token := c.new_access_token()! - token.grants.video.room_list = true + token.grants.video.room_list = true - // make POST request - url := '${c.url}/twirp/livekit.RoomService/ListRooms' - // Configure HTTP request - mut headers := http.new_header_from_map({ - http.CommonHeader.authorization: 'Bearer ${token.to_jwt()!}', - http.CommonHeader.content_type: 'application/json' - }) + // make POST request + url := '${c.url}/twirp/livekit.RoomService/ListRooms' + // Configure HTTP request + mut headers := http.new_header_from_map({ + http.CommonHeader.authorization: 'Bearer ${token.to_jwt()!}' + http.CommonHeader.content_type: 'application/json' + }) - response := http.fetch(http.FetchConfig{ - url: url - method: .post - header: headers - data: request_json - })! + response := http.fetch(http.FetchConfig{ + url: url + method: .post + header: headers + data: request_json + })! 
- if response.status_code != 200 { - return error('Failed to list rooms: $response.status_code') - } + if response.status_code != 200 { + return error('Failed to list rooms: ${response.status_code}') + } - // Parse response - rooms_response := json.decode(ListRoomsResponse, response.body) or { - return error('Failed to parse response: $err') - } - - return rooms_response + // Parse response + rooms_response := json.decode(ListRoomsResponse, response.body) or { + return error('Failed to parse response: ${err}') + } + + return rooms_response } diff --git a/lib/clients/livekit/room_model.v b/lib/clients/livekit/room_model.v index 119c9a67..4586b545 100644 --- a/lib/clients/livekit/room_model.v +++ b/lib/clients/livekit/room_model.v @@ -5,29 +5,29 @@ import json pub struct Codec { pub: - fmtp_line string - mime string + fmtp_line string + mime string } pub struct Version { pub: - ticks u64 - unix_micro string + ticks u64 + unix_micro string } pub struct Room { pub: - active_recording bool - creation_time string - departure_timeout int - empty_timeout int - enabled_codecs []Codec - max_participants int - metadata string - name string - num_participants int - num_publishers int - sid string - turn_password string - version Version -} \ No newline at end of file + active_recording bool + creation_time string + departure_timeout int + empty_timeout int + enabled_codecs []Codec + max_participants int + metadata string + name string + num_participants int + num_publishers int + sid string + turn_password string + version Version +} diff --git a/lib/clients/livekit/room_test.v b/lib/clients/livekit/room_test.v index 2bd9d3fe..de9d8e4d 100644 --- a/lib/clients/livekit/room_test.v +++ b/lib/clients/livekit/room_test.v @@ -6,20 +6,20 @@ import freeflowuniverse.herolib.osal const env_file = '${os.dir(@FILE)}/.env' fn testsuite_begin() ! { - if os.exists(env_file) { - osal.load_env_file(env_file)! - } + if os.exists(env_file) { + osal.load_env_file(env_file)! 
+ } } fn new_test_client() Client { - return new( - url: os.getenv('LIVEKIT_URL') - api_key: os.getenv('LIVEKIT_API_KEY') - api_secret: os.getenv('LIVEKIT_API_SECRET') - ) + return new( + url: os.getenv('LIVEKIT_URL') + api_key: os.getenv('LIVEKIT_API_KEY') + api_secret: os.getenv('LIVEKIT_API_SECRET') + ) } fn test_client_list_rooms() ! { - client := new_test_client() - rooms := client.list_rooms()! + client := new_test_client() + rooms := client.list_rooms()! } diff --git a/lib/clients/livekit/token.v b/lib/clients/livekit/token.v index a1cdd881..ea30877c 100644 --- a/lib/clients/livekit/token.v +++ b/lib/clients/livekit/token.v @@ -10,25 +10,25 @@ import json // Define AccessTokenOptions struct @[params] pub struct AccessTokenOptions { - pub mut: - ttl int = 21600// TTL in seconds - name string // Display name for the participant - identity string // Identity of the user - metadata string // Custom metadata to be passed to participants +pub mut: + ttl int = 21600 // TTL in seconds + name string // Display name for the participant + identity string // Identity of the user + metadata string // Custom metadata to be passed to participants } // Constructor for AccessToken pub fn (client Client) new_access_token(options AccessTokenOptions) !AccessToken { return AccessToken{ - api_key: client.api_key + api_key: client.api_key api_secret: client.api_secret - identity: options.identity - ttl: options.ttl - grants: ClaimGrants{ - exp: time.now().unix()+ options.ttl - iss: client.api_key - sub: options.name + identity: options.identity + ttl: options.ttl + grants: ClaimGrants{ + exp: time.now().unix() + options.ttl + iss: client.api_key + sub: options.name name: options.name } } -} \ No newline at end of file +} diff --git a/lib/clients/livekit/token_model.v b/lib/clients/livekit/token_model.v index 84a6b352..4cb33d67 100644 --- a/lib/clients/livekit/token_model.v +++ b/lib/clients/livekit/token_model.v @@ -10,23 +10,23 @@ import json // Struct representing grants pub 
struct ClaimGrants { pub mut: - video VideoGrant - iss string - exp i64 - nbf int - sub string - name string + video VideoGrant + iss string + exp i64 + nbf int + sub string + name string } // VideoGrant struct placeholder pub struct VideoGrant { pub mut: - room string - room_join bool @[json: 'roomJoin'] - room_list bool @[json: 'roomList'] - can_publish bool @[json: 'canPublish'] - can_publish_data bool @[json: 'canPublishData'] - can_subscribe bool @[json: 'canSubscribe'] + room string + room_join bool @[json: 'roomJoin'] + room_list bool @[json: 'roomList'] + can_publish bool @[json: 'canPublish'] + can_publish_data bool @[json: 'canPublishData'] + can_subscribe bool @[json: 'canSubscribe'] } // SIPGrant struct placeholder @@ -34,12 +34,12 @@ struct SIPGrant {} // AccessToken class pub struct AccessToken { - mut: - api_key string - api_secret string - grants ClaimGrants - identity string - ttl int +mut: + api_key string + api_secret string + grants ClaimGrants + identity string + ttl int } // Method to add a video grant to the token @@ -65,7 +65,8 @@ pub fn (token AccessToken) to_jwt() !string { unsigned_token := '${header_encoded}.${payload_encoded}' // Create the HMAC-SHA256 signature - signature := hmac.new(token.api_secret.bytes(), unsigned_token.bytes(), sha256.sum, sha256.block_size) + signature := hmac.new(token.api_secret.bytes(), unsigned_token.bytes(), sha256.sum, + sha256.block_size) // Encode the signature in base64 signature_encoded := base64.url_encode(signature) @@ -73,4 +74,4 @@ pub fn (token AccessToken) to_jwt() !string { // Create the final JWT jwt := '${unsigned_token}.${signature_encoded}' return jwt -} \ No newline at end of file +} diff --git a/lib/clients/mycelium/readme.md b/lib/clients/mycelium/readme.md index 598f250c..562e19a5 100644 --- a/lib/clients/mycelium/readme.md +++ b/lib/clients/mycelium/readme.md @@ -33,7 +33,7 @@ Note: Configuration is not needed if using a locally running Mycelium server wit Save as 
`mycelium_example.vsh`: ```v -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.clients.mycelium diff --git a/lib/clients/postgresql_client/readme.md b/lib/clients/postgresql_client/readme.md index 3daad8d2..11cbcc39 100644 --- a/lib/clients/postgresql_client/readme.md +++ b/lib/clients/postgresql_client/readme.md @@ -9,7 +9,7 @@ The PostgreSQL client can be configured using HeroScript. Configuration settings ### Basic Configuration Example ```v -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core import os diff --git a/lib/code/generator/installer_client/readme.md b/lib/code/generator/installer_client/readme.md index fabcd3de..dfadd322 100644 --- a/lib/code/generator/installer_client/readme.md +++ b/lib/code/generator/installer_client/readme.md @@ -52,7 +52,7 @@ this is to make distinction between processing at compile time (pre-compile) or to call in code ```v -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.code.generator.generic diff --git a/lib/conversiontools/docsorter/readme.md b/lib/conversiontools/docsorter/readme.md index 4f30a86f..9b49d071 100644 --- a/lib/conversiontools/docsorter/readme.md +++ b/lib/conversiontools/docsorter/readme.md @@ -10,7 +10,7 @@ How to use ## example ```v -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import os import import freeflowuniverse.herolib.conversiontools.docsorter diff --git a/lib/core/generator/generic/readme.md 
b/lib/core/generator/generic/readme.md index a458cbf3..1f9254ab 100644 --- a/lib/core/generator/generic/readme.md +++ b/lib/core/generator/generic/readme.md @@ -69,7 +69,7 @@ this is to make distinction between processing at compile time (pre-compile) or to call in code ```v -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.core.generator.generic diff --git a/lib/data/cache/README.md b/lib/data/cache/README.md new file mode 100644 index 00000000..d0bb068b --- /dev/null +++ b/lib/data/cache/README.md @@ -0,0 +1,139 @@ +# HeroLib Cache System + +A high-performance, generic in-memory caching system for V with support for TTL, size limits, and LRU eviction. + +## Features + +- Generic type support (can cache any type) +- Configurable maximum entries and memory size limits +- Time-To-Live (TTL) support +- Least Recently Used (LRU) eviction policy +- Memory-aware caching with size-based eviction +- Thread-safe operations +- Optional persistence support (configurable) + +## Configuration + +The cache system is highly configurable through the `CacheConfig` struct: + +```v +pub struct CacheConfig { +pub mut: + max_entries u32 = 1000 // Maximum number of entries + max_size_mb f64 = 100.0 // Maximum cache size in MB + ttl_seconds i64 = 3600 // Time-to-live in seconds (0 = no TTL) + eviction_ratio f64 = 0.05 // Percentage of entries to evict when full (5%) + persist bool // Whether to persist cache to disk +} +``` + +## Basic Usage + +Here's a simple example of using the cache: + +```v +import freeflowuniverse.herolib.data.cache + +// Define your struct type +@[heap] +struct User { + id u32 + name string + age int +} + +fn main() { + // Create a cache with default configuration + mut user_cache := cache.new_cache[User]() + + // Create a user + user := &User{ + id: 1 + name: 'Alice' + age: 30 + } + + // Add to cache + 
user_cache.set(user.id, user) + + // Retrieve from cache + if cached_user := user_cache.get(1) { + println('Found user: ${cached_user.name}') + } +} +``` + +## Advanced Usage + +### Custom Configuration + +```v +mut user_cache := cache.new_cache[User]( + max_entries: 1000 // Maximum number of entries + max_size_mb: 10.0 // Maximum cache size in MB + ttl_seconds: 300 // Items expire after 5 minutes + eviction_ratio: 0.2 // Evict 20% of entries when full +) +``` + +### Memory Management + +The cache automatically manages memory using two mechanisms: + +1. **Entry Count Limit**: When `max_entries` is reached, least recently used items are evicted. +2. **Memory Size Limit**: When `max_size_mb` is reached, items are evicted based on the `eviction_ratio`. + +```v +// Create a cache with strict memory limits +config := cache.CacheConfig{ + max_entries: 100 // Only keep 100 entries maximum + max_size_mb: 1.0 // Limit cache to 1MB + eviction_ratio: 0.1 // Remove 10% of entries when full +} +``` + +### Cache Operations + +```v +mut cache := cache.new_cache[User](cache.CacheConfig{}) + +// Add/update items +cache.set(1, user1) +cache.set(2, user2) + +// Get items +if user := cache.get(1) { + // Use cached user +} + +// Check cache size +println('Cache entries: ${cache.len()}') + +// Clear the cache +cache.clear() +``` + +## Best Practices + +1. **Choose Appropriate TTL**: Set TTL based on how frequently your data changes and how critical freshness is. + +2. **Memory Management**: + - Set reasonable `max_entries` and `max_size_mb` limits based on your application's memory constraints + - Monitor cache size using `len()` + - Use appropriate `eviction_ratio` (typically 0.05-0.2) to balance performance and memory usage + +3. **Type Safety**: + - Always use `@[heap]` attribute for structs stored in cache + - Ensure cached types are properly memory managed + +4. 
**Error Handling**: + - Always use option types when retrieving items (`if value := cache.get(key) {`) + - Handle cache misses gracefully + +5. **Performance**: + - Consider the trade-off between cache size and hit rate + - Monitor and adjust TTL and eviction settings based on usage patterns + +## Thread Safety + +The cache implementation is thread-safe for concurrent access. However, when using the cache in a multi-threaded environment, ensure proper synchronization when accessing cached objects. diff --git a/lib/data/cache/cache.v b/lib/data/cache/cache.v new file mode 100644 index 00000000..fd04bb68 --- /dev/null +++ b/lib/data/cache/cache.v @@ -0,0 +1,167 @@ +module cache + +import time +import math + +// CacheConfig holds cache configuration parameters +pub struct CacheConfig { +pub mut: + max_entries u32 = 1000 // Maximum number of entries + max_size_mb f64 = 100.0 // Maximum cache size in MB + ttl_seconds i64 = 3600 // Time-to-live in seconds (0 = no TTL) + eviction_ratio f64 = 0.05 // Percentage of entries to evict when full (5%) +} + +// CacheEntry represents a cached object with its metadata +@[heap] +struct CacheEntry[T] { +mut: + obj T // Reference to the cached object + last_access i64 // Unix timestamp of last access + created_at i64 // Unix timestamp of creation + size u32 // Approximate size in bytes +} + +// Cache manages the in-memory caching of objects +pub struct Cache[T] { +mut: + entries map[u32]&CacheEntry[T] // Map of object ID to cache entry + config CacheConfig // Cache configuration + access_log []u32 // Ordered list of object IDs by access time + total_size u64 // Total size of cached entries in bytes +} + +// new_cache creates a new cache instance with the given configuration +pub fn new_cache[T](config CacheConfig) &Cache[T] { + return &Cache[T]{ + entries: map[u32]&CacheEntry[T]{} + config: config + access_log: []u32{cap: int(config.max_entries)} + total_size: 0 + } +} + +// get retrieves an object from the cache if it exists +pub fn 
(mut c Cache[T]) get(id u32) ?&T { + if entry := c.entries[id] { + now := time.now().unix() + + // Check TTL + if c.config.ttl_seconds > 0 { + if (now - entry.created_at) > c.config.ttl_seconds { + c.remove(id) + return none + } + } + + // Update access time + unsafe { + entry.last_access = now + } + // Move ID to end of access log + idx := c.access_log.index(id) + if idx >= 0 { + c.access_log.delete(idx) + } + c.access_log << id + + return &entry.obj + } + return none +} + +// set adds or updates an object in the cache +pub fn (mut c Cache[T]) set(id u32, obj &T) { + now := time.now().unix() + + // Calculate entry size (approximate) + entry_size := sizeof(T) + sizeof(CacheEntry[T]) + + // Check memory and entry count limits + new_total := c.total_size + u64(entry_size) + max_bytes := u64(c.config.max_size_mb * 1024 * 1024) + + // Always evict if we're at or above max_entries + if c.entries.len >= int(c.config.max_entries) { + c.evict() + } else if new_total > max_bytes { + // Otherwise evict only if we're over memory limit + c.evict() + } + + // Create new entry + entry := &CacheEntry[T]{ + obj: *obj + last_access: now + created_at: now + size: u32(entry_size) + } + + // Update total size + if old := c.entries[id] { + c.total_size -= u64(old.size) + } + c.total_size += u64(entry_size) + + // Add to entries map + c.entries[id] = entry + + // Update access log + idx := c.access_log.index(id) + if idx >= 0 { + c.access_log.delete(idx) + } + c.access_log << id + + // Ensure access_log stays in sync with entries + if c.access_log.len > c.entries.len { + c.access_log = c.access_log[c.access_log.len - c.entries.len..] 
+ } +} + +// evict removes entries based on configured eviction ratio +fn (mut c Cache[T]) evict() { + // If we're at max entries, remove enough to get to 80% capacity + target_size := int(c.config.max_entries) * 8 / 10 // 80% + num_to_evict := if c.entries.len >= int(c.config.max_entries) { + c.entries.len - target_size + } else { + math.max(1, int(c.entries.len * c.config.eviction_ratio)) + } + + if num_to_evict > 0 { + // Remove oldest entries + mut evicted_size := u64(0) + for i := 0; i < num_to_evict && i < c.access_log.len; i++ { + id := c.access_log[i] + if entry := c.entries[id] { + evicted_size += u64(entry.size) + c.entries.delete(id) + } + } + + // Update total size and access log + c.total_size -= evicted_size + c.access_log = c.access_log[num_to_evict..] + } +} + +// remove deletes a single entry from the cache +pub fn (mut c Cache[T]) remove(id u32) { + if entry := c.entries[id] { + c.total_size -= u64(entry.size) + } + c.entries.delete(id) +} + +// clear empties the cache +pub fn (mut c Cache[T]) clear() { + c.entries.clear() + c.access_log.clear() + c.total_size = 0 +} + +// len returns the number of entries in the cache +pub fn (c &Cache[T]) len() int { + return c.entries.len +} diff --git a/lib/data/cache/cache_test.v b/lib/data/cache/cache_test.v new file mode 100644 index 00000000..62f1bd56 --- /dev/null +++ b/lib/data/cache/cache_test.v @@ -0,0 +1,152 @@ +module cache + +import time + +@[heap] +struct TestData { + value string +} + +fn test_cache_creation() { + config := CacheConfig{ + max_entries: 100 + max_size_mb: 1.0 + ttl_seconds: 60 + eviction_ratio: 0.1 + } + mut cache := new_cache[TestData](config) + assert cache.len() == 0 + assert cache.config.max_entries == 100 + assert cache.config.max_size_mb == 1.0 + assert cache.config.ttl_seconds == 60 + assert cache.config.eviction_ratio == 0.1 +} + +fn test_cache_set_get() { + mut cache := new_cache[TestData](CacheConfig{}) + data := &TestData{ + value: 'test' + } + + cache.set(1, data) + 
assert cache.len() == 1 + + if cached := cache.get(1) { + assert cached.value == 'test' + } else { + assert false, 'Failed to get cached item' + } + + if _ := cache.get(2) { + assert false, 'Should not find non-existent item' + } +} + +fn test_cache_ttl() { + $if debug { + eprintln('> test_cache_ttl') + } + mut cache := new_cache[TestData](CacheConfig{ + ttl_seconds: 1 + }) + data := &TestData{ + value: 'test' + } + + cache.set(1, data) + assert cache.len() == 1 + + if cached := cache.get(1) { + assert cached.value == 'test' + } + + time.sleep(2 * time.second) + $if debug { + eprintln('> waited 2 seconds') + } + + if _ := cache.get(1) { + assert false, 'Item should have expired' + } + assert cache.len() == 0 +} + +fn test_cache_eviction() { + mut cache := new_cache[TestData](CacheConfig{ + max_entries: 2 + eviction_ratio: 0.5 + }) + + data1 := &TestData{ + value: 'one' + } + data2 := &TestData{ + value: 'two' + } + data3 := &TestData{ + value: 'three' + } + + cache.set(1, data1) + cache.set(2, data2) + assert cache.len() == 2 + + // Access data1 to make it more recently used + cache.get(1) + + // Adding data3 should trigger eviction of data2 (least recently used) + cache.set(3, data3) + assert cache.len() == 2 + + if _ := cache.get(2) { + assert false, 'Item 2 should have been evicted' + } + + if cached := cache.get(1) { + assert cached.value == 'one' + } else { + assert false, 'Item 1 should still be cached' + } + + if cached := cache.get(3) { + assert cached.value == 'three' + } else { + assert false, 'Item 3 should be cached' + } +} + +fn test_cache_clear() { + mut cache := new_cache[TestData](CacheConfig{}) + data := &TestData{ + value: 'test' + } + + cache.set(1, data) + assert cache.len() == 1 + + cache.clear() + assert cache.len() == 0 + + if _ := cache.get(1) { + assert false, 'Cache should be empty after clear' + } +} + +fn test_cache_size_limit() { + // Set a very small size limit to force eviction + mut cache := new_cache[TestData](CacheConfig{ + 
max_size_mb: 0.0001 // ~100 bytes + eviction_ratio: 0.5 + }) + + // Add multiple entries to exceed size limit + for i := u32(0); i < 10; i++ { + data := &TestData{ + value: 'test${i}' + } + cache.set(i, data) + } + + // Cache should have evicted some entries to stay under size limit + assert cache.len() < 10 +} diff --git a/lib/data/encoder/encoder_decode.v b/lib/data/encoder/encoder_decode.v index 7287906d..b2ff1c81 100644 --- a/lib/data/encoder/encoder_decode.v +++ b/lib/data/encoder/encoder_decode.v @@ -17,138 +17,201 @@ pub fn decoder_new(data []u8) Decoder { return e } -pub fn (mut d Decoder) get_string() string { - n := d.get_u16() - v := d.data[..n] +pub fn (mut d Decoder) get_string() !string { + n := d.get_u16()! + if n > 64 * 1024 { // 64KB limit + return error('string length ${n} exceeds 64KB limit') + } + if n > d.data.len { + return error('string length ${n} exceeds remaining data length ${d.data.len}') + } + mut bytes := []u8{len: int(n)} + for i in 0 .. n { + bytes[i] = d.data[i] + } d.data.delete_many(0, n) - return v.bytestr() + return bytes.bytestr() } -pub fn (mut d Decoder) get_int() int { - return int(d.get_u32()) +pub fn (mut d Decoder) get_int() !int { + return int(d.get_u32()!) } -pub fn (mut d Decoder) get_bytes() []u8 { - n := int(d.get_u32()) - v := d.data[..n] +pub fn (mut d Decoder) get_bytes() ![]u8 { + n := int(d.get_u32()!) + if n > 64 * 1024 { // 64KB limit + return error('bytes length ${n} exceeds 64KB limit') + } + if n > d.data.len { + return error('bytes length ${n} exceeds remaining data length ${d.data.len}') + } + mut bytes := []u8{len: int(n)} + for i in 0 .. 
n { + bytes[i] = d.data[i] + } d.data.delete_many(0, n) - return v + return bytes } // adds u16 length of string in bytes + the bytes -pub fn (mut d Decoder) get_u8() u8 { - // remove first byte, this corresponds to u8, so the data bytestring becomes 1 byte shorter +pub fn (mut d Decoder) get_u8() !u8 { + if d.data.len < 1 { + return error('not enough data for u8') + } v := d.data.first() d.data.delete(0) return v } -pub fn (mut d Decoder) get_u16() u16 { - v := d.data[..2] +pub fn (mut d Decoder) get_u16() !u16 { + if d.data.len < 2 { + return error('not enough data for u16') + } + mut bytes := []u8{len: 2} + bytes[0] = d.data[0] + bytes[1] = d.data[1] d.data.delete_many(0, 2) - return bin.little_endian_u16(v) + return bin.little_endian_u16(bytes) } -pub fn (mut d Decoder) get_u32() u32 { - v := d.data[..4] +pub fn (mut d Decoder) get_u32() !u32 { + if d.data.len < 4 { + return error('not enough data for u32') + } + mut bytes := []u8{len: 4} + bytes[0] = d.data[0] + bytes[1] = d.data[1] + bytes[2] = d.data[2] + bytes[3] = d.data[3] d.data.delete_many(0, 4) - return bin.little_endian_u32(v) + return bin.little_endian_u32(bytes) } -pub fn (mut d Decoder) get_u64() u64 { - v := d.data[..8] +pub fn (mut d Decoder) get_u64() !u64 { + if d.data.len < 8 { + return error('not enough data for u64') + } + mut bytes := []u8{len: 8} + bytes[0] = d.data[0] + bytes[1] = d.data[1] + bytes[2] = d.data[2] + bytes[3] = d.data[3] + bytes[4] = d.data[4] + bytes[5] = d.data[5] + bytes[6] = d.data[6] + bytes[7] = d.data[7] d.data.delete_many(0, 8) - return bin.little_endian_u64(v) + return bin.little_endian_u64(bytes) } -pub fn (mut d Decoder) get_i64() i64 { - v := d.data[..8] +pub fn (mut d Decoder) get_i64() !i64 { + if d.data.len < 8 { + return error('not enough data for i64') + } + mut bytes := []u8{len: 8} + bytes[0] = d.data[0] + bytes[1] = d.data[1] + bytes[2] = d.data[2] + bytes[3] = d.data[3] + bytes[4] = d.data[4] + bytes[5] = d.data[5] + bytes[6] = d.data[6] + bytes[7] = 
d.data[7] d.data.delete_many(0, 8) - return u64(bin.little_endian_u64(v)) + return u64(bin.little_endian_u64(bytes)) } -pub fn (mut d Decoder) get_time() time.Time { - nano_time := d.get_i64() +pub fn (mut d Decoder) get_time() !time.Time { + nano_time := d.get_i64()! seconds := nano_time / int(1e9) nano_seconds := int(nano_time % int(1e9)) return time.unix_nanosecond(seconds, nano_seconds) } -pub fn (mut d Decoder) get_ourtime() ourtime.OurTime { +pub fn (mut d Decoder) get_ourtime() !ourtime.OurTime { return ourtime.OurTime{ - unixt: d.get_i64() + unixt: d.get_i64()! } } -pub fn (mut d Decoder) get_list_string() []string { - n := d.get_u16() +pub fn (mut d Decoder) get_list_string() ![]string { + n := d.get_u16()! mut v := []string{len: int(n)} for i in 0 .. n { - v[i] = d.get_string() + v[i] = d.get_string()! } return v } -pub fn (mut d Decoder) get_list_int() []int { - n := d.get_u16() +pub fn (mut d Decoder) get_list_int() ![]int { + n := d.get_u16()! mut v := []int{len: int(n)} for i in 0 .. n { - v[i] = d.get_int() + v[i] = d.get_int()! } return v } -pub fn (mut d Decoder) get_list_u8() []u8 { - n := d.get_u16() - v := d.data[..n] +pub fn (mut d Decoder) get_list_u8() ![]u8 { + n := d.get_u16()! + if n > 64 * 1024 { // 64KB limit + return error('list length ${n} exceeds 64KB limit') + } + if n > d.data.len { + return error('list length ${n} exceeds remaining data length ${d.data.len}') + } + mut bytes := []u8{len: int(n)} + for i in 0 .. n { + bytes[i] = d.data[i] + } d.data.delete_many(0, n) - return v + return bytes } -pub fn (mut d Decoder) get_list_u16() []u16 { - n := d.get_u16() +pub fn (mut d Decoder) get_list_u16() ![]u16 { + n := d.get_u16()! mut v := []u16{len: int(n)} for i in 0 .. n { - v[i] = d.get_u16() + v[i] = d.get_u16()! } return v } -pub fn (mut d Decoder) get_list_u32() []u32 { - n := d.get_u16() +pub fn (mut d Decoder) get_list_u32() ![]u32 { + n := d.get_u16()! mut v := []u32{len: int(n)} for i in 0 .. 
n { - v[i] = d.get_u32() + v[i] = d.get_u32()! } return v } -pub fn (mut d Decoder) get_list_u64() []u64 { - n := d.get_u16() +pub fn (mut d Decoder) get_list_u64() ![]u64 { + n := d.get_u16()! mut v := []u64{len: int(n)} for i in 0 .. n { - v[i] = d.get_u64() + v[i] = d.get_u64()! } return v } -pub fn (mut d Decoder) get_map_string() map[string]string { - n := d.get_u16() +pub fn (mut d Decoder) get_map_string() !map[string]string { + n := d.get_u16()! mut v := map[string]string{} for _ in 0 .. n { - key := d.get_string() - val := d.get_string() + key := d.get_string()! + val := d.get_string()! v[key] = val } return v } -pub fn (mut d Decoder) get_map_bytes() map[string][]u8 { - n := d.get_u16() +pub fn (mut d Decoder) get_map_bytes() !map[string][]u8 { + n := d.get_u16()! mut v := map[string][]u8{} for _ in 0 .. n { - key := d.get_string() - val := d.get_bytes() + key := d.get_string()! + val := d.get_bytes()! v[key] = val } return v diff --git a/lib/data/graphdb/README.md b/lib/data/graphdb/README.md new file mode 100644 index 00000000..1e3d76ed --- /dev/null +++ b/lib/data/graphdb/README.md @@ -0,0 +1,170 @@ +# GraphDB + +A lightweight, efficient graph database implementation in V that supports property graphs with nodes and edges. It provides both in-memory caching and persistent storage capabilities. + +## Features + +- Property Graph Model + - Nodes with key-value properties + - Typed edges with properties + - Bidirectional edge traversal +- Persistent Storage + - Automatic data persistence + - Efficient serialization +- Memory-Efficient Caching + - LRU caching for nodes and edges + - Configurable cache sizes +- Rich Query Capabilities + - Property-based node queries + - Edge-based node traversal + - Relationship type filtering +- CRUD Operations + - Create, read, update, and delete nodes + - Manage relationships between nodes + - Update properties dynamically + +## Installation + +GraphDB is part of the HeroLib library. 
Include it in your V project: + +```v +import freeflowuniverse.herolib.data.graphdb +``` + +## Basic Usage + +Here's a simple example demonstrating core functionality: + +```v +import freeflowuniverse.herolib.data.graphdb + +fn main() { + // Create a new graph database + mut gdb := graphdb.new(path: '/tmp/mydb', reset: true)! + + // Create nodes + user_id := gdb.create_node({ + 'name': 'John', + 'age': '30', + 'city': 'London' + })! + + company_id := gdb.create_node({ + 'name': 'TechCorp', + 'industry': 'Technology' + })! + + // Create relationship + gdb.create_edge(user_id, company_id, 'WORKS_AT', { + 'role': 'Developer', + 'since': '2022' + })! + + // Query nodes by property + london_users := gdb.query_nodes_by_property('city', 'London')! + + // Find connected nodes + workplaces := gdb.get_connected_nodes(user_id, 'WORKS_AT', 'out')! +} +``` + +## API Reference + +### Creating a Database + +```v +// Create new database instance +struct NewArgs { + path string // Storage path + reset bool // Clear existing data + cache_config CacheConfig // Optional cache configuration +} +db := graphdb.new(NewArgs{...})! +``` + +### Node Operations + +```v +// Create node +node_id := db.create_node(properties: map[string]string)! + +// Get node +node := db.get_node(id: u32)! + +// Update node +db.update_node(id: u32, properties: map[string]string)! + +// Delete node (and connected edges) +db.delete_node(id: u32)! + +// Query nodes by property +nodes := db.query_nodes_by_property(key: string, value: string)! +``` + +### Edge Operations + +```v +// Create edge +edge_id := db.create_edge(from_id: u32, to_id: u32, edge_type: string, properties: map[string]string)! + +// Get edge +edge := db.get_edge(id: u32)! + +// Update edge +db.update_edge(id: u32, properties: map[string]string)! + +// Delete edge +db.delete_edge(id: u32)! + +// Get edges between nodes +edges := db.get_edges_between(from_id: u32, to_id: u32)! 
+``` + +### Graph Traversal + +```v +// Get connected nodes +// direction can be 'in', 'out', or 'both' +nodes := db.get_connected_nodes(id: u32, edge_type: string, direction: string)! +``` + +## Data Model + +### Node Structure + +```v +struct Node { + id u32 // Unique identifier + properties map[string]string // Key-value properties + node_type string // Type of node + edges_out []EdgeRef // Outgoing edge references + edges_in []EdgeRef // Incoming edge references +} +``` + +### Edge Structure + +```v +struct Edge { + id u32 // Unique identifier + from_node u32 // Source node ID + to_node u32 // Target node ID + edge_type string // Type of relationship + properties map[string]string // Key-value properties + weight u16 // Edge weight +} +``` + +## Performance Considerations + +- The database uses LRU caching for both nodes and edges to improve read performance +- Persistent storage is handled efficiently through the underlying OurDB implementation +- Edge references are stored in both source and target nodes for efficient traversal +- Property queries perform full scans - consider indexing needs for large datasets + +## Example Use Cases + +- Social Networks: Modeling user relationships and interactions +- Knowledge Graphs: Representing connected information and metadata +- Organization Charts: Modeling company structure and relationships +- Recommendation Systems: Building relationship-based recommendation engines diff --git a/lib/data/graphdb/graphdb.v b/lib/data/graphdb/graphdb.v index 8bc82176..17be3a71 100644 --- a/lib/data/graphdb/graphdb.v +++ b/lib/data/graphdb/graphdb.v @@ -1,131 +1,192 @@ module graphdb import freeflowuniverse.herolib.data.ourdb +import freeflowuniverse.herolib.data.cache { Cache, CacheConfig, new_cache } // Node represents a vertex in the graph with properties and edge references +@[heap] pub struct Node { pub mut: - id u32 // Unique identifier - properties map[string]string // Key-value properties + id u32 // Unique identifier + 
properties map[string]string // Key-value properties + node_type string // Type of node can e.g. refer to a object implementation e.g. a User, ... edges_out []EdgeRef // Outgoing edge references edges_in []EdgeRef // Incoming edge references } // Edge represents a connection between nodes with properties +@[heap] pub struct Edge { pub mut: - id u32 // Unique identifier - from_node u32 // Source node ID - to_node u32 // Target node ID - edge_type string // Type of relationship - properties map[string]string // Key-value properties + id u32 // Unique identifier + from_node u32 // Source node ID + to_node u32 // Target node ID + edge_type string // Type of relationship + properties map[string]string // Key-value properties + weight u16 // weight of the connection between the objects } // EdgeRef is a lightweight reference to an edge +@[heap] pub struct EdgeRef { pub mut: - edge_id u32 // Database ID of the edge - edge_type string // Type of the edge relationship + edge_id u32 // Database ID of the edge + edge_type string // Type of the edge relationship } // GraphDB represents the graph database pub struct GraphDB { mut: - db &ourdb.OurDB // Database for persistent storage + db &ourdb.OurDB // Database for persistent storage + node_cache &Cache[Node] // Cache for nodes + edge_cache &Cache[Edge] // Cache for edges } pub struct NewArgs { pub mut: - path string - reset bool + path string + reset bool + cache_config CacheConfig = CacheConfig{} // Default cache configuration } // Creates a new graph database instance pub fn new(args NewArgs) !&GraphDB { mut db := ourdb.new( - path: args.path - record_size_max: 1024 * 4 // 4KB max record size + path: args.path + record_size_max: 1024 * 4 // 4KB max record size incremental_mode: true - reset: args.reset + reset: args.reset )! 
+ // Create type-specific caches with provided config + node_cache := new_cache[Node](args.cache_config) + edge_cache := new_cache[Edge](args.cache_config) + return &GraphDB{ - db: &db + db: &db + node_cache: node_cache + edge_cache: edge_cache } } // Creates a new node with the given properties pub fn (mut gdb GraphDB) create_node(properties map[string]string) !u32 { - node := Node{ + mut node := Node{ properties: properties - edges_out: []EdgeRef{} - edges_in: []EdgeRef{} + edges_out: []EdgeRef{} + edges_in: []EdgeRef{} } - + + // Let OurDB assign the ID in incremental mode node_id := gdb.db.set(data: serialize_node(node))! + + // Update node with assigned ID and cache it + node.id = node_id + gdb.node_cache.set(node_id, &node) + return node_id } // Creates an edge between two nodes pub fn (mut gdb GraphDB) create_edge(from_id u32, to_id u32, edge_type string, properties map[string]string) !u32 { // Create the edge - edge := Edge{ - from_node: from_id - to_node: to_id - edge_type: edge_type + mut edge := Edge{ + from_node: from_id + to_node: to_id + edge_type: edge_type properties: properties } + + // Let OurDB assign the ID in incremental mode edge_id := gdb.db.set(data: serialize_edge(edge))! + // Update edge with assigned ID and cache it + edge.id = edge_id + gdb.edge_cache.set(edge_id, &edge) + // Update source node's outgoing edges mut from_node := deserialize_node(gdb.db.get(from_id)!)! from_node.edges_out << EdgeRef{ - edge_id: edge_id + edge_id: edge_id edge_type: edge_type } gdb.db.set(id: from_id, data: serialize_node(from_node))! + gdb.node_cache.set(from_id, &from_node) // Update target node's incoming edges mut to_node := deserialize_node(gdb.db.get(to_id)!)! to_node.edges_in << EdgeRef{ - edge_id: edge_id + edge_id: edge_id edge_type: edge_type } gdb.db.set(id: to_id, data: serialize_node(to_node))! 
+ gdb.node_cache.set(to_id, &to_node) return edge_id } // Gets a node by its ID pub fn (mut gdb GraphDB) get_node(id u32) !Node { + // Try cache first + if cached_node := gdb.node_cache.get(id) { + return *cached_node + } + + // Load from database node_data := gdb.db.get(id)! - return deserialize_node(node_data)! + node := deserialize_node(node_data)! + + // Cache the node + gdb.node_cache.set(id, &node) + + return node } // Gets an edge by its ID pub fn (mut gdb GraphDB) get_edge(id u32) !Edge { + // Try cache first + if cached_edge := gdb.edge_cache.get(id) { + return *cached_edge + } + + // Load from database edge_data := gdb.db.get(id)! - return deserialize_edge(edge_data)! + edge := deserialize_edge(edge_data)! + + // Cache the edge + gdb.edge_cache.set(id, &edge) + + return edge } // Updates a node's properties pub fn (mut gdb GraphDB) update_node(id u32, properties map[string]string) ! { mut node := deserialize_node(gdb.db.get(id)!)! node.properties = properties.clone() + + // Update database gdb.db.set(id: id, data: serialize_node(node))! + + // Update cache + gdb.node_cache.set(id, &node) } // Updates an edge's properties pub fn (mut gdb GraphDB) update_edge(id u32, properties map[string]string) ! { mut edge := deserialize_edge(gdb.db.get(id)!)! edge.properties = properties.clone() + + // Update database gdb.db.set(id: id, data: serialize_edge(edge))! + + // Update cache + gdb.edge_cache.set(id, &edge) } // Deletes a node and all its edges pub fn (mut gdb GraphDB) delete_node(id u32) ! { node := deserialize_node(gdb.db.get(id)!)! - + // Delete outgoing edges for edge_ref in node.edges_out { gdb.delete_edge(edge_ref.edge_id)! @@ -136,8 +197,11 @@ pub fn (mut gdb GraphDB) delete_node(id u32) ! { gdb.delete_edge(edge_ref.edge_id)! } - // Delete the node itself + // Delete from database gdb.db.delete(id)! 
+ + // Remove from cache + gdb.node_cache.remove(id) } // Deletes an edge and updates connected nodes @@ -153,6 +217,7 @@ pub fn (mut gdb GraphDB) delete_edge(id u32) ! { } } gdb.db.set(id: edge.from_node, data: serialize_node(from_node))! + gdb.node_cache.set(edge.from_node, &from_node) // Update target node mut to_node := deserialize_node(gdb.db.get(edge.to_node)!)! @@ -163,9 +228,11 @@ pub fn (mut gdb GraphDB) delete_edge(id u32) ! { } } gdb.db.set(id: edge.to_node, data: serialize_node(to_node))! + gdb.node_cache.set(edge.to_node, &to_node) - // Delete the edge itself + // Delete from database and cache gdb.db.delete(id)! + gdb.edge_cache.remove(id) } // Queries nodes by property value @@ -173,13 +240,30 @@ pub fn (mut gdb GraphDB) query_nodes_by_property(key string, value string) ![]No mut nodes := []Node{} mut next_id := gdb.db.get_next_id()! + // Process each ID up to next_id for id := u32(0); id < next_id; id++ { - if node_data := gdb.db.get(id) { - if node := deserialize_node(node_data) { - if node.properties[key] == value { - nodes << node + // Try to get from cache first + if cached := gdb.node_cache.get(id) { + if prop_value := cached.properties[key] { + if prop_value == value { + nodes << *cached } } + continue + } + + // Not in cache, try to get from database + raw_data := gdb.db.get(id) or { continue } + mut node := deserialize_node(raw_data) or { continue } + + // Cache the node for future use + gdb.node_cache.set(id, &node) + + // Check if this node matches the query + if prop_value := node.properties[key] { + if prop_value == value { + nodes << node + } } } @@ -188,13 +272,26 @@ pub fn (mut gdb GraphDB) query_nodes_by_property(key string, value string) ![]No // Gets all edges between two nodes pub fn (mut gdb GraphDB) get_edges_between(from_id u32, to_id u32) ![]Edge { - from_node := deserialize_node(gdb.db.get(from_id)!)! 
- mut edges := []Edge{} + mut from_node := if cached := gdb.node_cache.get(from_id) { + *cached + } else { + node := deserialize_node(gdb.db.get(from_id)!)! + gdb.node_cache.set(from_id, &node) + node + } + mut edges := []Edge{} for edge_ref in from_node.edges_out { - edge := deserialize_edge(gdb.db.get(edge_ref.edge_id)!)! - if edge.to_node == to_id { - edges << edge + edge_data := if cached := gdb.edge_cache.get(edge_ref.edge_id) { + *cached + } else { + mut edge := deserialize_edge(gdb.db.get(edge_ref.edge_id)!)! + gdb.edge_cache.set(edge_ref.edge_id, &edge) + edge + } + + if edge_data.to_node == to_id { + edges << edge_data } } @@ -203,23 +300,58 @@ pub fn (mut gdb GraphDB) get_edges_between(from_id u32, to_id u32) ![]Edge { // Gets all nodes connected to a given node by edge type pub fn (mut gdb GraphDB) get_connected_nodes(id u32, edge_type string, direction string) ![]Node { - node := deserialize_node(gdb.db.get(id)!)! + mut start_node := if cached := gdb.node_cache.get(id) { + *cached + } else { + node := deserialize_node(gdb.db.get(id)!)! + gdb.node_cache.set(id, &node) + node + } + mut connected_nodes := []Node{} if direction in ['out', 'both'] { - for edge_ref in node.edges_out { + for edge_ref in start_node.edges_out { if edge_ref.edge_type == edge_type { - edge := deserialize_edge(gdb.db.get(edge_ref.edge_id)!)! - connected_nodes << deserialize_node(gdb.db.get(edge.to_node)!)! + edge_data := if cached := gdb.edge_cache.get(edge_ref.edge_id) { + *cached + } else { + mut edge := deserialize_edge(gdb.db.get(edge_ref.edge_id)!)! + gdb.edge_cache.set(edge_ref.edge_id, &edge) + edge + } + + mut target_node := if cached := gdb.node_cache.get(edge_data.to_node) { + *cached + } else { + node := deserialize_node(gdb.db.get(edge_data.to_node)!)! 
+ gdb.node_cache.set(edge_data.to_node, &node) + node + } + connected_nodes << target_node } } } if direction in ['in', 'both'] { - for edge_ref in node.edges_in { + for edge_ref in start_node.edges_in { if edge_ref.edge_type == edge_type { - edge := deserialize_edge(gdb.db.get(edge_ref.edge_id)!)! - connected_nodes << deserialize_node(gdb.db.get(edge.from_node)!)! + edge_data := if cached := gdb.edge_cache.get(edge_ref.edge_id) { + *cached + } else { + mut edge := deserialize_edge(gdb.db.get(edge_ref.edge_id)!)! + gdb.edge_cache.set(edge_ref.edge_id, &edge) + edge + } + + mut source_node := if cached := gdb.node_cache.get(edge_data.from_node) { + *cached + } else { + node := deserialize_node(gdb.db.get(edge_data.from_node)!)! + gdb.node_cache.set(edge_data.from_node, &node) + node + } + connected_nodes << source_node } } } diff --git a/lib/data/graphdb/graphdb_debug.v b/lib/data/graphdb/graphdb_debug.v index 5aad02db..3e1449ae 100644 --- a/lib/data/graphdb/graphdb_debug.v +++ b/lib/data/graphdb/graphdb_debug.v @@ -3,10 +3,10 @@ module graphdb // Gets detailed information about a node pub fn (mut gdb GraphDB) debug_node(id u32) !string { node := gdb.get_node(id)! - + mut info := '\nNode Details (ID: ${id})\n' info += '===================\n' - + // Properties info += '\nProperties:\n' if node.properties.len == 0 { @@ -81,13 +81,13 @@ pub fn (mut gdb GraphDB) debug_edge(id u32) !string { edge := gdb.get_edge(id)! from_node := gdb.get_node(edge.from_node)! to_node := gdb.get_node(edge.to_node)! - + mut info := '\nEdge Details (ID: ${id})\n' info += '===================\n' - + // Basic info info += '\nType: ${edge.edge_type}\n' - + // Connected nodes info += '\nFrom Node (ID: ${edge.from_node}):\n' if name := from_node.properties['name'] { @@ -125,10 +125,10 @@ pub fn (mut gdb GraphDB) debug_edge(id u32) !string { // Prints the current state of the database pub fn (mut gdb GraphDB) debug_db() ! { mut next_id := gdb.db.get_next_id()! 
- + println('\nGraph Database State') println('===================') - + // Print all nodes println('\nNodes:') println('------') @@ -153,13 +153,13 @@ pub fn (mut gdb GraphDB) debug_db() ! { if edge := deserialize_edge(edge_data) { mut from_name := '' mut to_name := '' - + if from_node := gdb.get_node(edge.from_node) { if name := from_node.properties['name'] { from_name = ' (${name})' } } - + if to_node := gdb.get_node(edge.to_node) { if name := to_node.properties['name'] { to_name = ' (${name})' @@ -195,7 +195,7 @@ pub fn (mut gdb GraphDB) print_graph_from(start_id u32, visited map[u32]bool) ! my_visited[start_id] = true node := gdb.get_node(start_id)! - + mut node_info := 'Node(${start_id})' if name := node.properties['name'] { node_info += ' (${name})' @@ -206,7 +206,7 @@ pub fn (mut gdb GraphDB) print_graph_from(start_id u32, visited map[u32]bool) ! for edge_ref in node.edges_out { edge := gdb.get_edge(edge_ref.edge_id)! mut edge_info := ' -[${edge.edge_type}]->' - + if edge.properties.len > 0 { edge_info += ' {' mut first := true @@ -219,7 +219,7 @@ pub fn (mut gdb GraphDB) print_graph_from(start_id u32, visited map[u32]bool) ! } edge_info += '}' } - + println(edge_info) gdb.print_graph_from(edge.to_node, my_visited)! } @@ -229,7 +229,7 @@ pub fn (mut gdb GraphDB) print_graph_from(start_id u32, visited map[u32]bool) ! pub fn (mut gdb GraphDB) print_graph() ! { println('\nGraph Structure') println('===============') - + mut visited := map[u32]bool{} mut next_id := gdb.db.get_next_id()! diff --git a/lib/data/graphdb/graphdb_test.v b/lib/data/graphdb/graphdb_test.v index d4567db6..4084cb1e 100644 --- a/lib/data/graphdb/graphdb_test.v +++ b/lib/data/graphdb/graphdb_test.v @@ -5,13 +5,13 @@ fn test_basic_operations() ! { // Test creating nodes with properties mut person1_id := gdb.create_node({ - 'name': 'Alice', - 'age': '30' + 'name': 'Alice' + 'age': '30' })! mut person2_id := gdb.create_node({ - 'name': 'Bob', - 'age': '25' + 'name': 'Bob' + 'age': '25' })! 
// Test retrieving nodes @@ -51,8 +51,8 @@ fn test_basic_operations() ! { // Test updating node properties gdb.update_node(person1_id, { - 'name': 'Alice', - 'age': '31' + 'name': 'Alice' + 'age': '31' })! updated_alice := gdb.get_node(person1_id)! assert updated_alice.properties['age'] == '31' @@ -86,42 +86,54 @@ fn test_complex_graph() ! { // Create nodes representing people mut alice_id := gdb.create_node({ - 'name': 'Alice', - 'age': '30', + 'name': 'Alice' + 'age': '30' 'city': 'New York' })! mut bob_id := gdb.create_node({ - 'name': 'Bob', - 'age': '25', + 'name': 'Bob' + 'age': '25' 'city': 'Boston' })! mut charlie_id := gdb.create_node({ - 'name': 'Charlie', - 'age': '35', + 'name': 'Charlie' + 'age': '35' 'city': 'New York' })! // Create nodes representing companies mut company1_id := gdb.create_node({ - 'name': 'TechCorp', + 'name': 'TechCorp' 'industry': 'Technology' })! mut company2_id := gdb.create_node({ - 'name': 'FinCo', + 'name': 'FinCo' 'industry': 'Finance' })! // Create relationships - gdb.create_edge(alice_id, bob_id, 'KNOWS', {'since': '2020'})! - gdb.create_edge(bob_id, charlie_id, 'KNOWS', {'since': '2019'})! - gdb.create_edge(charlie_id, alice_id, 'KNOWS', {'since': '2018'})! + gdb.create_edge(alice_id, bob_id, 'KNOWS', { + 'since': '2020' + })! + gdb.create_edge(bob_id, charlie_id, 'KNOWS', { + 'since': '2019' + })! + gdb.create_edge(charlie_id, alice_id, 'KNOWS', { + 'since': '2018' + })! - gdb.create_edge(alice_id, company1_id, 'WORKS_AT', {'role': 'Engineer'})! - gdb.create_edge(bob_id, company2_id, 'WORKS_AT', {'role': 'Analyst'})! - gdb.create_edge(charlie_id, company1_id, 'WORKS_AT', {'role': 'Manager'})! + gdb.create_edge(alice_id, company1_id, 'WORKS_AT', { + 'role': 'Engineer' + })! + gdb.create_edge(bob_id, company2_id, 'WORKS_AT', { + 'role': 'Analyst' + })! + gdb.create_edge(charlie_id, company1_id, 'WORKS_AT', { + 'role': 'Manager' + })! // Test querying by property ny_people := gdb.query_nodes_by_property('city', 'New York')! 
@@ -159,7 +171,7 @@ fn test_edge_cases() ! { // Test node with many properties mut large_props := map[string]string{} - for i in 0..100 { + for i in 0 .. 100 { large_props['key${i}'] = 'value${i}' } large_node_id := gdb.create_node(large_props)! diff --git a/lib/data/graphdb/search.v b/lib/data/graphdb/search.v new file mode 100644 index 00000000..e9eef6f9 --- /dev/null +++ b/lib/data/graphdb/search.v @@ -0,0 +1,99 @@ +module graphdb + +// SearchConfig represents the configuration for graph traversal search +pub struct SearchConfig { +pub mut: + types []string // List of node types to search for + max_distance f32 // Maximum distance to traverse using edge weights +} + +// SearchResult represents a node found during search with its distance from start +pub struct SearchResult { +pub: + node &Node + distance f32 +} + +// search performs a breadth-first traversal from a start node +// Returns nodes of specified types within max_distance +pub fn (mut gdb GraphDB) search(start_id u32, config SearchConfig) ![]SearchResult { + mut results := []SearchResult{} + mut visited := map[u32]f32{} // Maps node ID to shortest distance found + mut queue := []u32{cap: 100} // Queue of node IDs to visit + + // Start from the given node + queue << start_id + visited[start_id] = 0 + + // Process nodes in queue + for queue.len > 0 { + current_id := queue[0] + queue.delete(0) + + current_distance := visited[current_id] + if current_distance > config.max_distance { + continue + } + + // Get current node + current_node := gdb.get_node(current_id)! + + // Add to results if node type matches search criteria + if config.types.len == 0 || current_node.node_type in config.types { + results << SearchResult{ + node: &current_node + distance: current_distance + } + } + + // Process outgoing edges + for edge_ref in current_node.edges_out { + edge := gdb.get_edge(edge_ref.edge_id)! 
+ next_id := edge.to_node + + // Calculate new distance using edge weight + weight := if edge.weight == 0 { f32(1) } else { f32(edge.weight) } + new_distance := current_distance + weight + + // Skip if we've found a shorter path or would exceed max distance + if new_distance > config.max_distance { + continue + } + if next_distance := visited[next_id] { + if new_distance >= next_distance { + continue + } + } + + // Add to queue and update distance + queue << next_id + visited[next_id] = new_distance + } + + // Process incoming edges + for edge_ref in current_node.edges_in { + edge := gdb.get_edge(edge_ref.edge_id)! + next_id := edge.from_node + + // Calculate new distance using edge weight + weight := if edge.weight == 0 { f32(1) } else { f32(edge.weight) } + new_distance := current_distance + weight + + // Skip if we've found a shorter path or would exceed max distance + if new_distance > config.max_distance { + continue + } + if next_distance := visited[next_id] { + if new_distance >= next_distance { + continue + } + } + + // Add to queue and update distance + queue << next_id + visited[next_id] = new_distance + } + } + + return results +} diff --git a/lib/data/graphdb/search_test.v b/lib/data/graphdb/search_test.v new file mode 100644 index 00000000..a8bc4837 --- /dev/null +++ b/lib/data/graphdb/search_test.v @@ -0,0 +1,156 @@ +module graphdb + +fn test_search() ! { + mut gdb := new(NewArgs{ + path: 'test_search.db' + reset: true + })! + + // Create test nodes of different types + mut user1 := Node{ + properties: { + 'name': 'User 1' + } + node_type: 'user' + } + user1_id := gdb.db.set(data: serialize_node(user1))! + user1.id = user1_id + gdb.node_cache.set(user1_id, &user1) + + mut user2 := Node{ + properties: { + 'name': 'User 2' + } + node_type: 'user' + } + user2_id := gdb.db.set(data: serialize_node(user2))! 
+ user2.id = user2_id + gdb.node_cache.set(user2_id, &user2) + + mut post1 := Node{ + properties: { + 'title': 'Post 1' + } + node_type: 'post' + } + post1_id := gdb.db.set(data: serialize_node(post1))! + post1.id = post1_id + gdb.node_cache.set(post1_id, &post1) + + mut post2 := Node{ + properties: { + 'title': 'Post 2' + } + node_type: 'post' + } + post2_id := gdb.db.set(data: serialize_node(post2))! + post2.id = post2_id + gdb.node_cache.set(post2_id, &post2) + + // Create edges with different weights + mut edge1 := Edge{ + from_node: user1_id + to_node: post1_id + edge_type: 'created' + weight: 1 + } + edge1_id := gdb.db.set(data: serialize_edge(edge1))! + edge1.id = edge1_id + gdb.edge_cache.set(edge1_id, &edge1) + + mut edge2 := Edge{ + from_node: post1_id + to_node: post2_id + edge_type: 'related' + weight: 2 + } + edge2_id := gdb.db.set(data: serialize_edge(edge2))! + edge2.id = edge2_id + gdb.edge_cache.set(edge2_id, &edge2) + + mut edge3 := Edge{ + from_node: user2_id + to_node: post2_id + edge_type: 'created' + weight: 1 + } + edge3_id := gdb.db.set(data: serialize_edge(edge3))! + edge3.id = edge3_id + gdb.edge_cache.set(edge3_id, &edge3) + + // Update node edge references + user1.edges_out << EdgeRef{ + edge_id: edge1_id + edge_type: 'created' + } + gdb.db.set(id: user1_id, data: serialize_node(user1))! + gdb.node_cache.set(user1_id, &user1) + + post1.edges_in << EdgeRef{ + edge_id: edge1_id + edge_type: 'created' + } + post1.edges_out << EdgeRef{ + edge_id: edge2_id + edge_type: 'related' + } + gdb.db.set(id: post1_id, data: serialize_node(post1))! + gdb.node_cache.set(post1_id, &post1) + + post2.edges_in << EdgeRef{ + edge_id: edge2_id + edge_type: 'related' + } + post2.edges_in << EdgeRef{ + edge_id: edge3_id + edge_type: 'created' + } + gdb.db.set(id: post2_id, data: serialize_node(post2))! 
+ gdb.node_cache.set(post2_id, &post2) + + user2.edges_out << EdgeRef{ + edge_id: edge3_id + edge_type: 'created' + } + gdb.db.set(id: user2_id, data: serialize_node(user2))! + gdb.node_cache.set(user2_id, &user2) + + // Test 1: Search for posts within distance 2 + results1 := gdb.search(user1_id, SearchConfig{ + types: ['post'] + max_distance: 2 + })! + + assert results1.len == 1 // Should only find post1 within distance 2 + assert results1[0].node.properties['title'] == 'Post 1' + assert results1[0].distance == 1 + + // Test 2: Search for posts within distance 4 + results2 := gdb.search(user1_id, SearchConfig{ + types: ['post'] + max_distance: 4 + })! + + assert results2.len == 2 // Should find both posts + assert results2[0].node.properties['title'] == 'Post 1' + assert results2[1].node.properties['title'] == 'Post 2' + assert results2[1].distance == 3 + + // Test 3: Search for users within distance 3 + results3 := gdb.search(post2_id, SearchConfig{ + types: ['user'] + max_distance: 3 + })! + + assert results3.len == 2 // Should find both users + assert results3[0].node.properties['name'] in ['User 1', 'User 2'] + assert results3[1].node.properties['name'] in ['User 1', 'User 2'] + + // Test 4: Search without type filter + results4 := gdb.search(user1_id, SearchConfig{ + types: [] + max_distance: 4 + })! 
+ + assert results4.len == 4 // Should find all nodes +} diff --git a/lib/data/graphdb/serialization.v b/lib/data/graphdb/serialization.v index 4ead2222..281bde3f 100644 --- a/lib/data/graphdb/serialization.v +++ b/lib/data/graphdb/serialization.v @@ -1,165 +1,98 @@ module graphdb -import encoding.binary -import math +import freeflowuniverse.herolib.data.encoder + +const version_v1 = u8(1) // Serializes a Node struct to bytes -fn serialize_node(node Node) []u8 { - mut data := []u8{} +pub fn serialize_node(node Node) []u8 { + mut e := encoder.new() + + // Add version byte + e.add_u8(version_v1) + + // Serialize node ID + e.add_u32(node.id) + + // Serialize node type + e.add_string(node.node_type) // Serialize properties - data << u32_to_bytes(u32(node.properties.len)) // Number of properties + e.add_u16(u16(node.properties.len)) // Number of properties for key, value in node.properties { - // Key length and bytes - data << u32_to_bytes(u32(key.len)) - data << key.bytes() - // Value length and bytes - data << u32_to_bytes(u32(value.len)) - data << value.bytes() + e.add_string(key) + e.add_string(value) } // Serialize outgoing edges - data << u32_to_bytes(u32(node.edges_out.len)) // Number of outgoing edges + e.add_u16(u16(node.edges_out.len)) // Number of outgoing edges for edge in node.edges_out { - data << u32_to_bytes(edge.edge_id) - data << u32_to_bytes(u32(edge.edge_type.len)) - data << edge.edge_type.bytes() + e.add_u32(edge.edge_id) + e.add_string(edge.edge_type) } // Serialize incoming edges - data << u32_to_bytes(u32(node.edges_in.len)) // Number of incoming edges + e.add_u16(u16(node.edges_in.len)) // Number of incoming edges for edge in node.edges_in { - data << u32_to_bytes(edge.edge_id) - data << u32_to_bytes(u32(edge.edge_type.len)) - data << edge.edge_type.bytes() + e.add_u32(edge.edge_id) + e.add_string(edge.edge_type) } - return data + return e.data } // Deserializes bytes to a Node struct -fn deserialize_node(data []u8) !Node { - if data.len < 4 { 
+pub fn deserialize_node(data []u8) !Node { + if data.len < 1 { return error('Invalid node data: too short') } - mut offset := 0 + mut d := encoder.decoder_new(data) + + // Check version + version := d.get_u8()! + if version != version_v1 { + return error('Unsupported version: ${version}') + } + mut node := Node{ properties: map[string]string{} - edges_out: []EdgeRef{} - edges_in: []EdgeRef{} + edges_out: []EdgeRef{} + edges_in: []EdgeRef{} } + // Deserialize node ID + node.id = d.get_u32()! + + // Deserialize node type + node.node_type = d.get_string()! + // Deserialize properties - mut end_pos := int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated properties count') - } - num_properties := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - + num_properties := d.get_u16()! for _ in 0 .. num_properties { - // Read key - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated property key length') - } - key_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(key_len) - if end_pos > data.len { - return error('Invalid node data: truncated property key') - } - key := data[offset..end_pos].bytestr() - offset = end_pos - - // Read value - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated property value length') - } - value_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(value_len) - if end_pos > data.len { - return error('Invalid node data: truncated property value') - } - value := data[offset..end_pos].bytestr() - offset = end_pos - + key := d.get_string()! + value := d.get_string()! 
node.properties[key] = value } // Deserialize outgoing edges - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated outgoing edges count') - } - num_edges_out := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - + num_edges_out := d.get_u16()! for _ in 0 .. num_edges_out { - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated edge ID') - } - edge_id := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated edge type length') - } - type_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(type_len) - if end_pos > data.len { - return error('Invalid node data: truncated edge type') - } - edge_type := data[offset..end_pos].bytestr() - offset = end_pos - + edge_id := d.get_u32()! + edge_type := d.get_string()! node.edges_out << EdgeRef{ - edge_id: edge_id + edge_id: edge_id edge_type: edge_type } } // Deserialize incoming edges - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated incoming edges count') - } - num_edges_in := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - + num_edges_in := d.get_u16()! for _ in 0 .. num_edges_in { - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated edge ID') - } - edge_id := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid node data: truncated edge type length') - } - type_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(type_len) - if end_pos > data.len { - return error('Invalid node data: truncated edge type') - } - edge_type := data[offset..end_pos].bytestr() - offset = end_pos - + edge_id := d.get_u32()! + edge_type := d.get_string()! 
node.edges_in << EdgeRef{ - edge_id: edge_id + edge_id: edge_id edge_type: edge_type } } @@ -168,120 +101,65 @@ fn deserialize_node(data []u8) !Node { } // Serializes an Edge struct to bytes -fn serialize_edge(edge Edge) []u8 { - mut data := []u8{} +pub fn serialize_edge(edge Edge) []u8 { + mut e := encoder.new() + + // Add version byte + e.add_u8(version_v1) + + // Serialize edge ID + e.add_u32(edge.id) // Serialize edge metadata - data << u32_to_bytes(edge.from_node) - data << u32_to_bytes(edge.to_node) - data << u32_to_bytes(u32(edge.edge_type.len)) - data << edge.edge_type.bytes() + e.add_u32(edge.from_node) + e.add_u32(edge.to_node) + e.add_string(edge.edge_type) + e.add_u16(edge.weight) // Serialize properties - data << u32_to_bytes(u32(edge.properties.len)) + e.add_u16(u16(edge.properties.len)) for key, value in edge.properties { - data << u32_to_bytes(u32(key.len)) - data << key.bytes() - data << u32_to_bytes(u32(value.len)) - data << value.bytes() + e.add_string(key) + e.add_string(value) } - return data + return e.data } // Deserializes bytes to an Edge struct -fn deserialize_edge(data []u8) !Edge { - if data.len < 12 { +pub fn deserialize_edge(data []u8) !Edge { + if data.len < 1 { return error('Invalid edge data: too short') } - mut offset := 0 + mut d := encoder.decoder_new(data) + + // Check version + version := d.get_u8()! + if version != version_v1 { + return error('Unsupported version: ${version}') + } + mut edge := Edge{ properties: map[string]string{} } + // Deserialize edge ID + edge.id = d.get_u32()! 
+ // Deserialize edge metadata - mut end_pos := int(offset) + 4 - if end_pos > data.len { - return error('Invalid edge data: truncated from_node') - } - edge.from_node = bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid edge data: truncated to_node') - } - edge.to_node = bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid edge data: truncated type length') - } - type_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(type_len) - if end_pos > data.len { - return error('Invalid edge data: truncated edge type') - } - edge.edge_type = data[offset..end_pos].bytestr() - offset = end_pos + edge.from_node = d.get_u32()! + edge.to_node = d.get_u32()! + edge.edge_type = d.get_string()! + edge.weight = d.get_u16()! // Deserialize properties - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid edge data: truncated properties count') - } - num_properties := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - + num_properties := d.get_u16()! for _ in 0 .. 
num_properties { - // Read key - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid edge data: truncated property key length') - } - key_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(key_len) - if end_pos > data.len { - return error('Invalid edge data: truncated property key') - } - key := data[offset..end_pos].bytestr() - offset = end_pos - - // Read value - end_pos = int(offset) + 4 - if end_pos > data.len { - return error('Invalid edge data: truncated property value length') - } - value_len := bytes_to_u32(data[offset..end_pos]) - offset = end_pos - - end_pos = int(offset) + int(value_len) - if end_pos > data.len { - return error('Invalid edge data: truncated property value') - } - value := data[offset..end_pos].bytestr() - offset = end_pos - + key := d.get_string()! + value := d.get_string()! edge.properties[key] = value } return edge } - -// Helper function to convert u32 to bytes -fn u32_to_bytes(n u32) []u8 { - mut bytes := []u8{len: 4} - binary.little_endian_put_u32(mut bytes, n) - return bytes -} - -// Helper function to convert bytes to u32 -fn bytes_to_u32(data []u8) u32 { - return binary.little_endian_u32(data) -} diff --git a/lib/data/graphdb/serialization_test.v b/lib/data/graphdb/serialization_test.v new file mode 100644 index 00000000..1d988cda --- /dev/null +++ b/lib/data/graphdb/serialization_test.v @@ -0,0 +1,202 @@ +module graphdb + +fn test_node_serialization() { + // Create a test node with all fields populated + node := Node{ + node_type: 'user' + properties: { + 'name': 'John Doe' + 'age': '30' + 'email': 'john@example.com' + } + edges_out: [ + EdgeRef{ + edge_id: 1 + edge_type: 'follows' + }, + EdgeRef{ + edge_id: 2 + edge_type: 'likes' + }, + ] + edges_in: [ + EdgeRef{ + edge_id: 3 + edge_type: 'followed_by' + }, + ] + } + + // Serialize the node + serialized := serialize_node(node) + + // Deserialize back to node + deserialized := deserialize_node(serialized) or { + 
assert false, 'Failed to deserialize node: ${err}' + Node{} + } + + // Verify all fields match + assert deserialized.node_type == node.node_type, 'node_type mismatch' + assert deserialized.properties.len == node.properties.len, 'properties length mismatch' + for key, value in node.properties { + assert deserialized.properties[key] == value, 'property ${key} mismatch' + } + assert deserialized.edges_out.len == node.edges_out.len, 'edges_out length mismatch' + for i, edge in node.edges_out { + assert deserialized.edges_out[i].edge_id == edge.edge_id, 'edge_out ${i} id mismatch' + assert deserialized.edges_out[i].edge_type == edge.edge_type, 'edge_out ${i} type mismatch' + } + assert deserialized.edges_in.len == node.edges_in.len, 'edges_in length mismatch' + for i, edge in node.edges_in { + assert deserialized.edges_in[i].edge_id == edge.edge_id, 'edge_in ${i} id mismatch' + assert deserialized.edges_in[i].edge_type == edge.edge_type, 'edge_in ${i} type mismatch' + } +} + +fn test_edge_serialization() { + // Create a test edge with all fields populated + edge := Edge{ + from_node: 1 + to_node: 2 + edge_type: 'follows' + weight: 5 + properties: { + 'created_at': '2024-01-31' + 'active': 'true' + } + } + + // Serialize the edge + serialized := serialize_edge(edge) + + // Deserialize back to edge + deserialized := deserialize_edge(serialized) or { + assert false, 'Failed to deserialize edge: ${err}' + Edge{} + } + + // Verify all fields match + assert deserialized.from_node == edge.from_node, 'from_node mismatch' + assert deserialized.to_node == edge.to_node, 'to_node mismatch' + assert deserialized.edge_type == edge.edge_type, 'edge_type mismatch' + assert deserialized.weight == edge.weight, 'weight mismatch' + assert deserialized.properties.len == edge.properties.len, 'properties length mismatch' + for key, value in edge.properties { + assert deserialized.properties[key] == value, 'property ${key} mismatch' + } +} + +fn test_node_serialization_empty() { + // Test with 
empty node + node := Node{ + node_type: '' + properties: map[string]string{} + edges_out: []EdgeRef{} + edges_in: []EdgeRef{} + } + + serialized := serialize_node(node) + deserialized := deserialize_node(serialized) or { + assert false, 'Failed to deserialize empty node: ${err}' + Node{} + } + + assert deserialized.node_type == '', 'empty node_type mismatch' + assert deserialized.properties.len == 0, 'empty properties mismatch' + assert deserialized.edges_out.len == 0, 'empty edges_out mismatch' + assert deserialized.edges_in.len == 0, 'empty edges_in mismatch' +} + +fn test_edge_serialization_empty() { + // Test with empty edge + edge := Edge{ + from_node: 0 + to_node: 0 + edge_type: '' + weight: 0 + properties: map[string]string{} + } + + serialized := serialize_edge(edge) + deserialized := deserialize_edge(serialized) or { + assert false, 'Failed to deserialize empty edge: ${err}' + Edge{} + } + + assert deserialized.from_node == 0, 'empty from_node mismatch' + assert deserialized.to_node == 0, 'empty to_node mismatch' + assert deserialized.edge_type == '', 'empty edge_type mismatch' + assert deserialized.weight == 0, 'empty weight mismatch' + assert deserialized.properties.len == 0, 'empty properties mismatch' +} + +fn test_version_compatibility() { + // Test version checking + node := Node{ + node_type: 'test' + } + mut serialized := serialize_node(node) + + // Modify version byte to invalid version + serialized[0] = 99 + + // Should fail with version error + deserialize_node(serialized) or { + assert err.msg().contains('Unsupported version'), 'Expected version error' + return + } + assert false, 'Expected error for invalid version' +} + +fn test_large_property_values() { + // Create a large string that's bigger than the slice bounds we're seeing in the error (20043) + mut large_value := '' + for _ in 0 .. 
25000 { + large_value += 'x' + } + + // Create a node with the large property value + node := Node{ + node_type: 'test' + properties: { + 'large_prop': large_value + } + } + + // Serialize and deserialize + serialized := serialize_node(node) + deserialized := deserialize_node(serialized) or { + assert false, 'Failed to deserialize node with large property: ${err}' + Node{} + } + + // Verify the large property was preserved + assert deserialized.properties['large_prop'] == large_value, 'large property value mismatch' +} + +fn test_data_validation() { + // Test with invalid data + invalid_data := []u8{} + deserialize_node(invalid_data) or { + assert err.msg().contains('too short'), 'Expected data length error' + return + } + assert false, 'Expected error for empty data' + + // Test with truncated data + node := Node{ + node_type: 'test' + properties: { + 'key': 'value' + } + } + serialized := serialize_node(node) + truncated := serialized[..serialized.len / 2] + + deserialize_node(truncated) or { + assert err.msg().contains('Invalid'), 'Expected truncation error' + return + } + assert false, 'Expected error for truncated data' +} diff --git a/lib/data/ourdb/backend.v b/lib/data/ourdb/backend.v index 6139a9dd..505b8e97 100644 --- a/lib/data/ourdb/backend.v +++ b/lib/data/ourdb/backend.v @@ -75,7 +75,7 @@ pub fn (mut db OurDB) set_(x u32, old_location Location, data []u8) ! { file_nr: file_nr position: u32(db.file.tell()!) } - //println('Writing data at position: ${new_location.position}, file_nr: ${file_nr}') + // println('Writing data at position: ${new_location.position}, file_nr: ${file_nr}') // Calculate CRC of data crc := calculate_crc(data) @@ -109,9 +109,9 @@ pub fn (mut db OurDB) set_(x u32, old_location Location, data []u8) ! { // Update lookup table with new position db.lookup.set(x, new_location)! - + // Ensure lookup table is synced - //db.save()! + // db.save()! 
} // get retrieves data at specified location @@ -144,7 +144,7 @@ fn (mut db OurDB) get_(location Location) ![]u8 { if data_read_bytes != int(size) { return error('failed to read data bytes') } - //println('Reading data from position: ${location.position}, file_nr: ${location.file_nr}, size: ${size}, data: ${data}') + // println('Reading data from position: ${location.position}, file_nr: ${location.file_nr}, size: ${size}, data: ${data}') // Verify CRC calculated_crc := calculate_crc(data) diff --git a/lib/data/ourdb/db.v b/lib/data/ourdb/db.v index a03c9cf2..121d4a74 100644 --- a/lib/data/ourdb/db.v +++ b/lib/data/ourdb/db.v @@ -97,7 +97,7 @@ pub fn (mut db OurDB) get_next_id() !u32 { return error('incremental mode is not enabled') } next_id := db.lookup.get_next_id()! - return next_id + return next_id } // close closes the database file diff --git a/lib/data/ourdb/db_test.v b/lib/data/ourdb/db_test.v index 07194c52..7778c581 100644 --- a/lib/data/ourdb/db_test.v +++ b/lib/data/ourdb/db_test.v @@ -67,7 +67,7 @@ fn test_history_tracking() { )! defer { - db.destroy() or { } + db.destroy() or {} } // Create multiple versions of data @@ -155,7 +155,7 @@ fn test_file_switching() { )! defer { - db.destroy() or { } + db.destroy() or {} } test_data1 := 'Test data'.bytes() diff --git a/lib/data/ourdb/db_update_test.v b/lib/data/ourdb/db_update_test.v index ab772a07..e47a8eb5 100644 --- a/lib/data/ourdb/db_update_test.v +++ b/lib/data/ourdb/db_update_test.v @@ -19,7 +19,7 @@ fn test_db_update() { )! defer { - db.destroy() or { } + db.destroy() or {} } // Test set and get @@ -29,17 +29,17 @@ fn test_db_update() { retrieved := db.get(id)! assert retrieved == test_data - assert id==0 + assert id == 0 // Test overwrite new_data := 'Updated data'.bytes() - id2 := db.set(id:0, data: new_data)! - assert id2==0 - + id2 := db.set(id: 0, data: new_data)! + assert id2 == 0 + // Verify lookup table has the correct location location := db.lookup.get(id2)! 
println('Location after update - file_nr: ${location.file_nr}, position: ${location.position}') - + // Get and verify the updated data retrieved2 := db.get(id2)! println('Retrieved data: ${retrieved2}') diff --git a/lib/data/ourdb/factory.v b/lib/data/ourdb/factory.v index a5fabd0b..26ef2fbf 100644 --- a/lib/data/ourdb/factory.v +++ b/lib/data/ourdb/factory.v @@ -24,12 +24,12 @@ const header_size = 12 @[params] pub struct OurDBConfig { pub: - record_nr_max u32 = 16777216 - 1 // max size of records - record_size_max u32 = 1024 * 4 // max size in bytes of a record, is 4 KB default - file_size u32 = 500 * (1 << 20) // 500MB - path string // directory where we will stor the DB + record_nr_max u32 = 16777216 - 1 // max size of records + record_size_max u32 = 1024 * 4 // max size in bytes of a record, is 4 KB default + file_size u32 = 500 * (1 << 20) // 500MB + path string // directory where we will stor the DB incremental_mode bool = true - reset bool + reset bool } // new_memdb creates a new memory database with the given path and lookup table @@ -56,7 +56,7 @@ pub fn new(args OurDBConfig) !OurDB { incremental_mode: args.incremental_mode )! - if args.reset{ + if args.reset { os.rmdir_all(args.path) or {} } diff --git a/lib/data/ourdb/lookup.v b/lib/data/ourdb/lookup.v index 93177fc4..eceae9c3 100644 --- a/lib/data/ourdb/lookup.v +++ b/lib/data/ourdb/lookup.v @@ -90,7 +90,7 @@ fn (lut LookupTable) get(x u32) !Location { entry_size := lut.keysize if lut.lookuppath.len > 0 { // Check file size first - file_size := os.file_size(lut.get_data_file_path()!) //THIS SLOWS DOWN, NEED TO DO SOMETHING MORE INTELLIGENCE ONCE + file_size := os.file_size(lut.get_data_file_path()!) // THIS SLOWS DOWN, NEED TO DO SOMETHING MORE INTELLIGENCE ONCE start_pos := x * entry_size if start_pos + entry_size > file_size { @@ -325,7 +325,7 @@ fn (mut lut LookupTable) import_data(path string) ! { incremental_file_name))! 
// Update the incremental value in memory inc_str := os.read_file(os.join_path(path, incremental_file_name))! - //println('inc_str: ${inc_str}') + // println('inc_str: ${inc_str}') lut.incremental = inc_str.u32() } return diff --git a/lib/data/radixtree/factory_test.v b/lib/data/radixtree/factory_test.v index 2470ff07..dcbdf694 100644 --- a/lib/data/radixtree/factory_test.v +++ b/lib/data/radixtree/factory_test.v @@ -1,7 +1,7 @@ module radixtree fn test_basic_operations() ! { - mut rt := new(path:'/tmp/radixtree_test',reset:true)! + mut rt := new(path: '/tmp/radixtree_test', reset: true)! // Test insert and search rt.insert('test', 'value1'.bytes())! @@ -20,15 +20,15 @@ fn test_basic_operations() ! { // Test delete rt.delete('test')! - mut ok:=false + mut ok := false if _ := rt.search('test') { - ok=true + ok = true } assert ok } fn test_prefix_matching() ! { - mut rt := new(path:'/tmp/radixtree_test_prefix')! + mut rt := new(path: '/tmp/radixtree_test_prefix')! // Insert keys with common prefixes rt.insert('team', 'value1'.bytes())! @@ -47,7 +47,7 @@ fn test_prefix_matching() ! { // Delete middle key and verify others still work rt.delete('test')! - + if _ := rt.search('test') { assert false, 'Expected error after deletion' } @@ -60,7 +60,7 @@ fn test_prefix_matching() ! { } fn test_edge_cases() ! { - mut rt := new(path:'/tmp/radixtree_test_edge')! + mut rt := new(path: '/tmp/radixtree_test_edge')! // Test empty key rt.insert('', 'empty'.bytes())! @@ -89,7 +89,7 @@ fn test_edge_cases() ! { } fn test_multiple_operations() ! { - mut rt := new(path:'/tmp/radixtree_test_multiple')! + mut rt := new(path: '/tmp/radixtree_test_multiple')! // Insert multiple keys keys := ['abc', 'abcd', 'abcde', 'bcd', 'bcde'] @@ -110,7 +110,7 @@ fn test_multiple_operations() ! { // Verify remaining keys remaining := ['abc', 'abcde', 'bcd'] expected := ['value1', 'value3', 'value4'] - + for i, key in remaining { value := rt.search(key)! 
assert value.bytestr() == expected[i] diff --git a/lib/data/radixtree/radixtree.v b/lib/data/radixtree/radixtree.v index 46826c13..b5fd0485 100644 --- a/lib/data/radixtree/radixtree.v +++ b/lib/data/radixtree/radixtree.v @@ -6,39 +6,38 @@ import freeflowuniverse.herolib.data.ourdb struct Node { mut: key_segment string // The segment of the key stored at this node - value []u8 // Value stored at this node (empty if not a leaf) - children []NodeRef // References to child nodes - is_leaf bool // Whether this node is a leaf node + value []u8 // Value stored at this node (empty if not a leaf) + children []NodeRef // References to child nodes + is_leaf bool // Whether this node is a leaf node } // Reference to a node in the database struct NodeRef { mut: key_part string // The key segment for this child - node_id u32 // Database ID of the node + node_id u32 // Database ID of the node } // RadixTree represents a radix tree data structure pub struct RadixTree { mut: db &ourdb.OurDB // Database for persistent storage - root_id u32 // Database ID of the root node + root_id u32 // Database ID of the root node } - pub struct NewArgs { pub mut: - path string + path string reset bool } // Creates a new radix tree with the specified database path pub fn new(args NewArgs) !&RadixTree { mut db := ourdb.new( - path: args.path - record_size_max: 1024 * 4 // 4KB max record size + path: args.path + record_size_max: 1024 * 4 // 4KB max record size incremental_mode: true - reset:args.reset + reset: args.reset )! mut root_id := u32(0) @@ -47,11 +46,11 @@ pub fn new(args NewArgs) !&RadixTree { println('Debug: Creating new root node') root := Node{ key_segment: '' - value: []u8{} - children: []NodeRef{} - is_leaf: false + value: []u8{} + children: []NodeRef{} + is_leaf: false } - root_id = db.set(data: serialize_node(root))! + root_id = db.set(data: serialize_node(root))! 
println('Debug: Created root node with ID ${root_id}') assert root_id == 0 } else { @@ -62,7 +61,7 @@ pub fn new(args NewArgs) !&RadixTree { } return &RadixTree{ - db: &db + db: &db root_id: root_id } } @@ -83,7 +82,7 @@ pub fn (mut rt RadixTree) insert(key string, value []u8) ! { for offset < key.len { mut node := deserialize_node(rt.db.get(current_id)!)! - + // Find matching child mut matched_child := -1 for i, child in node.children { @@ -98,33 +97,33 @@ pub fn (mut rt RadixTree) insert(key string, value []u8) ! { key_part := key[offset..] new_node := Node{ key_segment: key_part - value: value - children: []NodeRef{} - is_leaf: true + value: value + children: []NodeRef{} + is_leaf: true } println('Debug: Creating new leaf node with key_part "${key_part}"') new_id := rt.db.set(data: serialize_node(new_node))! println('Debug: Created node ID ${new_id}') - + // Create new child reference and update parent node println('Debug: Updating parent node ${current_id} to add child reference') - + // Get fresh copy of parent node mut parent_node := deserialize_node(rt.db.get(current_id)!)! println('Debug: Parent node initially has ${parent_node.children.len} children') - + // Add new child reference parent_node.children << NodeRef{ key_part: key_part - node_id: new_id + node_id: new_id } println('Debug: Added child reference, now has ${parent_node.children.len} children') - + // Update parent node in DB println('Debug: Serializing parent node with ${parent_node.children.len} children') parent_data := serialize_node(parent_node) println('Debug: Parent data size: ${parent_data.len} bytes') - + // First verify we can deserialize the data correctly println('Debug: Verifying serialization...') if test_node := deserialize_node(parent_data) { @@ -133,17 +132,17 @@ pub fn (mut rt RadixTree) insert(key string, value []u8) ! 
{ println('Debug: ERROR - Failed to deserialize test data') return error('Serialization verification failed') } - + // Set with explicit ID to update existing node println('Debug: Writing to DB...') rt.db.set(id: current_id, data: parent_data)! - + // Verify by reading back and comparing println('Debug: Reading back for verification...') verify_data := rt.db.get(current_id)! verify_node := deserialize_node(verify_data)! println('Debug: Verification - node has ${verify_node.children.len} children') - + if verify_node.children.len == 0 { println('Debug: ERROR - Node update verification failed!') println('Debug: Original node children: ${node.children.len}') @@ -159,24 +158,24 @@ pub fn (mut rt RadixTree) insert(key string, value []u8) ! { child := node.children[matched_child] common_prefix := get_common_prefix(key[offset..], child.key_part) - + if common_prefix.len < child.key_part.len { // Split existing node mut child_node := deserialize_node(rt.db.get(child.node_id)!)! - + // Create new intermediate node mut new_node := Node{ key_segment: child.key_part[common_prefix.len..] - value: child_node.value - children: child_node.children - is_leaf: child_node.is_leaf + value: child_node.value + children: child_node.children + is_leaf: child_node.is_leaf } new_id := rt.db.set(data: serialize_node(new_node))! // Update current node node.children[matched_child] = NodeRef{ key_part: common_prefix - node_id: new_id + node_id: new_id } rt.db.set(id: current_id, data: serialize_node(node))! } @@ -211,7 +210,7 @@ pub fn (mut rt RadixTree) search(key string) ![]u8 { for offset < key.len { node := deserialize_node(rt.db.get(current_id)!)! - + mut found := false for child in node.children { if key[offset..].starts_with(child.key_part) { @@ -245,7 +244,7 @@ pub fn (mut rt RadixTree) delete(key string) ! { // Find the node to delete for offset < key.len { node := deserialize_node(rt.db.get(current_id)!)! 
- + mut found := false for child in node.children { if key[offset..].starts_with(child.key_part) { @@ -279,7 +278,7 @@ pub fn (mut rt RadixTree) delete(key string) ! { mut last_node := deserialize_node(rt.db.get(path.last().node_id)!)! last_node.is_leaf = false last_node.value = []u8{} - + // If node has no children, remove it from parent if last_node.children.len == 0 { if path.len > 1 { diff --git a/lib/data/radixtree/radixtree_debug.v b/lib/data/radixtree/radixtree_debug.v index db7b987c..c00f1ad2 100644 --- a/lib/data/radixtree/radixtree_debug.v +++ b/lib/data/radixtree/radixtree_debug.v @@ -50,7 +50,7 @@ pub fn (mut rt RadixTree) debug_db() ! { // Prints the tree structure starting from a given node ID pub fn (mut rt RadixTree) print_tree_from_node(node_id u32, indent string) ! { node := rt.get_node_by_id(node_id)! - + mut node_info := '${indent}Node(id: ${node_id})' node_info += '\n${indent}├── key_segment: "${node.key_segment}"' node_info += '\n${indent}├── is_leaf: ${node.is_leaf}' @@ -61,7 +61,9 @@ pub fn (mut rt RadixTree) print_tree_from_node(node_id u32, indent string) ! { if node.children.len > 0 { node_info += ' [' for i, child in node.children { - if i > 0 { node_info += ', ' } + if i > 0 { + node_info += ', ' + } node_info += '${child.node_id}:${child.key_part}' } node_info += ']' @@ -90,7 +92,7 @@ pub fn (mut rt RadixTree) print_tree() ! { // Gets detailed information about a specific node pub fn (mut rt RadixTree) get_node_info(id u32) !string { node := rt.get_node_by_id(id)! 
- + mut info := 'Node Details:\n' info += '=============\n' info += 'ID: ${id}\n' @@ -106,6 +108,6 @@ pub fn (mut rt RadixTree) get_node_info(id u32) !string { info += '- ID: ${child.node_id}, Key Part: "${child.key_part}"\n' } } - + return info } diff --git a/lib/data/radixtree/serialize.v b/lib/data/radixtree/serialize.v index 1775d660..736f59c1 100644 --- a/lib/data/radixtree/serialize.v +++ b/lib/data/radixtree/serialize.v @@ -2,57 +2,55 @@ module radixtree import freeflowuniverse.herolib.data.encoder -const ( - version = u8(1) // Current binary format version -) +const version = u8(1) // Current binary format version // Serializes a node to bytes for storage fn serialize_node(node Node) []u8 { mut e := encoder.new() - + // Add version byte e.add_u8(version) - + // Add key segment e.add_string(node.key_segment) - + // Add value as []u8 e.add_u16(u16(node.value.len)) e.data << node.value - + // Add children e.add_u16(u16(node.children.len)) for child in node.children { e.add_string(child.key_part) e.add_u32(child.node_id) } - + // Add leaf flag e.add_u8(if node.is_leaf { u8(1) } else { u8(0) }) - + return e.data } // Deserializes bytes to a node fn deserialize_node(data []u8) !Node { mut d := encoder.decoder_new(data) - + // Read and verify version version_byte := d.get_u8() if version_byte != version { return error('Invalid version byte: expected ${version}, got ${version_byte}') } - + // Read key segment key_segment := d.get_string() - + // Read value as []u8 value_len := d.get_u16() mut value := []u8{len: int(value_len)} - for i in 0..int(value_len) { + for i in 0 .. 
int(value_len) { value[i] = d.get_u8() } - + // Read children children_len := d.get_u16() mut children := []NodeRef{cap: int(children_len)} @@ -61,17 +59,17 @@ fn deserialize_node(data []u8) !Node { node_id := d.get_u32() children << NodeRef{ key_part: key_part - node_id: node_id + node_id: node_id } } - + // Read leaf flag is_leaf := d.get_u8() == 1 - + return Node{ key_segment: key_segment - value: value - children: children - is_leaf: is_leaf + value: value + children: children + is_leaf: is_leaf } } diff --git a/lib/data/radixtree/serialize_test.v b/lib/data/radixtree/serialize_test.v index db53f678..1ffe0e2f 100644 --- a/lib/data/radixtree/serialize_test.v +++ b/lib/data/radixtree/serialize_test.v @@ -4,35 +4,35 @@ fn test_serialize_deserialize() { // Create a test node with children node := Node{ key_segment: 'test' - value: 'hello world'.bytes() - children: [ + value: 'hello world'.bytes() + children: [ NodeRef{ key_part: 'child1' - node_id: 1 + node_id: 1 }, NodeRef{ key_part: 'child2' - node_id: 2 - } + node_id: 2 + }, ] - is_leaf: true + is_leaf: true } // Serialize data := serialize_node(node) - + // Verify version byte assert data[0] == version - + // Deserialize decoded := deserialize_node(data)! - + // Verify all fields match assert decoded.key_segment == node.key_segment assert decoded.value == node.value assert decoded.is_leaf == node.is_leaf assert decoded.children.len == node.children.len - + // Verify children assert decoded.children[0].key_part == node.children[0].key_part assert decoded.children[0].node_id == node.children[0].node_id @@ -44,14 +44,14 @@ fn test_empty_node() { // Test node with empty values node := Node{ key_segment: '' - value: []u8{} - children: []NodeRef{} - is_leaf: false + value: []u8{} + children: []NodeRef{} + is_leaf: false } - + data := serialize_node(node) decoded := deserialize_node(data)! 
- + assert decoded.key_segment == node.key_segment assert decoded.value == node.value assert decoded.children == node.children @@ -62,27 +62,27 @@ fn test_large_values() { // Create large test data mut large_value := []u8{len: 1000, init: u8(index & 0xFF)} mut children := []NodeRef{cap: 100} - for i in 0..100 { + for i in 0 .. 100 { children << NodeRef{ key_part: 'child${i}' - node_id: u32(i) + node_id: u32(i) } } - + node := Node{ key_segment: 'large_test' - value: large_value - children: children - is_leaf: true + value: large_value + children: children + is_leaf: true } - + data := serialize_node(node) decoded := deserialize_node(data)! - + assert decoded.key_segment == node.key_segment assert decoded.value == node.value assert decoded.children.len == node.children.len - + // Verify some random children assert decoded.children[0] == node.children[0] assert decoded.children[50] == node.children[50] @@ -92,15 +92,15 @@ fn test_large_values() { fn test_invalid_version() { node := Node{ key_segment: 'test' - value: []u8{} - children: []NodeRef{} - is_leaf: false + value: []u8{} + children: []NodeRef{} + is_leaf: false } - + mut data := serialize_node(node) // Corrupt version byte data[0] = 255 - + // Should return error for version mismatch if result := deserialize_node(data) { assert false, 'Expected error for invalid version byte' diff --git a/lib/develop/gittools/factory.v b/lib/develop/gittools/factory.v index c6af0525..468e5c4a 100644 --- a/lib/develop/gittools/factory.v +++ b/lib/develop/gittools/factory.v @@ -37,7 +37,7 @@ pub fn new(args_ GitStructureArgsNew) !&GitStructure { ssh_key_name: args.ssh_key_name } - return get(coderoot: args.coderoot,reload:args.reload,cfg:cfg) + return get(coderoot: args.coderoot, reload: args.reload, cfg: cfg) } @[params] @@ -45,7 +45,7 @@ pub struct GitStructureArgGet { pub mut: coderoot string reload bool - cfg ?GitStructureConfig + cfg ?GitStructureConfig } // Retrieve a GitStructure instance based on the given arguments. 
@@ -72,7 +72,7 @@ pub fn get(args_ GitStructureArgGet) !&GitStructure { coderoot: pathlib.get_dir(path: args.coderoot, create: true)! } - gs.config()! //will load the config, don't remove + gs.config()! // will load the config, don't remove gs.load(false)! if gs.repos.keys().len == 0 || args.reload { @@ -83,4 +83,3 @@ pub fn get(args_ GitStructureArgGet) !&GitStructure { return gsinstances[rediskey_] or { panic('bug') } } - diff --git a/lib/develop/gittools/gitstructure.v b/lib/develop/gittools/gitstructure.v index 9c627110..e4a74424 100644 --- a/lib/develop/gittools/gitstructure.v +++ b/lib/develop/gittools/gitstructure.v @@ -9,27 +9,25 @@ import json pub struct GitStructureConfig { pub mut: - coderoot string //just to be informative, its not used + coderoot string // just to be informative, its not used light bool = true // If true, clones only the last history for all branches (clone with only 1 level deep) log bool = true // If true, logs git commands/statements debug bool = true ssh_key_name string } - // GitStructure holds information about repositories within a specific code root. // This structure keeps track of loaded repositories, their configurations, and their status. @[heap] pub struct GitStructure { mut: - config_ ?GitStructureConfig // Configuration settings for the git structure. + config_ ?GitStructureConfig // Configuration settings for the git structure. pub mut: key string // Unique key representing the git structure (default is hash of $home/code). repos map[string]&GitRepo // Map of repositories - coderoot pathlib.Path + coderoot pathlib.Path } - ////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////////// @@ -41,53 +39,52 @@ pub mut: pub fn (mut gitstructure GitStructure) load(reload bool) ! 
{ mut processed_paths := []string{} - if reload{ - gitstructure.repos=map[string]&GitRepo{} + if reload { + gitstructure.repos = map[string]&GitRepo{} } gitstructure.load_recursive(gitstructure.coderoot.path, mut processed_paths)! - if reload{ + if reload { gitstructure.cache_reset()! } // mut ths := []thread !{} - //need to make sure redis is empty before doing the threads + // need to make sure redis is empty before doing the threads redisclient.reset()! - redisclient.checkempty() + redisclient.checkempty() for _, mut repo in gitstructure.repos { // mut myfunction := fn (mut repo GitRepo) ! { // } - //ths << spawn myfunction(mut repo_) + // ths << spawn myfunction(mut repo_) repo.status_update(reload: reload) or { - msg:="Error in git repo: ${repo.path()}\n${err}" + msg := 'Error in git repo: ${repo.path()}\n${err}' console.print_stderr(msg) return error(msg) } } // pp.work_on_items(todo) - //console.print_debug('loaded all threads for git on ${gitstructure.coderoot}') + // console.print_debug('loaded all threads for git on ${gitstructure.coderoot}') // for th in ths { // th.wait()! // } - // for x in pp.get_results[SResult]() { - // println('result: ${x.s}') - // } + // for x in pp.get_results[SResult]() { + // println('result: ${x.s}') + // } // console.print_debug("threads finished") - //now we need to load them back in our memory because these were done in sub process + // now we need to load them back in our memory because these were done in sub process // for _, mut r in gitstructure.repos { // r.cache_get()! // } - // gitstructure.init()! } -// Recursively loads repositories from the provided path, updating their statuses, does not check the status +// Recursively loads repositories from the provided path, updating their statuses, does not check the status // // Args: // - path (string): The path to search for repositories. 
@@ -136,7 +133,6 @@ fn (mut gitstructure GitStructure) load_recursive(path string, mut processed_pat } } - @[params] pub struct RepoInitParams { ssh_key_name string // name of ssh key to be used in repo @@ -192,76 +188,72 @@ pub fn (mut gitstructure GitStructure) get_working_repo() ?GitRepo { return gitstructure.repo_init_from_path_(curdir.path) or { return none } } - -//key in redis used to store all config info +// key in redis used to store all config info fn cache_key(coderoot string) string { key := md5.hexhash(coderoot) return 'git:${key}' } -//key in redis used to store all config info +// key in redis used to store all config info pub fn (mut self GitStructure) cache_key() string { return cache_key(self.coderoot.path) } -//load from cache +// load from cache pub fn (mut self GitStructure) cache_load() ! { // Retrieve the configuration from Redis. mut redis := redis_get() - keys := redis.keys("${self.cache_key()}:repos")! - self.repos = map[string]&GitRepo{} //reset + keys := redis.keys('${self.cache_key()}:repos')! + self.repos = map[string]&GitRepo{} // reset for key in keys { - data:=redis.get(key)! - mut r:=json.decode(GitRepo,data)! + data := redis.get(key)! + mut r := json.decode(GitRepo, data)! self.repos[key] = &r } } - // Reset all caches and configurations for all Git repositories. pub fn (mut self GitStructure) cache_reset() ! { mut redis := redis_get() - keys := redis.keys("${self.cache_key()}:**")! + keys := redis.keys('${self.cache_key()}:**')! for key in keys { redis.del(key)! } } - // Load config from redis fn (mut self GitStructure) coderoot() !pathlib.Path { - mut coderoot := pathlib.get_dir(path:self.coderoot.path,create:true)! + mut coderoot := pathlib.get_dir(path: self.coderoot.path, create: true)! 
return coderoot } - ////// CONFIG // Load config from redis pub fn (mut self GitStructure) config() !GitStructureConfig { - mut config := self.config_ or { + mut config := self.config_ or { mut redis := redis_get() - data:=redis.get("${self.cache_key()}:config")! - mut c:= GitStructureConfig{} - if data.len>0{ - c = json.decode(GitStructureConfig,data)! + data := redis.get('${self.cache_key()}:config')! + mut c := GitStructureConfig{} + if data.len > 0 { + c = json.decode(GitStructureConfig, data)! } - c + c } + return config } // Reset the configuration cache for Git structures. pub fn (mut self GitStructure) config_reset() ! { mut redis := redis_get() - redis.del("${self.cache_key()}:config")! + redis.del('${self.cache_key()}:config')! } - -//save to the cache +// save to the cache pub fn (mut self GitStructure) config_save() ! { // Retrieve the configuration from Redis. mut redis := redis_get() datajson := json.encode(self.config) - redis.set("${self.cache_key()}:config", datajson)! + redis.set('${self.cache_key()}:config', datajson)! } diff --git a/lib/develop/gittools/repos_print.v b/lib/develop/gittools/repos_print.v index e3ebaa2c..632daae4 100644 --- a/lib/develop/gittools/repos_print.v +++ b/lib/develop/gittools/repos_print.v @@ -14,7 +14,7 @@ fn get_repo_status(gr GitRepo) !string { if repo.need_push_or_pull()! { statuses << 'PULL' } - + return statuses.join(', ') } @@ -54,7 +54,7 @@ pub fn (mut gitstructure GitStructure) repos_print(args ReposGetArgs) ! 
{ // } else { // 'Repositories: ${gitstructure.config()!.coderoot}' // } - header:='Repositories: ${gitstructure.config()!.coderoot}' + header := 'Repositories: ${gitstructure.config()!.coderoot}' console.print_header(header) // Print the repository information in a formatted array diff --git a/lib/develop/gittools/repository.v b/lib/develop/gittools/repository.v index 55ab2c2d..03385bb2 100644 --- a/lib/develop/gittools/repository.v +++ b/lib/develop/gittools/repository.v @@ -18,7 +18,7 @@ pub mut: config GitRepoConfig // Repository-specific configuration last_load int // Epoch timestamp of the last load from reality deploysshkey string // to use with git - has_changes bool + has_changes bool } // this is the status we want, we need to work towards off @@ -121,9 +121,7 @@ pub fn (mut repo GitRepo) pull(args_ PullCheckoutArgs) ! { repo.checkout()! } - repo.exec('git pull') or { - return error('Cannot pull repo: ${repo.path()}. Error: ${err}') - } + repo.exec('git pull') or { return error('Cannot pull repo: ${repo.path()}. Error: ${err}') } if args_.submodules { repo.update_submodules()! @@ -299,7 +297,7 @@ fn (mut repo GitRepo) update_submodules() ! { fn (repo GitRepo) exec(cmd_ string) !string { repo_path := repo.path() cmd := 'cd ${repo_path} && ${cmd_}' - //console.print_debug(cmd) + // console.print_debug(cmd) r := os.execute(cmd) if r.exit_code != 0 { return error('Repo failed to exec cmd: ${cmd}\n${r.output})') diff --git a/lib/develop/gittools/repository_cache.v b/lib/develop/gittools/repository_cache.v index 16823716..799300e2 100644 --- a/lib/develop/gittools/repository_cache.v +++ b/lib/develop/gittools/repository_cache.v @@ -21,9 +21,7 @@ fn (mut repo GitRepo) cache_get() ! { mut repo_json := '' mut redis_client := redis_get() cache_key := repo.cache_key() - repo_json = redis_client.get(cache_key) or { - return - } + repo_json = redis_client.get(cache_key) or { return } if repo_json.len > 0 { mut cached := json.decode(GitRepo, repo_json)! 
diff --git a/lib/develop/gittools/repository_clone.v b/lib/develop/gittools/repository_clone.v index 27796762..4831fb60 100644 --- a/lib/develop/gittools/repository_clone.v +++ b/lib/develop/gittools/repository_clone.v @@ -1,7 +1,6 @@ module gittools import freeflowuniverse.herolib.ui.console - import os @[params] diff --git a/lib/develop/gittools/repository_info.v b/lib/develop/gittools/repository_info.v index c9675921..bedf1cd4 100644 --- a/lib/develop/gittools/repository_info.v +++ b/lib/develop/gittools/repository_info.v @@ -58,7 +58,7 @@ pub fn (mut repo GitRepo) need_push_or_pull() !bool { last_local_commit := repo.get_last_local_commit() or { return error('Failed to get last local commit: ${err}') } - //println('commit status: ${repo.name} ${last_local_commit} ${last_remote_commit}') + // println('commit status: ${repo.name} ${last_local_commit} ${last_remote_commit}') return last_local_commit != last_remote_commit } diff --git a/lib/develop/gittools/repository_load.v b/lib/develop/gittools/repository_load.v index 82046ae1..0c6a6aa3 100644 --- a/lib/develop/gittools/repository_load.v +++ b/lib/develop/gittools/repository_load.v @@ -3,9 +3,10 @@ module gittools import time import freeflowuniverse.herolib.ui.console import os + @[params] pub struct StatusUpdateArgs { - reload bool + reload bool } pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! { @@ -28,7 +29,7 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! { fn (mut repo GitRepo) load() ! { console.print_debug('load ${repo.cache_key()}') repo.init()! 
- if os.exists("${repo.path()}/.git") == false{ + if os.exists('${repo.path()}/.git') == false { return error("Can't find git in repo ${repo.path()}") } repo.exec('git fetch --all') or { diff --git a/lib/develop/gittools/repository_utils.v b/lib/develop/gittools/repository_utils.v index d2f9c765..afcff0d3 100644 --- a/lib/develop/gittools/repository_utils.v +++ b/lib/develop/gittools/repository_utils.v @@ -13,24 +13,23 @@ pub mut: create bool } -//get the key in redis where json cached info is +// get the key in redis where json cached info is pub fn (mut repo GitRepo) cache_key() string { return '${repo.gs.cache_key()}:${repo.provider}:${repo.account}:${repo.name}' } -//get path where the repo is on the fs +// get path where the repo is on the fs pub fn (repo GitRepo) path() string { - mut repo_:=repo - mypath:=repo_.gs.coderoot.path + mut repo_ := repo + mypath := repo_.gs.coderoot.path return '${mypath}/${repo.provider}/${repo.account}/${repo.name}' } -//get herolib path object +// get herolib path object pub fn (repo GitRepo) patho() !pathlib.Path { return pathlib.get_dir(path: repo.path(), create: false)! } - // gets the path of a given url within a repo // ex: 'https://git.ourworld.tf/ourworld_holding/info_ourworld/src/branch/main/books/cocreation/SUMMARY.md' // returns /books/cocreation/SUMMARY.md @@ -64,7 +63,7 @@ pub fn (mut repo GitRepo) get_path_of_url(url string) !string { // Relative path inside the gitstructure, pointing to the repo pub fn (repo GitRepo) get_relative_path() !string { mut mypath := repo.patho()! 
- mut repo_:=repo + mut repo_ := repo return mypath.path_relative(repo_.gs.coderoot.path) or { panic("couldn't get relative path") } } diff --git a/lib/installers/infra/livekit/livekit_factory_.v b/lib/installers/infra/livekit/livekit_factory_.v index abc47f3b..208797a5 100644 --- a/lib/installers/infra/livekit/livekit_factory_.v +++ b/lib/installers/infra/livekit/livekit_factory_.v @@ -202,19 +202,3 @@ pub fn (mut self LivekitServer) destroy() ! { pub fn switch(name string) { livekit_default = name } - -pub fn (mut self LivekitServer) install(args InstallArgs) ! { - switch(self.name) - if args.reset { - destroy_()! - } - if !(installed_()!) { - install_()! - } -} - -pub fn (mut self LivekitServer) destroy() ! { - switch(self.name) - self.stop() or {} - destroy_()! -} diff --git a/lib/installers/infra/livekit/livekit_model.v b/lib/installers/infra/livekit/livekit_model.v index 2ff96b36..497cadf6 100644 --- a/lib/installers/infra/livekit/livekit_model.v +++ b/lib/installers/infra/livekit/livekit_model.v @@ -7,7 +7,6 @@ pub const version = '1.7.2' const singleton = false const default = true -// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT pub fn heroscript_default() !string { heroscript := " !!livekit.configure diff --git a/lib/lang/python/readme.md b/lib/lang/python/readme.md index 444ecb98..b3a71a6d 100644 --- a/lib/lang/python/readme.md +++ b/lib/lang/python/readme.md @@ -20,7 +20,7 @@ source ~/hero/python/default/bin/activate ```v -#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.lang.python diff --git a/lib/osal/hostsfile/README.md b/lib/osal/hostsfile/README.md index f42ba55e..4a712fba 100644 --- a/lib/osal/hostsfile/README.md +++ b/lib/osal/hostsfile/README.md @@ -17,7 +17,7 @@ This module provides functionality to manage the system's hosts file 
(`/etc/host Create a file `example.vsh`: ```v -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import freeflowuniverse.herolib.osal.hostsfile import os diff --git a/lib/security/authentication/authenticator.v b/lib/security/authentication/authenticator.v index f6f4335b..845af260 100644 --- a/lib/security/authentication/authenticator.v +++ b/lib/security/authentication/authenticator.v @@ -14,7 +14,7 @@ import log pub struct Authenticator { secret string mut: - config SmtpConfig @[required] + config SmtpConfig @[required] backend IBackend // Backend for authenticator } @@ -56,7 +56,7 @@ pub fn new(config AuthenticatorConfig) !Authenticator { // password: config.smtp.password // )! backend: config.backend - secret: config.secret + secret: config.secret } } @@ -92,24 +92,24 @@ pub fn (mut auth Authenticator) send_verification_mail(config SendMailConfig) ! // create auth session auth_code := rand.bytes(64) or { panic(err) } auth.backend.create_auth_session( - email: config.email + email: config.email auth_code: auth_code.hex() - timeout: time.now().add_seconds(180) + timeout: time.now().add_seconds(180) )! link := 'Click to authenticate' mail := smtp.Mail{ - to: config.email - from: config.mail.from - subject: config.mail.subject + to: config.email + from: config.mail.from + subject: config.mail.subject body_type: .html - body: '${config.mail.body}\n${link}' + body: '${config.mail.body}\n${link}' } mut client := smtp.new_client( - server: auth.config.server - from: auth.config.from - port: auth.config.port + server: auth.config.server + from: auth.config.from + port: auth.config.port username: auth.config.username password: auth.config.password )! @@ -128,17 +128,17 @@ pub fn (mut auth Authenticator) send_login_link(config SendMailConfig) ! 
{ encoded_signature := base64.url_encode(signature.bytestr().bytes()) link := 'Click to login' mail := smtp.Mail{ - to: config.email - from: config.mail.from - subject: config.mail.subject + to: config.email + from: config.mail.from + subject: config.mail.subject body_type: .html - body: '${config.mail.body}\n${link}' + body: '${config.mail.body}\n${link}' } mut client := smtp.new_client( - server: auth.config.server - from: auth.config.from - port: auth.config.port + server: auth.config.server + from: auth.config.from + port: auth.config.port username: auth.config.username password: auth.config.password )! @@ -222,7 +222,7 @@ pub fn (mut auth Authenticator) authenticate(email string, cypher string) ! { } pub struct AwaitAuthParams { - email string @[required] + email string @[required] timeout time.Duration = 3 * time.minute } diff --git a/lib/security/authentication/backend_database.v b/lib/security/authentication/backend_database.v index b133b7dc..7cb84cf0 100644 --- a/lib/security/authentication/backend_database.v +++ b/lib/security/authentication/backend_database.v @@ -8,7 +8,7 @@ import time @[noinit] struct DatabaseBackend { mut: - db sqlite.DB + db sqlite.DB } @[params] diff --git a/lib/security/authentication/client.v b/lib/security/authentication/client.v index 2e38f2a4..cce8ef9f 100644 --- a/lib/security/authentication/client.v +++ b/lib/security/authentication/client.v @@ -33,8 +33,8 @@ fn (client EmailClient) post_request(params PostParams) !http.Response { // verify_email posts an email verification req to the email auth controller pub fn (client EmailClient) email_authentication(params SendMailConfig) ! { client.post_request( - url: '${client.url}/email_authentication' - data: json.encode(params) + url: '${client.url}/email_authentication' + data: json.encode(params) timeout: 180 * time.second )! } @@ -42,8 +42,8 @@ pub fn (client EmailClient) email_authentication(params SendMailConfig) ! 
{ // verify_email posts an email verification req to the email auth controller pub fn (client EmailClient) is_verified(address string) !bool { resp := client.post_request( - url: '${client.url}/is_verified' - data: json.encode(address) + url: '${client.url}/is_verified' + data: json.encode(address) timeout: 180 * time.second )! return resp.body == 'true' @@ -52,7 +52,7 @@ pub fn (client EmailClient) is_verified(address string) !bool { // send_verification_email posts an email verification req to the email auth controller pub fn (client EmailClient) send_verification_email(params SendMailConfig) ! { client.post_request( - url: '${client.url}/send_verification_mail' + url: '${client.url}/send_verification_mail' data: json.encode(params) ) or { return error(err.msg()) } } @@ -61,7 +61,7 @@ pub fn (client EmailClient) send_verification_email(params SendMailConfig) ! { pub fn (c EmailClient) authenticate(address string, cypher string) !AttemptResult { resp := http.post('${c.url}/authenticate', json.encode(AuthAttempt{ address: address - cypher: cypher + cypher: cypher }))! result := json.decode(AttemptResult, resp.body)! 
return result diff --git a/lib/security/authentication/controller_test.v b/lib/security/authentication/controller_test.v index 4394274a..cdf8a740 100644 --- a/lib/security/authentication/controller_test.v +++ b/lib/security/authentication/controller_test.v @@ -15,9 +15,9 @@ fn test_new_controller() { } client := smtp.Client{ - server: 'smtp-relay.brevo.com' - from: 'verify@authenticator.io' - port: 587 + server: 'smtp-relay.brevo.com' + from: 'verify@authenticator.io' + port: 587 username: env.value('BREVO_SMTP_USERNAME').string() password: env.value('BREVO_SMTP_PASSWORD').string() } diff --git a/lib/security/authentication/email_authentication.v b/lib/security/authentication/email_authentication.v index 0544d996..924efa3c 100644 --- a/lib/security/authentication/email_authentication.v +++ b/lib/security/authentication/email_authentication.v @@ -5,28 +5,30 @@ import crypto.hmac import crypto.sha256 import encoding.hex import encoding.base64 -import freeflowuniverse.herolib.clients.mailclient {MailClient} +import freeflowuniverse.herolib.clients.mailclient { MailClient } pub struct StatelessAuthenticator { pub: secret string pub mut: - mail_client MailClient + mail_client MailClient } - pub fn new_stateless_authenticator(authenticator StatelessAuthenticator) !StatelessAuthenticator { +pub fn new_stateless_authenticator(authenticator StatelessAuthenticator) !StatelessAuthenticator { // TODO: do some checks - return StatelessAuthenticator {...authenticator} + return StatelessAuthenticator{ + ...authenticator + } } pub struct AuthenticationMail { RedirectURLs pub: - to string // email address being authentcated - from string = 'email_authenticator@herolib.tf' - subject string = 'Verify your email' - body string = 'Please verify your email by clicking the link below' - callback string // callback url of authentication link + to string // email address being authentcated + from string = 'email_authenticator@herolib.tf' + subject string = 'Verify your email' + body string 
= 'Please verify your email by clicking the link below' + callback string // callback url of authentication link success_url string // where the user will be redirected upon successful authentication failure_url string // where the user will be redirected upon failed authentication } @@ -35,14 +37,14 @@ pub fn (mut a StatelessAuthenticator) send_authentication_mail(mail Authenticati link := a.new_authentication_link(mail.to, mail.callback, mail.RedirectURLs)! button := 'Verify Email' - // send email with link in body - a.mail_client.send( - to: mail.to - from: mail.from - subject: mail.subject - body_type: .html - body: $tmpl('./templates/mail.html') - ) or { return error('Error resolving email address $err') } + // send email with link in body + a.mail_client.send( + to: mail.to + from: mail.from + subject: mail.subject + body_type: .html + body: $tmpl('./templates/mail.html') + ) or { return error('Error resolving email address ${err}') } } @[params] @@ -59,48 +61,39 @@ fn (a StatelessAuthenticator) new_authentication_link(email string, callback str // sign email address and expiration of authentication link expiration := time.now().add(5 * time.minute) - data := '${email}.${expiration}' // data to be signed + data := '${email}.${expiration}' // data to be signed // QUESTION? should success url also be signed for security? 
- signature := hmac.new( - hex.decode(a.secret)!, - data.bytes(), - sha256.sum, - sha256.block_size - ) - encoded_signature := base64.url_encode(signature.bytestr().bytes()) + signature := hmac.new(hex.decode(a.secret)!, data.bytes(), sha256.sum, sha256.block_size) + encoded_signature := base64.url_encode(signature.bytestr().bytes()) mut queries := '' if urls.success_url != '' { encoded_url := base64.url_encode(urls.success_url.bytes()) queries += '?success_url=${encoded_url}' } - return "${callback}/${email}/${expiration.unix()}/${encoded_signature}${queries}" + return '${callback}/${email}/${expiration.unix()}/${encoded_signature}${queries}' } pub struct AuthenticationAttempt { pub: - email string - expiration time.Time - signature string + email string + expiration time.Time + signature string } // sends mail with login link -pub fn (auth StatelessAuthenticator) authenticate(attempt AuthenticationAttempt) ! { - if time.now() > attempt.expiration { - return error('link expired') - } +pub fn (auth StatelessAuthenticator) authenticate(attempt AuthenticationAttempt) ! 
{ + if time.now() > attempt.expiration { + return error('link expired') + } - data := '${attempt.email}.${attempt.expiration}' // data to be signed - signature_mirror := hmac.new( - hex.decode(auth.secret) or {panic(err)}, - data.bytes(), - sha256.sum, - sha256.block_size - ).bytestr().bytes() + data := '${attempt.email}.${attempt.expiration}' // data to be signed + signature_mirror := hmac.new(hex.decode(auth.secret) or { panic(err) }, data.bytes(), + sha256.sum, sha256.block_size).bytestr().bytes() - decoded_signature := base64.url_decode(attempt.signature) + decoded_signature := base64.url_decode(attempt.signature) - if !hmac.equal(decoded_signature, signature_mirror) { - return error('signature mismatch') - } + if !hmac.equal(decoded_signature, signature_mirror) { + return error('signature mismatch') + } } diff --git a/lib/security/jwt/jwt.v b/lib/security/jwt/jwt.v index 9f70712b..126209dd 100644 --- a/lib/security/jwt/jwt.v +++ b/lib/security/jwt/jwt.v @@ -39,7 +39,7 @@ pub: // DOESN'T handle data encryption, sensitive data should be encrypted pub fn create_token(payload_ JwtPayload) JsonWebToken { return JsonWebToken{ - JwtHeader: JwtHeader{'HS256', 'JWT'} + JwtHeader: JwtHeader{'HS256', 'JWT'} JwtPayload: JwtPayload{ ...payload_ iat: time.now() @@ -92,7 +92,7 @@ pub fn (token SignedJWT) decode() !JsonWebToken { payload_json := base64.url_decode(payload_urlencoded).bytestr() payload := json.decode(JwtPayload, payload_json) or { panic('Decoding payload: ${err}') } return JsonWebToken{ - JwtHeader: header + JwtHeader: header JwtPayload: payload } } diff --git a/manual/best_practices/scripts/scripts.md b/manual/best_practices/scripts/scripts.md index e76c3fd9..eed603ea 100644 --- a/manual/best_practices/scripts/scripts.md +++ b/manual/best_practices/scripts/scripts.md @@ -6,7 +6,7 @@ example would be ```go -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d 
use_openssl -enable-globals run fn sh(cmd string) { println('❯ ${cmd}') @@ -33,7 +33,7 @@ $if !linux { ## argument parsing ```v -#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run import os import flag diff --git a/manual/best_practices/scripts/shebang.md b/manual/best_practices/scripts/shebang.md index b9e918b0..718aded9 100644 --- a/manual/best_practices/scripts/shebang.md +++ b/manual/best_practices/scripts/shebang.md @@ -5,7 +5,7 @@ is the first line of a script, your os will use that one to get started. for V we use ```bash -#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run +#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run ``` - -w no warnings diff --git a/test_basic.vsh b/test_basic.vsh index d0e5345c..6290dc7c 100755 --- a/test_basic.vsh +++ b/test_basic.vsh @@ -1,4 +1,4 @@ -#!/usr/bin/env -S v -cc gcc -n -w -gc none -no-retry-compilation -d use_openssl -enable-globals run +#!/usr/bin/env -S v -cc gcc -n -w -gc none -d use_openssl -enable-globals run import os import flag @@ -119,7 +119,7 @@ fn dotest(path string, base_dir string, mut cache TestCache) ! { return } - cmd := 'v -stats -enable-globals -n -w -gc none -no-retry-compilation test ${norm_path}' + cmd := 'v -stats -enable-globals -n -w -gc none test ${norm_path}' println(cmd) result := os.execute(cmd) eprintln(result)