2025-09-14 18:25:45 +02:00
parent 95507002c9
commit 6a02a45474
78 changed files with 421 additions and 410 deletions

View File

@@ -1,6 +1,6 @@
module mcpgen
-import freeflowuniverse.herolib.core.code
+import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.schemas.jsonschema.codegen

View File

@@ -1,7 +1,7 @@
module mcpgen
import freeflowuniverse.herolib.ai.mcp
-import freeflowuniverse.herolib.core.code
+import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any }
// import json

View File

@@ -1,7 +1,7 @@
module vcode
import freeflowuniverse.herolib.ai.mcp
-import freeflowuniverse.herolib.core.code
+import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

View File

@@ -1,7 +1,7 @@
module vcode
import freeflowuniverse.herolib.ai.mcp
-import freeflowuniverse.herolib.core.code
+import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

View File

@@ -168,7 +168,7 @@ pub fn (mut self DBFsDir) list_by_filesystem(fs_id u32) ![]FsDir {
pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
// Normalize path (remove trailing slashes, handle empty path)
normalized_path := if path == '' || path == '/' { '/' } else { path.trim_right('/') }
if normalized_path == '/' {
// Special case for root directory
dirs := self.list_by_filesystem(fs_id)!
@@ -179,14 +179,14 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
}
return error('Root directory not found for filesystem ${fs_id}')
}
// Split path into components
components := normalized_path.trim_left('/').split('/')
// Start from the root directory
mut current_dir_id := u32(0)
mut dirs := self.list_by_filesystem(fs_id)!
// Find root directory
for dir in dirs {
if dir.parent_id == 0 {
@@ -194,11 +194,11 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
break
}
}
if current_dir_id == 0 {
return error('Root directory not found for filesystem ${fs_id}')
}
// Navigate through path components
for component in components {
mut found := false
@@ -209,15 +209,15 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
break
}
}
if !found {
return error('Directory "${component}" not found in path "${normalized_path}"')
}
// Update dirs for next iteration
dirs = self.list_children(current_dir_id)!
}
return self.get(current_dir_id)!
}
@@ -225,7 +225,7 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
// Normalize path
normalized_path := if path == '' || path == '/' { '/' } else { path.trim_right('/') }
if normalized_path == '/' {
// Special case for root directory
dirs := self.list_by_filesystem(fs_id)!
@@ -234,24 +234,24 @@ pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
return dir.id
}
}
// Create root directory if it doesn't exist
mut root_dir := self.new(
name: 'root'
fs_id: fs_id
parent_id: 0
description: 'Root directory'
)!
return self.set(root_dir)!
}
// Split path into components
components := normalized_path.trim_left('/').split('/')
// Start from the root directory
mut current_dir_id := u32(0)
mut dirs := self.list_by_filesystem(fs_id)!
// Find or create root directory
for dir in dirs {
if dir.parent_id == 0 {
@@ -259,18 +259,18 @@ pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
break
}
}
if current_dir_id == 0 {
// Create root directory
mut root_dir := self.new(
name: 'root'
fs_id: fs_id
parent_id: 0
description: 'Root directory'
)!
current_dir_id = self.set(root_dir)!
}
// Navigate/create through path components
for component in components {
mut found := false
@@ -281,22 +281,22 @@ pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
break
}
}
if !found {
// Create this directory component
mut new_dir := self.new(
name: component
fs_id: fs_id
parent_id: current_dir_id
description: 'Directory created as part of path ${normalized_path}'
)!
current_dir_id = self.set(new_dir)!
}
// Update directory list for next iteration
dirs = self.list_children(current_dir_id)!
}
return current_dir_id
}
@@ -310,23 +310,23 @@ pub fn (mut self DBFsDir) delete_by_path(fs_id u32, path string) ! {
pub fn (mut self DBFsDir) move_by_path(fs_id u32, source_path string, dest_path string) !u32 {
// Get the source directory
source_dir := self.get_by_absolute_path(fs_id, source_path)!
// For the destination, we need the parent directory
dest_dir_path := dest_path.all_before_last('/')
dest_dir_name := dest_path.all_after_last('/')
dest_parent_dir := if dest_dir_path == '' || dest_dir_path == '/' {
// Moving to the root
self.get_by_absolute_path(fs_id, '/')!
} else {
self.get_by_absolute_path(fs_id, dest_dir_path)!
}
// First rename if the destination name is different
if source_dir.name != dest_dir_name {
self.rename(source_dir.id, dest_dir_name)!
}
// Then move to the new parent
return self.move(source_dir.id, dest_parent_dir.id)!
}
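Taken together, the path helpers above (`create_path`, `get_by_absolute_path`, `move_by_path`) form a small path-oriented API on top of the ID-based one. A minimal usage sketch, assuming a factory obtained with `herofs.new()!` as in the RPC handlers later in this commit; the filesystem id and error handling are illustrative assumptions, not taken from this diff:

	mut fs_factory := herofs.new()! // assumed factory constructor
	fs_id := u32(1) // assumed id of an existing filesystem
	// create the whole path, creating missing components (including the root) on the way
	docs_id := fs_factory.fs_dir.create_path(fs_id, '/projects/docs')!
	// resolve the same directory back from its absolute path
	docs := fs_factory.fs_dir.get_by_absolute_path(fs_id, '/projects/docs')!
	assert docs.id == docs_id
	// rename and reparent in one call
	fs_factory.fs_dir.move_by_path(fs_id, '/projects/docs', '/archive/docs-2025')!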
@@ -393,9 +393,9 @@ pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) !u32 {
// List contents of a directory with filtering capabilities
pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, opts ListContentsOptions) !DirectoryContents {
mut result := DirectoryContents{}
// Helper function to check if name matches include/exclude patterns
// Check if item should be included based on patterns
should_include_local := fn (name string, include_patterns []string, exclude_patterns []string) bool {
// Helper function to check if name matches include/exclude patterns
@@ -403,12 +403,12 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
if patterns.len == 0 {
return true // No patterns means include everything
}
for pattern in patterns {
if pattern.contains('*') {
prefix := pattern.all_before('*')
suffix := pattern.all_after('*')
if prefix == '' && suffix == '' {
return true // Pattern is just "*"
} else if prefix == '' {
@@ -428,30 +428,30 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
return true // Exact match
}
}
return false
}
// First apply include patterns (if empty, include everything)
if !matches_pattern_fn(name, include_patterns) && include_patterns.len > 0 {
return false
}
// Then apply exclude patterns
if matches_pattern_fn(name, exclude_patterns) && exclude_patterns.len > 0 {
return false
}
return true
}
// Get directories, files, and symlinks in the current directory
dirs := self.list_children(dir_id)!
for dir in dirs {
if should_include_local(dir.name, opts.include_patterns, opts.exclude_patterns) {
result.directories << dir
}
// If recursive, process subdirectories
if opts.recursive {
sub_contents := self.list_contents(mut fs_factory, dir.id, opts)!
@@ -460,7 +460,7 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
result.symlinks << sub_contents.symlinks
}
}
// Get files in the directory
files := fs_factory.fs_file.list_by_directory(dir_id)!
for file in files {
@@ -468,7 +468,7 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
result.files << file
}
}
// Get symlinks in the directory
symlinks := fs_factory.fs_symlink.list_by_parent(dir_id)!
for symlink in symlinks {
@@ -476,6 +476,6 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
result.symlinks << symlink
}
}
return result
}
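For the filtering variant above, a brief sketch of how `list_contents` might be called. Only the `recursive`, `include_patterns` and `exclude_patterns` fields of `ListContentsOptions` are confirmed by this diff (they are also set in the `fs_dir_list_contents` handler further down); the rest of the setup is illustrative:

	mut fs_factory := herofs.new()! // assumed factory constructor
	docs := fs_factory.fs_dir.get_by_absolute_path(1, '/projects/docs')!
	contents := fs_factory.fs_dir.list_contents(mut fs_factory, docs.id, ListContentsOptions{
		recursive:        true
		include_patterns: ['*.md']
		exclude_patterns: ['draft*']
	})!
	println('${contents.directories.len} dirs, ${contents.files.len} files, ${contents.symlinks.len} symlinks')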

View File

@@ -26,15 +26,15 @@ pub mut:
include_patterns []string // File/directory name patterns to include (e.g. ['*.v', 'doc*'])
exclude_patterns []string // File/directory name patterns to exclude
max_depth int = -1 // Maximum depth to search (-1 for unlimited)
follow_symlinks bool // Whether to follow symbolic links during search
}
// CopyOptions provides options for copy operations
@[params]
pub struct CopyOptions {
pub mut:
recursive bool = true // Copy directories recursively
preserve_links bool = true // Preserve symbolic links as links
overwrite bool // Overwrite existing files
follow_symlinks bool // Follow symlinks instead of copying them
}
@@ -74,12 +74,12 @@ fn matches_pattern(name string, patterns []string) bool {
if patterns.len == 0 {
return true // No patterns means include everything
}
for pattern in patterns {
if pattern.contains('*') {
prefix := pattern.all_before('*')
suffix := pattern.all_after('*')
if prefix == '' && suffix == '' {
return true // Pattern is just "*"
} else if prefix == '' {
@@ -99,7 +99,7 @@ fn matches_pattern(name string, patterns []string) bool {
return true // Exact match
}
}
return false
}
@@ -109,12 +109,12 @@ fn should_include(name string, include_patterns []string, exclude_patterns []str
if !matches_pattern(name, include_patterns) && include_patterns.len > 0 {
return false
}
// Then apply exclude patterns
if matches_pattern(name, exclude_patterns) && exclude_patterns.len > 0 {
return false
}
return true
}
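The matcher in the two hunks above supports only a single `*` per pattern, split into a prefix and a suffix. A standalone sketch of the same rule, written as an illustration rather than a copy of the module's code:

	fn matches_simple_glob(name string, pattern string) bool {
		if !pattern.contains('*') {
			return name == pattern // exact match
		}
		prefix := pattern.all_before('*')
		suffix := pattern.all_after('*')
		if prefix == '' && suffix == '' {
			return true // pattern is just '*'
		} else if prefix == '' {
			return name.ends_with(suffix) // e.g. '*.v'
		} else if suffix == '' {
			return name.starts_with(prefix) // e.g. 'doc*'
		}
		return name.starts_with(prefix) && name.ends_with(suffix)
	}

	fn main() {
		assert matches_simple_glob('fs_dir.v', '*.v')
		assert matches_simple_glob('doc_tools.md', 'doc*')
		assert !matches_simple_glob('readme.md', '*.v')
	}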
@@ -132,14 +132,14 @@ fn split_path(path string) (string, string) {
if normalized == '/' {
return '/', ''
}
mut dir_path := normalized.all_before_last('/')
filename := normalized.all_after_last('/')
if dir_path == '' {
dir_path = '/'
}
return dir_path, filename
}
@@ -149,7 +149,7 @@ fn parent_path(path string) string {
if normalized == '/' {
return '/'
}
parent := normalized.all_before_last('/')
if parent == '' {
return '/'
@@ -169,13 +169,13 @@ fn join_path(base string, component string) string {
// Find filesystem objects starting from a given path
pub fn (mut self FsTools) find(fs_id u32, start_path string, opts FindOptions) ![]FindResult {
mut results := []FindResult{}
// Get the starting directory
start_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, start_path)!
// Start recursive search
self.find_recursive(fs_id, start_dir.id, start_path, opts, mut results, 0)!
return results
}
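A usage sketch for `find`. The `FindOptions` fields come from the struct hunk above, the `recursive` field is assumed from its use as `opts.recursive` in `find_recursive` below, and the `FsTools` wiring is an assumption; `fs_factory` and `fs_id` are as in the earlier sketches:

	mut tools := FsTools{
		factory: fs_factory // assumed: FsTools keeps its FsFactory in a field named factory, as used via self.factory
	}
	opts := FindOptions{
		recursive:        true
		include_patterns: ['*.v']
		exclude_patterns: ['_test*']
		max_depth:        3
	}
	results := tools.find(fs_id, '/projects', opts)!
	for r in results {
		println('${r.result_type}: ${r.path}')
	}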
@@ -185,20 +185,20 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
if opts.max_depth >= 0 && current_depth > opts.max_depth {
return
}
// Get current directory info
current_dir := self.factory.fs_dir.get(dir_id)!
// Check if current directory matches search criteria
if should_include(current_dir.name, opts.include_patterns, opts.exclude_patterns) {
results << FindResult{
result_type: .directory
id: dir_id
path: current_path
name: current_dir.name
}
}
// Get files in current directory
files := self.factory.fs_file.list_by_directory(dir_id)!
for file in files {
@@ -206,13 +206,13 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
file_path := join_path(current_path, file.name)
results << FindResult{
result_type: .file
id: file.id
path: file_path
name: file.name
}
}
}
// Get symlinks in current directory
symlinks := self.factory.fs_symlink.list_by_parent(dir_id)!
for symlink in symlinks {
@@ -220,22 +220,23 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
symlink_path := join_path(current_path, symlink.name)
results << FindResult{
result_type: .symlink
id: symlink.id
path: symlink_path
name: symlink.name
}
}
// Follow symlinks if requested and they point to directories
if opts.follow_symlinks && opts.recursive && symlink.target_type == .directory {
// Check if symlink is not broken
if !self.factory.fs_symlink.is_broken(symlink.id)! {
symlink_path := join_path(current_path, symlink.name)
-self.find_recursive(fs_id, symlink.target_id, symlink_path, opts, mut results, current_depth + 1)!
+self.find_recursive(fs_id, symlink.target_id, symlink_path, opts, mut
+results, current_depth + 1)!
}
}
}
// Process subdirectories if recursive
if opts.recursive {
subdirs := self.factory.fs_dir.list_children(dir_id)!
@@ -245,29 +246,30 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
}
}
}
// Remove filesystem objects starting from a given path
pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions) ! {
normalized_path := normalize_path(target_path)
// Try to find what we're removing (file, directory, or symlink)
dir_path, filename := split_path(normalized_path)
if filename == '' {
// We're removing a directory by its path
self.rm_directory_by_path(fs_id, normalized_path, opts)!
} else {
// We're removing a specific item within a directory
parent_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, dir_path)!
// Try to find what we're removing
mut found := false
// Try file first
if file := self.factory.fs_file.get_by_path(parent_dir.id, filename) {
self.rm_file(file.id, opts)!
found = true
}
// Try symlink if file not found
if !found {
if symlink := self.factory.fs_symlink.get_by_path(parent_dir.id, filename) {
@@ -275,7 +277,7 @@ pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions)
found = true
}
}
// Try directory if neither file nor symlink found
if !found {
if subdir := self.factory.fs_dir.get_by_path(fs_id, parent_dir.id, filename) {
@@ -283,7 +285,7 @@ pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions)
found = true
}
}
if !found {
return error('Path "${target_path}" not found')
}
@@ -293,18 +295,18 @@ pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions)
// Remove a file by ID
fn (mut self FsTools) rm_file(file_id u32, opts RemoveOptions) ! {
file := self.factory.fs_file.get(file_id)!
// If file is in multiple directories and force is not set, only remove from directories
if file.directories.len > 1 && !opts.force {
return error('File "${file.name}" exists in multiple directories. Use force=true to delete completely or remove from specific directories.')
}
// Collect blob IDs before deleting the file
blob_ids := file.blobs.clone()
// Delete the file
self.factory.fs_file.delete(file_id)!
// Delete blobs if requested
if opts.delete_blobs {
for blob_id in blob_ids {
@@ -326,11 +328,11 @@ fn (mut self FsTools) rm_directory(dir_id u32, opts RemoveOptions) ! {
dir := self.factory.fs_dir.get(dir_id)!
return error('Directory "${dir.name}" is not empty. Use recursive=true to remove contents.')
}
// Remove all children recursively
self.rm_directory_contents(dir_id, opts)!
}
// Remove the directory itself
self.factory.fs_dir.delete(dir_id)!
}
@@ -348,13 +350,13 @@ fn (mut self FsTools) rm_directory_contents(dir_id u32, opts RemoveOptions) ! {
for file in files {
self.rm_file(file.id, opts)!
}
// Remove all symlinks in the directory
symlinks := self.factory.fs_symlink.list_by_parent(dir_id)!
for symlink in symlinks {
self.rm_symlink(symlink.id)!
}
// Remove all subdirectories recursively
subdirs := self.factory.fs_dir.list_children(dir_id)!
for subdir in subdirs {
@@ -379,14 +381,15 @@ fn (mut self FsTools) is_blob_used_by_other_files(blob_id u32, exclude_file_id u
}
return false
}
// Copy filesystem objects from source path to destination path
pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, opts CopyOptions) ! {
normalized_source := normalize_path(source_path)
normalized_dest := normalize_path(dest_path)
// Determine what we're copying
source_dir_path, source_filename := split_path(normalized_source)
if source_filename == '' {
// We're copying a directory
source_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, normalized_source)!
@@ -394,16 +397,16 @@ pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, op
} else {
// We're copying a specific item
source_parent_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, source_dir_path)!
// Try to find what we're copying
mut found := false
// Try file first
if file := self.factory.fs_file.get_by_path(source_parent_dir.id, source_filename) {
self.cp_file(fs_id, file.id, normalized_dest, opts)!
found = true
}
// Try symlink if file not found
if !found {
if symlink := self.factory.fs_symlink.get_by_path(source_parent_dir.id, source_filename) {
@@ -411,15 +414,18 @@ pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, op
found = true
}
}
// Try directory if neither file nor symlink found
if !found {
-if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id, source_filename) {
-self.cp_directory(fs_id, subdir.id, normalized_source, normalized_dest, opts)!
+if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id,
+source_filename)
+{
+self.cp_directory(fs_id, subdir.id, normalized_source, normalized_dest,
+opts)!
found = true
}
}
if !found {
return error('Source path "${source_path}" not found')
}
@@ -429,16 +435,16 @@ pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, op
// Copy a file to destination path
fn (mut self FsTools) cp_file(fs_id u32, file_id u32, dest_path string, opts CopyOptions) ! {
source_file := self.factory.fs_file.get(file_id)!
// Determine destination directory and filename
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_file.name
}
// Ensure destination directory exists (create if needed)
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination file already exists
if existing_file := self.factory.fs_file.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -447,26 +453,26 @@ fn (mut self FsTools) cp_file(fs_id u32, file_id u32, dest_path string, opts Cop
// Remove existing file
self.factory.fs_file.delete(existing_file.id)!
}
// Create new file with same content (reuse blobs)
new_file := self.factory.fs_file.new(
name: dest_filename
fs_id: fs_id
directories: [dest_dir_id]
blobs: source_file.blobs.clone()
mime_type: source_file.mime_type
checksum: source_file.checksum
metadata: source_file.metadata.clone()
description: source_file.description
)!
self.factory.fs_file.set(new_file)!
}
// Copy a symlink to destination path
fn (mut self FsTools) cp_symlink(fs_id u32, symlink_id u32, dest_path string, opts CopyOptions) ! {
source_symlink := self.factory.fs_symlink.get(symlink_id)!
if opts.follow_symlinks {
// Follow the symlink and copy its target instead
if source_symlink.target_type == .file {
@@ -476,16 +482,16 @@ fn (mut self FsTools) cp_symlink(fs_id u32, symlink_id u32, dest_path string, op
}
return
}
// Copy the symlink itself
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_symlink.name
}
// Ensure destination directory exists
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination symlink already exists
if existing_symlink := self.factory.fs_symlink.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -493,38 +499,38 @@ fn (mut self FsTools) cp_symlink(fs_id u32, symlink_id u32, dest_path string, op
}
self.factory.fs_symlink.delete(existing_symlink.id)!
}
// Create new symlink
new_symlink := self.factory.fs_symlink.new(
name: dest_filename
fs_id: fs_id
parent_id: dest_dir_id
target_id: source_symlink.target_id
target_type: source_symlink.target_type
description: source_symlink.description
)!
self.factory.fs_symlink.set(new_symlink)!
}
// Copy a directory to destination path
fn (mut self FsTools) cp_directory(fs_id u32, source_dir_id u32, source_path string, dest_path string, opts CopyOptions) ! {
source_dir := self.factory.fs_dir.get(source_dir_id)!
// Create destination directory
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_path)!
if !opts.recursive {
-return // Just create the directory, don't copy contents
+return
}
// Copy all files in the source directory
files := self.factory.fs_file.list_by_directory(source_dir_id)!
for file in files {
file_dest_path := join_path(dest_path, file.name)
self.cp_file(fs_id, file.id, file_dest_path, opts)!
}
// Copy all symlinks in the source directory
if opts.preserve_links {
symlinks := self.factory.fs_symlink.list_by_parent(source_dir_id)!
@@ -533,7 +539,7 @@ fn (mut self FsTools) cp_directory(fs_id u32, source_dir_id u32, source_path str
self.cp_symlink(fs_id, symlink.id, symlink_dest_path, opts)!
}
}
// Copy all subdirectories recursively
subdirs := self.factory.fs_dir.list_children(source_dir_id)!
for subdir in subdirs {
@@ -546,14 +552,15 @@ fn (mut self FsTools) cp_directory(fs_id u32, source_dir_id u32, source_path str
self.cp_directory(fs_id, subdir.id, subdir_source_path, subdir_dest_path, opts)!
}
}
// Move filesystem objects from source path to destination path
pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, opts MoveOptions) ! {
normalized_source := normalize_path(source_path)
normalized_dest := normalize_path(dest_path)
// Determine what we're moving
source_dir_path, source_filename := split_path(normalized_source)
if source_filename == '' {
// We're moving a directory
source_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, normalized_source)!
@@ -561,16 +568,16 @@ pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, op
} else {
// We're moving a specific item
source_parent_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, source_dir_path)!
// Try to find what we're moving
mut found := false
// Try file first
if file := self.factory.fs_file.get_by_path(source_parent_dir.id, source_filename) {
self.mv_file(fs_id, file.id, normalized_dest, opts)!
found = true
}
// Try symlink if file not found
if !found {
if symlink := self.factory.fs_symlink.get_by_path(source_parent_dir.id, source_filename) {
@@ -578,15 +585,17 @@ pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, op
found = true
}
}
// Try directory if neither file nor symlink found
if !found {
-if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id, source_filename) {
+if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id,
+source_filename)
+{
self.mv_directory(fs_id, subdir.id, normalized_dest)!
found = true
}
}
if !found {
return error('Source path "${source_path}" not found')
}
@@ -596,16 +605,16 @@ pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, op
// Move a file to destination path
fn (mut self FsTools) mv_file(fs_id u32, file_id u32, dest_path string, opts MoveOptions) ! {
source_file := self.factory.fs_file.get(file_id)!
// Determine destination directory and filename
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_file.name
}
// Ensure destination directory exists
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination file already exists
if existing_file := self.factory.fs_file.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -614,12 +623,12 @@ fn (mut self FsTools) mv_file(fs_id u32, file_id u32, dest_path string, opts Mov
// Remove existing file
self.factory.fs_file.delete(existing_file.id)!
}
// Update file name if it's different
if dest_filename != source_file.name {
self.factory.fs_file.rename(file_id, dest_filename)!
}
// Move file to new directory (replace all directory associations)
self.factory.fs_file.move(file_id, [dest_dir_id])!
}
@@ -627,7 +636,7 @@ fn (mut self FsTools) mv_file(fs_id u32, file_id u32, dest_path string, opts Mov
// Move a symlink to destination path
fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, opts MoveOptions) ! {
source_symlink := self.factory.fs_symlink.get(symlink_id)!
if opts.follow_symlinks {
// Follow the symlink and move its target instead
if source_symlink.target_type == .file {
@@ -639,16 +648,16 @@ fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, op
self.factory.fs_symlink.delete(symlink_id)!
return
}
// Move the symlink itself
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_symlink.name
}
// Ensure destination directory exists
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination symlink already exists
if existing_symlink := self.factory.fs_symlink.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -656,12 +665,12 @@ fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, op
}
self.factory.fs_symlink.delete(existing_symlink.id)!
}
// Update symlink name if it's different
if dest_filename != source_symlink.name {
self.factory.fs_symlink.rename(symlink_id, dest_filename)!
}
// Move symlink to new parent directory
self.factory.fs_symlink.move(symlink_id, dest_dir_id)!
}
@@ -669,13 +678,13 @@ fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, op
// Move a directory to destination path
fn (mut self FsTools) mv_directory(fs_id u32, source_dir_id u32, dest_path string) ! {
source_dir := self.factory.fs_dir.get(source_dir_id)!
// Parse destination path
dest_parent_path, mut dest_dirname := split_path(dest_path)
if dest_dirname == '' {
dest_dirname = source_dir.name
}
// Ensure destination parent directory exists
dest_parent_id := if dest_parent_path == '/' {
// Moving to root level, find root directory
@@ -684,12 +693,12 @@ fn (mut self FsTools) mv_directory(fs_id u32, source_dir_id u32, dest_path strin
} else {
self.factory.fs_dir.create_path(fs_id, dest_parent_path)!
}
// Update directory name if it's different
if dest_dirname != source_dir.name {
self.factory.fs_dir.rename(source_dir_id, dest_dirname)!
}
// Move directory to new parent
self.factory.fs_dir.move(source_dir_id, dest_parent_id)!
}
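To round off the file, a hedged sketch of the two public entry points `cp` and `mv`, reusing the `tools` value from the `find` sketch. The `CopyOptions` fields are taken from the struct near the top of this file, while `MoveOptions.overwrite` is assumed from its use as `opts.overwrite` in `mv_file` and `mv_symlink`:

	// copy a directory tree, keeping symlinks as links and refusing to overwrite
	tools.cp(fs_id, '/projects/docs', '/archive/docs', CopyOptions{
		recursive:      true
		preserve_links: true
		overwrite:      false
	})!
	// move a single file, replacing whatever is already at the destination
	tools.mv(fs_id, '/archive/docs/readme.md', '/archive/README.md', MoveOptions{
		overwrite: true
	})!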

View File

@@ -44,11 +44,11 @@ pub fn fs_set(request Request) !Response {
mut fs_factory := herofs.new()!
mut fs_obj := fs_factory.fs.new(
name: payload.name
description: payload.description
quota_bytes: payload.quota_bytes
)!
if payload.root_dir_id > 0 {
fs_obj.root_dir_id = payload.root_dir_id
}

View File

@@ -33,25 +33,25 @@ pub fn fs_blob_get(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Get blob by either id or hash
mut blob := if payload.id > 0 {
fs_factory.fs_blob.get(payload.id)!
} else if payload.hash != '' {
fs_factory.fs_blob.get_by_hash(payload.hash)!
} else {
-return jsonrpc.invalid_params_with_msg("Either id or hash must be provided")
+return jsonrpc.invalid_params_with_msg('Either id or hash must be provided')
}
// Convert binary data to base64 for JSON transport
blob_response := {
'id': blob.id.str()
'created_at': blob.created_at.str()
'updated_at': blob.updated_at.str()
'mime_type': blob.mime_type
'name': blob.name
'hash': blob.hash
'size_bytes': blob.size_bytes.str()
'data_base64': base64.encode(blob.data)
}
@@ -65,14 +65,14 @@ pub fn fs_blob_set(request Request) !Response {
// Decode the base64 data
data := base64.decode(payload.data_base64) or {
-return jsonrpc.invalid_params_with_msg("Invalid base64 data")
+return jsonrpc.invalid_params_with_msg('Invalid base64 data')
}
mut fs_factory := herofs.new()!
mut blob_obj := fs_factory.fs_blob.new(
data: data
mime_type: payload.mime_type
name: payload.name
)!
id := fs_factory.fs_blob.set(blob_obj)!
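On the client side, the `data_base64` field decoded above would be produced with the same `encoding.base64` module. A small self-contained sketch of preparing an `fs_blob_set` payload (field names taken from the handler above; transport details omitted):

	import encoding.base64

	fn main() {
		content := 'hello world'.bytes()
		payload := {
			'name':        'hello.txt'
			'mime_type':   'text/plain'
			'data_base64': base64.encode(content)
		}
		println(payload)
	}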

View File

@@ -10,7 +10,7 @@ pub struct FSDirGetArgs {
pub mut:
id u32
path string // Allow getting a directory by path
fs_id u32 // Required when using path
}
@[params]
@@ -29,14 +29,14 @@ pub struct FSDirDeleteArgs {
pub mut:
id u32
path string // Allow deleting a directory by path
fs_id u32 // Required when using path
}
@[params]
pub struct FSDirMoveArgs {
pub mut:
id u32
parent_id u32
source_path string // Allow moving using paths
dest_path string
fs_id u32 // Required when using paths
@@ -66,7 +66,7 @@ pub struct FSDirListContentsArgs {
pub mut:
dir_id u32
path string // Allow listing contents by path
fs_id u32 // Required when using path
recursive bool
include []string // Patterns to include
exclude []string // Patterns to exclude
@@ -78,14 +78,14 @@ pub fn fs_dir_get(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Handle either path-based or ID-based retrieval
mut dir := if payload.path != '' && payload.fs_id > 0 {
fs_factory.fs_dir.get_by_absolute_path(payload.fs_id, payload.path)!
} else if payload.id > 0 {
fs_factory.fs_dir.get(payload.id)!
} else {
-return jsonrpc.invalid_params_with_msg("Either id or both path and fs_id must be provided")
+return jsonrpc.invalid_params_with_msg('Either id or both path and fs_id must be provided')
}
return jsonrpc.new_response(request.id, json.encode(dir))
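The validation above mirrors the `FSDirGetArgs` struct at the top of this file: a lookup needs either an `id`, or a `path` together with `fs_id`. A brief illustration of both argument shapes:

	// path-based lookup: path and fs_id must both be set
	args_by_path := FSDirGetArgs{
		path:  '/projects/docs'
		fs_id: 1
	}
	// id-based lookup: only the id is needed
	args_by_id := FSDirGetArgs{
		id: 42
	}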
@@ -97,20 +97,20 @@ pub fn fs_dir_set(request Request) !Response {
}
mut fs_factory := herofs.new()!
mut dir_id := u32(0)
// Handle path-based creation
if payload.path != '' {
dir_id = fs_factory.fs_dir.create_path(payload.fs_id, payload.path)!
} else {
// Handle traditional creation
mut dir_obj := fs_factory.fs_dir.new(
name: payload.name
fs_id: payload.fs_id
parent_id: payload.parent_id
description: payload.description
metadata: payload.metadata
)!
dir_id = fs_factory.fs_dir.set(dir_obj)!
}
@@ -124,14 +124,14 @@ pub fn fs_dir_delete(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Handle either path-based or ID-based deletion
if payload.path != '' && payload.fs_id > 0 {
fs_factory.fs_dir.delete_by_path(payload.fs_id, payload.path)!
} else if payload.id > 0 {
fs_factory.fs_dir.delete(payload.id)!
} else {
-return jsonrpc.invalid_params_with_msg("Either id or both path and fs_id must be provided")
+return jsonrpc.invalid_params_with_msg('Either id or both path and fs_id must be provided')
}
return new_response_true(request.id)
@@ -150,14 +150,14 @@ pub fn fs_dir_move(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Handle either path-based or ID-based move
if payload.source_path != '' && payload.dest_path != '' && payload.fs_id > 0 {
fs_factory.fs_dir.move_by_path(payload.fs_id, payload.source_path, payload.dest_path)!
} else if payload.id > 0 && payload.parent_id > 0 {
fs_factory.fs_dir.move(payload.id, payload.parent_id)!
} else {
-return jsonrpc.invalid_params_with_msg("Either id and parent_id, or source_path, dest_path and fs_id must be provided")
+return jsonrpc.invalid_params_with_msg('Either id and parent_id, or source_path, dest_path and fs_id must be provided')
}
return new_response_true(request.id)
@@ -203,7 +203,7 @@ pub fn fs_dir_list_contents(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Get directory ID either directly or from path
mut dir_id := if payload.path != '' && payload.fs_id > 0 {
dir := fs_factory.fs_dir.get_by_absolute_path(payload.fs_id, payload.path)!
@@ -211,16 +211,16 @@ pub fn fs_dir_list_contents(request Request) !Response {
} else if payload.dir_id > 0 {
payload.dir_id
} else {
-return jsonrpc.invalid_params_with_msg("Either dir_id or both path and fs_id must be provided")
+return jsonrpc.invalid_params_with_msg('Either dir_id or both path and fs_id must be provided')
}
// Create options struct
opts := herofs.ListContentsOptions{
recursive: payload.recursive
include_patterns: payload.include
exclude_patterns: payload.exclude
}
// List contents with filters
contents := fs_factory.fs_dir.list_contents(&fs_factory, dir_id, opts)!

View File

@@ -14,8 +14,8 @@ pub mut:
@[params]
pub struct FSFileSetArgs {
pub mut:
name string @[required]
fs_id u32 @[required]
directories []u32
blobs []u32
mime_type string
@@ -99,12 +99,12 @@ pub fn fs_file_set(request Request) !Response {
mut fs_factory := herofs.new()!
mut file_obj := fs_factory.fs_file.new(
name: payload.name
fs_id: payload.fs_id
directories: payload.directories
blobs: payload.blobs
mime_type: payload.mime_type
metadata: payload.metadata
)!
id := fs_factory.fs_file.set(file_obj)!

View File

@@ -65,10 +65,10 @@ pub fn fs_symlink_set(request Request) !Response {
mut fs_factory := herofs.new()!
mut symlink_obj := fs_factory.fs_symlink.new(
name: payload.name
fs_id: payload.fs_id
parent_id: payload.parent_id
target_id: payload.target_id
target_type: target_type
description: payload.description
)!

View File

@@ -78,6 +78,7 @@ pub mut:
pub fn (self CalendarEvent) type_name() string {
return 'calendar_event'
}
// return example rpc call and result for each methodname
pub fn (self CalendarEvent) description(methodname string) string {
match methodname {

View File

@@ -29,6 +29,7 @@ pub mut:
pub fn (self ChatGroup) type_name() string {
return 'chat_group'
}
// return example rpc call and result for each methodname
pub fn (self ChatGroup) description(methodname string) string {
match methodname {

View File

@@ -66,6 +66,7 @@ pub mut:
pub fn (self ChatMessage) type_name() string {
return 'chat_message'
}
// return example rpc call and result for each methodname
pub fn (self ChatMessage) description(methodname string) string {
match methodname {

View File

@@ -25,6 +25,7 @@ pub mut:
pub fn (self Comment) type_name() string {
	return 'comments'
}

// return example rpc call and result for each methodname
pub fn (self Comment) description(methodname string) string {
	match methodname {

View File

@@ -32,6 +32,7 @@ pub enum GroupRole {
pub fn (self Group) type_name() string {
	return 'group'
}

// return example rpc call and result for each methodname
pub fn (self Group) description(methodname string) string {
	match methodname {

View File

@@ -52,6 +52,7 @@ pub mut:
pub fn (self Project) type_name() string {
	return 'project'
}

// return example rpc call and result for each methodname
pub fn (self Project) description(methodname string) string {
	match methodname {

View File

@@ -61,6 +61,7 @@ pub mut:
pub fn (self ProjectIssue) type_name() string {
	return 'project_issue'
}

// return example rpc call and result for each methodname
pub fn (self ProjectIssue) description(methodname string) string {
	match methodname {

View File

@@ -6,115 +6,112 @@ import rand
import time

pub struct AuthConfig {
pub mut:
	// Add any authentication-related configuration here
	// For now, it can be empty or have default values
}

pub struct AuthManager {
mut:
	registered_keys map[string]string // pubkey -> user_id
	pending_auths map[string]AuthChallenge // challenge -> challenge_data
	active_sessions map[string]Session // session_key -> session_data
}

pub struct AuthChallenge {
pub:
	pubkey string
	challenge string
	created_at i64
	expires_at i64
}

pub struct Session {
pub:
	user_id string
	pubkey string
	created_at i64
	expires_at i64
}

pub fn new_auth_manager(config AuthConfig) &AuthManager {
	// Use config if needed, for now it's just passed
	_ = config
	return &AuthManager{}
}

// Register public key
pub fn (mut am AuthManager) register_pubkey(pubkey string) !string {
	// Validate pubkey format
	if pubkey.len != 64 { // ed25519 pubkey length
		return error('Invalid public key format')
	}
	user_id := md5.hexhash(pubkey + time.now().unix().str())
	am.registered_keys[pubkey] = user_id
	return user_id
}

// Generate authentication challenge
pub fn (mut am AuthManager) create_auth_challenge(pubkey string) !string {
	// Check if pubkey is registered
	if pubkey !in am.registered_keys {
		return error('Public key not registered')
	}
	// Generate unique challenge
	random_data := rand.string(32)
	challenge := md5.hexhash(pubkey + random_data + time.now().unix().str())
	now := time.now().unix()
	am.pending_auths[challenge] = AuthChallenge{
		pubkey: pubkey
		challenge: challenge
		created_at: now
		expires_at: now + 300 // 5 minutes
	}
	return challenge
}

// Verify signature and create session
pub fn (mut am AuthManager) verify_and_create_session(challenge string, signature string) !string {
	// Get challenge data
	auth_challenge := am.pending_auths[challenge] or {
		return error('Invalid or expired challenge')
	}
	// Check expiration
	if time.now().unix() > auth_challenge.expires_at {
		am.pending_auths.delete(challenge)
		return error('Challenge expired')
	}
	// Verify signature
	pubkey_bytes := auth_challenge.pubkey.bytes()
	challenge_bytes := challenge.bytes()
	signature_bytes := signature.bytes()
	ed25519.verify(pubkey_bytes, challenge_bytes, signature_bytes) or {
		return error('Invalid signature')
	}
	// Create session
	session_key := md5.hexhash(auth_challenge.pubkey + time.now().unix().str() + rand.string(16))
	now := time.now().unix()
	am.active_sessions[session_key] = Session{
		user_id: am.registered_keys[auth_challenge.pubkey]
		pubkey: auth_challenge.pubkey
		created_at: now
		expires_at: now + 3600 // 1 hour
	}
	// Clean up challenge
	am.pending_auths.delete(challenge)
	return session_key
}

// Validate session
pub fn (am AuthManager) validate_session(session_key string) bool {
	session := am.active_sessions[session_key] or { return false }
	return time.now().unix() < session.expires_at
}
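
For orientation, the intended challenge-response flow of the AuthManager above can be sketched as follows. This is a hypothetical driver, not part of this commit: the placeholder key and placeholder signature are assumptions for illustration, and a real client would sign the challenge with its ed25519 private key before calling verify_and_create_session.

// Hypothetical driver for the challenge-response flow above; assumes it lives in the same module.
fn example_auth_flow() ! {
	mut am := new_auth_manager(AuthConfig{})
	pubkey := 'a'.repeat(64) // placeholder: a 64-character hex-encoded ed25519 public key is expected
	user_id := am.register_pubkey(pubkey)!
	challenge := am.create_auth_challenge(pubkey)!
	// The client signs `challenge` with its ed25519 private key; a placeholder will not verify.
	signature := 'placeholder-signature'
	session_key := am.verify_and_create_session(challenge, signature) or {
		println('verification failed (expected with a placeholder signature): ${err}')
		return
	}
	assert am.validate_session(session_key)
	println('authenticated as ${user_id}')
}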

View File

@@ -6,80 +6,80 @@ import freeflowuniverse.herolib.schemas.jsonschema
// Generate HTML documentation for handler type
pub fn (s HeroServer) generate_documentation(handler_type string, handler openrpc.Handler) !string {
	spec := s.handler_registry.get_spec(handler_type) or {
		return error('No spec found for handler type: ${handler_type}')
	}
	// Load and process template
	template_path := os.join_path(@VMODROOT, 'lib/hero/heroserver/templates/doc.md')
	template_content := os.read_file(template_path) or {
		return error('Failed to read documentation template: ${err}')
	}
	// Process template with spec data
	doc_content := process_doc_template(template_content, spec, handler_type)
	// Return HTML with Bootstrap and markdown processing
	return generate_html_wrapper(doc_content, handler_type)
}

// Process the markdown template with OpenRPC spec data
fn process_doc_template(template string, spec openrpc.OpenRPC, handler_type string) string {
	mut content := template
	// Replace template variables
	content = content.replace('@{handler_type}', handler_type)
	content = content.replace('@{spec.info.title}', spec.info.title)
	content = content.replace('@{spec.info.description}', spec.info.description)
	content = content.replace('@{spec.info.version}', spec.info.version)
	// Generate methods documentation
	mut methods_doc := ''
	for method in spec.methods {
		methods_doc += generate_method_doc(method)
	}
	content = content.replace('@{methods}', methods_doc)
	return content
}

// Generate documentation for a single method
fn generate_method_doc(method openrpc.Method) string {
	mut doc := '## ${method.name}\n\n'
	if method.description.len > 0 {
		doc += '${method.description}\n\n'
	}
	// Parameters
	if method.params.len > 0 {
		doc += '### Parameters\n\n'
		for param in method.params {
			// Handle both ContentDescriptor and Reference
			if param is openrpc.ContentDescriptor {
				if param.schema is jsonschema.Schema {
					schema := param.schema as jsonschema.Schema
					doc += '- **${param.name}** (${schema.typ}): ${param.description}\n'
				}
			}
		}
		doc += '\n'
	}
	// Result
	if method.result is openrpc.ContentDescriptor {
		result := method.result as openrpc.ContentDescriptor
		doc += '### Returns\n\n'
		doc += '${result.description}\n\n'
	}
	// Examples (would need to be added to OpenRPC spec or handled differently)
	doc += '### Example\n\n'
	doc += '```json\n'
	doc += '// Request example would go here\n'
	doc += '```\n\n'
	return doc
}

// Generate HTML wrapper with Bootstrap
@@ -89,4 +89,4 @@ fn generate_html_wrapper(markdown_content string, handler_type string) string {
	template_content = template_content.replace('@{handler_type}', handler_type)
	template_content = template_content.replace('@{markdown_content}', markdown_content)
	return template_content
}

View File

@@ -1,19 +1,18 @@
module heroserver

@[params]
pub struct ServerConfig {
pub:
	port int = 8080
	host string = 'localhost'
}

// Factory function to create new server instance
pub fn new_server(config ServerConfig) !&HeroServer {
	mut server := &HeroServer{
		config: config
		auth_manager: new_auth_manager()
		handler_registry: new_handler_registry()
	}
	return server
}
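
Because ServerConfig is tagged @[params], the factory accepts named arguments. A minimal sketch, assuming only what is shown in this file (actually starting the server is out of scope here):

// Hypothetical construction of a server on a non-default port.
fn example_new_server() ! {
	mut server := new_server(port: 9000, host: '0.0.0.0')!
	_ = server
	// starting/serving the instance (e.g. over veb) is handled elsewhere and not shown in this diff
}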

View File

@@ -4,31 +4,31 @@ import freeflowuniverse.herolib.schemas.openrpc
pub struct HandlerRegistry {
mut:
	handlers map[string]openrpc.Handler
	specs map[string]openrpc.OpenRPC
}

pub fn new_handler_registry() &HandlerRegistry {
	return &HandlerRegistry{}
}

// Register OpenRPC handler with type name
pub fn (mut hr HandlerRegistry) register(handler_type string, handler openrpc.Handler, spec openrpc.OpenRPC) {
	hr.handlers[handler_type] = handler
	hr.specs[handler_type] = spec
}

// Get handler by type
pub fn (hr HandlerRegistry) get(handler_type string) ?openrpc.Handler {
	return hr.handlers[handler_type]
}

// Get OpenRPC spec by type
pub fn (hr HandlerRegistry) get_spec(handler_type string) ?openrpc.OpenRPC {
	return hr.specs[handler_type]
}

// List all registered handler types
pub fn (hr HandlerRegistry) list_types() []string {
	return hr.handlers.keys()
}
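
A minimal sketch of how the registry above is meant to be wired; my_handler and my_spec are hypothetical parameters assumed to come from elsewhere, they are not defined in this commit.

// Hypothetical wiring of the handler registry above.
fn example_registry_usage(my_handler openrpc.Handler, my_spec openrpc.OpenRPC) {
	mut registry := new_handler_registry()
	registry.register('calendar_event', my_handler, my_spec)
	if handler := registry.get('calendar_event') {
		_ = handler
		println('registered handler types: ${registry.list_types()}')
	}
}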

View File

@@ -8,7 +8,7 @@ import freeflowuniverse.herolib.heroserver.handlers
pub struct ServerConfig {
pub mut:
	port int = 8080
	auth_config auth.AuthConfig
}
@@ -67,7 +67,7 @@ pub fn (mut s HeroServer) api(mut ctx Context) veb.Result {
	request := jsonrpc.decode_request(ctx.req.data) or {
		return ctx.request_error('Invalid JSON-RPC request')
	}
	response := handler.handle(request) or { return ctx.server_error('Handler error') }
	return ctx.json(response)

View File

@@ -1,7 +1,7 @@
module codegen module codegen
import log import log
import freeflowuniverse.herolib.core.code { Alias, Array, Attribute, CodeItem, Object, Struct, StructField, Type, type_from_symbol } import freeflowuniverse.herolib.develop.codetools as code { Alias, Array, Attribute, CodeItem, Object, Struct, StructField, Type, type_from_symbol }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }
const vtypes = { const vtypes = {

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { Param, Struct, Type } import freeflowuniverse.herolib.develop.codetools as code { Param, Struct, Type }
import freeflowuniverse.herolib.schemas.jsonschema { Number, Reference, Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Number, Reference, Schema, SchemaRef }
// struct_to_schema generates a json schema or reference from a struct model // struct_to_schema generates a json schema or reference from a struct model

View File

@@ -1,7 +1,7 @@
module codegen module codegen
import log import log
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
fn test_struct_to_schema() { fn test_struct_to_schema() {
struct_ := code.Struct{ struct_ := code.Struct{

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { File, Folder } import freeflowuniverse.herolib.develop.codetools as code { File, Folder }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema } import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema }
import freeflowuniverse.herolib.schemas.jsonschema.codegen { schema_to_struct } import freeflowuniverse.herolib.schemas.jsonschema.codegen { schema_to_struct }

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }
import freeflowuniverse.herolib.schemas.jsonschema.codegen import freeflowuniverse.herolib.schemas.jsonschema.codegen

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schemaref_to_type } import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schemaref_to_type }
import freeflowuniverse.herolib.schemas.openapi { ResponseSpec } import freeflowuniverse.herolib.schemas.openapi { ResponseSpec }

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { Function, Module, Struct } import freeflowuniverse.herolib.develop.codetools as code { Function, Module, Struct }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC } import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
// pub struct OpenRPCCode { // pub struct OpenRPCCode {

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Struct, VFile, parse_function } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Struct, VFile, parse_function }
// import freeflowuniverse.herolib.schemas.jsonrpc.codegen {generate_client_struct} // import freeflowuniverse.herolib.schemas.jsonrpc.codegen {generate_client_struct}
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC } import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Param, Struct, VFile, parse_import } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Param, Struct, VFile, parse_import }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC } import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import rand import rand

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { CustomCode, VFile, parse_function, parse_import } import freeflowuniverse.herolib.develop.codetools as code { CustomCode, VFile, parse_function, parse_import }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC } import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { CodeItem } import freeflowuniverse.herolib.develop.codetools as code { CodeItem }
import freeflowuniverse.herolib.schemas.jsonschema { Schema } import freeflowuniverse.herolib.schemas.jsonschema { Schema }
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_code } import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_code }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC } import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }

View File

@@ -2,7 +2,7 @@ module codegen
import os import os
import json import json
import freeflowuniverse.herolib.core.code { Alias, Struct } import freeflowuniverse.herolib.develop.codetools as code { Alias, Struct }
import freeflowuniverse.herolib.core.pathlib import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.schemas.openrpc import freeflowuniverse.herolib.schemas.openrpc

View File

@@ -1,6 +1,6 @@
module codegen module codegen
import freeflowuniverse.herolib.core.code { Function, Struct } import freeflowuniverse.herolib.develop.codetools as code { Function, Struct }
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_struct, schemaref_to_type } import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_struct, schemaref_to_type }
import freeflowuniverse.herolib.schemas.jsonschema { Schema } import freeflowuniverse.herolib.schemas.jsonschema { Schema }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, Method } import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, Method }

View File

@@ -1,7 +1,7 @@
module openrpc module openrpc
import freeflowuniverse.herolib.schemas.jsonschema { Reference } import freeflowuniverse.herolib.schemas.jsonschema { Reference }
import freeflowuniverse.herolib.core.code { Struct, StructField } import freeflowuniverse.herolib.develop.codetools as code { Struct, StructField }
import x.json2 import x.json2
pub fn parse_example_pairing(text_ string) !ExamplePairing { pub fn parse_example_pairing(text_ string) !ExamplePairing {

View File

@@ -1,6 +1,6 @@
module openrpc module openrpc
import freeflowuniverse.herolib.core.code { Attribute, Struct, StructField } import freeflowuniverse.herolib.develop.codetools as code { Attribute, Struct, StructField }
const example_txt = " const example_txt = "
Example: Get pet example. Example: Get pet example.

View File

@@ -10,7 +10,6 @@ module models
// - Location models (addresses) // - Location models (addresses)
// Import all model modules for easy access // Import all model modules for easy access
import freeflowuniverse.herolib.threefold.models.core import freeflowuniverse.herolib.threefold.models.core
import freeflowuniverse.herolib.threefold.models.finance import freeflowuniverse.herolib.threefold.models.finance
import freeflowuniverse.herolib.threefold.models.flow import freeflowuniverse.herolib.threefold.models.flow

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema.codegen import freeflowuniverse.herolib.schemas.jsonschema.codegen
import freeflowuniverse.herolib.schemas.openrpc.codegen as openrpc_codegen import freeflowuniverse.herolib.schemas.openrpc.codegen as openrpc_codegen

View File

@@ -2,7 +2,7 @@ module generator
import x.json2 as json import x.json2 as json
import arrays import arrays
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.baobab.specification import freeflowuniverse.herolib.baobab.specification
import freeflowuniverse.herolib.schemas.openrpc import freeflowuniverse.herolib.schemas.openrpc
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema

View File

@@ -1,7 +1,7 @@
module generator module generator
import freeflowuniverse.herolib.baobab.specification import freeflowuniverse.herolib.baobab.specification
import freeflowuniverse.herolib.core.code { Param, Param, type_from_symbol } import freeflowuniverse.herolib.develop.codetools as code { Param, Param, type_from_symbol }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
const id_param = Param{ const id_param = Param{

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.baobab.specification import freeflowuniverse.herolib.baobab.specification
import rand import rand
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Import, Object, Param, Result, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Import, Object, Param, Result, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, Example } import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, Example }
import freeflowuniverse.herolib.schemas.jsonschema.codegen { schemaref_to_type } import freeflowuniverse.herolib.schemas.jsonschema.codegen { schemaref_to_type }

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { File, Folder, IFile, IFolder } import freeflowuniverse.herolib.develop.codetools as code { File, Folder, IFile, IFolder }
import freeflowuniverse.herolib.schemas.openapi import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.baobab.specification { ActorInterface, ActorSpecification } import freeflowuniverse.herolib.baobab.specification { ActorInterface, ActorSpecification }

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CustomCode, IFile, IFolder, Module, VFile } import freeflowuniverse.herolib.develop.codetools as code { CustomCode, IFile, IFolder, Module, VFile }
import freeflowuniverse.herolib.schemas.openapi import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.baobab.specification { ActorInterface, ActorSpecification } import freeflowuniverse.herolib.baobab.specification { ActorInterface, ActorSpecification }

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.baobab.specification import freeflowuniverse.herolib.baobab.specification
import freeflowuniverse.herolib.schemas.openrpc import freeflowuniverse.herolib.schemas.openrpc
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Import, Param, Result, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Import, Param, Result, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schemaref_to_type } import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schemaref_to_type }
import freeflowuniverse.herolib.schemas.openrpc.codegen { content_descriptor_to_parameter } import freeflowuniverse.herolib.schemas.openrpc.codegen { content_descriptor_to_parameter }

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Import, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Import, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.baobab.specification { ActorMethod, ActorSpecification } import freeflowuniverse.herolib.baobab.specification { ActorMethod, ActorSpecification }

View File

@@ -1,7 +1,7 @@
module generator module generator
import freeflowuniverse.herolib.baobab.specification { ActorInterface } import freeflowuniverse.herolib.baobab.specification { ActorInterface }
import freeflowuniverse.herolib.core.code { CustomCode, VFile } import freeflowuniverse.herolib.develop.codetools as code { CustomCode, VFile }
fn generate_openrpc_interface_files(interfaces []ActorInterface) (VFile, VFile) { fn generate_openrpc_interface_files(interfaces []ActorInterface) (VFile, VFile) {
http := ActorInterface.http in interfaces http := ActorInterface.http in interfaces

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, Function, Import, Param, Result, Struct, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Function, Import, Param, Result, Struct, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openapi import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.schemas.openrpc import freeflowuniverse.herolib.schemas.openrpc

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, Function, Import, Param, Result, Struct, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Function, Import, Param, Result, Struct, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc { Example } import freeflowuniverse.herolib.schemas.openrpc { Example }
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, Import, Param, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Import, Param, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc.codegen import freeflowuniverse.herolib.schemas.openrpc.codegen
import freeflowuniverse.herolib.baobab.specification { ActorSpecification } import freeflowuniverse.herolib.baobab.specification { ActorSpecification }

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { CodeItem, Struct, VFile } import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Struct, VFile }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema.codegen { schema_to_struct } import freeflowuniverse.herolib.schemas.jsonschema.codegen { schema_to_struct }
import freeflowuniverse.herolib.baobab.specification { ActorSpecification } import freeflowuniverse.herolib.baobab.specification { ActorSpecification }

View File

@@ -1,7 +1,7 @@
module generator module generator
import json import json
import freeflowuniverse.herolib.core.code { File, Folder } import freeflowuniverse.herolib.develop.codetools as code { File, Folder }
import freeflowuniverse.herolib.schemas.openapi { OpenAPI, Operation } import freeflowuniverse.herolib.schemas.openapi { OpenAPI, Operation }
import freeflowuniverse.herolib.schemas.openapi.codegen import freeflowuniverse.herolib.schemas.openapi.codegen
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_type } import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_type }

View File

@@ -1,7 +1,7 @@
module generator module generator
import json import json
import freeflowuniverse.herolib.core.code { File, Function, Struct, VFile } import freeflowuniverse.herolib.develop.codetools as code { File, Function, Struct, VFile }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC } import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
import freeflowuniverse.herolib.schemas.openrpc.codegen { generate_client_file, generate_client_test_file } import freeflowuniverse.herolib.schemas.openrpc.codegen { generate_client_file, generate_client_test_file }

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { Function, Param, Result, Struct, Type } import freeflowuniverse.herolib.develop.codetools as code { Function, Param, Result, Struct, Type }
import freeflowuniverse.herolib.schemas.openrpc import freeflowuniverse.herolib.schemas.openrpc
const test_actor_specification = ActorSpecification{ const test_actor_specification = ActorSpecification{

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code { File, Folder } import freeflowuniverse.herolib.develop.codetools as code { File, Folder }
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
// generates the folder with runnable scripts of the actor // generates the folder with runnable scripts of the actor

View File

@@ -1,6 +1,6 @@
module generator module generator
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import os import os
// // generate_object_methods generates CRUD actor methods for a provided structure // // generate_object_methods generates CRUD actor methods for a provided structure

View File

@@ -1,7 +1,7 @@
module specification module specification
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.code { Struct } import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.jsonschema { Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Schema, SchemaRef }
import freeflowuniverse.herolib.schemas.openapi { MediaType, OpenAPI, OperationInfo, Parameter } import freeflowuniverse.herolib.schemas.openapi { MediaType, OpenAPI, OperationInfo, Parameter }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec, Example, ExamplePairing, ExampleRef } import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec, Example, ExamplePairing, ExampleRef }

View File

@@ -1,7 +1,7 @@
module specification module specification
import x.json2 as json import x.json2 as json
import freeflowuniverse.herolib.core.code { Struct } import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec } import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec }
import freeflowuniverse.herolib.schemas.openapi { Components, Info, OpenAPI, Operation, PathItem, ServerSpec } import freeflowuniverse.herolib.schemas.openapi { Components, Info, OpenAPI, Operation, PathItem, ServerSpec }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }

View File

@@ -1,6 +1,6 @@
module specification module specification
import freeflowuniverse.herolib.core.code { Struct } import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor } import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor }
import freeflowuniverse.herolib.schemas.openapi { Components, Info } import freeflowuniverse.herolib.schemas.openapi { Components, Info }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }

View File

@@ -1,6 +1,6 @@
module specification module specification
import freeflowuniverse.herolib.core.code { Struct } import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.openapi import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec, ExamplePairing } import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec, ExamplePairing }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema } import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema }

View File

@@ -1,6 +1,6 @@
module specification module specification
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema { Schema, SchemaRef } import freeflowuniverse.herolib.schemas.jsonschema { Schema, SchemaRef }
import freeflowuniverse.herolib.schemas.openapi import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.schemas.openrpc import freeflowuniverse.herolib.schemas.openrpc

View File

@@ -2,7 +2,7 @@ module baobab
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import x.json2 as json { Any } import x.json2 as json { Any }
import freeflowuniverse.herolib.baobab.generator import freeflowuniverse.herolib.baobab.generator
import freeflowuniverse.herolib.baobab.specification import freeflowuniverse.herolib.baobab.specification

View File

@@ -1,6 +1,6 @@
module mcpgen module mcpgen
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.schemas.jsonschema.codegen import freeflowuniverse.herolib.schemas.jsonschema.codegen

View File

@@ -1,7 +1,7 @@
module mcpgen module mcpgen
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any } import x.json2 as json { Any }
// import json // import json

View File

@@ -1,7 +1,7 @@
module mcp module mcp
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.mcp.rhai.logic import freeflowuniverse.herolib.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.lang.rust import freeflowuniverse.herolib.lang.rust

View File

@@ -1,7 +1,7 @@
module mcp module mcp
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.mcp.rhai.logic import freeflowuniverse.herolib.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any } import x.json2 as json { Any }

View File

@@ -1,7 +1,7 @@
module vcode module vcode
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any } import x.json2 { Any }

View File

@@ -1,7 +1,7 @@
module vcode module vcode
import freeflowuniverse.herolib.mcp import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any } import x.json2 { Any }

View File

@@ -2,7 +2,7 @@ module logic
import freeflowuniverse.herolib.ai.escalayer
import freeflowuniverse.herolib.lang.rust
import freeflowuniverse.herolib.ai.utils import freeflowuniverse.herolib.develop.codetools.utils as ai_utils
import os

pub fn generate_rhai_wrapper(name string, source_path string) !string {
@@ -241,30 +241,30 @@ struct CodeBlocks {
// Extract code blocks from the AI response
fn extract_code_blocks(response string) !CodeBlocks {
	// Extract wrapper.rs content
	wrapper_rs_content := utils.extract_code_block(response, 'wrapper.rs', 'rust') wrapper_rs_content := ai_utils.extract_code_block(response, 'wrapper.rs', 'rust')
	if wrapper_rs_content == '' {
		return error('Failed to extract wrapper.rs content from response. Please ensure your code is properly formatted inside a code block that starts with ```rust\n// wrapper.rs and ends with ```')
	}
	// Extract engine.rs content
	mut engine_rs_content := utils.extract_code_block(response, 'engine.rs', 'rust') mut engine_rs_content := ai_utils.extract_code_block(response, 'engine.rs', 'rust')
	if engine_rs_content == '' {
		// Try to extract from the response without explicit language marker
		engine_rs_content = utils.extract_code_block(response, 'engine.rs', '') engine_rs_content = ai_utils.extract_code_block(response, 'engine.rs', '')
	}
	// Extract example.rhai content
	mut example_rhai_content := utils.extract_code_block(response, 'example.rhai', 'rhai') mut example_rhai_content := ai_utils.extract_code_block(response, 'example.rhai', 'rhai')
	if example_rhai_content == '' {
		// Try to extract from the response without explicit language marker
		example_rhai_content = utils.extract_code_block(response, 'example.rhai', '') example_rhai_content = ai_utils.extract_code_block(response, 'example.rhai', '')
		if example_rhai_content == '' {
			return error('Failed to extract example.rhai content from response. Please ensure your code is properly formatted inside a code block that starts with ```rhai\n// example.rhai and ends with ```')
		}
	}
	// Extract lib.rs content
	lib_rs_content := utils.extract_code_block(response, 'lib.rs', 'rust') lib_rs_content := ai_utils.extract_code_block(response, 'lib.rs', 'rust')
	if lib_rs_content == '' {
		return error('Failed to extract lib.rs content from response. Please ensure your code is properly formatted inside a code block that starts with ```rust\n// lib.rs and ends with ```')
	}

View File

@@ -2,7 +2,7 @@ module logic
import freeflowuniverse.herolib.ai.escalayer
import freeflowuniverse.herolib.lang.rust
import freeflowuniverse.herolib.ai.utils import freeflowuniverse.herolib.develop.codetools.utils as ai_utils
import os

// pub fn generate_rhai_wrapper_sampling(name string, source_path string) !string {
@@ -223,23 +223,23 @@ import os
// // Extract code blocks from the AI response
// fn extract_code_blocks(response string)! CodeBlocks {
// // Extract wrapper.rs content
// wrapper_rs_content := utils.extract_code_block(response, 'wrapper.rs', 'rust') // wrapper_rs_content := ai_utils.extract_code_block(response, 'wrapper.rs', 'rust')
// if wrapper_rs_content == '' {
// return error('Failed to extract wrapper.rs content from response. Please ensure your code is properly formatted inside a code block that starts with ```rust\n// wrapper.rs and ends with ```')
// }
// // Extract engine.rs content
// mut engine_rs_content := utils.extract_code_block(response, 'engine.rs', 'rust') // mut engine_rs_content := ai_utils.extract_code_block(response, 'engine.rs', 'rust')
// if engine_rs_content == '' {
// // Try to extract from the response without explicit language marker
// engine_rs_content = utils.extract_code_block(response, 'engine.rs', '') // engine_rs_content = ai_utils.extract_code_block(response, 'engine.rs', '')
// }
// // Extract example.rhai content
// mut example_rhai_content := utils.extract_code_block(response, 'example.rhai', 'rhai') // mut example_rhai_content := ai_utils.extract_code_block(response, 'example.rhai', 'rhai')
// if example_rhai_content == '' {
// // Try to extract from the response without explicit language marker
// example_rhai_content = utils.extract_code_block(response, 'example.rhai', '') // example_rhai_content = ai_utils.extract_code_block(response, 'example.rhai', '')
// if example_rhai_content == '' {
// return error('Failed to extract example.rhai content from response. Please ensure your code is properly formatted inside a code block that starts with ```rhai\n// example.rhai and ends with ```')
// }

View File

@@ -1,7 +1,7 @@
module mcp module mcp
import freeflowuniverse.herolib.ai.mcp import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.ai.mcp.rhai.logic import freeflowuniverse.herolib.ai.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.lang.rust import freeflowuniverse.herolib.lang.rust

View File

@@ -1,7 +1,7 @@
module mcp module mcp
import freeflowuniverse.herolib.ai.mcp import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.ai.mcp.rhai.logic import freeflowuniverse.herolib.ai.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any } import x.json2 as json { Any }