commit 6a02a45474
parent 95507002c9
2025-09-14 18:25:45 +02:00
78 changed files with 421 additions and 410 deletions

View File

@@ -1,6 +1,6 @@
module mcpgen
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.schemas.jsonschema.codegen

View File

@@ -1,7 +1,7 @@
module mcpgen
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any }
// import json

View File

@@ -1,7 +1,7 @@
module vcode
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

View File

@@ -1,7 +1,7 @@
module vcode
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

View File

@@ -168,7 +168,7 @@ pub fn (mut self DBFsDir) list_by_filesystem(fs_id u32) ![]FsDir {
pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
// Normalize path (remove trailing slashes, handle empty path)
normalized_path := if path == '' || path == '/' { '/' } else { path.trim_right('/') }
if normalized_path == '/' {
// Special case for root directory
dirs := self.list_by_filesystem(fs_id)!
@@ -179,14 +179,14 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
}
return error('Root directory not found for filesystem ${fs_id}')
}
// Split path into components
components := normalized_path.trim_left('/').split('/')
// Start from the root directory
mut current_dir_id := u32(0)
mut dirs := self.list_by_filesystem(fs_id)!
// Find root directory
for dir in dirs {
if dir.parent_id == 0 {
@@ -194,11 +194,11 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
break
}
}
if current_dir_id == 0 {
return error('Root directory not found for filesystem ${fs_id}')
}
// Navigate through path components
for component in components {
mut found := false
@@ -209,15 +209,15 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
break
}
}
if !found {
return error('Directory "${component}" not found in path "${normalized_path}"')
}
// Update dirs for next iteration
dirs = self.list_children(current_dir_id)!
}
return self.get(current_dir_id)!
}
@@ -225,7 +225,7 @@ pub fn (mut self DBFsDir) get_by_absolute_path(fs_id u32, path string) !FsDir {
pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
// Normalize path
normalized_path := if path == '' || path == '/' { '/' } else { path.trim_right('/') }
if normalized_path == '/' {
// Special case for root directory
dirs := self.list_by_filesystem(fs_id)!
@@ -234,24 +234,24 @@ pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
return dir.id
}
}
// Create root directory if it doesn't exist
mut root_dir := self.new(
name: 'root'
fs_id: fs_id
parent_id: 0
name: 'root'
fs_id: fs_id
parent_id: 0
description: 'Root directory'
)!
return self.set(root_dir)!
}
// Split path into components
components := normalized_path.trim_left('/').split('/')
// Start from the root directory
mut current_dir_id := u32(0)
mut dirs := self.list_by_filesystem(fs_id)!
// Find or create root directory
for dir in dirs {
if dir.parent_id == 0 {
@@ -259,18 +259,18 @@ pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
break
}
}
if current_dir_id == 0 {
// Create root directory
mut root_dir := self.new(
name: 'root'
fs_id: fs_id
parent_id: 0
name: 'root'
fs_id: fs_id
parent_id: 0
description: 'Root directory'
)!
current_dir_id = self.set(root_dir)!
}
// Navigate/create through path components
for component in components {
mut found := false
@@ -281,22 +281,22 @@ pub fn (mut self DBFsDir) create_path(fs_id u32, path string) !u32 {
break
}
}
if !found {
// Create this directory component
mut new_dir := self.new(
name: component
fs_id: fs_id
parent_id: current_dir_id
name: component
fs_id: fs_id
parent_id: current_dir_id
description: 'Directory created as part of path ${normalized_path}'
)!
current_dir_id = self.set(new_dir)!
}
// Update directory list for next iteration
dirs = self.list_children(current_dir_id)!
}
return current_dir_id
}
@@ -310,23 +310,23 @@ pub fn (mut self DBFsDir) delete_by_path(fs_id u32, path string) ! {
pub fn (mut self DBFsDir) move_by_path(fs_id u32, source_path string, dest_path string) !u32 {
// Get the source directory
source_dir := self.get_by_absolute_path(fs_id, source_path)!
// For the destination, we need the parent directory
dest_dir_path := dest_path.all_before_last('/')
dest_dir_name := dest_path.all_after_last('/')
dest_parent_dir := if dest_dir_path == '' || dest_dir_path == '/' {
// Moving to the root
self.get_by_absolute_path(fs_id, '/')!
} else {
self.get_by_absolute_path(fs_id, dest_dir_path)!
}
// First rename if the destination name is different
if source_dir.name != dest_dir_name {
self.rename(source_dir.id, dest_dir_name)!
}
// Then move to the new parent
return self.move(source_dir.id, dest_parent_dir.id)!
}
@@ -393,9 +393,9 @@ pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) !u32 {
// List contents of a directory with filtering capabilities
pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, opts ListContentsOptions) !DirectoryContents {
mut result := DirectoryContents{}
// Helper function to check if name matches include/exclude patterns
// Check if item should be included based on patterns
should_include_local := fn (name string, include_patterns []string, exclude_patterns []string) bool {
// Helper function to check if name matches include/exclude patterns
@@ -403,12 +403,12 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
if patterns.len == 0 {
return true // No patterns means include everything
}
for pattern in patterns {
if pattern.contains('*') {
prefix := pattern.all_before('*')
suffix := pattern.all_after('*')
if prefix == '' && suffix == '' {
return true // Pattern is just "*"
} else if prefix == '' {
@@ -428,30 +428,30 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
return true // Exact match
}
}
return false
}
// First apply include patterns (if empty, include everything)
if !matches_pattern_fn(name, include_patterns) && include_patterns.len > 0 {
return false
}
// Then apply exclude patterns
if matches_pattern_fn(name, exclude_patterns) && exclude_patterns.len > 0 {
return false
}
return true
}
// Get directories, files, and symlinks in the current directory
dirs := self.list_children(dir_id)!
for dir in dirs {
if should_include_local(dir.name, opts.include_patterns, opts.exclude_patterns) {
result.directories << dir
}
// If recursive, process subdirectories
if opts.recursive {
sub_contents := self.list_contents(mut fs_factory, dir.id, opts)!
@@ -460,7 +460,7 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
result.symlinks << sub_contents.symlinks
}
}
// Get files in the directory
files := fs_factory.fs_file.list_by_directory(dir_id)!
for file in files {
@@ -468,7 +468,7 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
result.files << file
}
}
// Get symlinks in the directory
symlinks := fs_factory.fs_symlink.list_by_parent(dir_id)!
for symlink in symlinks {
@@ -476,6 +476,6 @@ pub fn (mut self DBFsDir) list_contents(mut fs_factory FsFactory, dir_id u32, op
result.symlinks << symlink
}
}
return result
}
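The path helpers above let callers address directories by absolute path instead of raw IDs: get_by_absolute_path walks the components down from the root, create_path creates any missing components on the way, and move_by_path renames then re-parents. A hedged usage fragment from a caller's perspective; the herofs.new() call and fs_id mirror the JSON-RPC handlers later in this commit, and the path is illustrative:
mut fs_factory := herofs.new()!
// intermediate components ('projects', 'herolib') are created on demand
dir_id := fs_factory.fs_dir.create_path(fs_id, '/projects/herolib/src')!
dir := fs_factory.fs_dir.get_by_absolute_path(fs_id, '/projects/herolib/src')!
assert dir.id == dir_id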

View File

@@ -26,15 +26,15 @@ pub mut:
include_patterns []string // File/directory name patterns to include (e.g. ['*.v', 'doc*'])
exclude_patterns []string // File/directory name patterns to exclude
max_depth int = -1 // Maximum depth to search (-1 for unlimited)
follow_symlinks bool // Whether to follow symbolic links during search
follow_symlinks bool // Whether to follow symbolic links during search
}
// CopyOptions provides options for copy operations
@[params]
pub struct CopyOptions {
pub mut:
recursive bool = true // Copy directories recursively
preserve_links bool = true // Preserve symbolic links as links
recursive bool = true // Copy directories recursively
preserve_links bool = true // Preserve symbolic links as links
overwrite bool // Overwrite existing files
follow_symlinks bool // Follow symlinks instead of copying them
}
@@ -74,12 +74,12 @@ fn matches_pattern(name string, patterns []string) bool {
if patterns.len == 0 {
return true // No patterns means include everything
}
for pattern in patterns {
if pattern.contains('*') {
prefix := pattern.all_before('*')
suffix := pattern.all_after('*')
if prefix == '' && suffix == '' {
return true // Pattern is just "*"
} else if prefix == '' {
@@ -99,7 +99,7 @@ fn matches_pattern(name string, patterns []string) bool {
return true // Exact match
}
}
return false
}
@@ -109,12 +109,12 @@ fn should_include(name string, include_patterns []string, exclude_patterns []str
if !matches_pattern(name, include_patterns) && include_patterns.len > 0 {
return false
}
// Then apply exclude patterns
if matches_pattern(name, exclude_patterns) && exclude_patterns.len > 0 {
return false
}
return true
}
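The matching above splits each pattern around a single '*' into a prefix and a suffix, so '*.v' matches by suffix, 'doc*' by prefix, and a pattern without '*' must match exactly. A self-contained sketch of the same rule, condensed into one expression; the function name is illustrative, not part of the module:
fn matches_sketch(name string, pattern string) bool {
	if !pattern.contains('*') {
		return name == pattern // exact match when there is no wildcard
	}
	prefix := pattern.all_before('*')
	suffix := pattern.all_after('*')
	// empty prefix/suffix match everything, so this covers '*', '*.v' and 'doc*'
	return name.starts_with(prefix) && name.ends_with(suffix)
}

fn main() {
	assert matches_sketch('main.v', '*.v')
	assert matches_sketch('doc_intro.md', 'doc*')
	assert !matches_sketch('readme.md', '*.v')
}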
@@ -132,14 +132,14 @@ fn split_path(path string) (string, string) {
if normalized == '/' {
return '/', ''
}
mut dir_path := normalized.all_before_last('/')
filename := normalized.all_after_last('/')
if dir_path == '' {
dir_path = '/'
}
return dir_path, filename
}
@@ -149,7 +149,7 @@ fn parent_path(path string) string {
if normalized == '/' {
return '/'
}
parent := normalized.all_before_last('/')
if parent == '' {
return '/'
@@ -169,13 +169,13 @@ fn join_path(base string, component string) string {
// Find filesystem objects starting from a given path
pub fn (mut self FsTools) find(fs_id u32, start_path string, opts FindOptions) ![]FindResult {
mut results := []FindResult{}
// Get the starting directory
start_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, start_path)!
// Start recursive search
self.find_recursive(fs_id, start_dir.id, start_path, opts, mut results, 0)!
return results
}
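find resolves the starting directory by path and hands off to find_recursive below, which honours max_depth and the include/exclude patterns. A hedged fragment, assuming a mutable FsTools instance named tools wired to the same factory (the name and path are illustrative):
opts := FindOptions{
	include_patterns: ['*.v']
	max_depth: 3
}
results := tools.find(fs_id, '/projects', opts)!
for r in results {
	println('${r.result_type}: ${r.path}')
}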
@@ -185,20 +185,20 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
if opts.max_depth >= 0 && current_depth > opts.max_depth {
return
}
// Get current directory info
current_dir := self.factory.fs_dir.get(dir_id)!
// Check if current directory matches search criteria
if should_include(current_dir.name, opts.include_patterns, opts.exclude_patterns) {
results << FindResult{
result_type: .directory
id: dir_id
path: current_path
name: current_dir.name
id: dir_id
path: current_path
name: current_dir.name
}
}
// Get files in current directory
files := self.factory.fs_file.list_by_directory(dir_id)!
for file in files {
@@ -206,13 +206,13 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
file_path := join_path(current_path, file.name)
results << FindResult{
result_type: .file
id: file.id
path: file_path
name: file.name
id: file.id
path: file_path
name: file.name
}
}
}
// Get symlinks in current directory
symlinks := self.factory.fs_symlink.list_by_parent(dir_id)!
for symlink in symlinks {
@@ -220,22 +220,23 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
symlink_path := join_path(current_path, symlink.name)
results << FindResult{
result_type: .symlink
id: symlink.id
path: symlink_path
name: symlink.name
id: symlink.id
path: symlink_path
name: symlink.name
}
}
// Follow symlinks if requested and they point to directories
if opts.follow_symlinks && opts.recursive && symlink.target_type == .directory {
// Check if symlink is not broken
if !self.factory.fs_symlink.is_broken(symlink.id)! {
symlink_path := join_path(current_path, symlink.name)
self.find_recursive(fs_id, symlink.target_id, symlink_path, opts, mut results, current_depth + 1)!
self.find_recursive(fs_id, symlink.target_id, symlink_path, opts, mut
results, current_depth + 1)!
}
}
}
// Process subdirectories if recursive
if opts.recursive {
subdirs := self.factory.fs_dir.list_children(dir_id)!
@@ -245,29 +246,30 @@ fn (mut self FsTools) find_recursive(fs_id u32, dir_id u32, current_path string,
}
}
}
// Remove filesystem objects starting from a given path
pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions) ! {
normalized_path := normalize_path(target_path)
// Try to find what we're removing (file, directory, or symlink)
dir_path, filename := split_path(normalized_path)
if filename == '' {
// We're removing a directory by its path
self.rm_directory_by_path(fs_id, normalized_path, opts)!
} else {
// We're removing a specific item within a directory
parent_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, dir_path)!
// Try to find what we're removing
mut found := false
// Try file first
if file := self.factory.fs_file.get_by_path(parent_dir.id, filename) {
self.rm_file(file.id, opts)!
found = true
}
// Try symlink if file not found
if !found {
if symlink := self.factory.fs_symlink.get_by_path(parent_dir.id, filename) {
@@ -275,7 +277,7 @@ pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions)
found = true
}
}
// Try directory if neither file nor symlink found
if !found {
if subdir := self.factory.fs_dir.get_by_path(fs_id, parent_dir.id, filename) {
@@ -283,7 +285,7 @@ pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions)
found = true
}
}
if !found {
return error('Path "${target_path}" not found')
}
@@ -293,18 +295,18 @@ pub fn (mut self FsTools) rm(fs_id u32, target_path string, opts RemoveOptions)
// Remove a file by ID
fn (mut self FsTools) rm_file(file_id u32, opts RemoveOptions) ! {
file := self.factory.fs_file.get(file_id)!
// If file is in multiple directories and force is not set, only remove from directories
if file.directories.len > 1 && !opts.force {
return error('File "${file.name}" exists in multiple directories. Use force=true to delete completely or remove from specific directories.')
}
// Collect blob IDs before deleting the file
blob_ids := file.blobs.clone()
// Delete the file
self.factory.fs_file.delete(file_id)!
// Delete blobs if requested
if opts.delete_blobs {
for blob_id in blob_ids {
@@ -326,11 +328,11 @@ fn (mut self FsTools) rm_directory(dir_id u32, opts RemoveOptions) ! {
dir := self.factory.fs_dir.get(dir_id)!
return error('Directory "${dir.name}" is not empty. Use recursive=true to remove contents.')
}
// Remove all children recursively
self.rm_directory_contents(dir_id, opts)!
}
// Remove the directory itself
self.factory.fs_dir.delete(dir_id)!
}
@@ -348,13 +350,13 @@ fn (mut self FsTools) rm_directory_contents(dir_id u32, opts RemoveOptions) ! {
for file in files {
self.rm_file(file.id, opts)!
}
// Remove all symlinks in the directory
symlinks := self.factory.fs_symlink.list_by_parent(dir_id)!
for symlink in symlinks {
self.rm_symlink(symlink.id)!
}
// Remove all subdirectories recursively
subdirs := self.factory.fs_dir.list_children(dir_id)!
for subdir in subdirs {
@@ -379,14 +381,15 @@ fn (mut self FsTools) is_blob_used_by_other_files(blob_id u32, exclude_file_id u
}
return false
}
// Copy filesystem objects from source path to destination path
pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, opts CopyOptions) ! {
normalized_source := normalize_path(source_path)
normalized_dest := normalize_path(dest_path)
// Determine what we're copying
source_dir_path, source_filename := split_path(normalized_source)
if source_filename == '' {
// We're copying a directory
source_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, normalized_source)!
@@ -394,16 +397,16 @@ pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, op
} else {
// We're copying a specific item
source_parent_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, source_dir_path)!
// Try to find what we're copying
mut found := false
// Try file first
if file := self.factory.fs_file.get_by_path(source_parent_dir.id, source_filename) {
self.cp_file(fs_id, file.id, normalized_dest, opts)!
found = true
}
// Try symlink if file not found
if !found {
if symlink := self.factory.fs_symlink.get_by_path(source_parent_dir.id, source_filename) {
@@ -411,15 +414,18 @@ pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, op
found = true
}
}
// Try directory if neither file nor symlink found
if !found {
if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id, source_filename) {
self.cp_directory(fs_id, subdir.id, normalized_source, normalized_dest, opts)!
if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id,
source_filename)
{
self.cp_directory(fs_id, subdir.id, normalized_source, normalized_dest,
opts)!
found = true
}
}
if !found {
return error('Source path "${source_path}" not found')
}
@@ -429,16 +435,16 @@ pub fn (mut self FsTools) cp(fs_id u32, source_path string, dest_path string, op
// Copy a file to destination path
fn (mut self FsTools) cp_file(fs_id u32, file_id u32, dest_path string, opts CopyOptions) ! {
source_file := self.factory.fs_file.get(file_id)!
// Determine destination directory and filename
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_file.name
}
// Ensure destination directory exists (create if needed)
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination file already exists
if existing_file := self.factory.fs_file.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -447,26 +453,26 @@ fn (mut self FsTools) cp_file(fs_id u32, file_id u32, dest_path string, opts Cop
// Remove existing file
self.factory.fs_file.delete(existing_file.id)!
}
// Create new file with same content (reuse blobs)
new_file := self.factory.fs_file.new(
name: dest_filename
fs_id: fs_id
name: dest_filename
fs_id: fs_id
directories: [dest_dir_id]
blobs: source_file.blobs.clone()
mime_type: source_file.mime_type
checksum: source_file.checksum
metadata: source_file.metadata.clone()
blobs: source_file.blobs.clone()
mime_type: source_file.mime_type
checksum: source_file.checksum
metadata: source_file.metadata.clone()
description: source_file.description
)!
self.factory.fs_file.set(new_file)!
}
// Copy a symlink to destination path
fn (mut self FsTools) cp_symlink(fs_id u32, symlink_id u32, dest_path string, opts CopyOptions) ! {
source_symlink := self.factory.fs_symlink.get(symlink_id)!
if opts.follow_symlinks {
// Follow the symlink and copy its target instead
if source_symlink.target_type == .file {
@@ -476,16 +482,16 @@ fn (mut self FsTools) cp_symlink(fs_id u32, symlink_id u32, dest_path string, op
}
return
}
// Copy the symlink itself
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_symlink.name
}
// Ensure destination directory exists
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination symlink already exists
if existing_symlink := self.factory.fs_symlink.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -493,38 +499,38 @@ fn (mut self FsTools) cp_symlink(fs_id u32, symlink_id u32, dest_path string, op
}
self.factory.fs_symlink.delete(existing_symlink.id)!
}
// Create new symlink
new_symlink := self.factory.fs_symlink.new(
name: dest_filename
fs_id: fs_id
parent_id: dest_dir_id
target_id: source_symlink.target_id
name: dest_filename
fs_id: fs_id
parent_id: dest_dir_id
target_id: source_symlink.target_id
target_type: source_symlink.target_type
description: source_symlink.description
)!
self.factory.fs_symlink.set(new_symlink)!
}
// Copy a directory to destination path
fn (mut self FsTools) cp_directory(fs_id u32, source_dir_id u32, source_path string, dest_path string, opts CopyOptions) ! {
source_dir := self.factory.fs_dir.get(source_dir_id)!
// Create destination directory
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_path)!
if !opts.recursive {
return // Just create the directory, don't copy contents
return
}
// Copy all files in the source directory
files := self.factory.fs_file.list_by_directory(source_dir_id)!
for file in files {
file_dest_path := join_path(dest_path, file.name)
self.cp_file(fs_id, file.id, file_dest_path, opts)!
}
// Copy all symlinks in the source directory
if opts.preserve_links {
symlinks := self.factory.fs_symlink.list_by_parent(source_dir_id)!
@@ -533,7 +539,7 @@ fn (mut self FsTools) cp_directory(fs_id u32, source_dir_id u32, source_path str
self.cp_symlink(fs_id, symlink.id, symlink_dest_path, opts)!
}
}
// Copy all subdirectories recursively
subdirs := self.factory.fs_dir.list_children(source_dir_id)!
for subdir in subdirs {
@@ -546,14 +552,15 @@ fn (mut self FsTools) cp_directory(fs_id u32, source_dir_id u32, source_path str
self.cp_directory(fs_id, subdir.id, subdir_source_path, subdir_dest_path, opts)!
}
}
// Move filesystem objects from source path to destination path
pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, opts MoveOptions) ! {
normalized_source := normalize_path(source_path)
normalized_dest := normalize_path(dest_path)
// Determine what we're moving
source_dir_path, source_filename := split_path(normalized_source)
if source_filename == '' {
// We're moving a directory
source_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, normalized_source)!
@@ -561,16 +568,16 @@ pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, op
} else {
// We're moving a specific item
source_parent_dir := self.factory.fs_dir.get_by_absolute_path(fs_id, source_dir_path)!
// Try to find what we're moving
mut found := false
// Try file first
if file := self.factory.fs_file.get_by_path(source_parent_dir.id, source_filename) {
self.mv_file(fs_id, file.id, normalized_dest, opts)!
found = true
}
// Try symlink if file not found
if !found {
if symlink := self.factory.fs_symlink.get_by_path(source_parent_dir.id, source_filename) {
@@ -578,15 +585,17 @@ pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, op
found = true
}
}
// Try directory if neither file nor symlink found
if !found {
if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id, source_filename) {
if subdir := self.factory.fs_dir.get_by_path(fs_id, source_parent_dir.id,
source_filename)
{
self.mv_directory(fs_id, subdir.id, normalized_dest)!
found = true
}
}
if !found {
return error('Source path "${source_path}" not found')
}
@@ -596,16 +605,16 @@ pub fn (mut self FsTools) mv(fs_id u32, source_path string, dest_path string, op
// Move a file to destination path
fn (mut self FsTools) mv_file(fs_id u32, file_id u32, dest_path string, opts MoveOptions) ! {
source_file := self.factory.fs_file.get(file_id)!
// Determine destination directory and filename
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_file.name
}
// Ensure destination directory exists
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination file already exists
if existing_file := self.factory.fs_file.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -614,12 +623,12 @@ fn (mut self FsTools) mv_file(fs_id u32, file_id u32, dest_path string, opts Mov
// Remove existing file
self.factory.fs_file.delete(existing_file.id)!
}
// Update file name if it's different
if dest_filename != source_file.name {
self.factory.fs_file.rename(file_id, dest_filename)!
}
// Move file to new directory (replace all directory associations)
self.factory.fs_file.move(file_id, [dest_dir_id])!
}
@@ -627,7 +636,7 @@ fn (mut self FsTools) mv_file(fs_id u32, file_id u32, dest_path string, opts Mov
// Move a symlink to destination path
fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, opts MoveOptions) ! {
source_symlink := self.factory.fs_symlink.get(symlink_id)!
if opts.follow_symlinks {
// Follow the symlink and move its target instead
if source_symlink.target_type == .file {
@@ -639,16 +648,16 @@ fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, op
self.factory.fs_symlink.delete(symlink_id)!
return
}
// Move the symlink itself
dest_dir_path, mut dest_filename := split_path(dest_path)
if dest_filename == '' {
dest_filename = source_symlink.name
}
// Ensure destination directory exists
dest_dir_id := self.factory.fs_dir.create_path(fs_id, dest_dir_path)!
// Check if destination symlink already exists
if existing_symlink := self.factory.fs_symlink.get_by_path(dest_dir_id, dest_filename) {
if !opts.overwrite {
@@ -656,12 +665,12 @@ fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, op
}
self.factory.fs_symlink.delete(existing_symlink.id)!
}
// Update symlink name if it's different
if dest_filename != source_symlink.name {
self.factory.fs_symlink.rename(symlink_id, dest_filename)!
}
// Move symlink to new parent directory
self.factory.fs_symlink.move(symlink_id, dest_dir_id)!
}
@@ -669,13 +678,13 @@ fn (mut self FsTools) mv_symlink(fs_id u32, symlink_id u32, dest_path string, op
// Move a directory to destination path
fn (mut self FsTools) mv_directory(fs_id u32, source_dir_id u32, dest_path string) ! {
source_dir := self.factory.fs_dir.get(source_dir_id)!
// Parse destination path
dest_parent_path, mut dest_dirname := split_path(dest_path)
if dest_dirname == '' {
dest_dirname = source_dir.name
}
// Ensure destination parent directory exists
dest_parent_id := if dest_parent_path == '/' {
// Moving to root level, find root directory
@@ -684,12 +693,12 @@ fn (mut self FsTools) mv_directory(fs_id u32, source_dir_id u32, dest_path strin
} else {
self.factory.fs_dir.create_path(fs_id, dest_parent_path)!
}
// Update directory name if it's different
if dest_dirname != source_dir.name {
self.factory.fs_dir.rename(source_dir_id, dest_dirname)!
}
// Move directory to new parent
self.factory.fs_dir.move(source_dir_id, dest_parent_id)!
}
}
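Together, cp, mv and rm above give path-based copy, move and remove on top of the ID-based primitives; copies reuse the source file's blob IDs rather than duplicating data, and rm can optionally delete blobs once nothing else references them. A hedged fragment with illustrative paths, reusing the tools and fs_id assumptions from the find sketch:
tools.cp(fs_id, '/projects/herolib', '/backup/herolib', CopyOptions{})! // recursive and preserve_links default to true
tools.mv(fs_id, '/backup/herolib/readme.md', '/backup/README.md', MoveOptions{ overwrite: true })!
tools.rm(fs_id, '/backup/herolib', RemoveOptions{ recursive: true, delete_blobs: true })!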

View File

@@ -44,11 +44,11 @@ pub fn fs_set(request Request) !Response {
mut fs_factory := herofs.new()!
mut fs_obj := fs_factory.fs.new(
name: payload.name
name: payload.name
description: payload.description
quota_bytes: payload.quota_bytes
)!
if payload.root_dir_id > 0 {
fs_obj.root_dir_id = payload.root_dir_id
}

View File

@@ -33,25 +33,25 @@ pub fn fs_blob_get(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Get blob by either id or hash
mut blob := if payload.id > 0 {
fs_factory.fs_blob.get(payload.id)!
} else if payload.hash != '' {
fs_factory.fs_blob.get_by_hash(payload.hash)!
} else {
return jsonrpc.invalid_params_with_msg("Either id or hash must be provided")
return jsonrpc.invalid_params_with_msg('Either id or hash must be provided')
}
// Convert binary data to base64 for JSON transport
blob_response := {
'id': blob.id.str()
'created_at': blob.created_at.str()
'updated_at': blob.updated_at.str()
'mime_type': blob.mime_type
'name': blob.name
'hash': blob.hash
'size_bytes': blob.size_bytes.str()
'id': blob.id.str()
'created_at': blob.created_at.str()
'updated_at': blob.updated_at.str()
'mime_type': blob.mime_type
'name': blob.name
'hash': blob.hash
'size_bytes': blob.size_bytes.str()
'data_base64': base64.encode(blob.data)
}
@@ -65,14 +65,14 @@ pub fn fs_blob_set(request Request) !Response {
// Decode the base64 data
data := base64.decode(payload.data_base64) or {
return jsonrpc.invalid_params_with_msg("Invalid base64 data")
return jsonrpc.invalid_params_with_msg('Invalid base64 data')
}
mut fs_factory := herofs.new()!
mut blob_obj := fs_factory.fs_blob.new(
data: data
data: data
mime_type: payload.mime_type
name: payload.name
name: payload.name
)!
id := fs_factory.fs_blob.set(blob_obj)!
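Over JSON-RPC the blob payload travels as base64 (data_base64 above), while the factory itself works on raw bytes. A hedged fragment showing the direct round-trip behind fs_blob_set and fs_blob_get; the content and names are illustrative:
mut fs_factory := herofs.new()!
mut blob_obj := fs_factory.fs_blob.new(
	data: 'hello herofs'.bytes()
	mime_type: 'text/plain'
	name: 'hello.txt'
)!
id := fs_factory.fs_blob.set(blob_obj)!
blob := fs_factory.fs_blob.get(id)! // get_by_hash(hash) is the alternative lookup
println('stored ${blob.size_bytes} bytes as ${blob.name}')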

View File

@@ -10,7 +10,7 @@ pub struct FSDirGetArgs {
pub mut:
id u32
path string // Allow getting a directory by path
fs_id u32 // Required when using path
fs_id u32 // Required when using path
}
@[params]
@@ -29,14 +29,14 @@ pub struct FSDirDeleteArgs {
pub mut:
id u32
path string // Allow deleting a directory by path
fs_id u32 // Required when using path
fs_id u32 // Required when using path
}
@[params]
pub struct FSDirMoveArgs {
pub mut:
id u32
parent_id u32
id u32
parent_id u32
source_path string // Allow moving using paths
dest_path string
fs_id u32 // Required when using paths
@@ -66,7 +66,7 @@ pub struct FSDirListContentsArgs {
pub mut:
dir_id u32
path string // Allow listing contents by path
fs_id u32 // Required when using path
fs_id u32 // Required when using path
recursive bool
include []string // Patterns to include
exclude []string // Patterns to exclude
@@ -78,14 +78,14 @@ pub fn fs_dir_get(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Handle either path-based or ID-based retrieval
mut dir := if payload.path != '' && payload.fs_id > 0 {
fs_factory.fs_dir.get_by_absolute_path(payload.fs_id, payload.path)!
} else if payload.id > 0 {
fs_factory.fs_dir.get(payload.id)!
} else {
return jsonrpc.invalid_params_with_msg("Either id or both path and fs_id must be provided")
return jsonrpc.invalid_params_with_msg('Either id or both path and fs_id must be provided')
}
return jsonrpc.new_response(request.id, json.encode(dir))
@@ -97,20 +97,20 @@ pub fn fs_dir_set(request Request) !Response {
}
mut fs_factory := herofs.new()!
mut dir_id := u32(0)
// Handle path-based creation
if payload.path != '' {
dir_id = fs_factory.fs_dir.create_path(payload.fs_id, payload.path)!
} else {
// Handle traditional creation
mut dir_obj := fs_factory.fs_dir.new(
name: payload.name
fs_id: payload.fs_id
parent_id: payload.parent_id
name: payload.name
fs_id: payload.fs_id
parent_id: payload.parent_id
description: payload.description
metadata: payload.metadata
metadata: payload.metadata
)!
dir_id = fs_factory.fs_dir.set(dir_obj)!
}
@@ -124,14 +124,14 @@ pub fn fs_dir_delete(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Handle either path-based or ID-based deletion
if payload.path != '' && payload.fs_id > 0 {
fs_factory.fs_dir.delete_by_path(payload.fs_id, payload.path)!
} else if payload.id > 0 {
fs_factory.fs_dir.delete(payload.id)!
} else {
return jsonrpc.invalid_params_with_msg("Either id or both path and fs_id must be provided")
return jsonrpc.invalid_params_with_msg('Either id or both path and fs_id must be provided')
}
return new_response_true(request.id)
@@ -150,14 +150,14 @@ pub fn fs_dir_move(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Handle either path-based or ID-based move
if payload.source_path != '' && payload.dest_path != '' && payload.fs_id > 0 {
fs_factory.fs_dir.move_by_path(payload.fs_id, payload.source_path, payload.dest_path)!
} else if payload.id > 0 && payload.parent_id > 0 {
fs_factory.fs_dir.move(payload.id, payload.parent_id)!
} else {
return jsonrpc.invalid_params_with_msg("Either id and parent_id, or source_path, dest_path and fs_id must be provided")
return jsonrpc.invalid_params_with_msg('Either id and parent_id, or source_path, dest_path and fs_id must be provided')
}
return new_response_true(request.id)
@@ -203,7 +203,7 @@ pub fn fs_dir_list_contents(request Request) !Response {
}
mut fs_factory := herofs.new()!
// Get directory ID either directly or from path
mut dir_id := if payload.path != '' && payload.fs_id > 0 {
dir := fs_factory.fs_dir.get_by_absolute_path(payload.fs_id, payload.path)!
@@ -211,16 +211,16 @@ pub fn fs_dir_list_contents(request Request) !Response {
} else if payload.dir_id > 0 {
payload.dir_id
} else {
return jsonrpc.invalid_params_with_msg("Either dir_id or both path and fs_id must be provided")
return jsonrpc.invalid_params_with_msg('Either dir_id or both path and fs_id must be provided')
}
// Create options struct
opts := herofs.ListContentsOptions{
recursive: payload.recursive
recursive: payload.recursive
include_patterns: payload.include
exclude_patterns: payload.exclude
}
// List contents with filters
contents := fs_factory.fs_dir.list_contents(&fs_factory, dir_id, opts)!
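The handler accepts either a dir_id or a path plus fs_id, then forwards the include/exclude patterns to list_contents. A hedged fragment doing the same directly, without the JSON-RPC layer; fs_id and the path are illustrative:
mut fs_factory := herofs.new()!
dir := fs_factory.fs_dir.get_by_absolute_path(fs_id, '/projects')!
opts := herofs.ListContentsOptions{
	recursive: true
	include_patterns: ['*.v']
	exclude_patterns: ['*_test.v']
}
contents := fs_factory.fs_dir.list_contents(&fs_factory, dir.id, opts)!
println('${contents.directories.len} dirs, ${contents.files.len} files, ${contents.symlinks.len} symlinks')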

View File

@@ -14,8 +14,8 @@ pub mut:
@[params]
pub struct FSFileSetArgs {
pub mut:
name string @[required]
fs_id u32 @[required]
name string @[required]
fs_id u32 @[required]
directories []u32
blobs []u32
mime_type string
@@ -99,12 +99,12 @@ pub fn fs_file_set(request Request) !Response {
mut fs_factory := herofs.new()!
mut file_obj := fs_factory.fs_file.new(
name: payload.name
fs_id: payload.fs_id
name: payload.name
fs_id: payload.fs_id
directories: payload.directories
blobs: payload.blobs
mime_type: payload.mime_type
metadata: payload.metadata
blobs: payload.blobs
mime_type: payload.mime_type
metadata: payload.metadata
)!
id := fs_factory.fs_file.set(file_obj)!

View File

@@ -65,10 +65,10 @@ pub fn fs_symlink_set(request Request) !Response {
mut fs_factory := herofs.new()!
mut symlink_obj := fs_factory.fs_symlink.new(
name: payload.name
fs_id: payload.fs_id
parent_id: payload.parent_id
target_id: payload.target_id
name: payload.name
fs_id: payload.fs_id
parent_id: payload.parent_id
target_id: payload.target_id
target_type: target_type
description: payload.description
)!

View File

@@ -78,6 +78,7 @@ pub mut:
pub fn (self CalendarEvent) type_name() string {
return 'calendar_event'
}
// return example rpc call and result for each methodname
pub fn (self CalendarEvent) description(methodname string) string {
match methodname {

View File

@@ -29,6 +29,7 @@ pub mut:
pub fn (self ChatGroup) type_name() string {
return 'chat_group'
}
// return example rpc call and result for each methodname
pub fn (self ChatGroup) description(methodname string) string {
match methodname {

View File

@@ -66,6 +66,7 @@ pub mut:
pub fn (self ChatMessage) type_name() string {
return 'chat_message'
}
// return example rpc call and result for each methodname
pub fn (self ChatMessage) description(methodname string) string {
match methodname {

View File

@@ -25,6 +25,7 @@ pub mut:
pub fn (self Comment) type_name() string {
return 'comments'
}
// return example rpc call and result for each methodname
pub fn (self Comment) description(methodname string) string {
match methodname {

View File

@@ -32,6 +32,7 @@ pub enum GroupRole {
pub fn (self Group) type_name() string {
return 'group'
}
// return example rpc call and result for each methodname
pub fn (self Group) description(methodname string) string {
match methodname {

View File

@@ -52,6 +52,7 @@ pub mut:
pub fn (self Project) type_name() string {
return 'project'
}
// return example rpc call and result for each methodname
pub fn (self Project) description(methodname string) string {
match methodname {

View File

@@ -61,6 +61,7 @@ pub mut:
pub fn (self ProjectIssue) type_name() string {
return 'project_issue'
}
// return example rpc call and result for each methodname
pub fn (self ProjectIssue) description(methodname string) string {
match methodname {

View File

@@ -6,115 +6,112 @@ import rand
import time
pub struct AuthConfig {
pub mut:
// Add any authentication-related configuration here
// For now, it can be empty or have default values
}
pub struct AuthManager {
mut:
registered_keys map[string]string // pubkey -> user_id
pending_auths map[string]AuthChallenge // challenge -> challenge_data
active_sessions map[string]Session // session_key -> session_data
registered_keys map[string]string // pubkey -> user_id
pending_auths map[string]AuthChallenge // challenge -> challenge_data
active_sessions map[string]Session // session_key -> session_data
}
pub struct AuthChallenge {
pub:
pubkey string
challenge string
created_at i64
expires_at i64
pubkey string
challenge string
created_at i64
expires_at i64
}
pub struct Session {
pub:
user_id string
pubkey string
created_at i64
expires_at i64
user_id string
pubkey string
created_at i64
expires_at i64
}
pub fn new_auth_manager(config AuthConfig) &AuthManager {
// Use config if needed, for now it's just passed
_ = config
return &AuthManager{}
return &AuthManager{}
}
// Register public key
pub fn (mut am AuthManager) register_pubkey(pubkey string) !string {
// Validate pubkey format
if pubkey.len != 64 { // ed25519 pubkey length
return error('Invalid public key format')
}
// Validate pubkey format
if pubkey.len != 64 { // ed25519 pubkey length
return error('Invalid public key format')
}
user_id := md5.hexhash(pubkey + time.now().unix().str())
am.registered_keys[pubkey] = user_id
return user_id
user_id := md5.hexhash(pubkey + time.now().unix().str())
am.registered_keys[pubkey] = user_id
return user_id
}
// Generate authentication challenge
pub fn (mut am AuthManager) create_auth_challenge(pubkey string) !string {
// Check if pubkey is registered
if pubkey !in am.registered_keys {
return error('Public key not registered')
}
// Check if pubkey is registered
if pubkey !in am.registered_keys {
return error('Public key not registered')
}
// Generate unique challenge
random_data := rand.string(32)
challenge := md5.hexhash(pubkey + random_data + time.now().unix().str())
// Generate unique challenge
random_data := rand.string(32)
challenge := md5.hexhash(pubkey + random_data + time.now().unix().str())
now := time.now().unix()
am.pending_auths[challenge] = AuthChallenge{
pubkey: pubkey
challenge: challenge
created_at: now
expires_at: now + 300 // 5 minutes
}
now := time.now().unix()
am.pending_auths[challenge] = AuthChallenge{
pubkey: pubkey
challenge: challenge
created_at: now
expires_at: now + 300 // 5 minutes
}
return challenge
return challenge
}
// Verify signature and create session
pub fn (mut am AuthManager) verify_and_create_session(challenge string, signature string) !string {
// Get challenge data
auth_challenge := am.pending_auths[challenge] or {
return error('Invalid or expired challenge')
}
// Get challenge data
auth_challenge := am.pending_auths[challenge] or {
return error('Invalid or expired challenge')
}
// Check expiration
if time.now().unix() > auth_challenge.expires_at {
am.pending_auths.delete(challenge)
return error('Challenge expired')
}
// Check expiration
if time.now().unix() > auth_challenge.expires_at {
am.pending_auths.delete(challenge)
return error('Challenge expired')
}
// Verify signature
pubkey_bytes := auth_challenge.pubkey.bytes()
challenge_bytes := challenge.bytes()
signature_bytes := signature.bytes()
// Verify signature
pubkey_bytes := auth_challenge.pubkey.bytes()
challenge_bytes := challenge.bytes()
signature_bytes := signature.bytes()
ed25519.verify(pubkey_bytes, challenge_bytes, signature_bytes) or {
return error('Invalid signature')
}
ed25519.verify(pubkey_bytes, challenge_bytes, signature_bytes) or {
return error('Invalid signature')
}
// Create session
session_key := md5.hexhash(auth_challenge.pubkey + time.now().unix().str() + rand.string(16))
now := time.now().unix()
// Create session
session_key := md5.hexhash(auth_challenge.pubkey + time.now().unix().str() + rand.string(16))
now := time.now().unix()
am.active_sessions[session_key] = Session{
user_id: am.registered_keys[auth_challenge.pubkey]
pubkey: auth_challenge.pubkey
created_at: now
expires_at: now + 3600 // 1 hour
}
am.active_sessions[session_key] = Session{
user_id: am.registered_keys[auth_challenge.pubkey]
pubkey: auth_challenge.pubkey
created_at: now
expires_at: now + 3600 // 1 hour
}
// Clean up challenge
am.pending_auths.delete(challenge)
// Clean up challenge
am.pending_auths.delete(challenge)
return session_key
return session_key
}
// Validate session
pub fn (am AuthManager) validate_session(session_key string) bool {
session := am.active_sessions[session_key] or { return false }
return time.now().unix() < session.expires_at
}
session := am.active_sessions[session_key] or { return false }
return time.now().unix() < session.expires_at
}
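The manager above implements a challenge/response flow: a registered ed25519 public key requests a challenge, signs it, and exchanges the signature for a session key with a one hour lifetime. A hedged fragment of the happy path; pubkey_hex and signature_hex are placeholders, not real key material:
mut am := new_auth_manager(AuthConfig{})
user_id := am.register_pubkey(pubkey_hex)! // rejected unless the key is 64 characters long
challenge := am.create_auth_challenge(pubkey_hex)! // expires after 5 minutes
// the client signs `challenge` with its private key and sends back signature_hex
session_key := am.verify_and_create_session(challenge, signature_hex)!
assert am.validate_session(session_key)
println('user ${user_id} authenticated')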

View File

@@ -6,80 +6,80 @@ import freeflowuniverse.herolib.schemas.jsonschema
// Generate HTML documentation for handler type
pub fn (s HeroServer) generate_documentation(handler_type string, handler openrpc.Handler) !string {
spec := s.handler_registry.get_spec(handler_type) or {
return error('No spec found for handler type: ${handler_type}')
}
spec := s.handler_registry.get_spec(handler_type) or {
return error('No spec found for handler type: ${handler_type}')
}
// Load and process template
template_path := os.join_path(@VMODROOT, 'lib/hero/heroserver/templates/doc.md')
template_content := os.read_file(template_path) or {
return error('Failed to read documentation template: ${err}')
}
// Load and process template
template_path := os.join_path(@VMODROOT, 'lib/hero/heroserver/templates/doc.md')
template_content := os.read_file(template_path) or {
return error('Failed to read documentation template: ${err}')
}
// Process template with spec data
doc_content := process_doc_template(template_content, spec, handler_type)
// Process template with spec data
doc_content := process_doc_template(template_content, spec, handler_type)
// Return HTML with Bootstrap and markdown processing
return generate_html_wrapper(doc_content, handler_type)
// Return HTML with Bootstrap and markdown processing
return generate_html_wrapper(doc_content, handler_type)
}
// Process the markdown template with OpenRPC spec data
fn process_doc_template(template string, spec openrpc.OpenRPC, handler_type string) string {
mut content := template
mut content := template
// Replace template variables
content = content.replace('@{handler_type}', handler_type)
content = content.replace('@{spec.info.title}', spec.info.title)
content = content.replace('@{spec.info.description}', spec.info.description)
content = content.replace('@{spec.info.version}', spec.info.version)
// Replace template variables
content = content.replace('@{handler_type}', handler_type)
content = content.replace('@{spec.info.title}', spec.info.title)
content = content.replace('@{spec.info.description}', spec.info.description)
content = content.replace('@{spec.info.version}', spec.info.version)
// Generate methods documentation
mut methods_doc := ''
for method in spec.methods {
methods_doc += generate_method_doc(method)
}
content = content.replace('@{methods}', methods_doc)
// Generate methods documentation
mut methods_doc := ''
for method in spec.methods {
methods_doc += generate_method_doc(method)
}
content = content.replace('@{methods}', methods_doc)
return content
return content
}
// Generate documentation for a single method
fn generate_method_doc(method openrpc.Method) string {
mut doc := '## ${method.name}\n\n'
mut doc := '## ${method.name}\n\n'
if method.description.len > 0 {
doc += '${method.description}\n\n'
}
if method.description.len > 0 {
doc += '${method.description}\n\n'
}
// Parameters
if method.params.len > 0 {
doc += '### Parameters\n\n'
for param in method.params {
// Handle both ContentDescriptor and Reference
if param is openrpc.ContentDescriptor {
if param.schema is jsonschema.Schema {
schema := param.schema as jsonschema.Schema
doc += '- **${param.name}** (${schema.typ}): ${param.description}\n'
}
}
}
doc += '\n'
}
// Parameters
if method.params.len > 0 {
doc += '### Parameters\n\n'
for param in method.params {
// Handle both ContentDescriptor and Reference
if param is openrpc.ContentDescriptor {
if param.schema is jsonschema.Schema {
schema := param.schema as jsonschema.Schema
doc += '- **${param.name}** (${schema.typ}): ${param.description}\n'
}
}
}
doc += '\n'
}
// Result
if method.result is openrpc.ContentDescriptor {
result := method.result as openrpc.ContentDescriptor
doc += '### Returns\n\n'
doc += '${result.description}\n\n'
}
// Result
if method.result is openrpc.ContentDescriptor {
result := method.result as openrpc.ContentDescriptor
doc += '### Returns\n\n'
doc += '${result.description}\n\n'
}
// Examples (would need to be added to OpenRPC spec or handled differently)
doc += '### Example\n\n'
doc += '```json\n'
doc += '// Request example would go here\n'
doc += '```\n\n'
// Examples (would need to be added to OpenRPC spec or handled differently)
doc += '### Example\n\n'
doc += '```json\n'
doc += '// Request example would go here\n'
doc += '```\n\n'
return doc
return doc
}
// Generate HTML wrapper with Bootstrap
@@ -89,4 +89,4 @@ fn generate_html_wrapper(markdown_content string, handler_type string) string {
template_content = template_content.replace('@{handler_type}', handler_type)
template_content = template_content.replace('@{markdown_content}', markdown_content)
return template_content
}
}
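process_doc_template above is a plain placeholder substitution: every @{...} marker in the markdown template is replaced with the corresponding OpenRPC field before the result is wrapped in HTML. A minimal self-contained sketch of the same mechanism with made-up values:
mut content := 'Title: @{spec.info.title} (v@{spec.info.version}) for @{handler_type}'
content = content.replace('@{spec.info.title}', 'HeroFS API')
content = content.replace('@{spec.info.version}', '1.0.0')
content = content.replace('@{handler_type}', 'fs')
println(content) // Title: HeroFS API (v1.0.0) for fs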

View File

@@ -1,19 +1,18 @@
module heroserver
@[params]
pub struct ServerConfig {
pub:
port int = 8080
host string = 'localhost'
port int = 8080
host string = 'localhost'
}
// Factory function to create new server instance
pub fn new_server(config ServerConfig) !&HeroServer {
mut server := &HeroServer{
config: config
auth_manager: new_auth_manager()
handler_registry: new_handler_registry()
}
return server
}
mut server := &HeroServer{
config: config
auth_manager: new_auth_manager()
handler_registry: new_handler_registry()
}
return server
}

View File

@@ -4,31 +4,31 @@ import freeflowuniverse.herolib.schemas.openrpc
pub struct HandlerRegistry {
mut:
handlers map[string]openrpc.Handler
specs map[string]openrpc.OpenRPC
handlers map[string]openrpc.Handler
specs map[string]openrpc.OpenRPC
}
pub fn new_handler_registry() &HandlerRegistry {
return &HandlerRegistry{}
return &HandlerRegistry{}
}
// Register OpenRPC handler with type name
pub fn (mut hr HandlerRegistry) register(handler_type string, handler openrpc.Handler, spec openrpc.OpenRPC) {
hr.handlers[handler_type] = handler
hr.specs[handler_type] = spec
hr.handlers[handler_type] = handler
hr.specs[handler_type] = spec
}
// Get handler by type
pub fn (hr HandlerRegistry) get(handler_type string) ?openrpc.Handler {
return hr.handlers[handler_type]
return hr.handlers[handler_type]
}
// Get OpenRPC spec by type
pub fn (hr HandlerRegistry) get_spec(handler_type string) ?openrpc.OpenRPC {
return hr.specs[handler_type]
return hr.specs[handler_type]
}
// List all registered handler types
pub fn (hr HandlerRegistry) list_types() []string {
return hr.handlers.keys()
}
return hr.handlers.keys()
}
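The registry keys both the handler and its OpenRPC spec by a handler type string, so the server can dispatch requests and serve documentation from one lookup. A hedged fragment; my_handler and my_spec stand in for any openrpc.Handler implementation and its spec document:
mut hr := new_handler_registry()
hr.register('calendar_event', my_handler, my_spec)
spec := hr.get_spec('calendar_event') or { panic('calendar_event not registered') }
println('registered types: ${hr.list_types()}, title: ${spec.info.title}')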

View File

@@ -8,7 +8,7 @@ import freeflowuniverse.herolib.heroserver.handlers
pub struct ServerConfig {
pub mut:
port int = 8080
port int = 8080
auth_config auth.AuthConfig
}
@@ -67,7 +67,7 @@ pub fn (mut s HeroServer) api(mut ctx Context) veb.Result {
request := jsonrpc.decode_request(ctx.req.data) or {
return ctx.request_error('Invalid JSON-RPC request')
}
response := handler.handle(request) or { return ctx.server_error('Handler error') }
return ctx.json(response)

View File

@@ -1,7 +1,7 @@
module codegen
import log
import freeflowuniverse.herolib.core.code { Alias, Array, Attribute, CodeItem, Object, Struct, StructField, Type, type_from_symbol }
import freeflowuniverse.herolib.develop.codetools as code { Alias, Array, Attribute, CodeItem, Object, Struct, StructField, Type, type_from_symbol }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }
const vtypes = {

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { Param, Struct, Type }
import freeflowuniverse.herolib.develop.codetools as code { Param, Struct, Type }
import freeflowuniverse.herolib.schemas.jsonschema { Number, Reference, Schema, SchemaRef }
// struct_to_schema generates a json schema or reference from a struct model

View File

@@ -1,7 +1,7 @@
module codegen
import log
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
fn test_struct_to_schema() {
struct_ := code.Struct{

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { File, Folder }
import freeflowuniverse.herolib.develop.codetools as code { File, Folder }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema }
import freeflowuniverse.herolib.schemas.jsonschema.codegen { schema_to_struct }

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }
import freeflowuniverse.herolib.schemas.jsonschema.codegen

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schemaref_to_type }
import freeflowuniverse.herolib.schemas.openapi { ResponseSpec }

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { Function, Module, Struct }
import freeflowuniverse.herolib.develop.codetools as code { Function, Module, Struct }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
// pub struct OpenRPCCode {

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Struct, VFile, parse_function }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Struct, VFile, parse_function }
// import freeflowuniverse.herolib.schemas.jsonrpc.codegen {generate_client_struct}
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
import freeflowuniverse.herolib.core.texttools

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Param, Struct, VFile, parse_import }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Param, Struct, VFile, parse_import }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
import freeflowuniverse.herolib.core.texttools
import rand

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { CustomCode, VFile, parse_function, parse_import }
import freeflowuniverse.herolib.develop.codetools as code { CustomCode, VFile, parse_function, parse_import }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { CodeItem }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem }
import freeflowuniverse.herolib.schemas.jsonschema { Schema }
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_code }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }

View File

@@ -2,7 +2,7 @@ module codegen
import os
import json
import freeflowuniverse.herolib.core.code { Alias, Struct }
import freeflowuniverse.herolib.develop.codetools as code { Alias, Struct }
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.schemas.openrpc

View File

@@ -1,6 +1,6 @@
module codegen
import freeflowuniverse.herolib.core.code { Function, Struct }
import freeflowuniverse.herolib.develop.codetools as code { Function, Struct }
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_struct, schemaref_to_type }
import freeflowuniverse.herolib.schemas.jsonschema { Schema }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, Method }

View File

@@ -1,7 +1,7 @@
module openrpc
import freeflowuniverse.herolib.schemas.jsonschema { Reference }
import freeflowuniverse.herolib.core.code { Struct, StructField }
import freeflowuniverse.herolib.develop.codetools as code { Struct, StructField }
import x.json2
pub fn parse_example_pairing(text_ string) !ExamplePairing {

View File

@@ -1,6 +1,6 @@
module openrpc
import freeflowuniverse.herolib.core.code { Attribute, Struct, StructField }
import freeflowuniverse.herolib.develop.codetools as code { Attribute, Struct, StructField }
const example_txt = "
Example: Get pet example.

View File

@@ -10,7 +10,6 @@ module models
// - Location models (addresses)
// Import all model modules for easy access
import freeflowuniverse.herolib.threefold.models.core
import freeflowuniverse.herolib.threefold.models.finance
import freeflowuniverse.herolib.threefold.models.flow

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema.codegen
import freeflowuniverse.herolib.schemas.openrpc.codegen as openrpc_codegen

View File

@@ -2,7 +2,7 @@ module generator
import x.json2 as json
import arrays
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.baobab.specification
import freeflowuniverse.herolib.schemas.openrpc
import freeflowuniverse.herolib.schemas.jsonschema

View File

@@ -1,7 +1,7 @@
module generator
import freeflowuniverse.herolib.baobab.specification
import freeflowuniverse.herolib.core.code { Param, Param, type_from_symbol }
import freeflowuniverse.herolib.develop.codetools as code { Param, Param, type_from_symbol }
import freeflowuniverse.herolib.core.texttools
const id_param = Param{

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.baobab.specification
import rand
import freeflowuniverse.herolib.core.texttools

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Import, Object, Param, Result, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Import, Object, Param, Result, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, Example }
import freeflowuniverse.herolib.schemas.jsonschema.codegen { schemaref_to_type }

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { File, Folder, IFile, IFolder }
import freeflowuniverse.herolib.develop.codetools as code { File, Folder, IFile, IFolder }
import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.baobab.specification { ActorInterface, ActorSpecification }

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CustomCode, IFile, IFolder, Module, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CustomCode, IFile, IFolder, Module, VFile }
import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.baobab.specification { ActorInterface, ActorSpecification }

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.baobab.specification
import freeflowuniverse.herolib.schemas.openrpc
import freeflowuniverse.herolib.schemas.jsonschema

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Function, Import, Param, Result, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Function, Import, Param, Result, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schemaref_to_type }
import freeflowuniverse.herolib.schemas.openrpc.codegen { content_descriptor_to_parameter }

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, CustomCode, Import, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, CustomCode, Import, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.baobab.specification { ActorMethod, ActorSpecification }

View File

@@ -1,7 +1,7 @@
module generator
import freeflowuniverse.herolib.baobab.specification { ActorInterface }
import freeflowuniverse.herolib.core.code { CustomCode, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CustomCode, VFile }
fn generate_openrpc_interface_files(interfaces []ActorInterface) (VFile, VFile) {
http := ActorInterface.http in interfaces

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, Function, Import, Param, Result, Struct, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Function, Import, Param, Result, Struct, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.schemas.openrpc

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, Function, Import, Param, Result, Struct, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Function, Import, Param, Result, Struct, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc { Example }
import freeflowuniverse.herolib.schemas.jsonschema

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, Import, Param, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Import, Param, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.openrpc.codegen
import freeflowuniverse.herolib.baobab.specification { ActorSpecification }

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { CodeItem, Struct, VFile }
import freeflowuniverse.herolib.develop.codetools as code { CodeItem, Struct, VFile }
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.schemas.jsonschema.codegen { schema_to_struct }
import freeflowuniverse.herolib.baobab.specification { ActorSpecification }

View File

@@ -1,7 +1,7 @@
module generator
import json
import freeflowuniverse.herolib.core.code { File, Folder }
import freeflowuniverse.herolib.develop.codetools as code { File, Folder }
import freeflowuniverse.herolib.schemas.openapi { OpenAPI, Operation }
import freeflowuniverse.herolib.schemas.openapi.codegen
import freeflowuniverse.herolib.schemas.jsonschema.codegen as jsonschema_codegen { schema_to_type }

View File

@@ -1,7 +1,7 @@
module generator
import json
import freeflowuniverse.herolib.core.code { File, Function, Struct, VFile }
import freeflowuniverse.herolib.develop.codetools as code { File, Function, Struct, VFile }
import freeflowuniverse.herolib.schemas.openrpc { OpenRPC }
import freeflowuniverse.herolib.schemas.openrpc.codegen { generate_client_file, generate_client_test_file }

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { Function, Param, Result, Struct, Type }
import freeflowuniverse.herolib.develop.codetools as code { Function, Param, Result, Struct, Type }
import freeflowuniverse.herolib.schemas.openrpc
const test_actor_specification = ActorSpecification{

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code { File, Folder }
import freeflowuniverse.herolib.develop.codetools as code { File, Folder }
import freeflowuniverse.herolib.core.texttools
// generates the folder with runnable scripts of the actor

View File

@@ -1,6 +1,6 @@
module generator
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import os
// // generate_object_methods generates CRUD actor methods for a provided structure

View File

@@ -1,7 +1,7 @@
module specification
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.code { Struct }
import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.jsonschema { Schema, SchemaRef }
import freeflowuniverse.herolib.schemas.openapi { MediaType, OpenAPI, OperationInfo, Parameter }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec, Example, ExamplePairing, ExampleRef }

View File

@@ -1,7 +1,7 @@
module specification
import x.json2 as json
import freeflowuniverse.herolib.core.code { Struct }
import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec }
import freeflowuniverse.herolib.schemas.openapi { Components, Info, OpenAPI, Operation, PathItem, ServerSpec }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }

View File

@@ -1,6 +1,6 @@
module specification
import freeflowuniverse.herolib.core.code { Struct }
import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor }
import freeflowuniverse.herolib.schemas.openapi { Components, Info }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema, SchemaRef }

View File

@@ -1,6 +1,6 @@
module specification
import freeflowuniverse.herolib.core.code { Struct }
import freeflowuniverse.herolib.develop.codetools as code { Struct }
import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.schemas.openrpc { ContentDescriptor, ErrorSpec, ExamplePairing }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, Schema }

View File

@@ -1,6 +1,6 @@
module specification
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema { Schema, SchemaRef }
import freeflowuniverse.herolib.schemas.openapi
import freeflowuniverse.herolib.schemas.openrpc

View File

@@ -2,7 +2,7 @@ module baobab
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import x.json2 as json { Any }
import freeflowuniverse.herolib.baobab.generator
import freeflowuniverse.herolib.baobab.specification

View File

@@ -1,6 +1,6 @@
module mcpgen
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.schemas.jsonschema.codegen

View File

@@ -1,7 +1,7 @@
module mcpgen
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any }
// import json

View File

@@ -1,7 +1,7 @@
module mcp
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.lang.rust

View File

@@ -1,7 +1,7 @@
module mcp
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any }

View File

@@ -1,7 +1,7 @@
module vcode
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

View File

@@ -1,7 +1,7 @@
module vcode
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

View File

@@ -2,7 +2,7 @@ module logic
import freeflowuniverse.herolib.ai.escalayer
import freeflowuniverse.herolib.lang.rust
import freeflowuniverse.herolib.ai.utils
import freeflowuniverse.herolib.develop.codetools.utils as ai_utils
import os
pub fn generate_rhai_wrapper(name string, source_path string) !string {
@@ -241,30 +241,30 @@ struct CodeBlocks {
// Extract code blocks from the AI response
fn extract_code_blocks(response string) !CodeBlocks {
// Extract wrapper.rs content
wrapper_rs_content := utils.extract_code_block(response, 'wrapper.rs', 'rust')
wrapper_rs_content := ai_utils.extract_code_block(response, 'wrapper.rs', 'rust')
if wrapper_rs_content == '' {
return error('Failed to extract wrapper.rs content from response. Please ensure your code is properly formatted inside a code block that starts with ```rust\n// wrapper.rs and ends with ```')
}
// Extract engine.rs content
mut engine_rs_content := utils.extract_code_block(response, 'engine.rs', 'rust')
mut engine_rs_content := ai_utils.extract_code_block(response, 'engine.rs', 'rust')
if engine_rs_content == '' {
// Try to extract from the response without explicit language marker
engine_rs_content = utils.extract_code_block(response, 'engine.rs', '')
engine_rs_content = ai_utils.extract_code_block(response, 'engine.rs', '')
}
// Extract example.rhai content
mut example_rhai_content := utils.extract_code_block(response, 'example.rhai', 'rhai')
mut example_rhai_content := ai_utils.extract_code_block(response, 'example.rhai', 'rhai')
if example_rhai_content == '' {
// Try to extract from the response without explicit language marker
example_rhai_content = utils.extract_code_block(response, 'example.rhai', '')
example_rhai_content = ai_utils.extract_code_block(response, 'example.rhai', '')
if example_rhai_content == '' {
return error('Failed to extract example.rhai content from response. Please ensure your code is properly formatted inside a code block that starts with ```rhai\n// example.rhai and ends with ```')
}
}
// Extract lib.rs content
lib_rs_content := utils.extract_code_block(response, 'lib.rs', 'rust')
lib_rs_content := ai_utils.extract_code_block(response, 'lib.rs', 'rust')
if lib_rs_content == '' {
return error('Failed to extract lib.rs content from response. Please ensure your code is properly formatted inside a code block that starts with ```rust\n// lib.rs and ends with ```')
}
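
For orientation on the call sites renamed above: `extract_code_block(response, name, lang)` takes the raw AI response, a target file name, and an optional fence language, and returns the body of the matching block (or an empty string if none is found). A minimal sketch of such a parser in V, assuming only the signature visible at these call sites — not the actual `develop.codetools.utils` implementation:

// Sketch: find a fenced block whose opening fence matches `lang` (empty lang means a bare
// fence) and whose first line names the requested file, then return the block body.
fn extract_code_block(response string, name string, lang string) string {
	lines := response.split_into_lines()
	mut inside := false
	mut matched := false
	mut body := []string{}
	for line in lines {
		trimmed := line.trim_space()
		if !inside {
			// Open a candidate block when the fence language matches.
			if trimmed.starts_with('```') && trimmed.all_after('```').trim_space() == lang {
				inside = true
				matched = false
				body = []string{}
			}
			continue
		}
		if trimmed == '```' {
			if matched {
				return body.join('\n')
			}
			inside = false
			continue
		}
		if !matched {
			// The first line inside the block is expected to name the file, e.g. `// wrapper.rs`.
			if trimmed.contains(name) {
				matched = true
			} else {
				inside = false
			}
			continue
		}
		body << line
	}
	return ''
}

The empty-string fallback is what lets the callers above retry with an empty language marker before reporting an error.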

View File

@@ -2,7 +2,7 @@ module logic
import freeflowuniverse.herolib.ai.escalayer
import freeflowuniverse.herolib.lang.rust
import freeflowuniverse.herolib.ai.utils
import freeflowuniverse.herolib.develop.codetools.utils as ai_utils
import os
// pub fn generate_rhai_wrapper_sampling(name string, source_path string) !string {
@@ -223,23 +223,23 @@ import os
// // Extract code blocks from the AI response
// fn extract_code_blocks(response string)! CodeBlocks {
// // Extract wrapper.rs content
// wrapper_rs_content := utils.extract_code_block(response, 'wrapper.rs', 'rust')
// wrapper_rs_content := ai_utils.extract_code_block(response, 'wrapper.rs', 'rust')
// if wrapper_rs_content == '' {
// return error('Failed to extract wrapper.rs content from response. Please ensure your code is properly formatted inside a code block that starts with ```rust\n// wrapper.rs and ends with ```')
// }
// // Extract engine.rs content
// mut engine_rs_content := utils.extract_code_block(response, 'engine.rs', 'rust')
// mut engine_rs_content := ai_utils.extract_code_block(response, 'engine.rs', 'rust')
// if engine_rs_content == '' {
// // Try to extract from the response without explicit language marker
// engine_rs_content = utils.extract_code_block(response, 'engine.rs', '')
// engine_rs_content = ai_utils.extract_code_block(response, 'engine.rs', '')
// }
// // Extract example.rhai content
// mut example_rhai_content := utils.extract_code_block(response, 'example.rhai', 'rhai')
// mut example_rhai_content := ai_utils.extract_code_block(response, 'example.rhai', 'rhai')
// if example_rhai_content == '' {
// // Try to extract from the response without explicit language marker
// example_rhai_content = utils.extract_code_block(response, 'example.rhai', '')
// example_rhai_content = ai_utils.extract_code_block(response, 'example.rhai', '')
// if example_rhai_content == '' {
// return error('Failed to extract example.rhai content from response. Please ensure your code is properly formatted inside a code block that starts with ```rhai\n// example.rhai and ends with ```')
// }

View File

@@ -1,7 +1,7 @@
module mcp
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.ai.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.lang.rust

View File

@@ -1,7 +1,7 @@
module mcp
import freeflowuniverse.herolib.ai.mcp
import freeflowuniverse.herolib.core.code
import freeflowuniverse.herolib.develop.codetools as code
import freeflowuniverse.herolib.ai.mcp.rhai.logic
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 as json { Any }