Merge pull request #167 from Incubaid/development_heroserver

Implement Tags Entity System and Tags API Endpoint
This commit is contained in:
Omdanii
2025-10-08 11:33:06 +03:00
committed by GitHub
84 changed files with 3872 additions and 1897 deletions

View File

@@ -30,6 +30,15 @@ pub fn decode_u32(data string) !u32 {
return u32(parsed_uint)
}
// decode_string extracts a string parameter from a raw RPC payload.
// JSON decoding is intentionally disabled: the raw payload is returned
// unchanged, so a quoted JSON string keeps its surrounding quotes.
// NOTE(review): despite the `!string` signature, this currently never errors.
pub fn decode_string(data string) !string {
	// Try JSON decode first (for proper JSON strings)
	// if result := json2.decode[string](data) {
	// return result
	// }
	// Fallback: hand back the raw data unchanged (no error is ever returned).
	return data
}
// decode_bool parses a JSON boolean payload.
// Returns an error when `data` is not a valid JSON bool.
pub fn decode_bool(data string) !bool {
	if parsed := json2.decode[bool](data) {
		return parsed
	}
	return error('Failed to decode bool: ${data}')
}

View File

@@ -1,23 +1,53 @@
module db
import crypto.md5
import json
// tags_get returns the ID of the Tags entity matching the given tag names,
// creating and storing a new Tags entity when none exists yet.
// Tags are normalized (trimmed, lowercased, inner spaces replaced by '_',
// empties dropped) and sorted so the same set always hashes to the same key.
// Returns 0 when no tags are given.
pub fn (mut self DB) tags_get(tags []string) !u32 {
	if tags.len == 0 {
		return 0
	}
	// Normalize: trim first so leading/trailing whitespace is removed rather
	// than being turned into underscores by the replace below.
	mut tags_fixed := tags.map(it.trim_space().to_lower_ascii().replace(' ', '_')).filter(it != '')
	tags_fixed.sort_ignore_case()
	hash := md5.hexhash(tags_fixed.join(','))
	tags_id_str := self.redis.hget('db:tags_hash', hash) or { '' }
	if tags_id_str != '' {
		// Return existing Tags ID
		return tags_id_str.u32()
	}
	// Generate new ID for Tags entity
	tags_id := self.new_id()!
	tags_entity := Tags{
		id:    tags_id
		names: tags_fixed
		md5:   hash
	}
	// Store Tags entity in Redis as JSON, plus a hash -> id index for lookup.
	tags_json := json.encode(tags_entity)
	self.redis.hset('db:tags', tags_id.str(), tags_json)!
	self.redis.hset('db:tags_hash', hash, tags_id.str())!
	return tags_id
}
// tags_from_id resolves a Tags entity ID back to its list of tag names.
// ID 0 means "no tags" and yields an empty list; a missing entity is an error.
pub fn (mut self DB) tags_from_id(tags_id u32) ![]string {
	if tags_id == 0 {
		return []string{}
	}
	// Look up the stored JSON payload for this Tags entity.
	raw := self.redis.hget('db:tags', tags_id.str())!
	if raw.len == 0 {
		return error('Tags entity not found for ID: ${tags_id}')
	}
	entity := json.decode(Tags, raw)!
	return entity.names
}

View File

@@ -1,17 +0,0 @@
```v
pub fn (mut self DBFsBlobMembership) set(mut o FsBlobMembership) ! {
```
becomes
```v
pub fn (mut self DBFsBlobMembership) set(o FsBlobMembership) !FsBlobMembership {
... the other code
return o
}
```
We need to change each `set` in this module to follow this pattern, and then make sure the `_test.v` programs call it the same way.
Note that `set` no longer takes its argument as `mut`; it returns the (possibly updated) object instead.

View File

@@ -1,10 +1,11 @@
// Replace the current content with:
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.core.redisclient
@[heap]
pub struct FsFactory {
pub struct FSFactory {
pub mut:
fs DBFs
fs_blob DBFsBlob
@@ -20,9 +21,9 @@ pub mut:
redis ?&redisclient.Redis
}
pub fn new(args DBArgs) !FsFactory {
pub fn new(args DBArgs) !FSFactory {
mut mydb := db.new(redis: args.redis)!
mut f := FsFactory{
mut f := FSFactory{
fs: DBFs{
db: &mydb
}
@@ -51,20 +52,28 @@ pub fn new(args DBArgs) !FsFactory {
return f
}
// new_test builds a FSFactory against the test database and flushes its Redis
// store so each test starts from a clean state. Test-only helper.
pub fn new_test() !FSFactory {
	mut mydb := db.new_test()!
	mut f := new(redis: mydb.redis)!
	// Wipe any leftover state from previous test runs.
	f.fs.db.redis.flushdb()!
	return f
}
// new_fs is a convenience helper: builds a default factory and returns the
// filesystem described by `args`, creating it in the database if needed.
// Fix: the diff merge left an unreachable duplicate return path here.
pub fn new_fs(args FsArg) !Fs {
	mut f := new()!
	return f.fs.new_get_set(args)!
}
// new_fs_test builds a factory against the test database, flushes Redis, and
// returns a filesystem named 'test'. Test-only helper.
// Fix: the diff merge left both the old and new bodies interleaved, producing
// a duplicate `f` declaration and unreachable code; only the new body remains.
pub fn new_fs_test() !Fs {
	mut mydb := db.new_test()!
	mut f := new(redis: mydb.redis)!
	f.fs.db.redis.flushdb()!
	return f.fs.new_get_set(name: 'test')!
}
// delete_fs_test wipes the test database by flushing its Redis store.
// Fix: the diff merge left the old body (factory against the live database)
// interleaved with the new one; only the test-database version remains.
pub fn delete_fs_test() ! {
	mut mydb := db.new_test()!
	mut f := new(redis: mydb.redis)!
	f.fs.db.redis.flushdb()!
}

View File

@@ -3,6 +3,11 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// Fs represents a filesystem, is the top level container for files and directories and symlinks, blobs are used over filesystems
@[heap]
@@ -14,7 +19,7 @@ pub mut:
root_dir_id u32 // ID of root directory
quota_bytes u64 // Storage quota in bytes
used_bytes u64 // Current usage in bytes
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
// We only keep the root directory ID here, other directories can be found by querying parent_id in FsDir
@@ -22,13 +27,61 @@ pub mut:
pub struct DBFs {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
// type_name returns the entity type identifier used for Fs records.
pub fn (self Fs) type_name() string {
	return 'fs'
}
// description returns a human-readable summary for each supported RPC method
// on the filesystem entity; unknown methods get a generic placeholder.
pub fn (self Fs) description(methodname string) string {
	return match methodname {
		'set' { 'Create or update a filesystem. Returns the ID of the filesystem.' }
		'get' { 'Retrieve a filesystem by ID. Returns the filesystem object.' }
		'delete' { 'Delete a filesystem by ID. Returns true if successful.' }
		'exist' { 'Check if a filesystem exists by ID. Returns true or false.' }
		'list' { 'List all filesystems. Returns an array of filesystem objects.' }
		else { 'This is generic method for the root object, TODO fill in, ...' }
	}
}
// example returns a sample JSON-RPC parameter payload and the matching sample
// result for each supported method on the filesystem entity.
pub fn (self Fs) example(methodname string) (string, string) {
	match methodname {
		'set' {
			return '{"fs": {"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824}}', '1'
		}
		'get' {
			return '{"id": 1}', '{"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824, "used_bytes": 0}'
		}
		'delete' {
			return '{"id": 1}', 'true'
		}
		'exist' {
			return '{"id": 1}', 'true'
		}
		'list' {
			return '{}', '[{"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824, "used_bytes": 0}]'
		}
		else {
			// Unknown methods: empty call and empty result.
			return '{}', '{}'
		}
	}
}
pub fn (self Fs) dump(mut e encoder.Encoder) ! {
e.add_string(self.name)
e.add_u32(self.group_id)
@@ -88,6 +141,7 @@ pub fn (mut self DBFs) new(args FsArg) !Fs {
if args.messages.len > 0 {
o.messages = self.db.messages_get(args.messages)!
}
o.updated_at = ourtime.now().unix()
return o
}
@@ -192,11 +246,11 @@ pub fn (mut self DBFs) get(id u32) !Fs {
mut o, data := self.db.get_data[Fs](id)!
mut e_decoder := encoder.decoder_new(data)
self.load(mut o, mut e_decoder)!
o.factory = self.factory
// o.factory = self.factory
return o
}
pub fn (mut self DBFs) list() ![]Fs {
pub fn (mut self DBFs) list(args FsListArg) ![]Fs {
return self.db.list[Fs]()!.map(self.get(it)!)
}
@@ -235,3 +289,44 @@ pub fn (mut self DBFs) check_quota(id u32, additional_bytes u64) !bool {
fs := self.get(id)!
return (fs.used_bytes + additional_bytes) <= fs.quota_bytes
}
// fs_handle dispatches a JSON-RPC request for the `fs` entity to the matching
// DBFs operation and wraps the outcome in a jsonrpc Response.
// `servercontext` and `userref` are part of the handler signature but are not
// used by any fs method yet.
// Fix: JSON-RPC 2.0 reserves -32601 for "method not found"; the code was
// previously emitted with the wrong sign (32601).
pub fn fs_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.fs.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut o := db.decode_generic[Fs](params)!
			o = f.fs.set(o)!
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			f.fs.delete(id)!
			return new_response_ok(rpcid)
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.fs.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			args := db.decode_generic[FsListArg](params)!
			res := f.fs.list(args)!
			return new_response(rpcid, json.encode(res))
		}
		else {
			console.print_stderr('Method not found on fs: ${method}')
			// JSON-RPC 2.0: -32601 = method not found.
			return new_error(rpcid,
				code:    -32601
				message: 'Method ${method} not found on fs'
			)
		}
	}
}

View File

@@ -4,6 +4,9 @@ import crypto.blake3
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import json
// FsBlob represents binary data up to 1MB
@[heap]
@@ -18,16 +21,83 @@ pub mut:
encoding string // Encoding type
}
// Update DBFsBlob struct:
pub struct DBFsBlob {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
// type_name returns the entity type identifier used for FsBlob records.
pub fn (self FsBlob) type_name() string {
	return 'fs_blob'
}
// description returns a human-readable summary for each supported RPC method
// on the blob entity; unknown methods get a generic placeholder.
pub fn (self FsBlob) description(methodname string) string {
	return match methodname {
		'set' { 'Create or update a blob. Returns the ID of the blob.' }
		'get' { 'Retrieve a blob by ID. Returns the blob object.' }
		'delete' { 'Delete a blob by ID. Returns true if successful.' }
		'exist' { 'Check if a blob exists by ID. Returns true or false.' }
		'list' { 'List all blobs. Returns an array of blob objects.' }
		'get_by_hash' { 'Retrieve a blob by its hash. Returns the blob object.' }
		'exists_by_hash' { 'Check if a blob exists by its hash. Returns true or false.' }
		'verify' { 'Verify the integrity of a blob by its hash. Returns true or false.' }
		else { 'This is generic method for the root object, TODO fill in, ...' }
	}
}
// example returns a sample JSON-RPC parameter payload and the matching sample
// result for each supported method on the blob entity.
pub fn (self FsBlob) example(methodname string) (string, string) {
	match methodname {
		'set' {
			// Data payloads are base64-encoded in the JSON examples.
			return '{"data": "SGVsbG8gV29ybGQh"}', '1'
		}
		'get' {
			return '{"id": 1}', '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}'
		}
		'delete' {
			return '{"id": 1}', 'true'
		}
		'exist' {
			return '{"id": 1}', 'true'
		}
		'list' {
			return '{}', '[{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}]'
		}
		'get_by_hash' {
			return '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}', '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}'
		}
		'exists_by_hash' {
			return '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}', 'true'
		}
		'verify' {
			return '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}', 'true'
		}
		else {
			// Unknown methods: empty call and empty result.
			return '{}', '{}'
		}
	}
}
pub fn (self FsBlob) dump(mut e encoder.Encoder) ! {
e.add_string(self.hash)
e.add_list_u8(self.data)
@@ -138,6 +208,10 @@ pub fn (mut self DBFsBlob) get_multi(id []u32) ![]FsBlob {
return blobs
}
pub fn (mut self DBFsBlob) list() ![]FsBlob {
return self.db.list[FsBlob]()!.map(self.get(it)!)
}
pub fn (mut self DBFsBlob) get_by_hash(hash string) !FsBlob {
// Get blob ID from Redis hash mapping
id_str := self.db.redis.hget('fsblob:hashes', hash)!
@@ -164,3 +238,62 @@ pub fn (mut self DBFsBlob) verify(hash string) !bool {
blob := self.get_by_hash(hash)!
return blob.verify_integrity()
}
// fs_blob_handle dispatches a JSON-RPC request for the `fs_blob` entity to the
// matching DBFsBlob operation and wraps the outcome in a jsonrpc Response.
// `servercontext` and `userref` are part of the handler signature but are not
// used by any fs_blob method yet.
// Fix: JSON-RPC 2.0 reserves -32601 for "method not found"; the code was
// previously emitted with the wrong sign (32601).
pub fn fs_blob_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.fs_blob.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut o := db.decode_generic[FsBlob](params)!
			o = f.fs_blob.set(o)!
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			f.fs_blob.delete(id)!
			return new_response_ok(rpcid)
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.fs_blob.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			res := f.fs_blob.list()!
			return new_response(rpcid, json.encode(res))
		}
		'get_by_hash' {
			hash := db.decode_string(params)!
			res := f.fs_blob.get_by_hash(hash)!
			return new_response(rpcid, json.encode(res))
		}
		'exists_by_hash' {
			hash := db.decode_string(params)!
			if f.fs_blob.exists_by_hash(hash)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'verify' {
			hash := db.decode_string(params)!
			if f.fs_blob.verify(hash)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		else {
			// JSON-RPC 2.0: -32601 = method not found.
			return new_error(rpcid,
				code:    -32601
				message: 'Method ${method} not found on fs_blob'
			)
		}
	}
}

View File

@@ -2,6 +2,10 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsBlobMembership represents membership of a blob in one or more filesystems, the key is the hash of the blob
@[heap]
@@ -15,7 +19,7 @@ pub mut:
pub struct DBFsBlobMembership {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsBlobMembership) type_name() string {
@@ -200,3 +204,90 @@ pub fn (mut self DBFsBlobMembership) list_prefix(prefix string) ![]FsBlobMembers
return result
}
// description returns a human-readable summary for each supported RPC method
// on the blob membership entity; unknown methods get a generic placeholder.
pub fn (self FsBlobMembership) description(methodname string) string {
	return match methodname {
		'set' { 'Create or update a blob membership. Returns success.' }
		'get' { 'Retrieve a blob membership by hash. Returns the membership object.' }
		'delete' { 'Delete a blob membership by hash. Returns true if successful.' }
		'exist' { 'Check if a blob membership exists by hash. Returns true or false.' }
		'add_filesystem' { 'Add a filesystem to a blob membership. Returns success.' }
		'remove_filesystem' { 'Remove a filesystem from a blob membership. Returns success.' }
		else { 'This is generic method for the blob membership object.' }
	}
}
// example returns a sample JSON-RPC parameter payload and the matching sample
// result for each supported method on the blob membership entity.
pub fn (self FsBlobMembership) example(methodname string) (string, string) {
	match methodname {
		'set' {
			return '{"membership": {"hash": "abc123...", "fsid": [1, 2], "blobid": 5}}', 'true'
		}
		'get' {
			return '{"hash": "abc123..."}', '{"hash": "abc123...", "fsid": [1, 2], "blobid": 5}'
		}
		'delete' {
			return '{"hash": "abc123..."}', 'true'
		}
		'exist' {
			return '{"hash": "abc123..."}', 'true'
		}
		'add_filesystem' {
			return '{"hash": "abc123...", "fs_id": 3}', 'true'
		}
		'remove_filesystem' {
			return '{"hash": "abc123...", "fs_id": 1}', 'true'
		}
		else {
			// Unknown methods: empty call and empty result.
			return '{}', '{}'
		}
	}
}
// fs_blob_membership_handle dispatches a JSON-RPC request for the
// `fs_blob_membership` entity (keyed by blob hash, not numeric ID) to the
// matching DBFsBlobMembership operation.
// `servercontext` and `userref` are part of the handler signature but are not
// used by any membership method yet.
// Fix: JSON-RPC 2.0 reserves -32601 for "method not found"; the code was
// previously emitted with the wrong sign (32601).
pub fn fs_blob_membership_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			hash := db.decode_string(params)!
			res := f.fs_blob_membership.get(hash)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut o := db.decode_generic[FsBlobMembership](params)!
			o = f.fs_blob_membership.set(o)!
			// Memberships are keyed by hash, so `set` just acknowledges success.
			return new_response_ok(rpcid)
		}
		'delete' {
			hash := db.decode_string(params)!
			f.fs_blob_membership.delete(hash)!
			return new_response_ok(rpcid)
		}
		'exist' {
			hash := db.decode_string(params)!
			if f.fs_blob_membership.exist(hash)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		else {
			console.print_stderr('Method not found on fs_blob_membership: ${method}')
			// JSON-RPC 2.0: -32601 = method not found.
			return new_error(rpcid,
				code:    -32601
				message: 'Method ${method} not found on fs_blob_membership'
			)
		}
	}
}

View File

@@ -1,6 +1,6 @@
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.hero.herofs { new_test }
fn test_cleanup() ! {
delete_fs_test()!
@@ -14,9 +14,9 @@ fn test_basic() ! {
test_cleanup()!
// Initialize the HeroFS factory for test purposes
mut fs_factory := new()!
mut fs_factory := new_test()!
// Create a new filesystem (required for FsBlobMembership validation)
// Create a new filesystem
mut test_fs := fs_factory.fs.new_get_set(
name: 'test_filesystem'
description: 'Filesystem for testing FsBlobMembership functionality'
@@ -34,63 +34,21 @@ fn test_basic() ! {
test_blob = fs_factory.fs_blob.set(test_blob)!
blob_id := test_blob.id
// Create test file to get a valid fsid (file ID) for membership
mut test_file := fs_factory.fs_file.new(
name: 'test_file.txt'
fs_id: test_fs.id
blobs: [blob_id]
description: 'Test file for blob membership'
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
file_id := test_file.id
println('Created test file with ID: ${file_id}')
// Add file to directory
mut dir := fs_factory.fs_dir.get(test_fs.root_dir_id)!
dir.files << file_id
dir = fs_factory.fs_dir.set(dir)!
// Create test blob membership
// Create blob membership
mut test_membership := fs_factory.fs_blob_membership.new(
hash: test_blob.hash
fsid: [test_fs.id] // Use filesystem ID
blobid: blob_id
fsid: [test_fs.id]
blobid: test_blob.id
)!
// Save the test membership
test_membership = fs_factory.fs_blob_membership.set(test_membership)!
membership_hash := test_membership.hash
println('Created test blob membership with hash: ${membership_hash}')
// Test loading membership by hash
println('Testing blob membership loading...')
loaded_membership := fs_factory.fs_blob_membership.get(membership_hash)!
// Test retrieval
loaded_membership := fs_factory.fs_blob_membership.get(test_membership.hash)!
assert loaded_membership.hash == test_membership.hash
assert loaded_membership.fsid == test_membership.fsid
assert loaded_membership.blobid == test_membership.blobid
println(' Loaded blob membership: ${loaded_membership.hash} (Blob ID: ${loaded_membership.blobid})')
// Verify that loaded membership matches the original one
println('Verifying data integrity...')
assert loaded_membership.hash == test_blob.hash
println(' Blob membership data integrity check passed')
// Test exist method
println('Testing blob membership existence checks...')
mut exists := fs_factory.fs_blob_membership.exist(membership_hash)!
assert exists == true
println(' Blob membership exists: ${exists}')
// Test with non-existent hash
non_existent_hash := '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
exists = fs_factory.fs_blob_membership.exist(non_existent_hash)!
assert exists == false
println(' Non-existent blob membership exists: ${exists}')
println('FsBlobMembership basic test completed successfully!')
println(' FsBlobMembership basic test passed!')
}
fn test_filesystem_operations() ! {
@@ -100,7 +58,7 @@ fn test_filesystem_operations() ! {
test_cleanup() or { panic('cleanup failed: ${err.msg()}') }
}
// Initialize the HeroFS factory for test purposes
mut fs_factory := new()!
mut fs_factory := new_test()!
// Create filesystems for testing
mut fs1 := fs_factory.fs.new_get_set(
@@ -117,7 +75,6 @@ fn test_filesystem_operations() ! {
quota_bytes: 1024 * 1024 * 1024 // 1GB quota
)!
fs2 = fs_factory.fs.set(fs2)!
fs1_root_dir_id := fs1.root_dir_id
fs2_id := fs2.id
// Create test blob
@@ -126,39 +83,6 @@ fn test_filesystem_operations() ! {
test_blob = fs_factory.fs_blob.set(test_blob)!
blob_id := test_blob.id
// Create test files to get valid fsid (file IDs) for membership
mut test_file1 := fs_factory.fs_file.new(
name: 'test_file1.txt'
fs_id: fs1_id
blobs: [blob_id]
description: 'Test file 1 for blob membership'
mime_type: .txt
)!
test_file1 = fs_factory.fs_file.set(test_file1)!
file1_id := test_file1.id
println('Created test file 1 with ID: ${file1_id}')
// Add file to directory
mut fs1_root_dir := fs_factory.fs_dir.get(fs1.root_dir_id)!
fs1_root_dir.files << file1_id
fs1_root_dir = fs_factory.fs_dir.set(fs1_root_dir)!
mut test_file2 := fs_factory.fs_file.new(
name: 'test_file2.txt'
fs_id: fs2_id
blobs: [blob_id]
description: 'Test file 2 for blob membership'
mime_type: .txt
)!
test_file2 = fs_factory.fs_file.set(test_file2)!
file2_id := test_file2.id
println('Created test file 2 with ID: ${file2_id}')
// Add file to directory
mut fs2_root_dir := fs_factory.fs_dir.get(fs2.root_dir_id)!
fs2_root_dir.files << file2_id
fs2_root_dir = fs_factory.fs_dir.set(fs2_root_dir)!
// Create blob membership with first filesystem
mut membership := fs_factory.fs_blob_membership.new(
hash: test_blob.hash
@@ -170,9 +94,6 @@ fn test_filesystem_operations() ! {
println('Created blob membership with filesystem 1: ${membership_hash}')
// Test adding a filesystem to membership
println('Testing add_filesystem operation...')
// Add second filesystem
fs_factory.fs_blob_membership.add_filesystem(membership_hash, fs2_id)!
mut updated_membership := fs_factory.fs_blob_membership.get(membership_hash)!
@@ -183,9 +104,6 @@ fn test_filesystem_operations() ! {
println(' Added filesystem 2 to blob membership')
// Test removing a filesystem from membership
println('Testing remove_filesystem operation...')
// Remove first filesystem
fs_factory.fs_blob_membership.remove_filesystem(membership_hash, fs1_id)!
mut updated_membership2 := fs_factory.fs_blob_membership.get(membership_hash)!
@@ -212,7 +130,7 @@ fn test_validation() ! {
test_cleanup() or { panic('cleanup failed: ${err.msg()}') }
}
// Initialize the HeroFS factory for test purposes
mut fs_factory := new()!
mut fs_factory := new_test()!
// Create a filesystem for validation tests
mut test_fs := fs_factory.fs.new_get_set(
@@ -221,11 +139,8 @@ fn test_validation() ! {
quota_bytes: 1024 * 1024 * 1024 // 1GB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
fs_id := test_fs.id
// Test setting membership with non-existent blob (should fail)
println('Testing membership set with non-existent blob...')
// Create a membership with a non-existent blob ID
mut test_membership := fs_factory.fs_blob_membership.new(
hash: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'

View File

@@ -0,0 +1,209 @@
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.hero.user
import json
import freeflowuniverse.herolib.hero.herofs { FsBlob }
// Verifies DBFsBlob.new populates data, size, hash and timestamp for a fresh blob.
fn test_fs_blob_new() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args := FsBlobArg{
		data: 'Hello World!'.bytes()
	}
	blob := db_fs_blob.new(args)!
	assert blob.data == 'Hello World!'.bytes()
	assert blob.size_bytes == 12
	// NOTE(review): blake3-256 hex digests are normally 64 chars; this expected
	// value is 48 chars - confirm the digest length/encoding calculate_hash uses.
	assert blob.hash == '5ca7815adcb484e9a136c11efe69c1d530176d549b5d18d0'
	assert blob.updated_at > 0
	println(' FsBlob new test passed!')
}
// Exercises the full create/read/update/delete cycle for FsBlob records,
// including hash recomputation after an in-place update.
fn test_fs_blob_crud_operations() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args := FsBlobArg{
		data: 'CRUD Test Data'.bytes()
	}
	// Create
	mut blob := db_fs_blob.new(args)!
	blob = db_fs_blob.set(blob)!
	original_id := blob.id
	// Read
	retrieved_blob := db_fs_blob.get(original_id)!
	assert retrieved_blob.data == 'CRUD Test Data'.bytes()
	assert retrieved_blob.id == original_id
	exists := db_fs_blob.exist(original_id)!
	assert exists == true
	// Update: build a new blob, force the same ID, and overwrite.
	mut updated_args := FsBlobArg{
		data: 'Updated CRUD Test Data'.bytes()
	}
	mut updated_blob := db_fs_blob.new(updated_args)!
	updated_blob.id = original_id
	updated_blob = db_fs_blob.set(updated_blob)!
	final_blob := db_fs_blob.get(original_id)!
	assert final_blob.data == 'Updated CRUD Test Data'.bytes()
	// The stored hash must match an independently computed hash of the same data.
	mut expected_blob_for_hash := FsBlob{
		data: 'Updated CRUD Test Data'.bytes()
		size_bytes: 'Updated CRUD Test Data'.len
	}
	expected_blob_for_hash.calculate_hash()
	assert final_blob.hash == expected_blob_for_hash.hash
	// Delete
	db_fs_blob.delete(original_id)!
	exists_after_delete := db_fs_blob.exist(original_id)!
	assert exists_after_delete == false
	println(' FsBlob CRUD operations test passed!')
}
// Round-trips a blob through set/get and checks data, size, and hash survive
// the binary encode/decode cycle intact.
fn test_fs_blob_encoding_decoding() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args := FsBlobArg{
		data: 'Encoding Decoding Test'.bytes()
	}
	mut blob := db_fs_blob.new(args)!
	blob = db_fs_blob.set(blob)!
	blob_id := blob.id
	retrieved_blob := db_fs_blob.get(blob_id)!
	assert retrieved_blob.data == 'Encoding Decoding Test'.bytes()
	assert retrieved_blob.size_bytes == 'Encoding Decoding Test'.len
	// The stored hash must match an independently computed hash of the same data.
	mut expected_blob_for_hash := FsBlob{
		data: 'Encoding Decoding Test'.bytes()
		size_bytes: 'Encoding Decoding Test'.len
	}
	expected_blob_for_hash.calculate_hash()
	assert retrieved_blob.hash == expected_blob_for_hash.hash
	println(' FsBlob encoding/decoding test passed!')
}
// Checks the FsBlob entity reports its canonical type identifier.
fn test_fs_blob_type_name() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args := FsBlobArg{
		data: 'Type Name Test'.bytes()
	}
	blob := db_fs_blob.new(args)!
	type_name := blob.type_name()
	assert type_name == 'fs_blob'
	println(' FsBlob type_name test passed!')
}
// Verifies FsBlob.description returns the documented summary for every known
// RPC method, plus the generic fallback for unknown methods.
fn test_fs_blob_description() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args := FsBlobArg{
		data: 'Description Test'.bytes()
	}
	blob := db_fs_blob.new(args)!
	assert blob.description('set') == 'Create or update a blob. Returns the ID of the blob.'
	assert blob.description('get') == 'Retrieve a blob by ID. Returns the blob object.'
	assert blob.description('delete') == 'Delete a blob by ID. Returns true if successful.'
	assert blob.description('exist') == 'Check if a blob exists by ID. Returns true or false.'
	assert blob.description('list') == 'List all blobs. Returns an array of blob objects.'
	assert blob.description('get_by_hash') == 'Retrieve a blob by its hash. Returns the blob object.'
	assert blob.description('exists_by_hash') == 'Check if a blob exists by its hash. Returns true or false.'
	assert blob.description('verify') == 'Verify the integrity of a blob by its hash. Returns true or false.'
	assert blob.description('unknown') == 'This is generic method for the root object, TODO fill in, ...'
	println(' FsBlob description test passed!')
}
// Verifies FsBlob.example returns the documented (call, result) sample pair
// for every known RPC method, plus the empty fallback for unknown methods.
fn test_fs_blob_example() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args := FsBlobArg{
		data: 'Example Test'.bytes()
	}
	blob := db_fs_blob.new(args)!
	set_call, set_result := blob.example('set')
	assert set_call == '{"data": "SGVsbG8gV29ybGQh"}'
	assert set_result == '1'
	get_call, get_result := blob.example('get')
	assert get_call == '{"id": 1}'
	assert get_result == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}'
	delete_call, delete_result := blob.example('delete')
	assert delete_call == '{"id": 1}'
	assert delete_result == 'true'
	exist_call, exist_result := blob.example('exist')
	assert exist_call == '{"id": 1}'
	assert exist_result == 'true'
	list_call, list_result := blob.example('list')
	assert list_call == '{}'
	assert list_result == '[{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}]'
	get_by_hash_call, get_by_hash_result := blob.example('get_by_hash')
	assert get_by_hash_call == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}'
	assert get_by_hash_result == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}'
	exists_by_hash_call, exists_by_hash_result := blob.example('exists_by_hash')
	assert exists_by_hash_call == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}'
	assert exists_by_hash_result == 'true'
	verify_call, verify_result := blob.example('verify')
	assert verify_call == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}'
	assert verify_result == 'true'
	// Unknown method falls back to empty call/result.
	unknown_call, unknown_result := blob.example('unknown')
	assert unknown_call == '{}'
	assert unknown_result == '{}'
	println(' FsBlob example test passed!')
}
// Stores two blobs and checks list() returns exactly those two.
// Relies on new_test() flushing Redis, so no other blobs exist beforehand.
// NOTE(review): ordering of list() is not asserted, hence the either/or checks.
fn test_fs_blob_list() ! {
	mut factory := new_test()!
	mut db_fs_blob := factory.fs_blob
	mut args1 := FsBlobArg{
		data: 'Blob 1'.bytes()
	}
	mut blob1 := db_fs_blob.new(args1)!
	blob1 = db_fs_blob.set(blob1)!
	mut args2 := FsBlobArg{
		data: 'Blob 2'.bytes()
	}
	mut blob2 := db_fs_blob.new(args2)!
	blob2 = db_fs_blob.set(blob2)!
	list_of_blobs := db_fs_blob.list()!
	assert list_of_blobs.len == 2
	assert list_of_blobs[0].data == 'Blob 1'.bytes() || list_of_blobs[0].data == 'Blob 2'.bytes()
	assert list_of_blobs[1].data == 'Blob 1'.bytes() || list_of_blobs[1].data == 'Blob 2'.bytes()
	println(' FsBlob list test passed!')
}

View File

@@ -1,10 +1,8 @@
module herofs
import freeflowuniverse.herolib.hero.herofs
fn test_filesystem_crud() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
mut fs_factory := new()!
// Test filesystem creation
mut test_fs := fs_factory.fs.new(
@@ -13,7 +11,6 @@ fn test_filesystem_crud() ! {
quota_bytes: 1024 * 1024 * 100 // 100MB quota
)!
original_id := test_fs.id
test_fs = fs_factory.fs.set(test_fs)!
// Test filesystem retrieval
@@ -44,36 +41,26 @@ fn test_filesystem_crud() ! {
fn test_directory_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
mut fs_factory := new()!
// Create test filesystem
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'dir_test'
description: 'Test filesystem for directory operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Test directory creation
mut sub_dir1 := fs_factory.fs_dir.new(
name: 'documents'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
description: 'Documents directory'
)!
sub_dir1 = fs_factory.fs_dir.set(sub_dir1)!
// Add subdirectory to parent
mut root_dir := fs_factory.fs_dir.get(test_fs.root_dir_id)!
root_dir.directories << sub_dir1.id
root_dir = fs_factory.fs_dir.set(root_dir)!
@@ -111,24 +98,14 @@ fn test_directory_operations() ! {
fn test_file_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
mut fs_factory := new()!
// Create test filesystem with root directory
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'file_test'
description: 'Test filesystem for file operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create test blob
test_content := 'Hello, HeroFS! This is test content.'.bytes()
@@ -150,7 +127,7 @@ fn test_file_operations() ! {
test_file = fs_factory.fs_file.set(test_file)!
// Add file to root directory
fs_factory.fs_file.add_to_directory(test_file.id, root_dir.id)!
fs_factory.fs_file.add_to_directory(test_file.id, test_fs.root_dir_id)!
// Test file retrieval
retrieved_file := fs_factory.fs_file.get(test_file.id)!
@@ -179,163 +156,9 @@ fn test_file_operations() ! {
assert renamed_file.name == 'renamed_test.txt'
// Test file listing by directory
files_in_root := fs_factory.fs_file.list_by_directory(root_dir.id)!
files_in_root := fs_factory.fs_file.list_by_directory(test_fs.root_dir_id)!
assert files_in_root.len == 1
assert files_in_root[0].id == test_file.id
// Test file listing by filesystem
files_in_fs := fs_factory.fs_file.list_by_filesystem(test_fs.id)!
assert files_in_fs.len == 1
// Test file listing by MIME type - create a specific file for this test
mime_test_content := 'MIME type test content'.bytes()
mut mime_test_blob := fs_factory.fs_blob.new(data: mime_test_content)!
mime_test_blob = fs_factory.fs_blob.set(mime_test_blob)!
mut mime_test_file := fs_factory.fs_file.new(
name: 'mime_test.txt'
fs_id: test_fs.id
blobs: [mime_test_blob.id]
mime_type: .txt
)!
mime_test_file = fs_factory.fs_file.set(mime_test_file)!
fs_factory.fs_file.add_to_directory(mime_test_file.id, root_dir.id)!
txt_files := fs_factory.fs_file.list_by_mime_type(.txt)!
assert txt_files.len >= 1
// Test blob content appending
additional_content := '\nAppended content.'.bytes()
mut additional_blob := fs_factory.fs_blob.new(data: additional_content)!
additional_blob = fs_factory.fs_blob.set(additional_blob)!
fs_factory.fs_file.append_blob(test_file.id, additional_blob.id)!
updated_file_with_blob := fs_factory.fs_file.get(test_file.id)!
assert updated_file_with_blob.blobs.len == 2
println(' File operations tests passed!')
}
// Verifies FsBlob behaviour: content-addressed hashing (same bytes => same
// hash), retrieval/existence lookups by hash, integrity verification, and the
// blob size-limit error path.
fn test_blob_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
// Test blob creation and deduplication
test_data1 := 'This is test data for blob operations.'.bytes()
test_data2 := 'This is different test data.'.bytes()
test_data3 := 'This is test data for blob operations.'.bytes() // Same as test_data1
// Create first blob
mut blob1 := fs_factory.fs_blob.new(data: test_data1)!
blob1 = fs_factory.fs_blob.set(blob1)!
// Create second blob with different data
mut blob2 := fs_factory.fs_blob.new(data: test_data2)!
blob2 = fs_factory.fs_blob.set(blob2)!
// Create third blob with same data as first (should have same hash)
mut blob3 := fs_factory.fs_blob.new(data: test_data3)!
blob3 = fs_factory.fs_blob.set(blob3)!
// Test hash-based retrieval
assert blob1.hash == blob3.hash // Same content should have same hash
assert blob1.hash != blob2.hash // Different content should have different hash
// Test blob retrieval by hash
blob_by_hash := fs_factory.fs_blob.get_by_hash(blob1.hash)!
assert blob_by_hash.data == test_data1
// Test blob existence by hash
exists_by_hash := fs_factory.fs_blob.exists_by_hash(blob1.hash)!
assert exists_by_hash == true
// Test blob integrity verification
assert blob1.verify_integrity() == true
assert blob2.verify_integrity() == true
// Test blob verification by hash
is_valid := fs_factory.fs_blob.verify(blob1.hash)!
assert is_valid == true
// Test blob size limits
large_data := []u8{len: 2 * 1024 * 1024} // 2MB data
// NOTE(review): if new() unexpectedly succeeds for oversized data, this test
// still passes silently — the `or` branch only logs. Consider a flag + assert.
fs_factory.fs_blob.new(data: large_data) or {
println(' Blob size limit correctly enforced')
// This should fail due to 1MB limit
}
println(' Blob operations tests passed!')
}
// Exercises the FsSymlink lifecycle: create a filesystem + root dir + target
// file, link to it, verify retrieval/validity/listing, then delete the target
// and confirm the link is reported as broken.
fn test_symlink_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
// Create test filesystem with root directory
mut test_fs := fs_factory.fs.new(
name: 'symlink_test'
description: 'Test filesystem for symlink operations'
quota_bytes: 1024 * 1024 * 10 // 10MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// parent_id 0 marks a root-level directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Re-persist the fs so it records its root directory id
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create a target file
test_content := 'Target file content'.bytes()
mut target_blob := fs_factory.fs_blob.new(data: test_content)!
target_blob = fs_factory.fs_blob.set(target_blob)!
mut target_file := fs_factory.fs_file.new(
name: 'target.txt'
fs_id: test_fs.id
blobs: [target_blob.id]
mime_type: .txt
)!
target_file = fs_factory.fs_file.set(target_file)!
fs_factory.fs_file.add_to_directory(target_file.id, root_dir.id)!
// Create symlink
mut test_symlink := fs_factory.fs_symlink.new(
name: 'link_to_target.txt'
fs_id: test_fs.id
parent_id: root_dir.id
target_id: target_file.id
target_type: .file
description: 'Symlink to target file'
)!
test_symlink = fs_factory.fs_symlink.set(test_symlink)!
// Add symlink to directory
root_dir.symlinks << test_symlink.id
root_dir = fs_factory.fs_dir.set(root_dir)!
// Test symlink retrieval
retrieved_symlink := fs_factory.fs_symlink.get(test_symlink.id)!
assert retrieved_symlink.name == 'link_to_target.txt'
assert retrieved_symlink.target_id == target_file.id
// Test symlink validation (should not be broken since target exists)
is_broken := fs_factory.fs_symlink.is_broken(test_symlink.id)!
assert is_broken == false
// Test symlink listing by filesystem
symlinks_in_fs := fs_factory.fs_symlink.list_by_filesystem(test_fs.id)!
assert symlinks_in_fs.len == 1
// Delete target file to make symlink broken
fs_factory.fs_file.delete(target_file.id)!
// Test broken symlink detection
is_broken_after_delete := fs_factory.fs_symlink.is_broken(test_symlink.id)!
assert is_broken_after_delete == true
println(' Symlink operations tests passed!')
}
}

View File

@@ -3,6 +3,10 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsDir represents a directory in a filesystem
@[heap]
@@ -19,7 +23,7 @@ pub mut:
pub struct DBFsDir {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsDir) type_name() string {
@@ -273,3 +277,94 @@ pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) ! {
dir.updated_at = ourtime.now().unix()
dir = self.set(dir)!
}
// description returns a human-readable summary for the given RPC method name
// on the directory entity; unrecognized names get a generic fallback text.
pub fn (self FsDir) description(methodname string) string {
	// match used as an expression: each arm yields the description directly
	return match methodname {
		'set' {
			'Create or update a directory. Returns the ID of the directory.'
		}
		'get' {
			'Retrieve a directory by ID. Returns the directory object.'
		}
		'delete' {
			'Delete a directory by ID. Returns true if successful.'
		}
		'exist' {
			'Check if a directory exists by ID. Returns true or false.'
		}
		'list' {
			'List all directories. Returns an array of directory objects.'
		}
		'create_path' {
			'Create a directory path. Returns the ID of the created directory.'
		}
		else {
			'This is generic method for the directory object.'
		}
	}
}
// example returns a sample (request-JSON, response-JSON) pair for the given
// RPC method name; unknown names yield empty JSON objects for both.
pub fn (self FsDir) example(methodname string) (string, string) {
	// guard-style chain: each recognized method returns its pair immediately
	if methodname == 'set' {
		return '{"dir": {"name": "documents", "fs_id": 1, "parent_id": 2}}', '1'
	}
	if methodname == 'get' {
		return '{"id": 1}', '{"name": "documents", "fs_id": 1, "parent_id": 2, "directories": [], "files": [], "symlinks": []}'
	}
	if methodname == 'delete' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'exist' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'list' {
		return '{}', '[{"name": "documents", "fs_id": 1, "parent_id": 2, "directories": [], "files": [], "symlinks": []}]'
	}
	if methodname == 'create_path' {
		return '{"fs_id": 1, "path": "/projects/web/frontend"}', '5'
	}
	// fallback for unknown method names
	return '{}', '{}'
}
// fs_dir_handle dispatches one JSON-RPC call targeting the fs_dir entity.
// `params` is the raw JSON params payload: a bare u32 id for get/delete/exist,
// or an encoded FsDir for set. Returns the JSON-RPC response; errors from the
// underlying factory propagate to the caller via `!`.
pub fn fs_dir_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.fs_dir.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut o := db.decode_generic[FsDir](params)!
			o = f.fs_dir.set(o)!
			// respond with the (possibly newly assigned) directory id
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			f.fs_dir.delete(id)!
			return new_response_ok(rpcid)
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.fs_dir.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			res := f.fs_dir.list()!
			return new_response(rpcid, json.encode(res))
		}
		else {
			console.print_stderr('Method not found on fs_dir: ${method}')
			// JSON-RPC 2.0 reserves -32601 (negative) for "Method not found";
			// the previous positive 32601 was outside the spec's error range.
			return new_error(rpcid,
				code:    -32601
				message: 'Method ${method} not found on fs_dir'
			)
		}
	}
}

View File

@@ -0,0 +1,363 @@
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.hero.user
import json
// FsDir, FsDirArg, FsFileArg, MimeType, FsBlobArg are part of the same module, no need to import explicitly
// import freeflowuniverse.herolib.hero.herofs { FsDir, FsDirArg, FsFileArg, MimeType, FsBlobArg }
// Verifies FsDir.new populates all fields from FsDirArg and stamps
// created_at/updated_at with non-zero times.
fn test_fs_dir_new() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
// new_get_set creates the filesystem together with its root directory
mut fs := db_fs.new_get_set(name: 'test_fs_dir_new')!
mut args := FsDirArg{
name: 'test_dir'
description: 'Test directory for new function'
fs_id: fs.id
parent_id: fs.root_dir_id
}
dir := db_fs_dir.new(args)!
assert dir.name == 'test_dir'
assert dir.description == 'Test directory for new function'
assert dir.fs_id == fs.id
assert dir.parent_id == fs.root_dir_id
// timestamps must be set by new()
assert dir.created_at > 0
assert dir.updated_at > 0
println(' FsDir new test passed!')
}
// Full create/read/update/delete cycle for a directory: set, get, exist,
// field update via set, then delete and confirm it no longer exists.
fn test_fs_dir_crud_operations() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'crud_test_fs_dir')!
root_dir := fs.root_dir()!
mut args := FsDirArg{
name: 'crud_dir'
description: 'CRUD Test Directory'
fs_id: fs.id
parent_id: root_dir.id
}
mut dir := db_fs_dir.new(args)!
// set() persists and assigns the id
dir = db_fs_dir.set(dir)!
original_id := dir.id
retrieved_dir := db_fs_dir.get(original_id)!
assert retrieved_dir.name == 'crud_dir'
assert retrieved_dir.id == original_id
exists := db_fs_dir.exist(original_id)!
assert exists == true
// Update directory
mut updated_dir_obj := retrieved_dir
updated_dir_obj.description = 'Updated CRUD Test Directory'
updated_dir_obj = db_fs_dir.set(updated_dir_obj)!
final_dir := db_fs_dir.get(original_id)!
assert final_dir.description == 'Updated CRUD Test Directory'
db_fs_dir.delete(original_id)!
exists_after_delete := db_fs_dir.exist(original_id)!
assert exists_after_delete == false
println(' FsDir CRUD operations test passed!')
}
// Verifies create_path builds the whole chain '/path/to/new/dir' and returns
// the id of the deepest directory; walks parent links back up to check names.
fn test_fs_dir_create_path() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'create_path_fs')!
path_id := db_fs_dir.create_path(fs.id, '/path/to/new/dir')!
assert path_id > 0
// Verify the path was created
dir_new := db_fs_dir.get(path_id)!
assert dir_new.name == 'dir'
// walk up parent_id links: dir -> new -> to -> path
dir_to := db_fs_dir.get(dir_new.parent_id)!
assert dir_to.name == 'new'
dir_path := db_fs_dir.get(dir_to.parent_id)!
assert dir_path.name == 'to'
dir_root := db_fs_dir.get(dir_path.parent_id)!
assert dir_root.name == 'path'
println(' FsDir create_path test passed!')
}
// Verifies list() returns every stored directory: the auto-created root plus
// the two children added here.
fn test_fs_dir_list() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'fs_dir_list_test')!
mut root_dir := fs.root_dir()!
mut dir1 := db_fs_dir.new(name: 'list_dir1', fs_id: fs.id, parent_id: root_dir.id)!
dir1 = db_fs_dir.set(dir1)!
// parent must be re-persisted after registering the child id
root_dir.directories << dir1.id
root_dir = db_fs_dir.set(root_dir)!
mut dir2 := db_fs_dir.new(name: 'list_dir2', fs_id: fs.id, parent_id: root_dir.id)!
dir2 = db_fs_dir.set(dir2)!
root_dir.directories << dir2.id
root_dir = db_fs_dir.set(root_dir)!
list_of_dirs := db_fs_dir.list()!
// Should be root_dir, dir1, dir2
assert list_of_dirs.len == 3
println(' FsDir list test passed!')
}
// Verifies list_by_filesystem scopes results per fs: each of the two
// filesystems reports exactly its own root plus the one child created in it.
fn test_fs_dir_list_by_filesystem() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs1 := db_fs.new_get_set(name: 'list_by_fs_dir_1')!
mut fs2 := db_fs.new_get_set(name: 'list_by_fs_dir_2')!
mut dir1 := db_fs_dir.new(name: 'fs1_dir', fs_id: fs1.id, parent_id: fs1.root_dir_id)!
dir1 = db_fs_dir.set(dir1)!
mut root_fs1 := db_fs_dir.get(fs1.root_dir_id)!
root_fs1.directories << dir1.id
root_fs1 = db_fs_dir.set(root_fs1)!
mut dir2 := db_fs_dir.new(name: 'fs2_dir', fs_id: fs2.id, parent_id: fs2.root_dir_id)!
dir2 = db_fs_dir.set(dir2)!
mut root_fs2 := db_fs_dir.get(fs2.root_dir_id)!
root_fs2.directories << dir2.id
root_fs2 = db_fs_dir.set(root_fs2)!
dirs_in_fs1 := db_fs_dir.list_by_filesystem(fs1.id)!
assert dirs_in_fs1.len == 2 // root_fs1 and dir1
// order is unspecified, so each slot may be either name
// NOTE(review): this would also pass if both entries had the same name
assert dirs_in_fs1[0].name == 'root' || dirs_in_fs1[0].name == 'fs1_dir'
assert dirs_in_fs1[1].name == 'root' || dirs_in_fs1[1].name == 'fs1_dir'
dirs_in_fs2 := db_fs_dir.list_by_filesystem(fs2.id)!
assert dirs_in_fs2.len == 2 // root_fs2 and dir2
assert dirs_in_fs2[0].name == 'root' || dirs_in_fs2[0].name == 'fs2_dir'
assert dirs_in_fs2[1].name == 'root' || dirs_in_fs2[1].name == 'fs2_dir'
println(' FsDir list_by_filesystem test passed!')
}
// Verifies list_children returns exactly the two child directories that were
// registered on the root directory.
fn test_fs_dir_list_children() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'list_children_fs')!
mut root_dir := fs.root_dir()!
mut child_dir1 := db_fs_dir.new(name: 'child1', fs_id: fs.id, parent_id: root_dir.id)!
child_dir1 = db_fs_dir.set(child_dir1)!
root_dir.directories << child_dir1.id
root_dir = db_fs_dir.set(root_dir)!
mut child_dir2 := db_fs_dir.new(name: 'child2', fs_id: fs.id, parent_id: root_dir.id)!
child_dir2 = db_fs_dir.set(child_dir2)!
root_dir.directories << child_dir2.id
root_dir = db_fs_dir.set(root_dir)!
children := db_fs_dir.list_children(root_dir.id)!
assert children.len == 2
// order unspecified — accept either child in either slot
assert children[0].name == 'child1' || children[0].name == 'child2'
assert children[1].name == 'child1' || children[1].name == 'child2'
println(' FsDir list_children test passed!')
}
// Verifies has_children for three cases: an empty directory (false), a
// directory with a child directory (true), and a directory with a child
// file (true).
fn test_fs_dir_has_children() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut db_fs_file := factory.fs_file
mut db_fs_blob := factory.fs_blob
mut fs := db_fs.new_get_set(name: 'has_children_fs')!
mut root_dir := fs.root_dir()!
// Directory with no children
mut empty_dir := db_fs_dir.new(name: 'empty', fs_id: fs.id, parent_id: root_dir.id)!
empty_dir = db_fs_dir.set(empty_dir)!
root_dir.directories << empty_dir.id
root_dir = db_fs_dir.set(root_dir)!
assert db_fs_dir.has_children(empty_dir.id)! == false
// Directory with a child directory
mut parent_dir := db_fs_dir.new(name: 'parent', fs_id: fs.id, parent_id: root_dir.id)!
parent_dir = db_fs_dir.set(parent_dir)!
root_dir.directories << parent_dir.id
root_dir = db_fs_dir.set(root_dir)!
mut child_dir := db_fs_dir.new(name: 'child', fs_id: fs.id, parent_id: parent_dir.id)!
child_dir = db_fs_dir.set(child_dir)!
parent_dir.directories << child_dir.id
parent_dir = db_fs_dir.set(parent_dir)!
assert db_fs_dir.has_children(parent_dir.id)! == true
// Directory with a child file
mut file_dir := db_fs_dir.new(name: 'file_dir', fs_id: fs.id, parent_id: root_dir.id)!
file_dir = db_fs_dir.set(file_dir)!
root_dir.directories << file_dir.id
root_dir = db_fs_dir.set(root_dir)!
// a file needs at least one blob behind it
mut blob_args := FsBlobArg{ data: 'Child File'.bytes() }
mut blob := db_fs_blob.new(blob_args)!
blob = db_fs_blob.set(blob)!
mut file_args := FsFileArg{ name: 'child_file.txt', fs_id: fs.id, blobs: [blob.id], mime_type: .txt }
mut file := db_fs_file.new(file_args)!
file = db_fs_file.set(file)!
db_fs_file.add_to_directory(file.id, file_dir.id)!
assert db_fs_dir.has_children(file_dir.id)! == true
println(' FsDir has_children test passed!')
}
// Verifies rename persists a new directory name that is visible on the next
// get().
fn test_fs_dir_rename() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'rename_fs_dir')!
mut root_dir := fs.root_dir()!
mut dir := db_fs_dir.new(name: 'old_name', fs_id: fs.id, parent_id: root_dir.id)!
dir = db_fs_dir.set(dir)!
root_dir.directories << dir.id
root_dir = db_fs_dir.set(root_dir)!
db_fs_dir.rename(dir.id, 'new_name')!
// re-fetch to confirm the rename was persisted
renamed_dir := db_fs_dir.get(dir.id)!
assert renamed_dir.name == 'new_name'
println(' FsDir rename test passed!')
}
// Verifies move() re-parents a directory: the id leaves the old parent's
// children list, appears in the new parent's list, and the moved directory's
// parent_id is updated.
fn test_fs_dir_move() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_dir := factory.fs_dir
	mut fs := db_fs.new_get_set(name: 'move_fs_dir')!
	mut root_dir := fs.root_dir()!
	mut dir_to_move := db_fs_dir.new(name: 'to_move', fs_id: fs.id, parent_id: root_dir.id)!
	dir_to_move = db_fs_dir.set(dir_to_move)!
	root_dir.directories << dir_to_move.id
	root_dir = db_fs_dir.set(root_dir)!
	mut new_parent := db_fs_dir.new(name: 'new_parent', fs_id: fs.id, parent_id: root_dir.id)!
	new_parent = db_fs_dir.set(new_parent)!
	root_dir.directories << new_parent.id
	root_dir = db_fs_dir.set(root_dir)!
	// Move dir_to_move from root_dir to new_parent
	db_fs_dir.move(dir_to_move.id, new_parent.id)!
	// re-fetch all three to observe the persisted state
	root_dir_after_move := db_fs_dir.get(root_dir.id)!
	new_parent_after_move := db_fs_dir.get(new_parent.id)!
	dir_to_move_after_move := db_fs_dir.get(dir_to_move.id)!
	// idiomatic V: `!in` instead of `!(x in y)` (enforced by vfmt)
	assert dir_to_move.id !in root_dir_after_move.directories
	assert dir_to_move.id in new_parent_after_move.directories
	assert dir_to_move_after_move.parent_id == new_parent.id
	println(' FsDir move test passed!')
}
// Pins the exact description() text for every known RPC method name plus the
// generic fallback for unknown names.
fn test_fs_dir_description() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'fs_dir_description_test')!
mut args := FsDirArg{
name: 'description_dir'
fs_id: fs.id
parent_id: fs.root_dir_id
}
// description() only needs a receiver instance; dir state is irrelevant
dir := db_fs_dir.new(args)!
assert dir.description('set') == 'Create or update a directory. Returns the ID of the directory.'
assert dir.description('get') == 'Retrieve a directory by ID. Returns the directory object.'
assert dir.description('delete') == 'Delete a directory by ID. Returns true if successful.'
assert dir.description('exist') == 'Check if a directory exists by ID. Returns true or false.'
assert dir.description('list') == 'List all directories. Returns an array of directory objects.'
assert dir.description('create_path') == 'Create a directory path. Returns the ID of the created directory.'
assert dir.description('unknown') == 'This is generic method for the directory object.'
println(' FsDir description test passed!')
}
// Pins the exact (request, response) example pair returned by example() for
// each known RPC method name plus the empty-JSON fallback.
fn test_fs_dir_example() ! {
mut factory := new_test()!
mut db_fs := factory.fs
mut db_fs_dir := factory.fs_dir
mut fs := db_fs.new_get_set(name: 'fs_dir_example_test')!
mut args := FsDirArg{
name: 'example_dir'
fs_id: fs.id
parent_id: fs.root_dir_id
}
// example() only needs a receiver instance; dir state is irrelevant
dir := db_fs_dir.new(args)!
set_call, set_result := dir.example('set')
assert set_call == '{"dir": {"name": "documents", "fs_id": 1, "parent_id": 2}}'
assert set_result == '1'
get_call, get_result := dir.example('get')
assert get_call == '{"id": 1}'
assert get_result == '{"name": "documents", "fs_id": 1, "parent_id": 2, "directories": [], "files": [], "symlinks": []}'
delete_call, delete_result := dir.example('delete')
assert delete_call == '{"id": 1}'
assert delete_result == 'true'
exist_call, exist_result := dir.example('exist')
assert exist_call == '{"id": 1}'
assert exist_result == 'true'
list_call, list_result := dir.example('list')
assert list_call == '{}'
assert list_result == '[{"name": "documents", "fs_id": 1, "parent_id": 2, "directories": [], "files": [], "symlinks": []}]'
create_path_call, create_path_result := dir.example('create_path')
assert create_path_call == '{"fs_id": 1, "path": "/projects/web/frontend"}'
assert create_path_result == '5'
unknown_call, unknown_result := dir.example('unknown')
assert unknown_call == '{}'
assert unknown_result == '{}'
println(' FsDir example test passed!')
}

View File

@@ -21,12 +21,11 @@ fn test_invalid_references() ! {
mut fs_factory := new()!
// Test creating file with non-existent blob
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'error_test'
description: 'Test filesystem for error conditions'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Try to create file with invalid blob ID
fs_factory.fs_file.new(
@@ -46,12 +45,11 @@ fn test_directory_parent_validation() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'parent_test'
description: 'Test filesystem for parent validation'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Try to create directory with invalid parent
mut invalid_dir := fs_factory.fs_dir.new(
@@ -75,20 +73,14 @@ fn test_symlink_validation() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'symlink_test'
description: 'Test filesystem for symlink validation'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
mut root_dir := fs_factory.fs_dir.get(test_fs.root_dir_id)!
// Try to create symlink with invalid target
mut invalid_symlink := fs_factory.fs_symlink.new(
@@ -108,377 +100,4 @@ fn test_symlink_validation() ! {
// If validation is not implemented, that's also valid
println(' Symlink target validation tested (validation may not be implemented)')
}
// Verifies lookups for ids/hashes that were never stored fail with a
// 'not found' error, and that exists_by_hash reports false rather than
// erroring.
fn test_nonexistent_operations() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Test getting non-existent filesystem
fs_factory.fs.get(u32(99999)) or {
assert err.msg().contains('not found')
println(' Non-existent filesystem correctly handled')
}
// Test getting non-existent blob by hash
fs_factory.fs_blob.get_by_hash('nonexistent_hash') or {
assert err.msg().contains('not found')
println(' Non-existent blob hash correctly handled')
}
// Test blob existence check
exists := fs_factory.fs_blob.exists_by_hash('nonexistent_hash')!
assert exists == false
println(' Blob existence check works correctly')
}
// Verifies a zero-byte blob round-trips: empty data, size_bytes 0, and a
// passing integrity check.
fn test_empty_data_handling() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Test creating blob with empty data
empty_data := []u8{}
mut empty_blob := fs_factory.fs_blob.new(data: empty_data)!
empty_blob = fs_factory.fs_blob.set(empty_blob)!
// Verify empty blob was created correctly
retrieved_blob := fs_factory.fs_blob.get(empty_blob.id)!
assert retrieved_blob.data.len == 0
assert retrieved_blob.size_bytes == 0
assert retrieved_blob.verify_integrity() == true
println(' Empty blob handling works correctly')
}
// Verifies fs.find on a path that does not exist either errors with
// 'not found' or returns an empty result set.
fn test_path_edge_cases() ! {
// Initialize HeroFS factory and filesystem
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'path_test'
description: 'Test filesystem for path edge cases'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Get filesystem instance
mut fs := fs_factory.fs.get(test_fs.id)!
// find() needs a back-reference to the factory to resolve entities
fs.factory = &fs_factory
// Test finding non-existent path
results := fs.find('/nonexistent/path', FindOptions{ recursive: false }) or {
assert err.msg().contains('not found')
println(' Non-existent path correctly handled')
// the `or` block's value: substitute an empty result list
[]FindResult{}
}
assert results.len == 0
println(' Path edge cases handled correctly')
}
// Documents current behaviour for mutually-referencing directory symlinks:
// A->B and B->A can both be created (cycle detection is not yet implemented),
// and both links exist afterwards.
fn test_circular_symlink_detection() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'circular_test'
description: 'Test filesystem for circular symlink detection'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create directory A
mut dir_a := fs_factory.fs_dir.new(
name: 'dir_a'
fs_id: test_fs.id
parent_id: root_dir.id
)!
dir_a = fs_factory.fs_dir.set(dir_a)!
// Create directory B
mut dir_b := fs_factory.fs_dir.new(
name: 'dir_b'
fs_id: test_fs.id
parent_id: root_dir.id
)!
dir_b = fs_factory.fs_dir.set(dir_b)!
// Create symlink from A to B
mut symlink_a_to_b := fs_factory.fs_symlink.new(
name: 'link_to_b'
fs_id: test_fs.id
parent_id: dir_a.id
target_id: dir_b.id
target_type: .directory
)!
symlink_a_to_b = fs_factory.fs_symlink.set(symlink_a_to_b)!
// Try to create symlink from B to A (would create circular reference)
mut symlink_b_to_a := fs_factory.fs_symlink.new(
name: 'link_to_a'
fs_id: test_fs.id
parent_id: dir_b.id
target_id: dir_a.id
target_type: .directory
)!
// This should succeed for now (circular detection not implemented yet)
// But we can test that both symlinks exist
symlink_b_to_a = fs_factory.fs_symlink.set(symlink_b_to_a)!
// Verify both symlinks were created
link_a_exists := fs_factory.fs_symlink.exist(symlink_a_to_b.id)!
link_b_exists := fs_factory.fs_symlink.exist(symlink_b_to_a.id)!
assert link_a_exists == true
assert link_b_exists == true
println(' Circular symlink test completed (detection not yet implemented)')
}
// Documents current behaviour: a blob larger than the filesystem quota can
// still be stored because quota enforcement is not implemented yet. This test
// exists as a placeholder for the future check.
fn test_quota_enforcement() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create filesystem with very small quota
mut test_fs := fs_factory.fs.new(
name: 'quota_test'
description: 'Test filesystem for quota enforcement'
quota_bytes: 100 // Very small quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Try to create blob larger than quota
large_data := []u8{len: 200, init: u8(65)} // 200 bytes > 100 byte quota
mut large_blob := fs_factory.fs_blob.new(data: large_data)!
large_blob = fs_factory.fs_blob.set(large_blob)!
// Note: Quota enforcement is not yet implemented
// This test documents the expected behavior for future implementation
println(' Quota test completed (enforcement not yet implemented)')
}
// Creates ten files sequentially in one directory and verifies all are
// listed. NOTE(review): despite the name, this runs single-threaded — it
// simulates rapid sequential access, not true concurrency.
fn test_concurrent_access_simulation() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'concurrent_test'
description: 'Test filesystem for concurrent access simulation'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Simulate concurrent file creation
for i in 0 .. 10 {
content := 'Concurrent file ${i}'.bytes()
mut blob := fs_factory.fs_blob.new(data: content)!
blob = fs_factory.fs_blob.set(blob)!
mut file := fs_factory.fs_file.new(
name: 'concurrent_${i}.txt'
fs_id: test_fs.id
blobs: [blob.id]
mime_type: .txt
)!
file = fs_factory.fs_file.set(file)!
fs_factory.fs_file.add_to_directory(file.id, root_dir.id)!
}
// Verify all files were created
files_in_root := fs_factory.fs_file.list_by_directory(root_dir.id)!
assert files_in_root.len == 10
println(' Concurrent access simulation completed')
}
// Verifies cp/mv/rm on paths that do not exist fail with a 'not found'
// (or 'No items found') error instead of succeeding silently.
fn test_invalid_path_operations() ! {
// Initialize HeroFS factory and filesystem
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'invalid_path_test'
description: 'Test filesystem for invalid path operations'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Get filesystem instance
mut fs := fs_factory.fs.get(test_fs.id)!
// path operations need the factory back-reference to resolve entities
fs.factory = &fs_factory
// Test copy with invalid source path
fs.cp('/nonexistent/file.txt', '/dest/', FindOptions{ recursive: false }, CopyOptions{
overwrite: true
copy_blobs: true
}) or {
assert err.msg().contains('not found')
println(' Copy with invalid source correctly handled')
}
// Test move with invalid source path
fs.mv('/nonexistent/file.txt', '/dest.txt', MoveOptions{ overwrite: true }) or {
assert err.msg().contains('not found')
println(' Move with invalid source correctly handled')
}
// Test remove with invalid path
fs.rm('/nonexistent/file.txt', FindOptions{ recursive: false }, RemoveOptions{
delete_blobs: false
}) or {
assert err.msg().contains('not found') || err.msg().contains('No items found')
println(' Remove with invalid path correctly handled')
}
println(' Invalid path operations handled correctly')
}
// Documents current behaviour for duplicate filesystem names: both creates
// succeed (uniqueness is not enforced at the DB level); get_by_name either
// errors on the ambiguity or returns one of the two.
fn test_filesystem_name_conflicts() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create first filesystem
mut fs1 := fs_factory.fs.new(
name: 'duplicate_name'
description: 'First filesystem'
quota_bytes: 1024 * 1024 * 10
)!
fs1 = fs_factory.fs.set(fs1)!
// Try to create second filesystem with same name
mut fs2 := fs_factory.fs.new(
name: 'duplicate_name'
description: 'Second filesystem'
quota_bytes: 1024 * 1024 * 10
)!
fs2 = fs_factory.fs.set(fs2)!
// Both should succeed (name conflicts not enforced at DB level)
// But we can test retrieval by name
retrieved_fs := fs_factory.fs.get_by_name('duplicate_name') or {
// If get_by_name fails with multiple matches, that's expected
println(' Filesystem name conflict correctly detected')
return
}
// If it succeeds, it should return one of them
assert retrieved_fs.name == 'duplicate_name'
println(' Filesystem name handling tested')
}
// Verifies verify_integrity passes for intact blob data and fails after the
// data is mutated without recomputing the stored hash.
fn test_blob_integrity_verification() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create blob with known content
test_data := 'Test data for integrity check'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_data)!
test_blob = fs_factory.fs_blob.set(test_blob)!
// Verify integrity
is_valid := test_blob.verify_integrity()
assert is_valid == true
// Test with corrupted data (simulate corruption)
// struct assignment copies, so test_blob itself stays intact
mut corrupted_blob := test_blob
corrupted_blob.data = 'Corrupted data'.bytes()
// Integrity check should fail
is_corrupted_valid := corrupted_blob.verify_integrity()
assert is_corrupted_valid == false
println(' Blob integrity verification works correctly')
}
// Probes delete() on a non-empty directory: if it errors the message must
// mention 'not empty'; if it succeeds that is also accepted as valid
// implementation-defined behaviour.
fn test_directory_deletion_with_contents() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'dir_delete_test'
description: 'Test filesystem for directory deletion'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create subdirectory with content
mut sub_dir := fs_factory.fs_dir.new(
name: 'subdir'
fs_id: test_fs.id
parent_id: root_dir.id
)!
sub_dir = fs_factory.fs_dir.set(sub_dir)!
// Add file to subdirectory
test_content := 'File in subdirectory'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_content)!
test_blob = fs_factory.fs_blob.set(test_blob)!
mut test_file := fs_factory.fs_file.new(
name: 'test.txt'
fs_id: test_fs.id
blobs: [test_blob.id]
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
fs_factory.fs_file.add_to_directory(test_file.id, sub_dir.id)!
// Try to delete non-empty directory (should fail)
fs_factory.fs_dir.delete(sub_dir.id) or {
assert err.msg().contains('not empty')
println(' Non-empty directory deletion correctly prevented')
// early exit: the expected error path was taken
return
}
// If it doesn't fail, that's also valid behavior depending on implementation
println(' Directory deletion behavior tested')
}
}

View File

@@ -3,6 +3,10 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsFile represents a file in a filesystem
@[heap]
@@ -22,7 +26,7 @@ pub mut:
pub struct DBFsFile {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsFile) type_name() string {
@@ -373,3 +377,94 @@ pub fn (mut self DBFsFile) list_directories_for_file(file_id u32) ![]u32 {
}
return containing_dirs
}
// description returns a human-readable summary for the given JSON-RPC
// method name on the file object; unknown methods get a generic text.
pub fn (self FsFile) description(methodname string) string {
	return match methodname {
		'set' { 'Create or update a file. Returns the ID of the file.' }
		'get' { 'Retrieve a file by ID. Returns the file object.' }
		'delete' { 'Delete a file by ID. Returns true if successful.' }
		'exist' { 'Check if a file exists by ID. Returns true or false.' }
		'list' { 'List all files. Returns an array of file objects.' }
		'rename' { 'Rename a file. Returns true if successful.' }
		else { 'This is generic method for the file object.' }
	}
}
// example returns a (request, response) JSON sample pair for the given
// JSON-RPC method name on the file object; unknown methods get empty objects.
pub fn (self FsFile) example(methodname string) (string, string) {
	if methodname == 'set' {
		return '{"file": {"name": "document.txt", "fs_id": 1, "blobs": [1], "mime_type": "txt"}}', '1'
	}
	if methodname == 'get' {
		return '{"id": 1}', '{"name": "document.txt", "fs_id": 1, "blobs": [1], "size_bytes": 1024, "mime_type": "txt"}'
	}
	if methodname == 'delete' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'exist' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'list' {
		return '{}', '[{"name": "document.txt", "fs_id": 1, "blobs": [1], "size_bytes": 1024, "mime_type": "txt"}]'
	}
	if methodname == 'rename' {
		return '{"id": 1, "new_name": "renamed_document.txt"}', 'true'
	}
	// Fallback for methods without a dedicated example.
	return '{}', '{}'
}
// fs_file_handle dispatches a JSON-RPC request for the fs_file entity.
// params is the raw JSON parameter payload; the result (or error) is
// wrapped in a jsonrpc Response carrying rpcid.
pub fn fs_file_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.fs_file.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut o := db.decode_generic[FsFile](params)!
			o = f.fs_file.set(o)!
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			f.fs_file.delete(id)!
			return new_response_ok(rpcid)
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.fs_file.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			res := f.fs_file.list()!
			return new_response(rpcid, json.encode(res))
		}
		else {
			console.print_stderr('Method not found on fs_file: ${method}')
			// JSON-RPC 2.0 reserves -32601 (negative) for "Method not found".
			return new_error(rpcid,
				code: -32601
				message: 'Method ${method} not found on fs_file'
			)
		}
	}
}

View File

@@ -0,0 +1,526 @@
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.hero.user
import json
import time // Added for time.sleep
// FsFile, FsFileArg, MimeType, FsBlobArg are part of the same module, no need to import explicitly
// import freeflowuniverse.herolib.hero.herofs { FsFile, FsFileArg, MimeType, FsBlobArg }
// Verify FsFile.new populates fields from FsFileArg and initializes
// size_bytes to 0 and updated_at to a nonzero timestamp.
fn test_fs_file_new() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut fs := db_fs.new_get_set(name: 'test_fs_file_new')!
	mut args := FsFileArg{
		name: 'test_file.txt'
		description: 'Test file for new function'
		fs_id: fs.id
		mime_type: .txt
	}
	file := db_fs_file.new(args)!
	assert file.name == 'test_file.txt'
	assert file.description == 'Test file for new function'
	assert file.fs_id == fs.id
	assert file.mime_type == .txt
	// A new file with no blobs has zero size but a set timestamp.
	assert file.size_bytes == 0
	assert file.updated_at > 0
	println(' FsFile new test passed!')
}
// Full create/read/update/delete cycle for FsFile, including blob
// replacement and size_bytes tracking derived from blob content length.
fn test_fs_file_crud_operations() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'crud_test_fs_file')!
	// Create a blob for the file
	mut blob_args := FsBlobArg{
		data: 'File Content'.bytes()
	}
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut args := FsFileArg{
		name: 'crud_file.txt'
		description: 'CRUD Test File'
		fs_id: fs.id
		blobs: [blob.id]
		mime_type: .txt
	}
	mut file := db_fs_file.new(args)!
	file = db_fs_file.set(file)!
	original_id := file.id
	retrieved_file := db_fs_file.get(original_id)!
	assert retrieved_file.name == 'crud_file.txt'
	assert retrieved_file.id == original_id
	assert retrieved_file.blobs.len == 1
	assert retrieved_file.blobs[0] == blob.id
	// size_bytes should equal the byte length of the blob content.
	assert retrieved_file.size_bytes == u64('File Content'.len)
	exists := db_fs_file.exist(original_id)!
	assert exists == true
	// Update file
	mut updated_blob_args := FsBlobArg{
		data: 'Updated File Content'.bytes()
	}
	mut updated_blob := db_fs_blob.new(updated_blob_args)!
	updated_blob = db_fs_blob.set(updated_blob)!
	mut updated_file_obj := retrieved_file
	updated_file_obj.description = 'Updated CRUD Test File'
	updated_file_obj.blobs = [updated_blob.id]
	updated_file_obj.size_bytes = u64('Updated File Content'.len)
	updated_file_obj = db_fs_file.set(updated_file_obj)!
	final_file := db_fs_file.get(original_id)!
	assert final_file.description == 'Updated CRUD Test File'
	assert final_file.blobs.len == 1
	assert final_file.blobs[0] == updated_blob.id
	assert final_file.size_bytes == u64('Updated File Content'.len)
	// Delete and confirm the record is gone.
	db_fs_file.delete(original_id)!
	exists_after_delete := db_fs_file.exist(original_id)!
	assert exists_after_delete == false
	println(' FsFile CRUD operations test passed!')
}
// Verify add_to_directory links a file into a directory's files list.
fn test_fs_file_add_to_directory() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_dir := factory.fs_dir
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'add_to_dir_fs')!
	root_dir := fs.root_dir()!
	mut blob_args := FsBlobArg{
		data: 'File for directory'.bytes()
	}
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut file_args := FsFileArg{
		name: 'dir_file.txt'
		fs_id: fs.id
		blobs: [blob.id]
		mime_type: .txt
	}
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	db_fs_file.add_to_directory(file.id, root_dir.id)!
	// Re-read the directory to observe the persisted membership.
	updated_root_dir := db_fs_dir.get(root_dir.id)!
	assert file.id in updated_root_dir.files
	println(' FsFile add_to_directory test passed!')
}
// Verify list() returns every stored file (2 created on a fresh test DB).
// Order is not guaranteed, so both permutations are accepted.
fn test_fs_file_list() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'fs_file_list_test')!
	mut blob_args1 := FsBlobArg{ data: 'File 1'.bytes() }
	mut blob1 := db_fs_blob.new(blob_args1)!
	blob1 = db_fs_blob.set(blob1)!
	mut blob_args2 := FsBlobArg{ data: 'File 2'.bytes() }
	mut blob2 := db_fs_blob.new(blob_args2)!
	blob2 = db_fs_blob.set(blob2)!
	mut file_args1 := FsFileArg{ name: 'list_file1.txt', fs_id: fs.id, blobs: [blob1.id], mime_type: .txt }
	mut file1 := db_fs_file.new(file_args1)!
	file1 = db_fs_file.set(file1)!
	mut file_args2 := FsFileArg{ name: 'list_file2.txt', fs_id: fs.id, blobs: [blob2.id], mime_type: .txt }
	mut file2 := db_fs_file.new(file_args2)!
	file2 = db_fs_file.set(file2)!
	list_of_files := db_fs_file.list()!
	assert list_of_files.len == 2
	assert list_of_files[0].name == 'list_file1.txt' || list_of_files[0].name == 'list_file2.txt'
	assert list_of_files[1].name == 'list_file1.txt' || list_of_files[1].name == 'list_file2.txt'
	println(' FsFile list test passed!')
}
// Verify get_by_path resolves a file by (directory id, file name).
fn test_fs_file_get_by_path() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_dir := factory.fs_dir
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'get_by_path_fs')!
	root_dir := fs.root_dir()!
	mut blob_args := FsBlobArg{ data: 'Path File'.bytes() }
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut file_args := FsFileArg{ name: 'path_file.txt', fs_id: fs.id, blobs: [blob.id], mime_type: .txt }
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	// The file must be attached to a directory before path lookup can find it.
	db_fs_file.add_to_directory(file.id, root_dir.id)!
	retrieved_file := db_fs_file.get_by_path(root_dir.id, 'path_file.txt')!
	assert retrieved_file.id == file.id
	assert retrieved_file.name == 'path_file.txt'
	println(' FsFile get_by_path test passed!')
}
// Verify list_by_directory returns exactly the files attached to a directory.
// Order is not guaranteed, so both permutations are accepted.
fn test_fs_file_list_by_directory() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_dir := factory.fs_dir
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'list_by_dir_fs')!
	root_dir := fs.root_dir()!
	mut blob_args1 := FsBlobArg{ data: 'Dir File 1'.bytes() }
	mut blob1 := db_fs_blob.new(blob_args1)!
	blob1 = db_fs_blob.set(blob1)!
	mut blob_args2 := FsBlobArg{ data: 'Dir File 2'.bytes() }
	mut blob2 := db_fs_blob.new(blob_args2)!
	blob2 = db_fs_blob.set(blob2)!
	mut file_args1 := FsFileArg{ name: 'dir_file1.txt', fs_id: fs.id, blobs: [blob1.id], mime_type: .txt }
	mut file1 := db_fs_file.new(file_args1)!
	file1 = db_fs_file.set(file1)!
	mut file_args2 := FsFileArg{ name: 'dir_file2.txt', fs_id: fs.id, blobs: [blob2.id], mime_type: .txt }
	mut file2 := db_fs_file.new(file_args2)!
	file2 = db_fs_file.set(file2)!
	db_fs_file.add_to_directory(file1.id, root_dir.id)!
	db_fs_file.add_to_directory(file2.id, root_dir.id)!
	files_in_dir := db_fs_file.list_by_directory(root_dir.id)!
	assert files_in_dir.len == 2
	assert files_in_dir[0].name == 'dir_file1.txt' || files_in_dir[0].name == 'dir_file2.txt'
	assert files_in_dir[1].name == 'dir_file1.txt' || files_in_dir[1].name == 'dir_file2.txt'
	println(' FsFile list_by_directory test passed!')
}
// Verify list_by_filesystem partitions files by their owning filesystem id.
fn test_fs_file_list_by_filesystem() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	// Two separate filesystems, one file each.
	mut fs1 := db_fs.new_get_set(name: 'list_by_fs_1')!
	mut fs2 := db_fs.new_get_set(name: 'list_by_fs_2')!
	mut blob_args1 := FsBlobArg{ data: 'FS1 File'.bytes() }
	mut blob1 := db_fs_blob.new(blob_args1)!
	blob1 = db_fs_blob.set(blob1)!
	mut blob_args2 := FsBlobArg{ data: 'FS2 File'.bytes() }
	mut blob2 := db_fs_blob.new(blob_args2)!
	blob2 = db_fs_blob.set(blob2)!
	mut file_args1 := FsFileArg{ name: 'fs1_file.txt', fs_id: fs1.id, blobs: [blob1.id], mime_type: .txt }
	mut file1 := db_fs_file.new(file_args1)!
	file1 = db_fs_file.set(file1)!
	mut file_args2 := FsFileArg{ name: 'fs2_file.txt', fs_id: fs2.id, blobs: [blob2.id], mime_type: .txt }
	mut file2 := db_fs_file.new(file_args2)!
	file2 = db_fs_file.set(file2)!
	files_in_fs1 := db_fs_file.list_by_filesystem(fs1.id)!
	assert files_in_fs1.len == 1
	assert files_in_fs1[0].name == 'fs1_file.txt'
	files_in_fs2 := db_fs_file.list_by_filesystem(fs2.id)!
	assert files_in_fs2.len == 1
	assert files_in_fs2[0].name == 'fs2_file.txt'
	println(' FsFile list_by_filesystem test passed!')
}
// Verify list_by_mime_type filters files by their MimeType enum value.
fn test_fs_file_list_by_mime_type() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'list_by_mime_fs')!
	mut blob_args1 := FsBlobArg{ data: 'Text File'.bytes() }
	mut blob1 := db_fs_blob.new(blob_args1)!
	blob1 = db_fs_blob.set(blob1)!
	mut blob_args2 := FsBlobArg{ data: 'Image File'.bytes() }
	mut blob2 := db_fs_blob.new(blob_args2)!
	blob2 = db_fs_blob.set(blob2)!
	// One .txt file and one .png file in the same filesystem.
	mut file_args1 := FsFileArg{ name: 'text.txt', fs_id: fs.id, blobs: [blob1.id], mime_type: .txt }
	mut file1 := db_fs_file.new(file_args1)!
	file1 = db_fs_file.set(file1)!
	mut file_args2 := FsFileArg{ name: 'image.png', fs_id: fs.id, blobs: [blob2.id], mime_type: .png }
	mut file2 := db_fs_file.new(file_args2)!
	file2 = db_fs_file.set(file2)!
	text_files := db_fs_file.list_by_mime_type(.txt)!
	assert text_files.len == 1
	assert text_files[0].name == 'text.txt'
	image_files := db_fs_file.list_by_mime_type(.png)!
	assert image_files.len == 1
	assert image_files[0].name == 'image.png'
	println(' FsFile list_by_mime_type test passed!')
}
// Verify update_accessed bumps the file's updated_at timestamp.
fn test_fs_file_update_accessed() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'update_accessed_fs')!
	mut blob_args := FsBlobArg{ data: 'Accessed File'.bytes() }
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut file_args := FsFileArg{ name: 'accessed.txt', fs_id: fs.id, blobs: [blob.id], mime_type: .txt }
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	// Manually set updated_at to a past value to ensure a change
	mut file_to_update := file
	file_to_update.updated_at = 1
	file_to_update = db_fs_file.set(file_to_update)!
	db_fs_file.update_accessed(file_to_update.id)!
	updated_file := db_fs_file.get(file_to_update.id)!
	// The stored timestamp must have moved past the stale value.
	assert updated_file.updated_at > 1
	println(' FsFile update_accessed test passed!')
}
// Verify update_metadata persists a key/value pair in the file's metadata map.
fn test_fs_file_update_metadata() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'update_metadata_fs')!
	mut blob_args := FsBlobArg{ data: 'Metadata File'.bytes() }
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut file_args := FsFileArg{ name: 'metadata.txt', fs_id: fs.id, blobs: [blob.id], mime_type: .txt }
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	db_fs_file.update_metadata(file.id, 'author', 'John Doe')!
	updated_file := db_fs_file.get(file.id)!
	assert updated_file.metadata['author'] == 'John Doe'
	println(' FsFile update_metadata test passed!')
}
// Verify rename changes the file's name while keeping its id.
fn test_fs_file_rename() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'rename_fs')!
	mut blob_args := FsBlobArg{ data: 'Rename File'.bytes() }
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut file_args := FsFileArg{ name: 'old_name.txt', fs_id: fs.id, blobs: [blob.id], mime_type: .txt }
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	db_fs_file.rename(file.id, 'new_name.txt')!
	renamed_file := db_fs_file.get(file.id)!
	assert renamed_file.name == 'new_name.txt'
	println(' FsFile rename test passed!')
}
// Verify move detaches a file from its current directory and attaches it
// to the target directory list passed to move().
fn test_fs_file_move() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_dir := factory.fs_dir
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'move_fs')!
	root_dir := fs.root_dir()!
	// Create two sibling directories under root and register each in
	// the root directory's children list.
	mut dir1 := db_fs_dir.new(name: 'dir1', fs_id: fs.id, parent_id: root_dir.id)!
	dir1 = db_fs_dir.set(dir1)!
	mut updated_root_dir_for_dir1 := db_fs_dir.get(root_dir.id)!
	updated_root_dir_for_dir1.directories << dir1.id
	updated_root_dir_for_dir1 = db_fs_dir.set(updated_root_dir_for_dir1)!
	mut dir2 := db_fs_dir.new(name: 'dir2', fs_id: fs.id, parent_id: root_dir.id)!
	dir2 = db_fs_dir.set(dir2)!
	mut updated_root_dir_for_dir2 := db_fs_dir.get(root_dir.id)!
	updated_root_dir_for_dir2.directories << dir2.id
	updated_root_dir_for_dir2 = db_fs_dir.set(updated_root_dir_for_dir2)!
	mut blob_args := FsBlobArg{ data: 'Move File'.bytes() }
	mut blob := db_fs_blob.new(blob_args)!
	blob = db_fs_blob.set(blob)!
	mut file_args := FsFileArg{ name: 'move_file.txt', fs_id: fs.id, blobs: [blob.id], mime_type: .txt }
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	db_fs_file.add_to_directory(file.id, dir1.id)!
	// Move file from dir1 to dir2
	db_fs_file.move(file.id, [dir2.id])!
	dir1_after_move := db_fs_dir.get(dir1.id)!
	dir2_after_move := db_fs_dir.get(dir2.id)!
	// Idiomatic V negative membership test (`!in` is the vfmt-canonical form).
	assert file.id !in dir1_after_move.files
	assert file.id in dir2_after_move.files
	println(' FsFile move test passed!')
}
// Verify append_blob adds a blob id to the file and grows size_bytes
// by the appended blob's content length.
fn test_fs_file_append_blob() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut db_fs_blob := factory.fs_blob
	mut fs := db_fs.new_get_set(name: 'append_blob_fs')!
	mut blob_args1 := FsBlobArg{ data: 'Part 1'.bytes() }
	mut blob1 := db_fs_blob.new(blob_args1)!
	blob1 = db_fs_blob.set(blob1)!
	mut file_args := FsFileArg{ name: 'append.txt', fs_id: fs.id, blobs: [blob1.id], mime_type: .txt }
	mut file := db_fs_file.new(file_args)!
	file = db_fs_file.set(file)!
	original_size := file.size_bytes
	assert file.blobs.len == 1
	mut blob_args2 := FsBlobArg{ data: 'Part 2'.bytes() }
	mut blob2 := db_fs_blob.new(blob_args2)!
	blob2 = db_fs_blob.set(blob2)!
	db_fs_file.append_blob(file.id, blob2.id)!
	updated_file := db_fs_file.get(file.id)!
	// The new blob is appended at the end of the blob list.
	assert updated_file.blobs.len == 2
	assert updated_file.blobs[1] == blob2.id
	assert updated_file.size_bytes == original_size + u64('Part 2'.len)
	println(' FsFile append_blob test passed!')
}
// Verify FsFile.description returns the documented text for every known
// JSON-RPC method name and a generic fallback otherwise.
fn test_fs_file_description() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut fs := db_fs.new_get_set(name: 'fs_file_description_test')!
	mut args := FsFileArg{
		name: 'description_file.txt'
		fs_id: fs.id
		mime_type: .txt
	}
	file := db_fs_file.new(args)!
	assert file.description('set') == 'Create or update a file. Returns the ID of the file.'
	assert file.description('get') == 'Retrieve a file by ID. Returns the file object.'
	assert file.description('delete') == 'Delete a file by ID. Returns true if successful.'
	assert file.description('exist') == 'Check if a file exists by ID. Returns true or false.'
	assert file.description('list') == 'List all files. Returns an array of file objects.'
	assert file.description('rename') == 'Rename a file. Returns true if successful.'
	assert file.description('unknown') == 'This is generic method for the file object.'
	println(' FsFile description test passed!')
}
// Verify FsFile.example returns the documented (request, response) JSON
// sample pair for every known method and empty objects otherwise.
fn test_fs_file_example() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut db_fs_file := factory.fs_file
	mut fs := db_fs.new_get_set(name: 'fs_file_example_test')!
	mut args := FsFileArg{
		name: 'example_file.txt'
		fs_id: fs.id
		mime_type: .txt
	}
	file := db_fs_file.new(args)!
	set_call, set_result := file.example('set')
	assert set_call == '{"file": {"name": "document.txt", "fs_id": 1, "blobs": [1], "mime_type": "txt"}}'
	assert set_result == '1'
	get_call, get_result := file.example('get')
	assert get_call == '{"id": 1}'
	assert get_result == '{"name": "document.txt", "fs_id": 1, "blobs": [1], "size_bytes": 1024, "mime_type": "txt"}'
	delete_call, delete_result := file.example('delete')
	assert delete_call == '{"id": 1}'
	assert delete_result == 'true'
	exist_call, exist_result := file.example('exist')
	assert exist_call == '{"id": 1}'
	assert exist_result == 'true'
	list_call, list_result := file.example('list')
	assert list_call == '{}'
	assert list_result == '[{"name": "document.txt", "fs_id": 1, "blobs": [1], "size_bytes": 1024, "mime_type": "txt"}]'
	rename_call, rename_result := file.example('rename')
	assert rename_call == '{"id": 1, "new_name": "renamed_document.txt"}'
	assert rename_result == 'true'
	unknown_call, unknown_result := file.example('unknown')
	assert unknown_call == '{}'
	assert unknown_result == '{}'
	println(' FsFile example test passed!')
}

View File

@@ -3,13 +3,16 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsSymlink represents a symbolic link in a filesystem
@[heap]
pub struct FsSymlink {
db.Base
pub mut:
name string
fs_id u32 // Associated filesystem
parent_id u32 // Parent directory ID
target_id u32 // ID of target file or directory
@@ -24,7 +27,7 @@ pub enum SymlinkTargetType {
pub struct DBFsSymlink {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &FSFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsSymlink) type_name() string {
@@ -32,7 +35,6 @@ pub fn (self FsSymlink) type_name() string {
}
pub fn (self FsSymlink) dump(mut e encoder.Encoder) ! {
e.add_string(self.name)
e.add_u32(self.fs_id)
e.add_u32(self.parent_id)
e.add_u32(self.target_id)
@@ -40,7 +42,6 @@ pub fn (self FsSymlink) dump(mut e encoder.Encoder) ! {
}
fn (mut self DBFsSymlink) load(mut o FsSymlink, mut e encoder.Decoder) ! {
o.name = e.get_string()!
o.fs_id = e.get_u32()!
o.parent_id = e.get_u32()!
o.target_id = e.get_u32()!
@@ -158,3 +159,103 @@ pub fn (mut self DBFsSymlink) is_broken(id u32) !bool {
return true // Unknown target type is considered broken
}
// description returns a human-readable summary for the given JSON-RPC
// method name on the symlink object; unknown methods get a generic text.
pub fn (self FsSymlink) description(methodname string) string {
	return match methodname {
		'set' { 'Create or update a symlink. Returns the ID of the symlink.' }
		'get' { 'Retrieve a symlink by ID. Returns the symlink object.' }
		'delete' { 'Delete a symlink by ID. Returns true if successful.' }
		'exist' { 'Check if a symlink exists by ID. Returns true or false.' }
		'list' { 'List all symlinks. Returns an array of symlink objects.' }
		'is_broken' { 'Check if a symlink is broken. Returns true or false.' }
		else { 'This is generic method for the symlink object.' }
	}
}
// example returns a (request, response) JSON sample pair for the given
// JSON-RPC method name on the symlink object; unknown methods get empty objects.
pub fn (self FsSymlink) example(methodname string) (string, string) {
	if methodname == 'set' {
		return '{"symlink": {"name": "link.txt", "fs_id": 1, "parent_id": 2, "target_id": 3, "target_type": "file"}}', '1'
	}
	if methodname == 'get' {
		return '{"id": 1}', '{"name": "link.txt", "fs_id": 1, "parent_id": 2, "target_id": 3, "target_type": "file"}'
	}
	if methodname == 'delete' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'exist' {
		return '{"id": 1}', 'true'
	}
	if methodname == 'list' {
		return '{}', '[{"name": "link.txt", "fs_id": 1, "parent_id": 2, "target_id": 3, "target_type": "file"}]'
	}
	if methodname == 'is_broken' {
		return '{"id": 1}', 'false'
	}
	// Fallback for methods without a dedicated example.
	return '{}', '{}'
}
// fs_symlink_handle dispatches a JSON-RPC request for the fs_symlink entity.
// params is the raw JSON parameter payload; the result (or error) is
// wrapped in a jsonrpc Response carrying rpcid.
pub fn fs_symlink_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			id := db.decode_u32(params)!
			res := f.fs_symlink.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'set' {
			mut o := db.decode_generic[FsSymlink](params)!
			o = f.fs_symlink.set(o)!
			return new_response_int(rpcid, int(o.id))
		}
		'delete' {
			id := db.decode_u32(params)!
			f.fs_symlink.delete(id)!
			return new_response_ok(rpcid)
		}
		'exist' {
			id := db.decode_u32(params)!
			if f.fs_symlink.exist(id)! {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'list' {
			res := f.fs_symlink.list()!
			return new_response(rpcid, json.encode(res))
		}
		'is_broken' {
			id := db.decode_u32(params)!
			is_broken := f.fs_symlink.is_broken(id)!
			if is_broken {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		else {
			console.print_stderr('Method not found on fs_symlink: ${method}')
			// JSON-RPC 2.0 reserves -32601 (negative) for "Method not found".
			return new_error(rpcid,
				code: -32601
				message: 'Method ${method} not found on fs_symlink'
			)
		}
	}
}

View File

@@ -5,26 +5,17 @@ fn test_symlink_operations() ! {
mut fs_factory := new()!
// Create test filesystem
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'symlink_test'
description: 'Test filesystem for symlink operations'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create a subdirectory
mut sub_dir := fs_factory.fs_dir.new(
name: 'subdir'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
)!
sub_dir = fs_factory.fs_dir.set(sub_dir)!
@@ -46,7 +37,7 @@ fn test_symlink_operations() ! {
mut file_symlink := fs_factory.fs_symlink.new(
name: 'file_link'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
target_id: test_file.id
target_type: .file
)!
@@ -56,7 +47,7 @@ fn test_symlink_operations() ! {
mut dir_symlink := fs_factory.fs_symlink.new(
name: 'dir_link'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
target_id: sub_dir.id
target_type: .directory
)!
@@ -73,17 +64,6 @@ fn test_symlink_operations() ! {
assert retrieved_dir_link.target_id == sub_dir.id
assert retrieved_dir_link.target_type == .directory
// Test symlink existence
file_link_exists := fs_factory.fs_symlink.exist(file_symlink.id)!
assert file_link_exists == true
// Test listing symlinks
all_symlinks := fs_factory.fs_symlink.list()!
assert all_symlinks.len >= 2
fs_symlinks := fs_factory.fs_symlink.list_by_filesystem(test_fs.id)!
assert fs_symlinks.len == 2
// Test broken symlink detection
is_file_link_broken := fs_factory.fs_symlink.is_broken(file_symlink.id)!
assert is_file_link_broken == false
@@ -98,61 +78,4 @@ fn test_symlink_operations() ! {
assert file_link_exists_after_delete == false
println(' Symlink operations tests passed!')
}
// Verify is_broken flips from false to true once the symlink's target
// file is deleted.
fn test_broken_symlink_detection() ! {
	// Initialize HeroFS factory
	mut fs_factory := new()!
	// Create test filesystem
	mut test_fs := fs_factory.fs.new(
		name: 'broken_symlink_test'
		description: 'Test filesystem for broken symlink detection'
		quota_bytes: 1024 * 1024 * 10
	)!
	test_fs = fs_factory.fs.set(test_fs)!
	// Create root directory
	mut root_dir := fs_factory.fs_dir.new(
		name: 'root'
		fs_id: test_fs.id
		parent_id: 0
	)!
	root_dir = fs_factory.fs_dir.set(root_dir)!
	// Create a test file
	test_content := 'Temporary file'.bytes()
	mut test_blob := fs_factory.fs_blob.new(data: test_content)!
	test_blob = fs_factory.fs_blob.set(test_blob)!
	mut temp_file := fs_factory.fs_file.new(
		name: 'temp.txt'
		fs_id: test_fs.id
		blobs: [test_blob.id]
		mime_type: .txt
	)!
	temp_file = fs_factory.fs_file.set(temp_file)!
	// Create symlink to the file
	mut symlink := fs_factory.fs_symlink.new(
		name: 'temp_link'
		fs_id: test_fs.id
		parent_id: root_dir.id
		target_id: temp_file.id
		target_type: .file
	)!
	symlink = fs_factory.fs_symlink.set(symlink)!
	// Verify symlink is not broken initially
	is_broken_before := fs_factory.fs_symlink.is_broken(symlink.id)!
	assert is_broken_before == false
	// Delete the target file
	fs_factory.fs_file.delete(temp_file.id)!
	// Now the symlink should be broken
	is_broken_after := fs_factory.fs_symlink.is_broken(symlink.id)!
	assert is_broken_after == true
	println(' Broken symlink detection works correctly!')
}
}

249
lib/hero/herofs/fs_test.v Normal file
View File

@@ -0,0 +1,249 @@
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.hero.user
import json
// Fs and FsArg are part of the same module, no need to import explicitly
// import freeflowuniverse.herolib.hero.herofs { Fs, FsArg }
// Verify Fs.new populates fields from FsArg; root_dir_id stays 0 until
// the filesystem is persisted (new does not create the root directory).
fn test_fs_new() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'test_fs_new'
		description: 'Test filesystem for new function'
		quota_bytes: 1000
	}
	fs := db_fs.new(args)!
	assert fs.name == 'test_fs_new'
	assert fs.description == 'Test filesystem for new function'
	assert fs.quota_bytes == 1000
	assert fs.used_bytes == 0
	assert fs.updated_at > 0
	assert fs.root_dir_id == 0 // Should be 0 before setting
	println(' Fs new test passed!')
}
// Verify new_get_set is idempotent by name: a second call with the same
// name updates the existing record (same id, same root_dir_id) instead of
// creating a new one.
fn test_fs_new_get_set() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args1 := FsArg{
		name: 'test_fs_new_get_set'
		description: 'Test filesystem for new_get_set function'
		quota_bytes: 2000
	}
	mut fs1 := db_fs.new_get_set(args1)!
	assert fs1.name == 'test_fs_new_get_set'
	assert fs1.description == 'Test filesystem for new_get_set function'
	assert fs1.quota_bytes == 2000
	assert fs1.used_bytes == 0
	assert fs1.root_dir_id > 0 // Should be set after new_get_set
	mut args2 := FsArg{
		name: 'test_fs_new_get_set'
		description: 'Updated description'
		quota_bytes: 3000
	}
	mut fs2 := db_fs.new_get_set(args2)!
	// Same logical filesystem: id and root directory are preserved.
	assert fs2.id == fs1.id
	assert fs2.name == 'test_fs_new_get_set'
	assert fs2.description == 'Updated description'
	assert fs2.quota_bytes == 3000
	assert fs2.used_bytes == 0
	assert fs2.root_dir_id == fs1.root_dir_id
	println(' Fs new_get_set test passed!')
}
// Full create/read/update/delete cycle for the Fs entity.
fn test_fs_crud_operations() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'crud_test_fs'
		description: 'CRUD Test Filesystem'
		quota_bytes: 5000
	}
	mut fs := db_fs.new(args)!
	fs = db_fs.set(fs)!
	original_id := fs.id
	retrieved_fs := db_fs.get(original_id)!
	assert retrieved_fs.name == 'crud_test_fs'
	assert retrieved_fs.id == original_id
	exists := db_fs.exist(original_id)!
	assert exists == true
	mut updated_args := FsArg{
		name: 'crud_test_fs'
		description: 'Updated CRUD Test Filesystem'
		quota_bytes: 6000
	}
	// Build a fresh object and force it onto the original id to perform
	// an in-place update via set().
	mut updated_fs := db_fs.new(updated_args)!
	updated_fs.id = original_id
	updated_fs = db_fs.set(updated_fs)!
	final_fs := db_fs.get(original_id)!
	assert final_fs.description == 'Updated CRUD Test Filesystem'
	assert final_fs.quota_bytes == 6000
	db_fs.delete(original_id)!
	exists_after_delete := db_fs.exist(original_id)!
	assert exists_after_delete == false
	println(' Fs CRUD operations test passed!')
}
// Verify Fs list() returns every stored filesystem (2 on a fresh test DB).
// Order is not guaranteed, so both permutations are accepted.
fn test_fs_list() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args1 := FsArg{
		name: 'fs_list_test_1'
		description: 'Filesystem for list test 1'
	}
	mut fs1 := db_fs.new_get_set(args1)!
	mut args2 := FsArg{
		name: 'fs_list_test_2'
		description: 'Filesystem for list test 2'
	}
	mut fs2 := db_fs.new_get_set(args2)!
	list_of_fss := db_fs.list(FsListArg{})!
	assert list_of_fss.len == 2
	assert list_of_fss[0].name == 'fs_list_test_1' || list_of_fss[0].name == 'fs_list_test_2'
	assert list_of_fss[1].name == 'fs_list_test_1' || list_of_fss[1].name == 'fs_list_test_2'
	println(' Fs list test passed!')
}
// Verify get_by_name resolves a filesystem by its unique name.
fn test_fs_get_by_name() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'fs_by_name'
		description: 'Filesystem for get_by_name test'
	}
	mut fs := db_fs.new_get_set(args)!
	retrieved_fs := db_fs.get_by_name('fs_by_name')!
	assert retrieved_fs.id == fs.id
	assert retrieved_fs.name == 'fs_by_name'
	println(' Fs get_by_name test passed!')
}
// Verify check_quota: with quota 100 and 50 used, adding up to 50 more
// bytes is allowed (inclusive boundary) and 51 is rejected.
fn test_fs_check_quota() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'fs_quota_test'
		quota_bytes: 100
		used_bytes: 50
	}
	mut fs := db_fs.new_get_set(args)!
	// Check within quota
	can_add := db_fs.check_quota(fs.id, 40)!
	assert can_add == true
	// Check exactly at quota limit
	can_add_exact := db_fs.check_quota(fs.id, 50)!
	assert can_add_exact == true
	// Check exceeding quota
	cannot_add := db_fs.check_quota(fs.id, 51)!
	assert cannot_add == false
	println(' Fs check_quota test passed!')
}
// Verify root_dir() returns the auto-created root directory: named 'root',
// owned by the filesystem, with no parent (parent_id == 0).
fn test_fs_root_dir() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'fs_root_dir_test'
	}
	mut fs := db_fs.new_get_set(args)!
	root_dir := fs.root_dir()!
	assert root_dir.id == fs.root_dir_id
	assert root_dir.name == 'root'
	assert root_dir.fs_id == fs.id
	assert root_dir.parent_id == 0
	println(' Fs root_dir test passed!')
}
// Verify Fs.description returns the documented text for every known
// JSON-RPC method name and a generic fallback otherwise.
fn test_fs_description() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'fs_description_test'
	}
	fs := db_fs.new(args)!
	assert fs.description('set') == 'Create or update a filesystem. Returns the ID of the filesystem.'
	assert fs.description('get') == 'Retrieve a filesystem by ID. Returns the filesystem object.'
	assert fs.description('delete') == 'Delete a filesystem by ID. Returns true if successful.'
	assert fs.description('exist') == 'Check if a filesystem exists by ID. Returns true or false.'
	assert fs.description('list') == 'List all filesystems. Returns an array of filesystem objects.'
	assert fs.description('unknown') == 'This is generic method for the root object, TODO fill in, ...'
	println(' Fs description test passed!')
}
// Verify Fs.example returns the documented (request, response) JSON sample
// pair for every known method and empty objects otherwise.
fn test_fs_example() ! {
	mut factory := new_test()!
	mut db_fs := factory.fs
	mut args := FsArg{
		name: 'fs_example_test'
	}
	fs := db_fs.new(args)!
	set_call, set_result := fs.example('set')
	assert set_call == '{"fs": {"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824}}'
	assert set_result == '1'
	get_call, get_result := fs.example('get')
	assert get_call == '{"id": 1}'
	assert get_result == '{"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824, "used_bytes": 0}'
	delete_call, delete_result := fs.example('delete')
	assert delete_call == '{"id": 1}'
	assert delete_result == 'true'
	exist_call, exist_result := fs.example('exist')
	assert exist_call == '{"id": 1}'
	assert exist_result == 'true'
	list_call, list_result := fs.example('list')
	assert list_call == '{}'
	assert list_result == '[{"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824, "used_bytes": 0}]'
	unknown_call, unknown_result := fs.example('unknown')
	assert unknown_call == '{}'
	assert unknown_result == '{}'
	println(' Fs example test passed!')
}

View File

@@ -1,12 +1,15 @@
module herofs
import freeflowuniverse.herolib.hero.db
import os
// CopyOptions provides options for copy operations
@[params]
pub struct CopyOptions {
pub mut:
recursive bool = true // Copy directories recursively
overwrite bool // Overwrite existing files at destination
copy_blobs bool = true // Create new blob copies (true) or reference same blobs (false)
copy_blobs bool // Create new blob copies (true) or reference same blobs (false)
}
// cp copies files and directories from source path to destination
@@ -22,11 +25,7 @@ pub mut:
// fs.cp('/src/*.v', '/backup/', FindOptions{recursive: true}, CopyOptions{overwrite: true})!
// ```
pub fn (mut self Fs) cp(src_path string, dest_path string, find_opts FindOptions, copy_opts CopyOptions) ! {
// Try to find items using the find function first
mut items := []FindResult{}
// If find fails, try to get the item directly by path
items = self.find(src_path, find_opts) or {
mut items := self.find(src_path, find_opts) or {
// Try to get specific file, directory, or symlink by exact path
mut direct_items := []FindResult{}
@@ -55,197 +54,229 @@ pub fn (mut self Fs) cp(src_path string, dest_path string, find_opts FindOptions
return error('Source path "${src_path}" not found')
}
direct_items
}
}
if items.len == 0 {
return error('No items found matching pattern: ${src_path}')
}
if items.len == 0 {
return error('No items found matching pattern: ${src_path}')
}
// Determine destination directory
mut dest_dir_id := u32(0)
is_dest_dir := dest_path.ends_with('/') || self.get_dir_by_absolute_path(dest_path) or { FsDir{} } != FsDir{}
// Check if destination is an existing directory
if dest_dir := self.get_dir_by_absolute_path(dest_path) {
dest_dir_id = dest_dir.id
} else {
// If destination doesn't exist as directory, treat it as a directory path to create
// or as a parent directory if it looks like a file path
mut dir_to_create := dest_path
if !dest_path.ends_with('/') && items.len == 1 && items[0].result_type == .file {
// Single file copy to a specific filename - use parent directory
path_parts := dest_path.trim_left('/').split('/')
if path_parts.len > 1 {
dir_to_create = '/' + path_parts[..path_parts.len - 1].join('/')
} else {
dir_to_create = '/'
}
}
if items.len > 1 && !is_dest_dir {
return error('Cannot copy multiple items to a single file path: ${dest_path}')
}
// Create the destination directory if it doesn't exist
if dir_to_create != '/' {
self.factory.fs_dir.create_path(self.id, dir_to_create)!
}
dest_dir_id = self.get_dir_by_absolute_path(dir_to_create)!.id
}
// Copy each found item
for item in items {
match item.result_type {
.file {
self.copy_file(item.id, dest_dir_id, copy_opts)!
}
.directory {
if copy_opts.recursive {
self.copy_directory(item.id, dest_dir_id, copy_opts)!
}
}
.symlink {
self.copy_symlink(item.id, dest_dir_id, copy_opts)!
}
}
}
for item in items {
match item.result_type {
.file {
self.copy_file(item.id, dest_path, copy_opts)!
}
.directory {
if !copy_opts.recursive {
return error('Cannot copy directory "${item.path}" without recursive option')
}
self.copy_directory(item.id, dest_path, copy_opts)!
}
.symlink {
self.copy_symlink(item.id, dest_path, copy_opts)!
}
}
}
}
// copy_file copies a single file to a destination directory
fn (mut self Fs) copy_file(file_id u32, dest_dir_id u32, opts CopyOptions) ! {
original_file := self.factory.fs_file.get(file_id)!
// copy_file copies a single file to a destination path
fn (mut self Fs) copy_file(file_id u32, dest_path string, opts CopyOptions) ! {
original_file := self.factory.fs_file.get(file_id)!
is_dest_dir := dest_path.ends_with('/') || self.get_dir_by_absolute_path(dest_path) or { FsDir{} } != FsDir{}
dest_dir_id := if is_dest_dir {
self.factory.fs_dir.create_path(self.id, dest_path)!
} else {
self.factory.fs_dir.create_path(self.id, os.dir(dest_path))!
}
file_name := if is_dest_dir { original_file.name } else { os.file_name(dest_path) }
dest_dir := self.factory.fs_dir.get(dest_dir_id)!
if existing_file_id := self.find_file_in_dir(file_name, dest_dir) {
if !opts.overwrite {
return error('File "${file_name}" already exists in destination')
}
self.factory.fs_file.delete(existing_file_id)!
}
// Check if file already exists in destination
for existing_file_id in dest_dir.files {
existing_file := self.factory.fs_file.get(existing_file_id)!
if existing_file.name == original_file.name {
if !opts.overwrite {
return error('File "${original_file.name}" already exists in destination directory')
}
// Remove existing file
self.factory.fs_file.delete(existing_file_id)!
break
}
}
mut new_blob_ids := []u32{}
if opts.copy_blobs {
for blob_id in original_file.blobs {
o_blob := self.factory.fs_blob.get(blob_id)!
mut n_blob := self.factory.fs_blob.new(data: o_blob.data)!
n_blob = self.factory.fs_blob.set(n_blob)!
new_blob_ids << n_blob.id
}
} else {
new_blob_ids = original_file.blobs.clone()
}
// Create new blobs or reference existing ones
mut new_blob_ids := []u32{}
if opts.copy_blobs {
// Create new blob copies
for blob_id in original_file.blobs {
original_blob := self.factory.fs_blob.get(blob_id)!
mut new_blob := self.factory.fs_blob.new(data: original_blob.data)!
new_blob = self.factory.fs_blob.set(new_blob)!
new_blob_ids << new_blob.id
}
} else {
// Reference the same blobs
new_blob_ids = original_file.blobs.clone()
}
// Create new file
mut new_file := self.factory.fs_file.new(
name: original_file.name
fs_id: self.id
blobs: new_blob_ids
mime_type: original_file.mime_type
metadata: original_file.metadata.clone()
)!
new_file = self.factory.fs_file.set(new_file)!
self.factory.fs_file.add_to_directory(new_file.id, dest_dir_id)!
mut new_file := self.factory.fs_file.new(
name: file_name,
fs_id: self.id,
blobs: new_blob_ids,
mime_type: original_file.mime_type,
metadata: original_file.metadata.clone(),
)!
new_file = self.factory.fs_file.set(new_file)!
self.factory.fs_file.add_to_directory(new_file.id, dest_dir_id)!
}
// copy_directory copies a directory and optionally its contents recursively
fn (mut self Fs) copy_directory(dir_id u32, dest_parent_id u32, opts CopyOptions) ! {
original_dir := self.factory.fs_dir.get(dir_id)!
dest_parent := self.factory.fs_dir.get(dest_parent_id)!
// copy_directory copies a directory and its contents recursively to a destination path
fn (mut self Fs) copy_directory(dir_id u32, dest_path string, opts CopyOptions) ! {
original_dir := self.factory.fs_dir.get(dir_id)!
// Check if directory already exists in destination
for existing_dir_id in dest_parent.directories {
existing_dir := self.factory.fs_dir.get(existing_dir_id)!
if existing_dir.name == original_dir.name {
if !opts.overwrite {
return error('Directory "${original_dir.name}" already exists in destination')
}
// For directories, we merge rather than replace when overwrite is true
if opts.recursive {
// Copy contents into existing directory
self.copy_directory_contents(dir_id, existing_dir_id, opts)!
}
is_dest_dir := dest_path.ends_with('/') || self.get_dir_by_absolute_path(dest_path) or { FsDir{} } != FsDir{}
dest_dir_name := if is_dest_dir { original_dir.name } else { os.file_name(dest_path) }
parent_dest_dir_id := if is_dest_dir {
self.factory.fs_dir.create_path(self.id, dest_path)!
} else {
self.factory.fs_dir.create_path(self.id, os.dir(dest_path))!
}
parent_dest_dir := self.factory.fs_dir.get(parent_dest_dir_id)!
if existing_dir_id := self.find_dir_in_dir(dest_dir_name, parent_dest_dir) {
if opts.recursive {
self.copy_directory_contents(dir_id, existing_dir_id, opts)!
return
}
}
// Create new directory
mut new_dir := self.factory.fs_dir.new(
name: original_dir.name
fs_id: self.id
parent_id: dest_parent_id
description: original_dir.description
)!
if !opts.overwrite {
return error('Directory "${dest_dir_name}" already exists in destination')
}
self.factory.fs_dir.delete(existing_dir_id)!
}
mut new_dir := self.factory.fs_dir.new(
name: dest_dir_name,
fs_id: self.id,
parent_id: parent_dest_dir_id,
description: original_dir.description,
)!
new_dir = self.factory.fs_dir.set(new_dir)!
self.factory.fs_dir.set(new_dir)!
// Add to parent's directories list
mut parent := self.factory.fs_dir.get(dest_parent_id)!
parent.directories << new_dir.id
self.factory.fs_dir.set(parent)!
// Copy contents if recursive
if opts.recursive {
self.copy_directory_contents(dir_id, new_dir.id, opts)!
}
mut parent_dir := self.factory.fs_dir.get(parent_dest_dir_id)!
if new_dir.id !in parent_dir.directories {
parent_dir.directories << new_dir.id
self.factory.fs_dir.set(parent_dir)!
}
self.copy_directory_contents(dir_id, new_dir.id, opts)!
}
// copy_directory_contents copies all contents of a directory to another directory
// copy_directory_contents copies the contents of one directory to another
fn (mut self Fs) copy_directory_contents(src_dir_id u32, dest_dir_id u32, opts CopyOptions) ! {
src_dir := self.factory.fs_dir.get(src_dir_id)!
src_dir := self.factory.fs_dir.get(src_dir_id)!
for file_id in src_dir.files {
self.copy_file(file_id, dest_dir_id.str(), opts)!
}
// Copy all files
for file_id in src_dir.files {
self.copy_file(file_id, dest_dir_id, opts)!
}
for subdir_id in src_dir.directories {
self.copy_directory(subdir_id, dest_dir_id.str(), opts)!
}
// Copy all symlinks
for symlink_id in src_dir.symlinks {
self.copy_symlink(symlink_id, dest_dir_id, opts)!
}
// Copy all subdirectories recursively
for subdir_id in src_dir.directories {
self.copy_directory(subdir_id, dest_dir_id, opts)!
}
for symlink_id in src_dir.symlinks {
self.copy_symlink(symlink_id, dest_dir_id.str(), opts)!
}
}
// copy_symlink copies a symbolic link to a destination directory
fn (mut self Fs) copy_symlink(symlink_id u32, dest_dir_id u32, opts CopyOptions) ! {
original_symlink := self.factory.fs_symlink.get(symlink_id)!
dest_dir := self.factory.fs_dir.get(dest_dir_id)!
// copy_symlink copies a symbolic link to a destination path
fn (mut self Fs) copy_symlink(symlink_id u32, dest_path string, opts CopyOptions) ! {
original_symlink := self.factory.fs_symlink.get(symlink_id)!
is_dest_dir := dest_path.ends_with('/') || self.get_dir_by_absolute_path(dest_path) or { FsDir{} } != FsDir{}
dest_dir_id := if is_dest_dir {
self.factory.fs_dir.create_path(self.id, dest_path)!
} else {
self.factory.fs_dir.create_path(self.id, os.dir(dest_path))!
}
// Check if symlink already exists in destination
for existing_symlink_id in dest_dir.symlinks {
existing_symlink := self.factory.fs_symlink.get(existing_symlink_id)!
if existing_symlink.name == original_symlink.name {
if !opts.overwrite {
return error('Symlink "${original_symlink.name}" already exists in destination directory')
}
// Remove existing symlink
self.factory.fs_symlink.delete(existing_symlink_id)!
symlink_name := if is_dest_dir { original_symlink.name } else { os.file_name(dest_path) }
dest_dir := self.factory.fs_dir.get(dest_dir_id)!
if existing_symlink_id := self.find_symlink_in_dir(symlink_name, dest_dir) {
if !opts.overwrite {
return error('Symlink "${symlink_name}" already exists')
}
self.factory.fs_symlink.delete(existing_symlink_id)!
}
mut new_symlink := self.factory.fs_symlink.new(
name: symlink_name,
fs_id: self.id,
parent_id: dest_dir_id,
target_id: original_symlink.target_id,
target_type: original_symlink.target_type,
description: original_symlink.description,
)!
new_symlink = self.factory.fs_symlink.set(new_symlink)!
mut parent := self.factory.fs_dir.get(dest_dir_id)!
parent.symlinks << new_symlink.id
self.factory.fs_dir.set(parent)!
}
// find_file_in_dir finds a file in a directory by name and returns its ID
// find_file_in_dir looks up a file by name among the direct children of `dir`
// and returns its ID, or none when no such file exists.
// Entries that can no longer be loaded from the store are skipped.
fn (mut self Fs) find_file_in_dir(file_name string, dir FsDir) ?u32 {
	for candidate_id in dir.files {
		candidate := self.factory.fs_file.get(candidate_id) or { continue }
		if candidate.name != file_name {
			continue
		}
		return candidate_id
	}
	return none
}
// find_dir_in_dir finds a directory in a directory by name and returns its ID
// find_dir_in_dir looks up a subdirectory by name among the direct children
// of `dir` and returns its ID, or none when no such subdirectory exists.
// Entries that can no longer be loaded from the store are skipped.
fn (mut self Fs) find_dir_in_dir(dir_name string, dir FsDir) ?u32 {
	for candidate_id in dir.directories {
		candidate := self.factory.fs_dir.get(candidate_id) or { continue }
		if candidate.name != dir_name {
			continue
		}
		return candidate_id
	}
	return none
}
// find_symlink_in_dir finds a symlink in a directory by name and returns its ID
// find_symlink_in_dir looks up a symlink by name among the direct children
// of `dir` and returns its ID, or none when no such symlink exists.
// Entries that can no longer be loaded from the store are skipped.
fn (mut self Fs) find_symlink_in_dir(symlink_name string, dir FsDir) ?u32 {
	for candidate_id in dir.symlinks {
		candidate := self.factory.fs_symlink.get(candidate_id) or { continue }
		if candidate.name != symlink_name {
			continue
		}
		return candidate_id
	}
	return none
}
// get_dir_path returns the absolute path for a given directory ID.
pub fn (mut self Fs) get_dir_path(dir_id u32) !string {
if dir_id == self.root_dir_id {
return '/'
}
mut path := ''
mut current_id := dir_id
for {
dir := self.factory.fs_dir.get(current_id)!
if dir.id == self.root_dir_id {
break
}
path = '/' + dir.name + path
if dir.parent_id == 0 {
break
}
current_id = dir.parent_id
}
// Create new symlink
mut new_symlink := self.factory.fs_symlink.new(
name: original_symlink.name
fs_id: self.id
parent_id: dest_dir_id
target_id: original_symlink.target_id
target_type: original_symlink.target_type
description: original_symlink.description
)!
self.factory.fs_symlink.set(new_symlink)!
// Add to parent directory's symlinks list
mut parent := self.factory.fs_dir.get(dest_dir_id)!
parent.symlinks << new_symlink.id
self.factory.fs_dir.set(parent)!
}
return if path == '' { '/' } else { path }
}

View File

@@ -0,0 +1,437 @@
module herofs
import os
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.core.redisclient
// Verifies a plain file copy into an existing destination directory:
// the copy keeps the original file name and, with default CopyOptions
// (copy_blobs disabled), references the same blob instead of duplicating it.
fn test_cp_file() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a source directory and a file
src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
mut blob := fs.factory.fs_blob.new(data: 'file content'.bytes())!
blob = fs.factory.fs_blob.set(blob)!
mut file := fs.factory.fs_file.new(
name: 'test_file.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
file = fs.factory.fs_file.set(file)!
fs.factory.fs_file.add_to_directory(file.id, src_dir_id)!
// 2. Create a destination directory
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
// 3. Copy the file (trailing '/' marks the destination as a directory)
fs.cp('/src/test_file.txt', '/dest/', FindOptions{}, CopyOptions{})!
// 4. Verify the file is copied
dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.files.len == 1
copied_file := fs.factory.fs_file.get(dest_dir.files[0])!
assert copied_file.name == 'test_file.txt'
assert copied_file.blobs[0] == blob.id // Should reference the same blob by default
}
// Verifies that copying with overwrite enabled replaces an existing file of
// the same name in the destination, leaving exactly one file that now
// references the source file's blob.
fn test_cp_file_overwrite() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a source directory and a file
src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
mut blob1 := fs.factory.fs_blob.new(data: 'original content'.bytes())!
blob1 = fs.factory.fs_blob.set(blob1)!
mut file1 := fs.factory.fs_file.new(
name: 'overwrite_file.txt'
fs_id: fs.id
blobs: [blob1.id]
mime_type: .txt
)!
file1 = fs.factory.fs_file.set(file1)!
fs.factory.fs_file.add_to_directory(file1.id, src_dir_id)!
// 2. Create a destination directory and an existing file with the same name
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
mut blob_existing := fs.factory.fs_blob.new(data: 'existing content'.bytes())!
blob_existing = fs.factory.fs_blob.set(blob_existing)!
mut existing_file := fs.factory.fs_file.new(
name: 'overwrite_file.txt'
fs_id: fs.id
blobs: [blob_existing.id]
mime_type: .txt
)!
existing_file = fs.factory.fs_file.set(existing_file)!
fs.factory.fs_file.add_to_directory(existing_file.id, dest_dir_id)!
// 3. Copy the file with overwrite enabled
fs.cp('/src/overwrite_file.txt', '/dest/', FindOptions{}, CopyOptions{overwrite: true})!
// 4. Verify the file is overwritten (still exactly one file in /dest)
dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.files.len == 1
copied_file := fs.factory.fs_file.get(dest_dir.files[0])!
assert copied_file.name == 'overwrite_file.txt'
assert copied_file.blobs[0] == blob1.id // Should now reference the new blob
}
// Verifies that copying onto an existing file WITHOUT overwrite fails with
// an 'already exists' error and does not silently replace the destination.
fn test_cp_file_no_overwrite_error() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a source directory and a file
src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
mut blob1 := fs.factory.fs_blob.new(data: 'original content'.bytes())!
blob1 = fs.factory.fs_blob.set(blob1)!
mut file1 := fs.factory.fs_file.new(
name: 'no_overwrite_file.txt'
fs_id: fs.id
blobs: [blob1.id]
mime_type: .txt
)!
file1 = fs.factory.fs_file.set(file1)!
fs.factory.fs_file.add_to_directory(file1.id, src_dir_id)!
// 2. Create a destination directory and an existing file with the same name
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
mut blob_existing := fs.factory.fs_blob.new(data: 'existing content'.bytes())!
blob_existing = fs.factory.fs_blob.set(blob_existing)!
mut existing_file := fs.factory.fs_file.new(
name: 'no_overwrite_file.txt'
fs_id: fs.id
blobs: [blob_existing.id]
mime_type: .txt
)!
existing_file = fs.factory.fs_file.set(existing_file)!
fs.factory.fs_file.add_to_directory(existing_file.id, dest_dir_id)!
// 3. Attempt to copy the file without overwrite (should error)
fs.cp('/src/no_overwrite_file.txt', '/dest/', FindOptions{}, CopyOptions{overwrite: false}) or {
assert err.msg().contains('already exists')
return
}
// Reaching this point means cp did not error as required
assert false, 'Should have failed'
}
// Verifies a recursive directory copy: /src_root (one file plus a subdir
// with its own file) is copied under /dest_root/ and the full tree —
// directory names, nesting and file counts — is reproduced.
fn test_cp_directory_recursive() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create source directory structure
src_root_id := fs.factory.fs_dir.create_path(fs.id, '/src_root')!
src_subdir_id := fs.factory.fs_dir.create_path(fs.id, '/src_root/subdir')!
mut blob1 := fs.factory.fs_blob.new(data: 'file1 content'.bytes())!
blob1 = fs.factory.fs_blob.set(blob1)!
mut file1 := fs.factory.fs_file.new(
name: 'file1.txt'
fs_id: fs.id
blobs: [blob1.id]
mime_type: .txt
)!
file1 = fs.factory.fs_file.set(file1)!
fs.factory.fs_file.add_to_directory(file1.id, src_root_id)!
mut blob2 := fs.factory.fs_blob.new(data: 'file2 content'.bytes())!
blob2 = fs.factory.fs_blob.set(blob2)!
mut file2 := fs.factory.fs_file.new(
name: 'file2.txt'
fs_id: fs.id
blobs: [blob2.id]
mime_type: .txt
)!
file2 = fs.factory.fs_file.set(file2)!
fs.factory.fs_file.add_to_directory(file2.id, src_subdir_id)!
// 2. Create destination root
dest_root_id := fs.factory.fs_dir.create_path(fs.id, '/dest_root')!
// 3. Copy source_root to dest_root recursively
fs.cp('/src_root', '/dest_root/', FindOptions{}, CopyOptions{recursive: true})!
// 4. Verify destination structure
dest_root := fs.factory.fs_dir.get(dest_root_id)!
assert dest_root.directories.len == 1, 'dest_root should contain src_root'
copied_src_root_dir := fs.factory.fs_dir.get(dest_root.directories[0])!
assert copied_src_root_dir.name == 'src_root', 'copied directory should be named src_root'
assert copied_src_root_dir.files.len == 1, 'src_root should contain 1 file'
copied_subdir := fs.factory.fs_dir.get(copied_src_root_dir.directories[0])!
assert copied_subdir.name == 'subdir', 'copied subdirectory should be named subdir'
assert copied_subdir.files.len == 1, 'subdir should contain 1 file'
}
// Verifies directory copy with recursive + overwrite onto an existing
// destination directory: contents are MERGED — a same-named file is
// overwritten while files only present in the destination are kept.
fn test_cp_directory_merge_overwrite() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create source directory structure
src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
mut blob1 := fs.factory.fs_blob.new(data: 'src file content'.bytes())!
blob1 = fs.factory.fs_blob.set(blob1)!
mut file1 := fs.factory.fs_file.new(
name: 'file1.txt'
fs_id: fs.id
blobs: [blob1.id]
mime_type: .txt
)!
file1 = fs.factory.fs_file.set(file1)!
fs.factory.fs_file.add_to_directory(file1.id, src_dir_id)!
// 2. Create destination directory with an existing file and a new file
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
mut blob_existing := fs.factory.fs_blob.new(data: 'existing file content'.bytes())!
blob_existing = fs.factory.fs_blob.set(blob_existing)!
mut existing_file := fs.factory.fs_file.new(
name: 'file1.txt' // Same name as source file
fs_id: fs.id
blobs: [blob_existing.id]
mime_type: .txt
)!
existing_file = fs.factory.fs_file.set(existing_file)!
fs.factory.fs_file.add_to_directory(existing_file.id, dest_dir_id)!
mut blob_new_dest := fs.factory.fs_blob.new(data: 'new dest file content'.bytes())!
blob_new_dest = fs.factory.fs_blob.set(blob_new_dest)!
mut new_dest_file := fs.factory.fs_file.new(
name: 'file_only_in_dest.txt'
fs_id: fs.id
blobs: [blob_new_dest.id]
mime_type: .txt
)!
new_dest_file = fs.factory.fs_file.set(new_dest_file)!
fs.factory.fs_file.add_to_directory(new_dest_file.id, dest_dir_id)!
// 3. Copy source directory to destination with overwrite (should merge and overwrite file1.txt)
fs.cp('/src', '/dest', FindOptions{}, CopyOptions{recursive: true, overwrite: true})!
// 4. Verify destination contents: merged set, not a replacement
dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.files.len == 2, 'dest_dir should have 2 files after merge'
mut file_names := dest_dir.files.map(fs.factory.fs_file.get(it)!.name)
assert 'file1.txt' in file_names
assert 'file_only_in_dest.txt' in file_names
}
// Verifies copying a file to a destination path whose parent directory does
// not exist yet: cp must create '/new_dir' and place the copy there under
// the new file name taken from the destination path.
fn test_cp_file_to_non_existent_path() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a source file
mut blob := fs.factory.fs_blob.new(data: 'content'.bytes())!
blob = fs.factory.fs_blob.set(blob)!
mut file := fs.factory.fs_file.new(
name: 'source.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
file = fs.factory.fs_file.set(file)!
fs.factory.fs_file.add_to_directory(file.id, fs.root_dir_id)!
// 2. Copy the file to a non-existent path
fs.cp('/source.txt', '/new_dir/new_file.txt', FindOptions{}, CopyOptions{})!
// 3. Verify the directory and file are created
new_dir := fs.get_dir_by_absolute_path('/new_dir')!
assert new_dir.files.len == 1
copied_file := fs.factory.fs_file.get(new_dir.files[0])!
assert copied_file.name == 'new_file.txt' // renamed per destination path
assert copied_file.blobs[0] == blob.id
}
// Verifies copying a symlink into a destination directory: the copy keeps
// the link's name and still points at the ORIGINAL target file (links are
// not dereferenced by cp).
fn test_cp_symlink() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a target file
mut blob := fs.factory.fs_blob.new(data: 'target content'.bytes())!
blob = fs.factory.fs_blob.set(blob)!
mut target_file := fs.factory.fs_file.new(
name: 'target.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
target_file = fs.factory.fs_file.set(target_file)!
fs.factory.fs_file.add_to_directory(target_file.id, fs.root_dir_id)!
// 2. Create a source symlink (registered manually in the root dir)
mut symlink := fs.factory.fs_symlink.new(
name: 'link_to_target.txt'
fs_id: fs.id
parent_id: fs.root_dir_id
target_id: target_file.id
target_type: .file
)!
symlink = fs.factory.fs_symlink.set(symlink)!
mut root_dir := fs.root_dir()!
root_dir.symlinks << symlink.id
fs.factory.fs_dir.set(root_dir)!
// 3. Create a destination directory
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
// 4. Copy the symlink
fs.cp('/link_to_target.txt', '/dest/', FindOptions{}, CopyOptions{})!
// 5. Verify the symlink is copied
dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.symlinks.len == 1
copied_symlink := fs.factory.fs_symlink.get(dest_dir.symlinks[0])!
assert copied_symlink.name == 'link_to_target.txt'
assert copied_symlink.target_id == target_file.id
assert copied_symlink.target_type == .file
}
// Verifies that copying a symlink with overwrite enabled replaces an
// existing same-named symlink in the destination, so the surviving link
// points at the source link's target rather than the old one.
fn test_cp_symlink_overwrite() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a target file
mut blob := fs.factory.fs_blob.new(data: 'target content'.bytes())!
blob = fs.factory.fs_blob.set(blob)!
mut target_file := fs.factory.fs_file.new(
name: 'target.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
target_file = fs.factory.fs_file.set(target_file)!
fs.factory.fs_file.add_to_directory(target_file.id, fs.root_dir_id)!
// 2. Create a source symlink
mut symlink1 := fs.factory.fs_symlink.new(
name: 'link_to_target.txt'
fs_id: fs.id
parent_id: fs.root_dir_id
target_id: target_file.id
target_type: .file
)!
symlink1 = fs.factory.fs_symlink.set(symlink1)!
mut root_dir := fs.root_dir()!
root_dir.symlinks << symlink1.id
fs.factory.fs_dir.set(root_dir)!
// 3. Create a destination directory and an existing symlink with the same name
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
mut other_target_file := fs.factory.fs_file.new(
name: 'other_target.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
other_target_file = fs.factory.fs_file.set(other_target_file)!
fs.factory.fs_file.add_to_directory(other_target_file.id, fs.root_dir_id)!
mut existing_symlink := fs.factory.fs_symlink.new(
name: 'link_to_target.txt'
fs_id: fs.id
parent_id: dest_dir_id
target_id: other_target_file.id
target_type: .file
)!
existing_symlink = fs.factory.fs_symlink.set(existing_symlink)!
mut dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
dest_dir.symlinks << existing_symlink.id
fs.factory.fs_dir.set(dest_dir)!
// 4. Copy the symlink with overwrite enabled
fs.cp('/link_to_target.txt', '/dest/', FindOptions{}, CopyOptions{overwrite: true})!
// 5. Verify the symlink is overwritten (still exactly one link in /dest)
dest_dir = fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.symlinks.len == 1
copied_symlink := fs.factory.fs_symlink.get(dest_dir.symlinks[0])!
assert copied_symlink.name == 'link_to_target.txt'
assert copied_symlink.target_id == target_file.id // Should now point to the original target
}
// Verifies that cp with copy_blobs: false produces a new file entry that
// shares the source file's blob (no data duplication).
fn test_cp_file_copy_blobs_false() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a source directory and a file
src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
mut blob := fs.factory.fs_blob.new(data: 'file content'.bytes())!
blob = fs.factory.fs_blob.set(blob)!
mut file := fs.factory.fs_file.new(
name: 'test_file.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
file = fs.factory.fs_file.set(file)!
fs.factory.fs_file.add_to_directory(file.id, src_dir_id)!
// 2. Create a destination directory
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
// 3. Copy the file with copy_blobs set to false
fs.cp('/src/test_file.txt', '/dest/', FindOptions{}, CopyOptions{copy_blobs: false})!
// 4. Verify the file is copied and references the same blob
dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.files.len == 1
copied_file := fs.factory.fs_file.get(dest_dir.files[0])!
assert copied_file.name == 'test_file.txt'
assert copied_file.blobs[0] == blob.id // Should reference the same blob
}
// Verifies that cp with copy_blobs: true duplicates the blob: the copied
// file gets a NEW blob ID whose data equals the original content.
fn test_cp_file_copy_blobs_true() ! {
mut fs := new_fs_test() or { panic(err) }
defer {
delete_fs_test() or {}
}
// 1. Create a source directory and a file
src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
mut blob := fs.factory.fs_blob.new(data: 'file content'.bytes())!
blob = fs.factory.fs_blob.set(blob)!
mut file := fs.factory.fs_file.new(
name: 'test_file.txt'
fs_id: fs.id
blobs: [blob.id]
mime_type: .txt
)!
file = fs.factory.fs_file.set(file)!
fs.factory.fs_file.add_to_directory(file.id, src_dir_id)!
// 2. Create a destination directory
dest_dir_id := fs.factory.fs_dir.create_path(fs.id, '/dest')!
// 3. Copy the file with copy_blobs set to true
fs.cp('/src/test_file.txt', '/dest/', FindOptions{}, CopyOptions{copy_blobs: true})!
// 4. Verify the file is copied and has a new blob
dest_dir := fs.factory.fs_dir.get(dest_dir_id)!
assert dest_dir.files.len == 1
copied_file := fs.factory.fs_file.get(dest_dir.files[0])!
assert copied_file.name == 'test_file.txt'
assert copied_file.blobs[0] != blob.id // Should have a new blob ID
// Verify the content of the new blob is the same
new_blob := fs.factory.fs_blob.get(copied_file.blobs[0])!
assert new_blob.data == 'file content'.bytes()
}

View File

@@ -1,5 +1,8 @@
module herofs
import os
// FindResult represents the result of a filesystem search
pub struct FindResult {
pub mut:
@@ -26,6 +29,8 @@ pub mut:
follow_symlinks bool // Whether to follow symbolic links during search
}
// find searches for filesystem objects starting from a given path
//
// Parameters:
@@ -109,6 +114,11 @@ fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOption
}
}
// Stop if we have reached max_depth
if opts.max_depth >= 0 && current_depth >= opts.max_depth {
return
}
// Get files in current directory
for file_id in current_dir.files {
file := self.factory.fs_file.get(file_id)!
@@ -137,58 +147,64 @@ fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOption
}
} else {
if symlink.target_type == .file {
if self.factory.fs_file.exist(symlink.target_id)! {
target_file := self.factory.fs_file.get(symlink.target_id)!
target_file_path := join_path(current_path, target_file.name)
// Check if we've already added this file to avoid duplicates
mut found := false
for result in results {
if result.id == target_file.id && result.result_type == .file {
found = true
break
if self.factory.fs_file.exist(symlink.target_id)! {
target_file := self.factory.fs_file.get(symlink.target_id)!
// Resolve the absolute path of the target file
target_abs_path := self.get_abs_path_for_item(target_file.id, .file)!
// Check if we've already added this file to avoid duplicates
mut found := false
for result in results {
if result.id == target_file.id && result.result_type == .file {
found = true
break
}
}
if !found {
results << FindResult{
result_type: .file
id: target_file.id
path: target_abs_path // Use the absolute path
}
}
} else {
// dangling symlink, just add the symlink itself
return error('Dangling symlink at path ${symlink_path} in directory ${current_path} in fs: ${self.id}')
}
}
if !found {
results << FindResult{
result_type: .file
id: target_file.id
path: target_file_path
if symlink.target_type == .directory {
if self.factory.fs_dir.exist(symlink.target_id)! {
target_dir := self.factory.fs_dir.get(symlink.target_id)!
// Resolve the absolute path of the target directory
target_abs_path := self.get_abs_path_for_item(target_dir.id, .directory)!
// Check if we've already added this directory to avoid duplicates
mut found := false
for result in results {
if result.id == target_dir.id && result.result_type == .directory {
found = true
break
}
}
if !found {
results << FindResult{
result_type: .directory
id: target_dir.id
path: target_abs_path // Use the absolute path
}
if opts.recursive {
self.find_recursive(symlink.target_id, target_abs_path,
opts, mut results, current_depth + 1)!
}
}
} else {
// dangling symlink, just add the symlink itself
return error('Dangling dir symlink at path ${symlink_path} in directory ${current_path} in fs: ${self.id}')
}
}
} else {
// dangling symlink, just add the symlink itself
return error('Dangling symlink at path ${symlink_path} in directory ${current_path} in fs: ${self.id}')
}
}
if symlink.target_type == .directory {
if self.factory.fs_dir.exist(symlink.target_id)! {
target_dir := self.factory.fs_dir.get(symlink.target_id)!
target_dir_path := join_path(current_path, target_dir.name)
// Check if we've already added this directory to avoid duplicates
mut found := false
for result in results {
if result.id == target_dir.id && result.result_type == .directory {
found = true
break
}
}
if !found {
results << FindResult{
result_type: .directory
id: target_dir.id
path: target_dir_path
}
if opts.recursive {
self.find_recursive(symlink.target_id, target_dir_path,
opts, mut results, current_depth + 1)!
}
}
} else {
// dangling symlink, just add the symlink itself
return error('Dangling dir symlink at path ${symlink_path} in directory ${current_path} in fs: ${self.id}')
}
}
}
}
}
@@ -329,6 +345,7 @@ pub fn (mut self Fs) get_symlink_by_absolute_path(path string) !FsSymlink {
if path_parts.len == 0 || path_parts[path_parts.len - 1] == '' {
return error('Invalid symlink path: "${path}"')
}
symlink_name := path_parts[path_parts.len - 1]
dir_path := if path_parts.len == 1 {

View File

@@ -0,0 +1,118 @@
module herofs
import freeflowuniverse.herolib.core.pathlib
import os
// test_find exercises Fs.find over a small fixture tree:
//   /test_dir/file1.txt, /test_dir/file2.log, /test_dir/subdir/file3.txt
// plus a symlink /test_dir/link1.txt -> file1.txt.
// It covers: full recursion, include/exclude glob patterns, non-recursive
// listing, max_depth limiting, exact-path lookup, and symlink reporting.
fn test_find() {
	mut f := new_fs_test()!
	// defer {
	// delete_fs_test()!
	// }
	// Create a test directory structure
	f.factory.fs_dir.create_path(f.id, '/test_dir/subdir')!
	// blob + file pattern: new() builds the entity, set() persists it,
	// add_to_directory() links the file into its parent dir.
	mut blob1 := f.factory.fs_blob.new(data: 'hello'.bytes())!
	blob1 = f.factory.fs_blob.set(blob1)!
	mut file1 := f.factory.fs_file.new(
		name:  'file1.txt'
		fs_id: f.id
		blobs: [blob1.id]
	)!
	file1 = f.factory.fs_file.set(file1)!
	dir1 := f.get_dir_by_absolute_path('/test_dir')!
	f.factory.fs_file.add_to_directory(file1.id, dir1.id)!
	mut blob2 := f.factory.fs_blob.new(data: 'world'.bytes())!
	blob2 = f.factory.fs_blob.set(blob2)!
	mut file2 := f.factory.fs_file.new(
		name:  'file2.log'
		fs_id: f.id
		blobs: [blob2.id]
	)!
	file2 = f.factory.fs_file.set(file2)!
	f.factory.fs_file.add_to_directory(file2.id, dir1.id)!
	mut blob3 := f.factory.fs_blob.new(data: 'sub'.bytes())!
	blob3 = f.factory.fs_blob.set(blob3)!
	mut file3 := f.factory.fs_file.new(
		name:  'file3.txt'
		fs_id: f.id
		blobs: [blob3.id]
	)!
	file3 = f.factory.fs_file.set(file3)!
	dir2 := f.get_dir_by_absolute_path('/test_dir/subdir')!
	f.factory.fs_file.add_to_directory(file3.id, dir2.id)!
	// Symlink link1.txt points at file1; it must also be registered in the
	// parent directory's symlink list for find() to report it.
	mut symlink1 := f.factory.fs_symlink.new(
		name:        'link1.txt'
		fs_id:       f.id
		parent_id:   dir1.id
		target_id:   file1.id
		target_type: .file
	)!
	symlink1 = f.factory.fs_symlink.set(symlink1)!
	mut dir1_mut := f.factory.fs_dir.get(dir1.id)!
	dir1_mut.symlinks << symlink1.id
	f.factory.fs_dir.set(dir1_mut)!
	// Test 1: Find all files recursively (default)
	mut results_all := f.find('/', FindOptions{})!
	// root, test_dir, file1.txt, file2.log, link1.txt, subdir, file3.txt
	assert results_all.len == 7
	// Test 2: Find text files
	mut results_txt := f.find('/', FindOptions{
		include_patterns: ['*.txt']
	})!
	// include_patterns filter applies to files; directories still appear,
	// hence the filter on .file here.
	assert results_txt.filter(it.result_type == .file).len == 2
	for result in results_txt {
		if result.result_type == .file {
			assert result.path.ends_with('.txt')
		}
	}
	// Test 3: Find files non-recursively
	mut results_non_recursive := f.find('/test_dir', FindOptions{
		recursive: false
	})!
	// test_dir, file1.txt, file2.log, subdir, link1.txt
	assert results_non_recursive.len == 5
	// Test 4: Exclude log files
	mut results_no_log := f.find('/', FindOptions{
		exclude_patterns: ['*.log']
	})!
	for result in results_no_log {
		assert !result.path.ends_with('.log')
	}
	// Test 5: Find with max depth
	mut results_depth_1 := f.find('/', FindOptions{
		max_depth: 1
	})!
	// root, test_dir
	assert results_depth_1.len == 2
	// Test 6: Find a specific file
	mut results_specific_file := f.find('/test_dir/file1.txt', FindOptions{})!
	assert results_specific_file.len == 1
	assert results_specific_file[0].path == '/test_dir/file1.txt'
	// Test 7: Find with symlinks not followed
	mut results_symlinks := f.find('/', FindOptions{
		follow_symlinks: false
	})!
	// With follow_symlinks off, the symlink itself must appear as a result.
	mut found_symlink := false
	for result in results_symlinks {
		if result.result_type == .symlink {
			found_symlink = true
			break
		}
	}
	assert found_symlink
	// Test 8: Find a specific directory
	mut results_specific_dir := f.find('/test_dir/subdir', FindOptions{})!
	// should contain subdir and file3.txt
	assert results_specific_dir.len == 2
}

View File

@@ -232,7 +232,7 @@ fn (mut self Fs) export_file(file_id u32, dest_path string, opts ExportOptions)
// Set file modification time if available in metadata
if _ := vfs_file.metadata['modified'] {
// Note: V doesn't have built-in utime, but we could add this later
// For now, just preserve the metadata in a comment or separate file
// For now, just preserve the metadata in a message or separate file
}
}
}

View File

@@ -0,0 +1,50 @@
module herofs
// get_abs_path_for_item returns the absolute path for a given filesystem item ID and type
// get_abs_path_for_item returns the absolute path (e.g. '/a/b/c') for the
// filesystem item identified by `id` and `item_type`.
//
// Files and symlinks: the parent directory is located by scanning every
// directory in the factory (O(dirs) per lookup — acceptable for now; a
// reverse child->parent index would be needed for large filesystems).
// Directories: the path is built by walking the parent chain up to the
// root; the root directory (named 'root', parent_id == 0) is stripped so
// returned paths always start at '/'.
//
// Returns an error when the item is not attached to any directory, or when
// a directory parent chain is corrupted (contains a cycle).
pub fn (mut self Fs) get_abs_path_for_item(id u32, item_type FSItemType) !string {
	match item_type {
		.file {
			// Linear scan: find the directory whose file list contains `id`.
			all_dirs := self.factory.fs_dir.list()!
			for dir in all_dirs {
				if id in dir.files {
					parent_path := self.get_abs_path_for_item(dir.id, .directory)!
					file := self.factory.fs_file.get(id)!
					return join_path(parent_path, file.name)
				}
			}
			return error('File with ID ${id} not found in any directory')
		}
		.directory {
			mut path_parts := []string{}
			mut current_dir_id := id
			// Track visited IDs to guard against corrupted parent chains
			// (a cycle would otherwise loop forever).
			mut visited := []u32{}
			for {
				if current_dir_id in visited {
					return error('Cycle detected in directory parent chain at ID ${current_dir_id}')
				}
				visited << current_dir_id
				dir := self.factory.fs_dir.get(current_dir_id)!
				path_parts.insert(0, dir.name)
				if dir.parent_id == 0 {
					break
				}
				current_dir_id = dir.parent_id
			}
			// Don't prepend slash to root, which is just 'root'
			if path_parts.len > 0 && path_parts[0] == 'root' {
				path_parts.delete(0)
			}
			return '/' + path_parts.join('/')
		}
		.symlink {
			// Similar to file logic: scan directory symlink lists for `id`.
			all_dirs := self.factory.fs_dir.list()!
			for dir in all_dirs {
				if id in dir.symlinks {
					parent_path := self.get_abs_path_for_item(dir.id, .directory)!
					symlink := self.factory.fs_symlink.get(id)!
					return join_path(parent_path, symlink.name)
				}
			}
			return error('Symlink with ID ${id} not found in any directory')
		}
	}
	return '' // unreachable: the match above is exhaustive
}

View File

@@ -1,13 +1,137 @@
module herofs
// Note: This test is simplified due to V compiler namespace issues with FindOptions
// The full functionality is tested in the examples and working correctly
// test_basic_operations verifies the core HeroFS CRUD primitives:
// file creation + directory attachment, path-based directory creation,
// hash-based blob lookup/existence, and blob integrity verification.
fn test_basic_operations() ! {
	// Initialize HeroFS factory and create test filesystem
	mut fs := new_fs_test() or { panic(err) }
	defer {
		delete_fs_test() or {}
	}
	// Test basic file creation and retrieval
	mut test_blob := fs.factory.fs_blob.new(data: 'Hello, HeroFS!'.bytes())!
	test_blob = fs.factory.fs_blob.set(test_blob)!
	mut test_file := fs.factory.fs_file.new(
		name:      'test.txt'
		fs_id:     fs.id
		blobs:     [test_blob.id]
		mime_type: .txt
	)!
	test_file = fs.factory.fs_file.set(test_file)!
	fs.factory.fs_file.add_to_directory(test_file.id, fs.root_dir_id)!
	// Verify file was created
	retrieved_file := fs.factory.fs_file.get(test_file.id)!
	assert retrieved_file.name == 'test.txt'
	assert retrieved_file.blobs.len == 1
	// Test directory creation using create_path
	src_dir_id := fs.factory.fs_dir.create_path(fs.id, '/src')!
	docs_dir_id := fs.factory.fs_dir.create_path(fs.id, '/docs')!
	tests_dir_id := fs.factory.fs_dir.create_path(fs.id, '/tests')!
	// Verify directories were created
	src_dir := fs.factory.fs_dir.get(src_dir_id)!
	assert src_dir.name == 'src'
	docs_dir := fs.factory.fs_dir.get(docs_dir_id)!
	assert docs_dir.name == 'docs'
	tests_dir := fs.factory.fs_dir.get(tests_dir_id)!
	assert tests_dir.name == 'tests'
	// Test blob creation and hash-based retrieval
	test_data := 'Test blob content'.bytes()
	mut test_blob2 := fs.factory.fs_blob.new(data: test_data)!
	test_blob2 = fs.factory.fs_blob.set(test_blob2)!
	// Test hash-based retrieval
	retrieved_blob := fs.factory.fs_blob.get_by_hash(test_blob2.hash)!
	assert retrieved_blob.data == test_data
	// Test blob existence by hash
	exists := fs.factory.fs_blob.exists_by_hash(test_blob2.hash)!
	assert exists == true
	// Test blob integrity verification
	assert test_blob2.verify_integrity() == true
	println(' Basic operations test passed!')
}
// test_rm_file checks that removing a file by path deletes the file entity
// while keeping its blob (blobs are only deleted when explicitly requested
// via RemoveOptions.delete_blobs).
fn test_rm_file() ! {
	mut fs := new_fs_test() or { panic(err) }
	defer {
		delete_fs_test() or {}
	}
	// Build one file attached to the root directory.
	mut blob := fs.factory.fs_blob.new(data: 'File to remove'.bytes())!
	blob = fs.factory.fs_blob.set(blob)!
	mut victim := fs.factory.fs_file.new(
		name:      'to_remove.txt'
		fs_id:     fs.id
		blobs:     [blob.id]
		mime_type: .txt
	)!
	victim = fs.factory.fs_file.set(victim)!
	fs.factory.fs_file.add_to_directory(victim.id, fs.root_dir_id)!
	// Sanity check: the file is present before removal.
	assert fs.factory.fs_file.exist(victim.id)! == true
	// Remove by absolute path with default options.
	fs.rm('/to_remove.txt', FindOptions{}, RemoveOptions{})!
	// The file entity is gone ...
	assert fs.factory.fs_file.exist(victim.id)! == false
	// ... but the blob survives (default behavior keeps blobs).
	assert fs.factory.fs_blob.exist(blob.id)! == true
	println(' Remove file test passed!')
}
// test_rm_file_with_blobs checks that RemoveOptions{delete_blobs: true}
// removes the backing blob together with the file entity.
fn test_rm_file_with_blobs() ! {
	mut fs := new_fs_test() or { panic(err) }
	defer {
		delete_fs_test() or {}
	}
	// Build one file (with one blob) attached to the root directory.
	mut blob := fs.factory.fs_blob.new(data: 'File to remove with blobs'.bytes())!
	blob = fs.factory.fs_blob.set(blob)!
	mut victim := fs.factory.fs_file.new(
		name:      'to_remove_with_blobs.txt'
		fs_id:     fs.id
		blobs:     [blob.id]
		mime_type: .txt
	)!
	victim = fs.factory.fs_file.set(victim)!
	fs.factory.fs_file.add_to_directory(victim.id, fs.root_dir_id)!
	// Sanity check: both the file and its blob exist before removal.
	assert fs.factory.fs_file.exist(victim.id)! == true
	assert fs.factory.fs_blob.exist(blob.id)! == true
	// Remove by path, asking for blob deletion as well.
	fs.rm('/to_remove_with_blobs.txt', FindOptions{}, RemoveOptions{ delete_blobs: true })!
	// Both the file entity and its blob must be gone.
	assert fs.factory.fs_file.exist(victim.id)! == false
	assert fs.factory.fs_blob.exist(blob.id)! == false
	println(' Remove file with blobs test passed!')
}
fn test_rm_directory() ! {
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'basic_test'
description: 'Test filesystem for basic operations'
name: 'rm_dir_test'
description: 'Test filesystem for remove directory operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
@@ -22,33 +146,76 @@ fn test_basic_operations() ! {
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Test basic file creation and retrieval
mut test_blob := fs_factory.fs_blob.new(data: 'Hello, HeroFS!'.bytes())!
test_blob = fs_factory.fs_blob.set(test_blob)!
// Create a directory to remove
test_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/test_dir')!
test_dir := fs_factory.fs_dir.get(test_dir_id)!
assert test_dir.name == 'test_dir'
// Test rm with directory path
test_fs.rm('/test_dir', FindOptions{}, RemoveOptions{})!
// Verify directory no longer exists
assert fs_factory.fs_dir.exist(test_dir_id)! == false
println(' Remove directory test passed!')
}
fn test_rm_directory_recursive() ! {
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'rm_dir_recursive_test'
description: 'Test filesystem for recursive remove directory operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0 // Root has no parent
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create directory structure
test_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/test_dir')!
sub_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/test_dir/sub_dir')!
// Create a file in the directory
mut test_blob := fs_factory.fs_blob.new(data: 'File in directory'.bytes())!
test_blob = fs_factory.fs_blob.set(test_blob)!
mut test_file := fs_factory.fs_file.new(
name: 'test.txt'
name: 'file_in_dir.txt'
fs_id: test_fs.id
blobs: [test_blob.id]
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
fs_factory.fs_file.add_to_directory(test_file.id, root_dir.id)!
fs_factory.fs_file.add_to_directory(test_file.id, test_dir_id)!
// Verify file was created
retrieved_file := fs_factory.fs_file.get(test_file.id)!
assert retrieved_file.name == 'test.txt'
assert retrieved_file.blobs.len == 1
// Verify directory and file exist before removal
assert fs_factory.fs_dir.exist(test_dir_id)! == true
assert fs_factory.fs_dir.exist(sub_dir_id)! == true
assert fs_factory.fs_file.exist(test_file.id)! == true
println(' Basic operations test passed!')
// Test rm with recursive option
test_fs.rm('/test_dir', FindOptions{}, RemoveOptions{recursive: true})!
// Verify directory and its contents are removed
assert fs_factory.fs_dir.exist(test_dir_id)! == false
assert fs_factory.fs_dir.exist(sub_dir_id)! == false
assert fs_factory.fs_file.exist(test_file.id)! == false
println(' Remove directory recursively test passed!')
}
fn test_directory_operations() ! {
// Initialize HeroFS factory and create test filesystem
fn test_mv_file() ! {
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'dir_test'
description: 'Test filesystem for directory operations'
name: 'mv_file_test'
description: 'Test filesystem for move file operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
@@ -63,43 +230,131 @@ fn test_directory_operations() ! {
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Test directory creation using create_path
// Create source directory
src_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/src')!
docs_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/docs')!
tests_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/tests')!
// Verify directories were created
src_dir := fs_factory.fs_dir.get(src_dir_id)!
assert src_dir.name == 'src'
docs_dir := fs_factory.fs_dir.get(docs_dir_id)!
assert docs_dir.name == 'docs'
tests_dir := fs_factory.fs_dir.get(tests_dir_id)!
assert tests_dir.name == 'tests'
// Create destination directory
dest_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/dest')!
println(' Directory operations test passed!')
}
fn test_blob_operations() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Test blob creation and hash-based retrieval
test_data := 'Test blob content'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_data)!
// Create a file to move
mut test_blob := fs_factory.fs_blob.new(data: 'File to move'.bytes())!
test_blob = fs_factory.fs_blob.set(test_blob)!
mut test_file := fs_factory.fs_file.new(
name: 'to_move.txt'
fs_id: test_fs.id
blobs: [test_blob.id]
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
fs_factory.fs_file.add_to_directory(test_file.id, src_dir_id)!
// Test hash-based retrieval
retrieved_blob := fs_factory.fs_blob.get_by_hash(test_blob.hash)!
assert retrieved_blob.data == test_data
// Verify file exists in source directory before move
src_dir := fs_factory.fs_dir.get(src_dir_id)!
assert test_file.id in src_dir.files
// Test blob existence by hash
exists := fs_factory.fs_blob.exists_by_hash(test_blob.hash)!
assert exists == true
// Test mv file operation
test_fs.mv('/src/to_move.txt', '/dest/', MoveOptions{})!
// Test blob integrity verification
assert test_blob.verify_integrity() == true
// Verify file no longer exists in source directory
src_dir = fs_factory.fs_dir.get(src_dir_id)!
assert test_file.id !in src_dir.files
println(' Blob operations test passed!')
// Verify file exists in destination directory
dest_dir := fs_factory.fs_dir.get(dest_dir_id)!
assert dest_dir.files.len == 1
moved_file := fs_factory.fs_file.get(dest_dir.files[0])!
assert moved_file.name == 'to_move.txt'
println(' Move file test passed!')
}
fn test_mv_file_rename() ! {
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'mv_file_rename_test'
description: 'Test filesystem for move and rename file operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0 // Root has no parent
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create source directory
src_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/src')!
// Create a file to move and rename
mut test_blob := fs_factory.fs_blob.new(data: 'File to move and rename'.bytes())!
test_blob = fs_factory.fs_blob.set(test_blob)!
mut test_file := fs_factory.fs_file.new(
name: 'original_name.txt'
fs_id: test_fs.id
blobs: [test_blob.id]
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
fs_factory.fs_file.add_to_directory(test_file.id, src_dir_id)!
// Test mv with rename
test_fs.mv('/src/original_name.txt', '/src/renamed_file.txt', MoveOptions{})!
// Verify file was renamed
renamed_file := fs_factory.fs_file.get(test_file.id)!
assert renamed_file.name == 'renamed_file.txt'
println(' Move file with rename test passed!')
}
fn test_mv_directory() ! {
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'mv_dir_test'
description: 'Test filesystem for move directory operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0 // Root has no parent
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create source directory
src_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/src')!
// Create destination directory
dest_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/dest')!
// Create a subdirectory to move
sub_dir_id := fs_factory.fs_dir.create_path(test_fs.id, '/src/sub_dir')!
sub_dir := fs_factory.fs_dir.get(sub_dir_id)!
assert sub_dir.name == 'sub_dir'
// Test mv directory operation
test_fs.mv('/src/sub_dir', '/dest/', MoveOptions{})!
// Verify directory no longer exists in source
src_dir := fs_factory.fs_dir.get(src_dir_id)!
assert sub_dir_id !in src_dir.directories
// Verify directory exists in destination
dest_dir := fs_factory.fs_dir.get(dest_dir_id)!
assert dest_dir.directories.len == 1
moved_dir := fs_factory.fs_dir.get(dest_dir.directories[0])!
assert moved_dir.name == 'sub_dir'
assert moved_dir.parent_id == dest_dir_id
println(' Move directory test passed!')
}

View File

@@ -12,7 +12,7 @@ HeroFS is built on top of HeroDB, which uses Redis as its storage backend. The f
4. **FsSymlink** - Symbolic links
5. **FsBlob** - Binary data chunks
All components inherit from the `Base` struct, which provides common fields like ID, name, description, timestamps, security policies, tags, and comments.
All components inherit from the `Base` struct, which provides common fields like ID, name, description, timestamps, security policies, tags, and messages.
## Filesystem (Fs)
@@ -278,7 +278,7 @@ When creating or modifying components, HeroFS validates references to other comp
HeroFS inherits the security model from HeroDB:
- Each component has a `securitypolicy` field referencing a SecurityPolicy object
- Components can have associated tags for categorization
- Components can have associated comments for documentation
- Components can have associated messages for documentation
## Performance Considerations

View File

@@ -93,12 +93,16 @@ fn (mut self DBCalendar) load(mut o Calendar, mut e encoder.Decoder) ! {
@[params]
pub struct CalendarArg {
pub mut:
name string
description string
color string
timezone string
is_public bool
events []u32
id u32
name string
description string
color string
timezone string
is_public bool
events []u32
securitypolicy u32
tags []string
messages []db.MessageArg
}
// get new calendar, not from the DB
@@ -113,6 +117,9 @@ pub fn (mut self DBCalendar) new(args CalendarArg) !Calendar {
// Set base fields
o.name = args.name
o.description = args.description
o.securitypolicy = args.securitypolicy
o.tags = self.db.tags_get(args.tags)!
o.messages = self.db.messages_get(args.messages)!
o.updated_at = ourtime.now().unix()
return o
@@ -155,7 +162,11 @@ pub fn calendar_handle(mut f ModelsFactory, rpcid int, servercontext map[string]
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Calendar](params)!
mut args := db.decode_generic[CalendarArg](params)!
mut o := f.calendar.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.calendar.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -318,6 +318,7 @@ pub fn (mut self DBCalendarEvent) load(mut o CalendarEvent, mut e encoder.Decode
@[params]
pub struct CalendarEventArg {
pub mut:
id u32
name string
description string
title string
@@ -462,7 +463,11 @@ pub fn calendar_event_handle(mut f ModelsFactory, rpcid int, servercontext map[s
return new_response(rpcid, json.encode_pretty(res))
}
'set' {
mut o := db.decode_generic[CalendarEvent](params)!
mut args := db.decode_generic[CalendarEventArg](params)!
mut o := f.calendar_event.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.calendar_event.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -107,6 +107,7 @@ pub fn (mut self DBChatGroup) load(mut o ChatGroup, mut e encoder.Decoder) ! {
@[params]
pub struct ChatGroupArg {
pub mut:
id u32
name string
description string
chat_type ChatType
@@ -175,7 +176,11 @@ pub fn chat_group_handle(mut f ModelsFactory, rpcid int, servercontext map[strin
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[ChatGroup](params)!
mut args := db.decode_generic[ChatGroupArg](params)!
mut o := f.chat_group.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.chat_group.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -197,6 +197,7 @@ pub fn (mut self DBChatMessage) load(mut o ChatMessage, mut e encoder.Decoder) !
@[params]
pub struct ChatMessageArg {
pub mut:
id u32
name string
description string
content string
@@ -298,7 +299,11 @@ pub fn chat_message_handle(mut f ModelsFactory, rpcid int, servercontext map[str
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[ChatMessage](params)!
mut args := db.decode_generic[ChatMessageArg](params)!
mut o := f.chat_message.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.chat_message.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -105,6 +105,7 @@ fn (mut self DBContact) load(mut o Contact, mut e encoder.Decoder) ! {
@[params]
pub struct ContactArg {
pub mut:
id u32
name string @[required]
description string
emails []string
@@ -208,7 +209,11 @@ pub fn contact_handle(mut f ModelsFactory, rpcid int, servercontext map[string]s
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Contact](params)!
mut args := db.decode_generic[ContactArg](params)!
mut o := f.contact.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.contact.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -17,20 +17,21 @@ __global (
@[heap]
pub struct ModelsFactory {
pub mut:
calendar DBCalendar
calendar_event DBCalendarEvent
group DBGroup
user DBUser
project DBProject
project_issue DBProjectIssue
chat_group DBChatGroup
chat_message DBChatMessage
contact DBContact
profile DBProfile
planning DBPlanning
calendar DBCalendar
calendar_event DBCalendarEvent
group DBGroup
user DBUser
project DBProject
project_issue DBProjectIssue
chat_group DBChatGroup
chat_message DBChatMessage
contact DBContact
profile DBProfile
planning DBPlanning
registration_desk DBRegistrationDesk
messages DBMessages
rpc_handler &Handler
messages DBMessages
tags DBTags
rpc_handler &Handler
}
@[params]
@@ -48,46 +49,49 @@ pub fn new(args NewArgs) !&ModelsFactory {
}
mut h := new_handler(openrpc_path)!
mut f := ModelsFactory{
calendar: DBCalendar{
calendar: DBCalendar{
db: &mydb
}
calendar_event: DBCalendarEvent{
calendar_event: DBCalendarEvent{
db: &mydb
}
group: DBGroup{
group: DBGroup{
db: &mydb
}
user: DBUser{
user: DBUser{
db: &mydb
}
project: DBProject{
project: DBProject{
db: &mydb
}
project_issue: DBProjectIssue{
project_issue: DBProjectIssue{
db: &mydb
}
chat_group: DBChatGroup{
chat_group: DBChatGroup{
db: &mydb
}
chat_message: DBChatMessage{
chat_message: DBChatMessage{
db: &mydb
}
contact: DBContact{
contact: DBContact{
db: &mydb
}
profile: DBProfile{
profile: DBProfile{
db: &mydb
}
planning: DBPlanning{
planning: DBPlanning{
db: &mydb
}
registration_desk: DBRegistrationDesk{
db: &mydb
}
messages: DBMessages{
messages: DBMessages{
db: &mydb
}
rpc_handler: &h
tags: DBTags{
db: &mydb
}
rpc_handler: &h
}
// openrpc handler can be used by any server, has even embedded unix sockets and simple http server
@@ -123,13 +127,16 @@ pub fn group_api_handler(rpcid int, servercontext map[string]string, actorname s
return calendar_handle(mut f, rpcid, servercontext, userref, methodname, params)!
}
'calendar_event' {
return calendar_event_handle(mut f, rpcid, servercontext, userref, methodname, params)!
return calendar_event_handle(mut f, rpcid, servercontext, userref, methodname,
params)!
}
'chat_group' {
return chat_group_handle(mut f, rpcid, servercontext, userref, methodname, params)!
return chat_group_handle(mut f, rpcid, servercontext, userref, methodname,
params)!
}
'chat_message' {
return chat_message_handle(mut f, rpcid, servercontext, userref, methodname, params)!
return chat_message_handle(mut f, rpcid, servercontext, userref, methodname,
params)!
}
'group' {
return group_handle(mut f, rpcid, servercontext, userref, methodname, params)!
@@ -138,7 +145,8 @@ pub fn group_api_handler(rpcid int, servercontext map[string]string, actorname s
return project_handle(mut f, rpcid, servercontext, userref, methodname, params)!
}
'project_issue' {
return project_issue_handle(mut f, rpcid, servercontext, userref, methodname, params)!
return project_issue_handle(mut f, rpcid, servercontext, userref, methodname,
params)!
}
'user' {
return user_handle(mut f, rpcid, servercontext, userref, methodname, params)!
@@ -153,11 +161,15 @@ pub fn group_api_handler(rpcid int, servercontext map[string]string, actorname s
return planning_handle(mut f, rpcid, servercontext, userref, methodname, params)!
}
'registration_desk' {
return registration_desk_handle(mut f, rpcid, servercontext, userref, methodname, params)!
return registration_desk_handle(mut f, rpcid, servercontext, userref, methodname,
params)!
}
'message' {
return message_handle(mut f, rpcid, servercontext, userref, methodname, params)!
}
'tags' {
return tags_handle(mut f, rpcid, servercontext, userref, methodname, params)!
}
else {
return jsonrpc.new_error(rpcid,
code: 32111
@@ -166,4 +178,4 @@ pub fn group_api_handler(rpcid int, servercontext map[string]string, actorname s
)
}
}
}
}

View File

@@ -120,12 +120,16 @@ pub fn (mut o Group) load(mut e encoder.Decoder) ! {
@[params]
pub struct GroupArg {
pub mut:
name string
description string
members []GroupMember
subgroups []u32
parent_group u32
is_public bool
id u32
name string
description string
members []GroupMember
subgroups []u32
parent_group u32
is_public bool
securitypolicy u32
tags []string
messages []db.MessageArg
}
pub struct DBGroup {
@@ -153,6 +157,9 @@ pub fn (mut self DBGroup) new(args GroupArg) !Group {
// Set base fields
o.name = args.name
o.description = args.description
o.securitypolicy = args.securitypolicy
o.tags = self.db.tags_get(args.tags)!
o.messages = self.db.messages_get(args.messages)!
o.updated_at = ourtime.now().unix()
return o
@@ -264,7 +271,11 @@ pub fn group_handle(mut f ModelsFactory, rpcid int, servercontext map[string]str
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Group](params)!
mut args := db.decode_generic[GroupArg](params)!
mut o := f.group.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.group.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -21,10 +21,10 @@ pub mut:
pub struct Location {
db.Base
pub mut:
addresses []Address // Multiple addresses (home, work, etc.)
coordinates Coordinates // GPS coordinates
timezone string
is_verified bool
addresses []Address // Multiple addresses (home, work, etc.)
coordinates Coordinates // GPS coordinates
timezone string
is_verified bool
location_type LocationType
}
@@ -141,10 +141,10 @@ pub fn (self Location) dump(mut e encoder.Encoder) ! {
for addr in self.addresses {
addr.dump(mut e)!
}
// Encode coordinates
self.coordinates.dump(mut e)!
// Encode other fields
e.add_string(self.timezone)
e.add_bool(self.is_verified)
@@ -155,19 +155,19 @@ fn (mut self DBLocation) load(mut o Location, mut e encoder.Decoder) ! {
// Decode addresses
addr_count := e.get_u32()!
o.addresses = []Address{cap: int(addr_count)}
for _ in 0..addr_count {
for _ in 0 .. addr_count {
mut addr := Address{}
addr.load(mut e)!
o.addresses << addr
}
// Decode coordinates
o.coordinates.load(mut e)!
// Decode other fields
o.timezone = e.get_string()!
o.is_verified = e.get_bool()!
o.location_type = LocationType(e.get_u8()!)
o.location_type = unsafe { LocationType(e.get_u8()!) }
}
@[params]
@@ -285,4 +285,4 @@ pub fn (self Coordinates) to_string() string {
// Helper method to check if coordinates are set
pub fn (self Coordinates) is_valid() bool {
return self.latitude != 0.0 || self.longitude != 0.0
}
}

View File

@@ -149,12 +149,16 @@ pub fn (mut self DBMessages) load(mut o Message, mut e encoder.Decoder) ! {
@[params]
pub struct MessageArg {
pub mut:
subject string
message string @[required]
parent u32
author u32
to []u32
cc []u32
id u32
subject string
message string @[required]
parent u32
author u32
to []u32
cc []u32
securitypolicy u32
tags []string
messages []db.MessageArg
}
// get new message, not from the DB
@@ -169,6 +173,12 @@ pub fn (mut self DBMessages) new(args MessageArg) !Message {
send_log: []SendLog{} // Initialize as empty
updated_at: ourtime.now().unix()
}
// Set base fields
o.securitypolicy = args.securitypolicy
o.tags = self.db.tags_get(args.tags)!
o.messages = self.db.messages_get(args.messages)!
return o
}
@@ -237,7 +247,11 @@ pub fn message_handle(mut f ModelsFactory, rpcid int, servercontext map[string]s
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Message](params)!
mut args := db.decode_generic[MessageArg](params)!
mut o := f.messages.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.messages.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -184,6 +184,7 @@ fn (mut self DBPlanning) load(mut o Planning, mut e encoder.Decoder) ! {
@[params]
pub struct PlanningArg {
pub mut:
id u32
name string
description string
color string
@@ -297,7 +298,11 @@ pub fn planning_handle(mut f ModelsFactory, rpcid int, servercontext map[string]
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Planning](params)!
mut args := db.decode_generic[PlanningArg](params)!
mut o := f.planning.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.planning.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -161,6 +161,10 @@ pub mut:
education []Education
skills []string
languages []string
id u32
securitypolicy u32
tags []string
messages []db.MessageArg
}
// get new profile, not from the DB
@@ -185,6 +189,9 @@ pub fn (mut self DBProfile) new(args ProfileArg) !Profile {
// Set base fields
o.name = args.name
o.description = args.description
o.securitypolicy = args.securitypolicy
o.tags = self.db.tags_get(args.tags)!
o.messages = self.db.messages_get(args.messages)!
o.updated_at = ourtime.now().unix()
return o
@@ -226,10 +233,14 @@ pub fn profile_handle(mut f ModelsFactory, rpcid int, servercontext map[string]s
'get' {
id := db.decode_u32(params)!
res := f.profile.get(id)!
return new_response(rpcid, json.encode_pretty(res))
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Profile](params)!
mut args := db.decode_generic[ProfileArg](params)!
mut o := f.profile.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.profile.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -183,6 +183,7 @@ pub fn (mut self DBProject) load(mut o Project, mut e encoder.Decoder) ! {
@[params]
pub struct ProjectArg {
pub mut:
id u32
name string
description string
swimlanes []Swimlane
@@ -285,7 +286,11 @@ pub fn project_handle(mut f ModelsFactory, rpcid int, servercontext map[string]s
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[Project](params)!
mut args := db.decode_generic[ProjectArg](params)!
mut o := f.project.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.project.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -161,6 +161,7 @@ pub fn (mut self DBProjectIssue) load(mut o ProjectIssue, mut e encoder.Decoder)
@[params]
pub struct ProjectIssueArg {
pub mut:
id u32
name string
description string
title string
@@ -329,7 +330,11 @@ pub fn project_issue_handle(mut f ModelsFactory, rpcid int, servercontext map[st
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[ProjectIssue](params)!
mut args := db.decode_generic[ProjectIssueArg](params)!
mut o := f.project_issue.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.project_issue.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -178,6 +178,7 @@ pub fn (mut self DBRegistrationDesk) load(mut o RegistrationDesk, mut e encoder.
@[params]
pub struct RegistrationDeskArg {
pub mut:
id u32
name string
description string
fs_items []u32 // IDs of linked files or dirs
@@ -303,7 +304,11 @@ pub fn registration_desk_handle(mut f ModelsFactory, rpcid int, servercontext ma
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[RegistrationDesk](params)!
mut args := db.decode_generic[RegistrationDeskArg](params)!
mut o := f.registration_desk.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.registration_desk.set(o)!
return new_response_int(rpcid, int(o.id))
}

113
lib/hero/heromodels/tags.v Normal file
View File

@@ -0,0 +1,113 @@
module heromodels
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import json
// Tags represents a collection of tag names with a unique ID
// This is the same as db.Tags but in the heromodels context
pub type Tags = db.Tags
// type_name returns the canonical entity-type identifier for Tags,
// used by the factory/handler routing layer.
pub fn (self Tags) type_name() string {
	return 'tags'
}
// example returns a sample JSON-RPC request/response pair for the given
// method name, used when generating API documentation. Methods without a
// dedicated example yield empty JSON objects.
pub fn (self Tags) example(methodname string) (string, string) {
	match methodname {
		'get' {
			return '{"id": 1}', '{"id": 1, "names": ["development", "urgent", "team"], "md5": "abc123def456"}'
		}
		else {
			return '{}', '{}'
		}
	}
}
// DBTags provides CRUD access to Tags entities persisted in Redis
// (hash 'db:tags' keyed by ID, reverse lookup in 'db:tags_hash' keyed by md5).
pub struct DBTags {
pub mut:
	// shared database handle; excluded from encoding and string output
	db &db.DB @[skip; str: skip]
}
// get loads the Tags entity stored under the given ID.
// Returns an error when id is 0 (reserved for "no tags") or when no
// entity exists for that ID.
pub fn (mut self DBTags) get(id u32) !Tags {
	if id == 0 {
		return error('Tags ID cannot be 0')
	}
	// Entities are stored as JSON in the 'db:tags' hash, keyed by the ID
	raw := self.db.redis.hget('db:tags', id.str())!
	if raw == '' {
		return error('Tags entity not found for ID: ${id}')
	}
	return json.decode(Tags, raw)!
}
// exist reports whether a Tags entity is stored under the given ID.
// ID 0 is reserved and never stored, so it always reports false.
pub fn (mut self DBTags) exist(id u32) !bool {
	if id == 0 {
		return false
	}
	raw := self.db.redis.hget('db:tags', id.str()) or { '' }
	return raw.len > 0
}
// delete removes a Tags entity and its md5 reverse-lookup entry.
// Returns false when the ID is 0 or the entity was already absent,
// true after a successful deletion.
pub fn (mut self DBTags) delete(id u32) !bool {
	if id == 0 {
		return false
	}
	// Load first: the md5 stored on the entity is the key of the
	// reverse-lookup entry that must be removed alongside it.
	raw := self.db.redis.hget('db:tags', id.str()) or { '' }
	if raw.len == 0 {
		return false // already doesn't exist
	}
	entity := json.decode(Tags, raw)!
	self.db.redis.hdel('db:tags', id.str())!
	self.db.redis.hdel('db:tags_hash', entity.md5)!
	return true
}
// tags_handle dispatches a JSON-RPC call for the 'tags' entity to DBTags.
// Supported methods: 'get', 'exist', 'delete'. Any other method returns the
// standard JSON-RPC -32601 "method not found" error.
pub fn tags_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
	match method {
		'get' {
			// params carries the numeric ID; result is the full Tags entity as JSON
			id := db.decode_u32(params)!
			res := f.tags.get(id)!
			return new_response(rpcid, json.encode(res))
		}
		'exist' {
			id := db.decode_u32(params)!
			exists := f.tags.exist(id)!
			if exists {
				return new_response_true(rpcid)
			} else {
				return new_response_false(rpcid)
			}
		}
		'delete' {
			id := db.decode_u32(params)!
			deleted := f.tags.delete(id)!
			if deleted {
				return new_response_true(rpcid)
			} else {
				// delete returns false when the entity was already absent
				return new_error(rpcid,
					code: 404
					message: 'Tags with ID ${id} not found'
				)
			}
		}
		else {
			return new_error(rpcid,
				code: -32601
				message: 'Method not found: ${method}'
			)
		}
	}
}

View File

@@ -94,6 +94,7 @@ fn (mut self DBUser) load(mut o User, mut e encoder.Decoder) ! {
@[params]
pub struct UserArg {
pub mut:
id u32
name string @[required]
description string
user_id u32
@@ -191,7 +192,11 @@ pub fn user_handle(mut f ModelsFactory, rpcid int, servercontext map[string]stri
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[User](params)!
mut args := db.decode_generic[UserArg](params)!
mut o := f.user.new(args)!
if args.id != 0 {
o.id = args.id
}
o = f.user.set(o)!
return new_response_int(rpcid, int(o.id))
}

View File

@@ -1,143 +0,0 @@
import json
import os
def to_snake_case(name):
    """Convert a CamelCase identifier (e.g. 'CalendarEvent') to snake_case."""
    import re
    # First break runs like 'rEvent' -> 'r_Event', then lower/digit->Upper joints.
    step = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    step = re.sub('([a-z0-9])([A-Z])', r'\1_\2', step)
    return step.lower()
def ts_type(v_type):
    """Map a V / JSON-schema type name to its TypeScript equivalent.

    Unknown names (e.g. schema references like 'Calendar') pass through
    unchanged; '[]T' array types are mapped recursively to 'T[]'.
    """
    if v_type.startswith('[]'):
        return ts_type(v_type[2:]) + '[]'
    if v_type == 'array':
        return 'any[]'
    if v_type == 'string':
        return 'string'
    if v_type in ('int', 'integer', 'u32', 'u64', 'i64', 'f32', 'f64'):
        return 'number'
    if v_type in ('bool', 'boolean'):
        return 'boolean'
    return v_type
def generate_interface(schema_name, schema):
    """Render one TypeScript `export interface` from a JSON-schema dict.

    Properties not listed in schema['required'] are emitted as optional
    ('name?:'). allOf references are noted as comments only — the generated
    interfaces do not model inheritance.
    """
    lines = [f'export interface {schema_name} {{\n']
    for prop_name, prop_schema in (schema.get('properties', {}) if 'properties' in schema else {}).items():
        # $ref wins over an inline 'type' entry
        if '$ref' in prop_schema:
            prop_type = prop_schema['$ref'].split('/')[-1]
        else:
            prop_type = ts_type(prop_schema.get('type', 'any'))
        optional = '' if prop_name in schema.get('required', []) else '?'
        lines.append(f' {prop_name}{optional}: {prop_type};\n')
    for item in schema.get('allOf') or []:
        if '$ref' in item:
            ref_name = item['$ref'].split('/')[-1]
            lines.append(f' // Properties from {ref_name} are inherited\n')
    lines.append('}\n')
    return ''.join(lines)
def generate_client(spec):
    """Render the TypeScript client class from an OpenRPC spec dict.

    Emits one async method per spec entry (snake_cased), importing every
    schema interface and posting JSON-RPC 2.0 requests via node-fetch.
    NOTE(review): template indentation reconstructed — source formatting was
    lost; verify output against a previously generated client.ts.
    """
    methods_str = ''
    for method in spec['methods']:
        # Build the typed parameter list for this method
        params = []
        for param in method.get('params', []):
            param_type = 'any'
            if 'schema' in param:
                if '$ref' in param['schema']:
                    param_type = param['schema']['$ref'].split('/')[-1]
                else:
                    param_type = ts_type(param['schema'].get('type', 'any'))
            params.append(f"{param['name']}: {param_type}")
        params_str = ', '.join(params)

        # Resolve the result type ($ref name or mapped primitive)
        result_type = 'any'
        if 'result' in method and 'schema' in method['result']:
            if '$ref' in method['result']['schema']:
                result_type = method['result']['schema']['$ref'].split('/')[-1]
            else:
                result_type = ts_type(method['result']['schema'].get('type', 'any'))

        method_name_snake = to_snake_case(method['name'])
        methods_str += f"""
  async {method_name_snake}(params: {{ {params_str} }}): Promise<{result_type}> {{
    return this.send('{method['name']}', params);
  }}
"""

    schemas = spec.get('components', {}).get('schemas', {})
    imports_str = '\n'.join([f"import {{ {name} }} from './{name}';" for name in schemas.keys()])
    base_url = 'http://localhost:8086/api/heromodels'
    client_class = f"""
import fetch from 'node-fetch';
{imports_str}
export class HeroModelsClient {{
  private baseUrl: string;
  constructor(baseUrl: string = '{base_url}') {{
    this.baseUrl = baseUrl;
  }}
  private async send<T>(method: string, params: any): Promise<T> {{
    const response = await fetch(this.baseUrl, {{
      method: 'POST',
      headers: {{
        'Content-Type': 'application/json',
      }},
      body: JSON.stringify({{
        jsonrpc: '2.0',
        method: method,
        params: params,
        id: 1,
      }}),
    }});
    if (!response.ok) {{
      throw new Error(`HTTP error! status: ${{response.status}}`);
    }}
    const jsonResponse:any = await response.json();
    if (jsonResponse.error) {{
      throw new Error(`RPC error: ${{jsonResponse.error.message}}`);
    }}
    return jsonResponse.result;
  }}
{methods_str}
}}
"""
    return client_class
def main():
    """Generate TypeScript interfaces and a client from heromodels' openrpc.json."""
    script_dir = os.path.dirname(__file__)
    openrpc_path = os.path.abspath(
        os.path.join(script_dir, '..', '..', 'hero', 'heromodels', 'openrpc.json'))
    output_dir = os.path.join(script_dir, 'generated_ts_client')
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(openrpc_path, 'r') as f:
        spec = json.load(f)

    # One interface file per schema
    for name, schema in spec.get('components', {}).get('schemas', {}).items():
        with open(os.path.join(output_dir, f'{name}.ts'), 'w') as out:
            out.write(generate_interface(name, schema))

    # Single client file wiring all methods together
    with open(os.path.join(output_dir, 'client.ts'), 'w') as out:
        out.write(generate_client(spec))

    print(f"TypeScript client generated successfully in {output_dir}")


if __name__ == '__main__':
    main()

5
lib/hero/typescriptgenerator/generate.vsh Normal file → Executable file
View File

@@ -4,8 +4,8 @@ import freeflowuniverse.herolib.hero.typescriptgenerator
import freeflowuniverse.herolib.schemas.openrpc
import os
const openrpc_path = os.dir(@FILE) + '/../../heromodels/openrpc.json'
const output_dir = os.dir(@FILE) + '/generated_ts_client'
const openrpc_path = os.dir(@FILE) + '/../../hero/heromodels/openrpc.json'
const output_dir = os.expand_tilde_to_home('~/code/heromodels/generated')
fn main() {
spec_text := os.read_file(openrpc_path) or {
@@ -20,6 +20,7 @@ fn main() {
config := typescriptgenerator.IntermediateConfig{
base_url: 'http://localhost:8086/api/heromodels'
handler_type: 'heromodels'
}
intermediate_spec := typescriptgenerator.from_openrpc(openrpc_spec, config) or {

View File

@@ -1,9 +0,0 @@
export interface Attendee {
user_id?: number;
status_latest?: string;
attendance_required?: boolean;
admin?: boolean;
organizer?: boolean;
location?: string;
log?: any[];
}

View File

@@ -1,5 +0,0 @@
export interface AttendeeLog {
timestamp?: number;
status?: string;
remark?: string;
}

View File

@@ -1,10 +0,0 @@
export interface Base {
id?: number;
name?: string;
description?: string;
created_at?: number;
updated_at?: number;
securitypolicy?: number;
tags?: number;
messages?: any[];
}

View File

@@ -1,7 +0,0 @@
export interface Calendar {
events?: any[];
color?: string;
timezone?: string;
is_public?: boolean;
// Properties from Base are inherited
}

View File

@@ -1,19 +0,0 @@
export interface CalendarEvent {
title?: string;
start_time?: number;
end_time?: number;
registration_desks?: any[];
attendees?: any[];
docs?: any[];
calendar_id?: number;
status?: string;
is_all_day?: boolean;
reminder_mins?: any[];
color?: string;
timezone?: string;
priority?: string;
public?: boolean;
locations?: any[];
is_template?: boolean;
// Properties from Base are inherited
}

View File

@@ -1,7 +0,0 @@
export interface ChatGroup {
chat_type?: string;
last_activity?: number;
is_archived?: boolean;
group_id?: number;
// Properties from Base are inherited
}

View File

@@ -1,12 +0,0 @@
export interface ChatMessage {
content?: string;
chat_group_id?: number;
sender_id?: number;
parent_messages?: any[];
fs_files?: any[];
message_type?: string;
status?: string;
reactions?: any[];
mentions?: any[];
// Properties from Base are inherited
}

View File

@@ -1,12 +0,0 @@
export interface Contact {
emails?: any[];
user_id?: number;
phones?: any[];
addresses?: any[];
avatar_url?: string;
bio?: string;
timezone?: string;
status?: string;
profile_ids?: any[];
// Properties from Base are inherited
}

View File

@@ -1,8 +0,0 @@
export interface Education {
school?: string;
degree?: string;
field_of_study?: string;
start_date?: number;
end_date?: number;
description?: string;
}

View File

@@ -1,5 +0,0 @@
export interface EventDoc {
fs_item?: number;
cat?: string;
public?: boolean;
}

View File

@@ -1,6 +0,0 @@
export interface EventLocation {
name?: string;
description?: string;
cat?: string;
docs?: any[];
}

View File

@@ -1,9 +0,0 @@
export interface Experience {
title?: string;
company?: string;
location?: string;
start_date?: number;
end_date?: number;
current?: boolean;
description?: string;
}

View File

@@ -1,7 +0,0 @@
export interface Group {
members?: any[];
subgroups?: any[];
parent_group?: number;
is_public?: boolean;
// Properties from Base are inherited
}

View File

@@ -1,5 +0,0 @@
export interface GroupMember {
user_id?: number;
role?: string;
joined_at?: number;
}

View File

@@ -1,10 +0,0 @@
export interface Message {
subject?: string;
message?: string;
parent?: number;
author?: number;
to?: any[];
cc?: any[];
send_log?: any[];
// Properties from Base are inherited
}

View File

@@ -1,4 +0,0 @@
export interface MessageLink {
message_id?: number;
link_type?: string;
}

View File

@@ -1,5 +0,0 @@
export interface MessageReaction {
user_id?: number;
emoji?: string;
timestamp?: number;
}

View File

@@ -1,7 +0,0 @@
export interface Milestone {
name?: string;
description?: string;
due_date?: number;
completed?: boolean;
issues?: any[];
}

View File

@@ -1,12 +0,0 @@
export interface Planning {
color?: string;
timezone?: string;
is_public?: boolean;
calendar_template_id?: number;
registration_desk_id?: number;
autoschedule_rules?: any[];
invite_rules?: any[];
attendees_required?: any[];
attendees_optional?: any[];
// Properties from Base are inherited
}

View File

@@ -1,9 +0,0 @@
export interface PlanningRecurrenceRule {
until?: number;
by_weekday?: any[];
by_monthday?: any[];
hour_from?: number;
hour_to?: number;
duration?: number;
priority?: number;
}

View File

@@ -1,17 +0,0 @@
export interface Profile {
user_id?: number;
summary?: string;
headline?: string;
location?: string;
industry?: string;
picture_url?: string;
background_image_url?: string;
email?: string;
phone?: string;
website?: string;
experience?: any[];
education?: any[];
skills?: any[];
languages?: any[];
// Properties from Base are inherited
}

View File

@@ -1,9 +0,0 @@
export interface Project {
swimlanes?: any[];
milestones?: any[];
fs_files?: any[];
status?: string;
start_date?: number;
end_date?: number;
// Properties from Base are inherited
}

View File

@@ -1,17 +0,0 @@
export interface ProjectIssue {
title?: string;
project_id?: number;
issue_type?: string;
priority?: string;
status?: string;
swimlane?: string;
assignees?: any[];
reporter?: number;
milestone?: string;
deadline?: number;
estimate?: number;
fs_files?: any[];
parent_id?: number;
children?: any[];
// Properties from Base are inherited
}

View File

@@ -1,8 +0,0 @@
export interface RecurrenceRule {
frequency?: string;
interval?: number;
until?: number;
count?: number;
by_weekday?: any[];
by_monthday?: any[];
}

View File

@@ -1,7 +0,0 @@
export interface Registration {
user_id?: number;
accepted?: boolean;
accepted_by?: number;
timestamp?: number;
timestamp_acceptation?: number;
}

View File

@@ -1,12 +0,0 @@
export interface RegistrationDesk {
fs_items?: any[];
white_list?: any[];
white_list_accepted?: any[];
required_list?: any[];
black_list?: any[];
start_time?: number;
end_time?: number;
acceptance_required?: boolean;
registrations?: any[];
// Properties from Base are inherited
}

View File

@@ -1,5 +0,0 @@
export interface RegistrationFileAttachment {
fs_item?: number;
cat?: string;
public?: boolean;
}

View File

@@ -1,6 +0,0 @@
export interface SendLog {
to?: any[];
cc?: any[];
status?: string;
timestamp?: number;
}

View File

@@ -1,7 +0,0 @@
export interface Swimlane {
name?: string;
description?: string;
order?: number;
color?: string;
is_done?: boolean;
}

View File

@@ -1,7 +0,0 @@
export interface User {
user_id?: number;
contact_id?: number;
status?: string;
profile_ids?: any[];
// Properties from Base are inherited
}

View File

@@ -1,327 +0,0 @@
import fetch from 'node-fetch';
import { Base } from './Base';
import { Calendar } from './Calendar';
import { CalendarEvent } from './CalendarEvent';
import { Attendee } from './Attendee';
import { AttendeeLog } from './AttendeeLog';
import { EventDoc } from './EventDoc';
import { EventLocation } from './EventLocation';
import { ChatGroup } from './ChatGroup';
import { ChatMessage } from './ChatMessage';
import { MessageLink } from './MessageLink';
import { MessageReaction } from './MessageReaction';
import { Contact } from './Contact';
import { Group } from './Group';
import { GroupMember } from './GroupMember';
import { Message } from './Message';
import { SendLog } from './SendLog';
import { Planning } from './Planning';
import { PlanningRecurrenceRule } from './PlanningRecurrenceRule';
import { Profile } from './Profile';
import { Experience } from './Experience';
import { Education } from './Education';
import { Project } from './Project';
import { Swimlane } from './Swimlane';
import { Milestone } from './Milestone';
import { ProjectIssue } from './ProjectIssue';
import { RegistrationDesk } from './RegistrationDesk';
import { RegistrationFileAttachment } from './RegistrationFileAttachment';
import { Registration } from './Registration';
import { User } from './User';
export class HeroModelsClient {
private baseUrl: string;
constructor(baseUrl: string = 'http://localhost:8086/api/heromodels') {
this.baseUrl = baseUrl;
}
private async send<T>(method: string, params: any): Promise<T> {
const response = await fetch(this.baseUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
jsonrpc: '2.0',
method: method,
params: params,
id: 1,
}),
});
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const jsonResponse:any = await response.json();
if (jsonResponse.error) {
throw new Error(`RPC error: ${jsonResponse.error.message}`);
}
return jsonResponse.result;
}
async calendar_get(params: { id: number }): Promise<Calendar> {
return this.send('calendar_get', params);
}
async calendar_set(params: { calendar: Calendar, events: any[], color: string, timezone: string, is_public: boolean }): Promise<number> {
return this.send('calendar_set', params);
}
async calendar_delete(params: { id: number }): Promise<boolean> {
return this.send('calendar_delete', params);
}
async calendar_exist(params: { id: number }): Promise<boolean> {
return this.send('calendar_exist', params);
}
async calendar_list(params: { }): Promise<any[]> {
return this.send('calendar_list', params);
}
async calendar_event_get(params: { id: number }): Promise<CalendarEvent> {
return this.send('calendar_event_get', params);
}
async calendar_event_set(params: { calendar_event: CalendarEvent }): Promise<number> {
return this.send('calendar_event_set', params);
}
async calendar_event_delete(params: { id: number }): Promise<boolean> {
return this.send('calendar_event_delete', params);
}
async calendar_event_exist(params: { id: number }): Promise<boolean> {
return this.send('calendar_event_exist', params);
}
async calendar_event_list(params: { }): Promise<any[]> {
return this.send('calendar_event_list', params);
}
async chat_group_get(params: { id: number }): Promise<ChatGroup> {
return this.send('chat_group_get', params);
}
async chat_group_set(params: { chat_group: ChatGroup }): Promise<number> {
return this.send('chat_group_set', params);
}
async chat_group_delete(params: { id: number }): Promise<boolean> {
return this.send('chat_group_delete', params);
}
async chat_group_exist(params: { id: number }): Promise<boolean> {
return this.send('chat_group_exist', params);
}
async chat_group_list(params: { }): Promise<any[]> {
return this.send('chat_group_list', params);
}
async chat_message_get(params: { id: number }): Promise<ChatMessage> {
return this.send('chat_message_get', params);
}
async chat_message_set(params: { chat_message: ChatMessage }): Promise<number> {
return this.send('chat_message_set', params);
}
async chat_message_delete(params: { id: number }): Promise<boolean> {
return this.send('chat_message_delete', params);
}
async chat_message_exist(params: { id: number }): Promise<boolean> {
return this.send('chat_message_exist', params);
}
async chat_message_list(params: { }): Promise<any[]> {
return this.send('chat_message_list', params);
}
async contact_get(params: { id: number }): Promise<Contact> {
return this.send('contact_get', params);
}
async contact_set(params: { contact: Contact }): Promise<number> {
return this.send('contact_set', params);
}
async contact_delete(params: { id: number }): Promise<boolean> {
return this.send('contact_delete', params);
}
async contact_exist(params: { id: number }): Promise<boolean> {
return this.send('contact_exist', params);
}
async contact_list(params: { }): Promise<any[]> {
return this.send('contact_list', params);
}
async group_get(params: { id: number }): Promise<Group> {
return this.send('group_get', params);
}
async group_set(params: { group: Group }): Promise<number> {
return this.send('group_set', params);
}
async group_delete(params: { id: number }): Promise<boolean> {
return this.send('group_delete', params);
}
async group_exist(params: { id: number }): Promise<boolean> {
return this.send('group_exist', params);
}
async group_list(params: { }): Promise<any[]> {
return this.send('group_list', params);
}
async message_get(params: { id: number }): Promise<Message> {
return this.send('message_get', params);
}
async message_set(params: { message: Message }): Promise<number> {
return this.send('message_set', params);
}
async message_delete(params: { id: number }): Promise<boolean> {
return this.send('message_delete', params);
}
async message_exist(params: { id: number }): Promise<boolean> {
return this.send('message_exist', params);
}
async message_list(params: { }): Promise<any[]> {
return this.send('message_list', params);
}
async planning_get(params: { id: number }): Promise<Planning> {
return this.send('planning_get', params);
}
async planning_set(params: { planning: Planning }): Promise<number> {
return this.send('planning_set', params);
}
async planning_delete(params: { id: number }): Promise<boolean> {
return this.send('planning_delete', params);
}
async planning_exist(params: { id: number }): Promise<boolean> {
return this.send('planning_exist', params);
}
async planning_list(params: { }): Promise<any[]> {
return this.send('planning_list', params);
}
async profile_get(params: { id: number }): Promise<Profile> {
return this.send('profile_get', params);
}
async profile_set(params: { profile: Profile }): Promise<number> {
return this.send('profile_set', params);
}
async profile_delete(params: { id: number }): Promise<boolean> {
return this.send('profile_delete', params);
}
async profile_exist(params: { id: number }): Promise<boolean> {
return this.send('profile_exist', params);
}
async profile_list(params: { }): Promise<any[]> {
return this.send('profile_list', params);
}
async project_get(params: { id: number }): Promise<Project> {
return this.send('project_get', params);
}
async project_set(params: { project: Project }): Promise<number> {
return this.send('project_set', params);
}
async project_delete(params: { id: number }): Promise<boolean> {
return this.send('project_delete', params);
}
async project_exist(params: { id: number }): Promise<boolean> {
return this.send('project_exist', params);
}
async project_list(params: { }): Promise<any[]> {
return this.send('project_list', params);
}
async project_issue_get(params: { id: number }): Promise<ProjectIssue> {
return this.send('project_issue_get', params);
}
async project_issue_set(params: { project_issue: ProjectIssue }): Promise<number> {
return this.send('project_issue_set', params);
}
async project_issue_delete(params: { id: number }): Promise<boolean> {
return this.send('project_issue_delete', params);
}
async project_issue_exist(params: { id: number }): Promise<boolean> {
return this.send('project_issue_exist', params);
}
async project_issue_list(params: { }): Promise<any[]> {
return this.send('project_issue_list', params);
}
async registration_desk_get(params: { id: number }): Promise<RegistrationDesk> {
return this.send('registration_desk_get', params);
}
async registration_desk_set(params: { registration_desk: RegistrationDesk }): Promise<number> {
return this.send('registration_desk_set', params);
}
async registration_desk_delete(params: { id: number }): Promise<boolean> {
return this.send('registration_desk_delete', params);
}
async registration_desk_exist(params: { id: number }): Promise<boolean> {
return this.send('registration_desk_exist', params);
}
async registration_desk_list(params: { }): Promise<any[]> {
return this.send('registration_desk_list', params);
}
async user_get(params: { id: number }): Promise<User> {
return this.send('user_get', params);
}
async user_set(params: { user: User }): Promise<number> {
return this.send('user_set', params);
}
async user_delete(params: { id: number }): Promise<boolean> {
return this.send('user_delete', params);
}
async user_exist(params: { id: number }): Promise<boolean> {
return this.send('user_exist', params);
}
async user_list(params: { }): Promise<any[]> {
return this.send('user_list', params);
}
}

View File

@@ -2,10 +2,11 @@ module typescriptgenerator
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.texttools
import os
pub fn generate_typescript_client(spec IntermediateSpec, dest_path string) ! {
mut dest := pathlib.get_dir(path: dest_path, create: true)!
// Generate a file for each schema
for name, schema in spec.schemas {
mut file_content := generate_interface(schema)
@@ -17,6 +18,30 @@ pub fn generate_typescript_client(spec IntermediateSpec, dest_path string) ! {
mut client_content := generate_client(spec)
mut client_file_path := pathlib.get_file(path: '${dest.path}/client.ts', create: true)!
client_file_path.write(client_content) or { panic(err) }
// Copy templates to destination
mut templates_src := os.dir(@FILE) + '/templates'
if os.exists(templates_src) {
// Copy index.html template
mut index_content := os.read_file('${templates_src}/index.html')!
mut index_file := pathlib.get_file(path: '${dest.path}/index.html', create: true)!
index_file.write(index_content)!
// Copy package.json template
mut package_content := os.read_file('${templates_src}/package.json')!
mut package_file := pathlib.get_file(path: '${dest.path}/package.json', create: true)!
package_file.write(package_content)!
// Copy tsconfig.json template
mut tsconfig_content := os.read_file('${templates_src}/tsconfig.json')!
mut tsconfig_file := pathlib.get_file(path: '${dest.path}/tsconfig.json', create: true)!
tsconfig_file.write(tsconfig_content)!
// Copy dev-server.ts template
mut dev_server_content := os.read_file('${templates_src}/dev-server.ts')!
mut dev_server_file := pathlib.get_file(path: '${dest.path}/dev-server.ts', create: true)!
dev_server_file.write(dev_server_content)!
}
}
fn generate_interface(schema IntermediateSchema) string {

View File

@@ -20,6 +20,7 @@ fn test_generate_typescript_client() {
config := IntermediateConfig{
base_url: 'http://localhost:8086/api/heromodels'
handler_type: 'heromodels'
}
intermediate_spec := from_openrpc(openrpc_spec, config) or {

View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Install script for the Hero Models TypeScript client.
# Usage: install.sh [output_dir]   (default: $HOME/code/heromodels/generated)

# Fail fast on errors, unset variables, and pipeline failures.
set -euo pipefail

# Check if bun is installed
if ! command -v bun &> /dev/null; then
    echo "bun is not installed. Please install bun first:"
    echo "curl -fsSL https://bun.sh/install | bash"
    exit 1
fi

# Check if V is installed
if ! command -v v &> /dev/null; then
    echo "V is not installed. Please install V first:"
    echo "Visit https://vlang.io/ for installation instructions"
    exit 1
fi

# Create output directory if it doesn't exist.
# BUG FIX: '~' does not tilde-expand inside a quoted ${1:-...} default, which
# previously created a literal './~' directory — use $HOME instead.
OUTPUT_DIR="${1:-$HOME/code/heromodels/generated}"
mkdir -p "$OUTPUT_DIR"

# Generate TypeScript client
echo "Generating TypeScript client in $OUTPUT_DIR..."
v -enable-globals -n -w -gc none run lib/hero/typescriptgenerator/generate.vsh

# Install dependencies (cd failure aborts via set -e)
echo "Installing dependencies..."
cd "$OUTPUT_DIR"
bun install

echo "Installation complete! The TypeScript client is ready to use."
echo "To test in development mode, run: bun run dev"
echo "To build for production, run: bun run build"

View File

@@ -0,0 +1,28 @@
check lib/hero/heromodels/openrpc.json
can we create a module in hero/typescriptgenerator
which takes an openrpc.json and creates an intermediate easy to use model (like we did for heroserver)
and then use this model to generate a typescript client
we should have a separate file per root model
API Endpoint: http://localhost:8086/api/heromodels
is here and it's all sent as HTTP POSTs
example
All API endpoints use JSON-RPC 2.0 format. Here's a basic example:
curl -X POST http://localhost:8086/api/[handler_name] \
-H "Content-Type: application/json" \
-d '{
"jsonrpc": "2.0",
"method": "method_name",
"params": {
"param1": "value1",
"param2": "value2"
},
"id": 1
}'

View File

@@ -61,10 +61,14 @@ pub fn from_openrpc(openrpc_spec openrpc.OpenRPC, config IntermediateConfig) !In
return error('handler_type cannot be empty')
}
// Process schemas
mut schemas_map := process_schemas(openrpc_spec.components.schemas)!
mut intermediate_spec := IntermediateSpec{
info: openrpc_spec.info
methods: []IntermediateMethod{}
schemas: schemas_map
base_url: config.base_url
schemas: process_schemas(openrpc_spec.components.schemas)!
}
// Process all methods
@@ -110,6 +114,13 @@ fn process_parameters(params []openrpc.ContentDescriptorRef) ![]IntermediatePara
}
} else if param is jsonschema.Reference {
//TODO: handle reference
// For now, create a placeholder parameter
intermediate_params << IntermediateParam{
name: 'reference'
description: ''
type_info: 'any'
required: false
}
}
}
@@ -117,7 +128,12 @@ fn process_parameters(params []openrpc.ContentDescriptorRef) ![]IntermediatePara
}
fn process_result(result openrpc.ContentDescriptorRef) !IntermediateParam {
mut intermediate_result := IntermediateParam{}
mut intermediate_result := IntermediateParam{
name: ''
description: ''
type_info: ''
required: false
}
if result is openrpc.ContentDescriptor {
type_info := extract_type_from_schema(result.schema)
@@ -134,9 +150,18 @@ fn process_result(result openrpc.ContentDescriptorRef) !IntermediateParam {
type_info := ref.ref.all_after_last('/')
intermediate_result = IntermediateParam{
name: type_info.to_lower()
description: ''
type_info: type_info
required: false
}
} else {
// Handle any other cases
intermediate_result = IntermediateParam{
name: 'unknown'
description: ''
type_info: 'unknown'
required: false
}
}
return intermediate_result
@@ -161,21 +186,24 @@ fn extract_type_from_schema(schema_ref jsonschema.SchemaRef) string {
fn process_schemas(schemas map[string]jsonschema.SchemaRef) !map[string]IntermediateSchema {
// Initialize the map with a known size if possible
mut intermediate_schemas := map[string]IntermediateSchema{}
// Process each schema in the map
for name, schema_ref in schemas {
if schema_ref is jsonschema.Schema {
schema := schema_ref as jsonschema.Schema
mut properties := []IntermediateProperty{}
for prop_name, prop_schema_ref in schema.properties {
prop_type := extract_type_from_schema(prop_schema_ref)
properties << IntermediateProperty {
properties << IntermediateProperty{
name: prop_name
description: "" // TODO
type_info: prop_type
required: prop_name in schema.required
}
}
intermediate_schemas[name] = IntermediateSchema {
intermediate_schemas[name] = IntermediateSchema{
name: name
description: schema.description
properties: properties

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env bun
// Dev server for the Hero Models TypeScript client.
// Serves index.html plus the generated .ts/.js files from the current directory.
import { serve } from "bun";

const server = serve({
  port: 3000,
  fetch(req) {
    const url = new URL(req.url);

    // Serve the test page
    if (url.pathname === "/" || url.pathname === "/index.html") {
      return new Response(Bun.file("index.html"));
    }

    // Serve TypeScript and compiled JavaScript files.
    // SECURITY FIX: the original served any relative path verbatim, allowing
    // '../..' traversal outside the served directory — reject such paths.
    if (url.pathname.endsWith(".ts") || url.pathname.endsWith(".js")) {
      if (url.pathname.includes("..")) {
        return new Response("Forbidden", { status: 403 });
      }
      return new Response(Bun.file(url.pathname.slice(1)));
    }

    return new Response("Not found", { status: 404 });
  },
});

console.log(`Dev server running on http://localhost:${server.port}`);
console.log("Press Ctrl+C to stop the server");

View File

@@ -0,0 +1,125 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Hero Models TypeScript Client Test</title>
<style>
body {
font-family: Arial, sans-serif;
margin: 20px;
background-color: #f5f5f5;
}
.container {
max-width: 800px;
margin: 0 auto;
background-color: white;
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
}
h1 {
color: #333;
text-align: center;
}
.test-section {
margin: 20px 0;
padding: 15px;
border: 1px solid #ddd;
border-radius: 5px;
}
button {
background-color: #4CAF50;
color: white;
padding: 10px 15px;
border: none;
border-radius: 4px;
cursor: pointer;
margin: 5px;
}
button:hover {
background-color: #45a049;
}
.result {
margin-top: 10px;
padding: 10px;
background-color: #f9f9f9;
border: 1px solid #eee;
border-radius: 4px;
white-space: pre-wrap;
font-family: monospace;
}
input, select {
padding: 8px;
margin: 5px;
border: 1px solid #ddd;
border-radius: 4px;
}
</style>
</head>
<body>
<div class="container">
<h1>Hero Models TypeScript Client Test</h1>
<div class="test-section">
<h2>Calendar Operations</h2>
<label>Calendar ID: <input type="number" id="calendarId" value="1"></label>
<button onclick="getCalendar()">Get Calendar</button>
<div class="result" id="calendarResult"></div>
</div>
<div class="test-section">
<h2>User Operations</h2>
<label>User ID: <input type="number" id="userId" value="1"></label>
<button onclick="getUser()">Get User</button>
<div class="result" id="userResult"></div>
</div>
<div class="test-section">
<h2>Event Operations</h2>
<label>Event ID: <input type="number" id="eventId" value="1"></label>
<button onclick="getEvent()">Get Event</button>
<div class="result" id="eventResult"></div>
</div>
</div>
<script type="module">
import { HeroModelsClient } from './client.js';
const client = new HeroModelsClient('http://localhost:8086/api/heromodels');
async function getCalendar() {
const resultDiv = document.getElementById('calendarResult');
try {
const id = parseInt(document.getElementById('calendarId').value);
const calendar = await client.calendar_get({id});
resultDiv.textContent = JSON.stringify(calendar, null, 2);
} catch (error) {
resultDiv.textContent = `Error: ${error.message}`;
}
}
async function getUser() {
const resultDiv = document.getElementById('userResult');
try {
const id = parseInt(document.getElementById('userId').value);
const user = await client.user_get({id});
resultDiv.textContent = JSON.stringify(user, null, 2);
} catch (error) {
resultDiv.textContent = `Error: ${error.message}`;
}
}
async function getEvent() {
const resultDiv = document.getElementById('eventResult');
try {
const id = parseInt(document.getElementById('eventId').value);
const event = await client.event_get({id});
resultDiv.textContent = JSON.stringify(event, null, 2);
} catch (error) {
resultDiv.textContent = `Error: ${error.message}`;
}
}
</script>
</body>
</html>

View File

@@ -0,0 +1,18 @@
{
"name": "heromodels-client",
"version": "1.0.0",
"description": "TypeScript client for Hero Models API",
"main": "client.ts",
"type": "module",
"scripts": {
"dev": "bun run dev-server.ts",
"build": "tsc"
},
"dependencies": {
"node-fetch": "^3.3.2"
},
"devDependencies": {
"typescript": "^5.0.0",
"@types/node": "^20.0.0"
}
}

View File

@@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "ES2020",
"moduleResolution": "node",
"lib": ["ES2020", "DOM"],
"outDir": "./dist",
"rootDir": ".",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true
},
"include": [
"*.ts",
"templates/*.ts"
],
"exclude": [
"node_modules",
"dist"
]
}