diff --git a/lib/hero/db/decode.v b/lib/hero/db/decode.v index 042d2811..7268213c 100644 --- a/lib/hero/db/decode.v +++ b/lib/hero/db/decode.v @@ -30,6 +30,15 @@ pub fn decode_u32(data string) !u32 { return u32(parsed_uint) } +pub fn decode_string(data string) !string { + // Try JSON decode first (for proper JSON strings) + // if result := json2.decode[string](data) { + // return result + // } + // For now, return the raw string as-is (JSON decoding is not yet enabled) + return data +} + pub fn decode_bool(data string) !bool { return json2.decode[bool](data) or { return error('Failed to decode bool: ${data}') } } diff --git a/lib/hero/herofs/fs_tools_cp.v b/lib/hero/herofs/_todo/fs_tools_cp.v similarity index 100% rename from lib/hero/herofs/fs_tools_cp.v rename to lib/hero/herofs/_todo/fs_tools_cp.v diff --git a/lib/hero/herofs/fs_tools_find.v b/lib/hero/herofs/_todo/fs_tools_find.v similarity index 100% rename from lib/hero/herofs/fs_tools_find.v rename to lib/hero/herofs/_todo/fs_tools_find.v diff --git a/lib/hero/herofs/fs_tools_helpers.v b/lib/hero/herofs/_todo/fs_tools_helpers.v similarity index 100% rename from lib/hero/herofs/fs_tools_helpers.v rename to lib/hero/herofs/_todo/fs_tools_helpers.v diff --git a/lib/hero/herofs/fs_tools_import_export.v b/lib/hero/herofs/_todo/fs_tools_import_export.v similarity index 99% rename from lib/hero/herofs/fs_tools_import_export.v rename to lib/hero/herofs/_todo/fs_tools_import_export.v index 0623d788..abe91459 100644 --- a/lib/hero/herofs/fs_tools_import_export.v +++ b/lib/hero/herofs/_todo/fs_tools_import_export.v @@ -232,7 +232,7 @@ fn (mut self Fs) export_file(file_id u32, dest_path string, opts ExportOptions) // Set file modification time if available in metadata if _ := vfs_file.metadata['modified'] { // Note: V doesn't have built-in utime, but we could add this later - // For now, just preserve the metadata in a comment or separate file + // For now, just preserve the metadata in a message or separate file } } } diff --git a/lib/hero/herofs/fs_tools_mv.v b/lib/hero/herofs/_todo/fs_tools_mv.v similarity index 100% rename from lib/hero/herofs/fs_tools_mv.v rename to lib/hero/herofs/_todo/fs_tools_mv.v diff --git a/lib/hero/herofs/fs_tools_rm.v b/lib/hero/herofs/_todo/fs_tools_rm.v similarity index 100% rename from lib/hero/herofs/fs_tools_rm.v rename to lib/hero/herofs/_todo/fs_tools_rm.v diff --git a/lib/hero/herofs/fs_tools_test.v b/lib/hero/herofs/_todo/fs_tools_test.v similarity index 99% rename from lib/hero/herofs/fs_tools_test.v rename to lib/hero/herofs/_todo/fs_tools_test.v index eb33128a..2ea61858 100644 --- a/lib/hero/herofs/fs_tools_test.v +++ b/lib/hero/herofs/_todo/fs_tools_test.v @@ -71,10 +71,10 @@ fn test_directory_operations() ! { // Verify directories were created src_dir := fs_factory.fs_dir.get(src_dir_id)! assert src_dir.name == 'src' - + docs_dir := fs_factory.fs_dir.get(docs_dir_id)! assert docs_dir.name == 'docs' - + tests_dir := fs_factory.fs_dir.get(tests_dir_id)!
assert tests_dir.name == 'tests' diff --git a/lib/hero/herofs/fs.v b/lib/hero/herofs/fs.v index 8b583264..6bcc79c7 100644 --- a/lib/hero/herofs/fs.v +++ b/lib/hero/herofs/fs.v @@ -2,6 +2,7 @@ module herofs import freeflowuniverse.herolib.data.encoder import freeflowuniverse.herolib.hero.db +import freeflowuniverse.herolib.hero.heromodels import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true } import freeflowuniverse.herolib.hero.user { UserRef } import freeflowuniverse.herolib.ui.console @@ -15,6 +16,7 @@ pub mut: root_dir_id u32 // ID of root directory quota_bytes u64 // Storage quota in bytes used_bytes u64 // Current usage in bytes + factory &FSFactory = unsafe { nil } @[skip; str: skip] } // We only keep the root directory ID here, other directories can be found by querying parent_id in FsDir @@ -100,7 +102,7 @@ pub mut: quota_bytes u64 used_bytes u64 tags []string - comments []db.CommentArg + messages []db.MessageArg } @[params] @@ -113,7 +115,7 @@ pub mut: pub fn (mut self DBFs) new(args FsArg) !Fs { mut o := Fs{ name: args.name - //factory: self.factory + factory: self.factory } if args.description != '' { @@ -133,8 +135,8 @@ pub fn (mut self DBFs) new(args FsArg) !Fs { if args.tags.len > 0 { o.tags = self.db.tags_get(args.tags)! } - if args.comments.len > 0 { - o.comments = self.db.comments_get(args.comments)! + if args.messages.len > 0 { + o.messages = self.db.messages_get(args.messages)! } return o @@ -147,7 +149,7 @@ pub fn (mut self DBFs) new_get_set(args_ FsArg) !Fs { mut o := Fs{ name: args.name - //factory: self.factory + factory: self.factory } myid := self.db.redis.hget('fs:names', args.name)! @@ -180,8 +182,8 @@ pub fn (mut self DBFs) new_get_set(args_ FsArg) !Fs { o.tags = self.db.tags_get(args.tags)! changes = true } - if args.comments.len > 0 { - o.comments = self.db.comments_get(args.comments)! + if args.messages.len > 0 { + o.messages = self.db.messages_get(args.messages)! changes = true } @@ -240,7 +242,7 @@ pub fn (mut self DBFs) get(id u32) !Fs { mut o, data := self.db.get_data[Fs](id)! mut e_decoder := encoder.decoder_new(data) self.load(mut o, mut e_decoder)! - //o.factory = self.factory + // o.factory = self.factory return o } diff --git a/lib/hero/herofs/fs_blob.v b/lib/hero/herofs/fs_blob.v index c0a09a02..4b824d4d 100644 --- a/lib/hero/herofs/fs_blob.v +++ b/lib/hero/herofs/fs_blob.v @@ -4,6 +4,9 @@ import crypto.blake3 import freeflowuniverse.herolib.data.encoder import freeflowuniverse.herolib.data.ourtime import freeflowuniverse.herolib.hero.db +import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true } +import freeflowuniverse.herolib.hero.user { UserRef } +import json // FsBlob represents binary data up to 1MB @[heap] @@ -26,6 +29,72 @@ pub fn (self FsBlob) type_name() string { return 'fs_blob' } +// return example rpc call and result for each methodname +pub fn (self FsBlob) description(methodname string) string { + match methodname { + 'set' { + return 'Create or update a blob. Returns the ID of the blob.' + } + 'get' { + return 'Retrieve a blob by ID. Returns the blob object.' + } + 'delete' { + return 'Delete a blob by ID. Returns true if successful.' + } + 'exist' { + return 'Check if a blob exists by ID. Returns true or false.' + } + 'list' { + return 'List all blobs. Returns an array of blob objects.' 
+ } + 'get_by_hash' { + return 'Retrieve a blob by its hash. Returns the blob object.' + } + 'exists_by_hash' { + return 'Check if a blob exists by its hash. Returns true or false.' + } + 'verify' { + return 'Verify the integrity of a blob by its hash. Returns true or false.' + } + else { + return 'This is generic method for the root object, TODO fill in, ...' + } + } +} + +// return example rpc call and result for each methodname +pub fn (self FsBlob) example(methodname string) (string, string) { + match methodname { + 'set' { + return '{"data": "SGVsbG8gV29ybGQh"}', '1' + } + 'get' { + return '{"id": 1}', '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}' + } + 'delete' { + return '{"id": 1}', 'true' + } + 'exist' { + return '{"id": 1}', 'true' + } + 'list' { + return '{}', '[{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}]' + } + 'get_by_hash' { + return '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}', '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}' + } + 'exists_by_hash' { + return '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}', 'true' + } + 'verify' { + return '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}', 'true' + } + else { + return '{}', '{}' + } + } +} + pub fn (self FsBlob) dump(mut e encoder.Encoder) ! { e.add_string(self.hash) e.add_list_u8(self.data) @@ -124,6 +193,10 @@ pub fn (mut self DBFsBlob) get_multi(id []u32) ![]FsBlob { return blobs } +pub fn (mut self DBFsBlob) list() ![]FsBlob { + return self.db.list[FsBlob]()!.map(self.get(it)!) +} + pub fn (mut self DBFsBlob) get_by_hash(hash string) !FsBlob { // Get blob ID from Redis hash mapping id_str := self.db.redis.hget('fsblob:hashes', hash)! @@ -150,3 +223,62 @@ pub fn (mut self DBFsBlob) verify(hash string) !bool { blob := self.get_by_hash(hash)! return blob.verify_integrity() } + +pub fn fs_blob_handle(mut f FSFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response { + match method { + 'get' { + id := db.decode_u32(params)! + res := f.fs_blob.get(id)! + return new_response(rpcid, json.encode(res)) + } + 'set' { + mut o := db.decode_generic[FsBlob](params)! + o = f.fs_blob.set(o)! + return new_response_int(rpcid, int(o.id)) + } + 'delete' { + id := db.decode_u32(params)! + f.fs_blob.delete(id)! + return new_response_ok(rpcid) + } + 'exist' { + id := db.decode_u32(params)! + if f.fs_blob.exist(id)! { + return new_response_true(rpcid) + } else { + return new_response_false(rpcid) + } + } + 'list' { + res := f.fs_blob.list()! + return new_response(rpcid, json.encode(res)) + } + 'get_by_hash' { + hash := db.decode_string(params)! + res := f.fs_blob.get_by_hash(hash)! + return new_response(rpcid, json.encode(res)) + } + 'exists_by_hash' { + hash := db.decode_string(params)! + if f.fs_blob.exists_by_hash(hash)! { + return new_response_true(rpcid) + } else { + return new_response_false(rpcid) + } + } + 'verify' { + hash := db.decode_string(params)! + if f.fs_blob.verify(hash)! 
{ + return new_response_true(rpcid) + } else { + return new_response_false(rpcid) + } + } + else { + return new_error(rpcid, + code: 32601 + message: 'Method ${method} not found on fs_blob' + ) + } + } +} diff --git a/lib/hero/herofs/fs_blob_test.v b/lib/hero/herofs/fs_blob_test.v new file mode 100644 index 00000000..ee378d22 --- /dev/null +++ b/lib/hero/herofs/fs_blob_test.v @@ -0,0 +1,419 @@ +module herofs + +import freeflowuniverse.herolib.hero.db +import freeflowuniverse.herolib.data.encoder +import freeflowuniverse.herolib.data.ourtime +import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true } +import freeflowuniverse.herolib.hero.user { UserRef } +import json +import freeflowuniverse.herolib.hero.herofs { FsBlob } + +fn test_fs_blob_new() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Hello World!'.bytes() + } + + blob := db_fs_blob.new(args)! + + assert blob.data == 'Hello World!'.bytes() + assert blob.size_bytes == 12 + assert blob.hash == 'a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587' + assert blob.updated_at > 0 + + println('✓ FsBlob new test passed!') +} + +fn test_fs_blob_crud_operations() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'CRUD Test Data'.bytes() + } + + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + original_id := blob.id + + retrieved_blob := db_fs_blob.get(original_id)! + assert retrieved_blob.data == 'CRUD Test Data'.bytes() + assert retrieved_blob.id == original_id + + exists := db_fs_blob.exist(original_id)! + assert exists == true + + mut updated_args := FsBlobArg{ + data: 'Updated CRUD Test Data'.bytes() + } + mut updated_blob := db_fs_blob.new(updated_args)! + updated_blob.id = original_id + updated_blob = db_fs_blob.set(updated_blob)! + + final_blob := db_fs_blob.get(original_id)! + assert final_blob.data == 'Updated CRUD Test Data'.bytes() + + mut expected_blob_for_hash := FsBlob{ + data: 'Updated CRUD Test Data'.bytes() + size_bytes: 'Updated CRUD Test Data'.len + } + expected_blob_for_hash.calculate_hash() + assert final_blob.hash == expected_blob_for_hash.hash + + db_fs_blob.delete(original_id)! + exists_after_delete := db_fs_blob.exist(original_id)! + assert exists_after_delete == false + + println('✓ FsBlob CRUD operations test passed!') +} + +fn test_fs_blob_encoding_decoding() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Encoding Decoding Test'.bytes() + } + + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + blob_id := blob.id + + retrieved_blob := db_fs_blob.get(blob_id)! + + assert retrieved_blob.data == 'Encoding Decoding Test'.bytes() + assert retrieved_blob.size_bytes == 'Encoding Decoding Test'.len + + mut expected_blob_for_hash := FsBlob{ + data: 'Encoding Decoding Test'.bytes() + size_bytes: 'Encoding Decoding Test'.len + } + expected_blob_for_hash.calculate_hash() + assert retrieved_blob.hash == expected_blob_for_hash.hash + + println('✓ FsBlob encoding/decoding test passed!') +} + +fn test_fs_blob_type_name() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Type Name Test'.bytes() + } + + blob := db_fs_blob.new(args)! 
+ + type_name := blob.type_name() + assert type_name == 'fs_blob' + + println('✓ FsBlob type_name test passed!') +} + +fn test_fs_blob_description() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Description Test'.bytes() + } + + blob := db_fs_blob.new(args)! + + assert blob.description('set') == 'Create or update a blob. Returns the ID of the blob.' + assert blob.description('get') == 'Retrieve a blob by ID. Returns the blob object.' + assert blob.description('delete') == 'Delete a blob by ID. Returns true if successful.' + assert blob.description('exist') == 'Check if a blob exists by ID. Returns true or false.' + assert blob.description('list') == 'List all blobs. Returns an array of blob objects.' + assert blob.description('get_by_hash') == 'Retrieve a blob by its hash. Returns the blob object.' + assert blob.description('exists_by_hash') == 'Check if a blob exists by its hash. Returns true or false.' + assert blob.description('verify') == 'Verify the integrity of a blob by its hash. Returns true or false.' + assert blob.description('unknown') == 'This is generic method for the root object, TODO fill in, ...' + + println('✓ FsBlob description test passed!') +} + +fn test_fs_blob_example() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Example Test'.bytes() + } + + blob := db_fs_blob.new(args)! + + set_call, set_result := blob.example('set') + assert set_call == '{"data": "SGVsbG8gV29ybGQh"}' + assert set_result == '1' + + get_call, get_result := blob.example('get') + assert get_call == '{"id": 1}' + assert get_result == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}' + + delete_call, delete_result := blob.example('delete') + assert delete_call == '{"id": 1}' + assert delete_result == 'true' + + exist_call, exist_result := blob.example('exist') + assert exist_call == '{"id": 1}' + assert exist_result == 'true' + + list_call, list_result := blob.example('list') + assert list_call == '{}' + assert list_result == '[{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}]' + + get_by_hash_call, get_by_hash_result := blob.example('get_by_hash') + assert get_by_hash_call == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}' + assert get_by_hash_result == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587", "data": "SGVsbG8gV29ybGQh", "size_bytes": 12}' + + exists_by_hash_call, exists_by_hash_result := blob.example('exists_by_hash') + assert exists_by_hash_call == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}' + assert exists_by_hash_result == 'true' + + verify_call, verify_result := blob.example('verify') + assert verify_call == '{"hash": "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b27796d9ad9587"}' + assert verify_result == 'true' + + unknown_call, unknown_result := blob.example('unknown') + assert unknown_call == '{}' + assert unknown_result == '{}' + + println('✓ FsBlob example test passed!') +} + +fn test_fs_blob_list() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args1 := FsBlobArg{ + data: 'Blob 1'.bytes() + } + mut blob1 := db_fs_blob.new(args1)! + blob1 = db_fs_blob.set(blob1)! + + mut args2 := FsBlobArg{ + data: 'Blob 2'.bytes() + } + mut blob2 := db_fs_blob.new(args2)! 
+ blob2 = db_fs_blob.set(blob2)! + + list_of_blobs := db_fs_blob.list()! + assert list_of_blobs.len == 2 + assert list_of_blobs[0].data == 'Blob 1'.bytes() || list_of_blobs[0].data == 'Blob 2'.bytes() + assert list_of_blobs[1].data == 'Blob 1'.bytes() || list_of_blobs[1].data == 'Blob 2'.bytes() + + println('✓ FsBlob list test passed!') +} + +fn test_fs_blob_handle_get() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Handle Get Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + params := json.encode(blob.id) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'get', params)! + assert resp.result.string() == json.encode(blob) + + println('✓ FsBlob handle get test passed!') +} + +fn test_fs_blob_handle_set() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + mut args := FsBlobArg{ + data: 'Handle Set Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + + params := json.encode(blob) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'set', params)! + assert resp.result.int() == 1 // Assuming ID 1 for the first set operation + + println('✓ FsBlob handle set test passed!') +} + +fn test_fs_blob_handle_delete() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Handle Delete Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + params := json.encode(blob.id) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'delete', params)! + assert resp.result.string() == 'true' + + exists := db_fs_blob.exist(blob.id)! + assert exists == false + + println('✓ FsBlob handle delete test passed!') +} + +fn test_fs_blob_handle_exist() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Handle Exist Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + params := json.encode(blob.id) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'exist', params)! + assert resp.result.string() == 'true' + + db_fs_blob.delete(blob.id)! + resp_false := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'exist', params)! + assert resp_false.result.string() == 'false' + + println('✓ FsBlob handle exist test passed!') +} + +fn test_fs_blob_handle_list() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args1 := FsBlobArg{ + data: 'Handle List Test 1'.bytes() + } + mut blob1 := db_fs_blob.new(args1)! + blob1 = db_fs_blob.set(blob1)! + + mut args2 := FsBlobArg{ + data: 'Handle List Test 2'.bytes() + } + mut blob2 := db_fs_blob.new(args2)! + blob2 = db_fs_blob.set(blob2)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'list', '{}')! + mut expected_list := [FsBlob(blob1), FsBlob(blob2)] + assert resp.result.string() == json.encode(expected_list) + + println('✓ FsBlob handle list test passed!') +} + +fn test_fs_blob_handle_get_by_hash() ! { + mut mydb := db.new_test()! 
+ mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Handle Get By Hash Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + params := json.encode(blob.hash) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'get_by_hash', params)! + assert resp.result.string() == json.encode(blob) + + println('✓ FsBlob handle get_by_hash test passed!') +} + +fn test_fs_blob_handle_exists_by_hash() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Handle Exists By Hash Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + params := json.encode(blob.hash) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'exists_by_hash', params)! + assert resp.result.string() == 'true' + + println('✓ FsBlob handle exists_by_hash test passed!') +} + +fn test_fs_blob_handle_verify() ! { + mut mydb := db.new_test()! + mut db_fs_blob := DBFsBlob{ + db: &mydb + } + + mut args := FsBlobArg{ + data: 'Handle Verify Test'.bytes() + } + mut blob := db_fs_blob.new(args)! + blob = db_fs_blob.set(blob)! + + mut f := FSFactory{ + fs_blob: db_fs_blob + } + + params := json.encode(blob.hash) + resp := fs_blob_handle(mut f, 1, map[string]string{}, user.UserRef{}, 'verify', params)! + assert resp.result.string() == 'true' + + println('✓ FsBlob handle verify test passed!') +} \ No newline at end of file diff --git a/lib/hero/herofs/fs_dir.v b/lib/hero/herofs/fs_dir.v index 463b481b..d027bf0d 100644 --- a/lib/hero/herofs/fs_dir.v +++ b/lib/hero/herofs/fs_dir.v @@ -87,7 +87,7 @@ pub mut: fs_id u32 @[required] parent_id u32 tags []string - comments []db.CommentArg + messages []db.MessageArg directories []u32 files []u32 symlinks []u32 @@ -107,7 +107,7 @@ pub fn (mut self DBFsDir) new(args FsDirArg) !FsDir { // Set base fields o.tags = self.db.tags_get(args.tags)! - o.comments = self.db.comments_get(args.comments)! + o.messages = self.db.messages_get(args.messages)! o.created_at = ourtime.now().unix() o.updated_at = o.created_at @@ -367,4 +367,4 @@ pub fn fs_dir_handle(mut f FSFactory, rpcid int, servercontext map[string]string ) } } -} \ No newline at end of file +} diff --git a/lib/hero/herofs/fs_file.v b/lib/hero/herofs/fs_file.v index 0a202473..bc1f77a9 100644 --- a/lib/hero/herofs/fs_file.v +++ b/lib/hero/herofs/fs_file.v @@ -87,7 +87,7 @@ pub mut: checksum string metadata map[string]string tags []string - comments []db.CommentArg + messages []db.MessageArg } // get new file, not from the DB @@ -128,7 +128,7 @@ pub fn (mut self DBFsFile) new(args FsFileArg) !FsFile { // Set base fields o.description = args.description o.tags = self.db.tags_get(args.tags)! - o.comments = self.db.comments_get(args.comments)! + o.messages = self.db.messages_get(args.messages)! 
o.updated_at = ourtime.now().unix() return o @@ -402,4 +402,4 @@ pub fn fs_file_handle(mut f FSFactory, rpcid int, servercontext map[string]strin ) } } -} \ No newline at end of file +} diff --git a/lib/hero/herofs/fs_symlink.v b/lib/hero/herofs/fs_symlink.v index 9544a1ae..354b9666 100644 --- a/lib/hero/herofs/fs_symlink.v +++ b/lib/hero/herofs/fs_symlink.v @@ -58,7 +58,7 @@ pub mut: target_id u32 @[required] target_type SymlinkTargetType @[required] tags []string - comments []db.CommentArg + messages []db.MessageArg } // get new symlink, not from the DB @@ -74,7 +74,7 @@ pub fn (mut self DBFsSymlink) new(args FsSymlinkArg) !FsSymlink { // Set base fields o.description = args.description o.tags = self.db.tags_get(args.tags)! - o.comments = self.db.comments_get(args.comments)! + o.messages = self.db.messages_get(args.messages)! o.updated_at = ourtime.now().unix() return o @@ -258,4 +258,4 @@ pub fn fs_symlink_handle(mut f FSFactory, rpcid int, servercontext map[string]st ) } } -} \ No newline at end of file +} diff --git a/lib/hero/herofs/specs.md b/lib/hero/herofs/specs.md index 0aad6d0c..c339b5c3 100644 --- a/lib/hero/herofs/specs.md +++ b/lib/hero/herofs/specs.md @@ -12,7 +12,7 @@ HeroFS is built on top of HeroDB, which uses Redis as its storage backend. The f 4. **FsSymlink** - Symbolic links 5. **FsBlob** - Binary data chunks -All components inherit from the `Base` struct, which provides common fields like ID, name, description, timestamps, security policies, tags, and comments. +All components inherit from the `Base` struct, which provides common fields like ID, name, description, timestamps, security policies, tags, and messages. ## Filesystem (Fs) @@ -278,7 +278,7 @@ When creating or modifying components, HeroFS validates references to other comp HeroFS inherits the security model from HeroDB: - Each component has a `securitypolicy` field referencing a SecurityPolicy object - Components can have associated tags for categorization -- Components can have associated comments for documentation +- Components can have associated messages for documentation ## Performance Considerations diff --git a/lib/heromodels/location.v b/lib/hero/heromodels/location.v similarity index 96% rename from lib/heromodels/location.v rename to lib/hero/heromodels/location.v index fbd68057..fa5c3305 100644 --- a/lib/heromodels/location.v +++ b/lib/hero/heromodels/location.v @@ -21,10 +21,10 @@ pub mut: pub struct Location { db.Base pub mut: - addresses []Address // Multiple addresses (home, work, etc.) - coordinates Coordinates // GPS coordinates - timezone string - is_verified bool + addresses []Address // Multiple addresses (home, work, etc.) + coordinates Coordinates // GPS coordinates + timezone string + is_verified bool location_type LocationType } @@ -141,10 +141,10 @@ pub fn (self Location) dump(mut e encoder.Encoder) ! { for addr in self.addresses { addr.dump(mut e)! } - + // Encode coordinates self.coordinates.dump(mut e)! - + // Encode other fields e.add_string(self.timezone) e.add_bool(self.is_verified) @@ -155,19 +155,19 @@ fn (mut self DBLocation) load(mut o Location, mut e encoder.Decoder) ! { // Decode addresses addr_count := e.get_u32()! o.addresses = []Address{cap: int(addr_count)} - for _ in 0..addr_count { + for _ in 0 .. addr_count { mut addr := Address{} addr.load(mut e)! o.addresses << addr } - + // Decode coordinates o.coordinates.load(mut e)! - + // Decode other fields o.timezone = e.get_string()! o.is_verified = e.get_bool()! - o.location_type = LocationType(e.get_u8()!) 
+ o.location_type = unsafe { LocationType(e.get_u8()!) } } @[params] @@ -285,4 +285,4 @@ pub fn (self Coordinates) to_string() string { // Helper method to check if coordinates are set pub fn (self Coordinates) is_valid() bool { return self.latitude != 0.0 || self.longitude != 0.0 -} \ No newline at end of file +}