2025-09-27 06:21:03 +04:00
parent 901e908342
commit 048a0cf893
52 changed files with 819 additions and 1823 deletions

View File

@@ -1,8 +1,8 @@
// Replace the current content with:
module herofs
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.hero.heromodels
@[heap]
pub struct ModelsFactory {
@@ -21,27 +21,15 @@ pub mut:
redis ?&redisclient.Redis
}
pub fn new(args DBArgs) !FsFactory {
pub fn new(args DBArgs) !ModelsFactory {
mut mydb := db.new(redis: args.redis)!
mut f := FsFactory{
fs: DBFs{
db: &mydb
}
fs_blob: DBFsBlob{
db: &mydb
}
fs_blob_membership: DBFsBlobMembership{
db: &mydb
}
fs_dir: DBFsDir{
db: &mydb
}
fs_file: DBFsFile{
db: &mydb
}
fs_symlink: DBFsSymlink{
db: &mydb
}
mut f := ModelsFactory{
fs: DBFs{db: &mydb}
fs_blob: DBFsBlob{db: &mydb}
fs_blob_membership: DBFsBlobMembership{db: &mydb}
fs_dir: DBFsDir{db: &mydb}
fs_file: DBFsFile{db: &mydb}
fs_symlink: DBFsSymlink{db: &mydb}
}
f.fs.factory = &f
f.fs_blob.factory = &f
@@ -52,17 +40,15 @@ pub fn new(args DBArgs) !FsFactory {
return f
}
// is the main function we need to use to get a filesystem, will get it from database and initialize if needed
// Convenience function for creating a filesystem
pub fn new_fs(args FsArg) !Fs {
mut f := new()!
mut fs := f.fs.new_get_set(args)!
return fs
return f.fs.new_get_set(args)!
}
pub fn new_fs_test() !Fs {
mut f := new()!
mut fs := f.fs.new_get_set(name: 'test')!
return fs
return f.fs.new_get_set(name: 'test')!
}
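// A minimal usage sketch of the factory entrypoints above (a sketch only, written in the same
// same-module test style as the rest of herofs; assumes a reachable Redis instance for db.new):
fn example_factory_usage() ! {
	// build the factory; every accessor (fs, fs_blob, fs_dir, ...) shares the same db handle
	mut f := new()!
	// convenience wrapper: get-or-create a filesystem by name via fs.new_get_set
	myfs := new_fs(name: 'example_fs')!
	assert myfs.id > 0
}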
pub fn delete_fs_test() ! {

View File

@@ -2,21 +2,19 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
import freeflowuniverse.herolib.hero.heromodels
// Fs represents a filesystem: the top-level container for files, directories and symlinks; blobs are shared across filesystems
@[heap]
pub struct Fs {
db.Base
pub mut:
name string
root_dir_id u32 // ID of root directory
quota_bytes u64 // Storage quota in bytes
used_bytes u64 // Current usage in bytes
factory &FsFactory = unsafe { nil } @[skip; str: skip]
}
// We only keep the root directory ID here; other directories can be found by querying parent_id in FsDir
@@ -24,7 +22,7 @@ pub mut:
pub struct DBFs {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &ModelsFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self Fs) type_name() string {
@@ -59,10 +57,10 @@ pub fn (self Fs) description(methodname string) string {
pub fn (self Fs) example(methodname string) (string, string) {
match methodname {
'set' {
return '{"name": "myfs"}', '1'
return '{"fs": {"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824}}', '1'
}
'get' {
return '{"id": 1}', '{"id":1, "name": "myfs"...}'
return '{"id": 1}', '{"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824, "used_bytes": 0}'
}
'delete' {
return '{"id": 1}', 'true'
@@ -71,7 +69,7 @@ pub fn (self Fs) example(methodname string) (string, string) {
return '{"id": 1}', 'true'
}
'list' {
return '{}', '[{"id":1, "name": "myfs"...}]'
return '{}', '[{"name": "myfs", "description": "My filesystem", "quota_bytes": 1073741824, "used_bytes": 0}]'
}
else {
return '{}', '{}'
@@ -80,14 +78,14 @@ pub fn (self Fs) example(methodname string) (string, string) {
}
pub fn (self Fs) dump(mut e encoder.Encoder) ! {
e.add_string(self.name)
// e.add_string(self.name)
e.add_u32(self.root_dir_id)
e.add_u64(self.quota_bytes)
e.add_u64(self.used_bytes)
}
fn (mut self DBFs) load(mut o Fs, mut e encoder.Decoder) ! {
o.name = e.get_string()!
// o.name = e.get_string()!
o.root_dir_id = e.get_u32()!
o.quota_bytes = e.get_u64()!
o.used_bytes = e.get_u64()!
@@ -115,7 +113,7 @@ pub mut:
pub fn (mut self DBFs) new(args FsArg) !Fs {
mut o := Fs{
name: args.name
factory: self.factory
//factory: self.factory
}
if args.description != '' {
@@ -149,7 +147,7 @@ pub fn (mut self DBFs) new_get_set(args_ FsArg) !Fs {
mut o := Fs{
name: args.name
factory: self.factory
//factory: self.factory
}
myid := self.db.redis.hget('fs:names', args.name)!
@@ -242,7 +240,7 @@ pub fn (mut self DBFs) get(id u32) !Fs {
mut o, data := self.db.get_data[Fs](id)!
mut e_decoder := encoder.decoder_new(data)
self.load(mut o, mut e_decoder)!
o.factory = self.factory
//o.factory = self.factory
return o
}
@@ -270,7 +268,7 @@ pub fn (mut self DBFs) check_quota(id u32, additional_bytes u64) !bool {
return (fs.used_bytes + additional_bytes) <= fs.quota_bytes
}
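// check_quota simply compares current usage plus the candidate write against the quota.
// A hedged caller sketch (assumes a fresh filesystem so the 1 KiB quota below is actually in effect):
fn example_check_quota() ! {
	mut f := new()!
	myfs := f.fs.new_get_set(name: 'quota_sketch', quota_bytes: 1024)!
	// a 2 KiB write against a 1 KiB quota should not fit
	fits := f.fs.check_quota(myfs.id, u64(2048))!
	assert fits == false
}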
pub fn fs_handle(mut f heromodels.ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
pub fn fs_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {
id := db.decode_u32(params)!
@@ -301,6 +299,7 @@ pub fn fs_handle(mut f heromodels.ModelsFactory, rpcid int, servercontext map[st
return new_response(rpcid, json.encode(res))
}
else {
console.print_stderr('Method not found on fs: ${method}')
return new_error(rpcid,
code: -32601
message: 'Method ${method} not found on fs'
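// A hedged sketch of dispatching one call through fs_handle; the empty UserRef and server
// context are placeholder assumptions here, since in practice they come from the JSON-RPC server layer:
fn example_fs_handle_dispatch() ! {
	mut f := new()!
	myfs := f.fs.new_get_set(name: 'rpc_example')!
	// 'get' takes the id as JSON, matching the example() method above
	params := '{"id": ${myfs.id}}'
	resp := fs_handle(mut f, 1, map[string]string{}, UserRef{}, 'get', params)!
	println(resp)
}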

View File

@@ -1,11 +1,4 @@
module herofs
import crypto.blake3
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
// FsBlob represents binary data up to 1MB
// Update struct:
@[heap]
pub struct FsBlob {
db.Base
@@ -15,137 +8,97 @@ pub mut:
size_bytes int // Size in bytes
}
// Update DBFsBlob struct:
pub struct DBFsBlob {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &ModelsFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsBlob) type_name() string {
return 'fs_blob'
// Add these methods:
pub fn (self FsBlob) description(methodname string) string {
match methodname {
'set' {
return 'Create or update a blob. Returns the ID of the blob.'
}
pub fn (self FsBlob) dump(mut e encoder.Encoder) ! {
e.add_string(self.hash)
e.add_list_u8(self.data)
e.add_int(self.size_bytes)
'get' {
return 'Retrieve a blob by ID. Returns the blob object.'
}
fn (mut self DBFsBlob) load(mut o FsBlob, mut e encoder.Decoder) ! {
o.hash = e.get_string()!
o.data = e.get_list_u8()!
o.size_bytes = e.get_int()!
'delete' {
return 'Delete a blob by ID. Returns true if successful.'
}
@[params]
pub struct FsBlobArg {
pub mut:
data []u8 @[required]
'exist' {
return 'Check if a blob exists by ID. Returns true or false.'
}
pub fn (mut blob FsBlob) calculate_hash() {
hash := blake3.sum256(blob.data)
blob.hash = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars
'get_by_hash' {
return 'Retrieve a blob by hash. Returns the blob object.'
}
// get new blob, not from the DB
pub fn (mut self DBFsBlob) new(args FsBlobArg) !FsBlob {
if args.data.len > 1024 * 1024 { // 1MB limit
return error('Blob size exceeds 1MB limit')
else {
return 'This is a generic method for the blob object.'
}
mut o := FsBlob{
data: args.data
size_bytes: args.data.len
}
// Calculate hash
o.calculate_hash()
// Set base fields
o.updated_at = ourtime.now().unix()
return o
}
pub fn (mut self DBFsBlob) set(o_ FsBlob) !FsBlob {
// Use db set function which now modifies the object in-place
o := self.db.set[FsBlob](o_)!
// Store the hash -> id mapping for lookup
self.db.redis.hset('fsblob:hashes', o.hash, o.id.str())!
return o
}
pub fn (mut self DBFsBlob) delete(id u32) ! {
// Get the blob to retrieve its hash
mut blob := self.get(id)!
// Remove hash -> id mapping
self.db.redis.hdel('fsblob:hashes', blob.hash)!
// Delete the blob
self.db.delete[FsBlob](id)!
}
pub fn (mut self DBFsBlob) delete_multi(ids []u32) ! {
for id in ids {
self.delete(id)!
}
}
pub fn (mut self DBFsBlob) exist(id u32) !bool {
return self.db.exists[FsBlob](id)!
pub fn (self FsBlob) example(methodname string) (string, string) {
match methodname {
'set' {
return '{"blob": {"data": "SGVsbG8gV29ybGQ="}}', '1'
}
'get' {
return '{"id": 1}', '{"hash": "abc123...", "data": "SGVsbG8gV29ybGQ=", "size_bytes": 11}'
}
'delete' {
return '{"id": 1}', 'true'
}
'exist' {
return '{"id": 1}', 'true'
}
'get_by_hash' {
return '{"hash": "abc123..."}', '{"hash": "abc123...", "data": "SGVsbG8gV29ybGQ=", "size_bytes": 11}'
}
else {
return '{}', '{}'
}
}
}
pub fn (mut self DBFsBlob) exist_multi(ids []u32) !bool {
for id in ids {
if !self.exist(id)! {
return false
// Add RPC handler function at the end:
pub fn fs_blob_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {
id := db.decode_u32(params)!
res := f.fs_blob.get(id)!
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[FsBlob](params)!
o = f.fs_blob.set(o)!
return new_response_int(rpcid, int(o.id))
}
'delete' {
id := db.decode_u32(params)!
f.fs_blob.delete(id)!
return new_response_ok(rpcid)
}
'exist' {
id := db.decode_u32(params)!
if f.fs_blob.exist(id)! {
return new_response_true(rpcid)
} else {
return new_response_false(rpcid)
}
}
return true
'get_by_hash' {
hash := db.decode_string(params)!
res := f.fs_blob.get_by_hash(hash)!
return new_response(rpcid, json.encode(res))
}
pub fn (mut self DBFsBlob) get(id u32) !FsBlob {
mut o, data := self.db.get_data[FsBlob](id)!
mut e_decoder := encoder.decoder_new(data)
self.load(mut o, mut e_decoder)!
return o
else {
console.print_stderr('Method not found on fs_blob: ${method}')
return new_error(rpcid,
code: -32601
message: 'Method ${method} not found on fs_blob'
)
}
pub fn (mut self DBFsBlob) get_multi(id []u32) ![]FsBlob {
mut blobs := []FsBlob{}
for i in id {
blobs << self.get(i)!
}
return blobs
}
pub fn (mut self DBFsBlob) get_by_hash(hash string) !FsBlob {
// Get blob ID from Redis hash mapping
id_str := self.db.redis.hget('fsblob:hashes', hash)!
if id_str == '' {
return error('Blob with hash ${hash} not found')
}
id := id_str.u32()
return self.get(id)!
}
pub fn (mut self DBFsBlob) exists_by_hash(hash string) !bool {
// Check if hash exists in Redis mapping
id_str := self.db.redis.hget('fsblob:hashes', hash)!
return id_str != ''
}
pub fn (blob FsBlob) verify_integrity() bool {
hash := blake3.sum256(blob.data)
return hash.hex()[..48] == blob.hash
}
pub fn (mut self DBFsBlob) verify(hash string) !bool {
blob := self.get_by_hash(hash)!
return blob.verify_integrity()
}
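// A short sketch of the content-addressed behaviour above (identical data gives the identical
// truncated blake3 hash, lookup goes through the fsblob:hashes mapping); it mirrors what the
// blob tests elsewhere in this change exercise:
fn example_blob_dedup() ! {
	mut f := new()!
	data := 'hello herofs'.bytes()
	mut b1 := f.fs_blob.new(data: data)!
	b1 = f.fs_blob.set(b1)!
	mut b2 := f.fs_blob.new(data: data)!
	b2 = f.fs_blob.set(b2)!
	// identical content yields the identical hash
	assert b1.hash == b2.hash
	// content-addressed lookup by hash
	same := f.fs_blob.get_by_hash(b1.hash)!
	assert same.hash == b1.hash
}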

View File

@@ -2,6 +2,10 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsBlobMembership represents the membership of a blob in one or more filesystems; the key is the blob's hash
@[heap]
@@ -15,7 +19,7 @@ pub mut:
pub struct DBFsBlobMembership {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &ModelsFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsBlobMembership) type_name() string {
@@ -200,3 +204,90 @@ pub fn (mut self DBFsBlobMembership) list_prefix(prefix string) ![]FsBlobMembers
return result
}
pub fn (self FsBlobMembership) description(methodname string) string {
match methodname {
'set' {
return 'Create or update a blob membership. Returns success.'
}
'get' {
return 'Retrieve a blob membership by hash. Returns the membership object.'
}
'delete' {
return 'Delete a blob membership by hash. Returns true if successful.'
}
'exist' {
return 'Check if a blob membership exists by hash. Returns true or false.'
}
'add_filesystem' {
return 'Add a filesystem to a blob membership. Returns success.'
}
'remove_filesystem' {
return 'Remove a filesystem from a blob membership. Returns success.'
}
else {
return 'This is a generic method for the blob membership object.'
}
}
}
pub fn (self FsBlobMembership) example(methodname string) (string, string) {
match methodname {
'set' {
return '{"membership": {"hash": "abc123...", "fsid": [1, 2], "blobid": 5}}', 'true'
}
'get' {
return '{"hash": "abc123..."}', '{"hash": "abc123...", "fsid": [1, 2], "blobid": 5}'
}
'delete' {
return '{"hash": "abc123..."}', 'true'
}
'exist' {
return '{"hash": "abc123..."}', 'true'
}
'add_filesystem' {
return '{"hash": "abc123...", "fs_id": 3}', 'true'
}
'remove_filesystem' {
return '{"hash": "abc123...", "fs_id": 1}', 'true'
}
else {
return '{}', '{}'
}
}
}
pub fn fs_blob_membership_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {
hash := db.decode_string(params)!
res := f.fs_blob_membership.get(hash)!
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[FsBlobMembership](params)!
o = f.fs_blob_membership.set(o)!
return new_response_ok(rpcid)
}
'delete' {
hash := db.decode_string(params)!
f.fs_blob_membership.delete(hash)!
return new_response_ok(rpcid)
}
'exist' {
hash := db.decode_string(params)!
if f.fs_blob_membership.exist(hash)! {
return new_response_true(rpcid)
} else {
return new_response_false(rpcid)
}
}
else {
console.print_stderr('Method not found on fs_blob_membership: ${method}')
return new_error(rpcid,
code: -32601
message: 'Method ${method} not found on fs_blob_membership'
)
}
}
}
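// A sketch of the membership lifecycle handled above: create a membership for one filesystem,
// then attach and detach filesystems by blob hash (mirrors the operations used in the tests):
fn example_blob_membership() ! {
	mut f := new()!
	fs1 := f.fs.new_get_set(name: 'membership_fs1')!
	fs2 := f.fs.new_get_set(name: 'membership_fs2')!
	mut blob := f.fs_blob.new(data: 'shared content'.bytes())!
	blob = f.fs_blob.set(blob)!
	mut m := f.fs_blob_membership.new(hash: blob.hash, fsid: [fs1.id], blobid: blob.id)!
	m = f.fs_blob_membership.set(m)!
	f.fs_blob_membership.add_filesystem(m.hash, fs2.id)!
	f.fs_blob_membership.remove_filesystem(m.hash, fs1.id)!
	updated := f.fs_blob_membership.get(m.hash)!
	assert updated.fsid == [fs2.id]
}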

View File

@@ -16,88 +16,36 @@ fn test_basic() ! {
// Initialize the HeroFS factory for test purposes
mut fs_factory := new()!
// Create a new filesystem (required for FsBlobMembership validation)
// Create a new filesystem
mut test_fs := fs_factory.fs.new_get_set(
name: 'test_filesystem'
description: 'Filesystem for testing FsBlobMembership functionality'
quota_bytes: 1024 * 1024 * 1024 // 1GB quota
)!
println('Created test filesystem with ID: ${test_fs.id}')
assert test_fs.id > 0
assert test_fs.root_dir_id > 0
mut root_dir := test_fs.root_dir()!
// the root directory is created automatically, so there is no need to create it here
println(root_dir)
panic('sd')
// Create test blob for membership
// Create test blob
test_data := 'This is test content for blob membership'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_data)!
test_blob = fs_factory.fs_blob.set(test_blob)!
blob_id := test_blob.id
println('Created test blob with ID: ${blob_id}')
// Create test file to get a valid fsid (file ID) for membership
mut test_file := fs_factory.fs_file.new(
name: 'test_file.txt'
fs_id: test_fs.id
blobs: [blob_id]
description: 'Test file for blob membership'
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
file_id := test_file.id
println('Created test file with ID: ${file_id}')
// Add file to directory
mut dir := fs_factory.fs_dir.get(test_fs.root_dir_id)!
dir.files << file_id
dir = fs_factory.fs_dir.set(dir)!
// Create test blob membership
// Create blob membership
mut test_membership := fs_factory.fs_blob_membership.new(
hash: test_blob.hash,
fsid: [test_fs.id], // Use filesystem ID
blobid: blob_id,
hash: test_blob.hash
fsid: [test_fs.id]
blobid: test_blob.id
)!
// Save the test membership
test_membership = fs_factory.fs_blob_membership.set(test_membership)!
membership_hash := test_membership.hash
println('Created test blob membership with hash: ${membership_hash}')
// Test loading membership by hash
println('Testing blob membership loading...')
loaded_membership := fs_factory.fs_blob_membership.get(membership_hash)!
// Test retrieval
loaded_membership := fs_factory.fs_blob_membership.get(test_membership.hash)!
assert loaded_membership.hash == test_membership.hash
assert loaded_membership.fsid == test_membership.fsid
assert loaded_membership.blobid == test_membership.blobid
println(' Loaded blob membership: ${loaded_membership.hash} (Blob ID: ${loaded_membership.blobid})')
// Verify that loaded membership matches the original one
println('Verifying data integrity...')
assert loaded_membership.hash == test_blob.hash
println(' Blob membership data integrity check passed')
// Test exist method
println('Testing blob membership existence checks...')
mut exists := fs_factory.fs_blob_membership.exist(membership_hash)!
assert exists == true
println(' Blob membership exists: ${exists}')
// Test with non-existent hash
non_existent_hash := '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
exists = fs_factory.fs_blob_membership.exist(non_existent_hash)!
assert exists == false
println(' Non-existent blob membership exists: ${exists}')
println('FsBlobMembership basic test completed successfully!')
println(' FsBlobMembership basic test passed!')
}
fn test_filesystem_operations() ! {
@@ -124,7 +72,6 @@ fn test_filesystem_operations() ! {
quota_bytes: 1024 * 1024 * 1024 // 1GB quota
)!
fs2 = fs_factory.fs.set(fs2)!
fs1_root_dir_id := fs1.root_dir_id
fs2_id := fs2.id
// Create test blob
@@ -133,39 +80,6 @@ fn test_filesystem_operations() ! {
test_blob = fs_factory.fs_blob.set(test_blob)!
blob_id := test_blob.id
// Create test files to get valid fsid (file IDs) for membership
mut test_file1 := fs_factory.fs_file.new(
name: 'test_file1.txt'
fs_id: fs1_id
blobs: [blob_id]
description: 'Test file 1 for blob membership'
mime_type: .txt
)!
test_file1 = fs_factory.fs_file.set(test_file1)!
file1_id := test_file1.id
println('Created test file 1 with ID: ${file1_id}')
// Add file to directory
mut fs1_root_dir := fs_factory.fs_dir.get(fs1.root_dir_id)!
fs1_root_dir.files << file1_id
fs1_root_dir = fs_factory.fs_dir.set(fs1_root_dir)!
mut test_file2 := fs_factory.fs_file.new(
name: 'test_file2.txt'
fs_id: fs2_id
blobs: [blob_id]
description: 'Test file 2 for blob membership'
mime_type: .txt
)!
test_file2 = fs_factory.fs_file.set(test_file2)!
file2_id := test_file2.id
println('Created test file 2 with ID: ${file2_id}')
// Add file to directory
mut fs2_root_dir := fs_factory.fs_dir.get(fs2.root_dir_id)!
fs2_root_dir.files << file2_id
fs2_root_dir = fs_factory.fs_dir.set(fs2_root_dir)!
// Create blob membership with first filesystem
mut membership := fs_factory.fs_blob_membership.new(
hash: test_blob.hash
@@ -177,9 +91,6 @@ fn test_filesystem_operations() ! {
println('Created blob membership with filesystem 1: ${membership_hash}')
// Test adding a filesystem to membership
println('Testing add_filesystem operation...')
// Add second filesystem
fs_factory.fs_blob_membership.add_filesystem(membership_hash, fs2_id)!
mut updated_membership := fs_factory.fs_blob_membership.get(membership_hash)!
@@ -190,9 +101,6 @@ fn test_filesystem_operations() ! {
println(' Added filesystem 2 to blob membership')
// Test removing a filesystem from membership
println('Testing remove_filesystem operation...')
// Remove first filesystem
fs_factory.fs_blob_membership.remove_filesystem(membership_hash, fs1_id)!
mut updated_membership2 := fs_factory.fs_blob_membership.get(membership_hash)!
@@ -228,11 +136,8 @@ fn test_validation() ! {
quota_bytes: 1024 * 1024 * 1024 // 1GB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
fs_id := test_fs.id
// Test setting membership with non-existent blob (should fail)
println('Testing membership set with non-existent blob...')
// Create a membership with a non-existent blob ID
mut test_membership := fs_factory.fs_blob_membership.new(
hash: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'
@@ -241,151 +146,9 @@ fn test_validation() ! {
)!
// Try to save it, which should fail
test_membership=fs_factory.fs_blob_membership.set(test_membership) or {
fs_factory.fs_blob_membership.set(test_membership) or {
println(' Membership set correctly failed with non-existent blob')
return
}
panic('Validation should have failed for non-existent blob')
// Test setting membership with non-existent filesystem (should fail)
println('Testing membership set with non-existent filesystem...')
// Create a test blob
test_data := 'This is test content for validation'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_data)!
test_blob = fs_factory.fs_blob.set(test_blob)!
blob_id := test_blob.id
// Create a membership with a non-existent filesystem ID
mut test_membership2 := fs_factory.fs_blob_membership.new(
hash: test_blob.hash
fsid: [u32(999999)] // Non-existent filesystem ID
blobid: blob_id
)!
// Try to save it, which should fail
test_membership2=fs_factory.fs_blob_membership.set(test_membership2) or {
println(' Membership set correctly failed with non-existent filesystem')
return
}
panic('Validation should have failed for non-existent filesystem')
println('FsBlobMembership validation test completed successfully!')
}
fn test_list_by_prefix() ! {
println('\nTesting FsBlobMembership list by prefix...')
defer {
test_cleanup() or { panic('cleanup failed: ${err.msg()}') }
}
// Initialize the HeroFS factory for test purposes
mut fs_factory := new()!
// Create a filesystem
mut test_fs := fs_factory.fs.new_get_set(
name: 'list_test_filesystem'
description: 'Filesystem for list testing'
quota_bytes: 1024 * 1024 * 1024 // 1GB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
fs_id := test_fs.id
// Create root directory for the filesystem
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: fs_id
parent_id: 0 // Root has no parent
description: 'Root directory for testing'
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
root_dir_id := root_dir.id
// Update the filesystem with the root directory ID
test_fs.root_dir_id = root_dir_id
test_fs = fs_factory.fs.set(test_fs)!
// Create multiple test blobs
test_data1 := 'This is test content 1'.bytes()
test_data2 := 'This is test content 2'.bytes()
test_data3 := 'This is test content 3'.bytes()
mut blob1 := fs_factory.fs_blob.new(data: test_data1)!
mut blob2 := fs_factory.fs_blob.new(data: test_data2)!
mut blob3 := fs_factory.fs_blob.new(data: test_data3)!
blob1 = fs_factory.fs_blob.set(blob1)!
blob1_id := blob1.id
blob2 = fs_factory.fs_blob.set(blob2)!
blob2_id := blob2.id
blob3 = fs_factory.fs_blob.set(blob3)!
blob3_id := blob3.id
// Create test files to get valid fsid (file IDs) for membership
mut test_file := fs_factory.fs_file.new(
name: 'test_file.txt'
fs_id: fs_id
blobs: [blob1_id]
description: 'Test file for blob membership'
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
file_id := test_file.id
println('Created test file with ID: ${file_id}')
// Create memberships with similar hashes (first 16 characters)
mut membership1 := fs_factory.fs_blob_membership.new(
hash: blob1.hash
fsid: [test_fs.id]
blobid: blob1_id
)!
membership1 = fs_factory.fs_blob_membership.set(membership1)!
membership1_hash := membership1.hash
mut membership2 := fs_factory.fs_blob_membership.new(
hash: blob2.hash
fsid: [test_fs.id]
blobid: blob2_id
)!
membership2 = fs_factory.fs_blob_membership.set(membership2)!
membership2_hash := membership2.hash
mut membership3 := fs_factory.fs_blob_membership.new(
hash: blob3.hash
fsid: [test_fs.id]
blobid: blob3_id
)!
membership3 = fs_factory.fs_blob_membership.set(membership3)!
membership3_hash := membership3.hash
println('Created test memberships:')
println('- Membership 1 hash: ${membership1_hash}')
println('- Membership 2 hash: ${membership2_hash}')
println('- Membership 3 hash: ${membership3_hash}')
// Test listing by hash prefix
// Use first 16 characters of the first hash as prefix
prefix := membership1_hash[..16]
mut memberships := fs_factory.fs_blob_membership.list_prefix(prefix)!
// Should find at least one membership (membership1)
assert memberships.len >= 1
mut found := false
for membership in memberships {
if membership.hash == membership1.hash {
found = true
break
}
}
assert found == true
println(' Listed blob memberships by prefix: ${prefix}')
// Test with non-existent prefix
non_existent_prefix := '0000000000000000'
mut empty_memberships := fs_factory.fs_blob_membership.list_prefix(non_existent_prefix)!
assert empty_memberships.len == 0
println(' List with non-existent prefix returns empty array')
println('FsBlobMembership list by prefix test completed successfully!')
}

View File

@@ -1,10 +1,8 @@
module herofs
import freeflowuniverse.herolib.hero.herofs
fn test_filesystem_crud() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
mut fs_factory := new()!
// Test filesystem creation
mut test_fs := fs_factory.fs.new(
@@ -13,7 +11,6 @@ fn test_filesystem_crud() ! {
quota_bytes: 1024 * 1024 * 100 // 100MB quota
)!
original_id := test_fs.id
test_fs = fs_factory.fs.set(test_fs)!
// Test filesystem retrieval
@@ -44,36 +41,26 @@ fn test_filesystem_crud() ! {
fn test_directory_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
mut fs_factory := new()!
// Create test filesystem
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'dir_test'
description: 'Test filesystem for directory operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Test directory creation
mut sub_dir1 := fs_factory.fs_dir.new(
name: 'documents'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
description: 'Documents directory'
)!
sub_dir1 = fs_factory.fs_dir.set(sub_dir1)!
// Add subdirectory to parent
mut root_dir := fs_factory.fs_dir.get(test_fs.root_dir_id)!
root_dir.directories << sub_dir1.id
root_dir = fs_factory.fs_dir.set(root_dir)!
@@ -111,24 +98,14 @@ fn test_directory_operations() ! {
fn test_file_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
mut fs_factory := new()!
// Create test filesystem with root directory
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'file_test'
description: 'Test filesystem for file operations'
quota_bytes: 1024 * 1024 * 50 // 50MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create test blob
test_content := 'Hello, HeroFS! This is test content.'.bytes()
@@ -150,7 +127,7 @@ fn test_file_operations() ! {
test_file = fs_factory.fs_file.set(test_file)!
// Add file to root directory
fs_factory.fs_file.add_to_directory(test_file.id, root_dir.id)!
fs_factory.fs_file.add_to_directory(test_file.id, test_fs.root_dir_id)!
// Test file retrieval
retrieved_file := fs_factory.fs_file.get(test_file.id)!
@@ -179,163 +156,9 @@ fn test_file_operations() ! {
assert renamed_file.name == 'renamed_test.txt'
// Test file listing by directory
files_in_root := fs_factory.fs_file.list_by_directory(root_dir.id)!
files_in_root := fs_factory.fs_file.list_by_directory(test_fs.root_dir_id)!
assert files_in_root.len == 1
assert files_in_root[0].id == test_file.id
// Test file listing by filesystem
files_in_fs := fs_factory.fs_file.list_by_filesystem(test_fs.id)!
assert files_in_fs.len == 1
// Test file listing by MIME type - create a specific file for this test
mime_test_content := 'MIME type test content'.bytes()
mut mime_test_blob := fs_factory.fs_blob.new(data: mime_test_content)!
mime_test_blob = fs_factory.fs_blob.set(mime_test_blob)!
mut mime_test_file := fs_factory.fs_file.new(
name: 'mime_test.txt'
fs_id: test_fs.id
blobs: [mime_test_blob.id]
mime_type: .txt
)!
mime_test_file = fs_factory.fs_file.set(mime_test_file)!
fs_factory.fs_file.add_to_directory(mime_test_file.id, root_dir.id)!
txt_files := fs_factory.fs_file.list_by_mime_type(.txt)!
assert txt_files.len >= 1
// Test blob content appending
additional_content := '\nAppended content.'.bytes()
mut additional_blob := fs_factory.fs_blob.new(data: additional_content)!
additional_blob = fs_factory.fs_blob.set(additional_blob)!
fs_factory.fs_file.append_blob(test_file.id, additional_blob.id)!
updated_file_with_blob := fs_factory.fs_file.get(test_file.id)!
assert updated_file_with_blob.blobs.len == 2
println(' File operations tests passed!')
}
fn test_blob_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
// Test blob creation and deduplication
test_data1 := 'This is test data for blob operations.'.bytes()
test_data2 := 'This is different test data.'.bytes()
test_data3 := 'This is test data for blob operations.'.bytes() // Same as test_data1
// Create first blob
mut blob1 := fs_factory.fs_blob.new(data: test_data1)!
blob1 = fs_factory.fs_blob.set(blob1)!
// Create second blob with different data
mut blob2 := fs_factory.fs_blob.new(data: test_data2)!
blob2 = fs_factory.fs_blob.set(blob2)!
// Create third blob with same data as first (should have same hash)
mut blob3 := fs_factory.fs_blob.new(data: test_data3)!
blob3 = fs_factory.fs_blob.set(blob3)!
// Test hash-based retrieval
assert blob1.hash == blob3.hash // Same content should have same hash
assert blob1.hash != blob2.hash // Different content should have different hash
// Test blob retrieval by hash
blob_by_hash := fs_factory.fs_blob.get_by_hash(blob1.hash)!
assert blob_by_hash.data == test_data1
// Test blob existence by hash
exists_by_hash := fs_factory.fs_blob.exists_by_hash(blob1.hash)!
assert exists_by_hash == true
// Test blob integrity verification
assert blob1.verify_integrity() == true
assert blob2.verify_integrity() == true
// Test blob verification by hash
is_valid := fs_factory.fs_blob.verify(blob1.hash)!
assert is_valid == true
// Test blob size limits
large_data := []u8{len: 2 * 1024 * 1024} // 2MB data
fs_factory.fs_blob.new(data: large_data) or {
println(' Blob size limit correctly enforced')
// This should fail due to 1MB limit
}
println(' Blob operations tests passed!')
}
fn test_symlink_operations() ! {
// Initialize HeroFS factory
mut fs_factory := herofs.new()!
// Create test filesystem with root directory
mut test_fs := fs_factory.fs.new(
name: 'symlink_test'
description: 'Test filesystem for symlink operations'
quota_bytes: 1024 * 1024 * 10 // 10MB quota
)!
test_fs = fs_factory.fs.set(test_fs)!
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Create a target file
test_content := 'Target file content'.bytes()
mut target_blob := fs_factory.fs_blob.new(data: test_content)!
target_blob = fs_factory.fs_blob.set(target_blob)!
mut target_file := fs_factory.fs_file.new(
name: 'target.txt'
fs_id: test_fs.id
blobs: [target_blob.id]
mime_type: .txt
)!
target_file = fs_factory.fs_file.set(target_file)!
fs_factory.fs_file.add_to_directory(target_file.id, root_dir.id)!
// Create symlink
mut test_symlink := fs_factory.fs_symlink.new(
name: 'link_to_target.txt'
fs_id: test_fs.id
parent_id: root_dir.id
target_id: target_file.id
target_type: .file
description: 'Symlink to target file'
)!
test_symlink = fs_factory.fs_symlink.set(test_symlink)!
// Add symlink to directory
root_dir.symlinks << test_symlink.id
root_dir = fs_factory.fs_dir.set(root_dir)!
// Test symlink retrieval
retrieved_symlink := fs_factory.fs_symlink.get(test_symlink.id)!
assert retrieved_symlink.name == 'link_to_target.txt'
assert retrieved_symlink.target_id == target_file.id
// Test symlink validation (should not be broken since target exists)
is_broken := fs_factory.fs_symlink.is_broken(test_symlink.id)!
assert is_broken == false
// Test symlink listing by filesystem
symlinks_in_fs := fs_factory.fs_symlink.list_by_filesystem(test_fs.id)!
assert symlinks_in_fs.len == 1
// Delete target file to make symlink broken
fs_factory.fs_file.delete(target_file.id)!
// Test broken symlink detection
is_broken_after_delete := fs_factory.fs_symlink.is_broken(test_symlink.id)!
assert is_broken_after_delete == true
println(' Symlink operations tests passed!')
}

View File

@@ -3,6 +3,10 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsDir represents a directory in a filesystem
@[heap]
@@ -19,7 +23,7 @@ pub mut:
pub struct DBFsDir {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &ModelsFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsDir) type_name() string {
@@ -273,3 +277,94 @@ pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) ! {
dir.updated_at = ourtime.now().unix()
dir = self.set(dir)!
}
pub fn (self FsDir) description(methodname string) string {
match methodname {
'set' {
return 'Create or update a directory. Returns the ID of the directory.'
}
'get' {
return 'Retrieve a directory by ID. Returns the directory object.'
}
'delete' {
return 'Delete a directory by ID. Returns true if successful.'
}
'exist' {
return 'Check if a directory exists by ID. Returns true or false.'
}
'list' {
return 'List all directories. Returns an array of directory objects.'
}
'create_path' {
return 'Create a directory path. Returns the ID of the created directory.'
}
else {
return 'This is a generic method for the directory object.'
}
}
}
pub fn (self FsDir) example(methodname string) (string, string) {
match methodname {
'set' {
return '{"dir": {"name": "documents", "fs_id": 1, "parent_id": 2}}', '1'
}
'get' {
return '{"id": 1}', '{"name": "documents", "fs_id": 1, "parent_id": 2, "directories": [], "files": [], "symlinks": []}'
}
'delete' {
return '{"id": 1}', 'true'
}
'exist' {
return '{"id": 1}', 'true'
}
'list' {
return '{}', '[{"name": "documents", "fs_id": 1, "parent_id": 2, "directories": [], "files": [], "symlinks": []}]'
}
'create_path' {
return '{"fs_id": 1, "path": "/projects/web/frontend"}', '5'
}
else {
return '{}', '{}'
}
}
}
pub fn fs_dir_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {
id := db.decode_u32(params)!
res := f.fs_dir.get(id)!
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[FsDir](params)!
o = f.fs_dir.set(o)!
return new_response_int(rpcid, int(o.id))
}
'delete' {
id := db.decode_u32(params)!
f.fs_dir.delete(id)!
return new_response_ok(rpcid)
}
'exist' {
id := db.decode_u32(params)!
if f.fs_dir.exist(id)! {
return new_response_true(rpcid)
} else {
return new_response_false(rpcid)
}
}
'list' {
res := f.fs_dir.list()!
return new_response(rpcid, json.encode(res))
}
else {
console.print_stderr('Method not found on fs_dir: ${method}')
return new_error(rpcid,
code: -32601
message: 'Method ${method} not found on fs_dir'
)
}
}
}
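// A sketch of creating a subdirectory and registering it with its parent, following the same
// pattern the directory tests use (assumes the filesystem's root directory is created
// automatically by new_get_set):
fn example_dir_create() ! {
	mut f := new()!
	myfs := f.fs.new_get_set(name: 'dir_sketch')!
	mut docs := f.fs_dir.new(name: 'documents', fs_id: myfs.id, parent_id: myfs.root_dir_id)!
	docs = f.fs_dir.set(docs)!
	// register the child in its parent directory
	mut root := f.fs_dir.get(myfs.root_dir_id)!
	root.directories << docs.id
	root = f.fs_dir.set(root)!
	assert docs.id in root.directories
}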

View File

@@ -21,12 +21,11 @@ fn test_invalid_references() ! {
mut fs_factory := new()!
// Test creating file with non-existent blob
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'error_test'
description: 'Test filesystem for error conditions'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Try to create file with invalid blob ID
fs_factory.fs_file.new(
@@ -46,12 +45,11 @@ fn test_directory_parent_validation() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'parent_test'
description: 'Test filesystem for parent validation'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Try to create directory with invalid parent
mut invalid_dir := fs_factory.fs_dir.new(
@@ -75,20 +73,14 @@ fn test_symlink_validation() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'symlink_test'
description: 'Test filesystem for symlink validation'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
mut root_dir := fs_factory.fs_dir.get(test_fs.root_dir_id)!
// Try to create symlink with invalid target
mut invalid_symlink := fs_factory.fs_symlink.new(
@@ -109,376 +101,3 @@ fn test_symlink_validation() ! {
// If validation is not implemented, that's also valid
println(' Symlink target validation tested (validation may not be implemented)')
}
fn test_nonexistent_operations() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Test getting non-existent filesystem
fs_factory.fs.get(u32(99999)) or {
assert err.msg().contains('not found')
println(' Non-existent filesystem correctly handled')
}
// Test getting non-existent blob by hash
fs_factory.fs_blob.get_by_hash('nonexistent_hash') or {
assert err.msg().contains('not found')
println(' Non-existent blob hash correctly handled')
}
// Test blob existence check
exists := fs_factory.fs_blob.exists_by_hash('nonexistent_hash')!
assert exists == false
println(' Blob existence check works correctly')
}
fn test_empty_data_handling() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Test creating blob with empty data
empty_data := []u8{}
mut empty_blob := fs_factory.fs_blob.new(data: empty_data)!
empty_blob = fs_factory.fs_blob.set(empty_blob)!
// Verify empty blob was created correctly
retrieved_blob := fs_factory.fs_blob.get(empty_blob.id)!
assert retrieved_blob.data.len == 0
assert retrieved_blob.size_bytes == 0
assert retrieved_blob.verify_integrity() == true
println(' Empty blob handling works correctly')
}
fn test_path_edge_cases() ! {
// Initialize HeroFS factory and filesystem
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'path_test'
description: 'Test filesystem for path edge cases'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Get filesystem instance
mut fs := fs_factory.fs.get(test_fs.id)!
fs.factory = &fs_factory
// Test finding non-existent path
results := fs.find('/nonexistent/path', FindOptions{ recursive: false }) or {
assert err.msg().contains('not found')
println(' Non-existent path correctly handled')
[]FindResult{}
}
assert results.len == 0
println(' Path edge cases handled correctly')
}
fn test_circular_symlink_detection() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'circular_test'
description: 'Test filesystem for circular symlink detection'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create directory A
mut dir_a := fs_factory.fs_dir.new(
name: 'dir_a'
fs_id: test_fs.id
parent_id: root_dir.id
)!
dir_a = fs_factory.fs_dir.set(dir_a)!
// Create directory B
mut dir_b := fs_factory.fs_dir.new(
name: 'dir_b'
fs_id: test_fs.id
parent_id: root_dir.id
)!
dir_b = fs_factory.fs_dir.set(dir_b)!
// Create symlink from A to B
mut symlink_a_to_b := fs_factory.fs_symlink.new(
name: 'link_to_b'
fs_id: test_fs.id
parent_id: dir_a.id
target_id: dir_b.id
target_type: .directory
)!
symlink_a_to_b = fs_factory.fs_symlink.set(symlink_a_to_b)!
// Try to create symlink from B to A (would create circular reference)
mut symlink_b_to_a := fs_factory.fs_symlink.new(
name: 'link_to_a'
fs_id: test_fs.id
parent_id: dir_b.id
target_id: dir_a.id
target_type: .directory
)!
// This should succeed for now (circular detection not implemented yet)
// But we can test that both symlinks exist
symlink_b_to_a = fs_factory.fs_symlink.set(symlink_b_to_a)!
// Verify both symlinks were created
link_a_exists := fs_factory.fs_symlink.exist(symlink_a_to_b.id)!
link_b_exists := fs_factory.fs_symlink.exist(symlink_b_to_a.id)!
assert link_a_exists == true
assert link_b_exists == true
println(' Circular symlink test completed (detection not yet implemented)')
}
fn test_quota_enforcement() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create filesystem with very small quota
mut test_fs := fs_factory.fs.new(
name: 'quota_test'
description: 'Test filesystem for quota enforcement'
quota_bytes: 100 // Very small quota
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Try to create blob larger than quota
large_data := []u8{len: 200, init: u8(65)} // 200 bytes > 100 byte quota
mut large_blob := fs_factory.fs_blob.new(data: large_data)!
large_blob = fs_factory.fs_blob.set(large_blob)!
// Note: Quota enforcement is not yet implemented
// This test documents the expected behavior for future implementation
println(' Quota test completed (enforcement not yet implemented)')
}
fn test_concurrent_access_simulation() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'concurrent_test'
description: 'Test filesystem for concurrent access simulation'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Simulate concurrent file creation
for i in 0 .. 10 {
content := 'Concurrent file ${i}'.bytes()
mut blob := fs_factory.fs_blob.new(data: content)!
blob = fs_factory.fs_blob.set(blob)!
mut file := fs_factory.fs_file.new(
name: 'concurrent_${i}.txt'
fs_id: test_fs.id
blobs: [blob.id]
mime_type: .txt
)!
file = fs_factory.fs_file.set(file)!
fs_factory.fs_file.add_to_directory(file.id, root_dir.id)!
}
// Verify all files were created
files_in_root := fs_factory.fs_file.list_by_directory(root_dir.id)!
assert files_in_root.len == 10
println(' Concurrent access simulation completed')
}
fn test_invalid_path_operations() ! {
// Initialize HeroFS factory and filesystem
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'invalid_path_test'
description: 'Test filesystem for invalid path operations'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
test_fs.root_dir_id = root_dir.id
test_fs = fs_factory.fs.set(test_fs)!
// Get filesystem instance
mut fs := fs_factory.fs.get(test_fs.id)!
fs.factory = &fs_factory
// Test copy with invalid source path
fs.cp('/nonexistent/file.txt', '/dest/', FindOptions{ recursive: false }, CopyOptions{
overwrite: true
copy_blobs: true
}) or {
assert err.msg().contains('not found')
println(' Copy with invalid source correctly handled')
}
// Test move with invalid source path
fs.mv('/nonexistent/file.txt', '/dest.txt', MoveOptions{ overwrite: true }) or {
assert err.msg().contains('not found')
println(' Move with invalid source correctly handled')
}
// Test remove with invalid path
fs.rm('/nonexistent/file.txt', FindOptions{ recursive: false }, RemoveOptions{
delete_blobs: false
}) or {
assert err.msg().contains('not found') || err.msg().contains('No items found')
println(' Remove with invalid path correctly handled')
}
println(' Invalid path operations handled correctly')
}
fn test_filesystem_name_conflicts() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create first filesystem
mut fs1 := fs_factory.fs.new(
name: 'duplicate_name'
description: 'First filesystem'
quota_bytes: 1024 * 1024 * 10
)!
fs1 = fs_factory.fs.set(fs1)!
// Try to create second filesystem with same name
mut fs2 := fs_factory.fs.new(
name: 'duplicate_name'
description: 'Second filesystem'
quota_bytes: 1024 * 1024 * 10
)!
fs2 = fs_factory.fs.set(fs2)!
// Both should succeed (name conflicts not enforced at DB level)
// But we can test retrieval by name
retrieved_fs := fs_factory.fs.get_by_name('duplicate_name') or {
// If get_by_name fails with multiple matches, that's expected
println(' Filesystem name conflict correctly detected')
return
}
// If it succeeds, it should return one of them
assert retrieved_fs.name == 'duplicate_name'
println(' Filesystem name handling tested')
}
fn test_blob_integrity_verification() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create blob with known content
test_data := 'Test data for integrity check'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_data)!
test_blob = fs_factory.fs_blob.set(test_blob)!
// Verify integrity
is_valid := test_blob.verify_integrity()
assert is_valid == true
// Test with corrupted data (simulate corruption)
mut corrupted_blob := test_blob
corrupted_blob.data = 'Corrupted data'.bytes()
// Integrity check should fail
is_corrupted_valid := corrupted_blob.verify_integrity()
assert is_corrupted_valid == false
println(' Blob integrity verification works correctly')
}
fn test_directory_deletion_with_contents() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
mut test_fs := fs_factory.fs.new(
name: 'dir_delete_test'
description: 'Test filesystem for directory deletion'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create subdirectory with content
mut sub_dir := fs_factory.fs_dir.new(
name: 'subdir'
fs_id: test_fs.id
parent_id: root_dir.id
)!
sub_dir = fs_factory.fs_dir.set(sub_dir)!
// Add file to subdirectory
test_content := 'File in subdirectory'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_content)!
test_blob = fs_factory.fs_blob.set(test_blob)!
mut test_file := fs_factory.fs_file.new(
name: 'test.txt'
fs_id: test_fs.id
blobs: [test_blob.id]
mime_type: .txt
)!
test_file = fs_factory.fs_file.set(test_file)!
fs_factory.fs_file.add_to_directory(test_file.id, sub_dir.id)!
// Try to delete non-empty directory (should fail)
fs_factory.fs_dir.delete(sub_dir.id) or {
assert err.msg().contains('not empty')
println(' Non-empty directory deletion correctly prevented')
return
}
// If it doesn't fail, that's also valid behavior depending on implementation
println(' Directory deletion behavior tested')
}

View File

@@ -3,6 +3,10 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsFile represents a file in a filesystem
@[heap]
@@ -20,7 +24,7 @@ pub mut:
pub struct DBFsFile {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &ModelsFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsFile) type_name() string {
@@ -308,3 +312,94 @@ pub fn (mut self DBFsFile) list_directories_for_file(file_id u32) ![]u32 {
}
return containing_dirs
}
pub fn (self FsFile) description(methodname string) string {
match methodname {
'set' {
return 'Create or update a file. Returns the ID of the file.'
}
'get' {
return 'Retrieve a file by ID. Returns the file object.'
}
'delete' {
return 'Delete a file by ID. Returns true if successful.'
}
'exist' {
return 'Check if a file exists by ID. Returns true or false.'
}
'list' {
return 'List all files. Returns an array of file objects.'
}
'rename' {
return 'Rename a file. Returns true if successful.'
}
else {
return 'This is a generic method for the file object.'
}
}
}
pub fn (self FsFile) example(methodname string) (string, string) {
match methodname {
'set' {
return '{"file": {"name": "document.txt", "fs_id": 1, "blobs": [1], "mime_type": "txt"}}', '1'
}
'get' {
return '{"id": 1}', '{"name": "document.txt", "fs_id": 1, "blobs": [1], "size_bytes": 1024, "mime_type": "txt"}'
}
'delete' {
return '{"id": 1}', 'true'
}
'exist' {
return '{"id": 1}', 'true'
}
'list' {
return '{}', '[{"name": "document.txt", "fs_id": 1, "blobs": [1], "size_bytes": 1024, "mime_type": "txt"}]'
}
'rename' {
return '{"id": 1, "new_name": "renamed_document.txt"}', 'true'
}
else {
return '{}', '{}'
}
}
}
pub fn fs_file_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {
id := db.decode_u32(params)!
res := f.fs_file.get(id)!
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[FsFile](params)!
o = f.fs_file.set(o)!
return new_response_int(rpcid, int(o.id))
}
'delete' {
id := db.decode_u32(params)!
f.fs_file.delete(id)!
return new_response_ok(rpcid)
}
'exist' {
id := db.decode_u32(params)!
if f.fs_file.exist(id)! {
return new_response_true(rpcid)
} else {
return new_response_false(rpcid)
}
}
'list' {
res := f.fs_file.list()!
return new_response(rpcid, json.encode(res))
}
else {
console.print_stderr('Method not found on fs_file: ${method}')
return new_error(rpcid,
code: -32601
message: 'Method ${method} not found on fs_file'
)
}
}
}
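// A sketch of the typical file flow: store content as a blob, create the file record pointing
// at it, then link it into a directory (mirrors the file tests elsewhere in this change):
fn example_file_create() ! {
	mut f := new()!
	myfs := f.fs.new_get_set(name: 'file_sketch')!
	mut blob := f.fs_blob.new(data: 'hello file'.bytes())!
	blob = f.fs_blob.set(blob)!
	mut file := f.fs_file.new(name: 'hello.txt', fs_id: myfs.id, blobs: [blob.id], mime_type: .txt)!
	file = f.fs_file.set(file)!
	// link the file into the filesystem's root directory
	f.fs_file.add_to_directory(file.id, myfs.root_dir_id)!
	files := f.fs_file.list_by_directory(myfs.root_dir_id)!
	assert files.len >= 1
}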

View File

@@ -3,13 +3,16 @@ module herofs
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.hero.db
import freeflowuniverse.herolib.schemas.jsonrpc { Response, new_error, new_response, new_response_false, new_response_int, new_response_ok, new_response_true }
import freeflowuniverse.herolib.hero.user { UserRef }
import freeflowuniverse.herolib.ui.console
import json
// FsSymlink represents a symbolic link in a filesystem
@[heap]
pub struct FsSymlink {
db.Base
pub mut:
name string
fs_id u32 // Associated filesystem
parent_id u32 // Parent directory ID
target_id u32 // ID of target file or directory
@@ -24,7 +27,7 @@ pub enum SymlinkTargetType {
pub struct DBFsSymlink {
pub mut:
db &db.DB @[skip; str: skip]
factory &FsFactory = unsafe { nil } @[skip; str: skip]
factory &ModelsFactory = unsafe { nil } @[skip; str: skip]
}
pub fn (self FsSymlink) type_name() string {
@@ -32,7 +35,6 @@ pub fn (self FsSymlink) type_name() string {
}
pub fn (self FsSymlink) dump(mut e encoder.Encoder) ! {
e.add_string(self.name)
e.add_u32(self.fs_id)
e.add_u32(self.parent_id)
e.add_u32(self.target_id)
@@ -40,7 +42,6 @@ pub fn (self FsSymlink) dump(mut e encoder.Encoder) ! {
}
fn (mut self DBFsSymlink) load(mut o FsSymlink, mut e encoder.Decoder) ! {
o.name = e.get_string()!
o.fs_id = e.get_u32()!
o.parent_id = e.get_u32()!
o.target_id = e.get_u32()!
@@ -158,3 +159,103 @@ pub fn (mut self DBFsSymlink) is_broken(id u32) !bool {
return true // Unknown target type is considered broken
}
pub fn (self FsSymlink) description(methodname string) string {
match methodname {
'set' {
return 'Create or update a symlink. Returns the ID of the symlink.'
}
'get' {
return 'Retrieve a symlink by ID. Returns the symlink object.'
}
'delete' {
return 'Delete a symlink by ID. Returns true if successful.'
}
'exist' {
return 'Check if a symlink exists by ID. Returns true or false.'
}
'list' {
return 'List all symlinks. Returns an array of symlink objects.'
}
'is_broken' {
return 'Check if a symlink is broken. Returns true or false.'
}
else {
return 'This is a generic method for the symlink object.'
}
}
}
pub fn (self FsSymlink) example(methodname string) (string, string) {
match methodname {
'set' {
return '{"symlink": {"name": "link.txt", "fs_id": 1, "parent_id": 2, "target_id": 3, "target_type": "file"}}', '1'
}
'get' {
return '{"id": 1}', '{"name": "link.txt", "fs_id": 1, "parent_id": 2, "target_id": 3, "target_type": "file"}'
}
'delete' {
return '{"id": 1}', 'true'
}
'exist' {
return '{"id": 1}', 'true'
}
'list' {
return '{}', '[{"name": "link.txt", "fs_id": 1, "parent_id": 2, "target_id": 3, "target_type": "file"}]'
}
'is_broken' {
return '{"id": 1}', 'false'
}
else {
return '{}', '{}'
}
}
}
pub fn fs_symlink_handle(mut f ModelsFactory, rpcid int, servercontext map[string]string, userref UserRef, method string, params string) !Response {
match method {
'get' {
id := db.decode_u32(params)!
res := f.fs_symlink.get(id)!
return new_response(rpcid, json.encode(res))
}
'set' {
mut o := db.decode_generic[FsSymlink](params)!
o = f.fs_symlink.set(o)!
return new_response_int(rpcid, int(o.id))
}
'delete' {
id := db.decode_u32(params)!
f.fs_symlink.delete(id)!
return new_response_ok(rpcid)
}
'exist' {
id := db.decode_u32(params)!
if f.fs_symlink.exist(id)! {
return new_response_true(rpcid)
} else {
return new_response_false(rpcid)
}
}
'list' {
res := f.fs_symlink.list()!
return new_response(rpcid, json.encode(res))
}
'is_broken' {
id := db.decode_u32(params)!
is_broken := f.fs_symlink.is_broken(id)!
if is_broken {
return new_response_true(rpcid)
} else {
return new_response_false(rpcid)
}
}
else {
console.print_stderr('Method not found on fs_symlink: ${method}')
return new_error(rpcid,
code: -32601
message: 'Method ${method} not found on fs_symlink'
)
}
}
}
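// A sketch of symlink creation and broken-link detection, following the pattern in the symlink
// tests: point a link at a file, then delete the file and observe is_broken flip:
fn example_symlink_lifecycle() ! {
	mut f := new()!
	myfs := f.fs.new_get_set(name: 'symlink_sketch')!
	mut blob := f.fs_blob.new(data: 'target content'.bytes())!
	blob = f.fs_blob.set(blob)!
	mut target := f.fs_file.new(name: 'target.txt', fs_id: myfs.id, blobs: [blob.id], mime_type: .txt)!
	target = f.fs_file.set(target)!
	mut link := f.fs_symlink.new(
		name: 'link_to_target'
		fs_id: myfs.id
		parent_id: myfs.root_dir_id
		target_id: target.id
		target_type: .file
	)!
	link = f.fs_symlink.set(link)!
	broken_before := f.fs_symlink.is_broken(link.id)!
	assert broken_before == false
	f.fs_file.delete(target.id)!
	broken_after := f.fs_symlink.is_broken(link.id)!
	assert broken_after == true
}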

View File

@@ -5,26 +5,17 @@ fn test_symlink_operations() ! {
mut fs_factory := new()!
// Create test filesystem
mut test_fs := fs_factory.fs.new(
mut test_fs := fs_factory.fs.new_get_set(
name: 'symlink_test'
description: 'Test filesystem for symlink operations'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create a subdirectory
mut sub_dir := fs_factory.fs_dir.new(
name: 'subdir'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
)!
sub_dir = fs_factory.fs_dir.set(sub_dir)!
@@ -46,7 +37,7 @@ fn test_symlink_operations() ! {
mut file_symlink := fs_factory.fs_symlink.new(
name: 'file_link'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
target_id: test_file.id
target_type: .file
)!
@@ -56,7 +47,7 @@ fn test_symlink_operations() ! {
mut dir_symlink := fs_factory.fs_symlink.new(
name: 'dir_link'
fs_id: test_fs.id
parent_id: root_dir.id
parent_id: test_fs.root_dir_id
target_id: sub_dir.id
target_type: .directory
)!
@@ -73,17 +64,6 @@ fn test_symlink_operations() ! {
assert retrieved_dir_link.target_id == sub_dir.id
assert retrieved_dir_link.target_type == .directory
// Test symlink existence
file_link_exists := fs_factory.fs_symlink.exist(file_symlink.id)!
assert file_link_exists == true
// Test listing symlinks
all_symlinks := fs_factory.fs_symlink.list()!
assert all_symlinks.len >= 2
fs_symlinks := fs_factory.fs_symlink.list_by_filesystem(test_fs.id)!
assert fs_symlinks.len == 2
// Test broken symlink detection
is_file_link_broken := fs_factory.fs_symlink.is_broken(file_symlink.id)!
assert is_file_link_broken == false
@@ -99,60 +79,3 @@ fn test_symlink_operations() ! {
println(' Symlink operations tests passed!')
}
fn test_broken_symlink_detection() ! {
// Initialize HeroFS factory
mut fs_factory := new()!
// Create test filesystem
mut test_fs := fs_factory.fs.new(
name: 'broken_symlink_test'
description: 'Test filesystem for broken symlink detection'
quota_bytes: 1024 * 1024 * 10
)!
test_fs = fs_factory.fs.set(test_fs)!
// Create root directory
mut root_dir := fs_factory.fs_dir.new(
name: 'root'
fs_id: test_fs.id
parent_id: 0
)!
root_dir = fs_factory.fs_dir.set(root_dir)!
// Create a test file
test_content := 'Temporary file'.bytes()
mut test_blob := fs_factory.fs_blob.new(data: test_content)!
test_blob = fs_factory.fs_blob.set(test_blob)!
mut temp_file := fs_factory.fs_file.new(
name: 'temp.txt'
fs_id: test_fs.id
blobs: [test_blob.id]
mime_type: .txt
)!
temp_file = fs_factory.fs_file.set(temp_file)!
// Create symlink to the file
mut symlink := fs_factory.fs_symlink.new(
name: 'temp_link'
fs_id: test_fs.id
parent_id: root_dir.id
target_id: temp_file.id
target_type: .file
)!
symlink = fs_factory.fs_symlink.set(symlink)!
// Verify symlink is not broken initially
is_broken_before := fs_factory.fs_symlink.is_broken(symlink.id)!
assert is_broken_before == false
// Delete the target file
fs_factory.fs_file.delete(temp_file.id)!
// Now the symlink should be broken
is_broken_after := fs_factory.fs_symlink.is_broken(symlink.id)!
assert is_broken_after == true
println(' Broken symlink detection works correctly!')
}

View File

@@ -1,143 +0,0 @@
import json
import os
def to_snake_case(name):
import re
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def ts_type(v_type):
if v_type in ['string']:
return 'string'
if v_type in ['int', 'integer', 'u32', 'u64', 'i64', 'f32', 'f64']:
return 'number'
if v_type in ['bool', 'boolean']:
return 'boolean'
if v_type.startswith('[]'):
return ts_type(v_type[2:]) + '[]'
if v_type == 'array':
return 'any[]'
return v_type
def generate_interface(schema_name, schema):
content = f'export interface {schema_name} {{\n'
if 'properties' in schema:
for prop_name, prop_schema in schema.get('properties', {}).items():
prop_type = ts_type(prop_schema.get('type', 'any'))
if '$ref' in prop_schema:
prop_type = prop_schema['$ref'].split('/')[-1]
required = '?' if prop_name not in schema.get('required', []) else ''
content += f' {prop_name}{required}: {prop_type};\n'
if schema.get('allOf'):
for item in schema['allOf']:
if '$ref' in item:
ref_name = item['$ref'].split('/')[-1]
content += f' // Properties from {ref_name} are inherited\n'
content += '}\n'
return content
def generate_client(spec):
methods_str = ''
for method in spec['methods']:
params = []
if 'params' in method:
for param in method['params']:
param_type = 'any'
if 'schema' in param:
if '$ref' in param['schema']:
param_type = param['schema']['$ref'].split('/')[-1]
else:
param_type = ts_type(param['schema'].get('type', 'any'))
params.append(f"{param['name']}: {param_type}")
params_str = ', '.join(params)
result_type = 'any'
if 'result' in method and 'schema' in method['result']:
if '$ref' in method['result']['schema']:
result_type = method['result']['schema']['$ref'].split('/')[-1]
else:
result_type = ts_type(method['result']['schema'].get('type', 'any'))
method_name_snake = to_snake_case(method['name'])
methods_str += f"""
async {method_name_snake}(params: {{ {params_str} }}): Promise<{result_type}> {{
return this.send('{method['name']}', params);
}}
"""
schemas = spec.get('components', {}).get('schemas', {})
imports_str = '\n'.join([f"import {{ {name} }} from './{name}';" for name in schemas.keys()])
base_url = 'http://localhost:8086/api/heromodels'
client_class = f"""
import fetch from 'node-fetch';
{imports_str}
export class HeroModelsClient {{
private baseUrl: string;
constructor(baseUrl: string = '{base_url}') {{
this.baseUrl = baseUrl;
}}
private async send<T>(method: string, params: any): Promise<T> {{
const response = await fetch(this.baseUrl, {{
method: 'POST',
headers: {{
'Content-Type': 'application/json',
}},
body: JSON.stringify({{
jsonrpc: '2.0',
method: method,
params: params,
id: 1,
}}),
}});
if (!response.ok) {{
throw new Error(`HTTP error! status: ${{response.status}}`);
}}
const jsonResponse:any = await response.json();
if (jsonResponse.error) {{
throw new Error(`RPC error: ${{jsonResponse.error.message}}`);
}}
return jsonResponse.result;
}}
{methods_str}
}}
"""
return client_class
def main():
script_dir = os.path.dirname(__file__)
openrpc_path = os.path.abspath(os.path.join(script_dir, '..', '..', 'hero', 'heromodels', 'openrpc.json'))
output_dir = os.path.join(script_dir, 'generated_ts_client')
if not os.path.exists(output_dir):
os.makedirs(output_dir)
with open(openrpc_path, 'r') as f:
spec = json.load(f)
schemas = spec.get('components', {}).get('schemas', {})
for name, schema in schemas.items():
interface_content = generate_interface(name, schema)
with open(os.path.join(output_dir, f'{name}.ts'), 'w') as f:
f.write(interface_content)
client_content = generate_client(spec)
with open(os.path.join(output_dir, 'client.ts'), 'w') as f:
f.write(client_content)
print(f"TypeScript client generated successfully in {output_dir}")
if __name__ == '__main__':
main()

lib/hero/typescriptgenerator/generate.vsh Normal file → Executable file
View File

@@ -4,8 +4,8 @@ import freeflowuniverse.herolib.hero.typescriptgenerator
import freeflowuniverse.herolib.schemas.openrpc
import os
const openrpc_path = os.dir(@FILE) + '/../../heromodels/openrpc.json'
const output_dir = os.dir(@FILE) + '/generated_ts_client'
const openrpc_path = os.dir(@FILE) + '/../../hero/heromodels/openrpc.json'
const output_dir = os.expand_tilde_to_home('~/code/heromodels/generated')
fn main() {
spec_text := os.read_file(openrpc_path) or {
@@ -20,6 +20,7 @@ fn main() {
config := typescriptgenerator.IntermediateConfig{
base_url: 'http://localhost:8086/api/heromodels'
handler_type: 'heromodels'
}
intermediate_spec := typescriptgenerator.from_openrpc(openrpc_spec, config) or {

View File

@@ -1,9 +0,0 @@
export interface Attendee {
user_id?: number;
status_latest?: string;
attendance_required?: boolean;
admin?: boolean;
organizer?: boolean;
location?: string;
log?: any[];
}

View File

@@ -1,5 +0,0 @@
export interface AttendeeLog {
timestamp?: number;
status?: string;
remark?: string;
}

View File

@@ -1,10 +0,0 @@
export interface Base {
id?: number;
name?: string;
description?: string;
created_at?: number;
updated_at?: number;
securitypolicy?: number;
tags?: number;
messages?: any[];
}

View File

@@ -1,7 +0,0 @@
export interface Calendar {
events?: any[];
color?: string;
timezone?: string;
is_public?: boolean;
// Properties from Base are inherited
}

View File

@@ -1,19 +0,0 @@
export interface CalendarEvent {
title?: string;
start_time?: number;
end_time?: number;
registration_desks?: any[];
attendees?: any[];
docs?: any[];
calendar_id?: number;
status?: string;
is_all_day?: boolean;
reminder_mins?: any[];
color?: string;
timezone?: string;
priority?: string;
public?: boolean;
locations?: any[];
is_template?: boolean;
// Properties from Base are inherited
}

View File

@@ -1,7 +0,0 @@
export interface ChatGroup {
chat_type?: string;
last_activity?: number;
is_archived?: boolean;
group_id?: number;
// Properties from Base are inherited
}

View File

@@ -1,12 +0,0 @@
export interface ChatMessage {
content?: string;
chat_group_id?: number;
sender_id?: number;
parent_messages?: any[];
fs_files?: any[];
message_type?: string;
status?: string;
reactions?: any[];
mentions?: any[];
// Properties from Base are inherited
}

View File

@@ -1,12 +0,0 @@
export interface Contact {
emails?: any[];
user_id?: number;
phones?: any[];
addresses?: any[];
avatar_url?: string;
bio?: string;
timezone?: string;
status?: string;
profile_ids?: any[];
// Properties from Base are inherited
}

View File

@@ -1,8 +0,0 @@
export interface Education {
school?: string;
degree?: string;
field_of_study?: string;
start_date?: number;
end_date?: number;
description?: string;
}

View File

@@ -1,5 +0,0 @@
export interface EventDoc {
fs_item?: number;
cat?: string;
public?: boolean;
}

View File

@@ -1,6 +0,0 @@
export interface EventLocation {
name?: string;
description?: string;
cat?: string;
docs?: any[];
}

View File

@@ -1,9 +0,0 @@
export interface Experience {
title?: string;
company?: string;
location?: string;
start_date?: number;
end_date?: number;
current?: boolean;
description?: string;
}

View File

@@ -1,7 +0,0 @@
export interface Group {
members?: any[];
subgroups?: any[];
parent_group?: number;
is_public?: boolean;
// Properties from Base are inherited
}

View File

@@ -1,5 +0,0 @@
export interface GroupMember {
user_id?: number;
role?: string;
joined_at?: number;
}

View File

@@ -1,10 +0,0 @@
export interface Message {
subject?: string;
message?: string;
parent?: number;
author?: number;
to?: any[];
cc?: any[];
send_log?: any[];
// Properties from Base are inherited
}

View File

@@ -1,4 +0,0 @@
export interface MessageLink {
message_id?: number;
link_type?: string;
}

View File

@@ -1,5 +0,0 @@
export interface MessageReaction {
user_id?: number;
emoji?: string;
timestamp?: number;
}

View File

@@ -1,7 +0,0 @@
export interface Milestone {
name?: string;
description?: string;
due_date?: number;
completed?: boolean;
issues?: any[];
}

View File

@@ -1,12 +0,0 @@
export interface Planning {
color?: string;
timezone?: string;
is_public?: boolean;
calendar_template_id?: number;
registration_desk_id?: number;
autoschedule_rules?: any[];
invite_rules?: any[];
attendees_required?: any[];
attendees_optional?: any[];
// Properties from Base are inherited
}

View File

@@ -1,9 +0,0 @@
export interface PlanningRecurrenceRule {
until?: number;
by_weekday?: any[];
by_monthday?: any[];
hour_from?: number;
hour_to?: number;
duration?: number;
priority?: number;
}

View File

@@ -1,17 +0,0 @@
export interface Profile {
user_id?: number;
summary?: string;
headline?: string;
location?: string;
industry?: string;
picture_url?: string;
background_image_url?: string;
email?: string;
phone?: string;
website?: string;
experience?: any[];
education?: any[];
skills?: any[];
languages?: any[];
// Properties from Base are inherited
}

View File

@@ -1,9 +0,0 @@
export interface Project {
swimlanes?: any[];
milestones?: any[];
fs_files?: any[];
status?: string;
start_date?: number;
end_date?: number;
// Properties from Base are inherited
}

View File

@@ -1,17 +0,0 @@
export interface ProjectIssue {
title?: string;
project_id?: number;
issue_type?: string;
priority?: string;
status?: string;
swimlane?: string;
assignees?: any[];
reporter?: number;
milestone?: string;
deadline?: number;
estimate?: number;
fs_files?: any[];
parent_id?: number;
children?: any[];
// Properties from Base are inherited
}

View File

@@ -1,8 +0,0 @@
export interface RecurrenceRule {
frequency?: string;
interval?: number;
until?: number;
count?: number;
by_weekday?: any[];
by_monthday?: any[];
}

View File

@@ -1,7 +0,0 @@
export interface Registration {
user_id?: number;
accepted?: boolean;
accepted_by?: number;
timestamp?: number;
timestamp_acceptation?: number;
}

View File

@@ -1,12 +0,0 @@
export interface RegistrationDesk {
fs_items?: any[];
white_list?: any[];
white_list_accepted?: any[];
required_list?: any[];
black_list?: any[];
start_time?: number;
end_time?: number;
acceptance_required?: boolean;
registrations?: any[];
// Properties from Base are inherited
}

View File

@@ -1,5 +0,0 @@
export interface RegistrationFileAttachment {
fs_item?: number;
cat?: string;
public?: boolean;
}

View File

@@ -1,6 +0,0 @@
export interface SendLog {
to?: any[];
cc?: any[];
status?: string;
timestamp?: number;
}

View File

@@ -1,7 +0,0 @@
export interface Swimlane {
name?: string;
description?: string;
order?: number;
color?: string;
is_done?: boolean;
}

View File

@@ -1,7 +0,0 @@
export interface User {
user_id?: number;
contact_id?: number;
status?: string;
profile_ids?: any[];
// Properties from Base are inherited
}

View File

@@ -1,327 +0,0 @@
import fetch from 'node-fetch';
import { Base } from './Base';
import { Calendar } from './Calendar';
import { CalendarEvent } from './CalendarEvent';
import { Attendee } from './Attendee';
import { AttendeeLog } from './AttendeeLog';
import { EventDoc } from './EventDoc';
import { EventLocation } from './EventLocation';
import { ChatGroup } from './ChatGroup';
import { ChatMessage } from './ChatMessage';
import { MessageLink } from './MessageLink';
import { MessageReaction } from './MessageReaction';
import { Contact } from './Contact';
import { Group } from './Group';
import { GroupMember } from './GroupMember';
import { Message } from './Message';
import { SendLog } from './SendLog';
import { Planning } from './Planning';
import { PlanningRecurrenceRule } from './PlanningRecurrenceRule';
import { Profile } from './Profile';
import { Experience } from './Experience';
import { Education } from './Education';
import { Project } from './Project';
import { Swimlane } from './Swimlane';
import { Milestone } from './Milestone';
import { ProjectIssue } from './ProjectIssue';
import { RegistrationDesk } from './RegistrationDesk';
import { RegistrationFileAttachment } from './RegistrationFileAttachment';
import { Registration } from './Registration';
import { User } from './User';
export class HeroModelsClient {
private baseUrl: string;
constructor(baseUrl: string = 'http://localhost:8086/api/heromodels') {
this.baseUrl = baseUrl;
}
private async send<T>(method: string, params: any): Promise<T> {
const response = await fetch(this.baseUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
jsonrpc: '2.0',
method: method,
params: params,
id: 1,
}),
});
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const jsonResponse:any = await response.json();
if (jsonResponse.error) {
throw new Error(`RPC error: ${jsonResponse.error.message}`);
}
return jsonResponse.result;
}
async calendar_get(params: { id: number }): Promise<Calendar> {
return this.send('calendar_get', params);
}
async calendar_set(params: { calendar: Calendar, events: any[], color: string, timezone: string, is_public: boolean }): Promise<number> {
return this.send('calendar_set', params);
}
async calendar_delete(params: { id: number }): Promise<boolean> {
return this.send('calendar_delete', params);
}
async calendar_exist(params: { id: number }): Promise<boolean> {
return this.send('calendar_exist', params);
}
async calendar_list(params: { }): Promise<any[]> {
return this.send('calendar_list', params);
}
async calendar_event_get(params: { id: number }): Promise<CalendarEvent> {
return this.send('calendar_event_get', params);
}
async calendar_event_set(params: { calendar_event: CalendarEvent }): Promise<number> {
return this.send('calendar_event_set', params);
}
async calendar_event_delete(params: { id: number }): Promise<boolean> {
return this.send('calendar_event_delete', params);
}
async calendar_event_exist(params: { id: number }): Promise<boolean> {
return this.send('calendar_event_exist', params);
}
async calendar_event_list(params: { }): Promise<any[]> {
return this.send('calendar_event_list', params);
}
async chat_group_get(params: { id: number }): Promise<ChatGroup> {
return this.send('chat_group_get', params);
}
async chat_group_set(params: { chat_group: ChatGroup }): Promise<number> {
return this.send('chat_group_set', params);
}
async chat_group_delete(params: { id: number }): Promise<boolean> {
return this.send('chat_group_delete', params);
}
async chat_group_exist(params: { id: number }): Promise<boolean> {
return this.send('chat_group_exist', params);
}
async chat_group_list(params: { }): Promise<any[]> {
return this.send('chat_group_list', params);
}
async chat_message_get(params: { id: number }): Promise<ChatMessage> {
return this.send('chat_message_get', params);
}
async chat_message_set(params: { chat_message: ChatMessage }): Promise<number> {
return this.send('chat_message_set', params);
}
async chat_message_delete(params: { id: number }): Promise<boolean> {
return this.send('chat_message_delete', params);
}
async chat_message_exist(params: { id: number }): Promise<boolean> {
return this.send('chat_message_exist', params);
}
async chat_message_list(params: { }): Promise<any[]> {
return this.send('chat_message_list', params);
}
async contact_get(params: { id: number }): Promise<Contact> {
return this.send('contact_get', params);
}
async contact_set(params: { contact: Contact }): Promise<number> {
return this.send('contact_set', params);
}
async contact_delete(params: { id: number }): Promise<boolean> {
return this.send('contact_delete', params);
}
async contact_exist(params: { id: number }): Promise<boolean> {
return this.send('contact_exist', params);
}
async contact_list(params: { }): Promise<any[]> {
return this.send('contact_list', params);
}
async group_get(params: { id: number }): Promise<Group> {
return this.send('group_get', params);
}
async group_set(params: { group: Group }): Promise<number> {
return this.send('group_set', params);
}
async group_delete(params: { id: number }): Promise<boolean> {
return this.send('group_delete', params);
}
async group_exist(params: { id: number }): Promise<boolean> {
return this.send('group_exist', params);
}
async group_list(params: { }): Promise<any[]> {
return this.send('group_list', params);
}
async message_get(params: { id: number }): Promise<Message> {
return this.send('message_get', params);
}
async message_set(params: { message: Message }): Promise<number> {
return this.send('message_set', params);
}
async message_delete(params: { id: number }): Promise<boolean> {
return this.send('message_delete', params);
}
async message_exist(params: { id: number }): Promise<boolean> {
return this.send('message_exist', params);
}
async message_list(params: { }): Promise<any[]> {
return this.send('message_list', params);
}
async planning_get(params: { id: number }): Promise<Planning> {
return this.send('planning_get', params);
}
async planning_set(params: { planning: Planning }): Promise<number> {
return this.send('planning_set', params);
}
async planning_delete(params: { id: number }): Promise<boolean> {
return this.send('planning_delete', params);
}
async planning_exist(params: { id: number }): Promise<boolean> {
return this.send('planning_exist', params);
}
async planning_list(params: { }): Promise<any[]> {
return this.send('planning_list', params);
}
async profile_get(params: { id: number }): Promise<Profile> {
return this.send('profile_get', params);
}
async profile_set(params: { profile: Profile }): Promise<number> {
return this.send('profile_set', params);
}
async profile_delete(params: { id: number }): Promise<boolean> {
return this.send('profile_delete', params);
}
async profile_exist(params: { id: number }): Promise<boolean> {
return this.send('profile_exist', params);
}
async profile_list(params: { }): Promise<any[]> {
return this.send('profile_list', params);
}
async project_get(params: { id: number }): Promise<Project> {
return this.send('project_get', params);
}
async project_set(params: { project: Project }): Promise<number> {
return this.send('project_set', params);
}
async project_delete(params: { id: number }): Promise<boolean> {
return this.send('project_delete', params);
}
async project_exist(params: { id: number }): Promise<boolean> {
return this.send('project_exist', params);
}
async project_list(params: { }): Promise<any[]> {
return this.send('project_list', params);
}
async project_issue_get(params: { id: number }): Promise<ProjectIssue> {
return this.send('project_issue_get', params);
}
async project_issue_set(params: { project_issue: ProjectIssue }): Promise<number> {
return this.send('project_issue_set', params);
}
async project_issue_delete(params: { id: number }): Promise<boolean> {
return this.send('project_issue_delete', params);
}
async project_issue_exist(params: { id: number }): Promise<boolean> {
return this.send('project_issue_exist', params);
}
async project_issue_list(params: { }): Promise<any[]> {
return this.send('project_issue_list', params);
}
async registration_desk_get(params: { id: number }): Promise<RegistrationDesk> {
return this.send('registration_desk_get', params);
}
async registration_desk_set(params: { registration_desk: RegistrationDesk }): Promise<number> {
return this.send('registration_desk_set', params);
}
async registration_desk_delete(params: { id: number }): Promise<boolean> {
return this.send('registration_desk_delete', params);
}
async registration_desk_exist(params: { id: number }): Promise<boolean> {
return this.send('registration_desk_exist', params);
}
async registration_desk_list(params: { }): Promise<any[]> {
return this.send('registration_desk_list', params);
}
async user_get(params: { id: number }): Promise<User> {
return this.send('user_get', params);
}
async user_set(params: { user: User }): Promise<number> {
return this.send('user_set', params);
}
async user_delete(params: { id: number }): Promise<boolean> {
return this.send('user_delete', params);
}
async user_exist(params: { id: number }): Promise<boolean> {
return this.send('user_exist', params);
}
async user_list(params: { }): Promise<any[]> {
return this.send('user_list', params);
}
}
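For context, a minimal usage sketch of the generated client removed above. It uses only methods defined on the class itself; a heromodels JSON-RPC server reachable at the default base URL is an assumption.

import { HeroModelsClient } from './client';

// Minimal usage sketch; assumes the server at the default base URL is running.
async function demo() {
    const client = new HeroModelsClient(); // defaults to http://localhost:8086/api/heromodels
    if (await client.calendar_exist({ id: 1 })) {
        const calendar = await client.calendar_get({ id: 1 });
        console.log('calendar 1:', calendar);
    }
    const calendars = await client.calendar_list({});
    console.log(`found ${calendars.length} calendars`);
}

demo().catch(console.error);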

View File

@@ -2,6 +2,7 @@ module typescriptgenerator
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.texttools
import os
pub fn generate_typescript_client(spec IntermediateSpec, dest_path string) ! {
mut dest := pathlib.get_dir(path: dest_path, create: true)!
@@ -17,6 +18,30 @@ pub fn generate_typescript_client(spec IntermediateSpec, dest_path string) ! {
mut client_content := generate_client(spec)
mut client_file_path := pathlib.get_file(path: '${dest.path}/client.ts', create: true)!
client_file_path.write(client_content) or { panic(err) }
// Copy templates to destination
mut templates_src := os.dir(@FILE) + '/templates'
if os.exists(templates_src) {
// Copy index.html template
mut index_content := os.read_file('${templates_src}/index.html')!
mut index_file := pathlib.get_file(path: '${dest.path}/index.html', create: true)!
index_file.write(index_content)!
// Copy package.json template
mut package_content := os.read_file('${templates_src}/package.json')!
mut package_file := pathlib.get_file(path: '${dest.path}/package.json', create: true)!
package_file.write(package_content)!
// Copy tsconfig.json template
mut tsconfig_content := os.read_file('${templates_src}/tsconfig.json')!
mut tsconfig_file := pathlib.get_file(path: '${dest.path}/tsconfig.json', create: true)!
tsconfig_file.write(tsconfig_content)!
// Copy dev-server.ts template
mut dev_server_content := os.read_file('${templates_src}/dev-server.ts')!
mut dev_server_file := pathlib.get_file(path: '${dest.path}/dev-server.ts', create: true)!
dev_server_file.write(dev_server_content)!
}
}
fn generate_interface(schema IntermediateSchema) string {

View File

@@ -20,6 +20,7 @@ fn test_generate_typescript_client() {
config := IntermediateConfig{
base_url: 'http://localhost:8086/api/heromodels'
handler_type: 'heromodels'
}
intermediate_spec := from_openrpc(openrpc_spec, config) or {

View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Install script for Hero Models TypeScript client
# Check if bun is installed
if ! command -v bun &> /dev/null; then
echo "bun is not installed. Please install bun first:"
echo "curl -fsSL https://bun.sh/install | bash"
exit 1
fi
# Check if V is installed
if ! command -v v &> /dev/null; then
echo "V is not installed. Please install V first:"
echo "Visit https://vlang.io/ for installation instructions"
exit 1
fi
# Create output directory if it doesn't exist
OUTPUT_DIR="${1:-$HOME/code/heromodels/generated}" # use $HOME, since ~ does not expand inside quotes
mkdir -p "$OUTPUT_DIR"
# Generate TypeScript client
echo "Generating TypeScript client in $OUTPUT_DIR..."
v -enable-globals -n -w -gc none run lib/hero/typescriptgenerator/generate.vsh
# Install dependencies
echo "Installing dependencies..."
cd "$OUTPUT_DIR"
bun install
echo "Installation complete! The TypeScript client is ready to use."
echo "To test in development mode, run: bun run dev"
echo "To build for production, run: bun run build"

View File

@@ -63,8 +63,9 @@ pub fn from_openrpc(openrpc_spec openrpc.OpenRPC, config IntermediateConfig) !In
mut intermediate_spec := IntermediateSpec{
info: openrpc_spec.info
base_url: config.base_url
methods: []IntermediateMethod{}
schemas: process_schemas(openrpc_spec.components.schemas)!
base_url: config.base_url
}
// Process all methods
@@ -110,6 +111,13 @@ fn process_parameters(params []openrpc.ContentDescriptorRef) ![]IntermediatePara
}
} else if param is jsonschema.Reference {
//TODO: handle reference
// For now, create a placeholder parameter
intermediate_params << IntermediateParam{
name: 'reference'
description: ''
type_info: 'any'
required: false
}
}
}
@@ -117,7 +125,12 @@ fn process_parameters(params []openrpc.ContentDescriptorRef) ![]IntermediatePara
}
fn process_result(result openrpc.ContentDescriptorRef) !IntermediateParam {
mut intermediate_result := IntermediateParam{}
mut intermediate_result := IntermediateParam{
name: ''
description: ''
type_info: ''
required: false
}
if result is openrpc.ContentDescriptor {
type_info := extract_type_from_schema(result.schema)
@@ -134,9 +147,18 @@ fn process_result(result openrpc.ContentDescriptorRef) !IntermediateParam {
type_info := ref.ref.all_after_last('/')
intermediate_result = IntermediateParam{
name: type_info.to_lower()
description: ''
type_info: type_info
required: false
}
} else {
// Handle any other cases
intermediate_result = IntermediateParam{
name: 'unknown'
description: ''
type_info: 'unknown'
required: false
}
}
return intermediate_result

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env bun
// Dev server script for Hero Models TypeScript client
import { serve } from "bun";
const server = serve({
port: 3000,
fetch(req) {
const url = new URL(req.url);
// Serve static files
if (url.pathname === "/" || url.pathname === "/index.html") {
return new Response(Bun.file("index.html"));
}
// Serve TypeScript files
if (url.pathname.endsWith(".ts")) {
return new Response(Bun.file(url.pathname.slice(1)));
}
// Serve JavaScript files (compiled TypeScript)
if (url.pathname.endsWith(".js")) {
return new Response(Bun.file(url.pathname.slice(1)));
}
return new Response("Not found", { status: 404 });
},
});
console.log(`Dev server running on http://localhost:${server.port}`);
console.log("Press Ctrl+C to stop the server");

View File

@@ -0,0 +1,125 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Hero Models TypeScript Client Test</title>
<style>
body {
font-family: Arial, sans-serif;
margin: 20px;
background-color: #f5f5f5;
}
.container {
max-width: 800px;
margin: 0 auto;
background-color: white;
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
}
h1 {
color: #333;
text-align: center;
}
.test-section {
margin: 20px 0;
padding: 15px;
border: 1px solid #ddd;
border-radius: 5px;
}
button {
background-color: #4CAF50;
color: white;
padding: 10px 15px;
border: none;
border-radius: 4px;
cursor: pointer;
margin: 5px;
}
button:hover {
background-color: #45a049;
}
.result {
margin-top: 10px;
padding: 10px;
background-color: #f9f9f9;
border: 1px solid #eee;
border-radius: 4px;
white-space: pre-wrap;
font-family: monospace;
}
input, select {
padding: 8px;
margin: 5px;
border: 1px solid #ddd;
border-radius: 4px;
}
</style>
</head>
<body>
<div class="container">
<h1>Hero Models TypeScript Client Test</h1>
<div class="test-section">
<h2>Calendar Operations</h2>
<label>Calendar ID: <input type="number" id="calendarId" value="1"></label>
<button onclick="getCalendar()">Get Calendar</button>
<div class="result" id="calendarResult"></div>
</div>
<div class="test-section">
<h2>User Operations</h2>
<label>User ID: <input type="number" id="userId" value="1"></label>
<button onclick="getUser()">Get User</button>
<div class="result" id="userResult"></div>
</div>
<div class="test-section">
<h2>Event Operations</h2>
<label>Event ID: <input type="number" id="eventId" value="1"></label>
<button onclick="getEvent()">Get Event</button>
<div class="result" id="eventResult"></div>
</div>
</div>
<script type="module">
import { HeroModelsClient } from './client.js';
const client = new HeroModelsClient('http://localhost:8086/api/heromodels');
async function getCalendar() {
const resultDiv = document.getElementById('calendarResult');
try {
const id = parseInt(document.getElementById('calendarId').value);
const calendar = await client.calendar_get({id});
resultDiv.textContent = JSON.stringify(calendar, null, 2);
} catch (error) {
resultDiv.textContent = `Error: ${error.message}`;
}
}
async function getUser() {
const resultDiv = document.getElementById('userResult');
try {
const id = parseInt(document.getElementById('userId').value);
const user = await client.user_get({id});
resultDiv.textContent = JSON.stringify(user, null, 2);
} catch (error) {
resultDiv.textContent = `Error: ${error.message}`;
}
}
        async function getEvent() {
            const resultDiv = document.getElementById('eventResult');
            try {
                const id = parseInt(document.getElementById('eventId').value);
                // The generated client exposes calendar_event_get, not event_get
                const event = await client.calendar_event_get({id});
                resultDiv.textContent = JSON.stringify(event, null, 2);
            } catch (error) {
                resultDiv.textContent = `Error: ${error.message}`;
            }
        }
        // Module scripts do not create globals, so expose the handlers
        // used by the inline onclick attributes above.
        window.getCalendar = getCalendar;
        window.getUser = getUser;
        window.getEvent = getEvent;
</script>
</body>
</html>

View File

@@ -0,0 +1,18 @@
{
"name": "heromodels-client",
"version": "1.0.0",
"description": "TypeScript client for Hero Models API",
"main": "client.ts",
"type": "module",
"scripts": {
"dev": "bun run dev-server.ts",
"build": "tsc"
},
"dependencies": {
"node-fetch": "^3.3.2"
},
"devDependencies": {
"typescript": "^5.0.0",
"@types/node": "^20.0.0"
}
}

View File

@@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "ES2020",
"moduleResolution": "node",
"lib": ["ES2020", "DOM"],
"outDir": "./dist",
"rootDir": ".",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true
},
"include": [
"*.ts",
"templates/*.ts"
],
"exclude": [
"node_modules",
"dist"
]
}