docs: Formatting the code
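
This commit reflows the HeroFS/heromodels example scripts and the db/fs modules so they are consistent with V formatter (`v fmt`) conventions, as the hunks below show: struct-init fields lose their trailing commas, multi-line array literals gain a trailing comma after the last element, string literals use single quotes (including inside `${...}` interpolation), comments get a space after `//`, compile-time chains get a space in `} $else`, and `dump`/`load` take `mut e encoder.Encoder` instead of `mut e &encoder.Encoder`.

A minimal before/after sketch of the struct-init rule, lifted from the `DBFsDir.new` hunk below (the field names are the ones in that hunk):

	// before: fields separated by commas, C-style
	mut o := FsDir{
		name: args.name,
		fs_id: args.fs_id,
		parent_id: args.parent_id
	}

	// after: v fmt drops commas between struct-init fields on separate lines
	mut o := FsDir{
		name: args.name
		fs_id: args.fs_id
		parent_id: args.parent_id
	}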
@@ -17,251 +17,251 @@ fn main() {
 	// Initialize the HeroFS factory
 	mut fs_factory := herofs.new()!
 	println('HeroFS factory initialized')

 	// Create a new filesystem
 	mut my_fs := fs_factory.fs.new(
 		name: 'project_workspace'
 		description: 'Project development workspace'
 		quota_bytes: 5 * 1024 * 1024 * 1024 // 5GB quota
 	)!

 	// Save the filesystem to get an ID
 	fs_id := fs_factory.fs.set(my_fs)!
 	println('Created filesystem: ${my_fs.name} with ID: ${fs_id}')

 	// Create root directory
 	mut root_dir := fs_factory.fs_dir.new(
 		name: 'root'
 		fs_id: fs_id
 		parent_id: 0 // Root has no parent
 		description: 'Root directory'
 	)!

 	// Save the root directory
 	root_dir_id := fs_factory.fs_dir.set(root_dir)!
 	println('Created root directory with ID: ${root_dir_id}')

 	// Update the filesystem with the root directory ID
 	my_fs.root_dir_id = root_dir_id
 	fs_factory.fs.set(my_fs)!

 	// Create a directory hierarchy
 	println('\nCreating directory hierarchy...')

 	// Main project directories
 	mut src_dir := fs_factory.fs_dir.new(
 		name: 'src'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		description: 'Source code'
 	)!
 	src_dir_id := fs_factory.fs_dir.set(src_dir)!

 	mut docs_dir := fs_factory.fs_dir.new(
 		name: 'docs'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		description: 'Documentation'
 	)!
 	docs_dir_id := fs_factory.fs_dir.set(docs_dir)!

 	mut assets_dir := fs_factory.fs_dir.new(
 		name: 'assets'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		description: 'Project assets'
 	)!
 	assets_dir_id := fs_factory.fs_dir.set(assets_dir)!

 	// Subdirectories
 	mut images_dir := fs_factory.fs_dir.new(
 		name: 'images'
 		fs_id: fs_id
 		parent_id: assets_dir_id
 		description: 'Image assets'
 	)!
 	images_dir_id := fs_factory.fs_dir.set(images_dir)!

 	mut api_docs_dir := fs_factory.fs_dir.new(
 		name: 'api'
 		fs_id: fs_id
 		parent_id: docs_dir_id
 		description: 'API documentation'
 	)!
 	api_docs_dir_id := fs_factory.fs_dir.set(api_docs_dir)!

 	println('Directory hierarchy created successfully')

 	// Create some files with different content types
 	println('\nCreating various files...')

 	// Text file for source code
 	code_content := 'fn main() {\n println("Hello, HeroFS!")\n}\n'.bytes()
 	mut code_blob := fs_factory.fs_blob.new(
 		data: code_content
 		mime_type: 'text/plain'
 		name: 'main.v blob'
 	)!
 	code_blob_id := fs_factory.fs_blob.set(code_blob)!

 	mut code_file := fs_factory.fs_file.new(
 		name: 'main.v'
 		fs_id: fs_id
 		directories: [src_dir_id]
 		blobs: [code_blob_id]
 		mime_type: 'text/plain'
 		metadata: {
-			'language': 'vlang',
+			'language': 'vlang'
 			'version': '0.3.3'
 		}
 	)!
 	code_file_id := fs_factory.fs_file.set(code_file)!

 	// Markdown documentation file
 	docs_content := '# API Documentation\n\n## Endpoints\n\n- GET /api/v1/users\n- POST /api/v1/users\n'.bytes()
 	mut docs_blob := fs_factory.fs_blob.new(
 		data: docs_content
 		mime_type: 'text/markdown'
 		name: 'api.md blob'
 	)!
 	docs_blob_id := fs_factory.fs_blob.set(docs_blob)!

 	mut docs_file := fs_factory.fs_file.new(
 		name: 'api.md'
 		fs_id: fs_id
 		directories: [api_docs_dir_id]
 		blobs: [docs_blob_id]
 		mime_type: 'text/markdown'
 	)!
 	docs_file_id := fs_factory.fs_file.set(docs_file)!

 	// Create a binary file (sample image)
 	// For this example, we'll just create random bytes
 	mut image_data := []u8{len: 1024, init: u8(index % 256)}
 	mut image_blob := fs_factory.fs_blob.new(
 		data: image_data
 		mime_type: 'image/png'
 		name: 'logo.png blob'
 	)!
 	image_blob_id := fs_factory.fs_blob.set(image_blob)!

 	mut image_file := fs_factory.fs_file.new(
 		name: 'logo.png'
 		fs_id: fs_id
 		directories: [images_dir_id]
 		blobs: [image_blob_id]
 		mime_type: 'image/png'
 		metadata: {
-			'width': '200',
-			'height': '100',
+			'width': '200'
+			'height': '100'
 			'format': 'PNG'
 		}
 	)!
 	image_file_id := fs_factory.fs_file.set(image_file)!

 	println('Files created successfully')

 	// Create symlinks
 	println('\nCreating symlinks...')

 	// Symlink to the API docs from the root directory
 	mut api_symlink := fs_factory.fs_symlink.new(
 		name: 'api-docs'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		target_id: api_docs_dir_id
 		target_type: .directory
 		description: 'Shortcut to API documentation'
 	)!
 	api_symlink_id := fs_factory.fs_symlink.set(api_symlink)!

 	// Symlink to the logo from the docs directory
 	mut logo_symlink := fs_factory.fs_symlink.new(
 		name: 'logo.png'
 		fs_id: fs_id
 		parent_id: docs_dir_id
 		target_id: image_file_id
 		target_type: .file
 		description: 'Shortcut to project logo'
 	)!
 	logo_symlink_id := fs_factory.fs_symlink.set(logo_symlink)!

 	println('Symlinks created successfully')

 	// Demonstrate file operations
 	println('\nDemonstrating file operations...')

 	// 1. Move a file to multiple directories (hard link-like behavior)
 	println('Moving logo.png to both images and docs directories...')
 	image_file = fs_factory.fs_file.get(image_file_id)!
 	fs_factory.fs_file.move(image_file_id, [images_dir_id, docs_dir_id])!
 	image_file = fs_factory.fs_file.get(image_file_id)!

 	// 2. Rename a file
 	println('Renaming main.v to app.v...')
 	fs_factory.fs_file.rename(code_file_id, 'app.v')!
 	code_file = fs_factory.fs_file.get(code_file_id)!

 	// 3. Update file metadata
 	println('Updating file metadata...')
 	fs_factory.fs_file.update_metadata(docs_file_id, 'status', 'draft')!
 	fs_factory.fs_file.update_metadata(docs_file_id, 'author', 'HeroFS Team')!

 	// 4. Update file access time when "reading" it
 	println('Updating file access time...')
 	fs_factory.fs_file.update_accessed(docs_file_id)!

 	// 5. Add additional content to a file (append a blob)
 	println('Appending content to API docs...')
 	additional_content := '\n## Authentication\n\nUse Bearer token for authentication.\n'.bytes()
 	mut additional_blob := fs_factory.fs_blob.new(
 		data: additional_content
 		mime_type: 'text/markdown'
 		name: 'api_append.md blob'
 	)!
 	additional_blob_id := fs_factory.fs_blob.set(additional_blob)!
 	fs_factory.fs_file.append_blob(docs_file_id, additional_blob_id)!

 	// Demonstrate directory operations
 	println('\nDemonstrating directory operations...')

 	// 1. Create a new directory and move it
 	mut temp_dir := fs_factory.fs_dir.new(
 		name: 'temp'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		description: 'Temporary directory'
 	)!
 	temp_dir_id := fs_factory.fs_dir.set(temp_dir)!

 	println('Moving temp directory to be under docs...')
 	fs_factory.fs_dir.move(temp_dir_id, docs_dir_id)!

 	// 2. Rename a directory
 	println('Renaming temp directory to drafts...')
 	fs_factory.fs_dir.rename(temp_dir_id, 'drafts')!

 	// 3. Check if a directory has children
 	has_children := fs_factory.fs_dir.has_children(docs_dir_id)!
 	println('Does docs directory have children? ${has_children}')

 	// Demonstrate searching and filtering
 	println('\nDemonstrating searching and filtering...')

 	// 1. List all files in the filesystem
 	all_files := fs_factory.fs_file.list_by_filesystem(fs_id)!
 	println('All files in filesystem (${all_files.len}):')
 	for file in all_files {
 		println('- ${file.name} (ID: ${file.id})')
 	}

 	// 2. List files by MIME type
 	markdown_files := fs_factory.fs_file.list_by_mime_type('text/markdown')!
 	println('\nMarkdown files (${markdown_files.len}):')
 	for file in markdown_files {
 		println('- ${file.name} (ID: ${file.id})')
 	}

 	// 3. List all symlinks
 	all_symlinks := fs_factory.fs_symlink.list_by_filesystem(fs_id)!
 	println('\nAll symlinks (${all_symlinks.len}):')
@@ -269,37 +269,37 @@ fn main() {
 		target_type_str := if symlink.target_type == .file { 'file' } else { 'directory' }
 		println('- ${symlink.name} -> ${symlink.target_id} (${target_type_str})')
 	}

 	// 4. Check for broken symlinks
 	println('\nChecking for broken symlinks:')
 	for symlink in all_symlinks {
 		is_broken := fs_factory.fs_symlink.is_broken(symlink.id)!
-		println('- ${symlink.name}: ${if is_broken { "BROKEN" } else { "OK" }}')
+		println('- ${symlink.name}: ${if is_broken { 'BROKEN' } else { 'OK' }}')
 	}

 	// Demonstrate file content retrieval
 	println('\nDemonstrating file content retrieval:')

 	// Get the updated API docs file and print its content
 	docs_file = fs_factory.fs_file.get(docs_file_id)!
 	println('Content of ${docs_file.name}:')
 	mut full_content := ''

 	for blob_id in docs_file.blobs {
 		blob := fs_factory.fs_blob.get(blob_id)!
 		full_content += blob.data.bytestr()
 	}

 	println('---BEGIN CONTENT---')
 	println(full_content)
 	println('---END CONTENT---')

 	// Print filesystem usage
 	println('\nFilesystem usage:')
 	my_fs = fs_factory.fs.get(fs_id)!
 	println('Used: ${my_fs.used_bytes} bytes')
 	println('Quota: ${my_fs.quota_bytes} bytes')
 	println('Available: ${my_fs.quota_bytes - my_fs.used_bytes} bytes')

 	println('\nHeroFS advanced example completed successfully!')
 }
@@ -10,93 +10,93 @@ fn main() {
 	// Initialize the HeroFS factory
 	mut fs_factory := herofs.new()!
 	println('HeroFS factory initialized')

 	// Create a new filesystem
 	mut my_fs := fs_factory.fs.new(
 		name: 'my_documents'
 		description: 'Personal documents filesystem'
 		quota_bytes: 1024 * 1024 * 1024 // 1GB quota
 	)!

 	// Save the filesystem to get an ID
 	fs_id := fs_factory.fs.set(my_fs)!
 	println('Created filesystem: ${my_fs.name} with ID: ${fs_id}')

 	// Create root directory
 	mut root_dir := fs_factory.fs_dir.new(
 		name: 'root'
 		fs_id: fs_id
 		parent_id: 0 // Root has no parent
 		description: 'Root directory'
 	)!

 	// Save the root directory
 	root_dir_id := fs_factory.fs_dir.set(root_dir)!
 	println('Created root directory with ID: ${root_dir_id}')

 	// Update the filesystem with the root directory ID
 	my_fs.root_dir_id = root_dir_id
 	fs_factory.fs.set(my_fs)!

 	// Create some subdirectories
 	mut docs_dir := fs_factory.fs_dir.new(
 		name: 'documents'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		description: 'Documents directory'
 	)!

 	mut pics_dir := fs_factory.fs_dir.new(
 		name: 'pictures'
 		fs_id: fs_id
 		parent_id: root_dir_id
 		description: 'Pictures directory'
 	)!

 	// Save the subdirectories
 	docs_dir_id := fs_factory.fs_dir.set(docs_dir)!
 	pics_dir_id := fs_factory.fs_dir.set(pics_dir)!
 	println('Created documents directory with ID: ${docs_dir_id}')
 	println('Created pictures directory with ID: ${pics_dir_id}')

 	// Create a text file blob
 	text_content := 'Hello, world! This is a test file in HeroFS.'.bytes()
 	mut text_blob := fs_factory.fs_blob.new(
 		data: text_content
 		mime_type: 'text/plain'
 		name: 'hello.txt blob'
 	)!

 	// Save the blob
 	blob_id := fs_factory.fs_blob.set(text_blob)!
 	println('Created text blob with ID: ${blob_id}')

 	// Create a file referencing the blob
 	mut text_file := fs_factory.fs_file.new(
 		name: 'hello.txt'
 		fs_id: fs_id
 		directories: [docs_dir_id]
 		blobs: [blob_id]
 		mime_type: 'text/plain'
 	)!

 	// Save the file
 	file_id := fs_factory.fs_file.set(text_file)!
 	println('Created text file with ID: ${file_id}')

 	// List all directories in the filesystem
 	dirs := fs_factory.fs_dir.list_by_filesystem(fs_id)!
 	println('\nAll directories in filesystem:')
 	for dir in dirs {
 		println('- ${dir.name} (ID: ${dir.id})')
 	}

 	// List all files in the documents directory
 	files := fs_factory.fs_file.list_by_directory(docs_dir_id)!
 	println('\nFiles in documents directory:')
 	for file in files {
 		println('- ${file.name} (ID: ${file.id}, Size: ${file.size_bytes} bytes)')

 		// Get the file's content from its blobs
 		if file.blobs.len > 0 {
 			blob := fs_factory.fs_blob.get(file.blobs[0])!
@@ -104,6 +104,6 @@ fn main() {
 			println(' Content: "${content}"')
 		}
 	}

 	println('\nHeroFS basic example completed successfully!')
 }
@@ -7,11 +7,11 @@ mut mydb := heromodels.new()!
 // mydb.calendar.db.redis.flushdb()!

 mut o := mydb.calendar.new(
 	name: 'Work Calendar'
 	description: 'Calendar for work events'
 	color: '#FF0000'
 	timezone: 'Europe/Brussels'
 	is_public: false
 )!

 o.events << 2
@@ -7,22 +7,22 @@ mut mydb := heromodels.new()!
 mydb.calendar_event.db.redis.flushdb()!

 mut o := mydb.calendar_event.new(
 	name: 'Team Meeting'
 	description: 'Weekly team meeting'
 	title: 'Team Meeting'
 	start_time: '2023-01-01 10:00:00'
 	end_time: '2023-01-01 11:00:00'
 	location: 'Conference Room A'
 	attendees: [u32(1), u32(2), u32(3)]
 	fs_items: [u32(10), u32(20)]
 	calendar_id: u32(1)
 	status: .published
 	is_all_day: false
 	is_recurring: false
 	recurrence: []
 	reminder_mins: [15, 30]
 	color: '#00FF00'
 	timezone: 'Europe/Brussels'
 )!

 // Add tags if needed
@@ -7,22 +7,22 @@ mut mydb := heromodels.new()!
 // mydb.calendar_event.db.redis.flushdb()!

 mut o := mydb.calendar_event.new(
 	name: 'Team Meeting'
 	description: 'Weekly team meeting'
 	title: 'Team Meeting'
 	start_time: '2023-01-01 10:00:00'
 	end_time: '2023-01-01 11:00:00'
 	location: 'Conference Room A'
 	attendees: [u32(1), u32(2), u32(3)]
 	fs_items: [u32(10), u32(20)]
 	calendar_id: u32(1)
 	status: .published
 	is_all_day: false
 	is_recurring: false
 	recurrence: []
 	reminder_mins: [15, 30]
 	color: '#00FF00'
 	timezone: 'Europe/Brussels'
 )!

 // Add tags if needed
@@ -8,31 +8,31 @@ mydb.calendar_event.db.redis.flushdb()!

 // Create a recurrence rule
 mut rule := heromodels.RecurrenceRule{
 	frequency: .weekly
 	interval: 1
 	until: 1672570800 + 30 * 24 * 60 * 60 // 30 days from start
 	count: 0
 	by_weekday: [1, 3, 5] // Monday, Wednesday, Friday
 	by_monthday: []
 }

 mut o := mydb.calendar_event.new(
 	name: 'Team Meeting'
 	description: 'Weekly team meeting'
 	title: 'Team Meeting'
 	start_time: '2023-01-01 10:00:00'
 	end_time: '2023-01-01 11:00:00'
 	location: 'Conference Room A'
 	attendees: [u32(1), u32(2), u32(3)]
 	fs_items: [u32(10), u32(20)]
 	calendar_id: u32(1)
 	status: .published
 	is_all_day: false
 	is_recurring: true
 	recurrence: [rule]
 	reminder_mins: [15, 30]
 	color: '#00FF00'
 	timezone: 'Europe/Brussels'
 )!

 // Add tags if needed
@@ -7,11 +7,11 @@ mut mydb := heromodels.new()!

 // Create a new chat group
 mut chat_group := mydb.chat_group.new(
 	name: 'General Discussion'
 	description: 'A public channel for general discussions'
 	chat_type: .public_channel
 	last_activity: 0
 	is_archived: false
 )!

 // Save to database
@@ -7,27 +7,27 @@ mut mydb := heromodels.new()!

 // First create a chat group to reference
 mut chat_group := mydb.chat_group.new(
 	name: 'General Discussion'
 	description: 'A public channel for general discussions'
 	chat_type: .public_channel
 	last_activity: 0
 	is_archived: false
 )!
 chat_group_id := mydb.chat_group.set(chat_group)!

 // Create a new chat message
 mut chat_message := mydb.chat_message.new(
 	name: 'Hello World Message'
 	description: 'A simple hello world message'
 	content: 'Hello, world!'
 	chat_group_id: chat_group_id
 	sender_id: 1
 	parent_messages: []
 	fs_files: []
 	message_type: .text
 	status: .sent
 	reactions: []
 	mentions: []
 )!

 // Save to database
@@ -6,11 +6,11 @@ mut mydb := heromodels.new()!

 // Create a new group
 mut o := mydb.group.new(
 	name: 'Development Team'
 	description: 'Group for software developers'
 	is_public: false
 	members: []
 	subgroups: []
 	parent_group: 0
 )!

@@ -6,11 +6,11 @@ mut mydb := heromodels.new()!

 // Create a new group without members
 mut o := mydb.group.new(
 	name: 'Marketing Team'
 	description: 'Group for marketing professionals'
 	is_public: true
 	members: []
 	subgroups: []
 	parent_group: 0
 )!

@@ -6,11 +6,11 @@ mut mydb := heromodels.new()!

 // Create a parent group
 mut parent_group := mydb.group.new(
 	name: 'Company'
 	description: 'Main company group'
 	is_public: true
 	members: []
 	subgroups: []
 	parent_group: 0
 )!

@@ -19,11 +19,11 @@ println('Created Parent Group ID: ${parent_oid}')

 // Create a subgroup
 mut subgroup := mydb.group.new(
 	name: 'Development Team'
 	description: 'Subgroup for developers'
 	is_public: false
 	members: []
 	subgroups: []
 	parent_group: parent_oid
 )!

@@ -6,22 +6,22 @@ mut mydb := heromodels.new()!

 // Create a new group with members
 mut o := mydb.group.new(
 	name: 'Development Team'
 	description: 'Group for software developers'
 	is_public: false
 	members: [
 		heromodels.GroupMember{
 			user_id: 1
 			role: heromodels.GroupRole.admin
 			joined_at: 0 // Will be set when adding to group
 		},
 		heromodels.GroupMember{
 			user_id: 2
 			role: heromodels.GroupRole.writer
 			joined_at: 0 // Will be set when adding to group
-		}
+		},
 	]
 	subgroups: []
 	parent_group: 0
 )!

@@ -8,59 +8,61 @@ mydb.project.db.redis.flushdb()!

 // Create swimlanes
 swimlane1 := heromodels.Swimlane{
 	name: 'todo'
 	description: 'Tasks to be done'
 	order: 1
 	color: '#FF0000'
 	is_done: false
 }

 swimlane2 := heromodels.Swimlane{
 	name: 'in_progress'
 	description: 'Tasks currently being worked on'
 	order: 2
 	color: '#FFFF00'
 	is_done: false
 }

 swimlane3 := heromodels.Swimlane{
 	name: 'done'
 	description: 'Completed tasks'
 	order: 3
 	color: '#00FF00'
 	is_done: true
 }

 // Create milestones
 milestone1 := heromodels.Milestone{
 	name: 'phase_1'
 	description: 'First development phase'
 	due_date: 1672531200 // 2023-01-01
 	completed: false
 	issues: [u32(1), u32(2)]
 }

 milestone2 := heromodels.Milestone{
 	name: 'phase_2'
 	description: 'Second development phase'
 	due_date: 1675209600 // 2023-02-01
 	completed: false
 	issues: [u32(3), u32(4)]
 }

 // Create a new project
 mut project := mydb.project.new(
 	name: 'Sample Project'
 	description: 'A sample project for demonstration'
 	swimlanes: [swimlane1, swimlane2, swimlane3]
 	milestones: [milestone1, milestone2]
 	issues: ['issue1', 'issue2', 'issue3']
 	fs_files: [u32(100), u32(200)]
 	status: .active
 	start_date: '2023-01-01'
 	end_date: '2023-12-31'
 	tags: ['sample', 'demo', 'project']
-	comments: [db.CommentArg{comment: 'This is a sample project'}]
+	comments: [db.CommentArg{
+		comment: 'This is a sample project'
+	}]
 )!

 // Save the project to the database
@@ -8,26 +8,30 @@ mydb.project_issue.db.redis.flushdb()!

 // Create a new project issue
 mut issue := mydb.project_issue.new(
 	name: 'Fix login bug'
 	description: 'Users are unable to login with their credentials'
 	title: 'Login functionality is broken'
 	project_id: u32(1)
 	issue_type: .bug
 	priority: .high
 	status: .open
 	swimlane: 'todo'
 	assignees: [u32(10), u32(20)]
 	reporter: u32(5)
 	milestone: 'phase_1'
 	deadline: '2023-01-15'
 	estimate: 8
 	fs_files: [u32(1000), u32(2000)]
 	parent_id: u32(0)
 	children: [u32(100), u32(101)]
 	tags: ['bug', 'login', 'authentication']
 	comments: [
-		db.CommentArg{comment: 'This issue needs to be fixed urgently'},
-		db.CommentArg{comment: 'I am working on this now'}
+		db.CommentArg{
+			comment: 'This issue needs to be fixed urgently'
+		},
+		db.CommentArg{
+			comment: 'I am working on this now'
+		},
 	]
 )!

@@ -6,19 +6,19 @@ mut mydb := heromodels.new()!

 // Create a new user
 mut o := mydb.user.new(
 	name: 'John Doe'
 	description: 'Software Developer'
 	email: 'john.doe@example.com'
 	public_key: '-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA...\n-----END PUBLIC KEY-----'
 	phone: '+1234567890'
 	address: '123 Main St, City, Country'
 	avatar_url: 'https://example.com/avatar.jpg'
 	bio: 'Experienced software developer with a passion for V language'
 	timezone: 'UTC'
 	status: .active
 	securitypolicy: 0
 	tags: 0
 	comments: []
 )!

 // Save to database
@@ -25,7 +25,7 @@ pub fn encode[T](obj T) ![]u8 {
 			d.add_u32(u32(obj.$(field.name)))
 		} $else $if field.typ is u64 {
 			d.add_u64(u64(obj.$(field.name)))
-		}$else $if field.typ is i64 {
+		} $else $if field.typ is i64 {
 			d.add_i64(i64(obj.$(field.name)))
 		} $else $if field.typ is time.Time {
 			d.add_time(time.new(obj.$(field.name)))
@@ -2,7 +2,6 @@ module db

 import crypto.md5
-

 @[params]
 pub struct CommentArg {
 pub mut:
@@ -9,7 +9,7 @@ pub fn (mut self DB) tags_get(tags []string) !u32 {
 	hash := md5.hexhash(tags_fixed.join(','))
 	tags_found := self.redis.hget('db:tags', hash)!
 	return if tags_found == '' {
-		println('tags_get: new tags: ${tags_fixed.join(",")}')
+		println('tags_get: new tags: ${tags_fixed.join(',')}')
 		id := self.new_id()!
 		self.redis.hset('db:tags', hash, id.str())!
 		self.redis.hset('db:tags', id.str(), tags_fixed.join(','))!
@@ -4,30 +4,30 @@ import freeflowuniverse.herolib.hero.db

 pub struct FsFactory {
 pub mut:
 	fs         DBFs
 	fs_blob    DBFsBlob
 	fs_dir     DBFsDir
 	fs_file    DBFsFile
 	fs_symlink DBFsSymlink
 }

 pub fn new() !FsFactory {
 	mut mydb := db.new()!
 	return FsFactory{
 		fs: DBFs{
 			db: &mydb
-		},
+		}
 		fs_blob: DBFsBlob{
 			db: &mydb
-		},
+		}
 		fs_dir: DBFsDir{
 			db: &mydb
-		},
+		}
 		fs_file: DBFsFile{
 			db: &mydb
-		},
+		}
 		fs_symlink: DBFsSymlink{
 			db: &mydb
 		}
 	}
 }
@@ -30,7 +30,7 @@ pub fn (self Fs) type_name() string {
 	return 'fs'
 }

-pub fn (self Fs) dump(mut e &encoder.Encoder) ! {
+pub fn (self Fs) dump(mut e encoder.Encoder) ! {
 	e.add_string(self.name)
 	e.add_u32(self.group_id)
 	e.add_u32(self.root_dir_id)
@@ -38,7 +38,7 @@ pub fn (self Fs) dump(mut e &encoder.Encoder) ! {
 	e.add_u64(self.used_bytes)
 }

-fn (mut self DBFs) load(mut o Fs, mut e &encoder.Decoder) ! {
+fn (mut self DBFs) load(mut o Fs, mut e encoder.Decoder) ! {
 	o.name = e.get_string()!
 	o.group_id = e.get_u32()!
 	o.root_dir_id = e.get_u32()!
@@ -68,32 +68,32 @@ pub fn (mut self DBFs) new(args FsArg) !Fs {
 		quota_bytes: args.quota_bytes
 		used_bytes: args.used_bytes
 	}

 	// Set base fields
 	o.description = args.description
 	o.tags = self.db.tags_get(args.tags)!
 	o.comments = self.db.comments_get(args.comments)!
 	o.updated_at = ourtime.now().unix()

 	return o
 }

 pub fn (mut self DBFs) set(o Fs) !u32 {
 	id := self.db.set[Fs](o)!

 	// Store name -> id mapping for lookups
 	self.db.redis.hset('fs:names', o.name, id.str())!

 	return id
 }

 pub fn (mut self DBFs) delete(id u32) ! {
 	// Get the filesystem to retrieve its name
 	fs := self.get(id)!

 	// Remove name -> id mapping
 	self.db.redis.hdel('fs:names', fs.name)!

 	// Delete the filesystem
 	self.db.delete[Fs](id)!
 }
@@ -147,4 +147,4 @@ pub fn (mut self DBFs) decrease_usage(id u32, bytes u64) !u64 {
 pub fn (mut self DBFs) check_quota(id u32, additional_bytes u64) !bool {
 	fs := self.get(id)!
 	return (fs.used_bytes + additional_bytes) <= fs.quota_bytes
 }
@@ -11,12 +11,12 @@ import freeflowuniverse.herolib.hero.db
 pub struct FsBlob {
 	db.Base
 pub mut:
 	hash       string // blake192 hash of content
 	data       []u8   // Binary data (max 1MB)
 	size_bytes int    // Size in bytes
 	created_at i64
 	mime_type  string // MIME type
 	encoding   string // Encoding type
 }

 pub struct DBFsBlob {
@@ -28,7 +28,7 @@ pub fn (self FsBlob) type_name() string {
 	return 'fs_blob'
 }

-pub fn (self FsBlob) dump(mut e &encoder.Encoder) ! {
+pub fn (self FsBlob) dump(mut e encoder.Encoder) ! {
 	e.add_string(self.hash)
 	e.add_list_u8(self.data)
 	e.add_int(self.size_bytes)
@@ -37,7 +37,7 @@ pub fn (self FsBlob) dump(mut e &encoder.Encoder) ! {
 	e.add_string(self.encoding)
 }

-fn (mut self DBFsBlob) load(mut o FsBlob, mut e &encoder.Decoder) ! {
+fn (mut self DBFsBlob) load(mut o FsBlob, mut e encoder.Decoder) ! {
 	o.hash = e.get_string()!
 	o.data = e.get_list_u8()!
 	o.size_bytes = e.get_int()!
@@ -76,17 +76,17 @@ pub fn (mut self DBFsBlob) new(args FsBlobArg) !FsBlob {
 		mime_type: args.mime_type
 		encoding: if args.encoding == '' { 'none' } else { args.encoding }
 	}

 	// Calculate hash
 	o.calculate_hash()

 	// Set base fields
 	o.name = args.name
 	o.description = args.description
 	o.tags = self.db.tags_get(args.tags)!
 	o.comments = self.db.comments_get(args.comments)!
 	o.updated_at = ourtime.now().unix()

 	return o
 }

@@ -97,23 +97,23 @@ pub fn (mut self DBFsBlob) set(o FsBlob) !u32 {
 		// Blob already exists, return existing ID
 		return hash_id.u32()
 	}

 	// Use db set function which now returns the ID
 	id := self.db.set[FsBlob](o)!

 	// Store the hash -> id mapping for lookup
 	self.db.redis.hset('fsblob:hashes', o.hash, id.str())!

 	return id
 }

 pub fn (mut self DBFsBlob) delete(id u32) ! {
 	// Get the blob to retrieve its hash
 	mut blob := self.get(id)!

 	// Remove hash -> id mapping
 	self.db.redis.hdel('fsblob:hashes', blob.hash)!

 	// Delete the blob
 	self.db.delete[FsBlob](id)!
 }
@@ -149,4 +149,4 @@ pub fn (mut self DBFsBlob) exists_by_hash(hash string) !bool {
 pub fn (blob FsBlob) verify_integrity() bool {
 	hash := blake3.sum256(blob.data)
 	return hash.hex()[..48] == blob.hash
 }
@@ -12,12 +12,12 @@ import freeflowuniverse.herolib.hero.db
 pub struct FsDir {
 	db.Base
 pub mut:
 	name      string
 	fs_id     u32 // Associated filesystem
 	parent_id u32 // Parent directory ID (0 for root)
 }

-//we only keep the parents, not the children, as children can be found by doing a query on parent_id, we will need some smart hsets to make this fast enough and efficient
+// we only keep the parents, not the children, as children can be found by doing a query on parent_id, we will need some smart hsets to make this fast enough and efficient

 pub struct DBFsDir {
 pub mut:
@@ -28,13 +28,13 @@ pub fn (self FsDir) type_name() string {
 	return 'fs_dir'
 }

-pub fn (self FsDir) dump(mut e &encoder.Encoder) ! {
+pub fn (self FsDir) dump(mut e encoder.Encoder) ! {
 	e.add_string(self.name)
 	e.add_u32(self.fs_id)
 	e.add_u32(self.parent_id)
 }

-fn (mut self DBFsDir) load(mut o FsDir, mut e &encoder.Decoder) ! {
+fn (mut self DBFsDir) load(mut o FsDir, mut e encoder.Decoder) ! {
 	o.name = e.get_string()!
 	o.fs_id = e.get_u32()!
 	o.parent_id = e.get_u32()!
@@ -54,60 +54,60 @@ pub mut:
|
|||||||
// get new directory, not from the DB
|
// get new directory, not from the DB
|
||||||
pub fn (mut self DBFsDir) new(args FsDirArg) !FsDir {
|
pub fn (mut self DBFsDir) new(args FsDirArg) !FsDir {
|
||||||
mut o := FsDir{
|
mut o := FsDir{
|
||||||
name: args.name,
|
name: args.name
|
||||||
fs_id: args.fs_id,
|
fs_id: args.fs_id
|
||||||
parent_id: args.parent_id
|
parent_id: args.parent_id
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set base fields
|
// Set base fields
|
||||||
o.description = args.description
|
o.description = args.description
|
||||||
o.tags = self.db.tags_get(args.tags)!
|
o.tags = self.db.tags_get(args.tags)!
|
||||||
o.comments = self.db.comments_get(args.comments)!
|
o.comments = self.db.comments_get(args.comments)!
|
||||||
o.updated_at = ourtime.now().unix()
|
o.updated_at = ourtime.now().unix()
|
||||||
|
|
||||||
return o
|
return o
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (mut self DBFsDir) set(o FsDir) !u32 {
|
pub fn (mut self DBFsDir) set(o FsDir) !u32 {
|
||||||
id := self.db.set[FsDir](o)!
|
id := self.db.set[FsDir](o)!
|
||||||
|
|
||||||
// Store directory in filesystem's directory index
|
// Store directory in filesystem's directory index
|
||||||
path_key := '${o.fs_id}:${o.parent_id}:${o.name}'
|
path_key := '${o.fs_id}:${o.parent_id}:${o.name}'
|
||||||
self.db.redis.hset('fsdir:paths', path_key, id.str())!
|
self.db.redis.hset('fsdir:paths', path_key, id.str())!
|
||||||
|
|
||||||
// Store in filesystem's directory list using hset
|
// Store in filesystem's directory list using hset
|
||||||
self.db.redis.hset('fsdir:fs:${o.fs_id}', id.str(), id.str())!
|
self.db.redis.hset('fsdir:fs:${o.fs_id}', id.str(), id.str())!
|
||||||
|
|
||||||
// Store in parent's children list using hset
|
// Store in parent's children list using hset
|
||||||
if o.parent_id > 0 {
|
if o.parent_id > 0 {
|
||||||
self.db.redis.hset('fsdir:children:${o.parent_id}', id.str(), id.str())!
|
self.db.redis.hset('fsdir:children:${o.parent_id}', id.str(), id.str())!
|
||||||
}
|
}
|
||||||
|
|
||||||
return id
|
return id
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (mut self DBFsDir) delete(id u32) ! {
|
pub fn (mut self DBFsDir) delete(id u32) ! {
|
||||||
// Get the directory info before deleting
|
// Get the directory info before deleting
|
||||||
dir := self.get(id)!
|
dir := self.get(id)!
|
||||||
|
|
||||||
// Check if directory has children using hkeys
|
// Check if directory has children using hkeys
|
||||||
children := self.db.redis.hkeys('fsdir:children:${id}')!
|
children := self.db.redis.hkeys('fsdir:children:${id}')!
|
||||||
if children.len > 0 {
|
if children.len > 0 {
|
||||||
return error('Cannot delete directory ${dir.name} (ID: ${id}) because it has ${children.len} children')
|
return error('Cannot delete directory ${dir.name} (ID: ${id}) because it has ${children.len} children')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Remove from path index
|
// Remove from path index
|
||||||
path_key := '${dir.fs_id}:${dir.parent_id}:${dir.name}'
|
path_key := '${dir.fs_id}:${dir.parent_id}:${dir.name}'
|
||||||
self.db.redis.hdel('fsdir:paths', path_key)!
|
self.db.redis.hdel('fsdir:paths', path_key)!
|
||||||
|
|
||||||
// Remove from filesystem's directory list using hdel
|
// Remove from filesystem's directory list using hdel
|
||||||
self.db.redis.hdel('fsdir:fs:${dir.fs_id}', id.str())!
|
self.db.redis.hdel('fsdir:fs:${dir.fs_id}', id.str())!
|
||||||
|
|
||||||
// Remove from parent's children list using hdel
|
// Remove from parent's children list using hdel
|
||||||
if dir.parent_id > 0 {
|
if dir.parent_id > 0 {
|
||||||
self.db.redis.hdel('fsdir:children:${dir.parent_id}', id.str())!
|
self.db.redis.hdel('fsdir:children:${dir.parent_id}', id.str())!
|
||||||
}
|
}
|
||||||
|
|
||||||
// Delete the directory itself
|
// Delete the directory itself
|
||||||
self.db.delete[FsDir](id)!
|
self.db.delete[FsDir](id)!
|
||||||
}
|
}
|
||||||
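
Note: the `fsdir:children:${parent_id}` hash maintained by `set` and `delete` above is what the earlier comment about "smart hsets" refers to: children are never stored on the parent, yet listing them is a single `hkeys` call. A minimal lookup sketch, assuming a `fs_factory` from `herofs.new()!` that exposes the underlying `db` handle; `parent_dir_id` is a placeholder:

    // Illustrative child lookup via the fsdir:children index.
    child_ids := fs_factory.db.redis.hkeys('fsdir:children:${parent_dir_id}')!
    for child_id in child_ids {
        child := fs_factory.fs_dir.get(child_id.u32())!
        println('child: ${child.name}')
    }
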
@@ -166,14 +166,14 @@ pub fn (mut self DBFsDir) has_children(dir_id u32) !bool {
// Rename a directory
pub fn (mut self DBFsDir) rename(id u32, new_name string) !u32 {
    mut dir := self.get(id)!

    // Remove old path index
    old_path_key := '${dir.fs_id}:${dir.parent_id}:${dir.name}'
    self.db.redis.hdel('fsdir:paths', old_path_key)!

    // Update name
    dir.name = new_name

    // Save with new name
    return self.set(dir)!
}
@@ -181,7 +181,7 @@ pub fn (mut self DBFsDir) rename(id u32, new_name string) !u32 {
// Move a directory to a new parent
pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) !u32 {
    mut dir := self.get(id)!

    // Check that new parent exists and is in the same filesystem
    if new_parent_id > 0 {
        parent := self.get(new_parent_id)!
@@ -189,19 +189,19 @@ pub fn (mut self DBFsDir) move(id u32, new_parent_id u32) !u32 {
            return error('Cannot move directory across filesystems')
        }
    }

    // Remove old path index
    old_path_key := '${dir.fs_id}:${dir.parent_id}:${dir.name}'
    self.db.redis.hdel('fsdir:paths', old_path_key)!

    // Remove from old parent's children list
    if dir.parent_id > 0 {
        self.db.redis.hdel('fsdir:children:${dir.parent_id}', id.str())!
    }

    // Update parent
    dir.parent_id = new_parent_id

    // Save with new parent
    return self.set(dir)!
}

@@ -13,12 +13,12 @@ pub struct FsFile {
    db.Base
pub mut:
    name string
    fs_id u32 // Associated filesystem
    directories []u32 // Directory IDs where this file exists, means file can be part of multiple directories (like hard links in Linux)
    blobs []u32 // IDs of file content blobs
    size_bytes u64
    mime_type string // e.g., "image/png"
    checksum string // e.g., SHA256 checksum of the file
    accessed_at i64
    metadata map[string]string // Custom metadata
}
@@ -32,27 +32,27 @@ pub fn (self FsFile) type_name() string {
    return 'fs_file'
}

-pub fn (self FsFile) dump(mut e &encoder.Encoder) ! {
+pub fn (self FsFile) dump(mut e encoder.Encoder) ! {
    e.add_string(self.name)
    e.add_u32(self.fs_id)

    // Handle directories
    e.add_u16(u16(self.directories.len))
    for dir_id in self.directories {
        e.add_u32(dir_id)
    }

    // Handle blobs
    e.add_u16(u16(self.blobs.len))
    for blob_id in self.blobs {
        e.add_u32(blob_id)
    }

    e.add_u64(self.size_bytes)
    e.add_string(self.mime_type)
    e.add_string(self.checksum)
    e.add_i64(self.accessed_at)

    // Handle metadata map
    e.add_u16(u16(self.metadata.len))
    for key, value in self.metadata {
@@ -61,29 +61,29 @@ pub fn (self FsFile) dump(mut e &encoder.Encoder) ! {
    }
}

-fn (mut self DBFsFile) load(mut o FsFile, mut e &encoder.Decoder) ! {
+fn (mut self DBFsFile) load(mut o FsFile, mut e encoder.Decoder) ! {
    o.name = e.get_string()!
    o.fs_id = e.get_u32()!

    // Load directories
    dirs_count := e.get_u16()!
    o.directories = []u32{cap: int(dirs_count)}
    for _ in 0 .. dirs_count {
        o.directories << e.get_u32()!
    }

    // Load blobs
    blobs_count := e.get_u16()!
    o.blobs = []u32{cap: int(blobs_count)}
    for _ in 0 .. blobs_count {
        o.blobs << e.get_u32()!
    }

    o.size_bytes = e.get_u64()!
    o.mime_type = e.get_string()!
    o.checksum = e.get_string()!
    o.accessed_at = e.get_i64()!

    // Load metadata map
    metadata_count := e.get_u16()!
    o.metadata = map[string]string{}
@@ -97,17 +97,17 @@ fn (mut self DBFsFile) load(mut o FsFile, mut e &encoder.Decoder) ! {
@[params]
pub struct FsFileArg {
pub mut:
    name string @[required]
    description string
    fs_id u32 @[required]
    directories []u32 @[required]
    blobs []u32
    size_bytes u64
    mime_type string
    checksum string
    metadata map[string]string
    tags []string
    comments []db.CommentArg
}

// get new file, not from the DB
@@ -121,38 +121,38 @@ pub fn (mut self DBFsFile) new(args FsFileArg) !FsFile {
            if !blob_exists {
                return error('Blob with ID ${blob_id} does not exist')
            }

            // Get blob data
            mut blob_obj, blob_data := self.db.get_data[FsBlob](blob_id)!
            mut e_decoder := encoder.decoder_new(blob_data)

            // Skip hash
            e_decoder.get_string()!

            // Skip data, get size directly
            e_decoder.get_list_u8()!
            size += u64(e_decoder.get_int()!)
        }
    }

    mut o := FsFile{
-       name: args.name,
-       fs_id: args.fs_id,
-       directories: args.directories,
-       blobs: args.blobs,
-       size_bytes: size,
-       mime_type: args.mime_type,
-       checksum: args.checksum,
-       accessed_at: ourtime.now().unix(),
+       name: args.name
+       fs_id: args.fs_id
+       directories: args.directories
+       blobs: args.blobs
+       size_bytes: size
+       mime_type: args.mime_type
+       checksum: args.checksum
+       accessed_at: ourtime.now().unix()
        metadata: args.metadata
    }

    // Set base fields
    o.description = args.description
    o.tags = self.db.tags_get(args.tags)!
    o.comments = self.db.comments_get(args.comments)!
    o.updated_at = ourtime.now().unix()

    return o
}
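
Note: because `directories` is a list, one `FsFile` record can sit in several directories at once, much like hard links in Linux. A minimal sketch of that, assuming a `fs_factory` from `herofs.new()!`; `fs_id`, `dir_a_id` and `dir_b_id` are placeholders for previously created ids:

    // Illustrative: one file visible in two directories (hard-link style).
    mut notes := fs_factory.fs_file.new(
        name: 'notes.md'
        fs_id: fs_id
        directories: [dir_a_id, dir_b_id]
    )!
    file_id := fs_factory.fs_file.set(notes)!
    // set() indexes the file under '<dir_id>:notes.md' for both parents.
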
@@ -164,7 +164,7 @@ pub fn (mut self DBFsFile) set(o FsFile) !u32 {
            return error('Directory with ID ${dir_id} does not exist')
        }
    }

    // Check that blobs exist
    for blob_id in o.blobs {
        blob_exists := self.db.exists[FsBlob](blob_id)!
@@ -172,52 +172,52 @@ pub fn (mut self DBFsFile) set(o FsFile) !u32 {
            return error('Blob with ID ${blob_id} does not exist')
        }
    }

    id := self.db.set[FsFile](o)!

    // Store file in each directory's file index
    for dir_id in o.directories {
        // Store by name in each directory
        path_key := '${dir_id}:${o.name}'
        self.db.redis.hset('fsfile:paths', path_key, id.str())!

        // Add to directory's file list using hset
        self.db.redis.hset('fsfile:dir:${dir_id}', id.str(), id.str())!
    }

    // Store in filesystem's file list using hset
    self.db.redis.hset('fsfile:fs:${o.fs_id}', id.str(), id.str())!

    // Store by mimetype using hset
    if o.mime_type != '' {
        self.db.redis.hset('fsfile:mime:${o.mime_type}', id.str(), id.str())!
    }

    return id
}

pub fn (mut self DBFsFile) delete(id u32) ! {
    // Get the file info before deleting
    file := self.get(id)!

    // Remove from each directory's file index
    for dir_id in file.directories {
        // Remove from path index
        path_key := '${dir_id}:${file.name}'
        self.db.redis.hdel('fsfile:paths', path_key)!

        // Remove from directory's file list using hdel
        self.db.redis.hdel('fsfile:dir:${dir_id}', id.str())!
    }

    // Remove from filesystem's file list using hdel
    self.db.redis.hdel('fsfile:fs:${file.fs_id}', id.str())!

    // Remove from mimetype index using hdel
    if file.mime_type != '' {
        self.db.redis.hdel('fsfile:mime:${file.mime_type}', id.str())!
    }

    // Delete the file itself
    self.db.delete[FsFile](id)!
}
@@ -284,28 +284,28 @@ pub fn (mut self DBFsFile) append_blob(id u32, blob_id u32) !u32 {
    if !blob_exists {
        return error('Blob with ID ${blob_id} does not exist')
    }

    // Get blob size
    mut blob_obj, blob_data := self.db.get_data[FsBlob](blob_id)!
    mut e_decoder := encoder.decoder_new(blob_data)

    // Skip hash
    e_decoder.get_string()!

    // Skip data, get size directly
    e_decoder.get_list_u8()!
    blob_size := e_decoder.get_int()!

    // Get file
    mut file := self.get(id)!

    // Add blob if not already in the list
    if blob_id !in file.blobs {
        file.blobs << blob_id
        file.size_bytes += u64(blob_size)
        file.updated_at = ourtime.now().unix()
    }

    // Save file
    return self.set(file)!
}
@@ -328,16 +328,16 @@ pub fn (mut self DBFsFile) update_metadata(id u32, key string, value string) !u3
// Rename a file
pub fn (mut self DBFsFile) rename(id u32, new_name string) !u32 {
    mut file := self.get(id)!

    // Remove old path indexes
    for dir_id in file.directories {
        old_path_key := '${dir_id}:${file.name}'
        self.db.redis.hdel('fsfile:paths', old_path_key)!
    }

    // Update name
    file.name = new_name

    // Save with new name
    return self.set(file)!
}
@@ -345,7 +345,7 @@ pub fn (mut self DBFsFile) rename(id u32, new_name string) !u32 {
// Move file to different directories
pub fn (mut self DBFsFile) move(id u32, new_directories []u32) !u32 {
    mut file := self.get(id)!

    // Check that all new directories exist
    for dir_id in new_directories {
        dir_exists := self.db.exists[FsDir](dir_id)!
@@ -353,17 +353,17 @@ pub fn (mut self DBFsFile) move(id u32, new_directories []u32) !u32 {
            return error('Directory with ID ${dir_id} does not exist')
        }
    }

    // Remove from old directories
    for dir_id in file.directories {
        path_key := '${dir_id}:${file.name}'
        self.db.redis.hdel('fsfile:paths', path_key)!
        self.db.redis.hdel('fsfile:dir:${dir_id}', id.str())!
    }

    // Update directories
    file.directories = new_directories

    // Save with new directories
    return self.set(file)!
}

@@ -33,7 +33,7 @@ pub fn (self FsSymlink) type_name() string {
    return 'fs_symlink'
}

-pub fn (self FsSymlink) dump(mut e &encoder.Encoder) ! {
+pub fn (self FsSymlink) dump(mut e encoder.Encoder) ! {
    e.add_string(self.name)
    e.add_u32(self.fs_id)
    e.add_u32(self.parent_id)
@@ -41,7 +41,7 @@ pub fn (self FsSymlink) dump(mut e &encoder.Encoder) ! {
    e.add_u8(u8(self.target_type))
}

-fn (mut self DBFsSymlink) load(mut o FsSymlink, mut e &encoder.Decoder) ! {
+fn (mut self DBFsSymlink) load(mut o FsSymlink, mut e encoder.Decoder) ! {
    o.name = e.get_string()!
    o.fs_id = e.get_u32()!
    o.parent_id = e.get_u32()!
@@ -54,9 +54,9 @@ pub struct FsSymlinkArg {
pub mut:
    name string @[required]
    description string
    fs_id u32 @[required]
    parent_id u32 @[required]
    target_id u32 @[required]
    target_type SymlinkTargetType @[required]
    tags []string
    comments []db.CommentArg
@@ -65,19 +65,19 @@ pub mut:
// get new symlink, not from the DB
pub fn (mut self DBFsSymlink) new(args FsSymlinkArg) !FsSymlink {
    mut o := FsSymlink{
-       name: args.name,
-       fs_id: args.fs_id,
-       parent_id: args.parent_id,
-       target_id: args.target_id,
+       name: args.name
+       fs_id: args.fs_id
+       parent_id: args.parent_id
+       target_id: args.target_id
        target_type: args.target_type
    }

    // Set base fields
    o.description = args.description
    o.tags = self.db.tags_get(args.tags)!
    o.comments = self.db.comments_get(args.comments)!
    o.updated_at = ourtime.now().unix()

    return o
}

@@ -89,7 +89,7 @@ pub fn (mut self DBFsSymlink) set(o FsSymlink) !u32 {
            return error('Parent directory with ID ${o.parent_id} does not exist')
        }
    }

    // Check target exists based on target type
    if o.target_type == .file {
        target_exists := self.db.exists[FsFile](o.target_id)!
@@ -102,44 +102,44 @@ pub fn (mut self DBFsSymlink) set(o FsSymlink) !u32 {
            return error('Target directory with ID ${o.target_id} does not exist')
        }
    }

    id := self.db.set[FsSymlink](o)!

    // Store symlink in parent directory's symlink index
    path_key := '${o.parent_id}:${o.name}'
    self.db.redis.hset('fssymlink:paths', path_key, id.str())!

    // Add to parent's symlinks list using hset
    self.db.redis.hset('fssymlink:parent:${o.parent_id}', id.str(), id.str())!

    // Store in filesystem's symlink list using hset
    self.db.redis.hset('fssymlink:fs:${o.fs_id}', id.str(), id.str())!

    // Store in target's referrers list using hset
    target_key := '${o.target_type}:${o.target_id}'
    self.db.redis.hset('fssymlink:target:${target_key}', id.str(), id.str())!

    return id
}

pub fn (mut self DBFsSymlink) delete(id u32) ! {
    // Get the symlink info before deleting
    symlink := self.get(id)!

    // Remove from path index
    path_key := '${symlink.parent_id}:${symlink.name}'
    self.db.redis.hdel('fssymlink:paths', path_key)!

    // Remove from parent's symlinks list using hdel
    self.db.redis.hdel('fssymlink:parent:${symlink.parent_id}', id.str())!

    // Remove from filesystem's symlink list using hdel
    self.db.redis.hdel('fssymlink:fs:${symlink.fs_id}', id.str())!

    // Remove from target's referrers list using hdel
    target_key := '${symlink.target_type}:${symlink.target_id}'
    self.db.redis.hdel('fssymlink:target:${target_key}', id.str())!

    // Delete the symlink itself
    self.db.delete[FsSymlink](id)!
}
@@ -203,14 +203,14 @@ pub fn (mut self DBFsSymlink) list_by_target(target_type SymlinkTargetType, targ
// Rename a symlink
pub fn (mut self DBFsSymlink) rename(id u32, new_name string) !u32 {
    mut symlink := self.get(id)!

    // Remove old path index
    old_path_key := '${symlink.parent_id}:${symlink.name}'
    self.db.redis.hdel('fssymlink:paths', old_path_key)!

    // Update name
    symlink.name = new_name

    // Save with new name
    return self.set(symlink)!
}
@@ -218,7 +218,7 @@ pub fn (mut self DBFsSymlink) rename(id u32, new_name string) !u32 {
// Move symlink to a new parent directory
pub fn (mut self DBFsSymlink) move(id u32, new_parent_id u32) !u32 {
    mut symlink := self.get(id)!

    // Check that new parent exists and is in the same filesystem
    if new_parent_id > 0 {
        parent_data, _ := self.db.get_data[FsDir](new_parent_id)!
@@ -226,17 +226,17 @@ pub fn (mut self DBFsSymlink) move(id u32, new_parent_id u32) !u32 {
            return error('Cannot move symlink across filesystems')
        }
    }

    // Remove old path index
    old_path_key := '${symlink.parent_id}:${symlink.name}'
    self.db.redis.hdel('fssymlink:paths', old_path_key)!

    // Remove from old parent's symlinks list using hdel
    self.db.redis.hdel('fssymlink:parent:${symlink.parent_id}', id.str())!

    // Update parent
    symlink.parent_id = new_parent_id

    // Save with new parent
    return self.set(symlink)!
}
@@ -244,7 +244,7 @@ pub fn (mut self DBFsSymlink) move(id u32, new_parent_id u32) !u32 {
// Redirect symlink to a new target
pub fn (mut self DBFsSymlink) redirect(id u32, new_target_id u32, new_target_type SymlinkTargetType) !u32 {
    mut symlink := self.get(id)!

    // Check new target exists
    if new_target_type == .file {
        target_exists := self.db.exists[FsFile](new_target_id)!
@@ -257,15 +257,15 @@ pub fn (mut self DBFsSymlink) redirect(id u32, new_target_id u32, new_target_typ
            return error('Target directory with ID ${new_target_id} does not exist')
        }
    }

    // Remove from old target's referrers list
    old_target_key := '${symlink.target_type}:${symlink.target_id}'
    self.db.redis.hdel('fssymlink:target:${old_target_key}', id.str())!

    // Update target
    symlink.target_id = new_target_id
    symlink.target_type = new_target_type

    // Save with new target
    return self.set(symlink)!
}
@@ -279,12 +279,12 @@ pub fn (mut self DBFsSymlink) resolve(id u32) !u32 {
// Check if a symlink is broken (target doesn't exist)
pub fn (mut self DBFsSymlink) is_broken(id u32) !bool {
    symlink := self.get(id)!

    if symlink.target_type == .file {
        return !self.db.exists[FsFile](symlink.target_id)!
    } else if symlink.target_type == .directory {
        return !self.db.exists[FsDir](symlink.target_id)!
    }

    return true // Unknown target type is considered broken
}
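
Note: symlink targets are plain ids with no reference counting, so deleting a file or directory can strand the links pointing at it; `is_broken` above is the detection hook. A cleanup sketch, assuming a `fs_factory` from `herofs.new()!` with a `fs_symlink` accessor and access to the raw `db` handle; `parent_dir_id` is a placeholder:

    // Illustrative: prune dangling symlinks under one parent directory.
    link_ids := fs_factory.db.redis.hkeys('fssymlink:parent:${parent_dir_id}')!
    for lid in link_ids {
        if fs_factory.fs_symlink.is_broken(lid.u32())! {
            fs_factory.fs_symlink.delete(lid.u32())!
        }
    }
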
@@ -24,14 +24,14 @@ pub fn (self Calendar) type_name() string {
    return 'calendar'
}

-pub fn (self Calendar) dump(mut e &encoder.Encoder) ! {
+pub fn (self Calendar) dump(mut e encoder.Encoder) ! {
    e.add_list_u32(self.events)
    e.add_string(self.color)
    e.add_string(self.timezone)
    e.add_bool(self.is_public)
}

-fn (mut self DBCalendar) load(mut o Calendar, mut e &encoder.Decoder) ! {
+fn (mut self DBCalendar) load(mut o Calendar, mut e encoder.Decoder) ! {
    o.events = e.get_list_u32()!
    o.color = e.get_string()!
    o.timezone = e.get_string()!
@@ -52,17 +52,17 @@ pub mut:
// get new calendar, not from the DB
pub fn (mut self DBCalendar) new(args CalendarArg) !Calendar {
    mut o := Calendar{
        color: args.color
        timezone: args.timezone
        is_public: args.is_public
        events: args.events
    }

    // Set base fields
    o.name = args.name
    o.description = args.description
    o.updated_at = ourtime.now().unix()

    return o
}

@@ -89,4 +89,3 @@ pub fn (mut self DBCalendar) get(id u32) !Calendar {
pub fn (mut self DBCalendar) list() ![]Calendar {
    return self.db.list[Calendar]()!.map(self.get(it)!)
}
-
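
Note: `list` resolves every stored id through `get`, so it returns fully decoded `Calendar` values. A usage sketch, assuming the `heromodels` factory defined later in this diff and that `DBCalendar` has a `set` counterpart to `get`/`list` like the filesystem models:

    // Illustrative: create and enumerate calendars.
    mut models := heromodels.new()!
    mut cal := models.calendar.new(name: 'team', timezone: 'UTC', is_public: true)!
    cal_id := models.calendar.set(cal)!
    for c in models.calendar.list()! {
        println('${c.name} (${c.timezone})')
    }
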
@@ -1,4 +1,3 @@
-
module heromodels

import freeflowuniverse.herolib.data.encoder
@@ -80,7 +79,7 @@ pub fn (self CalendarEvent) type_name() string {
    return 'calendar_event'
}

-pub fn (self CalendarEvent) dump(mut e &encoder.Encoder) ! {
+pub fn (self CalendarEvent) dump(mut e encoder.Encoder) ! {
    e.add_string(self.title)
    e.add_i64(self.start_time)
    e.add_i64(self.end_time)
@@ -108,7 +107,7 @@ pub fn (self CalendarEvent) dump(mut e &encoder.Encoder) ! {
    e.add_string(self.timezone)
}

-fn (mut self DBCalendarEvent) load(mut o CalendarEvent, mut e &encoder.Decoder) ! {
+fn (mut self DBCalendarEvent) load(mut o CalendarEvent, mut e encoder.Decoder) ! {
    o.title = e.get_string()!
    o.start_time = e.get_i64()!
    o.end_time = e.get_i64()!
@@ -116,7 +115,7 @@ fn (mut self DBCalendarEvent) load(mut o CalendarEvent, mut e &encoder.Decoder)
    o.attendees = e.get_list_u32()!
    o.fs_items = e.get_list_u32()!
    o.calendar_id = e.get_u32()!
-   o.status = unsafe { EventStatus(e.get_u8()!) } //TODO: is there no better way?
+   o.status = unsafe { EventStatus(e.get_u8()!) } // TODO: is there no better way?
    o.is_all_day = e.get_bool()!
    o.is_recurring = e.get_bool()!
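
Note: the `unsafe { EventStatus(e.get_u8()!) }` cast flagged by the TODO works, but V integer-to-enum casts are unchecked, so a corrupted byte yields an out-of-range enum value. The usual defensive pattern is a range guard before the cast; a sketch, where `.cancelled` stands in for whatever the last `EventStatus` variant actually is:

    // Illustrative: checked u8 -> enum conversion instead of a bare cast.
    raw := e.get_u8()!
    if raw > u8(EventStatus.cancelled) { // placeholder for the last variant
        return error('invalid EventStatus value: ${raw}')
    }
    o.status = unsafe { EventStatus(raw) }
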
@@ -30,13 +30,13 @@ pub fn (self ChatGroup) type_name() string {
    return 'chat_group'
}

-pub fn (self ChatGroup) dump(mut e &encoder.Encoder) ! {
+pub fn (self ChatGroup) dump(mut e encoder.Encoder) ! {
    e.add_u8(u8(self.chat_type))
    e.add_i64(self.last_activity)
    e.add_bool(self.is_archived)
}

-fn (mut self DBChatGroup) load(mut o ChatGroup, mut e &encoder.Decoder) ! {
+fn (mut self DBChatGroup) load(mut o ChatGroup, mut e encoder.Decoder) ! {
    o.chat_type = unsafe { ChatType(e.get_u8()!) }
    o.last_activity = e.get_i64()!
    o.is_archived = e.get_bool()!

@@ -10,10 +10,10 @@ pub struct ChatMessage {
    db.Base
pub mut:
    content string
    chat_group_id u32 // Associated chat group
    sender_id u32 // User ID of sender
    parent_messages []MessageLink // Referenced/replied messages
    fs_files []u32 // IDs of linked files
    message_type MessageType
    status MessageStatus
    reactions []MessageReaction
@@ -67,7 +67,7 @@ pub fn (self ChatMessage) type_name() string {
    return 'chat_message'
}

-pub fn (self ChatMessage) dump(mut e &encoder.Encoder) ! {
+pub fn (self ChatMessage) dump(mut e encoder.Encoder) ! {
    e.add_string(self.content)
    e.add_u32(self.chat_group_id)
    e.add_u32(self.sender_id)
@@ -94,7 +94,7 @@ pub fn (self ChatMessage) dump(mut e &encoder.Encoder) ! {
    e.add_list_u32(self.mentions)
}

-fn (mut self DBChatMessage) load(mut o ChatMessage, mut e &encoder.Decoder) ! {
+fn (mut self DBChatMessage) load(mut o ChatMessage, mut e encoder.Decoder) ! {
    o.content = e.get_string()!
    o.chat_group_id = e.get_u32()!
    o.sender_id = e.get_u32()!
@@ -137,20 +137,20 @@ fn (mut self DBChatMessage) load(mut o ChatMessage, mut e &encoder.Decoder) ! {
@[params]
pub struct ChatMessageArg {
pub mut:
    name string
    description string
    content string
    chat_group_id u32
    sender_id u32
    parent_messages []MessageLink
    fs_files []u32
    message_type MessageType
    status MessageStatus
    reactions []MessageReaction
    mentions []u32
    securitypolicy u32
    tags []string
    comments []db.CommentArg
}

// get new chat message, not from the DB

@@ -26,13 +26,13 @@ pub fn (self Comment) type_name() string {
    return 'comments'
}

-pub fn (self Comment) dump(mut e &encoder.Encoder) ! {
+pub fn (self Comment) dump(mut e encoder.Encoder) ! {
    e.add_string(self.comment)
    e.add_u32(self.parent)
    e.add_u32(self.author)
}

-fn (mut self DBComments) load(mut o Comment, mut e &encoder.Decoder) ! {
+fn (mut self DBComments) load(mut o Comment, mut e encoder.Decoder) ! {
    o.comment = e.get_string()!
    o.parent = e.get_u32()!
    o.author = e.get_u32()!

@@ -4,45 +4,45 @@ import freeflowuniverse.herolib.hero.db

pub struct ModelsFactory {
pub mut:
    comments DBComments
    calendar DBCalendar
    calendar_event DBCalendarEvent
    group DBGroup
    user DBUser
    project DBProject
    project_issue DBProjectIssue
    chat_group DBChatGroup
    chat_message DBChatMessage
}

pub fn new() !ModelsFactory {
    mut mydb := db.new()!
    return ModelsFactory{
        comments: DBComments{
            db: &mydb
        }
        calendar: DBCalendar{
            db: &mydb
        }
        calendar_event: DBCalendarEvent{
            db: &mydb
        }
        group: DBGroup{
            db: &mydb
        }
        user: DBUser{
            db: &mydb
        }
        project: DBProject{
            db: &mydb
        }
        project_issue: DBProjectIssue{
            db: &mydb
        }
        chat_group: DBChatGroup{
            db: &mydb
        }
        chat_message: DBChatMessage{
            db: &mydb
        }
    }
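
Note: every accessor is constructed around the same `&mydb` reference, so one `ModelsFactory` per process is enough and all models read and write the same store. A minimal usage sketch; the `GroupArg` field names are inferred from the `new` function shown further down:

    // Illustrative: one factory, many model accessors, one shared DB.
    mut models := heromodels.new()!
    mut grp := models.group.new(name: 'admins', is_public: false)!
    println('new group: ${grp.name}')
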
@@ -33,7 +33,7 @@ pub fn (self Group) type_name() string {
    return 'group'
}

-pub fn (self Group) dump(mut e &encoder.Encoder) ! {
+pub fn (self Group) dump(mut e encoder.Encoder) ! {
    e.add_u16(u16(self.members.len))
    for member in self.members {
        e.add_u32(member.user_id)
@@ -45,14 +45,14 @@ pub fn (self Group) dump(mut e &encoder.Encoder) ! {
    e.add_bool(self.is_public)
}

-fn (mut self DBGroup) load(mut o Group, mut e &encoder.Decoder) ! {
+fn (mut self DBGroup) load(mut o Group, mut e encoder.Decoder) ! {
    members_len := e.get_u16()!
    mut members := []GroupMember{}
    for _ in 0 .. members_len {
        user_id := e.get_u32()!
        role := unsafe { GroupRole(e.get_u8()!) }
        joined_at := e.get_i64()!

        members << GroupMember{
            user_id: user_id
            role: role
@@ -60,7 +60,7 @@ fn (mut self DBGroup) load(mut o Group, mut e &encoder.Decoder) ! {
        }
    }
    o.members = members

    o.subgroups = e.get_list_u32()!
    o.parent_group = e.get_u32()!
    o.is_public = e.get_bool()!
@@ -90,12 +90,12 @@ pub fn (mut self DBGroup) new(args GroupArg) !Group {
        parent_group: args.parent_group
        is_public: args.is_public
    }

    // Set base fields
    o.name = args.name
    o.description = args.description
    o.updated_at = ourtime.now().unix()

    return o
}

@@ -131,5 +131,4 @@ pub fn (mut self Group) add_member(user_id u32, role GroupRole) {
    self.members << member
}

-
-//CUSTOM FEATURES FOR GROUP
+// CUSTOM FEATURES FOR GROUP

@@ -9,18 +9,18 @@ import freeflowuniverse.herolib.hero.db
pub struct Project {
    db.Base
pub mut:
    swimlanes []Swimlane
    milestones []Milestone
    issues []string // IDs of project issues
    fs_files []u32 // IDs of linked files or dirs
    status ProjectStatus
    start_date i64
    end_date i64
}

pub struct Swimlane {
pub mut:
-   name string //allways to to_lower and trim_space
+   name string // allways to to_lower and trim_space
    description string
    order int
    color string
@@ -29,7 +29,7 @@ pub mut:

pub struct Milestone {
pub mut:
-   name string //allways to to_lower and trim_space
+   name string // allways to to_lower and trim_space
    description string
    due_date i64
    completed bool
@@ -53,7 +53,7 @@ pub fn (self Project) type_name() string {
    return 'project'
}

-pub fn (self Project) dump(mut e &encoder.Encoder) ! {
+pub fn (self Project) dump(mut e encoder.Encoder) ! {
    e.add_u16(u16(self.swimlanes.len))
    for swimlane in self.swimlanes {
        e.add_string(swimlane.name)
@@ -62,7 +62,7 @@ pub fn (self Project) dump(mut e &encoder.Encoder) ! {
        e.add_string(swimlane.color)
        e.add_bool(swimlane.is_done)
    }

    e.add_u16(u16(self.milestones.len))
    for milestone in self.milestones {
        e.add_string(milestone.name)
@@ -71,7 +71,7 @@ pub fn (self Project) dump(mut e &encoder.Encoder) ! {
        e.add_bool(milestone.completed)
        e.add_list_u32(milestone.issues)
    }

    e.add_list_string(self.issues)
    e.add_list_u32(self.fs_files)
    e.add_u8(u8(self.status))
@@ -79,7 +79,7 @@ pub fn (self Project) dump(mut e &encoder.Encoder) ! {
    e.add_i64(self.end_date)
}

-fn (mut self DBProject) load(mut o Project, mut e &encoder.Decoder) ! {
+fn (mut self DBProject) load(mut o Project, mut e encoder.Decoder) ! {
    swimlanes_len := e.get_u16()!
    mut swimlanes := []Swimlane{}
    for _ in 0 .. swimlanes_len {
@@ -88,7 +88,7 @@ fn (mut self DBProject) load(mut o Project, mut e &encoder.Decoder) ! {
        order := e.get_int()!
        color := e.get_string()!
        is_done := e.get_bool()!

        swimlanes << Swimlane{
            name: name
            description: description
@@ -98,7 +98,7 @@ fn (mut self DBProject) load(mut o Project, mut e &encoder.Decoder) ! {
        }
    }
    o.swimlanes = swimlanes

    milestones_len := e.get_u16()!
    mut milestones := []Milestone{}
    for _ in 0 .. milestones_len {
@@ -107,7 +107,7 @@ fn (mut self DBProject) load(mut o Project, mut e &encoder.Decoder) ! {
        due_date := e.get_i64()!
        completed := e.get_bool()!
        issues := e.get_list_u32()!

        milestones << Milestone{
            name: name
            description: description
@@ -117,7 +117,7 @@ fn (mut self DBProject) load(mut o Project, mut e &encoder.Decoder) ! {
        }
    }
    o.milestones = milestones

    o.issues = e.get_list_string()!
    o.fs_files = e.get_list_u32()!
    o.status = unsafe { ProjectStatus(e.get_u8()!) }
@@ -128,15 +128,15 @@ fn (mut self DBProject) load(mut o Project, mut e &encoder.Decoder) ! {
@[params]
pub struct ProjectArg {
pub mut:
    name string
    description string
    swimlanes []Swimlane
    milestones []Milestone
    issues []string
    fs_files []u32
    status ProjectStatus
    start_date string // Use ourtime module to convert to epoch
    end_date string // Use ourtime module to convert to epoch
    securitypolicy u32
    tags []string
    comments []db.CommentArg
@@ -145,13 +145,13 @@ pub mut:
// get new project, not from the DB
pub fn (mut self DBProject) new(args ProjectArg) !Project {
    mut o := Project{
        swimlanes: args.swimlanes
        milestones: args.milestones
        issues: args.issues
        fs_files: args.fs_files
        status: args.status
    }

    // Set base fields
    o.name = args.name
    o.description = args.description
@@ -159,14 +159,14 @@ pub fn (mut self DBProject) new(args ProjectArg) !Project {
    o.tags = self.db.tags_get(args.tags)!
    o.comments = self.db.comments_get(args.comments)!
    o.updated_at = ourtime.now().unix()

    // Convert string dates to Unix timestamps
    mut start_time_obj := ourtime.new(args.start_date)!
    o.start_date = start_time_obj.unix()

    mut end_time_obj := ourtime.new(args.end_date)!
    o.end_date = end_time_obj.unix()

    return o
}
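
Note: `ProjectArg` takes `start_date`/`end_date` as strings and `new` converts them through `ourtime`, so callers pass human-readable dates while `Project` stores epoch seconds. The conversion in isolation, as a sketch (the import path and accepted date format are assumptions about the herolib `ourtime` module):

    // Illustrative: string date -> unix epoch, as done in DBProject.new.
    import freeflowuniverse.herolib.data.ourtime

    mut t := ourtime.new('2025-01-15 09:00')!
    epoch := t.unix() // i64, the representation stored in Project.start_date
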
@@ -9,20 +9,20 @@ import freeflowuniverse.herolib.hero.db
|
|||||||
pub struct ProjectIssue {
|
pub struct ProjectIssue {
|
||||||
db.Base
|
db.Base
|
||||||
pub mut:
|
pub mut:
|
||||||
title string
|
title string
|
||||||
project_id u32 // Associated project
|
project_id u32 // Associated project
|
||||||
issue_type IssueType
|
issue_type IssueType
|
||||||
priority IssuePriority
|
priority IssuePriority
|
||||||
status IssueStatus
|
status IssueStatus
|
||||||
swimlane string // Current swimlane, is string corresponds to name, need to be to_lower and trim_space
|
swimlane string // Current swimlane, is string corresponds to name, need to be to_lower and trim_space
|
||||||
assignees []u32 // User IDs
|
assignees []u32 // User IDs
|
||||||
reporter u32 // User ID who created the issue
|
reporter u32 // User ID who created the issue
|
||||||
milestone string // Associated milestone, is string corresponds to name, need to be to_lower and trim_space
|
milestone string // Associated milestone, is string corresponds to name, need to be to_lower and trim_space
|
||||||
deadline i64 // Unix timestamp
|
deadline i64 // Unix timestamp
|
||||||
estimate int // Story points or hours
|
estimate int // Story points or hours
|
||||||
fs_files []u32 // IDs of linked files
|
fs_files []u32 // IDs of linked files
|
||||||
parent_id u32 // Parent issue ID (for sub-tasks)
|
parent_id u32 // Parent issue ID (for sub-tasks)
|
||||||
children []u32 // Child issue IDs
|
children []u32 // Child issue IDs
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum IssueType {
|
pub enum IssueType {
|
||||||
@@ -62,7 +62,7 @@ pub fn (self ProjectIssue) type_name() string {
|
|||||||
return 'project_issue'
|
return 'project_issue'
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn (self ProjectIssue) dump(mut e &encoder.Encoder) ! {
|
pub fn (self ProjectIssue) dump(mut e encoder.Encoder) ! {
|
||||||
e.add_string(self.title)
|
e.add_string(self.title)
|
||||||
e.add_u32(self.project_id)
|
e.add_u32(self.project_id)
|
||||||
e.add_u8(u8(self.issue_type))
|
e.add_u8(u8(self.issue_type))
|
||||||
@@ -79,7 +79,7 @@ pub fn (self ProjectIssue) dump(mut e &encoder.Encoder) ! {
|
|||||||
e.add_list_u32(self.children)
|
e.add_list_u32(self.children)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn (mut self DBProjectIssue) load(mut o ProjectIssue, mut e &encoder.Decoder) ! {
|
fn (mut self DBProjectIssue) load(mut o ProjectIssue, mut e encoder.Decoder) ! {
|
||||||
o.title = e.get_string()!
|
o.title = e.get_string()!
|
||||||
o.project_id = e.get_u32()!
|
o.project_id = e.get_u32()!
|
||||||
o.issue_type = unsafe { IssueType(e.get_u8()!) }
|
o.issue_type = unsafe { IssueType(e.get_u8()!) }
|
||||||
@@ -99,22 +99,22 @@ fn (mut self DBProjectIssue) load(mut o ProjectIssue, mut e &encoder.Decoder) !
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ProjectIssueArg {
|
pub struct ProjectIssueArg {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string
|
||||||
description string
|
description string
|
||||||
title string
|
title string
|
||||||
project_id u32
|
project_id u32
|
||||||
issue_type IssueType
|
issue_type IssueType
|
||||||
priority IssuePriority
|
priority IssuePriority
|
||||||
status IssueStatus
|
status IssueStatus
|
||||||
swimlane string
|
swimlane string
|
||||||
assignees []u32
|
assignees []u32
|
||||||
reporter u32
|
reporter u32
|
||||||
milestone string
|
milestone string
|
||||||
deadline string // Use ourtime module to convert to epoch
|
deadline string // Use ourtime module to convert to epoch
|
||||||
estimate int
|
estimate int
|
||||||
fs_files []u32
|
fs_files []u32
|
||||||
parent_id u32
|
parent_id u32
|
||||||
children []u32
|
children []u32
|
||||||
securitypolicy u32
|
securitypolicy u32
|
||||||
tags []string
|
tags []string
|
||||||
comments []db.CommentArg
|
comments []db.CommentArg
|
||||||
@@ -145,10 +145,10 @@ pub fn (mut self DBProjectIssue) new(args ProjectIssueArg) !ProjectIssue {
 	if !db_project.exist(args.project_id)! {
 		return error('Project with ID ${args.project_id} does not exist')
 	}

 	// Get the project to validate swimlane and milestone
 	project_obj := db_project.get(args.project_id)!

 	// Validate swimlane exists in the project
 	mut swimlane_exists := false
 	for swimlane in project_obj.swimlanes {
@@ -160,7 +160,7 @@ pub fn (mut self DBProjectIssue) new(args ProjectIssueArg) !ProjectIssue {
 	if !swimlane_exists {
 		return error('Swimlane "${args.swimlane}" does not exist in project "${project_obj.name}"')
 	}

 	// Validate milestone exists in the project
 	mut milestone_exists := false
 	for milestone in project_obj.milestones {
@@ -172,7 +172,7 @@ pub fn (mut self DBProjectIssue) new(args ProjectIssueArg) !ProjectIssue {
 	if !milestone_exists {
 		return error('Milestone "${args.milestone}" does not exist in project "${project_obj.name}"')
 	}

 	// Set base fields
 	o.name = args.name
 	o.description = args.description
@@ -180,11 +180,11 @@ pub fn (mut self DBProjectIssue) new(args ProjectIssueArg) !ProjectIssue {
 	o.tags = self.db.tags_get(args.tags)!
 	o.comments = self.db.comments_get(args.comments)!
 	o.updated_at = ourtime.now().unix()

 	// Convert deadline string to Unix timestamp
 	mut deadline_obj := ourtime.new(args.deadline)!
 	o.deadline = deadline_obj.unix()

 	return o
 }
@@ -30,7 +30,7 @@ pub fn (self User) type_name() string {
 	return 'user'
 }

-pub fn (self User) dump(mut e &encoder.Encoder) ! {
+pub fn (self User) dump(mut e encoder.Encoder) ! {
 	e.add_string(self.email)
 	e.add_string(self.public_key)
 	e.add_string(self.phone)
@@ -41,7 +41,7 @@ pub fn (self User) dump(mut e &encoder.Encoder) ! {
 	e.add_u8(u8(self.status))
 }

-fn (mut self DBUser) load(mut o User, mut e &encoder.Decoder) ! {
+fn (mut self DBUser) load(mut o User, mut e encoder.Decoder) ! {
 	o.email = e.get_string()!
 	o.public_key = e.get_string()!
 	o.phone = e.get_string()!
@@ -55,19 +55,19 @@ fn (mut self DBUser) load(mut o User, mut e &encoder.Decoder) ! {
 @[params]
 pub struct UserArg {
 pub mut:
 	name           string @[required]
 	description    string
 	email          string
 	public_key     string // for encryption/signing
 	phone          string
 	address        string
 	avatar_url     string
 	bio            string
 	timezone       string
 	status         UserStatus
 	securitypolicy u32
 	tags           u32
 	comments       []u32
 }

 pub struct DBUser {
@@ -72,13 +72,13 @@ pub fn (mut handler Handler) register_procedure_handle(method string, procedure

 pub struct Procedure[T, U] {
 pub mut:
 	method   string
 	function fn (T) !U
 }

 pub struct ProcedureVoid[T] {
 pub mut:
 	method   string
 	function fn (T) !
 }

@@ -124,7 +124,7 @@ fn error_to_jsonrpc(err IError) !RPCError {
 // - The JSON-RPC response as a string
 // Note: This method panics if an error occurs during handling
 // pub fn (handler Handler) handle_message(client &websocket.Client, message string) string {
 // 	req := decode_request(message) or {
 // 		return invalid_request }
 // 	resp := handler.handle(req) or { panic(err) }
 // 	return resp.encode()
@@ -138,13 +138,11 @@ fn error_to_jsonrpc(err IError) !RPCError {
 //
 // Returns:
 // - The JSON-RPC response as a string, or an error if processing fails
 pub fn (handler Handler) handle(request Request) !Response {
 	procedure_func := handler.procedures[request.method] or {
 		return new_error(request.id, method_not_found)
 	}

 	// Execute the procedure handler with the request payload
-	return procedure_func(request) or {
-		panic(err)
-	}
+	return procedure_func(request) or { panic(err) }
 }
@@ -16,6 +16,6 @@ pub fn new_handler[T](receiver T) Handler[T] {
 pub fn (mut h Handler[T]) handle(request jsonrpc.Request) !jsonrpc.Response {
 	receiver := h.receiver
 	$for method in receiver.methods {
-		println("method ${method}")
+		println('method ${method}')
 	}
 }
@@ -1,6 +1,6 @@
 module openrpc

-import freeflowuniverse.herolib.core.code { Attribute, Struct, StructField, Type }
+import freeflowuniverse.herolib.core.code { Attribute, Struct, StructField }

 const example_txt = "
 Example: Get pet example.
@@ -40,7 +40,7 @@ const test_struct = Struct{
 	fields: [
 		StructField{
 			name:  'TestField'
 			typ:   code.type_i32
 			attrs: [Attribute{
 				name: 'example'
 				arg:  '21'
@@ -23,7 +23,7 @@ pub mut:
 pub fn new_unix_client(params UNIXClientParams) &UNIXClient {
 	return &UNIXClient{
 		socket_path: params.socket_path
 		timeout:     params.timeout
 	}
 }

@@ -46,11 +46,9 @@ pub fn (mut client UNIXClient) call(method string, params string) !string {
 	response := jsonrpc.decode_response(response_json) or {
 		return error('Failed to decode response: ${err}')
 	}

 	// Validate response
-	response.validate() or {
-		return error('Invalid response: ${err}')
-	}
+	response.validate() or { return error('Invalid response: ${err}') }

 	// Check ID matches
 	if response.id != request.id {
@@ -73,46 +71,40 @@ fn (mut client UNIXClient) send_request(request string) !string {
 	mut conn := unix.connect_stream(client.socket_path) or {
 		return error('Failed to connect to Unix socket at ${client.socket_path}: ${err}')
 	}

 	defer {
 		conn.close() or { console.print_stderr('Error closing connection: ${err}') }
 	}

 	// Set timeout
 	if client.timeout > 0 {
 		conn.set_read_timeout(client.timeout * time.second)
 		conn.set_write_timeout(client.timeout * time.second)
 	}

 	// Send request
 	console.print_debug('Sending request: ${request}')

-	conn.write_string(request) or {
-		return error('Failed to send request: ${err}')
-	}
+	conn.write_string(request) or { return error('Failed to send request: ${err}') }

 	// Read response
 	mut buffer := []u8{len: 4096}
-	bytes_read := conn.read(mut buffer) or {
-		return error('Failed to read response: ${err}')
-	}
+	bytes_read := conn.read(mut buffer) or { return error('Failed to read response: ${err}') }

 	if bytes_read == 0 {
 		return error('No response received from server')
 	}

 	response := buffer[..bytes_read].bytestr()
 	console.print_debug('Received response: ${response}')

 	return response
 }

 // ping sends a simple ping to test connectivity
 pub fn (mut client UNIXClient) ping() !bool {
 	// Try to discover the specification as a connectivity test
-	client.discover() or {
-		return error('Ping failed: ${err}')
-	}
+	client.discover() or { return error('Ping failed: ${err}') }
 	return true
 }
@@ -41,53 +41,53 @@ fn (mut h TestHandler) handle(req jsonrpc.Request) !jsonrpc.Response {
 fn test_unix_client_basic() {
 	// This test requires a running server, so it's more of an integration test
 	// In practice, you would start a server in a separate goroutine or process

 	mut client := new_unix_client(
 		socket_path: '/tmp/test_heromodels'
 		timeout:     5
 	)

 	// Test string-based call
 	result := client.call('test.echo', '{"message": "hello"}') or {
 		println('Expected error since no server is running: ${err}')
 		return
 	}

 	println('Echo result: ${result}')
 }

 fn test_unix_client_typed() {
 	mut client := new_unix_client(
 		socket_path: '/tmp/test_heromodels'
 		timeout:     5
 	)

 	// Test typed call
 	params := TestParams{
 		name:  'test'
 		value: 42
 	}

 	result := client.call_generic[TestParams, TestResult]('test.process', params) or {
 		println('Expected error since no server is running: ${err}')
 		return
 	}

 	println('Typed result: ${result}')
 }

 fn test_unix_client_discover() {
 	mut client := new_unix_client(
 		socket_path: '/tmp/test_heromodels'
 		timeout:     5
 	)

 	// Test discovery
 	spec := client.discover() or {
 		println('Expected error since no server is running: ${err}')
 		return
 	}

 	println('OpenRPC spec version: ${spec.openrpc}')
 	println('Info title: ${spec.info.title}')
 }
@@ -95,81 +95,77 @@ fn test_unix_client_discover() {
 fn test_unix_client_ping() {
 	mut client := new_unix_client(
 		socket_path: '/tmp/test_heromodels'
 		timeout:     5
 	)

 	// Test ping
 	is_alive := client.ping() or {
 		println('Expected error since no server is running: ${err}')
 		return
 	}

 	println('Server is alive: ${is_alive}')
 }

 // Integration test that demonstrates full client-server interaction
 fn test_full_integration() {
 	socket_path := '/tmp/test_heromodels_integration'

 	// Create a test OpenRPC specification
 	mut spec := OpenRPC{
 		openrpc: '1.3.0'
 		info:    Info{
 			title:   'Test API'
 			version: '1.0.0'
 		}
 		methods: [
 			Method{
 				name:   'test.echo'
 				params: []
 				result: ContentDescriptor{
 					name:   'result'
 					schema: jsonschema.Schema{}
 				}
-			}
+			},
 		]
 	}

 	// Create handler
 	mut test_handler := TestHandler{}
 	handler := Handler{
 		specification: spec
 		handler:       test_handler
 	}

 	// Start server in background
 	mut server := new_unix_server(handler, socket_path: socket_path) or {
 		println('Failed to create server: ${err}')
 		return
 	}

 	// Start server in a separate thread
 	spawn fn [mut server] () {
-		server.start() or {
-			println('Server error: ${err}')
-		}
+		server.start() or { println('Server error: ${err}') }
 	}()

 	// Give server time to start
 	// time.sleep(100 * time.millisecond)

 	// Create client and test
 	mut client := new_unix_client(
 		socket_path: socket_path
 		timeout:     5
 	)

 	// Test the connection
 	result := client.call('test.echo', '{"test": "data"}') or {
 		println('Client call failed: ${err}')
 		server.close() or {}
 		return
 	}

 	println('Integration test result: ${result}')

 	// Clean up
-	server.close() or {
-		println('Failed to close server: ${err}')
-	}
+	server.close() or { println('Failed to close server: ${err}') }
 }
@@ -11,7 +11,7 @@ pub fn decode_json_any(data string) !Any {

 pub fn decode_json_string(data string) !string {
 	mut o := decode(data)!
 	return json.encode(o)
 }

 pub fn decode(data string) !OpenRPC {
@@ -13,5 +13,6 @@ fn test_decode() ! {
 	content := doc_file.read()!
 	object := json.decode(OpenRPC, content)!
 	assert object.openrpc == '1.0.0-rc1'
-	assert object.methods.map(it.name) == ['list_pets', 'create_pet', 'get_pet', 'update_pet', 'delete_pet']
+	assert object.methods.map(it.name) == ['list_pets', 'create_pet', 'get_pet', 'update_pet',
+		'delete_pet']
 }
@@ -1,7 +1,7 @@
 module openrpc

 import json
-import x.json2 { Any }
+import x.json2

 // encode encodes an OpenRPC document struct into json string.
 // eliminates undefined variable by calling prune on the initial encoding.
@@ -8,7 +8,7 @@ const blank_openrpc = '{"openrpc": "1.0.0","info": {"version": "1.0.0"},"methods
 // test if encode can correctly encode a blank OpenRPC
 fn test_encode_blank() ! {
 	doc := OpenRPC{
 		info: Info{
 			title:   ''
 			version: '1.0.0'
 		}
@@ -20,7 +20,9 @@ pub fn new(params Params) !OpenRPC {
 	}

 	text := if params.path != '' {
-		os.read_file(params.path) or { return error('Could not read openrpc spec file at ${params.path}: ${err}') }
+		os.read_file(params.path) or {
+			return error('Could not read openrpc spec file at ${params.path}: ${err}')
+		}
 	} else {
 		params.text
 	}
@@ -3,14 +3,11 @@ module openrpc
 import os
 import json

-
-//path to openrpc.json file
-
+// path to openrpc.json file
 pub fn new_handler(openrpc_path string) !Handler {
-	mut openrpc_handler := openrpc.Handler {
+	mut openrpc_handler := Handler{
 		specification: new(path: openrpc_path)!
 	}

 	return openrpc_handler
-
 }
@@ -9,9 +9,9 @@ import freeflowuniverse.herolib.schemas.jsonrpc

 pub struct UNIXServer {
 pub mut:
 	listener    &unix.StreamListener
 	socket_path string
 	handler     Handler @[required]
 }

 @[params]
@@ -30,19 +30,19 @@ pub fn new_unix_server(handler Handler, params UNIXServerParams) !&UNIXServer {
 	if os.exists(params.socket_path) {
 		os.rm(params.socket_path)!
 	}

 	listener := unix.listen_stream(params.socket_path, unix.ListenOptions{})!

 	return &UNIXServer{
 		listener:    listener
 		handler:     handler
 		socket_path: params.socket_path
 	}
 }

 pub fn (mut server UNIXServer) start() ! {
 	console.print_header('Starting HeroModels OpenRPC Server on ${server.socket_path}')

 	for {
 		mut conn := server.listener.accept()!
 		spawn server.handle_connection(mut conn)
@@ -60,7 +60,7 @@ fn (mut server UNIXServer) handle_connection(mut conn unix.StreamConn) {
 	defer {
 		conn.close() or { console.print_stderr('Error closing connection: ${err}') }
 	}

 	for {
 		// Read JSON-RPC request
 		mut buffer := []u8{len: 4096}
@@ -68,11 +68,11 @@ fn (mut server UNIXServer) handle_connection(mut conn unix.StreamConn) {
 			console.print_debug('Connection closed or error reading: ${err}')
 			break
 		}

 		if bytes_read == 0 {
 			break
 		}

 		request_data := buffer[..bytes_read].bytestr()
 		console.print_debug('Received request: ${request_data}')

@@ -109,4 +109,4 @@ fn (mut server UNIXServer) process_request(request_data string) ?string {
 		return jsonrpc.new_error(request.id, jsonrpc.internal_error).encode()
 	}
 	return response.encode()
 }
@@ -19,7 +19,7 @@ pub fn test_new_unix_server() ! {
 	mut server := new_unix_server(handler)!

 	defer {
-		server.close() or {panic(err)}
+		server.close() or { panic(err) }
 	}

 	spawn server.start()
@@ -49,49 +49,48 @@ pub fn test_unix_server_handle_connection() ! {
 		specification: new(path: openrpc_path)!
 	}
 	mut server := new_unix_server(handler)!

 	// Start server in background
 	spawn server.start()

 	// Give server time to start
 	time.sleep(50 * time.millisecond)

 	// Connect to the server
 	mut conn := unix.connect_stream(server.socket_path)!

 	defer {
-		conn.close() or {panic(err)}
-		server.close() or {panic(err)}
+		conn.close() or { panic(err) }
+		server.close() or { panic(err) }
 	}
 	println('Connected to server at ${server.socket_path}')

 	// Test 1: Send rpc.discover request
 	discover_request := jsonrpc.new_request('rpc.discover', '')
 	request_json := discover_request.encode()

 	// Send the request
 	conn.write_string(request_json)!
-

 	// Read the response
 	mut buffer := []u8{len: 4096}
 	bytes_read := conn.read(mut buffer)!
 	response_data := buffer[..bytes_read].bytestr()

 	// Parse and validate response
 	response := jsonrpc.decode_response(response_data)!
 	assert response.id == discover_request.id
 	assert response.is_result()
 	assert !response.is_error()

 	// Validate that the result contains OpenRPC specification
 	result := response.result()!
 	assert result.len > 0

 	// Test 2: Send invalid JSON request
 	invalid_request := '{"invalid": "json"}'
 	conn.write_string(invalid_request)!

 	// Set a short read timeout to test no response behavior
 	conn.set_read_timeout(10 * time.millisecond)

@@ -107,17 +106,17 @@ pub fn test_unix_server_handle_connection() ! {
 	// Test 3: Send request with non-existent method
 	nonexistent_request := jsonrpc.new_request('nonexistent.method', '{}')
 	nonexistent_json := nonexistent_request.encode()

 	conn.write_string(nonexistent_json)!

 	// Read method not found response
 	bytes_read3 := conn.read(mut buffer)!
 	method_error_data := buffer[..bytes_read3].bytestr()

 	method_error_response := jsonrpc.decode_response(method_error_data)!
 	assert method_error_response.is_error()
 	assert method_error_response.id == nonexistent_request.id

 	if error_obj := method_error_response.error() {
 		assert error_obj.code == jsonrpc.method_not_found.code
 	}
@@ -70,7 +70,7 @@ pub fn decode_file_metadata(data []u8) !File {
 	// blocksize is max 2 bytes, so max 4gb entry size
 	blocksize := d.get_u16()!
 	for i in 0 .. blocksize {
-		chunk_ids << d.get_u32()! or { return error('Failed to get block id ${err}') }
+		chunk_ids << d.get_u32() or { return error('Failed to get block id ${err}') }
 	}
 }

@@ -13,12 +13,12 @@ fn test_json_generation() {
 	mut configs := map[string]&CrunConfig{}
 	mut config := new(mut configs, name: 'test')!
 	json_str := config.to_json()!

 	// Parse back to verify structure
 	parsed := json.decode(map[string]json.Any, json_str)!

 	assert parsed['ociVersion']! as string == '1.0.2'

 	process := parsed['process']! as map[string]json.Any
 	assert process['terminal']! as bool == true
 }
@@ -26,11 +26,11 @@ fn test_json_generation() {
 fn test_configuration_methods() {
 	mut configs := map[string]&CrunConfig{}
 	mut config := new(mut configs, name: 'test')!

 	config.set_command(['/bin/echo', 'hello'])
 		.set_working_dir('/tmp')
 		.set_hostname('test-host')

 	assert config.spec.process.args == ['/bin/echo', 'hello']
 	assert config.spec.process.cwd == '/tmp'
 	assert config.spec.hostname == 'test-host'
@@ -39,10 +39,10 @@ fn test_configuration_methods() {
 fn test_validation() {
 	mut configs := map[string]&CrunConfig{}
 	mut config := new(mut configs, name: 'test')!

 	// Should validate successfully with defaults
 	config.validate()!

 	// Should fail with empty args
 	config.spec.process.args = []
 	if _ := config.validate() {
@@ -55,20 +55,20 @@ fn test_validation() {
 fn test_heropods_compatibility() {
 	mut configs := map[string]&CrunConfig{}
 	mut config := new(mut configs, name: 'heropods')!

 	// The default config should match heropods template structure
 	json_str := config.to_json()!
 	parsed := json.decode(map[string]json.Any, json_str)!

 	// Check key fields match template
 	assert parsed['ociVersion']! as string == '1.0.2'

 	process := parsed['process']! as map[string]json.Any
 	assert process['noNewPrivileges']! as bool == true

 	capabilities := process['capabilities']! as map[string]json.Any
 	bounding := capabilities['bounding']! as []json.Any
 	assert 'CAP_AUDIT_WRITE' in bounding.map(it as string)
 	assert 'CAP_KILL' in bounding.map(it as string)
 	assert 'CAP_NET_BIND_SERVICE' in bounding.map(it as string)
 }
@@ -11,7 +11,7 @@ pub fn (config CrunConfig) to_json() !string {
 // Convenience method to save JSON to file
 pub fn (config CrunConfig) save_to_file(path string) ! {
 	json_content := config.to_json()!

 	mut file := pathlib.get_file(path: path, create: true)!
 	file.write(json_content)!
 }
@@ -21,15 +21,15 @@ pub fn (config CrunConfig) validate() ! {
 	if config.spec.oci_version == '' {
 		return error('ociVersion cannot be empty')
 	}

 	if config.spec.process.args.len == 0 {
 		return error('process.args cannot be empty')
 	}

 	if config.spec.root.path == '' {
 		return error('root.path cannot be empty')
 	}

 	// Validate that required capabilities are present
 	required_caps := ['CAP_AUDIT_WRITE', 'CAP_KILL', 'CAP_NET_BIND_SERVICE']
 	for cap in required_caps {
@@ -37,4 +37,4 @@ pub fn (config CrunConfig) validate() ! {
 			return error('missing required capability: ${cap}')
 		}
 	}
 }