refactor: Update Fs and DBFs structures for new fields
- Add `group_id` to Fs and DBFs structures
- Update `FsFile` to include `directories` and `accessed_at` fields
- Update `FsBlobArg` with `mime_type`, `encoding`, and `created_at` fields
- Add usage tracking methods `increase_usage` and `decrease_usage` to DBFs
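Beyond the new fields, the example scripts below also switch to a different save pattern: the factory `set` methods now take the object by value and return the saved object, so callers capture the result instead of passing `mut obj`. A minimal sketch of that pattern, assuming `fs_factory` is the already-constructed HeroFS factory used in the example scripts:

```v
// Hedged sketch, not part of this commit: the updated set() calling pattern.
mut my_fs := fs_factory.fs.new(name: 'my_filesystem')! // name is required in FsArg
// set() now returns the saved object; capture it to keep the stored state (e.g. its id).
my_fs = fs_factory.fs.set(my_fs)!
println('Created filesystem: ${my_fs.name} with ID: ${my_fs.id}')
```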
@@ -18,7 +18,7 @@ fn main() {
 )!

 // Save the filesystem
-fs_factory.fs.set(mut my_fs)!
+my_fs = fs_factory.fs.set(my_fs)!
 println('Created filesystem: ${my_fs.name} with ID: ${my_fs.id}')

 // Create root directory
@@ -28,11 +28,11 @@ fn main() {
 parent_id: 0
 description: 'Root directory'
 )!
-fs_factory.fs_dir.set(mut root_dir)!
+root_dir = fs_factory.fs_dir.set(root_dir)!

 // Update the filesystem with the root directory ID
 my_fs.root_dir_id = root_dir.id
-fs_factory.fs.set(mut my_fs)!
+my_fs = fs_factory.fs.set(my_fs)!

 // Create some sample directory structure
 println('\nCreating sample directory structure...')
@@ -49,15 +49,15 @@ fn main() {
 // Create blobs for file content
 v_code := 'fn main() {\n println("Hello from V!")\n}\n'.bytes()
 mut v_blob := fs_factory.fs_blob.new(data: v_code)!
-fs_factory.fs_blob.set(mut v_blob)!
+v_blob = fs_factory.fs_blob.set(v_blob)!

 readme_content := '# My Project\n\nThis is a sample project.\n\n## Features\n\n- Feature 1\n- Feature 2\n'.bytes()
 mut readme_blob := fs_factory.fs_blob.new(data: readme_content)!
-fs_factory.fs_blob.set(mut readme_blob)!
+readme_blob = fs_factory.fs_blob.set(readme_blob)!

 test_content := 'fn test_main() {\n assert 1 == 1\n}\n'.bytes()
 mut test_blob := fs_factory.fs_blob.new(data: test_content)!
-fs_factory.fs_blob.set(mut test_blob)!
+test_blob = fs_factory.fs_blob.set(test_blob)!

 // Create files
 mut main_file := fs_factory.fs_file.new(
@@ -66,7 +66,7 @@ fn main() {
 blobs: [v_blob.id]
 mime_type: .txt
 )!
-fs_factory.fs_file.set(mut main_file)!
+main_file = fs_factory.fs_file.set(main_file)!
 fs_factory.fs_file.add_to_directory(main_file.id, src_dir_id)!

 mut readme_file := fs_factory.fs_file.new(
@@ -75,7 +75,7 @@ fn main() {
 blobs: [readme_blob.id]
 mime_type: .md
 )!
-fs_factory.fs_file.set(mut readme_file)!
+readme_file = fs_factory.fs_file.set(readme_file)!
 fs_factory.fs_file.add_to_directory(readme_file.id, root_dir.id)!

 mut test_file := fs_factory.fs_file.new(
@@ -84,7 +84,7 @@ fn main() {
 blobs: [test_blob.id]
 mime_type: .txt
 )!
-fs_factory.fs_file.set(mut test_file)!
+test_file = fs_factory.fs_file.set(test_file)!
 fs_factory.fs_file.add_to_directory(test_file.id, test_dir_id)!

 // Create a symbolic link
@@ -96,7 +96,7 @@ fn main() {
 target_type: .file
 description: 'Link to main.v'
 )!
-fs_factory.fs_symlink.set(mut main_symlink)!
+main_symlink = fs_factory.fs_symlink.set(main_symlink)!

 println('Sample filesystem structure created!')

@@ -23,7 +23,7 @@ fn main() {
 )!

 // Save the filesystem
-fs_factory.fs.set(mut my_fs)!
+my_fs = fs_factory.fs.set(my_fs)!
 println('Created filesystem: ${my_fs.name} with ID: ${my_fs.id}')

 // Create root directory
@@ -35,12 +35,12 @@ fn main() {
 )!

 // Save the root directory
-fs_factory.fs_dir.set(mut root_dir)!
+root_dir = fs_factory.fs_dir.set(root_dir)!
 println('Created root directory with ID: ${root_dir.id}')

 // Update the filesystem with the root directory ID
 my_fs.root_dir_id = root_dir.id
-fs_factory.fs.set(mut my_fs)!
+my_fs = fs_factory.fs.set(my_fs)!

 // Create a directory hierarchy
 println('\nCreating directory hierarchy...')
@@ -52,7 +52,7 @@ fn main() {
 parent_id: root_dir.id
 description: 'Source code'
 )!
-fs_factory.fs_dir.set(mut src_dir)!
+src_dir = fs_factory.fs_dir.set(src_dir)!

 mut docs_dir := fs_factory.fs_dir.new(
 name: 'docs'
@@ -60,7 +60,7 @@ fn main() {
 parent_id: root_dir.id
 description: 'Documentation'
 )!
-fs_factory.fs_dir.set(mut docs_dir)!
+docs_dir = fs_factory.fs_dir.set(docs_dir)!

 mut assets_dir := fs_factory.fs_dir.new(
 name: 'assets'
@@ -68,7 +68,7 @@ fn main() {
 parent_id: root_dir.id
 description: 'Project assets'
 )!
-fs_factory.fs_dir.set(mut assets_dir)!
+assets_dir = fs_factory.fs_dir.set(assets_dir)!

 // Subdirectories
 mut images_dir := fs_factory.fs_dir.new(
@@ -77,7 +77,7 @@ fn main() {
 parent_id: assets_dir.id
 description: 'Image assets'
 )!
-fs_factory.fs_dir.set(mut images_dir)!
+images_dir = fs_factory.fs_dir.set(images_dir)!

 mut api_docs_dir := fs_factory.fs_dir.new(
 name: 'api'
@@ -85,19 +85,19 @@ fn main() {
 parent_id: docs_dir.id
 description: 'API documentation'
 )!
-fs_factory.fs_dir.set(mut api_docs_dir)!
+api_docs_dir = fs_factory.fs_dir.set(api_docs_dir)!

 // Add directories to their parents
 root_dir.directories << src_dir.id
 root_dir.directories << docs_dir.id
 root_dir.directories << assets_dir.id
-fs_factory.fs_dir.set(mut root_dir)!
+root_dir = fs_factory.fs_dir.set(root_dir)!

 assets_dir.directories << images_dir.id
-fs_factory.fs_dir.set(mut assets_dir)!
+assets_dir = fs_factory.fs_dir.set(assets_dir)!

 docs_dir.directories << api_docs_dir.id
-fs_factory.fs_dir.set(mut docs_dir)!
+docs_dir = fs_factory.fs_dir.set(docs_dir)!

 println('Directory hierarchy created successfully')

@@ -107,7 +107,7 @@ fn main() {
 // Text file for source code
 code_content := 'fn main() {\n println("Hello, HeroFS!")\n}\n'.bytes()
 mut code_blob := fs_factory.fs_blob.new(data: code_content)!
-fs_factory.fs_blob.set(mut code_blob)!
+code_blob = fs_factory.fs_blob.set(code_blob)!

 mut code_file := fs_factory.fs_file.new(
 name: 'main.v'
@@ -119,13 +119,13 @@ fn main() {
 'version': '0.3.3'
 }
 )!
-fs_factory.fs_file.set(mut code_file)!
+code_file = fs_factory.fs_file.set(code_file)!
 fs_factory.fs_file.add_to_directory(code_file.id, src_dir.id)!

 // Markdown documentation file
 docs_content := '# API Documentation\n\n## Endpoints\n\n- GET /api/v1/users\n- POST /api/v1/users\n'.bytes()
 mut docs_blob := fs_factory.fs_blob.new(data: docs_content)!
-fs_factory.fs_blob.set(mut docs_blob)!
+docs_blob = fs_factory.fs_blob.set(docs_blob)!

 mut docs_file := fs_factory.fs_file.new(
 name: 'api.md'
@@ -133,14 +133,14 @@ fn main() {
 blobs: [docs_blob.id]
 mime_type: .md
 )!
-fs_factory.fs_file.set(mut docs_file)!
+docs_file = fs_factory.fs_file.set(docs_file)!
 fs_factory.fs_file.add_to_directory(docs_file.id, api_docs_dir.id)!

 // Create a binary file (sample image)
 // For this example, we'll just create random bytes
 mut image_data := []u8{len: 1024, init: u8(index % 256)}
 mut image_blob := fs_factory.fs_blob.new(data: image_data)!
-fs_factory.fs_blob.set(mut image_blob)!
+image_blob = fs_factory.fs_blob.set(image_blob)!

 mut image_file := fs_factory.fs_file.new(
 name: 'logo.png'
@@ -153,7 +153,7 @@ fn main() {
 'format': 'PNG'
 }
 )!
-fs_factory.fs_file.set(mut image_file)!
+image_file = fs_factory.fs_file.set(image_file)!
 fs_factory.fs_file.add_to_directory(image_file.id, images_dir.id)!

 println('Files created successfully')
@@ -170,7 +170,7 @@ fn main() {
 target_type: .directory
 description: 'Shortcut to API documentation'
 )!
-fs_factory.fs_symlink.set(mut api_symlink)!
+api_symlink = fs_factory.fs_symlink.set(api_symlink)!

 // Symlink to the logo from the docs directory
 mut logo_symlink := fs_factory.fs_symlink.new(
@@ -181,14 +181,14 @@ fn main() {
 target_type: .file
 description: 'Shortcut to project logo'
 )!
-fs_factory.fs_symlink.set(mut logo_symlink)!
+logo_symlink = fs_factory.fs_symlink.set(logo_symlink)!

 // Add symlinks to their parent directories
 root_dir.symlinks << api_symlink.id
-fs_factory.fs_dir.set(mut root_dir)!
+root_dir = fs_factory.fs_dir.set(root_dir)!

 docs_dir.symlinks << logo_symlink.id
-fs_factory.fs_dir.set(mut docs_dir)!
+docs_dir = fs_factory.fs_dir.set(docs_dir)!

 println('Symlinks created successfully')

@@ -252,7 +252,7 @@ fn main() {
 println('Appending content to API docs...')
 additional_content := '\n## Authentication\n\nUse Bearer token for authentication.\n'.bytes()
 mut additional_blob := fs_factory.fs_blob.new(data: additional_content)!
-fs_factory.fs_blob.set(mut additional_blob)!
+additional_blob = fs_factory.fs_blob.set(additional_blob)!
 fs_factory.fs_file.append_blob(docs_file.id, additional_blob.id)!

 // Demonstrate directory operations
@@ -265,11 +265,11 @@ fn main() {
 parent_id: root_dir.id
 description: 'Temporary directory'
 )!
-fs_factory.fs_dir.set(mut temp_dir)!
+temp_dir = fs_factory.fs_dir.set(temp_dir)!

 // Add to parent
 root_dir.directories << temp_dir.id
-fs_factory.fs_dir.set(mut root_dir)!
+root_dir = fs_factory.fs_dir.set(root_dir)!

 // Move temp directory under docs
 println('Moving temp directory under docs...')
examples/hero/herofs/import_export_example.vsh (20 changed lines, Normal file → Executable file)
@@ -19,7 +19,7 @@ fn main() {
 )!

 // Save the filesystem
-fs_factory.fs.set(mut my_fs)!
+my_fs = fs_factory.fs.set(my_fs)!
 println('Created filesystem: ${my_fs.name} with ID: ${my_fs.id}')

 // Create root directory
@@ -28,9 +28,9 @@ fn main() {
 fs_id: my_fs.id
 parent_id: 0 // Root has no parent
 )!
-fs_factory.fs_dir.set(mut root_dir)!
+root_dir = fs_factory.fs_dir.set(root_dir)!
 my_fs.root_dir_id = root_dir.id
-fs_factory.fs.set(mut my_fs)!
+my_fs = fs_factory.fs.set(my_fs)!

 // Get filesystem instance for operations
 mut fs := fs_factory.fs.get(my_fs.id)!
@@ -112,16 +112,14 @@ fn main() {

 // Export single file
 println('Exporting single file to: ${export_dir}/exported_hello.txt')
-fs.export('/imported_hello.txt', os.join_path(export_dir, 'exported_hello.txt'),
-herofs.ExportOptions{
+fs.export('/imported_hello.txt', os.join_path(export_dir, 'exported_hello.txt'), herofs.ExportOptions{
 overwrite: true
 preserve_meta: true
 })!

 // Export entire directory
 println('Exporting directory to: ${export_dir}/exported_files')
-fs.export('/imported_files', os.join_path(export_dir, 'exported_files'),
-herofs.ExportOptions{
+fs.export('/imported_files', os.join_path(export_dir, 'exported_files'), herofs.ExportOptions{
 recursive: true
 overwrite: true
 preserve_meta: true
@@ -173,9 +171,7 @@ fn main() {
 println('Testing import without overwrite (should fail)...')
 fs.import(test_overwrite_file, '/overwrite_test.txt', herofs.ImportOptions{
 overwrite: false
-}) or {
-println('✓ Import correctly failed when overwrite=false: ${err}')
-}
+}) or { println('✓ Import correctly failed when overwrite=false: ${err}') }

 // Update file content and import with overwrite
 os.write_file(test_overwrite_file, 'Updated content')!
@@ -196,9 +192,7 @@ fn main() {
 println('Testing export without overwrite (should fail)...')
 fs.export('/overwrite_test.txt', export_test_file, herofs.ExportOptions{
 overwrite: false
-}) or {
-println('✓ Export correctly failed when overwrite=false: ${err}')
-}
+}) or { println('✓ Export correctly failed when overwrite=false: ${err}') }

 // Export with overwrite
 fs.export('/overwrite_test.txt', export_test_file, herofs.ExportOptions{
@@ -1,6 +1,7 @@
 module herofs

 import freeflowuniverse.herolib.data.encoder
+import freeflowuniverse.herolib.data.ourtime
 import freeflowuniverse.herolib.hero.db

 // Fs represents a filesystem, is the top level container for files and directories and symlinks, blobs are used over filesystems
@@ -9,6 +10,7 @@ pub struct Fs {
 db.Base
 pub mut:
 name string
+group_id u32 // Associated group for permissions
 root_dir_id u32 // ID of root directory
 quota_bytes u64 // Storage quota in bytes
 used_bytes u64 // Current usage in bytes
@@ -29,6 +31,7 @@ pub fn (self Fs) type_name() string {

 pub fn (self Fs) dump(mut e encoder.Encoder) ! {
 e.add_string(self.name)
+e.add_u32(self.group_id)
 e.add_u32(self.root_dir_id)
 e.add_u64(self.quota_bytes)
 e.add_u64(self.used_bytes)
@@ -36,6 +39,7 @@ pub fn (self Fs) dump(mut e encoder.Encoder) ! {

 fn (mut self DBFs) load(mut o Fs, mut e encoder.Decoder) ! {
 o.name = e.get_string()!
+o.group_id = e.get_u32()!
 o.root_dir_id = e.get_u32()!
 o.quota_bytes = e.get_u64()!
 o.used_bytes = e.get_u64()!
@@ -46,11 +50,12 @@ pub struct FsArg {
 pub mut:
 name string @[required]
 description string
+group_id u32
 root_dir_id u32
 quota_bytes u64
 used_bytes u64
 tags []string
-comments []db.CommentArg
+messages []db.MessageArg
 }

 // get new filesystem, not from the DB
@@ -63,6 +68,9 @@ pub fn (mut self DBFs) new(args FsArg) !Fs {
 if args.description != '' {
 o.description = args.description
 }
+if args.group_id != 0 {
+o.group_id = args.group_id
+}
 if args.root_dir_id != 0 {
 o.root_dir_id = args.root_dir_id
 }
@@ -77,8 +85,8 @@ pub fn (mut self DBFs) new(args FsArg) !Fs {
 if args.tags.len > 0 {
 o.tags = self.db.tags_get(args.tags)!
 }
-if args.comments.len > 0 {
-o.comments = self.db.comments_get(args.comments)!
+if args.messages.len > 0 {
+o.messages = self.db.messages_get(args.messages)!
 }

 return o
@@ -124,8 +132,8 @@ pub fn (mut self DBFs) new_get_set(args_ FsArg) !Fs {
 o.tags = self.db.tags_get(args.tags)!
 changes = true
 }
-if args.comments.len > 0 {
-o.comments = self.db.comments_get(args.comments)!
+if args.messages.len > 0 {
+o.messages = self.db.messages_get(args.messages)!
 changes = true
 }

@@ -202,9 +210,25 @@ pub fn (mut self DBFs) get_by_name(name string) !Fs {
 return self.get(id_str.u32())!
 }

-// Note: Filesystem usage tracking methods are not implemented yet
-// These would be used for quota enforcement and storage monitoring
-// Future implementation should use separate Redis structures for performance
+// Increase used bytes counter
+pub fn (mut self DBFs) increase_usage(id u32, bytes u64) ! {
+mut fs := self.get(id)!
+fs.used_bytes += bytes
+fs.updated_at = ourtime.now().unix()
+self.set(fs)!
+}
+
+// Decrease used bytes counter
+pub fn (mut self DBFs) decrease_usage(id u32, bytes u64) ! {
+mut fs := self.get(id)!
+if fs.used_bytes >= bytes {
+fs.used_bytes -= bytes
+} else {
+fs.used_bytes = 0
+}
+fs.updated_at = ourtime.now().unix()
+self.set(fs)!
+}

 // Check if quota is exceeded
 pub fn (mut self DBFs) check_quota(id u32, additional_bytes u64) !bool {
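The hunk above replaces the usage-tracking placeholder comments with working `increase_usage` and `decrease_usage` methods. A minimal sketch of how they might be driven, reusing `fs_factory` and `my_fs` from the example scripts above; the boolean semantics of `check_quota` (true meaning the quota would be exceeded) are an assumption based on its doc comment:

```v
// Hedged sketch, not part of this commit: quota bookkeeping around a write.
content := 'hello herofs'.bytes()
// Assumption: check_quota returns true when adding these bytes would exceed the quota.
over_quota := fs_factory.fs.check_quota(my_fs.id, u64(content.len))!
if !over_quota {
	fs_factory.fs.increase_usage(my_fs.id, u64(content.len))!
}
// ...and when the same content is removed again:
fs_factory.fs.decrease_usage(my_fs.id, u64(content.len))!
```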
@@ -13,6 +13,9 @@ pub mut:
 hash string // blake192 hash of content
 data []u8 // Binary data (max 1MB)
 size_bytes int // Size in bytes
+created_at i64
+mime_type string // MIME type
+encoding string // Encoding type
 }

 pub struct DBFsBlob {
@@ -29,18 +32,27 @@ pub fn (self FsBlob) dump(mut e encoder.Encoder) ! {
 e.add_string(self.hash)
 e.add_list_u8(self.data)
 e.add_int(self.size_bytes)
+e.add_i64(self.created_at)
+e.add_string(self.mime_type)
+e.add_string(self.encoding)
 }

 fn (mut self DBFsBlob) load(mut o FsBlob, mut e encoder.Decoder) ! {
 o.hash = e.get_string()!
 o.data = e.get_list_u8()!
 o.size_bytes = e.get_int()!
+o.created_at = e.get_i64()!
+o.mime_type = e.get_string()!
+o.encoding = e.get_string()!
 }

 @[params]
 pub struct FsBlobArg {
 pub mut:
 data []u8 @[required]
+mime_type string
+encoding string
+created_at i64
 }

 pub fn (mut blob FsBlob) calculate_hash() {
@@ -57,6 +69,9 @@ pub fn (mut self DBFsBlob) new(args FsBlobArg) !FsBlob {
 mut o := FsBlob{
 data: args.data
 size_bytes: args.data.len
+created_at: if args.created_at != 0 { args.created_at } else { ourtime.now().unix() }
+mime_type: args.mime_type
+encoding: args.encoding
 }

 // Calculate hash
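With the new `FsBlobArg` fields above, a blob can carry its MIME type and encoding, and `created_at` falls back to `ourtime.now().unix()` when left at zero. A minimal sketch (the blob name and content are illustrative, and `fs_factory` is again assumed from the example scripts):

```v
// Hedged sketch, not part of this commit.
mut cfg_blob := fs_factory.fs_blob.new(
	data:      '{"debug": true}'.bytes()
	mime_type: 'application/json' // FsBlob.mime_type is a plain string
	encoding:  'utf-8'
	// created_at left at 0, so DBFsBlob.new fills in ourtime.now().unix()
)!
cfg_blob = fs_factory.fs_blob.set(cfg_blob)!
```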
@@ -33,7 +33,6 @@ fn test_basic() ! {

 println(root_dir)

-panic('sd')
 // Create test blob for membership
 test_data := 'This is test content for blob membership'.bytes()
 mut test_blob := fs_factory.fs_blob.new(data: test_data)!
@@ -83,7 +83,7 @@ pub mut:
 fs_id u32 @[required]
 parent_id u32
 tags []string
-comments []db.CommentArg
+messages []db.MessageArg
 directories []u32
 files []u32
 symlinks []u32
@@ -103,7 +103,7 @@ pub fn (mut self DBFsDir) new(args FsDirArg) !FsDir {

 // Set base fields
 o.tags = self.db.tags_get(args.tags)!
-o.comments = self.db.comments_get(args.comments)!
+o.messages = self.db.messages_get(args.messages)!
 o.created_at = ourtime.now().unix()
 o.updated_at = o.created_at

@@ -10,10 +10,12 @@ pub struct FsFile {
 db.Base
 pub mut:
 fs_id u32 // Associated filesystem
+directories []u32 // Directory IDs where this file exists
 blobs []u32 // IDs of file content blobs
 size_bytes u64
 mime_type MimeType
 checksum string // e.g., checksum of the file, needs to be calculated is blake 192
+accessed_at i64
 metadata map[string]string // Custom metadata
 }

@@ -29,6 +31,13 @@ pub fn (self FsFile) type_name() string {

 pub fn (self FsFile) dump(mut e encoder.Encoder) ! {
 e.add_u32(self.fs_id)
+
+// Handle directories
+e.add_u16(u16(self.directories.len))
+for dir_id in self.directories {
+e.add_u32(dir_id)
+}
+
 // Handle blobs
 e.add_u16(u16(self.blobs.len))
 for blob_id in self.blobs {
@@ -38,6 +47,7 @@ pub fn (self FsFile) dump(mut e encoder.Encoder) ! {
 e.add_u64(self.size_bytes)
 e.add_u8(u8(self.mime_type)) // ADD: Serialize mime_type as u8
 e.add_string(self.checksum)
+e.add_i64(self.accessed_at)

 // Handle metadata map
 e.add_u16(u16(self.metadata.len))
@@ -50,6 +60,13 @@ pub fn (self FsFile) dump(mut e encoder.Encoder) ! {
 fn (mut self DBFsFile) load(mut o FsFile, mut e encoder.Decoder) ! {
 o.fs_id = e.get_u32()!
+
+// Load directories
+directories_count := e.get_u16()!
+o.directories = []u32{cap: int(directories_count)}
+for _ in 0 .. directories_count {
+o.directories << e.get_u32()!
+}

 // Load blobs
 blobs_count := e.get_u16()!
 o.blobs = []u32{cap: int(blobs_count)}
@@ -60,6 +77,7 @@ fn (mut self DBFsFile) load(mut o FsFile, mut e encoder.Decoder) ! {
 o.size_bytes = e.get_u64()!
 o.mime_type = unsafe { MimeType(e.get_u8()!) } // ADD: Deserialize mime_type
 o.checksum = e.get_string()!
+o.accessed_at = e.get_i64()!

 // Load metadata map
 metadata_count := e.get_u16()!
@@ -77,13 +95,15 @@ pub mut:
 name string @[required]
 description string
 fs_id u32 @[required]
+directories []u32
 blobs []u32
 size_bytes u64
 mime_type MimeType // Changed from string to MimeType enum
 checksum string
+accessed_at i64
 metadata map[string]string
 tags []string
-comments []db.CommentArg
+messages []db.MessageArg
 }

 // get new file, not from the DB
@@ -114,17 +134,19 @@ pub fn (mut self DBFsFile) new(args FsFileArg) !FsFile {
 mut o := FsFile{
 name: args.name
 fs_id: args.fs_id
+directories: args.directories
 blobs: args.blobs
 size_bytes: size
 mime_type: args.mime_type // ADD: Set mime_type
 checksum: args.checksum
+accessed_at: if args.accessed_at != 0 { args.accessed_at } else { ourtime.now().unix() }
 metadata: args.metadata
 }

 // Set base fields
 o.description = args.description
 o.tags = self.db.tags_get(args.tags)!
-o.comments = self.db.comments_get(args.comments)!
+o.messages = self.db.messages_get(args.messages)!
 o.updated_at = ourtime.now().unix()

 return o
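`FsFileArg` now also takes the directory memberships and an access timestamp directly, with `accessed_at` defaulting to `ourtime.now().unix()` when left at zero. A minimal sketch reusing names from the sketches and examples above (`fs_factory`, `my_fs`, `root_dir` and `cfg_blob` are assumed to exist already):

```v
// Hedged sketch, not part of this commit.
mut notes_file := fs_factory.fs_file.new(
	name:        'notes.md'
	fs_id:       my_fs.id
	directories: [root_dir.id] // new field: directory IDs where this file exists
	blobs:       [cfg_blob.id]
	mime_type:   .md
	// accessed_at left at 0, so DBFsFile.new fills in ourtime.now().unix()
)!
notes_file = fs_factory.fs_file.set(notes_file)!
```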
@@ -57,7 +57,7 @@ pub mut:
 target_id u32 @[required]
 target_type SymlinkTargetType @[required]
 tags []string
-comments []db.CommentArg
+messages []db.MessageArg
 }

 // get new symlink, not from the DB
@@ -73,7 +73,7 @@ pub fn (mut self DBFsSymlink) new(args FsSymlinkArg) !FsSymlink {
 // Set base fields
 o.description = args.description
 o.tags = self.db.tags_get(args.tags)!
-o.comments = self.db.comments_get(args.comments)!
+o.messages = self.db.messages_get(args.messages)!
 o.updated_at = ourtime.now().unix()

 return o
@@ -92,21 +92,16 @@ pub fn (mut self Fs) find(start_path string, opts FindOptions) ![]FindResult {
 // - Symlinks: Symbolic links in the current directory (handled according to opts.follow_symlinks)
 // - Directories: Subdirectories of the current directory (recursed into according to opts.recursive)
 fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOptions, mut results []FindResult, current_depth int) ! {
-println('DEBUG: find_recursive called with dir_id=${dir_id}, current_path="${current_path}", current_depth=${current_depth}')
-
 // Check depth limit
 if opts.max_depth >= 0 && current_depth > opts.max_depth {
-println('DEBUG: Max depth reached, returning')
 return
 }

 // Get current directory info
 current_dir := self.factory.fs_dir.get(dir_id)!
-println('DEBUG: Got directory "${current_dir.name}" with ${current_dir.files.len} files, ${current_dir.directories.len} directories, ${current_dir.symlinks.len} symlinks')

 // Check if current directory matches search criteria
 if should_include(current_dir.name, opts.include_patterns, opts.exclude_patterns) {
-println('DEBUG: Including directory "${current_dir.name}" in results')
 results << FindResult{
 result_type: .directory
 id: dir_id
@@ -116,11 +111,9 @@ fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOption

 // Get files in current directory
 for file_id in current_dir.files {
-println('DEBUG: Processing file ID ${file_id}')
 file := self.factory.fs_file.get(file_id)!
 if should_include(file.name, opts.include_patterns, opts.exclude_patterns) {
 file_path := join_path(current_path, file.name)
-println('DEBUG: Including file "${file.name}" in results')
 results << FindResult{
 result_type: .file
 id: file.id
@@ -201,14 +194,12 @@ fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOption
 }

 for dir_id2 in current_dir.directories {
-println('DEBUG: Found child directory ID ${dir_id2} in directory ${dir_id}')
 subdir := self.factory.fs_dir.get(dir_id2)!
 subdir_path := join_path(current_path, subdir.name)

 // Include child directories in results if they match patterns
 if should_include(subdir.name, opts.include_patterns, opts.exclude_patterns) {
 if !opts.recursive {
-println('DEBUG: Including directory "${subdir.name}" in results')
 results << FindResult{
 result_type: .directory
 id: subdir.id
@@ -220,12 +211,9 @@ fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOption
 // Always recurse into directories when recursive is true, regardless of patterns
 // The patterns apply to what gets included in results, not to traversal
 if opts.recursive {
-println('DEBUG: Processing directory "${subdir.name}"')
 self.find_recursive(dir_id2, subdir_path, opts, mut results, current_depth + 1)!
 }
 }
-
-println('DEBUG: find_recursive finished with ${results.len} results')
 }

 // get_dir_by_absolute_path resolves an absolute path to a directory ID
@@ -241,15 +229,11 @@ fn (mut self Fs) find_recursive(dir_id u32, current_path string, opts FindOption
 // dir := tools.get_dir_by_absolute_path('/home/user/documents')!
 // ```
 pub fn (mut self Fs) get_dir_by_absolute_path(path string) !FsDir {
-println('DEBUG: get_dir_by_absolute_path called with path "${path}"')
 normalized_path_ := normalize_path(path)
-println('DEBUG: normalized_path_ = "${normalized_path_}"')

 // Handle root directory case
 if normalized_path_ == '/' {
-println('DEBUG: Handling root directory case')
 fs := self.factory.fs.get(self.id)!
-println('DEBUG: fs.root_dir_id = ${fs.root_dir_id}')
 return self.factory.fs_dir.get(fs.root_dir_id)!
 }
