2025-10-13 06:52:31 +04:00
parent d6979d7167
commit 73ff7e5534
12 changed files with 443 additions and 176 deletions

View File

@@ -1,5 +1,8 @@
# OSAL Core Module - Key Capabilities (incubaid.herolib.osal.core)
> **Note:** Platform detection functions (`platform()` and `cputype()`) have moved to `incubaid.herolib.core`.
> Use `import incubaid.herolib.core` and call `core.platform()!` and `core.cputype()!` instead.
```v
//example how to get started
@@ -68,8 +71,8 @@ This document describes the core functionalities of the Operating System Abstrac
* **`osal.process_kill_recursive(args: ProcessKillArgs) !`**: Kill a process and its children.
  * **Key Parameters**: `name` (string), `pid` (int).
* **`osal.whoami() !string`**: Return the current username.
* ~~**`osal.platform() !PlatformType`**: Identify the operating system.~~ → **Moved to `incubaid.herolib.core`**
* ~~**`osal.cputype() !CPUType`**: Identify the CPU architecture.~~ → **Moved to `incubaid.herolib.core`**
* **`osal.hostname() !string`**: Get system hostname.
* **`osal.sleep(duration int)`**: Pause execution for a specified duration.
* **`osal.download(args: DownloadArgs) !pathlib.Path`**: Download a file from a URL.
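A minimal sketch combining a few of the calls above; it assumes the `incubaid.herolib.osal.core` and `incubaid.herolib.core` import paths referenced in the note at the top and is illustrative rather than a verbatim API reference.

```v
import incubaid.herolib.core
import incubaid.herolib.osal.core as osal

user := osal.whoami()!   // current username
host := osal.hostname()! // system hostname
println('${user}@${host}')

// platform detection now lives in incubaid.herolib.core
if core.platform()! == .osx {
	println('running on macOS')
}
```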

View File

@@ -343,8 +343,6 @@ This document describes the core functionalities of the Operating System Abstrac
- **`osal.process_kill_recursive(args: ProcessKillArgs) !`**: Kill a process and its children.
  - **Key Parameters**: `name` (string), `pid` (int).
- **`osal.whoami() !string`**: Return the current username.
- **`osal.hostname() !string`**: Get system hostname.
- **`osal.sleep(duration int)`**: Pause execution for a specified duration.
- **`osal.download(args: DownloadArgs) !pathlib.Path`**: Download a file from a URL.
@@ -354,8 +352,31 @@ This document describes the core functionalities of the Operating System Abstrac
- **`osal.user_id_get(username string) !int`**: Get user ID.
- **`osal.user_add(args: UserArgs) !int`**: Add a user.
  - **Key Parameters**: `name` (string).
```

## 7. Platform Information

* **`core.platform() !PlatformType`**: Identify the operating system.
  * **Returns**: Platform type (osx, ubuntu, arch, etc.)
* **`core.cputype() !CPUType`**: Identify the CPU architecture.
  * **Returns**: CPU type (intel, arm, etc.)
### Usage Example
```v
import incubaid.herolib.core
platform := core.platform()! // Returns .osx, .ubuntu, etc.
cpu := core.cputype()! // Returns .intel, .arm, etc.
match platform {
.osx { println('Running on macOS') }
.ubuntu { println('Running on Ubuntu') }
.arch { println('Running on Arch Linux') }
else { println('Other platform') }
}
```
# OurTime Module

View File

@@ -92,16 +92,23 @@ pub fn cmd_git(mut cmdroot Command) {
		description: 'Get the path to a git repository. Use with cd $(hero git path <url>)'
	}

	mut cmd_check := Command{
		sort_flags:  true
		name:        'check'
		execute:     cmd_git_execute
		description: 'Check if a git repository is properly configured.'
	}

	mut cmd_lfs := Command{
		sort_flags:  true
		name:        'lfs'
		execute:     cmd_git_execute
		description: 'Make sure git repo has lfs enabled and system is ready to support lfs.'
	}

	mut allcmdsref := [&list_command, &clone_command, &push_command, &pull_command, &commit_command,
		&reload_command, &delete_command, &sourcetree_command, &editor_command, &exists_command,
		&cmd_check, &cmd_lfs]

	for mut c in allcmdsref {
		c.add_flag(Flag{
@@ -141,7 +148,8 @@ pub fn cmd_git(mut cmdroot Command) {
		})
	}

	mut urlcmds := [&clone_command, &pull_command, &push_command, &editor_command, &sourcetree_command,
		&cmd_check, &cmd_lfs]
	for mut c in urlcmds {
		c.add_flag(Flag{
			flag: .bool
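A minimal sketch of how a further subcommand could be wired following the same pattern as `cmd_check`/`cmd_lfs` above. It assumes V's standard `cli` module (whose `Command`/`Flag` fields match the ones used here); the command name and handler are illustrative only.

```v
import cli { Command, Flag }

// illustrative handler, standing in for cmd_git_execute
fn cmd_example_execute(cmd Command) ! {
	println('would run git subcommand: ${cmd.name}')
}

mut cmd_example := Command{
	sort_flags:  true
	name:        'example'
	execute:     cmd_example_execute
	description: 'Hypothetical subcommand registered like check/lfs.'
}
cmd_example.add_flag(Flag{
	flag:        .string
	required:    false
	name:        'url'
	abbrev:      'u'
	description: 'url of the git repository to operate on'
})
```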

View File

@@ -0,0 +1,195 @@
module gittools
import time
import incubaid.herolib.ui.console
import incubaid.herolib.core.texttools
import incubaid.herolib.core
import incubaid.herolib.osal.core as osal
import os
const binary_extensions = ['.pdf', '.docx', '.xlsx', '.pptx', '.zip', '.tar', '.gz', '.jpg', '.jpeg',
'.png', '.gif', '.bmp', '.tiff', '.ico', '.webp', '.mp4', '.mp3', '.avi', '.mov', '.wmv', '.flv',
'.mkv', '.wav', '.exe', '.dll', '.so', '.dylib', '.bin', '.dat', '.iso']
// check whether the repo contains binary or large files that should be tracked with Git LFS
pub fn (mut repo GitRepo) check() ! {
repo.init()!
// Get list of all files in the repository
files_result := repo.exec('ls-files')!
files := files_result.split('\n').filter(it.trim_space() != '')
// Flag binary or oversized files that should be stored in Git LFS
mut needs_lfs := false
mut large_files := []string{}
for file in files {
file_path := '${repo.path()}/${file}'
// Check if file exists (might be deleted)
if !os.exists(file_path) {
continue
}
// Check if file is in bin/ folder
if file.starts_with('bin/') {
needs_lfs = true
large_files << file
continue
}
// Check if file has binary extension
for ext in binary_extensions {
if file.to_lower().ends_with(ext) {
needs_lfs = true
large_files << file
break
}
}
// Check file size (files > 50MB should use LFS)
file_info := os.stat(file_path) or { continue }
size_mb := file_info.size / (1024 * 1024)
if size_mb > 50 {
needs_lfs = true
large_files << '${file} (${size_mb}MB)'
}
}
if needs_lfs {
console.print_header('Repository contains files that should use Git LFS:')
for file in large_files {
console.print_item(file)
}
if !repo.lfs()! {
console.print_stderr('Git LFS is not initialized. Run lfs_init() to set it up.')
}
} else {
console.print_green('No large binary files detected. Git LFS may not be needed.')
}
}
pub fn (mut repo GitRepo) lfs_init() ! {
repo.init()!
// Step 1: Check if git-lfs is installed on the system
if !osal.cmd_exists('git-lfs') {
console.print_header('Git LFS not found. Installing...')
// Install based on platform
platform := core.platform()!
match platform {
.osx {
// Use brew on macOS
if osal.cmd_exists('brew') {
osal.execute_stdout('brew install git-lfs')!
} else {
return error('Homebrew not found. Please install git-lfs manually: https://git-lfs.github.com')
}
}
.ubuntu {
osal.execute_stdout('sudo apt-get update && sudo apt-get install -y git-lfs')!
}
.arch {
osal.execute_stdout('sudo pacman -S git-lfs')!
}
else {
return error('Unsupported platform. Please install git-lfs manually: https://git-lfs.github.com')
}
}
console.print_green('Git LFS installed successfully.')
}
// Step 2: Initialize git-lfs in the repository
console.print_header('Initializing Git LFS in repository...')
repo.exec('lfs install --local')!
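	// Step 3: Track binary file extensions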
console.print_header('Tracking binary file extensions...')
for ext in binary_extensions {
repo.exec('lfs track "*${ext}"') or {
console.print_debug('Could not track ${ext}: ${err}')
continue
}
console.print_item('Tracking *${ext}')
}
// Step 4: Track files in bin/ folders
console.print_header('Tracking bin/ folders...')
repo.exec('lfs track "bin/**"') or { console.print_debug('Could not track bin/**: ${err}') }
// Step 5: Create and install pre-commit hook
mut check_script := '
#!/bin/bash
# Prevent committing large files without LFS
max_size=50 # MB
files=$(git diff --cached --name-only)
for file in \${files}; do
if [ -f "\${file}" ]; then
size=$(du -m "\${file}" | cut -f1)
if [ "\${size}" -gt "\${max_size}" ]; then
if ! git check-attr filter -- "\${file}" | grep -q "lfs"; then
echo "❌ ERROR: \${file} is \${size}MB and not tracked by LFS."
echo "Please run: git lfs track \\"\${file}\\""
exit 1
fi
fi
fi
done
'
check_script = texttools.dedent(check_script)
hook_path := '${repo.path()}/.git/hooks/pre-commit'
hooks_dir := '${repo.path()}/.git/hooks'
// Ensure hooks directory exists
osal.dir_ensure(hooks_dir)!
// Write the pre-commit hook
osal.file_write(hook_path, check_script)!
// Make the hook executable
osal.exec(cmd: 'chmod +x ${hook_path}')!
console.print_green('Pre-commit hook installed at ${hook_path}')
// Step 6: Add .gitattributes to the repository
gitattributes_path := '${repo.path()}/.gitattributes'
if os.exists(gitattributes_path) {
console.print_debug('.gitattributes already exists, LFS tracks have been added to it.')
} else {
console.print_debug('.gitattributes created with LFS tracking rules.')
}
// Step 7: Verify LFS is properly configured
if !repo.lfs()! {
return error('Git LFS initialization failed verification')
}
console.print_green('Git LFS initialized successfully for ${repo.name}')
console.print_header('Next steps:')
console.print_item('1. Review .gitattributes to ensure all desired files are tracked')
console.print_item('2. Run: git add .gitattributes')
console.print_item('3. Commit the changes: git commit -m "chore: Initialize Git LFS"')
}
// Check if repo has lfs enabled
pub fn (mut repo GitRepo) lfs() !bool {
repo.init()!
// Check that the LFS pre-commit hook exists
hook_path := '${repo.path()}/.git/hooks/pre-commit'
if !os.exists(hook_path) {
return false
}
// Check if git lfs is initialized locally
// This checks if .git/config contains lfs filter configuration
config_result := repo.exec('config --local --get filter.lfs.clean') or { return false }
return config_result.contains('git-lfs clean')
}
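A short usage sketch for the helpers above, assuming you already hold a `GitRepo` instance from this module; the `ensure_lfs` wrapper is hypothetical and only illustrates the intended call order.

```v
module gittools

// ensure_lfs (hypothetical helper): report large/binary files and
// initialize Git LFS when the repo is not yet configured for it.
pub fn (mut repo GitRepo) ensure_lfs() ! {
	// print files that look like they should be tracked with LFS
	repo.check()!
	// lfs() is true when the local LFS filter is configured and the hook exists
	if !repo.lfs()! {
		repo.lfs_init()!
	}
}
```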

View File

@@ -46,7 +46,7 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// load_internal performs the expensive git operations to refresh the repository state.
// It should only be called by status_update().
fn (mut repo GitRepo) load_internal() ! {
	console.print_item('load ${repo.print_key()}')
	repo.init()!
	repo.exec('fetch --all') or {

View File

@@ -3,6 +3,7 @@ module models_ledger
import incubaid.herolib.data.encoder
import incubaid.herolib.data.ourtime
import incubaid.herolib.hero.db
import json

// AccountStatus represents the status of an account
pub enum AccountStatus {
@@ -343,38 +344,43 @@ pub mut:
pub fn (mut self DBAccount) list(args AccountListArg) ![]Account {
	mut all_accounts := self.db.list[Account]()!.map(self.get(it)!)
	mut filtered_accounts := []Account{}

	for account in all_accounts {
		// Add filter logic based on account properties
		if args.filter != '' && !account.name.contains(args.filter)
			&& !account.description.contains(args.filter) {
			continue
		}
		// We could add more filters based on status if the Account struct has a status field
		filtered_accounts << account
	}

	// Apply pagination
	mut start := args.offset
	if start >= filtered_accounts.len {
		start = 0
	}
	mut limit := args.limit
	if limit > 100 {
		limit = 100
	}
	if start + limit > filtered_accounts.len {
		limit = filtered_accounts.len - start
	}
	if limit <= 0 {
		return []Account{}
	}
	return if filtered_accounts.len > 0 {
		filtered_accounts[start..start + limit]
	} else {
		[]Account{}
	}
}

pub fn (mut self DBAccount) list_all() ![]Account {
@@ -398,37 +404,37 @@ pub mut:
pub fn new_response(rpcid int, result string) Response {
	return Response{
		id:     rpcid
		result: result
	}
}

pub fn new_response_true(rpcid int) Response {
	return Response{
		id:     rpcid
		result: 'true'
	}
}

pub fn new_response_false(rpcid int) Response {
	return Response{
		id:     rpcid
		result: 'false'
	}
}

pub fn new_response_int(rpcid int, result int) Response {
	return Response{
		id:     rpcid
		result: result.str()
	}
}

pub fn new_error(rpcid int, code int, message string) Response {
	return Response{
		id:    rpcid
		error: ResponseError{
			code:    code
			message: message
		}
	}
@@ -479,8 +485,8 @@ pub fn account_handle(mut f ModelsFactory, rpcid int, servercontext map[string]s
		}
		else {
			return new_error(
				rpcid:   rpcid
				code:    32601
				message: 'Method ${method} not found on account'
			)
		}

View File

@@ -4,24 +4,26 @@ import json
fn test_account_crud() ! {
	mut db := setup_test_db()!
	mut account_db := DBAccount{
		db: db
	}

	// Create test
	mut account_arg := AccountArg{
		name:            'Test Account'
		description:     'Description for test account'
		owner_id:        1
		location_id:     2
		accountpolicies: []AccountPolicyArg{}
		assets:          []AccountAsset{}
		assetid:         3
		administrators:  [u32(1), 2, 3]
	}

	mut account := account_db.new(account_arg)!
	account = account_db.set(account)!
	assert account.id > 0

	// Get test
	retrieved := account_db.get(account.id)!
	assert retrieved.name == 'Test Account'
@@ -30,7 +32,7 @@ fn test_account_crud() ! {
	assert retrieved.location_id == 2
	assert retrieved.assetid == 3
	assert retrieved.administrators == [u32(1), 2, 3]

	// Update test
	account.name = 'Updated Account'
	account.description = 'Updated description'
@@ -38,7 +40,7 @@ fn test_account_crud() ! {
	retrieved = account_db.get(account.id)!
	assert retrieved.name == 'Updated Account'
	assert retrieved.description == 'Updated description'

	// Delete test
	success := account_db.delete(account.id)!
	assert success == true
@@ -48,41 +50,41 @@ fn test_account_crud() ! {
fn test_account_api_handler() ! {
	mut db := setup_test_db()!
	mut factory := new_models_factory(db)!

	// Test set method
	account_arg := AccountArg{
		name:           'API Test Account'
		description:    'API test description'
		owner_id:       10
		location_id:    20
		assetid:        30
		administrators: [u32(10), 20]
	}
	json_params := json.encode(account_arg)

	// Set
	response := account_handle(mut factory, 1, {}, UserRef{ id: 1 }, 'set', json_params)!
	id := response.result.int()
	assert id > 0

	// Exist
	response2 := account_handle(mut factory, 2, {}, UserRef{ id: 1 }, 'exist', id.str())!
	assert response2.result == 'true'

	// Get
	response3 := account_handle(mut factory, 3, {}, UserRef{ id: 1 }, 'get', id.str())!
	assert response3.result.contains('API Test Account')

	// List
	response4 := account_handle(mut factory, 4, {}, UserRef{ id: 1 }, 'list', '{}')!
	assert response4.result.contains('API Test Account')

	// Delete
	response5 := account_handle(mut factory, 5, {}, UserRef{ id: 1 }, 'delete', id.str())!
	assert response5.result == 'true'

	// Verify deletion
	response6 := account_handle(mut factory, 6, {}, UserRef{ id: 1 }, 'exist', id.str())!
	assert response6.result == 'false'
}

View File

@@ -145,65 +145,65 @@ pub fn (mut self DBAsset) get(id u32) !Asset {
@[params]
pub struct AssetListArg {
pub mut:
	filter        string
	asset_type    string
	is_frozen     bool = false
	filter_frozen bool = false
	issuer        u32
	filter_issuer bool = false
	limit         int = 20
	offset        int = 0
}

pub fn (mut self DBAsset) list(args AssetListArg) ![]Asset {
	mut all_assets := self.db.list[Asset]()!.map(self.get(it)!)
	mut filtered_assets := []Asset{}

	for asset in all_assets {
		// Filter by text in name or description
		if args.filter != '' && !asset.name.contains(args.filter)
			&& !asset.description.contains(args.filter) && !asset.address.contains(args.filter) {
			continue
		}
		// Filter by asset_type
		if args.asset_type != '' && asset.asset_type != args.asset_type {
			continue
		}
		// Filter by is_frozen
		if args.filter_frozen && asset.is_frozen != args.is_frozen {
			continue
		}
		// Filter by issuer
		if args.filter_issuer && asset.issuer != args.issuer {
			continue
		}
		filtered_assets << asset
	}

	// Apply pagination
	mut start := args.offset
	if start >= filtered_assets.len {
		start = 0
	}
	mut limit := args.limit
	if limit > 100 {
		limit = 100
	}
	if start + limit > filtered_assets.len {
		limit = filtered_assets.len - start
	}
	if limit <= 0 {
		return []Asset{}
	}
	return if filtered_assets.len > 0 { filtered_assets[start..start + limit] } else { []Asset{} }
}

pub fn (mut self DBAsset) list_all() ![]Asset {
@@ -250,8 +250,8 @@ pub fn asset_handle(mut f ModelsFactory, rpcid int, servercontext map[string]str
		}
		else {
			return new_error(
				rpcid:   rpcid
				code:    32601
				message: 'Method ${method} not found on asset'
			)
		}

View File

@@ -4,27 +4,32 @@ import json
fn test_asset_crud() ! {
	mut db := setup_test_db()!
	mut asset_db := DBAsset{
		db: db
	}

	// Create test
	mut asset_arg := AssetArg{
		name:           'TFT Token'
		description:    'ThreeFold Token'
		address:        'TFT123456789'
		asset_type:     'token'
		issuer:         1
		supply:         1000000.0
		decimals:       8
		is_frozen:      false
		metadata:       {
			'symbol':     'TFT'
			'blockchain': 'Stellar'
		}
		administrators: [u32(1), 2]
		min_signatures: 1
	}

	mut asset := asset_db.new(asset_arg)!
	asset = asset_db.set(asset)!
	assert asset.id > 0

	// Get test
	retrieved := asset_db.get(asset.id)!
	assert retrieved.name == 'TFT Token'
@@ -35,10 +40,13 @@ fn test_asset_crud() ! {
	assert retrieved.supply == 1000000.0
	assert retrieved.decimals == 8
	assert retrieved.is_frozen == false
	assert retrieved.metadata == {
		'symbol':     'TFT'
		'blockchain': 'Stellar'
	}
	assert retrieved.administrators == [u32(1), 2]
	assert retrieved.min_signatures == 1

	// Update test
	asset.name = 'Updated TFT Token'
	asset.supply = 2000000.0
@@ -48,7 +56,7 @@ fn test_asset_crud() ! {
	assert retrieved.name == 'Updated TFT Token'
	assert retrieved.supply == 2000000.0
	assert retrieved.is_frozen == true

	// Delete test
	success := asset_db.delete(asset.id)!
	assert success == true
@@ -57,95 +65,97 @@ fn test_asset_crud() ! {
fn test_asset_list_filtering() ! {
	mut db := setup_test_db()!
	mut asset_db := DBAsset{
		db: db
	}

	// Create multiple test assets
	for i in 0 .. 5 {
		mut asset_arg := AssetArg{
			name:        'Token ${i}'
			description: 'Description ${i}'
			address:     'ADDR${i}'
			asset_type:  if i < 3 { 'token' } else { 'nft' }
			issuer:      if i % 2 == 0 { u32(1) } else { u32(2) }
			supply:      1000.0 * f64(i + 1)
			decimals:    8
			is_frozen:   i >= 3
		}
		mut asset := asset_db.new(asset_arg)!
		asset_db.set(asset)!
	}

	// Test filter by text
	filtered := asset_db.list(AssetListArg{ filter: 'Token 1' })!
	assert filtered.len == 1
	assert filtered[0].name == 'Token 1'

	// Test filter by asset_type
	tokens := asset_db.list(AssetListArg{ asset_type: 'token' })!
	assert tokens.len == 3

	// Test filter by frozen status
	frozen := asset_db.list(AssetListArg{ is_frozen: true, filter_frozen: true })!
	assert frozen.len == 2

	// Test filter by issuer
	issuer1 := asset_db.list(AssetListArg{ issuer: 1, filter_issuer: true })!
	assert issuer1.len == 3

	// Test pagination
	page1 := asset_db.list(AssetListArg{ limit: 2, offset: 0 })!
	assert page1.len == 2
	page2 := asset_db.list(AssetListArg{ limit: 2, offset: 2 })!
	assert page2.len == 2
	page3 := asset_db.list(AssetListArg{ limit: 2, offset: 4 })!
	assert page3.len == 1
}

fn test_asset_api_handler() ! {
	mut db := setup_test_db()!
	mut factory := new_models_factory(db)!

	// Test set method
	asset_arg := AssetArg{
		name:        'API Test Asset'
		description: 'API test description'
		address:     'TEST123'
		asset_type:  'token'
		issuer:      1
		supply:      1000.0
		decimals:    8
	}
	json_params := json.encode(asset_arg)

	// Set
	response := asset_handle(mut factory, 1, {}, UserRef{ id: 1 }, 'set', json_params)!
	id := response.result.int()
	assert id > 0

	// Exist
	response2 := asset_handle(mut factory, 2, {}, UserRef{ id: 1 }, 'exist', id.str())!
	assert response2.result == 'true'

	// Get
	response3 := asset_handle(mut factory, 3, {}, UserRef{ id: 1 }, 'get', id.str())!
	assert response3.result.contains('API Test Asset')

	// List
	response4 := asset_handle(mut factory, 4, {}, UserRef{ id: 1 }, 'list', '{}')!
	assert response4.result.contains('API Test Asset')

	// List with filters
	filter_params := json.encode(AssetListArg{ asset_type: 'token' })
	response5 := asset_handle(mut factory, 5, {}, UserRef{ id: 1 }, 'list', filter_params)!
	assert response5.result.contains('API Test Asset')

	// Delete
	response6 := asset_handle(mut factory, 6, {}, UserRef{ id: 1 }, 'delete', id.str())!
	assert response6.result == 'true'

	// Verify deletion
	response7 := asset_handle(mut factory, 7, {}, UserRef{ id: 1 }, 'exist', id.str())!
	assert response7.result == 'false'
}

View File

@@ -1,5 +1,5 @@
module models_ledger

fn test_setup_db_only() ! {
	mut store := setup_test_db()!
}

View File

@@ -5,36 +5,58 @@ import json
pub struct ModelsFactory {
pub mut:
	db          &db.DB
	account     &DBAccount
	asset       &DBAsset
	dnszone     &DBDNSZone
	group       &DBGroup
	member      &DBMember
	notary      &DBNotary
	signature   &DBSignature
	transaction &DBTransaction
	user        &DBUser
	userkvs     &DBUserKVS
	userkvsitem &DBUserKVSItem
}

pub fn new_models_factory(mut database db.DB) !&ModelsFactory {
	mut factory := &ModelsFactory{
		db: database
	}
	factory.account = &DBAccount{
		db: database
	}
	factory.asset = &DBAsset{
		db: database
	}
	factory.dnszone = &DBDNSZone{
		db: database
	}
	factory.group = &DBGroup{
		db: database
	}
	factory.member = &DBMember{
		db: database
	}
	factory.notary = &DBNotary{
		db: database
	}
	factory.signature = &DBSignature{
		db: database
	}
	factory.transaction = &DBTransaction{
		db: database
	}
	factory.user = &DBUser{
		db: database
	}
	factory.userkvs = &DBUserKVS{
		db: database
	}
	factory.userkvsitem = &DBUserKVSItem{
		db: database
	}
	return factory
}

View File

@@ -3,5 +3,5 @@ module models_ledger
import incubaid.herolib.hero.db

fn setup_test_db() !db.DB {
	return db.new(path: '/tmp/testdb')!
}