@@ -1,5 +1,8 @@
# OSAL Core Module - Key Capabilities (incubaid.herolib.osal.core)

> **Note:** Platform detection functions (`platform()` and `cputype()`) have moved to `incubaid.herolib.core`.
> Use `import incubaid.herolib.core` and call `core.platform()!` and `core.cputype()!` instead.
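As a quick orientation, here is a minimal sketch of the new call sites described in the note above (the surrounding program setup is assumed):

```v
import incubaid.herolib.core

// platform detection now lives in herolib.core rather than osal
platform := core.platform()!
cpu := core.cputype()!
println('running on ${platform} (${cpu})')
```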

```v
//example how to get started

@@ -68,8 +71,8 @@ This document describes the core functionalities of the Operating System Abstrac
* **`osal.process_kill_recursive(args: ProcessKillArgs) !`**: Kill a process and its children.
  * **Key Parameters**: `name` (string), `pid` (int).
* **`osal.whoami() !string`**: Return the current username.
* **`osal.platform() !PlatformType`**: Identify the operating system.
* **`osal.cputype() !CPUType`**: Identify the CPU architecture.
* ~~**`osal.platform() !PlatformType`**: Identify the operating system.~~ → **Moved to `incubaid.herolib.core`**
* ~~**`osal.cputype() !CPUType`**: Identify the CPU architecture.~~ → **Moved to `incubaid.herolib.core`**
* **`osal.hostname() !string`**: Get system hostname.
* **`osal.sleep(duration int)`**: Pause execution for a specified duration.
* **`osal.download(args: DownloadArgs) !pathlib.Path`**: Download a file from a URL.
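The sketch below strings a few of these calls together (top-level script style, matching the usage examples later in this document; `'myservice'` is a made-up process name and the `name` argument is taken from the Key Parameters listed above):

```v
import incubaid.herolib.osal.core as osal

user := osal.whoami()!
host := osal.hostname()!
println('running as ${user} on ${host}')

// kill a process tree by name (key parameter per the list above)
osal.process_kill_recursive(name: 'myservice')!

osal.sleep(2) // pause briefly
```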
@@ -343,8 +343,6 @@ This document describes the core functionalities of the Operating System Abstrac
- **`osal.process_kill_recursive(args: ProcessKillArgs) !`**: Kill a process and its children.
  - **Key Parameters**: `name` (string), `pid` (int).
- **`osal.whoami() !string`**: Return the current username.
- **`osal.platform() !PlatformType`**: Identify the operating system.
- **`osal.cputype() !CPUType`**: Identify the CPU architecture.
- **`osal.hostname() !string`**: Get system hostname.
- **`osal.sleep(duration int)`**: Pause execution for a specified duration.
- **`osal.download(args: DownloadArgs) !pathlib.Path`**: Download a file from a URL.
@@ -355,7 +353,30 @@ This document describes the core functionalities of the Operating System Abstrac
- **`osal.user_add(args: UserArgs) !int`**: Add a user.
  - **Key Parameters**: `name` (string).

```
```

## 7. Platform Information

* **`core.platform() !PlatformType`**: Identify the operating system.
  * **Returns**: Platform type (osx, ubuntu, arch, etc.)
* **`core.cputype() !CPUType`**: Identify the CPU architecture.
  * **Returns**: CPU type (intel, arm, etc.)

### Usage Example

```v
import incubaid.herolib.core

platform := core.platform()! // Returns .osx, .ubuntu, etc.
cpu := core.cputype()! // Returns .intel, .arm, etc.

match platform {
    .osx { println('Running on macOS') }
    .ubuntu { println('Running on Ubuntu') }
    .arch { println('Running on Arch Linux') }
    else { println('Other platform') }
}
```
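The same pattern applies to the CPU architecture; a brief sketch (the variant names come from the comments above):

```v
import incubaid.herolib.core

cpu := core.cputype()!
match cpu {
    .intel { println('Running on an Intel/x86 CPU') }
    .arm { println('Running on an ARM CPU') }
    else { println('Other CPU type') }
}
```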

# OurTime Module
@@ -92,16 +92,23 @@ pub fn cmd_git(mut cmdroot Command) {
description: 'Get the path to a git repository. Use with cd $(hero git path <url>)'
}

cmd_path.add_flag(Flag{
flag: .string
required: false
name: 'url'
abbrev: 'u'
description: 'url for git path operation, so we know which repo path to get'
})
mut cmd_check := Command{
sort_flags: true
name: 'check'
execute: cmd_git_execute
description: 'Check if a git repository is properly configured.'
}

mut cmd_lfs := Command{
sort_flags: true
name: 'lfs'
execute: cmd_git_execute
description: 'Make sure git repo has lfs enabled and system is ready to support lfs.'
}

mut allcmdsref := [&list_command, &clone_command, &push_command, &pull_command, &commit_command,
&reload_command, &delete_command, &sourcetree_command, &editor_command, &exists_command]
&reload_command, &delete_command, &sourcetree_command, &editor_command, &exists_command,
&cmd_check, &cmd_lfs]

for mut c in allcmdsref {
c.add_flag(Flag{
@@ -141,7 +148,8 @@ pub fn cmd_git(mut cmdroot Command) {
})
}

mut urlcmds := [&clone_command, &pull_command, &push_command, &editor_command, &sourcetree_command]
mut urlcmds := [&clone_command, &pull_command, &push_command, &editor_command, &sourcetree_command,
&cmd_check, &cmd_lfs]
for mut c in urlcmds {
c.add_flag(Flag{
flag: .bool
lib/develop/gittools/repository_lfs.v (Normal file, 195 lines)
@@ -0,0 +1,195 @@
module gittools

import time
import incubaid.herolib.ui.console
import incubaid.herolib.core.texttools
import incubaid.herolib.core
import incubaid.herolib.osal.core as osal
import os

const binary_extensions = ['.pdf', '.docx', '.xlsx', '.pptx', '.zip', '.tar', '.gz', '.jpg', '.jpeg',
    '.png', '.gif', '.bmp', '.tiff', '.ico', '.webp', '.mp4', '.mp3', '.avi', '.mov', '.wmv', '.flv',
    '.mkv', '.wav', '.exe', '.dll', '.so', '.dylib', '.bin', '.dat', '.iso']

// check whether the repo contains binary or large files that should use LFS, and whether LFS is enabled
pub fn (mut repo GitRepo) check() ! {
    repo.init()!

    // Get list of all files in the repository
    files_result := repo.exec('ls-files')!
    files := files_result.split('\n').filter(it.trim_space() != '')

    // Binary file extensions that should use LFS

    mut needs_lfs := false
    mut large_files := []string{}

    for file in files {
        file_path := '${repo.path()}/${file}'

        // Check if file exists (might be deleted)
        if !os.exists(file_path) {
            continue
        }

        // Check if file is in bin/ folder
        if file.starts_with('bin/') {
            needs_lfs = true
            large_files << file
            continue
        }

        // Check if file has binary extension
        for ext in binary_extensions {
            if file.to_lower().ends_with(ext) {
                needs_lfs = true
                large_files << file
                break
            }
        }

        // Check file size (files > 50MB should use LFS)
        file_info := os.stat(file_path) or { continue }
        size_mb := file_info.size / (1024 * 1024)
        if size_mb > 50 {
            needs_lfs = true
            large_files << '${file} (${size_mb}MB)'
        }
    }

    if needs_lfs {
        console.print_header('Repository contains files that should use Git LFS:')
        for file in large_files {
            console.print_item(file)
        }

        if !repo.lfs()! {
            console.print_stderr('Git LFS is not initialized. Run lfs_init() to set it up.')
        }
    } else {
        console.print_green('No large binary files detected. Git LFS may not be needed.')
    }
}

pub fn (mut repo GitRepo) lfs_init() ! {
    repo.init()!

    // Step 1: Check if git-lfs is installed on the system
    if !osal.cmd_exists('git-lfs') {
        console.print_header('Git LFS not found. Installing...')

        // Install based on platform
        platform := core.platform()!
        match platform {
            .osx {
                // Use brew on macOS
                if osal.cmd_exists('brew') {
                    osal.execute_stdout('brew install git-lfs')!
                } else {
                    return error('Homebrew not found. Please install git-lfs manually: https://git-lfs.github.com')
                }
            }
            .ubuntu {
                osal.execute_stdout('sudo apt-get update && sudo apt-get install -y git-lfs')!
            }
            .arch {
                osal.execute_stdout('sudo pacman -S git-lfs')!
            }
            else {
                return error('Unsupported platform. Please install git-lfs manually: https://git-lfs.github.com')
            }
        }

        console.print_green('Git LFS installed successfully.')
    }

    // Step 2: Initialize git-lfs in the repository
    console.print_header('Initializing Git LFS in repository...')
    repo.exec('lfs install --local')!

    // Step 3: Track binary file extensions
    console.print_header('Tracking binary file extensions...')
    for ext in binary_extensions {
        repo.exec('lfs track "*${ext}"') or {
            console.print_debug('Could not track ${ext}: ${err}')
            continue
        }
        console.print_item('Tracking *${ext}')
    }

    // Step 4: Track files in bin/ folders
    console.print_header('Tracking bin/ folders...')
    repo.exec('lfs track "bin/**"') or { console.print_debug('Could not track bin/**: ${err}') }

    // Step 5: Create and install pre-commit hook
    mut check_script := '
#!/bin/bash
# Prevent committing large files without LFS

max_size=50 # MB
files=$(git diff --cached --name-only)

for file in \${files}; do
if [ -f "\${file}" ]; then
size=$(du -m "\${file}" | cut -f1)
if [ "\${size}" -gt "\${max_size}" ]; then
if ! git check-attr filter -- "\${file}" | grep -q "lfs"; then
echo "❌ ERROR: \${file} is \${size}MB and not tracked by LFS."
echo "Please run: git lfs track \\"\${file}\\""
exit 1
fi
fi
fi
done
'
    check_script = texttools.dedent(check_script)

    hook_path := '${repo.path()}/.git/hooks/pre-commit'
    hooks_dir := '${repo.path()}/.git/hooks'

    // Ensure hooks directory exists
    osal.dir_ensure(hooks_dir)!

    // Write the pre-commit hook
    osal.file_write(hook_path, check_script)!

    // Make the hook executable
    osal.exec(cmd: 'chmod +x ${hook_path}')!

    console.print_green('Pre-commit hook installed at ${hook_path}')

    // Step 6: Add .gitattributes to the repository
    gitattributes_path := '${repo.path()}/.gitattributes'
    if os.exists(gitattributes_path) {
        console.print_debug('.gitattributes already exists, LFS tracks have been added to it.')
    } else {
        console.print_debug('.gitattributes created with LFS tracking rules.')
    }

    // Step 7: Verify LFS is properly configured
    if !repo.lfs()! {
        return error('Git LFS initialization failed verification')
    }

    console.print_green('Git LFS initialized successfully for ${repo.name}')
    console.print_header('Next steps:')
    console.print_item('1. Review .gitattributes to ensure all desired files are tracked')
    console.print_item('2. Run: git add .gitattributes')
    console.print_item('3. Commit the changes: git commit -m "chore: Initialize Git LFS"')
}

// Check if repo has lfs enabled
pub fn (mut repo GitRepo) lfs() !bool {
    repo.init()!

    // Check if .git/hooks/pre-commit exists and contains LFS check
    hook_path := '${repo.path()}/.git/hooks/pre-commit'
    if !os.exists(hook_path) {
        return false
    }

    // Check if git lfs is initialized locally
    // This checks if .git/config contains lfs filter configuration
    config_result := repo.exec('config --local --get filter.lfs.clean') or { return false }

    return config_result.contains('git-lfs clean')
}
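For orientation, a hedged sketch of how these helpers are intended to be called from V code. How you obtain the `GitRepo` instance depends on your gittools setup; `get_my_repo()` below is a hypothetical stand-in for that lookup, and the import path is inferred from the file location:

```v
import incubaid.herolib.develop.gittools

mut repo := get_my_repo()! // hypothetical helper standing in for your own repo lookup

repo.check()! // report binary/large files that should be on LFS
if !repo.lfs()! {
    repo.lfs_init()! // install git-lfs, track extensions, add the pre-commit hook
}
```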
@@ -46,7 +46,7 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// load_internal performs the expensive git operations to refresh the repository state.
// It should only be called by status_update().
fn (mut repo GitRepo) load_internal() ! {
console.print_debug('load ${repo.print_key()}')
console.print_item('load ${repo.print_key()}')
repo.init()!

repo.exec('fetch --all') or {
@@ -3,6 +3,7 @@ module models_ledger
import incubaid.herolib.data.encoder
import incubaid.herolib.data.ourtime
import incubaid.herolib.hero.db
import json

// AccountStatus represents the status of an account
pub enum AccountStatus {
@@ -346,7 +347,8 @@ pub fn (mut self DBAccount) list(args AccountListArg) ![]Account {

for account in all_accounts {
// Add filter logic based on account properties
if args.filter != '' && !account.name.contains(args.filter) && !account.description.contains(args.filter) {
if args.filter != '' && !account.name.contains(args.filter)
&& !account.description.contains(args.filter) {
continue
}
@@ -374,7 +376,11 @@ pub fn (mut self DBAccount) list(args AccountListArg) ![]Account {
return []Account{}
}

return if filtered_accounts.len > 0 { filtered_accounts[start..start+limit] } else { []Account{} }
return if filtered_accounts.len > 0 {
filtered_accounts[start..start + limit]
} else {
[]Account{}
}
}

pub fn (mut self DBAccount) list_all() ![]Account {
@@ -4,7 +4,9 @@ import json

fn test_account_crud() ! {
mut db := setup_test_db()!
mut account_db := DBAccount{db: db}
mut account_db := DBAccount{
db: db
}

// Create test
mut account_arg := AccountArg{
@@ -62,27 +64,27 @@ fn test_account_api_handler() ! {
json_params := json.encode(account_arg)

// Set
response := account_handle(mut factory, 1, {}, UserRef{id: 1}, 'set', json_params)!
response := account_handle(mut factory, 1, {}, UserRef{ id: 1 }, 'set', json_params)!
id := response.result.int()
assert id > 0

// Exist
response2 := account_handle(mut factory, 2, {}, UserRef{id: 1}, 'exist', id.str())!
response2 := account_handle(mut factory, 2, {}, UserRef{ id: 1 }, 'exist', id.str())!
assert response2.result == 'true'

// Get
response3 := account_handle(mut factory, 3, {}, UserRef{id: 1}, 'get', id.str())!
response3 := account_handle(mut factory, 3, {}, UserRef{ id: 1 }, 'get', id.str())!
assert response3.result.contains('API Test Account')

// List
response4 := account_handle(mut factory, 4, {}, UserRef{id: 1}, 'list', '{}')!
response4 := account_handle(mut factory, 4, {}, UserRef{ id: 1 }, 'list', '{}')!
assert response4.result.contains('API Test Account')

// Delete
response5 := account_handle(mut factory, 5, {}, UserRef{id: 1}, 'delete', id.str())!
response5 := account_handle(mut factory, 5, {}, UserRef{ id: 1 }, 'delete', id.str())!
assert response5.result == 'true'

// Verify deletion
response6 := account_handle(mut factory, 6, {}, UserRef{id: 1}, 'exist', id.str())!
response6 := account_handle(mut factory, 6, {}, UserRef{ id: 1 }, 'exist', id.str())!
assert response6.result == 'false'
}
@@ -161,8 +161,8 @@ pub fn (mut self DBAsset) list(args AssetListArg) ![]Asset {

for asset in all_assets {
// Filter by text in name or description
if args.filter != '' && !asset.name.contains(args.filter) &&
!asset.description.contains(args.filter) && !asset.address.contains(args.filter) {
if args.filter != '' && !asset.name.contains(args.filter)
&& !asset.description.contains(args.filter) && !asset.address.contains(args.filter) {
continue
}
@@ -203,7 +203,7 @@ pub fn (mut self DBAsset) list(args AssetListArg) ![]Asset {
return []Asset{}
}

return if filtered_assets.len > 0 { filtered_assets[start..start+limit] } else { []Asset{} }
return if filtered_assets.len > 0 { filtered_assets[start..start + limit] } else { []Asset{} }
}

pub fn (mut self DBAsset) list_all() ![]Asset {
@@ -4,7 +4,9 @@ import json

fn test_asset_crud() ! {
mut db := setup_test_db()!
mut asset_db := DBAsset{db: db}
mut asset_db := DBAsset{
db: db
}

// Create test
mut asset_arg := AssetArg{
@@ -16,7 +18,10 @@ fn test_asset_crud() ! {
supply: 1000000.0
decimals: 8
is_frozen: false
metadata: {'symbol': 'TFT', 'blockchain': 'Stellar'}
metadata: {
'symbol': 'TFT'
'blockchain': 'Stellar'
}
administrators: [u32(1), 2]
min_signatures: 1
}
@@ -35,7 +40,10 @@ fn test_asset_crud() ! {
assert retrieved.supply == 1000000.0
assert retrieved.decimals == 8
assert retrieved.is_frozen == false
assert retrieved.metadata == {'symbol': 'TFT', 'blockchain': 'Stellar'}
assert retrieved.metadata == {
'symbol': 'TFT'
'blockchain': 'Stellar'
}
assert retrieved.administrators == [u32(1), 2]
assert retrieved.min_signatures == 1
@@ -57,17 +65,19 @@ fn test_asset_crud() ! {

fn test_asset_list_filtering() ! {
mut db := setup_test_db()!
mut asset_db := DBAsset{db: db}
mut asset_db := DBAsset{
db: db
}

// Create multiple test assets
for i in 0..5 {
for i in 0 .. 5 {
mut asset_arg := AssetArg{
name: 'Token ${i}'
description: 'Description ${i}'
address: 'ADDR${i}'
asset_type: if i < 3 { 'token' } else { 'nft' }
issuer: if i % 2 == 0 { u32(1) } else { u32(2) }
supply: 1000.0 * f64(i+1)
supply: 1000.0 * f64(i + 1)
decimals: 8
is_frozen: i >= 3
}
@@ -77,28 +87,28 @@ fn test_asset_list_filtering() ! {
}

// Test filter by text
filtered := asset_db.list(AssetListArg{filter: 'Token 1'})!
filtered := asset_db.list(AssetListArg{ filter: 'Token 1' })!
assert filtered.len == 1
assert filtered[0].name == 'Token 1'

// Test filter by asset_type
tokens := asset_db.list(AssetListArg{asset_type: 'token'})!
tokens := asset_db.list(AssetListArg{ asset_type: 'token' })!
assert tokens.len == 3

// Test filter by frozen status
frozen := asset_db.list(AssetListArg{is_frozen: true, filter_frozen: true})!
frozen := asset_db.list(AssetListArg{ is_frozen: true, filter_frozen: true })!
assert frozen.len == 2

// Test filter by issuer
issuer1 := asset_db.list(AssetListArg{issuer: 1, filter_issuer: true})!
issuer1 := asset_db.list(AssetListArg{ issuer: 1, filter_issuer: true })!
assert issuer1.len == 3

// Test pagination
page1 := asset_db.list(AssetListArg{limit: 2, offset: 0})!
page1 := asset_db.list(AssetListArg{ limit: 2, offset: 0 })!
assert page1.len == 2
page2 := asset_db.list(AssetListArg{limit: 2, offset: 2})!
page2 := asset_db.list(AssetListArg{ limit: 2, offset: 2 })!
assert page2.len == 2
page3 := asset_db.list(AssetListArg{limit: 2, offset: 4})!
page3 := asset_db.list(AssetListArg{ limit: 2, offset: 4 })!
assert page3.len == 1
}
@@ -120,32 +130,32 @@ fn test_asset_api_handler() ! {
json_params := json.encode(asset_arg)

// Set
response := asset_handle(mut factory, 1, {}, UserRef{id: 1}, 'set', json_params)!
response := asset_handle(mut factory, 1, {}, UserRef{ id: 1 }, 'set', json_params)!
id := response.result.int()
assert id > 0

// Exist
response2 := asset_handle(mut factory, 2, {}, UserRef{id: 1}, 'exist', id.str())!
response2 := asset_handle(mut factory, 2, {}, UserRef{ id: 1 }, 'exist', id.str())!
assert response2.result == 'true'

// Get
response3 := asset_handle(mut factory, 3, {}, UserRef{id: 1}, 'get', id.str())!
response3 := asset_handle(mut factory, 3, {}, UserRef{ id: 1 }, 'get', id.str())!
assert response3.result.contains('API Test Asset')

// List
response4 := asset_handle(mut factory, 4, {}, UserRef{id: 1}, 'list', '{}')!
response4 := asset_handle(mut factory, 4, {}, UserRef{ id: 1 }, 'list', '{}')!
assert response4.result.contains('API Test Asset')

// List with filters
filter_params := json.encode(AssetListArg{asset_type: 'token'})
response5 := asset_handle(mut factory, 5, {}, UserRef{id: 1}, 'list', filter_params)!
filter_params := json.encode(AssetListArg{ asset_type: 'token' })
response5 := asset_handle(mut factory, 5, {}, UserRef{ id: 1 }, 'list', filter_params)!
assert response5.result.contains('API Test Asset')

// Delete
response6 := asset_handle(mut factory, 6, {}, UserRef{id: 1}, 'delete', id.str())!
response6 := asset_handle(mut factory, 6, {}, UserRef{ id: 1 }, 'delete', id.str())!
assert response6.result == 'true'

// Verify deletion
response7 := asset_handle(mut factory, 7, {}, UserRef{id: 1}, 'exist', id.str())!
response7 := asset_handle(mut factory, 7, {}, UserRef{ id: 1 }, 'exist', id.str())!
assert response7.result == 'false'
}
@@ -24,17 +24,39 @@ pub fn new_models_factory(mut database db.DB) !&ModelsFactory {
db: database
}

factory.account = &DBAccount{db: database}
factory.asset = &DBAsset{db: database}
factory.dnszone = &DBDNSZone{db: database}
factory.group = &DBGroup{db: database}
factory.member = &DBMember{db: database}
factory.notary = &DBNotary{db: database}
factory.signature = &DBSignature{db: database}
factory.transaction = &DBTransaction{db: database}
factory.user = &DBUser{db: database}
factory.userkvs = &DBUserKVS{db: database}
factory.userkvsitem = &DBUserKVSItem{db: database}
factory.account = &DBAccount{
db: database
}
factory.asset = &DBAsset{
db: database
}
factory.dnszone = &DBDNSZone{
db: database
}
factory.group = &DBGroup{
db: database
}
factory.member = &DBMember{
db: database
}
factory.notary = &DBNotary{
db: database
}
factory.signature = &DBSignature{
db: database
}
factory.transaction = &DBTransaction{
db: database
}
factory.user = &DBUser{
db: database
}
factory.userkvs = &DBUserKVS{
db: database
}
factory.userkvsitem = &DBUserKVSItem{
db: database
}

return factory
}
@@ -3,5 +3,5 @@ module models_ledger
import incubaid.herolib.hero.db

fn setup_test_db() !db.DB {
return db.new(path:"/tmp/testdb")!
return db.new(path: '/tmp/testdb')!
}