fixes in rclone module

2024-12-29 14:05:09 +02:00
parent 84d3aaade7
commit f25be20e21
5 changed files with 151 additions and 165 deletions

View File

@@ -2,8 +2,9 @@ module rclone
 import freeflowuniverse.herolib.core.playbook
 import freeflowuniverse.herolib.core.texttools
+import os

-const configfile = '${HOME}/.config/rclone/rclone.conf'
+const configfile = '${os.home_dir()}/.config/rclone/rclone.conf'

 // will look for personal configuration file in ~/hero/config .
 // this file is in heroscript format and will have all required info to configure rclone
@@ -17,13 +18,15 @@ const configfile = '${HOME}/.config/rclone/rclone.conf'
 // url:''
 //```
 pub fn configure() ! {
-	actions := playbook.new(
-		path: configfile
-		actor_filter: ['config']
-		action_filter: [
-			's3server_define',
-		]
+	mut plbook := playbook.new(
+		path: rclone.configfile
+		// actor_filter: ['config']
+		// action_filter: [
+		// 's3server_define',
+		// ]
 	)!
+	actions := plbook.find(filter: 'config.s3server_define')!
 	mut out := ''
 	for action in actions {
 		mut name := action.params.get_default('name', '')!
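
For orientation, the rewritten `configure()` now pulls its input with `plbook.find(filter: 'config.s3server_define')`, i.e. heroscript blocks whose actor is `config` and whose action is `s3server_define`. A minimal sketch of such a block is shown below; it assumes the usual `!!actor.action key:'value'` heroscript form, and only the `name` and `url` parameters are visible in this hunk — anything else an s3server definition would need (keys, secrets) is not shown in this commit.

```
// hypothetical entry in ~/hero/config (heroscript), parsed by configure()
!!config.s3server_define
    name:'backup'
    url:'https://s3.example.com'
```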

View File

@@ -1,7 +1,6 @@
 module rclone
 import os
-import freeflowuniverse.herolib.core.texttools

 // // RCloneClient represents a configured rclone instance
 // pub struct RCloneClient {

View File

@@ -2,7 +2,6 @@ module rclone
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook
-import freeflowuniverse.herolib.ui.console

 __global (
 	rclone_global map[string]&RCloneClient
@@ -91,9 +90,9 @@ pub fn play(args_ PlayArgs) ! {
 	if install_actions.len > 0 {
 		for install_action in install_actions {
 			mut p := install_action.params
-			mycfg := cfg_play(p)!
-			console.print_debug('install action rclone.configure\n${mycfg}')
-			set(mycfg)!
+			cfg_play(p)!
+			// console.print_debug('install action rclone.configure\n${mycfg}')
+			// set(mycfg)!
 		}
 	}
 }

View File

@@ -5,18 +5,14 @@ import flag
 import time
 import json

-const (
-	cache_file = '/tmp/herolib_tests.json'
-	test_expiry_seconds = 3600 // 1 hour
-)
+const cache_file = '/tmp/herolib_tests.json'
+const test_expiry_seconds = 3600 // 1 hour

 struct TestCache {
 mut:
 	tests map[string]i64 // Map of test paths to last successful run timestamp
 }

 // Load the test cache from JSON file
 fn load_test_cache() TestCache {
 	if !os.exists(cache_file) {
@@ -25,25 +21,19 @@ fn load_test_cache() TestCache {
 		}
 	}
-	content := os.read_file(cache_file) or {
-		return TestCache{
-			tests: map[string]i64{}
-		}
-	}
+	content := os.read_file(cache_file) or { return TestCache{
+		tests: map[string]i64{}
+	} }
-	return json.decode(TestCache, content) or {
-		return TestCache{
-			tests: map[string]i64{}
-		}
-	}
+	return json.decode(TestCache, content) or { return TestCache{
+		tests: map[string]i64{}
+	} }
 }

 // Save the test cache to JSON file
 fn save_test_cache(cache TestCache) {
 	json_str := json.encode_pretty(cache)
-	os.write_file(cache_file, json_str) or {
-		eprintln('Failed to save test cache: ${err}')
-	}
+	os.write_file(cache_file, json_str) or { eprintln('Failed to save test cache: ${err}') }
 }

 // Check if a test needs to be rerun based on timestamp
@@ -82,14 +72,14 @@ fn get_cache_key(path string, base_dir string) string {
 }

 // Check if a file should be ignored or marked as error based on its path
-fn process_test_file(path string, base_dir string, test_files_ignore []string, test_files_error []string, mut cache TestCache, mut tests_in_error []string)! {
+fn process_test_file(path string, base_dir string, test_files_ignore []string, test_files_error []string, mut cache TestCache, mut tests_in_error []string) ! {
 	// Get normalized paths
 	norm_path, rel_path := get_normalized_paths(path, base_dir)
 	mut should_ignore := false
 	mut is_error := false
-	if ! path.to_lower().contains("_test.v"){
+	if !path.to_lower().contains('_test.v') {
 		return
 	}
@@ -119,7 +109,7 @@ fn process_test_file(path string, base_dir string, test_files_ignore []string, t
 	}
 }

-fn dotest(path string, base_dir string, mut cache TestCache)! {
+fn dotest(path string, base_dir string, mut cache TestCache) ! {
 	norm_path, _ := get_normalized_paths(path, base_dir)
 	test_key := get_cache_key(norm_path, base_dir)
@@ -144,7 +134,6 @@ fn dotest(path string, base_dir string, mut cache TestCache)! {
 	println('Test passed: ${path}')
 }

 /////////////////////////
 /////////////////////////
@@ -152,13 +141,12 @@ fn dotest(path string, base_dir string, mut cache TestCache)! {
 mut fp := flag.new_flag_parser(os.args)
 fp.application('test_basic')
 fp.description('Run tests for herolib')
-remove_cache := fp.bool('r', `r`, false, 'Remove cache file before running tests', flag.FlagConfig{})
+remove_cache := fp.bool('r', `r`, false, 'Remove cache file before running tests')
 fp.finalize() or {
 	eprintln(err)
 	exit(1)
 }

 // Remove cache file if -r flag is set
 if remove_cache && os.exists(cache_file) {
 	os.rm(cache_file) or {
@@ -168,15 +156,12 @@ if remove_cache && os.exists(cache_file) {
 	println('Removed cache file: ${cache_file}')
 }

 abs_dir_of_script := dir(@FILE)
 norm_dir_of_script := normalize_path(abs_dir_of_script)
 os.chdir(abs_dir_of_script) or { panic(err) }

 // can use // inside this list as well to ignore temporary certain dirs, useful for testing
-tests := "
+tests := '
 lib/data
 lib/osal
 lib/lang
@@ -185,17 +170,17 @@ lib/clients
 // lib/crypt
 lib/core
 lib/develop
-"
-//the following tests have no prio and can be ignored
-tests_ignore := "
+'
+// the following tests have no prio and can be ignored
+tests_ignore := '
 notifier_test.v
 clients/meilisearch
 clients/zdb
 systemd_process_test.v
-"
-tests_error := "
+'
+tests_error := '
 net_test.v
 osal/package_test.v
 rpc_test.v
@@ -229,8 +214,7 @@ encoderhero/encoder_test.v
 encoderhero/decoder_test.v
 code/codeparser
 gittools_test.v
-"
+'

 // Split tests into array and remove empty lines
 test_files := tests.split('\n').filter(it.trim_space() != '')
@@ -239,14 +223,14 @@ test_files_error := tests_error.split('\n').filter(it.trim_space() != '')
 mut tests_in_error := []string{}

 // Load test cache
 mut cache := load_test_cache()
 println('Test cache loaded from ${cache_file}')

 // Run each test with proper v command flags
 for test in test_files {
-	if test.trim_space() == '' || test.trim_space().starts_with("//") || test.trim_space().starts_with("#") {
+	if test.trim_space() == '' || test.trim_space().starts_with('//')
+		|| test.trim_space().starts_with('#') {
 		continue
 	}
@@ -261,11 +245,12 @@ for test in test_files {
 		// If directory, run tests for each .v file in it recursively
 		files := os.walk_ext(full_path, '.v')
 		for file in files {
-			process_test_file(file, norm_dir_of_script, test_files_ignore, test_files_error, mut cache, mut tests_in_error)!
+			process_test_file(file, norm_dir_of_script, test_files_ignore, test_files_error, mut
+				cache, mut tests_in_error)!
 		}
 	} else if os.is_file(full_path) {
-		process_test_file(full_path, norm_dir_of_script, test_files_ignore, test_files_error, mut cache, mut tests_in_error)!
+		process_test_file(full_path, norm_dir_of_script, test_files_ignore, test_files_error, mut
+			cache, mut tests_in_error)!
 	}
 }
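
The hunks in this script only reformat code around the test cache, so the expiry check itself never appears in the diff. Below is a rough, self-contained V sketch of the logic the constants and the `TestCache` struct imply; the helper name `needs_rerun`, the `main()` wrapper, and the sample test key are illustrative assumptions, not code from the repository.

```v
import json
import os
import time

const cache_file = '/tmp/herolib_tests.json'
const test_expiry_seconds = 3600 // 1 hour

struct TestCache {
mut:
	tests map[string]i64 // test path -> unix timestamp of last successful run
}

// hypothetical helper: a test needs a rerun when it has no recorded success
// or the last success is older than test_expiry_seconds
fn needs_rerun(cache TestCache, test_key string) bool {
	last_ok := cache.tests[test_key] or { return true }
	return time.now().unix() - last_ok > test_expiry_seconds
}

fn main() {
	mut cache := TestCache{}
	if os.exists(cache_file) {
		content := os.read_file(cache_file) or { '' }
		cache = json.decode(TestCache, content) or { TestCache{} }
	}
	// placeholder test key, for illustration only
	println(needs_rerun(cache, 'lib/data/some_test.v'))
}
```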