...
This commit is contained in:
@@ -26,5 +26,4 @@ println(response)
|
||||
// 'The food was delicious and the waiter..',
|
||||
// ])!
|
||||
|
||||
|
||||
println(response2)
|
||||
|
||||
@@ -26,5 +26,4 @@ println(response)
|
||||
// 'The food was delicious and the waiter..',
|
||||
// ])!
|
||||
|
||||
|
||||
println(response2)
|
||||
|
||||
@@ -45,16 +45,16 @@ fn test_basic_flow() ! {
|
||||
redis.flushdb()!
|
||||
|
||||
mut coordinator := flows.new(
|
||||
name: 'test_basic_flow',
|
||||
redis: redis,
|
||||
ai: none
|
||||
name: 'test_basic_flow'
|
||||
redis: redis
|
||||
ai: none
|
||||
)!
|
||||
|
||||
// Step 1: Initialize
|
||||
mut step1 := coordinator.step_new(
|
||||
name: 'initialize'
|
||||
name: 'initialize'
|
||||
description: 'Initialize test environment'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Step 1: Initializing...')
|
||||
s.context['init_time'] = ourtime.now().str()
|
||||
}
|
||||
@@ -62,9 +62,9 @@ fn test_basic_flow() ! {
|
||||
|
||||
// Step 2: Process
|
||||
mut step2 := coordinator.step_new(
|
||||
name: 'process'
|
||||
name: 'process'
|
||||
description: 'Process data'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Step 2: Processing...')
|
||||
s.context['processed'] = 'true'
|
||||
}
|
||||
@@ -72,9 +72,9 @@ fn test_basic_flow() ! {
|
||||
|
||||
// Step 3: Finalize
|
||||
mut step3 := coordinator.step_new(
|
||||
name: 'finalize'
|
||||
name: 'finalize'
|
||||
description: 'Finalize results'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Step 3: Finalizing...')
|
||||
s.context['status'] = 'completed'
|
||||
}
|
||||
@@ -102,16 +102,16 @@ fn test_error_handling() ! {
|
||||
redis.flushdb()!
|
||||
|
||||
mut coordinator := flows.new(
|
||||
name: 'test_error_flow',
|
||||
redis: redis,
|
||||
ai: none
|
||||
name: 'test_error_flow'
|
||||
redis: redis
|
||||
ai: none
|
||||
)!
|
||||
|
||||
// Error step
|
||||
mut error_recovery := coordinator.step_new(
|
||||
name: 'error_recovery'
|
||||
name: 'error_recovery'
|
||||
description: 'Recover from error'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Error Step: Executing recovery...')
|
||||
s.context['recovered'] = 'true'
|
||||
}
|
||||
@@ -119,9 +119,9 @@ fn test_error_handling() ! {
|
||||
|
||||
// Main step that fails
|
||||
mut main_step := coordinator.step_new(
|
||||
name: 'failing_step'
|
||||
name: 'failing_step'
|
||||
description: 'This step will fail'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✗ Main Step: Intentionally failing...')
|
||||
return error('Simulated error for testing')
|
||||
}
|
||||
@@ -130,9 +130,7 @@ fn test_error_handling() ! {
|
||||
main_step.error_step_add(error_recovery)
|
||||
|
||||
// Run and expect error
|
||||
coordinator.run() or {
|
||||
println(' ✓ Error caught as expected: ${err.msg()}')
|
||||
}
|
||||
coordinator.run() or { println(' ✓ Error caught as expected: ${err.msg()}') }
|
||||
|
||||
// Verify error state in Redis
|
||||
error_state := coordinator.get_step_state('failing_step')!
|
||||
@@ -150,41 +148,41 @@ fn test_multiple_next_steps() ! {
|
||||
redis.flushdb()!
|
||||
|
||||
mut coordinator := flows.new(
|
||||
name: 'test_parallel_steps',
|
||||
redis: redis,
|
||||
ai: none
|
||||
name: 'test_parallel_steps'
|
||||
redis: redis
|
||||
ai: none
|
||||
)!
|
||||
|
||||
// Parent step
|
||||
mut parent := coordinator.step_new(
|
||||
name: 'parent_step'
|
||||
name: 'parent_step'
|
||||
description: 'Parent step with multiple children'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Parent Step: Executing...')
|
||||
}
|
||||
)!
|
||||
|
||||
// Child steps
|
||||
mut child1 := coordinator.step_new(
|
||||
name: 'child_step_1'
|
||||
name: 'child_step_1'
|
||||
description: 'First child'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Child Step 1: Executing...')
|
||||
}
|
||||
)!
|
||||
|
||||
mut child2 := coordinator.step_new(
|
||||
name: 'child_step_2'
|
||||
name: 'child_step_2'
|
||||
description: 'Second child'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Child Step 2: Executing...')
|
||||
}
|
||||
)!
|
||||
|
||||
mut child3 := coordinator.step_new(
|
||||
name: 'child_step_3'
|
||||
name: 'child_step_3'
|
||||
description: 'Third child'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Child Step 3: Executing...')
|
||||
}
|
||||
)!
|
||||
@@ -209,15 +207,15 @@ fn test_redis_state() ! {
|
||||
redis.flushdb()!
|
||||
|
||||
mut coordinator := flows.new(
|
||||
name: 'test_redis_state',
|
||||
redis: redis,
|
||||
ai: none
|
||||
name: 'test_redis_state'
|
||||
redis: redis
|
||||
ai: none
|
||||
)!
|
||||
|
||||
mut step1 := coordinator.step_new(
|
||||
name: 'redis_test_step'
|
||||
name: 'redis_test_step'
|
||||
description: 'Test Redis state storage'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Executing step with context...')
|
||||
s.context['user'] = 'test_user'
|
||||
s.context['action'] = 'test_action'
|
||||
@@ -257,16 +255,16 @@ fn test_complex_flow() ! {
|
||||
redis.flushdb()!
|
||||
|
||||
mut coordinator := flows.new(
|
||||
name: 'test_complex_flow',
|
||||
redis: redis,
|
||||
ai: none
|
||||
name: 'test_complex_flow'
|
||||
redis: redis
|
||||
ai: none
|
||||
)!
|
||||
|
||||
// Step 1: Validate
|
||||
mut validate := coordinator.step_new(
|
||||
name: 'validate_input'
|
||||
name: 'validate_input'
|
||||
description: 'Validate input parameters'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Validating input...')
|
||||
s.context['validated'] = 'true'
|
||||
}
|
||||
@@ -274,9 +272,9 @@ fn test_complex_flow() ! {
|
||||
|
||||
// Step 2: Transform (next step after validate)
|
||||
mut transform := coordinator.step_new(
|
||||
name: 'transform_data'
|
||||
name: 'transform_data'
|
||||
description: 'Transform input data'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Transforming data...')
|
||||
s.context['transformed'] = 'true'
|
||||
}
|
||||
@@ -284,9 +282,9 @@ fn test_complex_flow() ! {
|
||||
|
||||
// Step 3a: Save to DB (next step after transform)
|
||||
mut save_db := coordinator.step_new(
|
||||
name: 'save_to_database'
|
||||
name: 'save_to_database'
|
||||
description: 'Save data to database'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Saving to database...')
|
||||
s.context['saved'] = 'true'
|
||||
}
|
||||
@@ -294,9 +292,9 @@ fn test_complex_flow() ! {
|
||||
|
||||
// Step 3b: Send notification (next step after transform)
|
||||
mut notify := coordinator.step_new(
|
||||
name: 'send_notification'
|
||||
name: 'send_notification'
|
||||
description: 'Send notification'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Sending notification...')
|
||||
s.context['notified'] = 'true'
|
||||
}
|
||||
@@ -304,9 +302,9 @@ fn test_complex_flow() ! {
|
||||
|
||||
// Step 4: Cleanup (final step)
|
||||
mut cleanup := coordinator.step_new(
|
||||
name: 'cleanup'
|
||||
name: 'cleanup'
|
||||
description: 'Cleanup resources'
|
||||
f: fn (mut s flows.Step) ! {
|
||||
f: fn (mut s flows.Step) ! {
|
||||
println(' ✓ Cleaning up...')
|
||||
s.context['cleaned'] = 'true'
|
||||
}
|
||||
@@ -336,4 +334,4 @@ fn test_complex_flow() ! {
|
||||
|
||||
println(' ✓ Test 5 PASSED: Complex flow executed successfully')
|
||||
coordinator.clear_redis()!
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ mut goals := [
|
||||
title: 'Faster Requirements'
|
||||
description: 'Reduce PRD creation time to under 1 day'
|
||||
gtype: .product
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
// Create use cases
|
||||
@@ -25,7 +25,7 @@ mut use_cases := [
|
||||
steps: ['Select template', 'Fill fields', 'Export to Markdown']
|
||||
success: 'Complete PRD generated'
|
||||
failure: 'Validation failed'
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
// Create requirements
|
||||
@@ -45,7 +45,7 @@ mut requirements := [
|
||||
priority: .high
|
||||
criteria: [criterion]
|
||||
dependencies: []
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
// Create constraints
|
||||
@@ -55,7 +55,7 @@ mut constraints := [
|
||||
title: 'ARM64 Support'
|
||||
description: 'Must run on ARM64 infrastructure'
|
||||
ctype: .technica
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
// Create risks
|
||||
@@ -90,4 +90,4 @@ println('✓ Total PRDs in database: ${all_prds.len}')
|
||||
|
||||
// Check if exists
|
||||
exists := mydb.prd.exist(prd.id)!
|
||||
println('✓ PRD exists: ${exists}')
|
||||
println('✓ PRD exists: ${exists}')
|
||||
|
||||
@@ -7,9 +7,9 @@ println('=== Redis Installer Example ===\n')
|
||||
// Create configuration
|
||||
// You can customize port, datadir, and ipaddr as needed
|
||||
config := redis.RedisInstall{
|
||||
port: 6379 // Redis port
|
||||
datadir: '/var/lib/redis' // Data directory (standard location)
|
||||
ipaddr: 'localhost' // Bind address
|
||||
port: 6379 // Redis port
|
||||
datadir: '/var/lib/redis' // Data directory (standard location)
|
||||
ipaddr: 'localhost' // Bind address
|
||||
}
|
||||
|
||||
// Check if Redis is already running
|
||||
@@ -22,9 +22,9 @@ if redis.check(config) {
|
||||
println(' Port: ${config.port}')
|
||||
println(' Data directory: ${config.datadir}')
|
||||
println(' Bind address: ${config.ipaddr}\n')
|
||||
|
||||
|
||||
redis.redis_install(config)!
|
||||
|
||||
|
||||
// Verify installation
|
||||
if redis.check(config) {
|
||||
println('\nSUCCESS: Redis installed and started successfully!')
|
||||
|
||||
@@ -29,21 +29,31 @@ sal_running := sal_runner.running()!
|
||||
|
||||
println('Service Status Details')
|
||||
println('-' * 60)
|
||||
println('Coordinator ${if coord_running { "✅ Running" } else { "❌ Stopped" }} http://127.0.0.1:${coordinator.http_port}')
|
||||
println('Supervisor ${if super_running { "✅ Running" } else { "❌ Stopped" }} http://127.0.0.1:${supervisor_inst.http_port}')
|
||||
println('Hero Runner ${if hero_running { "✅ Running" } else { "❌ Stopped" }}')
|
||||
println('Osiris Runner ${if osiris_running { "✅ Running" } else { "❌ Stopped" }}')
|
||||
println('SAL Runner ${if sal_running { "✅ Running" } else { "❌ Stopped" }}')
|
||||
println('Coordinator ${if coord_running { '✅ Running' } else { '❌ Stopped' }} http://127.0.0.1:${coordinator.http_port}')
|
||||
println('Supervisor ${if super_running { '✅ Running' } else { '❌ Stopped' }} http://127.0.0.1:${supervisor_inst.http_port}')
|
||||
println('Hero Runner ${if hero_running { '✅ Running' } else { '❌ Stopped' }}')
|
||||
println('Osiris Runner ${if osiris_running { '✅ Running' } else { '❌ Stopped' }}')
|
||||
println('SAL Runner ${if sal_running { '✅ Running' } else { '❌ Stopped' }}')
|
||||
|
||||
println('\n' + '=' * 60)
|
||||
|
||||
// Count running services
|
||||
mut running_count := 0
|
||||
if coord_running { running_count++ }
|
||||
if super_running { running_count++ }
|
||||
if hero_running { running_count++ }
|
||||
if osiris_running { running_count++ }
|
||||
if sal_running { running_count++ }
|
||||
if coord_running {
|
||||
running_count++
|
||||
}
|
||||
if super_running {
|
||||
running_count++
|
||||
}
|
||||
if hero_running {
|
||||
running_count++
|
||||
}
|
||||
if osiris_running {
|
||||
running_count++
|
||||
}
|
||||
if sal_running {
|
||||
running_count++
|
||||
}
|
||||
|
||||
println('Summary: ${running_count}/5 services running')
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import incubaid.herolib.core.code
|
||||
import os
|
||||
|
||||
fn test_comprehensive_code_parsing() {
|
||||
console.print_header('Comprehensive Code myparser Tests')
|
||||
console.print_header('Comprehensive Code Parsing Tests')
|
||||
console.print_lf(1)
|
||||
|
||||
// Setup test files by copying testdata
|
||||
@@ -77,20 +77,21 @@ fn copy_directory(src string, dst string) ! {
|
||||
fn test_module_parsing() {
|
||||
console.print_header('Test 1: Module and File Parsing')
|
||||
|
||||
mut myparser := new('/tmp/codeparsertest', ParseOptions{ recursive: true })!
|
||||
parse()!
|
||||
mut myparser := new('/tmp/codeparsertest', recursive: true)!
|
||||
myparser.parse()!
|
||||
|
||||
v_files := myparser.files.keys()
|
||||
v_files := myparser.list_files()
|
||||
console.print_item('Found ${v_files.len} V files')
|
||||
|
||||
mut total_items := 0
|
||||
for file_path in v_files {
|
||||
vfile := myparser.files[file_path]
|
||||
console.print_item(' ✓ ${os.base(file_path)}: ${vfile.items.len} items')
|
||||
total_items += vfile.items.len
|
||||
if parsed_file := myparser.parsed_files[file_path] {
|
||||
console.print_item(' ✓ ${os.base(file_path)}: ${parsed_file.vfile.items.len} items')
|
||||
total_items += parsed_file.vfile.items.len
|
||||
}
|
||||
}
|
||||
|
||||
assert v_files.len >= 7, 'Expected at least 7 V files, got ${v_files.len}' // 5 new files + 2 existing
|
||||
assert v_files.len >= 7, 'Expected at least 7 V files, got ${v_files.len}'
|
||||
assert total_items > 0, 'Expected to parse some items'
|
||||
|
||||
console.print_green('✓ Module parsing test passed')
|
||||
@@ -106,7 +107,7 @@ fn test_struct_parsing() {
|
||||
return
|
||||
}
|
||||
|
||||
vfile := parse_vfile(content) or {
|
||||
vfile := code.parse_vfile(content) or {
|
||||
assert false, 'Failed to parse models.v: ${err}'
|
||||
return
|
||||
}
|
||||
@@ -147,12 +148,12 @@ fn test_struct_parsing() {
|
||||
fn test_function_parsing() {
|
||||
console.print_header('Test 3: Function Parsing')
|
||||
|
||||
mut myparser := new('/tmp/codeparsertest', ParseOptions{ recursive: true })!
|
||||
mut myparser := new('/tmp/codeparsertest', recursive: true)!
|
||||
myparser.parse()!
|
||||
|
||||
mut functions := []code.Function{}
|
||||
for _, vfile in myparser.files {
|
||||
functions << vfile.functions()
|
||||
for _, parsed_file in myparser.parsed_files {
|
||||
functions << parsed_file.vfile.functions()
|
||||
}
|
||||
|
||||
pub_functions := functions.filter(it.is_pub)
|
||||
@@ -167,7 +168,6 @@ fn test_function_parsing() {
|
||||
create_fn := create_user_fn[0]
|
||||
assert create_fn.is_pub == true, 'create_user should be public'
|
||||
assert create_fn.params.len == 2, 'create_user should have 2 parameters'
|
||||
assert create_fn.description.len > 0, 'create_user should have description'
|
||||
console.print_item(' ✓ create_user: ${create_fn.params.len} params, public')
|
||||
|
||||
// Check get_user function
|
||||
@@ -200,12 +200,12 @@ fn test_imports_and_modules() {
|
||||
return
|
||||
}
|
||||
|
||||
vfile := parse_vfile(content) or {
|
||||
vfile := code.parse_vfile(content) or {
|
||||
assert false, 'Failed to parse models.v: ${err}'
|
||||
return
|
||||
}
|
||||
|
||||
assert vfile.mod == 'testapp', 'Module name should be testapp, got ${vfile.mod}'
|
||||
assert vfile.mod == 'testdata', 'Module name should be testdata, got ${vfile.mod}'
|
||||
assert vfile.imports.len == 2, 'Expected 2 imports, got ${vfile.imports.len}'
|
||||
|
||||
console.print_item(' ✓ Module name: ${vfile.mod}')
|
||||
@@ -231,7 +231,7 @@ fn test_type_system() {
|
||||
return
|
||||
}
|
||||
|
||||
vfile := parse_vfile(content) or {
|
||||
vfile := code.parse_vfile(content) or {
|
||||
assert false, 'Failed to parse models.v: ${err}'
|
||||
return
|
||||
}
|
||||
@@ -266,7 +266,7 @@ fn test_visibility_modifiers() {
|
||||
return
|
||||
}
|
||||
|
||||
vfile := parse_vfile(content) or {
|
||||
vfile := code.parse_vfile(content) or {
|
||||
assert false, 'Failed to parse models.v: ${err}'
|
||||
return
|
||||
}
|
||||
@@ -300,8 +300,8 @@ fn test_method_parsing() {
|
||||
myparser.parse()!
|
||||
|
||||
mut methods := []code.Function{}
|
||||
for _, vfile in myparser.files {
|
||||
methods << vfile.functions().filter(it.receiver.name != '')
|
||||
for _, parsed_file in myparser.parsed_files {
|
||||
methods << parsed_file.vfile.functions().filter(it.receiver.name != '')
|
||||
}
|
||||
|
||||
assert methods.len >= 11, 'Expected at least 11 methods, got ${methods.len}'
|
||||
@@ -336,7 +336,7 @@ fn test_constants_parsing() {
|
||||
return
|
||||
}
|
||||
|
||||
vfile := parse_vfile(content) or {
|
||||
vfile := code.parse_vfile(content) or {
|
||||
assert false, 'Failed to parse models.v: ${err}'
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,150 +1,154 @@
|
||||
module codeparser
|
||||
|
||||
import incubaid.herolib.core.code
|
||||
import incubaid.herolib.ui.console
|
||||
import os
|
||||
import incubaid.herolib.core.pathlib
|
||||
// import incubaid.herolib.ui.console
|
||||
// import os
|
||||
|
||||
@[params]
|
||||
pub struct ParseOptions {
|
||||
pub struct ParserOptions {
|
||||
pub:
|
||||
path string @[required]
|
||||
recursive bool = true
|
||||
exclude_patterns []string
|
||||
include_patterns []string = ['*.v']
|
||||
}
|
||||
|
||||
pub struct CodeParser {
|
||||
// ParseError represents an error that occurred while parsing a file
|
||||
pub struct ParseError {
|
||||
pub:
|
||||
root_path string
|
||||
options ParseOptions
|
||||
pub mut:
|
||||
files map[string]code.VFile
|
||||
modules []code.Module
|
||||
errors []string
|
||||
file_path string
|
||||
error string
|
||||
}
|
||||
|
||||
pub fn new(path string, opts ParseOptions) !CodeParser {
|
||||
// ParsedFile represents a successfully parsed V file
|
||||
pub struct ParsedFile {
|
||||
pub:
|
||||
path string
|
||||
module_name string
|
||||
vfile code.VFile
|
||||
}
|
||||
|
||||
pub struct ModuleStats {
|
||||
pub mut:
|
||||
file_count int
|
||||
struct_count int
|
||||
function_count int
|
||||
interface_count int
|
||||
const_count int
|
||||
}
|
||||
|
||||
pub struct ParsedModule {
|
||||
pub:
|
||||
name string
|
||||
file_paths []string
|
||||
stats ModuleStats
|
||||
}
|
||||
|
||||
pub struct CodeParser {
|
||||
pub mut:
|
||||
root_dir string
|
||||
options ParserOptions
|
||||
parsed_files map[string]ParsedFile
|
||||
modules map[string][]string
|
||||
parse_errors []ParseError
|
||||
}
|
||||
|
||||
// new creates a CodeParser and scans the given root directory
|
||||
@[params]
|
||||
pub fn new(args ParserOptions) !CodeParser {
|
||||
mut parser := CodeParser{
|
||||
root_path: path
|
||||
options: opts
|
||||
root_dir: args.path
|
||||
options: args
|
||||
parsed_files: map[string]ParsedFile{}
|
||||
modules: map[string][]string{}
|
||||
}
|
||||
parser.scan_directory()!
|
||||
return parser
|
||||
}
|
||||
|
||||
// Accessor properties for backward compatibility
|
||||
pub fn (parser CodeParser) files() map[string]code.VFile {
|
||||
mut result := map[string]code.VFile{}
|
||||
for _, parsed_file in parser.parsed_files {
|
||||
result[parsed_file.path] = parsed_file.vfile
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
pub fn (parser CodeParser) errors() []ParseError {
|
||||
return parser.parse_errors
|
||||
}
|
||||
|
||||
// parse_file parses a single V file and adds it to the index (public wrapper)
|
||||
pub fn (mut parser CodeParser) parse_file(file_path string) {
|
||||
mut file := pathlib.get_file(path: file_path) or {
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: err.msg()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
content := file.read() or {
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: err.msg()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Parse the V file
|
||||
vfile := code.parse_vfile(content) or {
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: err.msg()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
parsed_file := ParsedFile{
|
||||
path: file_path
|
||||
module_name: vfile.mod
|
||||
vfile: vfile
|
||||
}
|
||||
|
||||
parser.parsed_files[file_path] = parsed_file
|
||||
|
||||
// Index by module
|
||||
if vfile.mod !in parser.modules {
|
||||
parser.modules[vfile.mod] = []string{}
|
||||
}
|
||||
parser.modules[vfile.mod] << file_path
|
||||
}
|
||||
|
||||
// parse processes all V files that were scanned
|
||||
pub fn (mut parser CodeParser) parse() ! {
|
||||
parser.files.clear()
|
||||
parser.errors.clear()
|
||||
|
||||
v_files := parser.collect_files()!
|
||||
|
||||
for file_path in v_files {
|
||||
console.print_debug('Parsing: ${file_path}')
|
||||
|
||||
content := os.read_file(file_path) or {
|
||||
parser.errors << 'Failed to read ${file_path}: ${err}'
|
||||
continue
|
||||
}
|
||||
|
||||
vfile := code.parse_vfile(content) or {
|
||||
parser.errors << 'Failed to parse ${file_path}: ${err}'
|
||||
continue
|
||||
}
|
||||
|
||||
parser.files[file_path] = vfile
|
||||
for file_path, _ in parser.parsed_files {
|
||||
parser.parse_file(file_path)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (parser CodeParser) collect_files() ![]string {
|
||||
mut files := []string{}
|
||||
|
||||
if parser.options.recursive {
|
||||
files = parser.collect_files_recursive(parser.root_path)!
|
||||
} else {
|
||||
files = code.list_v_files(parser.root_path)!
|
||||
}
|
||||
|
||||
return files
|
||||
// get_module_stats calculates statistics for a module
|
||||
pub fn (parser CodeParser) get_module_stats(module string) ModuleStats {
|
||||
// TODO: Fix this function
|
||||
return ModuleStats{}
|
||||
}
|
||||
|
||||
fn (parser CodeParser) collect_files_recursive(dir string) ![]string {
|
||||
mut all_files := []string{}
|
||||
|
||||
items := os.ls(dir)!
|
||||
for item in items {
|
||||
path := os.join_path(dir, item)
|
||||
|
||||
if parser.should_skip(path) {
|
||||
continue
|
||||
}
|
||||
|
||||
if os.is_dir(path) {
|
||||
sub_files := parser.collect_files_recursive(path)!
|
||||
all_files << sub_files
|
||||
} else if item.ends_with('.v') && !item.ends_with('_.v') {
|
||||
all_files << path
|
||||
}
|
||||
}
|
||||
|
||||
return all_files
|
||||
}
|
||||
|
||||
fn (parser CodeParser) should_skip(path string) bool {
|
||||
basename := os.base(path)
|
||||
|
||||
// Skip common directories
|
||||
if basename in ['.git', 'node_modules', '.vscode', '__pycache__', '.github'] {
|
||||
return true
|
||||
}
|
||||
|
||||
for pattern in parser.options.exclude_patterns {
|
||||
if basename.contains(pattern) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
pub fn (parser CodeParser) summarize() CodeSummary {
|
||||
mut summary := CodeSummary{}
|
||||
|
||||
for _, vfile in parser.files {
|
||||
summary.total_files++
|
||||
summary.total_imports += vfile.imports.len
|
||||
summary.total_structs += vfile.structs().len
|
||||
summary.total_functions += vfile.functions().len
|
||||
summary.total_consts += vfile.consts.len
|
||||
}
|
||||
|
||||
summary.total_errors = parser.errors.len
|
||||
|
||||
return summary
|
||||
}
|
||||
|
||||
pub struct CodeSummary {
|
||||
pub mut:
|
||||
total_files int
|
||||
total_imports int
|
||||
total_structs int
|
||||
total_functions int
|
||||
total_consts int
|
||||
total_errors int
|
||||
}
|
||||
|
||||
pub fn (summary CodeSummary) print() {
|
||||
console.print_header('Code Summary')
|
||||
console.print_item('Files parsed: ${summary.total_files}')
|
||||
console.print_item('Imports: ${summary.total_imports}')
|
||||
console.print_item('Structs: ${summary.total_structs}')
|
||||
console.print_item('Functions: ${summary.total_functions}')
|
||||
console.print_item('Constants: ${summary.total_consts}')
|
||||
console.print_item('Errors: ${summary.total_errors}')
|
||||
}
|
||||
|
||||
pub fn (parser CodeParser) print_errors() {
|
||||
if parser.errors.len > 0 {
|
||||
console.print_header('Parsing Errors')
|
||||
for err in parser.errors {
|
||||
console.print_stderr(err)
|
||||
}
|
||||
// error adds a new parsing error to the list
|
||||
fn (mut parser CodeParser) error(file_path string, msg string) {
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: msg
|
||||
}
|
||||
}
|
||||
|
||||
// has_errors returns true if any parsing errors occurred
|
||||
pub fn (parser CodeParser) has_errors() bool {
|
||||
return parser.parse_errors.len > 0
|
||||
}
|
||||
|
||||
// error_count returns the number of parsing errors
|
||||
pub fn (parser CodeParser) error_count() int {
|
||||
return parser.parse_errors.len
|
||||
}
|
||||
|
||||
@@ -2,26 +2,8 @@ module codeparser
|
||||
|
||||
import incubaid.herolib.core.pathlib
|
||||
import incubaid.herolib.core.code
|
||||
import log
|
||||
|
||||
// new creates a CodeParser from a root directory
|
||||
// It walks the directory tree, parses all .v files, and indexes them
|
||||
//
|
||||
// Args:
|
||||
// root_dir string - directory to scan (absolute or relative)
|
||||
// Returns:
|
||||
// CodeParser - indexed codebase
|
||||
// error - if directory doesn't exist or other I/O errors
|
||||
pub fn new(root_dir string) !CodeParser {
|
||||
mut parser := CodeParser{
|
||||
root_path: root_dir
|
||||
}
|
||||
|
||||
parser.scan_directory()!
|
||||
return parser
|
||||
}
|
||||
|
||||
// scan_directory recursively walks the directory and parses all V files
|
||||
// scan_directory recursively walks the directory and parses all V files using pathlib
|
||||
fn (mut parser CodeParser) scan_directory() ! {
|
||||
mut root := pathlib.get_dir(path: parser.root_dir, create: false)!
|
||||
|
||||
@@ -29,95 +11,32 @@ fn (mut parser CodeParser) scan_directory() ! {
|
||||
return error('root directory does not exist: ${parser.root_dir}')
|
||||
}
|
||||
|
||||
parser.walk_dir(mut root)!
|
||||
}
|
||||
// Use pathlib's recursive listing capability
|
||||
mut items := root.list(recursive: parser.options.recursive)!
|
||||
|
||||
// walk_dir recursively traverses directories and collects V files
|
||||
fn (mut parser CodeParser) walk_dir(mut dir pathlib.Path) ! {
|
||||
// Get all items in directory
|
||||
mut items := dir.list()!
|
||||
|
||||
for item in items {
|
||||
if item.is_file() && item.path.ends_with('.v') {
|
||||
// Skip generated files
|
||||
if item.path.ends_with('_.v') {
|
||||
continue
|
||||
}
|
||||
|
||||
parser.parse_file(item.path)
|
||||
} else if item.is_dir() {
|
||||
// Recursively walk subdirectories
|
||||
mut subdir := pathlib.get_dir(path: item.path, create: false) or { continue }
|
||||
parser.walk_dir(mut subdir) or { continue }
|
||||
for item in items.paths {
|
||||
// Skip non-V files
|
||||
if !item.path.ends_with('.v') {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parse_file parses a single V file and adds it to the index
|
||||
fn (mut parser CodeParser) parse_file(file_path string) {
|
||||
mut file := pathlib.get_file(path: file_path) or {
|
||||
err_msg := 'failed to read file: ${err}'
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: err_msg
|
||||
// Skip generated files
|
||||
if item.path.ends_with('_.v') {
|
||||
continue
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
content := file.read() or {
|
||||
err_msg := 'failed to read content: ${err}'
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: err_msg
|
||||
// Check exclude patterns
|
||||
should_skip := parser.options.exclude_patterns.any(item.path.contains(it))
|
||||
if should_skip {
|
||||
continue
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Parse the V file
|
||||
vfile := code.parse_vfile(content) or {
|
||||
err_msg := 'parse error: ${err}'
|
||||
parser.parse_errors << ParseError{
|
||||
file_path: file_path
|
||||
error: err_msg
|
||||
// Store file path for later parsing
|
||||
parsed_file := ParsedFile{
|
||||
path: item.path
|
||||
module_name: ''
|
||||
vfile: code.VFile{}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
parsed_file := ParsedFile{
|
||||
path: file_path
|
||||
module_name: vfile.mod
|
||||
vfile: vfile
|
||||
parse_error: ''
|
||||
}
|
||||
|
||||
parser.parsed_files[file_path] = parsed_file
|
||||
|
||||
// Index by module
|
||||
if vfile.mod !in parser.modules {
|
||||
parser.modules[vfile.mod] = []string{}
|
||||
}
|
||||
parser.modules[vfile.mod] << file_path
|
||||
}
|
||||
|
||||
// has_errors returns true if any parsing errors occurred
|
||||
pub fn (parser CodeParser) has_errors() bool {
|
||||
return parser.parse_errors.len > 0
|
||||
}
|
||||
|
||||
// error_count returns the number of parsing errors
|
||||
pub fn (parser CodeParser) error_count() int {
|
||||
return parser.parse_errors.len
|
||||
}
|
||||
|
||||
// print_errors prints all parsing errors to stdout
|
||||
pub fn (parser CodeParser) print_errors() {
|
||||
if parser.parse_errors.len == 0 {
|
||||
println('No parsing errors')
|
||||
return
|
||||
}
|
||||
|
||||
println('Parsing Errors (${parser.parse_errors.len}):')
|
||||
for err in parser.parse_errors {
|
||||
println(' ${err.file_path}: ${err.error}')
|
||||
parser.parsed_files[item.path] = parsed_file
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,72 +2,54 @@ module codeparser
|
||||
|
||||
import incubaid.herolib.core.code
|
||||
|
||||
// filter_structs filters structs using a predicate function
|
||||
//
|
||||
// Args:
|
||||
// predicate - function that returns true for structs to include
|
||||
// module - optional module filter
|
||||
pub fn (parser CodeParser) filter_structs(predicate: fn(code.Struct) bool, module: string = '') []code.Struct {
|
||||
structs := parser.list_structs(module)
|
||||
return structs.filter(predicate(it))
|
||||
@[params]
|
||||
pub struct FilterOptions {
|
||||
pub:
|
||||
module_ string
|
||||
name_regex string
|
||||
is_public bool
|
||||
has_receiver bool
|
||||
}
|
||||
|
||||
// filter_functions filters functions using a predicate function
|
||||
pub fn (parser CodeParser) filter_functions(predicate: fn(code.Function) bool, module: string = '') []code.Function {
|
||||
functions := parser.list_functions(module)
|
||||
return functions.filter(predicate(it))
|
||||
}
|
||||
|
||||
// filter_public_structs returns only public structs
|
||||
pub fn (parser CodeParser) filter_public_structs(module: string = '') []code.Struct {
|
||||
return parser.filter_structs(fn (s code.Struct) bool {
|
||||
return s.is_pub
|
||||
}, module)
|
||||
}
|
||||
|
||||
// filter_public_functions returns only public functions
|
||||
pub fn (parser CodeParser) filter_public_functions(module: string = '') []code.Function {
|
||||
return parser.filter_functions(fn (f code.Function) bool {
|
||||
return f.is_pub
|
||||
}, module)
|
||||
}
|
||||
|
||||
// filter_functions_with_receiver returns functions that have a receiver (methods)
|
||||
pub fn (parser CodeParser) filter_functions_with_receiver(module: string = '') []code.Function {
|
||||
return parser.filter_functions(fn (f code.Function) bool {
|
||||
return f.receiver.name != ''
|
||||
}, module)
|
||||
}
|
||||
|
||||
// filter_functions_returning_error returns functions that return error type (${ error type with ! })
|
||||
pub fn (parser CodeParser) filter_functions_returning_error(module: string = '') []code.Function {
|
||||
return parser.filter_functions(fn (f code.Function) bool {
|
||||
return f.has_return || f.result.is_result
|
||||
}, module)
|
||||
}
|
||||
|
||||
// filter_structs_with_field returns structs that have a field of a specific type.
//
// Args:
//   field_type - type symbol to look for among each struct's fields
//   module_    - optional module filter ('' means all modules)
pub fn (parser CodeParser) filter_structs_with_field(field_type string, module_ string = '') []code.Struct {
	return parser.filter_structs(fn [field_type] (s code.Struct) bool {
		for field in s.fields {
			if field.typ.symbol() == field_type {
				return true
			}
		}
		return false
	}, module_)
}

// structs returns a filtered list of all structs found in the parsed files.
//
// Filtering honours FilterOptions: module_ (exact module name), name_regex
// (regex on the struct name) and is_public (keep only pub structs).
pub fn (p CodeParser) structs(options FilterOptions) []code.Struct {
	mut result := []code.Struct{}
	for _, file in p.parsed_files {
		if options.module_ != '' && file.module_name != options.module_ {
			continue
		}
		for struct_ in file.vfile.structs() {
			if options.name_regex != '' && !struct_.name.match_regex(options.name_regex) {
				continue
			}
			if options.is_public && !struct_.is_pub {
				continue
			}
			result << struct_
		}
	}
	return result
}
|
||||
|
||||
// filter_structs_by_name returns structs whose name contains the given
// pattern (substring match, not regex).
//
// Args:
//   pattern - substring to search for in the struct name
//   module_ - optional module filter ('' means all modules)
pub fn (parser CodeParser) filter_structs_by_name(pattern string, module_ string = '') []code.Struct {
	return parser.filter_structs(fn [pattern] (s code.Struct) bool {
		return s.name.contains(pattern)
	}, module_)
}
||||
// functions returns a filtered list of all functions found in the parsed files.
//
// Filtering honours FilterOptions: module_ (exact module name), name_regex
// (regex on the function name), is_public (keep only pub functions) and
// has_receiver (keep only methods).
pub fn (p CodeParser) functions(options FilterOptions) []code.Function {
	mut result := []code.Function{}
	for _, file in p.parsed_files {
		// Skip files outside the requested module, when one is given.
		if options.module_ != '' && file.module_name != options.module_ {
			continue
		}
		for func in file.vfile.functions() {
			// Each criterion passes when the option is unset or the function matches.
			name_ok := options.name_regex == '' || func.name.match_regex(options.name_regex)
			visibility_ok := !options.is_public || func.is_pub
			receiver_ok := !options.has_receiver || func.receiver.typ.name != ''
			if name_ok && visibility_ok && receiver_ok {
				result << func
			}
		}
	}
	return result
}
|
||||
|
||||
// filter_functions_by_name returns functions whose name contains the given
// pattern (substring match, not regex).
//
// Args:
//   pattern - substring to search for in the function name
//   module_ - optional module filter ('' means all modules)
pub fn (parser CodeParser) filter_functions_by_name(pattern string, module_ string = '') []code.Function {
	return parser.filter_functions(fn [pattern] (f code.Function) bool {
		return f.name.contains(pattern)
	}, module_)
}
|
||||
@@ -2,22 +2,7 @@ module codeparser
|
||||
|
||||
import incubaid.herolib.core.code
|
||||
|
||||
// SearchContext provides context for a found item: where (file, module and,
// when known, line) the find_* helpers located it.
pub struct SearchContext {
pub:
	file_path   string // path of the file containing the item
	module_name string // module the file belongs to
	line_number int // optional, 0 if unknown
}
|
||||
|
||||
// find_struct searches for a struct by name
|
||||
//
|
||||
// Args:
|
||||
// name string - struct name to find
|
||||
// module string - optional module filter
|
||||
// Returns:
|
||||
// Struct - if found
|
||||
// error - if not found
|
||||
pub fn (parser CodeParser) find_struct(name: string, module: string = '') !code.Struct {
|
||||
for _, parsed_file in parser.parsed_files {
|
||||
if module != '' && parsed_file.module_name != module {
|
||||
@@ -36,13 +21,6 @@ pub fn (parser CodeParser) find_struct(name: string, module: string = '') !code.
|
||||
}
|
||||
|
||||
// find_function searches for a function by name
|
||||
//
|
||||
// Args:
|
||||
// name string - function name to find
|
||||
// module string - optional module filter
|
||||
// Returns:
|
||||
// Function - if found
|
||||
// error - if not found
|
||||
pub fn (parser CodeParser) find_function(name: string, module: string = '') !code.Function {
|
||||
for _, parsed_file in parser.parsed_files {
|
||||
if module != '' && parsed_file.module_name != module {
|
||||
@@ -78,14 +56,6 @@ pub fn (parser CodeParser) find_interface(name: string, module: string = '') !co
|
||||
}
|
||||
|
||||
// find_method searches for a method on a struct
|
||||
//
|
||||
// Args:
|
||||
// struct_name string - name of the struct
|
||||
// method_name string - name of the method
|
||||
// module string - optional module filter
|
||||
// Returns:
|
||||
// Function - if found
|
||||
// error - if not found
|
||||
pub fn (parser CodeParser) find_method(struct_name: string, method_name: string, module: string = '') !code.Function {
|
||||
methods := parser.list_methods_on_struct(struct_name, module)
|
||||
|
||||
@@ -105,7 +75,7 @@ pub fn (parser CodeParser) find_module(module_name: string) !ParsedModule {
|
||||
}
|
||||
|
||||
file_paths := parser.modules[module_name]
|
||||
|
||||
|
||||
mut stats := ModuleStats{}
|
||||
for file_path in file_paths {
|
||||
if parsed_file := parser.parsed_files[file_path] {
|
||||
|
||||
87
lib/core/codeparser/getters.v
Normal file
87
lib/core/codeparser/getters.v
Normal file
@@ -0,0 +1,87 @@
|
||||
module codeparser
|
||||
|
||||
import incubaid.herolib.core.code
|
||||
|
||||
// list_modules returns a list of all parsed module names.
pub fn (parser CodeParser) list_modules() []string {
	// The modules map is keyed by module name, so its keys are the answer.
	module_names := parser.modules.keys()
	return module_names
}
|
||||
|
||||
// get_module_stats returns statistics (file, struct, function, const and
// interface counts) for a given module. Unknown modules yield zeroed stats.
pub fn (parser CodeParser) get_module_stats(module_name string) ModuleStats {
	mut stats := ModuleStats{}
	// Unknown module: nothing to count, return the zeroed stats.
	file_paths := parser.modules[module_name] or { return stats }
	stats.file_count = file_paths.len
	for file_path in file_paths {
		// Files listed under the module but not parsed are skipped.
		parsed_file := parser.parsed_files[file_path] or { continue }
		vfile := parsed_file.vfile
		stats.struct_count += vfile.structs().len
		stats.function_count += vfile.functions().len
		stats.const_count += vfile.consts.len
		stats.interface_count += vfile.interfaces().len
	}
	return stats
}
|
||||
|
||||
// get_parsed_file returns the parsed file for a given path, or none when the
// path was never parsed.
//
// The explicit `or { return none }` is required: a bare map index would yield
// a zero-value ParsedFile for a missing key instead of none.
pub fn (parser CodeParser) get_parsed_file(file_path string) ?ParsedFile {
	return parser.parsed_files[file_path] or { return none }
}
|
||||
|
||||
// all_structs returns all structs from all parsed files, in file order.
pub fn (p CodeParser) all_structs() []code.Struct {
	mut collected := []code.Struct{}
	for file in p.parsed_files.values() {
		collected << file.vfile.structs()
	}
	return collected
}
|
||||
|
||||
// all_functions returns all functions from all parsed files, in file order.
pub fn (p CodeParser) all_functions() []code.Function {
	mut collected := []code.Function{}
	for _, parsed in p.parsed_files {
		for func in parsed.vfile.functions() {
			collected << func
		}
	}
	return collected
}
|
||||
|
||||
// all_consts returns all constants from all parsed files, in file order.
pub fn (p CodeParser) all_consts() []code.Const {
	mut collected := []code.Const{}
	for file in p.parsed_files.values() {
		collected << file.vfile.consts
	}
	return collected
}
|
||||
|
||||
// all_imports returns a map of all unique imports across the parsed files.
// Keys are imported module paths; values are always true (set semantics).
pub fn (p CodeParser) all_imports() map[string]bool {
	mut seen := map[string]bool{}
	for _, parsed in p.parsed_files {
		for imp in parsed.vfile.imports {
			seen[imp.mod] = true
		}
	}
	return seen
}
|
||||
|
||||
// all_enums returns all enums from all parsed files, in file order.
pub fn (p CodeParser) all_enums() []code.Enum {
	mut collected := []code.Enum{}
	for file in p.parsed_files.values() {
		collected << file.vfile.enums()
	}
	return collected
}
|
||||
|
||||
// all_interfaces returns all interfaces from all parsed files, in file order.
pub fn (p CodeParser) all_interfaces() []code.Interface {
	mut collected := []code.Interface{}
	for _, parsed in p.parsed_files {
		for iface in parsed.vfile.interfaces() {
			collected << iface
		}
	}
	return collected
}
|
||||
@@ -71,10 +71,6 @@ pub fn (parser CodeParser) list_interfaces(module: string = '') []code.Interface
|
||||
}
|
||||
|
||||
// list_methods_on_struct returns all methods (receiver functions) for a struct
|
||||
//
|
||||
// Args:
|
||||
// struct_name string - name of the struct
|
||||
// module string - optional module filter
|
||||
pub fn (parser CodeParser) list_methods_on_struct(struct_name: string, module: string = '') []code.Function {
|
||||
mut methods := []code.Function{}
|
||||
|
||||
@@ -119,31 +115,4 @@ pub fn (parser CodeParser) list_constants(module: string = '') []code.Const {
|
||||
}
|
||||
|
||||
return consts
|
||||
}
|
||||
|
||||
// get_module_stats calculates statistics for a module: file count plus
// struct/function/const/interface totals over the module's parsed files.
//
// Note: V parameters are written `name type` (no colon), and `module` is a
// reserved keyword, hence the `module_` name.
pub fn (parser CodeParser) get_module_stats(module_ string) ModuleStats {
	mut stats := ModuleStats{}

	file_paths := parser.list_files_in_module(module_)
	stats.file_count = file_paths.len

	for _, parsed_file in parser.parsed_files {
		if parsed_file.module_name != module_ {
			continue
		}

		stats.struct_count += parsed_file.vfile.structs().len
		stats.function_count += parsed_file.vfile.functions().len
		stats.const_count += parsed_file.vfile.consts.len

		// Count interfaces by scanning the file's top-level items.
		for item in parsed_file.vfile.items {
			if item is code.Interface {
				stats.interface_count++
			}
		}
	}

	return stats
}
|
||||
22
lib/core/codeparser/testdata/functions.v
vendored
22
lib/core/codeparser/testdata/functions.v
vendored
@@ -16,12 +16,12 @@ pub fn create_user(email string, username string) !User {
|
||||
return error('username cannot be empty')
|
||||
}
|
||||
return User{
|
||||
id: 1
|
||||
email: email
|
||||
id: 1
|
||||
email: email
|
||||
username: username
|
||||
active: true
|
||||
created: time.now().str()
|
||||
updated: time.now().str()
|
||||
active: true
|
||||
created: time.now().str()
|
||||
updated: time.now().str()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,12 +31,12 @@ pub fn get_user(user_id int) ?User {
|
||||
return none
|
||||
}
|
||||
return User{
|
||||
id: user_id
|
||||
email: 'user_${user_id}@example.com'
|
||||
id: user_id
|
||||
email: 'user_${user_id}@example.com'
|
||||
username: 'user_${user_id}'
|
||||
active: true
|
||||
created: '2024-01-01'
|
||||
updated: '2024-01-01'
|
||||
active: true
|
||||
created: '2024-01-01'
|
||||
updated: '2024-01-01'
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,4 +61,4 @@ fn batch_create_users(emails []string) ![]User {
|
||||
users << user
|
||||
}
|
||||
return users
|
||||
}
|
||||
}
|
||||
|
||||
2
lib/core/codeparser/testdata/methods.v
vendored
2
lib/core/codeparser/testdata/methods.v
vendored
@@ -37,4 +37,4 @@ pub fn (mut u User) set_profile(mut profile Profile) ! {
|
||||
// get_profile_info returns profile information as string
|
||||
pub fn (p &Profile) get_profile_info() string {
|
||||
return 'Bio: ${p.bio}, Followers: ${p.followers}'
|
||||
}
|
||||
}
|
||||
|
||||
20
lib/core/codeparser/testdata/models.v
vendored
20
lib/core/codeparser/testdata/models.v
vendored
@@ -3,11 +3,9 @@ module testdata
|
||||
import time
|
||||
import os
|
||||
|
||||
const (
|
||||
app_version = '1.0.0'
|
||||
max_users = 1000
|
||||
default_timeout = 30
|
||||
)
|
||||
const app_version = '1.0.0'
|
||||
const max_users = 1000
|
||||
const default_timeout = 30
|
||||
|
||||
// User represents an application user
|
||||
// It stores all information related to a user
|
||||
@@ -18,9 +16,9 @@ pub:
|
||||
email string
|
||||
username string
|
||||
pub mut:
|
||||
active bool
|
||||
created string
|
||||
updated string
|
||||
active bool
|
||||
created string
|
||||
updated string
|
||||
}
|
||||
|
||||
// Profile represents user profile information
|
||||
@@ -33,7 +31,7 @@ mut:
|
||||
followers int
|
||||
following int
|
||||
pub mut:
|
||||
verified bool
|
||||
verified bool
|
||||
}
|
||||
|
||||
// Settings represents user settings
|
||||
@@ -46,6 +44,6 @@ mut:
|
||||
}
|
||||
|
||||
struct InternalConfig {
|
||||
debug bool
|
||||
debug bool
|
||||
log_level int
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,4 +33,4 @@ pub fn (c &Cache) get(key string) ?string {
|
||||
// clear removes all items from cache
|
||||
pub fn (mut c Cache) clear() {
|
||||
c.items.clear()
|
||||
}
|
||||
}
|
||||
|
||||
2
lib/core/codeparser/testdata/utils/helpers.v
vendored
2
lib/core/codeparser/testdata/utils/helpers.v
vendored
@@ -41,4 +41,4 @@ fn truncate_string(text string, max_len int) string {
|
||||
return text[..max_len]
|
||||
}
|
||||
return text
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,4 +23,4 @@ pub fn is_alphanumeric(text string) bool {
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ pub mut:
|
||||
|
||||
pub fn new(args CoordinatorArgs) !Coordinator {
|
||||
ai := args.ai
|
||||
|
||||
|
||||
return Coordinator{
|
||||
name: args.name
|
||||
logger: logger.new(path: '/tmp/flowlogger')!
|
||||
@@ -42,7 +42,6 @@ pub fn new(args CoordinatorArgs) !Coordinator {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@[params]
|
||||
pub struct StepNewArgs {
|
||||
pub mut:
|
||||
|
||||
@@ -9,7 +9,7 @@ pub fn (mut c Coordinator) run() ! {
|
||||
}
|
||||
|
||||
// Run a single step, including error and next steps
|
||||
pub fn (mut c Coordinator) run_step(mut step &Step) ! {
|
||||
pub fn (mut c Coordinator) run_step(mut step Step) ! {
|
||||
// Initialize step
|
||||
step.status = .running
|
||||
step.started_at = ostime.now().unix_milli()
|
||||
@@ -17,8 +17,8 @@ pub fn (mut c Coordinator) run_step(mut step &Step) ! {
|
||||
|
||||
// Log step start
|
||||
step.log(
|
||||
logtype: .stdout
|
||||
log: 'Step "${step.name}" started'
|
||||
logtype: .stdout
|
||||
log: 'Step "${step.name}" started'
|
||||
)!
|
||||
|
||||
// Execute main step function
|
||||
@@ -30,8 +30,8 @@ pub fn (mut c Coordinator) run_step(mut step &Step) ! {
|
||||
step.store_redis()!
|
||||
|
||||
step.log(
|
||||
logtype: .error
|
||||
log: 'Step "${step.name}" failed: ${err.msg()}'
|
||||
logtype: .error
|
||||
log: 'Step "${step.name}" failed: ${err.msg()}'
|
||||
)!
|
||||
|
||||
// Run error steps if any
|
||||
@@ -53,8 +53,8 @@ pub fn (mut c Coordinator) run_step(mut step &Step) ! {
|
||||
step.store_redis()!
|
||||
|
||||
step.log(
|
||||
logtype: .stdout
|
||||
log: 'Step "${step.name}" completed successfully'
|
||||
logtype: .stdout
|
||||
log: 'Step "${step.name}" completed successfully'
|
||||
)!
|
||||
|
||||
// Run next steps if any
|
||||
|
||||
@@ -16,7 +16,7 @@ pub enum StepStatus {
|
||||
pub struct Step {
|
||||
pub mut:
|
||||
status StepStatus = .pending
|
||||
started_at i64 // Unix timestamp
|
||||
started_at i64 // Unix timestamp
|
||||
finished_at i64
|
||||
error_msg string
|
||||
name string
|
||||
@@ -44,11 +44,10 @@ pub fn (mut s Step) log(l logger.LogItemArgs) ! {
|
||||
s.logs << l2
|
||||
}
|
||||
|
||||
|
||||
pub fn (mut s Step) store_redis() ! {
|
||||
if mut redis := s.coordinator.redis {
|
||||
key := 'flow:${s.coordinator.name}:${s.name}'
|
||||
|
||||
|
||||
redis.hset(key, 'name', s.name)!
|
||||
redis.hset(key, 'description', s.description)!
|
||||
redis.hset(key, 'status', s.status.str())!
|
||||
@@ -57,13 +56,12 @@ pub fn (mut s Step) store_redis() ! {
|
||||
redis.hset(key, 'started_at', s.started_at.str())!
|
||||
redis.hset(key, 'finished_at', s.finished_at.str())!
|
||||
redis.hset(key, 'json', s.to_json()!)!
|
||||
|
||||
|
||||
// Set expiration to 24 hours
|
||||
redis.expire(key, 86400)!
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@[json: id]
|
||||
pub struct StepJSON {
|
||||
pub:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
module flow
|
||||
module flow
|
||||
|
||||
// Flow represents a signing flow
|
||||
@[heap]
|
||||
|
||||
Reference in New Issue
Block a user