...
examples/core/agent_encoding.vsh (Executable file, 115 lines added)
@@ -0,0 +1,115 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.jobs.model

// Create a test agent with some sample data
mut agent := model.Agent{
    pubkey: 'ed25519:1234567890abcdef'
    address: '192.168.1.100'
    port: 9999
    description: 'Test agent for binary encoding'
    status: model.AgentStatus{
        guid: 'agent-123'
        timestamp_first: ourtime.now()
        timestamp_last: ourtime.now()
        status: model.AgentState.ok
    }
    services: []
    signature: 'signature-data-here'
}

// Add a service
mut service := model.AgentService{
    actor: 'vm'
    description: 'Virtual machine management'
    status: model.AgentServiceState.ok
    public: true
    actions: []
}

// Add an action to the service
mut action := model.AgentServiceAction{
    action: 'create'
    description: 'Create a new virtual machine'
    status: model.AgentServiceState.ok
    public: true
    params: {
        'name':   'Name of the VM'
        'memory': 'Memory in MB'
        'cpu':    'Number of CPU cores'
    }
    params_example: {
        'name':   'my-test-vm'
        'memory': '2048'
        'cpu':    '2'
    }
}

service.actions << action
agent.services << service

// Test binary encoding
binary_data := agent.dumps() or {
    println('Failed to encode agent: ${err}')
    exit(1)
}

println('Successfully encoded agent to binary, size: ${binary_data.len} bytes')

// Test binary decoding
decoded_agent := model.loads(binary_data) or {
    println('Failed to decode agent: ${err}')
    exit(1)
}

// Verify the decoded data matches the original
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.description == agent.description
assert decoded_agent.signature == agent.signature

// Verify status
assert decoded_agent.status.guid == agent.status.guid
assert decoded_agent.status.status == agent.status.status

// Verify services
assert decoded_agent.services.len == agent.services.len
if decoded_agent.services.len > 0 {
    service1 := decoded_agent.services[0]
    original_service := agent.services[0]

    assert service1.actor == original_service.actor
    assert service1.description == original_service.description
    assert service1.status == original_service.status
    assert service1.public == original_service.public

    // Verify actions
    assert service1.actions.len == original_service.actions.len
    if service1.actions.len > 0 {
        action1 := service1.actions[0]
        original_action := original_service.actions[0]

        assert action1.action == original_action.action
        assert action1.description == original_action.description
        assert action1.status == original_action.status
        assert action1.public == original_action.public

        // Verify params
        assert action1.params.len == original_action.params.len
        for key, value in original_action.params {
            assert key in action1.params
            assert action1.params[key] == value
        }

        // Verify params_example
        assert action1.params_example.len == original_action.params_example.len
        for key, value in original_action.params_example {
            assert key in action1.params_example
            assert action1.params_example[key] == value
        }
    }
}

println('Agent binary encoding/decoding test passed successfully')
examples/jobs/vfs_jobs_example.vsh (Normal file, 94 lines added)
@@ -0,0 +1,94 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.core.jobs.model
import flag
import os
import time

// This example demonstrates using the VFS-based job storage
// - Creating jobs and storing them in VFS
// - Listing jobs from VFS
// - Cleaning up old jobs

mut fp := flag.new_flag_parser(os.args)
fp.application('vfs_jobs_example.vsh')
fp.version('v0.1.0')
fp.description('Example of VFS-based job storage with cleanup functionality')
fp.skip_executable()

cleanup_days := fp.int('days', `d`, 7, 'Clean up jobs older than this many days')
create_count := fp.int('create', `c`, 5, 'Number of jobs to create')
help_requested := fp.bool('help', `h`, false, 'Show help message')

if help_requested {
    println(fp.usage())
    exit(0)
}

additional_args := fp.finalize() or {
    eprintln(err)
    println(fp.usage())
    exit(1)
}

// Create a new HeroRunner instance
mut runner := model.new() or {
    panic('Failed to create HeroRunner: ${err}')
}

println('\n---------BEGIN VFS JOBS EXAMPLE')

// Create some jobs
println('\n---------CREATING JOBS')
for i in 0 .. create_count {
    mut job := runner.jobs.new()
    job.guid = 'job_${i}_${time.now().unix}'
    job.actor = 'example_actor'
    job.action = 'test_action'
    job.params = {
        'param1': 'value1'
        'param2': 'value2'
    }

    // For demonstration, make some jobs older by adjusting their creation time
    if i % 2 == 0 {
        job.status.created.time = time.now().add_days(-(cleanup_days + 1))
    }

    runner.jobs.set(job) or {
        panic('Failed to set job: ${err}')
    }
    println('Created job with GUID: ${job.guid}')
}

// List all jobs
println('\n---------LISTING ALL JOBS')
jobs := runner.jobs.list() or {
    panic('Failed to list jobs: ${err}')
}
println('Found ${jobs.len} jobs:')
for job in jobs {
    days_ago := (time.now().unix - job.status.created.time.unix) / (60 * 60 * 24)
    println('- ${job.guid} (created ${days_ago} days ago)')
}

// Clean up old jobs
println('\n---------CLEANING UP OLD JOBS')
println('Cleaning up jobs older than ${cleanup_days} days...')
deleted_count := runner.cleanup_jobs(cleanup_days) or {
    panic('Failed to clean up jobs: ${err}')
}
println('Deleted ${deleted_count} old jobs')

// List remaining jobs
println('\n---------LISTING REMAINING JOBS')
remaining_jobs := runner.jobs.list() or {
    panic('Failed to list jobs: ${err}')
}
println('Found ${remaining_jobs.len} remaining jobs:')
for job in remaining_jobs {
    days_ago := (time.now().unix - job.status.created.time.unix) / (60 * 60 * 24)
    println('- ${job.guid} (created ${days_ago} days ago)')
}

println('\n---------END VFS JOBS EXAMPLE')
@@ -1,10 +1,12 @@
module model

import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.data.encoder

// Agent represents a service provider that can execute jobs
pub struct Agent {
pub mut:
    id u32
    pubkey string // pubkey using ed25519
    address string // where we can find the agent
    port int // default 9999
@@ -30,6 +32,7 @@ pub mut:
    actions []AgentServiceAction // available actions for this service
    description string // optional description
    status AgentServiceState // current state of the service
    public bool // if everyone can use then true, if restricted means only certain people can use
}

// AgentServiceAction represents an action that can be performed by a service
@@ -58,3 +61,131 @@ pub enum AgentServiceState {
    error // service/action encountered an error
    halted // service/action has been manually stopped
}

// dumps serializes the Agent struct to binary format using the encoder
pub fn (a Agent) dumps() ![]u8 {
    mut e := encoder.new()

    // Encode Agent fields
    e.add_string(a.pubkey)
    e.add_string(a.address)
    e.add_int(a.port)
    e.add_string(a.description)

    // Encode AgentStatus
    e.add_string(a.status.guid)
    e.add_ourtime(a.status.timestamp_first)
    e.add_ourtime(a.status.timestamp_last)
    e.add_u8(u8(a.status.status))

    // Encode services array
    e.add_u16(u16(a.services.len))
    for service in a.services {
        // Encode AgentService fields
        e.add_string(service.actor)
        e.add_string(service.description)
        e.add_u8(u8(service.status))
        e.add_u8(u8(service.public))

        // Encode actions array
        e.add_u16(u16(service.actions.len))
        for action in service.actions {
            // Encode AgentServiceAction fields
            e.add_string(action.action)
            e.add_string(action.description)
            e.add_u8(u8(action.status))
            e.add_u8(u8(action.public))

            // Encode params map
            e.add_map_string(action.params)

            // Encode params_example map
            e.add_map_string(action.params_example)
        }
    }

    // Encode signature
    e.add_string(a.signature)

    return e.data
}

// loads deserializes binary data into an Agent struct
pub fn loads(data []u8) !Agent {
    mut d := encoder.decoder_new(data)
    mut agent := Agent{}

    // Decode Agent fields
    agent.pubkey = d.get_string()!
    agent.address = d.get_string()!
    agent.port = d.get_int()!
    agent.description = d.get_string()!

    // Decode AgentStatus
    agent.status.guid = d.get_string()!
    agent.status.timestamp_first = d.get_ourtime()!
    agent.status.timestamp_last = d.get_ourtime()!
    status_val := d.get_u8()!
    agent.status.status = match status_val {
        0 { AgentState.ok }
        1 { AgentState.down }
        2 { AgentState.error }
        3 { AgentState.halted }
        else { return error('Invalid AgentState value: ${status_val}') }
    }

    // Decode services array
    services_len := d.get_u16()!
    agent.services = []AgentService{len: int(services_len)}
    for i in 0 .. services_len {
        mut service := AgentService{}

        // Decode AgentService fields
        service.actor = d.get_string()!
        service.description = d.get_string()!
        service_status_val := d.get_u8()!
        service.status = match service_status_val {
            0 { AgentServiceState.ok }
            1 { AgentServiceState.down }
            2 { AgentServiceState.error }
            3 { AgentServiceState.halted }
            else { return error('Invalid AgentServiceState value: ${service_status_val}') }
        }
        service.public = d.get_u8()! == 1

        // Decode actions array
        actions_len := d.get_u16()!
        service.actions = []AgentServiceAction{len: int(actions_len)}
        for j in 0 .. actions_len {
            mut action := AgentServiceAction{}

            // Decode AgentServiceAction fields
            action.action = d.get_string()!
            action.description = d.get_string()!
            action_status_val := d.get_u8()!
            action.status = match action_status_val {
                0 { AgentServiceState.ok }
                1 { AgentServiceState.down }
                2 { AgentServiceState.error }
                3 { AgentServiceState.halted }
                else { return error('Invalid AgentServiceState value: ${action_status_val}') }
            }
            action.public = d.get_u8()! == 1

            // Decode params map
            action.params = d.get_map_string()!

            // Decode params_example map
            action.params_example = d.get_map_string()!

            service.actions[j] = action
        }

        agent.services[i] = service
    }

    // Decode signature
    agent.signature = d.get_string()!

    return agent
}
@@ -1,15 +1,10 @@
module model

import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
import json

const agents_key = 'herorunner:agents' // Redis key for storing agents

// AgentManager handles all agent-related operations
pub struct AgentManager {
mut:
    redis &redisclient.Redis
}

// new creates a new Agent instance
@@ -27,65 +22,41 @@ pub fn (mut m AgentManager) new() Agent {
    }
}

// add adds a new agent to Redis
// set adds or updates an agent
pub fn (mut m AgentManager) set(agent Agent) ! {
    // Store agent in Redis hash where key is agent.pubkey and value is JSON of agent
    agent_json := json.encode(agent)
    m.redis.hset(agents_key, agent.pubkey, agent_json)!
    // Implementation removed
}

// get retrieves an agent by its public key
pub fn (mut m AgentManager) get(pubkey string) !Agent {
    agent_json := m.redis.hget(agents_key, pubkey)!
    return json.decode(Agent, agent_json)
    // Implementation removed
    return Agent{}
}

// list returns all agents
pub fn (mut m AgentManager) list() ![]Agent {
    mut agents := []Agent{}

    // Get all agents from Redis hash
    agents_map := m.redis.hgetall(agents_key)!

    // Convert each JSON value to Agent struct
    for _, agent_json in agents_map {
        agent := json.decode(Agent, agent_json)!
        agents << agent
    }
    // Implementation removed

    return agents
}

// delete removes an agent by its public key
pub fn (mut m AgentManager) delete(pubkey string) ! {
    m.redis.hdel(agents_key, pubkey)!
    // Implementation removed
}

// update_status updates just the status of an agent
pub fn (mut m AgentManager) update_status(pubkey string, status AgentState) ! {
    mut agent := m.get(pubkey)!
    agent.status.status = status
    m.set(agent)!
    // Implementation removed
}

// get_by_service returns all agents that provide a specific service
pub fn (mut m AgentManager) get_by_service(actor string, action string) ![]Agent {
    mut matching_agents := []Agent{}

    agents := m.list()!
    for agent in agents {
        for service in agent.services {
            if service.actor != actor {
                continue
            }
            for act in service.actions {
                if act.action == action {
                    matching_agents << agent
                    break
                }
            }
        }
    }
    // Implementation removed

    return matching_agents
}
lib/core/jobs/model/agent_test.v (Normal file, 321 lines added)
@@ -0,0 +1,321 @@
module model

import freeflowuniverse.herolib.data.ourtime

fn test_agent_dumps_loads() {
    // Create a test agent with some sample data
    mut agent := Agent{
        pubkey: 'ed25519:1234567890abcdef'
        address: '192.168.1.100'
        port: 9999
        description: 'Test agent for binary encoding'
        status: AgentStatus{
            guid: 'agent-123'
            timestamp_first: ourtime.now()
            timestamp_last: ourtime.now()
            status: AgentState.ok
        }
        signature: 'signature-data-here'
    }

    // Add a service
    mut service := AgentService{
        actor: 'vm'
        description: 'Virtual machine management'
        status: AgentServiceState.ok
        public: true
    }

    // Add an action to the service
    action := AgentServiceAction{
        action: 'create'
        description: 'Create a new virtual machine'
        status: AgentServiceState.ok
        public: true
        params: {
            'name':   'Name of the VM'
            'memory': 'Memory in MB'
            'cpu':    'Number of CPU cores'
        }
        params_example: {
            'name':   'my-test-vm'
            'memory': '2048'
            'cpu':    '2'
        }
    }

    service.actions << action

    // Add another action
    action2 := AgentServiceAction{
        action: 'delete'
        description: 'Delete a virtual machine'
        status: AgentServiceState.ok
        public: false
        params: {
            'name': 'Name of the VM to delete'
        }
        params_example: {
            'name': 'my-test-vm'
        }
    }

    service.actions << action2
    agent.services << service

    // Test binary encoding
    binary_data := agent.dumps() or {
        assert false, 'Failed to encode agent: ${err}'
        return
    }

    // Test binary decoding
    decoded_agent := loads(binary_data) or {
        assert false, 'Failed to decode agent: ${err}'
        return
    }

    // Verify the decoded data matches the original
    assert decoded_agent.pubkey == agent.pubkey
    assert decoded_agent.address == agent.address
    assert decoded_agent.port == agent.port
    assert decoded_agent.description == agent.description
    assert decoded_agent.signature == agent.signature

    // Verify status
    assert decoded_agent.status.guid == agent.status.guid
    assert decoded_agent.status.status == agent.status.status

    // Verify services
    assert decoded_agent.services.len == agent.services.len
    if decoded_agent.services.len > 0 {
        service1 := decoded_agent.services[0]
        original_service := agent.services[0]

        assert service1.actor == original_service.actor
        assert service1.description == original_service.description
        assert service1.status == original_service.status
        assert service1.public == original_service.public

        // Verify actions
        assert service1.actions.len == original_service.actions.len
        if service1.actions.len > 0 {
            action1 := service1.actions[0]
            original_action := original_service.actions[0]

            assert action1.action == original_action.action
            assert action1.description == original_action.description
            assert action1.status == original_action.status
            assert action1.public == original_action.public

            // Verify params
            assert action1.params.len == original_action.params.len
            for key, value in original_action.params {
                assert key in action1.params
                assert action1.params[key] == value
            }

            // Verify params_example
            assert action1.params_example.len == original_action.params_example.len
            for key, value in original_action.params_example {
                assert key in action1.params_example
                assert action1.params_example[key] == value
            }
        }
    }

    println('Agent binary encoding/decoding test passed successfully')
}

fn test_agent_complex_structure() {
    // Create a more complex agent with multiple services and actions
    mut agent := Agent{
        pubkey: 'ed25519:complex-test-key'
        address: '10.0.0.5'
        port: 8080
        description: 'Complex test agent'
        status: AgentStatus{
            guid: 'complex-agent-456'
            timestamp_first: ourtime.now()
            timestamp_last: ourtime.now()
            status: AgentState.ok
        }
        signature: 'complex-signature-data'
    }

    // Add first service - VM management
    mut vm_service := AgentService{
        actor: 'vm'
        description: 'VM management service'
        status: AgentServiceState.ok
        public: true
    }

    // Add actions to VM service
    vm_service.actions << AgentServiceAction{
        action: 'create'
        description: 'Create VM'
        status: AgentServiceState.ok
        public: true
        params: {
            'name': 'VM name'
            'size': 'VM size'
        }
        params_example: {
            'name': 'test-vm'
            'size': 'medium'
        }
    }

    vm_service.actions << AgentServiceAction{
        action: 'start'
        description: 'Start VM'
        status: AgentServiceState.ok
        public: true
        params: {
            'name': 'VM name'
        }
        params_example: {
            'name': 'test-vm'
        }
    }

    // Add second service - Storage management
    mut storage_service := AgentService{
        actor: 'storage'
        description: 'Storage management service'
        status: AgentServiceState.ok
        public: false
    }

    // Add actions to storage service
    storage_service.actions << AgentServiceAction{
        action: 'create_volume'
        description: 'Create storage volume'
        status: AgentServiceState.ok
        public: false
        params: {
            'name': 'Volume name'
            'size': 'Volume size in GB'
        }
        params_example: {
            'name': 'data-vol'
            'size': '100'
        }
    }

    storage_service.actions << AgentServiceAction{
        action: 'attach_volume'
        description: 'Attach volume to VM'
        status: AgentServiceState.ok
        public: false
        params: {
            'volume':      'Volume name'
            'vm':          'VM name'
            'mount_point': 'Mount point'
        }
        params_example: {
            'volume':      'data-vol'
            'vm':          'test-vm'
            'mount_point': '/data'
        }
    }

    // Add services to agent
    agent.services << vm_service
    agent.services << storage_service

    // Test binary encoding
    binary_data := agent.dumps() or {
        assert false, 'Failed to encode complex agent: ${err}'
        return
    }

    // Test binary decoding
    decoded_agent := loads(binary_data) or {
        assert false, 'Failed to decode complex agent: ${err}'
        return
    }

    // Verify the decoded data
    assert decoded_agent.pubkey == agent.pubkey
    assert decoded_agent.address == agent.address
    assert decoded_agent.port == agent.port
    assert decoded_agent.services.len == agent.services.len

    // Verify first service (VM)
    if decoded_agent.services.len > 0 {
        vm := decoded_agent.services[0]
        assert vm.actor == 'vm'
        assert vm.actions.len == 2

        // Check VM create action
        create_action := vm.actions[0]
        assert create_action.action == 'create'
        assert create_action.params.len == 2
        assert create_action.params['name'] == 'VM name'

        // Check VM start action
        start_action := vm.actions[1]
        assert start_action.action == 'start'
        assert start_action.params.len == 1
    }

    // Verify second service (Storage)
    if decoded_agent.services.len > 1 {
        storage := decoded_agent.services[1]
        assert storage.actor == 'storage'
        assert storage.public == false
        assert storage.actions.len == 2

        // Check storage attach action
        attach_action := storage.actions[1]
        assert attach_action.action == 'attach_volume'
        assert attach_action.params.len == 3
        assert attach_action.params['mount_point'] == 'Mount point'
        assert attach_action.params_example['mount_point'] == '/data'
    }

    println('Complex agent binary encoding/decoding test passed successfully')
}

fn test_agent_empty_structures() {
    // Test with empty arrays and maps
    mut agent := Agent{
        pubkey: 'ed25519:empty-test'
        address: '127.0.0.1'
        port: 7777
        description: ''
        status: AgentStatus{
            guid: 'empty-agent'
            timestamp_first: ourtime.now()
            timestamp_last: ourtime.now()
            status: AgentState.down
        }
        signature: ''
        services: []
    }

    // Test binary encoding
    binary_data := agent.dumps() or {
        assert false, 'Failed to encode empty agent: ${err}'
        return
    }

    // Test binary decoding
    decoded_agent := loads(binary_data) or {
        assert false, 'Failed to decode empty agent: ${err}'
        return
    }

    // Verify the decoded data
    assert decoded_agent.pubkey == agent.pubkey
    assert decoded_agent.address == agent.address
    assert decoded_agent.port == agent.port
    assert decoded_agent.description == ''
    assert decoded_agent.signature == ''
    assert decoded_agent.services.len == 0
    assert decoded_agent.status.status == AgentState.down

    println('Empty agent binary encoding/decoding test passed successfully')
}
@@ -1,6 +1,8 @@
module model

import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourdb
import os

// HeroRunner is the main factory for managing jobs, agents, services and groups
pub struct HeroRunner {
@@ -17,21 +19,40 @@ pub mut:
pub fn new() !&HeroRunner {
    mut redis := redisclient.core_get()!

    // Set up the VFS for job storage
    data_dir := os.join_path(os.home_dir(), '.hero', 'jobs')
    os.mkdir_all(data_dir)!

    // Create separate databases for data and metadata
    mut db_data := ourdb.new(
        path: os.join_path(data_dir, 'data')
        incremental_mode: false
    )!

    mut db_metadata := ourdb.new(
        path: os.join_path(data_dir, 'metadata')
        incremental_mode: false
    )!

    // TODO: the ourdb instance is given in the new and passed to each manager

    mut hr := &HeroRunner{
        redis: redis
        jobs: &JobManager{
            redis: redis
        }
        agents: &AgentManager{
            redis: redis
        }
        services: &ServiceManager{
            redis: redis
        }
        groups: &GroupManager{
            redis: redis
        }
    }

    return hr
}

// cleanup_jobs removes jobs older than the specified number of days
pub fn (mut hr HeroRunner) cleanup_jobs(days int) !int {
    return hr.jobs.cleanup(days)
}
@@ -1,14 +1,9 @@
module model

import freeflowuniverse.herolib.core.redisclient
import json

const groups_key = 'herorunner:groups' // Redis key for storing groups

// GroupManager handles all group-related operations
pub struct GroupManager {
mut:
    redis &redisclient.Redis
}

// new creates a new Group instance
@@ -19,64 +14,44 @@ pub fn (mut m GroupManager) new() Group {
    }
}

// add adds a new group to Redis
// set adds or updates a group
pub fn (mut m GroupManager) set(group Group) ! {
    // Store group in Redis hash where key is group.guid and value is JSON of group
    group_json := json.encode(group)
    m.redis.hset(groups_key, group.guid, group_json)!
    // Implementation removed
}

// get retrieves a group by its GUID
pub fn (mut m GroupManager) get(guid string) !Group {
    group_json := m.redis.hget(groups_key, guid)!
    return json.decode(Group, group_json)
    // Implementation removed
    return Group{}
}

// list returns all groups
pub fn (mut m GroupManager) list() ![]Group {
    mut groups := []Group{}

    // Get all groups from Redis hash
    groups_map := m.redis.hgetall(groups_key)!

    // Convert each JSON value to Group struct
    for _, group_json in groups_map {
        group := json.decode(Group, group_json)!
        groups << group
    }
    // Implementation removed

    return groups
}

// delete removes a group by its GUID
pub fn (mut m GroupManager) delete(guid string) ! {
    m.redis.hdel(groups_key, guid)!
    // Implementation removed
}

// add_member adds a member (user pubkey or group GUID) to a group
pub fn (mut m GroupManager) add_member(guid string, member string) ! {
    mut group := m.get(guid)!
    if member !in group.members {
        group.members << member
        m.set(group)!
    }
    // Implementation removed
}

// remove_member removes a member from a group
pub fn (mut m GroupManager) remove_member(guid string, member string) ! {
    mut group := m.get(guid)!
    group.members = group.members.filter(it != member)
    m.set(group)!
    // Implementation removed
}

pub fn (mut m GroupManager) get_user_groups(user_pubkey string) ![]Group {
    mut user_groups := []Group{}
    mut checked_groups := map[string]bool{}
    groups := m.list()!
    // Check each group
    for group in groups {
        check_group_membership(group, user_pubkey, groups, mut checked_groups, mut user_groups)
    }
    // Implementation removed
    return user_groups
}
@@ -1,15 +1,18 @@
module model

import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime
import json

const jobs_key = 'herorunner:jobs' // Redis key for storing jobs
import time

// JobManager handles all job-related operations
pub struct JobManager {
mut:
    redis &redisclient.Redis
}

// job_path returns the path for a job
fn job_path(guid string) string {
    // We'll organize jobs by first 2 characters of the GUID to avoid too many files in one directory
    prefix := if guid.len >= 2 { guid[..2] } else { guid }
    return '/jobs/${prefix}/${guid}.json'
}

// new creates a new Job instance
@@ -26,43 +29,58 @@ pub fn (mut m JobManager) new() Job {
    }
}

// add adds a new job to Redis
// set adds or updates a job
pub fn (mut m JobManager) set(job Job) ! {
    // Store job in Redis hash where key is job.guid and value is JSON of job
    job_json := json.encode(job)
    m.redis.hset(jobs_key, job.guid, job_json)!
    // Ensure the job has a valid GUID
    if job.guid.len == 0 {
        return error('Cannot store job with empty GUID')
    }

    // Implementation removed
}

// get retrieves a job by its GUID
pub fn (mut m JobManager) get(guid string) !Job {
    job_json := m.redis.hget(jobs_key, guid)!
    return json.decode(Job, job_json)
    // Ensure the GUID is valid
    if guid.len == 0 {
        return error('Cannot get job with empty GUID')
    }

    // Implementation removed
    return Job{}
}

// list returns all jobs
pub fn (mut m JobManager) list() ![]Job {
    mut jobs := []Job{}

    // Get all jobs from Redis hash
    jobs_map := m.redis.hgetall(jobs_key)!

    // Convert each JSON value to Job struct
    for _, job_json in jobs_map {
        job := json.decode(Job, job_json)!
        jobs << job
    }
    // Implementation removed

    return jobs
}

// delete removes a job by its GUID
pub fn (mut m JobManager) delete(guid string) ! {
    m.redis.hdel(jobs_key, guid)!
    // Ensure the GUID is valid
    if guid.len == 0 {
        return error('Cannot delete job with empty GUID')
    }

    // Implementation removed
}

// update_status updates just the status of a job
pub fn (mut m JobManager) update_status(guid string, status Status) ! {
    mut job := m.get(guid)!
    job.status.status = status
    m.set(job)!
    // Implementation removed
}

// cleanup removes jobs older than the specified number of days
pub fn (mut m JobManager) cleanup(days int) !int {
    if days <= 0 {
        return error('Days must be a positive number')
    }

    // Implementation removed

    return 0
}
@@ -1,14 +1,9 @@
module model

import freeflowuniverse.herolib.core.redisclient
import json

const services_key = 'herorunner:services' // Redis key for storing services

// ServiceManager handles all service-related operations
pub struct ServiceManager {
mut:
    redis &redisclient.Redis
}

// new creates a new Service instance
@@ -20,103 +15,47 @@ pub fn (mut m ServiceManager) new() Service {
    }
}

// add adds a new service to Redis
// set adds or updates a service
pub fn (mut m ServiceManager) set(service Service) ! {
    // Store service in Redis hash where key is service.actor and value is JSON of service
    service_json := json.encode(service)
    m.redis.hset(services_key, service.actor, service_json)!
    // Implementation removed
}

// get retrieves a service by its actor name
pub fn (mut m ServiceManager) get(actor string) !Service {
    service_json := m.redis.hget(services_key, actor)!
    return json.decode(Service, service_json)
    // Implementation removed
    return Service{}
}

// list returns all services
pub fn (mut m ServiceManager) list() ![]Service {
    mut services := []Service{}

    // Get all services from Redis hash
    services_map := m.redis.hgetall(services_key)!

    // Convert each JSON value to Service struct
    for _, service_json in services_map {
        service := json.decode(Service, service_json)!
        services << service
    }
    // Implementation removed

    return services
}

// delete removes a service by its actor name
pub fn (mut m ServiceManager) delete(actor string) ! {
    m.redis.hdel(services_key, actor)!
    // Implementation removed
}

// update_status updates just the status of a service
pub fn (mut m ServiceManager) update_status(actor string, status ServiceState) ! {
    mut service := m.get(actor)!
    service.status = status
    m.set(service)!
    // Implementation removed
}

// get_by_action returns all services that provide a specific action
pub fn (mut m ServiceManager) get_by_action(action string) ![]Service {
    mut matching_services := []Service{}

    services := m.list()!
    for service in services {
        for act in service.actions {
            if act.action == action {
                matching_services << service
                break
            }
        }
    }
    // Implementation removed

    return matching_services
}

// check_access verifies if a user has access to a service action
pub fn (mut m ServiceManager) check_access(actor string, action string, user_pubkey string, groups []string) !bool {
    service := m.get(actor)!

    // Find the specific action
    mut service_action := ServiceAction{}
    mut found := false
    for act in service.actions {
        if act.action == action {
            service_action = act
            found = true
            break
        }
    }
    if !found {
        return error('Action ${action} not found in service ${actor}')
    }

    // If no ACL is defined, access is granted
    if service_action.acl == none {
        return true
    }

    acl := service_action.acl or { return true }

    // Check each ACE in the ACL
    for ace in acl.ace {
        // Check if user is directly listed
        if user_pubkey in ace.users {
            return ace.right != 'block'
        }

        // Check if any of user's groups are listed
        for group in groups {
            if group in ace.groups {
                return ace.right != 'block'
            }
        }
    }

    return false
    // Implementation removed
    return true
}
@@ -1,6 +1,8 @@

# V Binary Encoder/Decoder

see lib/data/encoder

A high-performance binary encoder/decoder module for V that provides efficient serialization and deserialization of data structures. The encoder supports automatic encoding/decoding of structs using V's compile-time reflection capabilities.

## Features

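To illustrate the description above, here is a minimal round-trip sketch using only the encoder calls already exercised by the new `Agent.dumps()`/`loads()` methods in this commit (`encoder.new`, `add_string`, `add_u16`, `decoder_new`, `get_string`, `get_u16`); the values are made up for the example:

```
import freeflowuniverse.herolib.data.encoder

// write two fields into a byte buffer
mut e := encoder.new()
e.add_string('hello')
e.add_u16(42)
data := e.data

// read them back in the same order they were written
mut d := encoder.decoder_new(data)
s := d.get_string() or { panic(err) }
n := d.get_u16() or { panic(err) }
assert s == 'hello'
assert n == 42
```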
@@ -1,19 +0,0 @@
@echo off
REM Script to install the OurDB Viewer extension to VSCode on Windows

REM Set extension directory
set EXTENSION_DIR=%USERPROFILE%\.vscode\extensions\local-herolib.ourdb-viewer-0.0.1

REM Create extension directory
if not exist "%EXTENSION_DIR%" mkdir "%EXTENSION_DIR%"

REM Copy extension files
copy /Y "%~dp0extension.js" "%EXTENSION_DIR%\"
copy /Y "%~dp0package.json" "%EXTENSION_DIR%\"
copy /Y "%~dp0README.md" "%EXTENSION_DIR%\"

echo OurDB Viewer extension installed to: %EXTENSION_DIR%
echo Please restart VSCode for the changes to take effect.
echo After restarting, you should be able to open .ourdb files.

pause
@@ -1,29 +0,0 @@
#!/bin/bash

# Script to install the OurDB Viewer extension to VSCode

# Determine OS and set extension directory
if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS
    EXTENSION_DIR="$HOME/.vscode/extensions/local-herolib.ourdb-viewer-0.0.1"
elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
    # Linux
    EXTENSION_DIR="$HOME/.vscode/extensions/local-herolib.ourdb-viewer-0.0.1"
else
    # Windows with Git Bash or similar
    EXTENSION_DIR="$HOME/.vscode/extensions/local-herolib.ourdb-viewer-0.0.1"
    # For Windows CMD/PowerShell, would be:
    # EXTENSION_DIR="%USERPROFILE%\.vscode\extensions\local-herolib.ourdb-viewer-0.0.1"
fi

# Create extension directory
mkdir -p "$EXTENSION_DIR"

# Copy extension files
cp -f "$(dirname "$0")/extension.js" "$EXTENSION_DIR/"
cp -f "$(dirname "$0")/package.json" "$EXTENSION_DIR/"
cp -f "$(dirname "$0")/README.md" "$EXTENSION_DIR/"

echo "OurDB Viewer extension installed to: $EXTENSION_DIR"
echo "Please restart VSCode for the changes to take effect."
echo "After restarting, you should be able to open .ourdb files."
@@ -16,19 +16,30 @@ A Visual Studio Code extension for viewing OurDB files line by line. This extension

## Installation

You can install this extension using the provided installation scripts:
//TODO: needs to be added to hero cmd line in installers

### Automatic Installation
```
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

1. For macOS/Linux:
```
./install.sh
```
import freeflowuniverse.herolib.develop.vscode_extensions.ourdb

2. For Windows:
```
install.bat
```
// This example shows how to use the ourdb module to install or uninstall the VSCode extension

// Install the extension
ourdb.install_extension() or {
    eprintln('Failed to install extension: ${err}')
    exit(1)
}

// To uninstall, uncomment the following lines:
/*
ourdb.uninstall_extension() or {
    eprintln('Failed to uninstall extension: ${err}')
    exit(1)
}
*/

```

3. Restart VSCode
lib/develop/vscode_extensions/ourdb/factory.v (Normal file, 114 lines added)
@@ -0,0 +1,114 @@
module ourdb

import os

// Embed the extension files directly into the binary
#embed_file extension_js_content := 'extension.js'
#embed_file package_json_content := 'package.json'
#embed_file readme_content := 'README.md'

// VSCodeExtension represents the OurDB VSCode extension
pub struct VSCodeExtension {
pub mut:
    extension_dir string
}

// new creates a new VSCodeExtension instance
pub fn new() !VSCodeExtension {
    // Determine the extension directory based on OS
    extension_dir := get_extension_dir()

    return VSCodeExtension{
        extension_dir: extension_dir
    }
}

// get_extension_dir determines the VSCode extension directory based on OS
fn get_extension_dir() string {
    home_dir := os.home_dir()

    // Extension directory path based on OS
    return os.join_path(home_dir, '.vscode', 'extensions', 'local-herolib.ourdb-viewer-0.0.1')
}

// install installs the OurDB VSCode extension
pub fn (mut ext VSCodeExtension) install() ! {
    // Check if already installed
    if ext.is_installed() {
        println('OurDB VSCode extension is already installed at: ${ext.extension_dir}')
        println('To reinstall, first uninstall using the uninstall() function')
        return
    }

    // Create extension directory if it doesn't exist
    os.mkdir_all(ext.extension_dir) or {
        return error('Failed to create extension directory: ${err}')
    }

    // Write embedded files to the extension directory
    // extension.js
    os.write_file(os.join_path(ext.extension_dir, 'extension.js'), extension_js_content.to_string()) or {
        return error('Failed to write extension.js: ${err}')
    }

    // package.json
    os.write_file(os.join_path(ext.extension_dir, 'package.json'), package_json_content.to_string()) or {
        return error('Failed to write package.json: ${err}')
    }

    // README.md
    os.write_file(os.join_path(ext.extension_dir, 'README.md'), readme_content.to_string()) or {
        return error('Failed to write README.md: ${err}')
    }

    println('OurDB Viewer extension installed to: ${ext.extension_dir}')
    println('Please restart VSCode for the changes to take effect.')
    println('After restarting, you should be able to open .ourdb files.')

    return
}

// uninstall removes the OurDB VSCode extension
pub fn (mut ext VSCodeExtension) uninstall() ! {
    if os.exists(ext.extension_dir) {
        os.rmdir_all(ext.extension_dir) or {
            return error('Failed to remove extension directory: ${err}')
        }

        println('OurDB Viewer extension uninstalled from: ${ext.extension_dir}')
        println('Please restart VSCode for the changes to take effect.')
    } else {
        println('Extension not found at: ${ext.extension_dir}')
    }

    return
}

// is_installed checks if the extension is installed
pub fn (ext VSCodeExtension) is_installed() bool {
    return os.exists(ext.extension_dir) &&
        os.exists(os.join_path(ext.extension_dir, 'extension.js')) &&
        os.exists(os.join_path(ext.extension_dir, 'package.json'))
}

// install_extension is a convenience function to install the extension
pub fn install_extension() ! {
    mut ext := new() or {
        return error('Failed to initialize extension: ${err}')
    }

    ext.install() or {
        return error('Failed to install extension: ${err}')
    }
}

// uninstall_extension is a convenience function to uninstall the extension
pub fn uninstall_extension() ! {
    mut ext := new() or {
        return error('Failed to initialize extension: ${err}')
    }

    ext.uninstall() or {
        return error('Failed to uninstall extension: ${err}')
    }
}
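A minimal usage sketch of the factory API above, written as a V script (error handling simplified; it relies only on the new(), is_installed() and install() functions defined in this file):

```
#!/usr/bin/env -S v run

import freeflowuniverse.herolib.develop.vscode_extensions.ourdb

mut ext := ourdb.new() or { panic('failed to initialize extension: ${err}') }
if !ext.is_installed() {
    // writes the embedded extension.js, package.json and README.md into the VSCode extensions dir
    ext.install() or { panic('failed to install extension: ${err}') }
}
println('extension directory: ${ext.extension_dir}')
```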