Update osiris dependency to use osiris-core package
Cargo.lock (generated, 1645 changes): file diff suppressed because it is too large.
@@ -54,8 +54,8 @@ rand = "0.8"
 hero_logger = { git = "https://git.ourworld.tf/herocode/baobab.git", branch = "logger" }

 # Osiris dependencies (used by runner_osiris binary)
-osiris = { path = "../osiris" }
-# osiris = { git = "https://git.ourworld.tf/herocode/osiris.git" }
+osiris = { package = "osiris-core", path = "../osiris/core" }
+# osiris = { package = "osiris-core", git = "https://git.ourworld.tf/herocode/osiris.git" }
 heromodels = { git = "https://git.ourworld.tf/herocode/db.git" }
 heromodels_core = { git = "https://git.ourworld.tf/herocode/db.git" }
 heromodels-derive = { git = "https://git.ourworld.tf/herocode/db.git" }
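Note: Cargo's `package` key renames the dependency, so the crate built from `../osiris/core` (compiled as `osiris-core`) stays visible to this package under the local name `osiris`, and existing imports compile unchanged. A minimal consumer sketch; the zero-argument `create_osiris_engine()` matches the signature introduced later in this commit:

// Resolves to osiris-core thanks to the rename above; no path changes needed.
use osiris::create_osiris_engine;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let engine = create_osiris_engine()?;
    engine.run(r#"print("osiris engine ready");"#)?;
    Ok(())
}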
@@ -1,149 +0,0 @@
-//! Terminal UI for Async Actor - Monitor and dispatch jobs to async actor with SAL modules
-//!
-//! This binary provides a TUI interface for monitoring and dispatching jobs to the async actor.
-
-use anyhow::{Result, Context};
-use clap::Parser;
-use log::{info, warn, error};
-use std::path::PathBuf;
-use std::process::{Child, Command};
-use tokio::signal;
-
-#[derive(Parser)]
-#[command(name = "async-actor-tui")]
-#[command(about = "Terminal UI for Async Actor - Monitor and dispatch jobs with SAL modules")]
-struct Args {
-    /// Actor ID for this instance
-    #[arg(short, long, default_value = "async_sal")]
-    actor_id: String,
-
-    /// Redis URL for job queue
-    #[arg(short, long, default_value = "redis://localhost:6379")]
-    redis_url: String,
-
-    /// Database path
-    #[arg(short, long, default_value = "/tmp/actor_db")]
-    db_path: String,
-
-    /// Default timeout in seconds for job execution
-    #[arg(short, long, default_value = "300")]
-    timeout: u64,
-
-    /// Enable verbose logging
-    #[arg(short, long)]
-    verbose: bool,
-}
-
-/// Initialize logging based on verbosity level
-fn init_logging(verbose: bool) {
-    if verbose {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Debug)
-            .init();
-    } else {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Info)
-            .init();
-    }
-}
-
-/// Spawn the async actor binary as a background process
-fn spawn_actor_process(args: &Args) -> Result<Child> {
-    // Get the crate root directory
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-    let actor_path = PathBuf::from(crate_root).join("target/debug/async_actor");
-    info!("🎬 Spawning async actor process: {}", actor_path.display());
-
-    let mut cmd = Command::new(&actor_path);
-
-    // Add command line arguments
-    cmd.args(&[
-        "--actor-id", &args.actor_id,
-        "--db-path", &args.db_path,
-        "--redis-url", &args.redis_url,
-        "--timeout", &args.timeout.to_string(),
-    ]);
-
-    // Redirect stdout and stderr to null to prevent logs from interfering with TUI
-    cmd.stdout(std::process::Stdio::null())
-        .stderr(std::process::Stdio::null());
-
-    // Spawn the process
-    let child = cmd
-        .spawn()
-        .with_context(|| format!("Failed to spawn async actor process: {}", actor_path.display()))?;
-
-    info!("✅ Async actor process spawned with PID: {}", child.id());
-    Ok(child)
-}
-
-/// Cleanup function to terminate actor process
-fn cleanup_actor_process(mut actor_process: Child) {
-    info!("🧹 Cleaning up async actor process...");
-
-    match actor_process.try_wait() {
-        Ok(Some(status)) => {
-            info!("Async actor process already exited with status: {}", status);
-        }
-        Ok(None) => {
-            info!("Terminating async actor process...");
-            if let Err(e) = actor_process.kill() {
-                error!("Failed to kill async actor process: {}", e);
-            } else {
-                match actor_process.wait() {
-                    Ok(status) => info!("Async actor process terminated with status: {}", status),
-                    Err(e) => error!("Failed to wait for async actor process: {}", e),
-                }
-            }
-        }
-        Err(e) => {
-            error!("Failed to check async actor process status: {}", e);
-        }
-    }
-}
-
-#[tokio::main]
-async fn main() -> Result<()> {
-    let args = Args::parse();
-
-    // Initialize logging
-    init_logging(args.verbose);
-
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-
-    info!("🚀 Starting Async Actor TUI...");
-    info!("Actor ID: {}", args.actor_id);
-    info!("Actor Path: {}/target/debug/async_actor", crate_root);
-    info!("Redis URL: {}", args.redis_url);
-    info!("Database Path: {}", args.db_path);
-    info!("Default Timeout: {}s", args.timeout);
-    info!("Script Type: SAL (System Abstraction Layer)");
-
-    // Spawn the actor process first
-    let actor_process = spawn_actor_process(&args)?;
-
-    // Give the actor a moment to start up
-    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
-
-    info!("📋 Async Actor TUI is running. The actor processes jobs concurrently.");
-    info!("💡 Use Redis CLI or job submission tools to send jobs to queue: actor_queue:{}", args.actor_id);
-    info!("🔄 Jobs will be processed with SAL modules (system operations)");
-    info!("⏱️ Each job has a timeout of {}s", args.timeout);
-    info!("📊 Multiple jobs can run simultaneously");
-    info!("Press Ctrl+C to exit...");
-
-    // Wait for Ctrl+C
-    let result = tokio::select! {
-        _ = signal::ctrl_c() => {
-            info!("Received Ctrl+C, shutting down...");
-            Ok(())
-        }
-    };
-
-    // Clean up the actor process
-    cleanup_actor_process(actor_process);
-
-    result
-}
@@ -1,153 +0,0 @@
-//! Terminal UI for Sync Actor - Monitor and dispatch jobs to sync actor with DSL modules
-//!
-//! This binary provides a TUI interface for monitoring and dispatching jobs to the sync actor.
-
-use anyhow::{Result, Context};
-use clap::Parser;
-use log::{info, warn, error};
-use std::path::PathBuf;
-use std::process::{Child, Command};
-use tokio::signal;
-
-#[derive(Parser)]
-#[command(name = "sync-actor-tui")]
-#[command(about = "Terminal UI for Sync Actor - Monitor and dispatch jobs with DSL modules")]
-struct Args {
-    /// Actor ID for this instance
-    #[arg(short, long, default_value = "sync_osis")]
-    actor_id: String,
-
-    /// Redis URL for job queue
-    #[arg(short, long, default_value = "redis://localhost:6379")]
-    redis_url: String,
-
-    /// Database path
-    #[arg(short, long, default_value = "/tmp/actor_db")]
-    db_path: String,
-
-    /// Preserve completed tasks in Redis (don't delete them)
-    #[arg(short, long, default_value = "false")]
-    preserve_tasks: bool,
-
-    /// Enable verbose logging
-    #[arg(short, long)]
-    verbose: bool,
-}
-
-/// Initialize logging based on verbosity level
-fn init_logging(verbose: bool) {
-    if verbose {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Debug)
-            .init();
-    } else {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Info)
-            .init();
-    }
-}
-
-/// Spawn the sync actor binary as a background process
-fn spawn_actor_process(args: &Args) -> Result<Child> {
-    // Get the crate root directory
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-    let actor_path = PathBuf::from(crate_root).join("target/debug/sync_actor");
-    info!("🎬 Spawning sync actor process: {}", actor_path.display());
-
-    let mut cmd = Command::new(&actor_path);
-
-    // Add command line arguments
-    cmd.args(&[
-        "--actor-id", &args.actor_id,
-        "--db-path", &args.db_path,
-        "--redis-url", &args.redis_url,
-        "--preserve-tasks", &args.preserve_tasks.to_string(),
-    ]);
-
-    // Redirect stdout and stderr to null to prevent logs from interfering with TUI
-    cmd.stdout(std::process::Stdio::null())
-        .stderr(std::process::Stdio::null());
-
-    // Spawn the process
-    let child = cmd
-        .spawn()
-        .with_context(|| format!("Failed to spawn sync actor process: {}", actor_path.display()))?;
-
-    info!("✅ Sync actor process spawned with PID: {}", child.id());
-    Ok(child)
-}
-
-/// Cleanup function to terminate actor process
-fn cleanup_actor_process(mut actor_process: Child) {
-    info!("🧹 Cleaning up sync actor process...");
-
-    match actor_process.try_wait() {
-        Ok(Some(status)) => {
-            info!("Sync actor process already exited with status: {}", status);
-        }
-        Ok(None) => {
-            info!("Terminating sync actor process...");
-            if let Err(e) = actor_process.kill() {
-                error!("Failed to kill sync actor process: {}", e);
-            } else {
-                match actor_process.wait() {
-                    Ok(status) => info!("Sync actor process terminated with status: {}", status),
-                    Err(e) => error!("Failed to wait for sync actor process: {}", e),
-                }
-            }
-        }
-        Err(e) => {
-            error!("Failed to check sync actor process status: {}", e);
-        }
-    }
-}
-
-#[tokio::main]
-async fn main() -> Result<()> {
-    let args = Args::parse();
-
-    // Initialize logging
-    init_logging(args.verbose);
-
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-
-    info!("🚀 Starting Sync Actor TUI...");
-    info!("Actor ID: {}", args.actor_id);
-    info!("Actor Path: {}/target/debug/sync_actor", crate_root);
-    info!("Redis URL: {}", args.redis_url);
-    info!("Database Path: {}", args.db_path);
-    info!("Preserve Tasks: {}", args.preserve_tasks);
-    info!("Script Type: DSL (Domain Specific Language)");
-
-    // Spawn the actor process first
-    let actor_process = spawn_actor_process(&args)?;
-
-    // Give the actor a moment to start up
-    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
-
-    info!("📋 Sync Actor TUI is running. The actor processes jobs sequentially.");
-    info!("💡 Use Redis CLI or job submission tools to send jobs to queue: actor_queue:{}", args.actor_id);
-    info!("🔄 Jobs will be processed with DSL modules (business operations)");
-    info!("📊 Jobs are processed one at a time in order");
-    if args.preserve_tasks {
-        info!("💾 Completed tasks will be preserved in Redis");
-    } else {
-        info!("🗑️ Completed tasks will be cleaned up from Redis");
-    }
-    info!("Press Ctrl+C to exit...");
-
-    // Wait for Ctrl+C
-    let result = tokio::select! {
-        _ = signal::ctrl_c() => {
-            info!("Received Ctrl+C, shutting down...");
-            Ok(())
-        }
-    };
-
-    // Clean up the actor process
-    cleanup_actor_process(actor_process);
-
-    result
-}
@@ -1,156 +0,0 @@
-//! Simplified main function for Baobab Actor TUI
-//!
-//! This binary provides a clean entry point for the actor monitoring and job dispatch interface.
-
-use anyhow::{Result, Context};
-use baobab_actor::terminal_ui::{App, setup_and_run_tui};
-use clap::Parser;
-use hero_job::ScriptType;
-use log::{info, warn, error};
-use std::path::PathBuf;
-use std::process::{Child, Command};
-use tokio::signal;
-
-#[derive(Parser)]
-#[command(name = "baobab-actor-tui")]
-#[command(about = "Terminal UI for Baobab Actor - Monitor and dispatch jobs to a single actor")]
-struct Args {
-    /// Redis URL for job queue
-    #[arg(short, long, default_value = "redis://localhost:6379")]
-    redis_url: String,
-
-    /// Enable verbose logging
-    #[arg(short, long)]
-    verbose: bool,
-}
-
-/// Initialize logging based on verbosity level
-fn init_logging(verbose: bool) {
-    if verbose {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Debug)
-            .init();
-    } else {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Info)
-            .init();
-    }
-}
-
-/// Create and configure the TUI application
-fn create_app(args: &Args) -> Result<App> {
-    let actor_id = "sal".to_string();
-
-    // Get the crate root directory
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-    let crate_root = PathBuf::from(crate_root);
-
-    let actor_path = crate_root.join("target/debug/runner_rust");
-    let example_dir = Some(crate_root.join("examples/scripts"));
-
-    let mut app = App::new(
-        actor_id,
-        actor_path,
-        args.redis_url.clone(),
-        example_dir,
-    )?;
-
-    // Set the correct script type for the system actor
-    // System actor processes SAL (System Abstraction Layer) scripts, not OSIS scripts
-    app.job_form.script_type = ScriptType::SAL;
-
-    Ok(app)
-}
-
-/// Spawn the actor binary as a background process
-fn spawn_actor_process(_args: &Args) -> Result<Child> {
-    // Get the crate root directory
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-    let actor_path = PathBuf::from(crate_root).join("target/debug/runner_rust");
-    info!("🎬 Spawning actor process: {}", actor_path.display());
-
-    let mut cmd = Command::new(&actor_path);
-
-    // Redirect stdout and stderr to null to prevent logs from interfering with TUI
-    cmd.stdout(std::process::Stdio::null())
-        .stderr(std::process::Stdio::null());
-
-    // Spawn the process
-    let child = cmd
-        .spawn()
-        .with_context(|| format!("Failed to spawn actor process: {}", actor_path.display()))?;
-
-    info!("✅ Actor process spawned with PID: {}", child.id());
-    Ok(child)
-}
-
-/// Cleanup function to terminate actor process
-fn cleanup_actor_process(mut actor_process: Child) {
-    info!("🧹 Cleaning up actor process...");
-
-    match actor_process.try_wait() {
-        Ok(Some(status)) => {
-            info!("Actor process already exited with status: {}", status);
-        }
-        Ok(None) => {
-            info!("Terminating actor process...");
-            if let Err(e) = actor_process.kill() {
-                error!("Failed to kill actor process: {}", e);
-            } else {
-                match actor_process.wait() {
-                    Ok(status) => info!("Actor process terminated with status: {}", status),
-                    Err(e) => error!("Failed to wait for actor process: {}", e),
-                }
-            }
-        }
-        Err(e) => {
-            error!("Failed to check actor process status: {}", e);
-        }
-    }
-}
-
-#[tokio::main]
-async fn main() -> Result<()> {
-    let args = Args::parse();
-
-    // Initialize logging
-    init_logging(args.verbose);
-
-    let crate_root = std::env::var("CARGO_MANIFEST_DIR")
-        .unwrap_or_else(|_| ".".to_string());
-
-    info!("🚀 Starting Baobab Actor TUI...");
-    info!("Actor ID: sal (System Actor)");
-    info!("Actor Path: {}/target/debug/runner_rust", crate_root);
-    info!("Redis URL: {}", args.redis_url);
-    info!("Script Type: SAL");
-    info!("Example Directory: {}/examples/scripts", crate_root);
-
-    // Spawn the actor process first
-    let actor_process = spawn_actor_process(&args)?;
-
-    // Give the actor a moment to start up
-    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
-
-    // Create app and run TUI
-    let app = create_app(&args)?;
-
-    // Set up signal handling for graceful shutdown
-    let result = tokio::select! {
-        tui_result = setup_and_run_tui(app) => {
-            info!("TUI exited");
-            tui_result
-        }
-        _ = signal::ctrl_c() => {
-            info!("Received Ctrl+C, shutting down...");
-            Ok(())
-        }
-    };
-
-    // Clean up the actor process
-    cleanup_actor_process(actor_process);
-
-    result
-}
@@ -2,7 +2,7 @@ use runner_rust::{spawn_sync_runner, script_mode::execute_script_mode};
 use clap::Parser;
 use log::{error, info};
 use tokio::sync::mpsc;
-use osiris::rhai::create_osiris_engine;
+use osiris::create_osiris_engine;

 #[derive(Parser, Debug)]
 #[command(author, version, about, long_about = None)]
@@ -41,7 +41,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
         &args.runner_id,
         args.redis_url,
         std::time::Duration::from_secs(300), // Default timeout for OSIS
-        move || create_osiris_engine(&redis_url, base_db_id)
+        move || create_osiris_engine()
             .expect("Failed to create OSIRIS engine"),
     ).await;

@@ -78,7 +78,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
         args.runner_id.clone(),
         args.redis_url,
         shutdown_rx,
-        move || create_osiris_engine(&redis_url, base_db_id)
+        move || create_osiris_engine()
            .expect("Failed to create OSIRIS engine"),
     );

@@ -101,3 +101,15 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {

     Ok(())
 }
+
+/// Example: Run a Rhai script with OSIRIS support
+pub fn run_osiris_script(
+    script: &str,
+) -> Result<(), Box<dyn std::error::Error>> {
+    let engine = create_osiris_engine()?;
+    engine.run(script)?;
+    Ok(())
+}
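A hedged usage sketch for the new `run_osiris_script` helper. It assumes the engine returned by the zero-argument `create_osiris_engine()` still registers the `note("...")` builder documented in the OSIRIS engine module removed later in this diff:

fn demo() -> Result<(), Box<dyn std::error::Error>> {
    // Sketch only: the note("notes") builder is assumed to be part of
    // the OSIRIS Rhai API registered by create_osiris_engine().
    run_osiris_script(r#"
        let note = note("notes");
        print(note);
    "#)
}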
@@ -71,3 +71,128 @@ pub fn create_sal_engine() -> Engine {
 pub fn create_shared_sal_engine() -> Arc<Engine> {
     EngineFactory::global().get_engine()
 }
+
+use std::sync::{Arc, OnceLock};
+// Re-export common Rhai types for convenience
+pub use rhai::{Array, Dynamic, Engine, EvalAltResult, Map};
+
+// Re-export specific functions from sal-os package
+pub use sal_os::rhai::{
+    delete,
+    // Download functions
+    download,
+    download_install,
+    // File system functions
+    exist,
+    file_size,
+    find_dir,
+    find_dirs,
+    find_file,
+    find_files,
+    mkdir,
+    register_os_module,
+    rsync,
+};
+
+// Re-export Redis client module registration function
+pub use sal_redisclient::rhai::register_redisclient_module;
+
+// Re-export PostgreSQL client module registration function
+pub use sal_postgresclient::rhai::register_postgresclient_module;
+
+pub use sal_process::rhai::{
+    kill,
+    process_get,
+    process_list,
+    register_process_module,
+    // Run functions
+    // Process management functions
+    which,
+};
+
+// Re-export virt functions from sal-virt package
+pub use sal_virt::rhai::nerdctl::{
+    nerdctl_copy,
+    nerdctl_exec,
+    nerdctl_image_build,
+    nerdctl_image_commit,
+    nerdctl_image_pull,
+    nerdctl_image_push,
+    nerdctl_image_remove,
+    nerdctl_image_tag,
+    // Image functions
+    nerdctl_images,
+    nerdctl_list,
+    nerdctl_remove,
+    // Container functions
+    nerdctl_run,
+    nerdctl_run_with_name,
+    nerdctl_run_with_port,
+    nerdctl_stop,
+};
+pub use sal_virt::rhai::{
+    bah_new, register_bah_module, register_nerdctl_module, register_rfs_module,
+};
+
+pub use sal_git::rhai::register_git_module;
+pub use sal_git::{GitRepo, GitTree};
+pub use sal_zinit_client::rhai::register_zinit_module;
+pub use sal_mycelium::rhai::register_mycelium_module;
+pub use sal_text::rhai::register_text_module;
+pub use sal_net::rhai::register_net_module;
+pub use sal_kubernetes::rhai::register_kubernetes_module;
+pub use sal_kubernetes::KubernetesManager;
+pub use sal_os::rhai::copy as os_copy;
+pub use sal_hetzner::rhai::register_hetzner_module;
+
+/// Engine factory for creating and sharing Rhai engines with SAL modules.
+pub struct EngineFactory {
+    engine: Arc<Engine>,
+}
+
+impl EngineFactory {
+    /// Create a new engine factory with a configured Rhai engine.
+    pub fn new() -> Self {
+        let mut engine = Engine::new();
+        register_sal_modules(&mut engine);
+        // Logger
+        hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "system_actor");
+
+        Self {
+            engine: Arc::new(engine),
+        }
+    }
+
+    /// Get a shared reference to the engine.
+    pub fn get_engine(&self) -> Arc<Engine> {
+        Arc::clone(&self.engine)
+    }
+
+    /// Get the global singleton engine factory.
+    pub fn global() -> &'static EngineFactory {
+        static FACTORY: OnceLock<EngineFactory> = OnceLock::new();
+        FACTORY.get_or_init(|| EngineFactory::new())
+    }
+}
+
+pub fn register_sal_modules(engine: &mut Engine) {
+    let _ = sal_os::rhai::register_os_module(engine);
+    let _ = sal_redisclient::rhai::register_redisclient_module(engine);
+    let _ = sal_postgresclient::rhai::register_postgresclient_module(engine);
+    let _ = sal_process::rhai::register_process_module(engine);
+    let _ = sal_virt::rhai::register_virt_module(engine);
+    let _ = sal_git::rhai::register_git_module(engine);
+    let _ = sal_zinit_client::rhai::register_zinit_module(engine);
+    let _ = sal_mycelium::rhai::register_mycelium_module(engine);
+    let _ = sal_text::rhai::register_text_module(engine);
+    let _ = sal_net::rhai::register_net_module(engine);
+    let _ = sal_kubernetes::rhai::register_kubernetes_module(engine);
+    let _ = sal_hetzner::rhai::register_hetzner_module(engine);
+
+    println!("SAL modules registered successfully.");
+}
+
+/// Create a shared system engine using the factory.
+pub fn create_system_engine() -> Arc<Engine> {
+    EngineFactory::global().get_engine()
+}
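The `EngineFactory` above lazily builds one fully registered engine behind a `OnceLock` and hands out `Arc` clones, so every caller on every thread shares the same instance and the module-registration cost is paid exactly once. A small illustration of that behaviour (not part of the diff):

fn shared_engine_demo() {
    // Both calls return handles to the same underlying engine.
    let a = create_system_engine();
    let b = create_system_engine();
    assert!(std::sync::Arc::ptr_eq(&a, &b)); // one engine, registered once
}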
@@ -1,22 +0,0 @@
-/// Engine module for Rhai script execution
-///
-/// This module provides two different engine configurations:
-/// - `system`: SAL modules for system operations (async worker)
-/// - `osis`: OSIRIS engine for business operations (sync worker)
-
-pub mod system;
-pub mod osis;
-
-pub use osis::create_osis_engine;
-pub use system::create_system_engine;
-
-// Re-export common Rhai types for convenience
-pub use rhai::{Array, Dynamic, Engine, EvalAltResult, Map};
-
-/// Evaluate a Rhai script string with any engine
-pub fn eval_script(
-    engine: &Engine,
-    script: &str,
-) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>> {
-    engine.eval(script)
-}
@@ -1,96 +0,0 @@
-/// OSIRIS Rhai Engine Integration
-///
-/// This module provides a Rhai engine configured with OSIRIS object support.
-/// It allows Rhai scripts to create Notes, Events, and other OSIRIS objects
-/// using a fluent builder pattern and store/retrieve them from HeroDB.
-///
-/// # Example Rhai Script
-///
-/// ```rhai
-/// // Create a note with builder pattern
-/// let note = note("notes")
-///     .title("My First Note")
-///     .content("This is the content of my note")
-///     .tag("topic", "rust")
-///     .tag("project", "osiris")
-///     .mime("text/plain");
-///
-/// // Store the note
-/// let id = put_note(note);
-/// print(`Note stored with ID: ${id}`);
-///
-/// // Retrieve the note
-/// let retrieved = get_note("notes", id);
-/// print(`Retrieved: ${retrieved.get_title()}`);
-///
-/// // Query by tag
-/// let ids = query("notes", "tags:tag", "project=osiris");
-/// print(`Found ${ids.len()} notes`);
-/// ```
-
-use osiris::rhai_support::{register_note_api, register_event_api, OsirisInstance};
-use rhai::Engine;
-
-/// Create a new Rhai engine with OSIRIS support
-///
-/// # Arguments
-/// * `herodb_url` - HeroDB connection URL (e.g., "redis://localhost:6379")
-/// * `db_id` - Database ID to use
-///
-/// # Returns
-/// A configured Rhai engine with OSIRIS objects and methods registered
-pub fn create_osiris_engine(
-    herodb_url: &str,
-    db_id: u16,
-) -> Result<Engine, Box<dyn std::error::Error>> {
-    let mut engine = Engine::new();
-
-    // Register Note and Event APIs
-    register_note_api(&mut engine);
-    register_event_api(&mut engine);
-
-    // Register OsirisInstance type
-    engine.build_type::<OsirisInstance>();
-
-    // Register a function to create OSIRIS instances
-    engine.register_fn("osiris", move |name: &str, url: &str, db_id: rhai::INT| -> Result<OsirisInstance, Box<rhai::EvalAltResult>> {
-        OsirisInstance::new(name, url, db_id as u16)
-            .map_err(|e| format!("Failed to create OSIRIS instance: {}", e).into())
-    });
-
-    Ok(engine)
-}
-
-/// Example: Run a Rhai script with OSIRIS support
-pub fn run_osiris_script(
-    script: &str,
-    herodb_url: &str,
-    db_id: u16,
-) -> Result<(), Box<dyn std::error::Error>> {
-    let engine = create_osiris_engine(herodb_url, db_id)?;
-    engine.run(script)?;
-    Ok(())
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_create_engine() {
-        let engine = create_osiris_engine("redis://localhost:6379", 1);
-        assert!(engine.is_ok());
-    }
-
-    #[test]
-    #[ignore] // Requires HeroDB running
-    fn test_run_script() {
-        let script = r#"
-            let note = note("notes");
-            print(note);
-        "#;
-
-        let result = run_osiris_script(script, "redis://localhost:6379", 1);
-        assert!(result.is_ok());
-    }
-}
@@ -1,143 +0,0 @@
-// use heromodels::models::heroledger::rhai::register_heroledger_rhai_modules;
-use rhai::Engine;
-use rhailib_dsl;
-use std::sync::{Arc, OnceLock};
-
-/// Engine factory for creating and sharing Rhai engines with DSL modules.
-pub struct EngineFactory {
-    engine: Arc<Engine>,
-}
-
-impl EngineFactory {
-    /// Create a new engine factory with a configured Rhai engine.
-    pub fn new() -> Self {
-        let mut engine = Engine::new();
-        register_dsl_modules(&mut engine);
-        // Logger
-        hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "osis_actor");
-
-        Self {
-            engine: Arc::new(engine),
-        }
-    }
-
-    /// Get a shared reference to the engine.
-    pub fn get_engine(&self) -> Arc<Engine> {
-        Arc::clone(&self.engine)
-    }
-
-    /// Get the global singleton engine factory.
-    pub fn global() -> &'static EngineFactory {
-        static FACTORY: OnceLock<EngineFactory> = OnceLock::new();
-        FACTORY.get_or_init(|| EngineFactory::new())
-    }
-}
-
-/// Register basic object functions directly in the engine.
-/// This provides object functionality without relying on the problematic rhailib_dsl object module.
-fn register_object_functions(engine: &mut Engine) {
-    use heromodels::models::object::Object;
-
-    // Register the Object type
-    engine.register_type_with_name::<Object>("Object");
-
-    // Register constructor function
-    engine.register_fn("new_object", || Object::new());
-
-    // Register setter functions
-    engine.register_fn("object_title", |obj: &mut Object, title: String| {
-        obj.title = title;
-        obj.clone()
-    });
-
-    engine.register_fn(
-        "object_description",
-        |obj: &mut Object, description: String| {
-            obj.description = description;
-            obj.clone()
-        },
-    );
-
-    // Register getter functions
-    engine.register_fn("get_object_id", |obj: &mut Object| obj.id() as i64);
-    engine.register_fn("get_object_title", |obj: &mut Object| obj.title.clone());
-    engine.register_fn("get_object_description", |obj: &mut Object| {
-        obj.description.clone()
-    });
-}
-
-/// Registers all DSL modules with the provided Rhai engine.
-///
-/// This function is the main entry point for integrating the rhailib DSL with a Rhai engine.
-/// It registers all business domain modules, making their functions available to Rhai scripts.
-///
-/// # Arguments
-///
-/// * `engine` - A mutable reference to the Rhai engine to register modules with
-///
-/// # Example
-///
-/// ```rust
-/// use rhai::Engine;
-/// use runner_rust::engine::osis::register_dsl_modules;
-///
-/// let mut engine = Engine::new();
-/// register_dsl_modules(&mut engine);
-///
-/// // Engine now has access to all DSL functions
-/// let result = engine.eval::<String>(r#"
-///     let company = new_company().name("Test Corp");
-///     company.name
-/// "#).unwrap();
-/// assert_eq!(result, "Test Corp");
-/// ```
-///
-/// # Registered Modules
-///
-/// This function registers the following domain modules:
-/// - Access control functions
-/// - Business operation functions (companies, products, sales, shareholders)
-/// - Calendar and scheduling functions
-/// - Circle and community management functions
-/// - Company management functions
-/// - Contact management functions
-/// - Core utility functions
-/// - Financial operation functions (accounts, assets, marketplace)
-/// - Workflow management functions (flows, steps, signatures)
-/// - Library and content management functions
-/// - Generic object manipulation functions (custom implementation)
-pub fn register_dsl_modules(engine: &mut Engine) {
-    rhailib_dsl::access::register_access_rhai_module(engine);
-    rhailib_dsl::biz::register_biz_rhai_module(engine);
-    rhailib_dsl::calendar::register_calendar_rhai_module(engine);
-    rhailib_dsl::circle::register_circle_rhai_module(engine);
-    rhailib_dsl::company::register_company_rhai_module(engine);
-    rhailib_dsl::contact::register_contact_rhai_module(engine);
-    rhailib_dsl::core::register_core_rhai_module(engine);
-    rhailib_dsl::finance::register_finance_rhai_modules(engine);
-    // rhailib_dsl::flow::register_flow_rhai_modules(engine);
-    rhailib_dsl::library::register_library_rhai_module(engine);
-    // Skip problematic object module for now - can be implemented separately if needed
-    // rhailib_dsl::object::register_object_fns(engine);
-    rhailib_dsl::payment::register_payment_rhai_module(engine);
-
-    // Register basic object functionality directly
-    register_object_functions(engine);
-    // heromodels::heroledger::rhai::register_heroledger_rhai_modules(&mut engine);
-
-    println!("Rhailib Domain Specific Language modules registered successfully.");
-}
-
-/// Create a new osis engine instance.
-pub fn create_osis_engine() -> Engine {
-    let mut engine = Engine::new();
-    register_dsl_modules(&mut engine);
-    hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "osis_actor");
-    engine
-}
-
-/// Create a shared osis engine using the factory.
-pub fn create_shared_osis_engine() -> Arc<Engine> {
-    EngineFactory::global().get_engine()
-}
@@ -1,124 +0,0 @@
-use std::sync::{Arc, OnceLock};
-// Re-export common Rhai types for convenience
-pub use rhai::{Array, Dynamic, Engine, EvalAltResult, Map};
-
-// Re-export specific functions from sal-os package
-pub use sal_os::rhai::{
-    delete,
-    // Download functions
-    download,
-    download_install,
-    // File system functions
-    exist,
-    file_size,
-    find_dir,
-    find_dirs,
-    find_file,
-    find_files,
-    mkdir,
-    register_os_module,
-    rsync,
-};
-
-// Re-export Redis client module registration function
-pub use sal_redisclient::rhai::register_redisclient_module;
-
-// Re-export PostgreSQL client module registration function
-pub use sal_postgresclient::rhai::register_postgresclient_module;
-
-pub use sal_process::rhai::{
-    kill,
-    process_get,
-    process_list,
-    register_process_module,
-    // Run functions
-    // Process management functions
-    which,
-};
-
-// Re-export virt functions from sal-virt package
-pub use sal_virt::rhai::nerdctl::{
-    nerdctl_copy,
-    nerdctl_exec,
-    nerdctl_image_build,
-    nerdctl_image_commit,
-    nerdctl_image_pull,
-    nerdctl_image_push,
-    nerdctl_image_remove,
-    nerdctl_image_tag,
-    // Image functions
-    nerdctl_images,
-    nerdctl_list,
-    nerdctl_remove,
-    // Container functions
-    nerdctl_run,
-    nerdctl_run_with_name,
-    nerdctl_run_with_port,
-    nerdctl_stop,
-};
-pub use sal_virt::rhai::{
-    bah_new, register_bah_module, register_nerdctl_module, register_rfs_module,
-};
-
-pub use sal_git::rhai::register_git_module;
-pub use sal_git::{GitRepo, GitTree};
-pub use sal_zinit_client::rhai::register_zinit_module;
-pub use sal_mycelium::rhai::register_mycelium_module;
-pub use sal_text::rhai::register_text_module;
-pub use sal_net::rhai::register_net_module;
-pub use sal_kubernetes::rhai::register_kubernetes_module;
-pub use sal_kubernetes::KubernetesManager;
-pub use sal_os::rhai::copy as os_copy;
-pub use sal_hetzner::rhai::register_hetzner_module;
-
-/// Engine factory for creating and sharing Rhai engines with SAL modules.
-pub struct EngineFactory {
-    engine: Arc<Engine>,
-}
-
-impl EngineFactory {
-    /// Create a new engine factory with a configured Rhai engine.
-    pub fn new() -> Self {
-        let mut engine = Engine::new();
-        register_sal_modules(&mut engine);
-        // Logger
-        hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "system_actor");
-
-        Self {
-            engine: Arc::new(engine),
-        }
-    }
-
-    /// Get a shared reference to the engine.
-    pub fn get_engine(&self) -> Arc<Engine> {
-        Arc::clone(&self.engine)
-    }
-
-    /// Get the global singleton engine factory.
-    pub fn global() -> &'static EngineFactory {
-        static FACTORY: OnceLock<EngineFactory> = OnceLock::new();
-        FACTORY.get_or_init(|| EngineFactory::new())
-    }
-}
-
-pub fn register_sal_modules(engine: &mut Engine) {
-    let _ = sal_os::rhai::register_os_module(engine);
-    let _ = sal_redisclient::rhai::register_redisclient_module(engine);
-    let _ = sal_postgresclient::rhai::register_postgresclient_module(engine);
-    let _ = sal_process::rhai::register_process_module(engine);
-    let _ = sal_virt::rhai::register_virt_module(engine);
-    let _ = sal_git::rhai::register_git_module(engine);
-    let _ = sal_zinit_client::rhai::register_zinit_module(engine);
-    let _ = sal_mycelium::rhai::register_mycelium_module(engine);
-    let _ = sal_text::rhai::register_text_module(engine);
-    let _ = sal_net::rhai::register_net_module(engine);
-    let _ = sal_kubernetes::rhai::register_kubernetes_module(engine);
-    let _ = sal_hetzner::rhai::register_hetzner_module(engine);
-
-    println!("SAL modules registered successfully.");
-}
-
-/// Create a shared system engine using the factory.
-pub fn create_system_engine() -> Arc<Engine> {
-    EngineFactory::global().get_engine()
-}
@@ -1,5 +1,4 @@
 // Core modules
-pub mod engine;
 pub mod async_runner;
 pub mod sync_runner;
 pub mod runner_trait;
@@ -11,8 +10,6 @@ pub mod client;
 pub use runner_trait::{Runner, RunnerConfig, spawn_runner};
 pub use async_runner::{AsyncRunner, spawn_async_runner};
 pub use sync_runner::{SyncRunner, SyncRunnerConfig, spawn_sync_runner};
-pub use engine::system::{register_sal_modules, create_system_engine};
-pub use engine::osis::{register_dsl_modules, create_osis_engine, create_shared_osis_engine};

 // Re-export job types from local job module
 pub use job::{Job, JobStatus, JobError, JobBuilder};
@@ -3,6 +3,7 @@ use crate::runner_trait::Runner;
 use log::{debug, error, info};
 use rhai::{Engine, Dynamic};
 use std::sync::Arc;
+use tracing::subscriber::with_default;

 /// Configuration for sync runner instances
 #[derive(Debug, Clone)]
@@ -72,11 +73,58 @@ impl Runner for SyncRunner {
         debug!("Sync Runner '{}', Job {}: Processing started.", runner_id, job_id);
         info!("Sync Runner '{}' processing job_id: {}. Script: {:.50}...", job.context_id, job_id, job.payload);

-        // Create a new engine instance (cheap with factory pattern)
-        let mut engine = (self.engine_factory)();
-        // Execute the script
-        match Self::execute_job_with_engine(&mut engine, &job) {
+        // Determine logs directory (default to ~/hero/logs)
+        let logs_root = if let Some(home) = std::env::var_os("HOME") {
+            std::path::PathBuf::from(home).join("hero").join("logs")
+        } else {
+            std::path::PathBuf::from("logs")
+        };
+
+        // Create job-specific logger
+        let job_logger_result = hero_logger::create_job_logger_with_guard(
+            &logs_root,
+            runner_id, // Use runner_id as the actor_type
+            job_id,
+        );
+
+        // Verify signatures before executing (if any)
+        if let Err(e) = job.verify_signatures() {
+            error!("Job {} signature verification failed: {}", job_id, e);
+            return Err(Box::new(e));
+        }
+
+        // Execute job within logging context
+        let result = match job_logger_result {
+            Ok((job_logger, _guard)) => {
+                // Execute ALL job processing within logging context
+                with_default(job_logger, || {
+                    tracing::info!("Job {} started", job_id);
+
+                    // Create a new engine instance and configure Rhai logging
+                    let mut engine = (self.engine_factory)();
+
+                    // Reconfigure Rhai logging for this specific job context
+                    // This ensures print() and debug() calls go to the job logger
+                    hero_logger::rhai_integration::configure_rhai_logging(&mut engine, runner_id);
+
+                    // Execute the script
+                    let script_result = Self::execute_job_with_engine(&mut engine, &job);
+
+                    tracing::info!("Job {} completed", job_id);
+
+                    script_result
+                })
+            }
+            Err(e) => {
+                error!("Failed to create job logger for job {}: {}", job_id, e);
+                // Fallback: execute without job-specific logging
+                let mut engine = (self.engine_factory)();
+                Self::execute_job_with_engine(&mut engine, &job)
+            }
+        };
+
+        // Process result
+        match result {
             Ok(result) => {
                 let output_str = if result.is::<String>() {
                     result.into_string().unwrap()
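The rewritten `process_job` scopes all execution inside `tracing::subscriber::with_default`, which installs the per-job subscriber only for the duration of the closure and then restores the previous default. A self-contained sketch of that scoping behaviour; the `FmtSubscriber` from the tracing-subscriber crate is only a stand-in for the job logger created by `hero_logger::create_job_logger_with_guard`:

use tracing_subscriber::FmtSubscriber;

fn scoped_logging_demo() {
    // Stand-in for the per-job logger used in the diff above.
    let job_subscriber = FmtSubscriber::new();

    tracing::subscriber::with_default(job_subscriber, || {
        // Everything emitted here - including Rhai print()/debug() output
        // once configure_rhai_logging is applied - goes to job_subscriber.
        tracing::info!("captured by the job-specific subscriber");
    });

    // The previous default subscriber is restored at this point.
    tracing::info!("back on the global default subscriber");
}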
@@ -1,118 +1,212 @@
-use std::process::Command;
 use std::time::Duration;
-use tokio::time::timeout;
+use tokio::time::sleep;
+use runner_rust::{JobBuilder, Client};
+use runner_rust::job::JobSignature;
+use uuid::Uuid;
+use rhai::Engine;

-/// Test the SAL runner in script mode with a simple ping script
+/// Helper function to create a SAL engine for testing
+fn create_test_sal_engine() -> Engine {
+    // Create a basic Rhai engine for testing
+    Engine::new()
+}
+
+/// Test job execution with empty signatures array
+/// This verifies that jobs without signatures can execute successfully
 #[tokio::test]
-async fn test_sal_runner_script_mode_ping() {
-    let output = timeout(
-        Duration::from_secs(10),
-        run_sal_runner_script_mode("test_sal_ping")
-    ).await;
-
-    match output {
-        Ok(result) => {
-            assert!(result.is_ok(), "SAL runner should execute successfully");
-            let stdout = result.unwrap();
-            assert!(stdout.contains("pong"),
-                "Output should contain 'pong' response: {}", stdout);
-        }
-        Err(_) => panic!("Test timed out after 10 seconds"),
-    }
+async fn test_job_execution_without_signatures() {
+    let redis_url = "redis://localhost:6379";
+    let runner_id = format!("test-runner-{}", Uuid::new_v4());
+
+    // Create client
+    let mut client = Client::builder()
+        .redis_url(redis_url)
+        .build()
+        .await
+        .expect("Failed to create client");
+
+    // Create job with empty signatures array
+    let job = JobBuilder::new()
+        .caller_id("test_caller")
+        .context_id("test_context")
+        .payload("print(\"Hello from unsigned job\");")
+        .runner(&runner_id)
+        .executor("rhai")
+        .timeout(30)
+        .build()
+        .expect("Job creation should succeed");
+
+    let job_id = job.id.clone();
+
+    // Verify signatures array is empty
+    assert!(job.signatures.is_empty(), "Job should have no signatures");
+
+    // Save job to Redis
+    client.store_job_in_redis(&job).await
+        .expect("Failed to save job to Redis");
+
+    // Queue the job
+    client.dispatch_job(&job_id, &runner_id).await
+        .expect("Failed to queue job");
+
+    // Spawn runner in background
+    let runner_id_clone = runner_id.clone();
+    let redis_url_clone = redis_url.to_string();
+    let runner_handle = tokio::spawn(async move {
+        let (shutdown_tx, shutdown_rx) = tokio::sync::mpsc::channel::<()>(1);
+
+        // Run for 5 seconds then shutdown
+        tokio::spawn(async move {
+            sleep(Duration::from_secs(5)).await;
+            let _ = shutdown_tx.send(()).await;
+        });
+
+        runner_rust::spawn_sync_runner(
+            runner_id_clone,
+            redis_url_clone,
+            shutdown_rx,
+            create_test_sal_engine,
+        ).await
+    });
+
+    // Wait for job to be processed
+    sleep(Duration::from_secs(3)).await;
+
+    // Check job result
+    let result = client.get_result(&job_id).await
+        .expect("Failed to get job result");
+
+    assert!(result.is_some(), "Job should have produced a result");
+
+    // Cleanup
+    let _ = runner_handle.await;
+    client.delete_job(&job_id).await.ok();
 }

-/// Test the OSIS runner in script mode with a simple ping script
+/// Test job signature verification with valid signatures
+/// This verifies that jobs with valid signatures are accepted
 #[tokio::test]
-async fn test_osis_runner_script_mode_ping() {
-    let output = timeout(
-        Duration::from_secs(10),
-        run_osis_runner_script_mode("test_osis_ping")
-    ).await;
-
-    match output {
-        Ok(result) => {
-            assert!(result.is_ok(), "OSIS runner should execute successfully");
-            let stdout = result.unwrap();
-            assert!(stdout.contains("pong"),
-                "Output should contain 'pong' response: {}", stdout);
-        }
-        Err(_) => panic!("Test timed out after 10 seconds"),
-    }
-}
-
-/// Helper function to run SAL runner in script mode
-async fn run_sal_runner_script_mode(
-    runner_id: &str
-) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
-    let output = Command::new("cargo")
-        .args(&[
-            "run", "--bin", "runner_sal", "--",
-            runner_id,
-            "-s", "ping"
-        ])
-        .output()?;
-
-    if output.status.success() {
-        Ok(String::from_utf8_lossy(&output.stdout).to_string())
-    } else {
-        let stderr = String::from_utf8_lossy(&output.stderr);
-        Err(format!("Command failed: {}", stderr).into())
-    }
-}
-
-/// Helper function to run OSIS runner in script mode
-async fn run_osis_runner_script_mode(
-    runner_id: &str
-) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
-    let output = Command::new("cargo")
-        .args(&[
-            "run", "--bin", "runner_osis", "--",
-            runner_id,
-            "-s", "ping"
-        ])
-        .output()?;
-
-    if output.status.success() {
-        Ok(String::from_utf8_lossy(&output.stdout).to_string())
-    } else {
-        let stderr = String::from_utf8_lossy(&output.stderr);
-        Err(format!("Command failed: {}", stderr).into())
-    }
-}
-
-/// Test basic compilation and help output
-#[tokio::test]
-async fn test_sal_runner_help() {
-    let output = Command::new("cargo")
-        .args(&["run", "--bin", "runner_sal", "--", "--help"])
-        .output()
-        .expect("Failed to execute command");
-
-    assert!(output.status.success(), "Help command should succeed");
-    let stdout = String::from_utf8_lossy(&output.stdout);
-    assert!(stdout.contains("Usage") || stdout.contains("USAGE"),
-        "Help output should contain usage information");
-}
-
-/// Test basic compilation and help output for OSIS runner
-#[tokio::test]
-async fn test_osis_runner_help() {
-    let output = Command::new("cargo")
-        .args(&["run", "--bin", "runner_osis", "--", "--help"])
-        .output()
-        .expect("Failed to execute command");
-
-    assert!(output.status.success(), "Help command should succeed");
-    let stdout = String::from_utf8_lossy(&output.stdout);
-    assert!(stdout.contains("Usage") || stdout.contains("USAGE"),
-        "Help output should contain usage information");
-}
-
-/// Test library functionality - job creation and basic operations
+async fn test_job_signature_verification() {
+    use secp256k1::{Secp256k1, SecretKey, Message};
+    use sha2::{Sha256, Digest};
+
+    let redis_url = "redis://localhost:6379";
+    let runner_id = format!("test-runner-{}", Uuid::new_v4());
+
+    // Create client
+    let mut client = Client::builder()
+        .redis_url(redis_url)
+        .build()
+        .await
+        .expect("Failed to create client");
+
+    // Generate a keypair for signing
+    let secp = Secp256k1::new();
+    let secret_key = SecretKey::from_slice(&[0xcd; 32])
+        .expect("32 bytes, within curve order");
+    let public_key = secp256k1::PublicKey::from_secret_key(&secp, &secret_key);
+
+    // Create job
+    let mut job = JobBuilder::new()
+        .caller_id("test_caller")
+        .context_id("test_context")
+        .payload("print(\"Hello from signed job\");")
+        .runner(&runner_id)
+        .executor("rhai")
+        .timeout(30)
+        .build()
+        .expect("Job creation should succeed");
+
+    let job_id = job.id.clone();
+
+    // Sign the job
+    let canonical = job.canonical_representation();
+    let mut hasher = Sha256::new();
+    hasher.update(canonical.as_bytes());
+    let hash = hasher.finalize();
+    let message = Message::from_digest_slice(&hash)
+        .expect("32 bytes");
+    let signature = secp.sign_ecdsa(&message, &secret_key);
+
+    // Add signature to job
+    job.signatures.push(JobSignature {
+        public_key: hex::encode(public_key.serialize()),
+        signature: hex::encode(signature.serialize_compact()),
+    });
+
+    // Verify the job has a signature
+    assert_eq!(job.signatures.len(), 1, "Job should have one signature");
+
+    // Verify signatures are valid
+    job.verify_signatures()
+        .expect("Signature verification should succeed");
+
+    // Save and queue job
+    client.store_job_in_redis(&job).await
+        .expect("Failed to save job to Redis");
+    client.dispatch_job(&job_id, &runner_id).await
+        .expect("Failed to queue job");
+
+    // Spawn runner
+    let runner_id_clone = runner_id.clone();
+    let redis_url_clone = redis_url.to_string();
+    let runner_handle = tokio::spawn(async move {
+        let (shutdown_tx, shutdown_rx) = tokio::sync::mpsc::channel::<()>(1);
+
+        tokio::spawn(async move {
+            sleep(Duration::from_secs(5)).await;
+            let _ = shutdown_tx.send(()).await;
+        });
+
+        runner_rust::spawn_sync_runner(
+            runner_id_clone,
+            redis_url_clone,
+            shutdown_rx,
+            create_test_sal_engine,
+        ).await
+    });
+
+    // Wait for processing
+    sleep(Duration::from_secs(3)).await;
+
+    // Check result
+    let result = client.get_result(&job_id).await
+        .expect("Failed to get job result");
+
+    assert!(result.is_some(), "Signed job should have produced a result");
+
+    // Cleanup
+    let _ = runner_handle.await;
+    client.delete_job(&job_id).await.ok();
+}
+
+/// Test job with invalid signature is rejected
 #[tokio::test]
+async fn test_job_invalid_signature_rejected() {
+    // Create job with invalid signature
+    let mut job = JobBuilder::new()
+        .caller_id("test_caller")
+        .context_id("test_context")
+        .payload("print(\"This should fail\");")
+        .runner("test-runner")
+        .executor("rhai")
+        .build()
+        .expect("Job creation should succeed");
+
+    // Add invalid signature
+    job.signatures.push(JobSignature {
+        public_key: "04invalid_public_key".to_string(),
+        signature: "invalid_signature".to_string(),
+    });
+
+    // Verify signatures should fail
+    let result = job.verify_signatures();
+    assert!(result.is_err(), "Invalid signature should be rejected");
+}
+
+/// Test job creation and serialization
+#[tokio::test]
 async fn test_job_creation_and_serialization() {
-    use runner_rust::JobBuilder;
-
     let job = JobBuilder::new()
         .caller_id("test_caller")
         .context_id("test_context")
@@ -127,4 +221,17 @@ async fn test_job_creation_and_serialization() {
     assert_eq!(job.payload, "ping");
     assert_eq!(job.runner, "default");
     assert_eq!(job.executor, "rhai");
+    assert!(job.signatures.is_empty(), "Default job should have no signatures");
+
+    // Test serialization
+    let json = serde_json::to_string(&job)
+        .expect("Job should serialize to JSON");
+
+    // Test deserialization
+    let deserialized: runner_rust::Job = serde_json::from_str(&json)
+        .expect("Job should deserialize from JSON");
+
+    assert_eq!(job.id, deserialized.id);
+    assert_eq!(job.caller_id, deserialized.caller_id);
+    assert_eq!(job.signatures.len(), deserialized.signatures.len());
 }
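For reference, the signing flow exercised by `test_job_signature_verification` condenses to: hash the job's canonical representation with SHA-256, ECDSA-sign the 32-byte digest with secp256k1, and attach the hex-encoded key/signature pair. A standalone sketch over an arbitrary payload; the helper name is illustrative, not part of the codebase:

use secp256k1::{Message, Secp256k1, SecretKey};
use sha2::{Digest, Sha256};

// Illustrative helper mirroring the test above: returns the
// (public_key, signature) pair as hex strings, ready to be pushed
// as a JobSignature.
fn sign_canonical(payload: &str, key_bytes: &[u8; 32]) -> (String, String) {
    let secp = Secp256k1::new();
    let secret_key = SecretKey::from_slice(key_bytes).expect("valid 32-byte key");
    let public_key = secp256k1::PublicKey::from_secret_key(&secp, &secret_key);

    // SHA-256 the canonical payload, then sign the 32-byte digest.
    let digest = Sha256::digest(payload.as_bytes());
    let message = Message::from_digest_slice(digest.as_slice()).expect("32 bytes");
    let signature = secp.sign_ecdsa(&message, &secret_key);

    (
        hex::encode(public_key.serialize()),
        hex::encode(signature.serialize_compact()),
    )
}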