Compare commits

9 Commits

- 04a1af2423
- 337ec2f660
- 69e612e521
- 0df79e78c6
- b31651cfeb
- 831b25dbfa
- ce76f0a2f7
- 6c5c97e647
- dcf0f41bb8
@ -22,7 +22,7 @@ Both examples demonstrate the ping/pong functionality built into the Hero actors

2. **Rust Environment**: Make sure you can build the actor binaries

```bash
- cd /path/to/herocode/hero/core/actor
+ cd /path/to/herocode/baobab/core/actor
cargo build --bin osis --bin system
```
@ -6,6 +6,11 @@ edition = "2021"

[lib]
name = "baobab_actor" # Can be different from package name, or same
path = "src/lib.rs"
crate-type = ["cdylib", "rlib"]

[[bin]]
name = "baobab-actor-tui"
path = "cmd/terminal_ui_main.rs"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -18,17 +23,23 @@ tokio = { version = "1", features = ["macros", "rt-multi-thread", "time"] }

log = "0.4"
env_logger = "0.10"
clap = { version = "4.4", features = ["derive"] }
- uuid = { version = "1.6", features = ["v4", "serde"] } # Though task_id is string, uuid might be useful
+ uuid = { version = "1.6", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
toml = "0.8"
thiserror = "1.0"
async-trait = "0.1"
# TUI dependencies
anyhow = "1.0"
crossterm = "0.28"
ratatui = "0.28"
hero_supervisor = { path = "../supervisor" }
hero_job = { path = "../job" }
heromodels = { git = "https://git.ourworld.tf/herocode/db.git" }
heromodels_core = { git = "https://git.ourworld.tf/herocode/db.git" }
heromodels-derive = { git = "https://git.ourworld.tf/herocode/db.git" }

[features]
default = ["calendar", "finance"]
calendar = []

@ -37,3 +48,4 @@ flow = []

legal = []
projects = []
biz = []
@ -73,3 +73,12 @@ Key dependencies include:

- `clap`: For command-line argument parsing.
- `tokio`: For the asynchronous runtime.
- `log`, `env_logger`: For logging.

## TUI Example

```bash
cargo run --example baobab-actor-tui -- --id osis --path /Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/target/debug/actor_osis --example-dir /Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/examples/scripts
```

The TUI will allow you to monitor the actor's job queue and dispatch new jobs to it.
core/actor/cmd/terminal_ui.rs (new file, 1075 lines)
File diff suppressed because it is too large.
core/actor/examples/terminal_ui_main.rs (new file, 146 lines)

@ -0,0 +1,146 @@

//! Simplified main function for Baobab Actor TUI
//!
//! This binary provides a clean entry point for the actor monitoring and job dispatch interface.

use anyhow::{Result, Context};
use baobab_actor::terminal_ui::{App, setup_and_run_tui};
use clap::Parser;
use log::{info, warn, error};
use std::path::PathBuf;
use std::process::{Child, Command};
use tokio::signal;

#[derive(Parser)]
#[command(name = "baobab-actor-tui")]
#[command(about = "Terminal UI for Baobab Actor - Monitor and dispatch jobs to a single actor")]
struct Args {
    /// Actor ID to monitor
    #[arg(short, long)]
    id: String,

    /// Path to actor binary
    #[arg(short, long)]
    path: PathBuf,

    /// Directory containing example .rhai scripts
    #[arg(short, long)]
    example_dir: Option<PathBuf>,

    /// Redis URL for job queue
    #[arg(short, long, default_value = "redis://localhost:6379")]
    redis_url: String,

    /// Enable verbose logging
    #[arg(short, long)]
    verbose: bool,
}

/// Initialize logging based on verbosity level
fn init_logging(verbose: bool) {
    if verbose {
        env_logger::Builder::from_default_env()
            .filter_level(log::LevelFilter::Debug)
            .init();
    } else {
        env_logger::Builder::from_default_env()
            .filter_level(log::LevelFilter::Info)
            .init();
    }
}

/// Create and configure the TUI application
fn create_app(args: &Args) -> Result<App> {
    App::new(
        args.id.clone(),
        args.path.clone(),
        args.redis_url.clone(),
        args.example_dir.clone(),
    )
}

/// Spawn the actor binary as a background process
fn spawn_actor_process(args: &Args) -> Result<Child> {
    info!("🎬 Spawning actor process: {}", args.path.display());

    let mut cmd = Command::new(&args.path);

    // Redirect stdout and stderr to null to prevent logs from interfering with TUI
    cmd.stdout(std::process::Stdio::null())
        .stderr(std::process::Stdio::null());

    // Spawn the process
    let child = cmd
        .spawn()
        .with_context(|| format!("Failed to spawn actor process: {}", args.path.display()))?;

    info!("✅ Actor process spawned with PID: {}", child.id());
    Ok(child)
}

/// Cleanup function to terminate actor process
fn cleanup_actor_process(mut actor_process: Child) {
    info!("🧹 Cleaning up actor process...");

    match actor_process.try_wait() {
        Ok(Some(status)) => {
            info!("Actor process already exited with status: {}", status);
        }
        Ok(None) => {
            info!("Terminating actor process...");
            if let Err(e) = actor_process.kill() {
                error!("Failed to kill actor process: {}", e);
            } else {
                match actor_process.wait() {
                    Ok(status) => info!("Actor process terminated with status: {}", status),
                    Err(e) => error!("Failed to wait for actor process: {}", e),
                }
            }
        }
        Err(e) => {
            error!("Failed to check actor process status: {}", e);
        }
    }
}

#[tokio::main]
async fn main() -> Result<()> {
    let args = Args::parse();

    // Initialize logging
    init_logging(args.verbose);

    info!("🚀 Starting Baobab Actor TUI...");
    info!("Actor ID: {}", args.id);
    info!("Actor Path: {}", args.path.display());
    info!("Redis URL: {}", args.redis_url);

    if let Some(ref example_dir) = args.example_dir {
        info!("Example Directory: {}", example_dir.display());
    }

    // Spawn the actor process first
    let actor_process = spawn_actor_process(&args)?;

    // Give the actor a moment to start up
    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;

    // Create app and run TUI
    let app = create_app(&args)?;

    // Set up signal handling for graceful shutdown
    let result = tokio::select! {
        tui_result = setup_and_run_tui(app) => {
            info!("TUI exited");
            tui_result
        }
        _ = signal::ctrl_c() => {
            info!("Received Ctrl+C, shutting down...");
            Ok(())
        }
    };

    // Clean up the actor process
    cleanup_actor_process(actor_process);

    result
}
@ -30,7 +30,7 @@

use hero_job::Job;
use log::{debug, error, info};
use redis::AsyncCommands;
use rhai::Engine;

use std::sync::Arc;
use std::time::Duration;
use tokio::sync::mpsc;
core/actor/src/config.rs (new file, 0 lines)
@ -8,11 +8,14 @@ use tokio::task::JoinHandle;

/// Actor trait abstraction for unified actor interface
pub mod actor_trait;

/// Terminal UI module for actor monitoring and job dispatch
pub mod terminal_ui;

const NAMESPACE_PREFIX: &str = "hero:job:";
const BLPOP_TIMEOUT_SECONDS: usize = 5;

/// Initialize Redis connection for the actor
- pub(crate) async fn initialize_redis_connection(
+ pub async fn initialize_redis_connection(
    actor_id: &str,
    redis_url: &str,
) -> Result<redis::aio::MultiplexedConnection, Box<dyn std::error::Error + Send + Sync>> {

@ -113,6 +116,29 @@ async fn execute_script_and_update_status(

    }
}

/// Execute a job with the given engine, setting proper job context
///
/// This function sets up the engine with job context (DB_PATH, CALLER_ID, CONTEXT_ID)
/// and evaluates the script. It returns the result or error without updating Redis.
/// This allows actors to handle Redis updates according to their own patterns.
pub async fn execute_job_with_engine(
    engine: &mut Engine,
    job: &Job,
    db_path: &str,
) -> Result<Dynamic, Box<rhai::EvalAltResult>> {
    // Set up job context in the engine
    let mut db_config = rhai::Map::new();
    db_config.insert("DB_PATH".into(), db_path.to_string().into());
    db_config.insert("CALLER_ID".into(), job.caller_id.clone().into());
    db_config.insert("CONTEXT_ID".into(), job.context_id.clone().into());
    engine.set_default_tag(Dynamic::from(db_config));

    debug!("Actor for Context ID '{}': Evaluating script with Rhai engine (job context set).", job.context_id);

    // Execute the script with the configured engine
    engine.eval::<Dynamic>(&job.script)
}

/// Clean up job from Redis if preserve_tasks is false
async fn cleanup_job(
    redis_conn: &mut redis::aio::MultiplexedConnection,
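For context, here is a minimal sketch (not part of this changeset) of how an actor loop might call `execute_job_with_engine` and then handle the Redis updates itself, as the doc comment above suggests. The `hero:job:{job_id}` hash fields and the `:result` queue follow the conventions described elsewhere in this changeset; the database path and error handling are illustrative assumptions.

```rust
// Hypothetical caller, sketched against the signature above.
async fn process_one_job(
    engine: &mut rhai::Engine,
    job: &hero_job::Job,
    conn: &mut redis::aio::MultiplexedConnection,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    use redis::AsyncCommands;
    match baobab_actor::execute_job_with_engine(engine, job, "/tmp/actor_db").await {
        Ok(result) => {
            // Store the result on the job hash and push it to the result queue (assumed key layout).
            let _: () = conn.hset(format!("hero:job:{}", job.id), "output", result.to_string()).await?;
            let _: () = conn.lpush(format!("hero:job:{}:result", job.id), result.to_string()).await?;
        }
        Err(e) => {
            let _: () = conn.hset(format!("hero:job:{}", job.id), "error", e.to_string()).await?;
        }
    }
    Ok(())
}
```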
core/actor/src/main.rs (new file, 38 lines)

@ -0,0 +1,38 @@

#[cfg(feature = "wasm")]
use baobab_actor::ui::App;
#[cfg(feature = "wasm")]
use yew::prelude::*;

#[cfg(feature = "wasm")]
fn main() {
    console_log::init_with_level(log::Level::Debug).expect("Failed to initialize logger");

    // Get configuration from URL parameters or local storage
    let window = web_sys::window().expect("No global window exists");
    let location = window.location();
    let search = location.search().unwrap_or_default();

    // Parse URL parameters for actor configuration
    let url_params = web_sys::UrlSearchParams::new_with_str(&search).unwrap();

    let actor_id = url_params.get("id").unwrap_or_else(|| "default_actor".to_string());
    let actor_path = url_params.get("path").unwrap_or_else(|| "/path/to/actor".to_string());
    let example_dir = url_params.get("example_dir");
    let redis_url = url_params.get("redis_url").unwrap_or_else(|| "redis://localhost:6379".to_string());

    log::info!("Starting Baobab Actor UI with actor_id: {}", actor_id);

    yew::Renderer::<App>::with_props(baobab_actor::ui::app::AppProps {
        actor_id,
        actor_path,
        example_dir,
        redis_url,
    }).render();
}

#[cfg(not(feature = "wasm"))]
fn main() {
    eprintln!("This binary is only available with the 'wasm' feature enabled.");
    eprintln!("Please compile with: cargo build --features wasm --target wasm32-unknown-unknown");
    std::process::exit(1);
}
core/actor/src/terminal_ui.rs (new file, 1338 lines)
File diff suppressed because it is too large.
@ -76,6 +76,11 @@ impl JobBuilder {

        self
    }

    pub fn caller_id(mut self, caller_id: &str) -> Self {
        self.caller_id = caller_id.to_string();
        self
    }

    pub fn script(mut self, script: &str) -> Self {
        self.script = script.to_string();
        self
@ -7,6 +7,7 @@ use redis::AsyncCommands;

use thiserror::Error;

mod builder;
pub use builder::JobBuilder;

/// Redis namespace prefix for all Hero job-related keys
pub const NAMESPACE_PREFIX: &str = "hero:job:";
@ -3,6 +3,14 @@ name = "hero_supervisor"

version = "0.1.0"
edition = "2021"

[[bin]]
name = "supervisor-cli"
path = "cmd/supervisor_cli.rs"

[[bin]]
name = "supervisor-tui"
path = "cmd/supervisor_tui.rs"

[dependencies]
clap = { version = "4.4", features = ["derive"] }
env_logger = "0.10"
core/supervisor/cmd/README.md (new file, 117 lines)

@ -0,0 +1,117 @@

# Supervisor CLI

Interactive command-line interface for the Hero Supervisor that allows you to dispatch jobs to actors and manage the job lifecycle.

## Features

- **Interactive Menu**: Easy-to-use menu system for all supervisor operations
- **Job Management**: Create, run, monitor, and manage jobs
- **OSIS Actor Integration**: Dispatch Rhai scripts to the OSIS actor
- **Real-time Results**: Get immediate feedback from job execution
- **Colorized Output**: Clear visual feedback with colored status indicators

## Usage

### 1. Build the OSIS Actor

First, ensure the OSIS actor is built:

```bash
cd /Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis
cargo build
```

### 2. Configure the Supervisor

Create or use the example configuration file at `examples/cli_config.toml`:

```toml
[global]
redis_url = "redis://127.0.0.1/"

[actors]
osis_actor = "/Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/target/debug/actor_osis"
```

### 3. Run the CLI

```bash
cd /Users/timurgordon/code/git.ourworld.tf/herocode/baobab/core/supervisor
cargo run --bin supervisor-cli -- --config examples/cli_config.toml
```

Or with verbose logging:

```bash
cargo run --bin supervisor-cli -- --config examples/cli_config.toml --verbose
```

## Available Commands

1. **list_jobs** - List all jobs in the system
2. **run_job** - Create and run a new job interactively
3. **get_job_status** - Get status of a specific job
4. **get_job_output** - Get output of a completed job
5. **get_job_logs** - Get logs for a specific job
6. **stop_job** - Stop a running job
7. **delete_job** - Delete a specific job
8. **clear_all_jobs** - Clear all jobs from the system
9. **quit** - Exit the CLI

## Example Workflow

1. Start the CLI with your configuration
2. Select option `2` (run_job)
3. Enter job details:
   - **Caller**: Your name or identifier
   - **Context**: Description of what the job does
   - **Script**: Rhai script to execute (end with empty line)
4. The job is automatically dispatched to the OSIS actor
5. View the real-time result

### Example Rhai Script

```rhai
// Simple calculation
let result = 10 + 20 * 3;
print("Calculation result: " + result);
result
```

```rhai
// Working with strings
let message = "Hello from OSIS Actor!";
print(message);
message.to_upper()
```

## Job Status Colors

- **Created** - Cyan
- **Dispatched** - Blue
- **Started** - Yellow
- **Finished** - Green
- **Error** - Red

## Prerequisites

- Redis server running on localhost:6379 (or configured URL)
- OSIS actor binary built and accessible
- Proper permissions to start/stop processes via Zinit

## Troubleshooting

### Actor Not Starting
- Verify the OSIS actor binary path in the TOML config
- Check that the binary exists and is executable
- Ensure Redis is running and accessible

### Connection Issues
- Verify Redis URL in configuration
- Check network connectivity to Redis server
- Ensure no firewall blocking connections

### Job Execution Failures
- Check job logs using `get_job_logs` command
- Verify Rhai script syntax
- Check actor logs for detailed error information
core/supervisor/cmd/TUI_README.md (new file, 178 lines)

@ -0,0 +1,178 @@

# Supervisor Terminal UI (TUI)

A modern, interactive Terminal User Interface for the Hero Supervisor that provides intuitive job management with real-time updates and visual navigation.

## Features

### 🎯 **Intuitive Interface**
- **Split-pane Layout**: Job list on the left, details on the right
- **Real-time Updates**: Auto-refreshes every 2 seconds
- **Color-coded Status**: Visual job status indicators
- **Keyboard Navigation**: Vim-style and arrow key support

### 📋 **Job Management**
- **Create Jobs**: Interactive form with tab navigation
- **Monitor Jobs**: Real-time status updates with color coding
- **View Details**: Detailed job information and output
- **View Logs**: Access job execution logs
- **Stop/Delete**: Job lifecycle management
- **Bulk Operations**: Clear all jobs with confirmation

### 🎨 **Visual Design**
- **Status Colors**:
  - 🔵 **Blue**: Dispatched
  - 🟡 **Yellow**: Started
  - 🟢 **Green**: Finished
  - 🔴 **Red**: Error
  - 🟣 **Magenta**: Waiting for Prerequisites
- **Highlighted Selection**: Clear visual feedback
- **Popup Messages**: Status and error notifications
- **Confirmation Dialogs**: Safe bulk operations

## Usage

### 1. Start the TUI

```bash
cd /Users/timurgordon/code/git.ourworld.tf/herocode/baobab/core/supervisor
cargo run --bin supervisor-tui -- --config examples/cli_config.toml
```

### 2. Navigation

#### Main View
- **↑/↓ or j/k**: Navigate job list
- **Enter/Space**: View job details
- **n/c**: Create new job
- **r**: Manual refresh
- **d**: Delete selected job (with confirmation)
- **s**: Stop selected job
- **C**: Clear all jobs (with confirmation)
- **q**: Quit application

#### Job Creation Form
- **Tab**: Next field
- **Shift+Tab**: Previous field
- **Enter**: Next field (or newline in script field)
- **F5**: Submit job
- **Esc**: Cancel and return to main view

#### Job Details/Logs View
- **Esc/q**: Return to main view
- **l**: Switch to logs view
- **d**: Switch to details view

## Interface Layout

```
┌─────────────────────────────────────────────────────────────┐
│ Hero Supervisor TUI - Job Management │
├─────────────────────┬───────────────────────────────────────┤
│ Jobs │ Job Details │
│ │ │
│ >> 1a2b3c4d - ✅ Fi │ Job ID: 1a2b3c4d5e6f7g8h │
│ 2b3c4d5e - 🟡 St │ Status: Finished │
│ 3c4d5e6f - 🔴 Er │ │
│ 4d5e6f7g - 🔵 Di │ Output: │
│ │ Calculation result: 70 │
│ │ 70 │
├─────────────────────┴───────────────────────────────────────┤
│ q: Quit | n: New Job | ↑↓: Navigate | Enter: Details │
└─────────────────────────────────────────────────────────────┘
```

## Job Creation Workflow

1. **Press 'n'** to create a new job
2. **Fill in the form**:
   - **Caller**: Your name or identifier
   - **Context**: Job description
   - **Script**: Rhai script (supports multi-line)
3. **Press F5** to submit
4. **Watch real-time execution** in the main view

### Example Rhai Scripts

```rhai
// Simple calculation
let result = 10 + 20 * 3;
print("Calculation result: " + result);
result
```

```rhai
// String manipulation
let message = "Hello from OSIS Actor!";
print(message);
message.to_upper()
```

```rhai
// Loop example
let sum = 0;
for i in 1..=10 {
    sum += i;
}
print("Sum of 1-10: " + sum);
sum
```

## Key Improvements over CLI

### ✅ **Better UX**
- **Visual Navigation**: No need to remember numbers
- **Real-time Updates**: See job progress immediately
- **Split-pane Design**: View list and details simultaneously
- **Form Validation**: Clear error messages

### ✅ **Enhanced Productivity**
- **Auto-refresh**: Always up-to-date information
- **Keyboard Shortcuts**: Fast navigation and actions
- **Confirmation Dialogs**: Prevent accidental operations
- **Multi-line Script Input**: Better script editing

### ✅ **Professional Interface**
- **Color-coded Status**: Quick visual assessment
- **Consistent Layout**: Predictable interface elements
- **Popup Notifications**: Non-intrusive feedback
- **Graceful Error Handling**: User-friendly error messages

## Prerequisites

- Redis server running (default: localhost:6379)
- OSIS actor binary built and configured
- Terminal with color support
- Minimum terminal size: 80x24

## Troubleshooting

### Display Issues
- Ensure terminal supports colors and Unicode
- Resize terminal if layout appears broken
- Use a modern terminal emulator (iTerm2, Alacritty, etc.)

### Performance
- TUI auto-refreshes every 2 seconds
- Large job lists may impact performance
- Use 'r' for manual refresh if needed

### Navigation Issues
- Use arrow keys if vim keys (j/k) don't work
- Ensure terminal is in focus
- Try Esc to reset state if stuck

## Advanced Features

### Bulk Operations
- **Clear All Jobs**: Press 'C' with confirmation
- **Safe Deletion**: Confirmation required for destructive operations

### Real-time Monitoring
- **Auto-refresh**: Updates every 2 seconds
- **Status Tracking**: Watch job progression
- **Immediate Feedback**: See results as they complete

### Multi-line Scripts
- **Rich Text Input**: Full script editing in TUI
- **Syntax Awareness**: Better than single-line CLI input
- **Preview**: See script before submission
core/supervisor/cmd/supervisor_cli.rs (new file, 398 lines)

@ -0,0 +1,398 @@

use clap::Parser;
use colored::*;
use hero_supervisor::{Supervisor, SupervisorBuilder, SupervisorError, Job, JobStatus, ScriptType};
use log::{error, info};
use std::io::{self, Write};
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tokio::time::sleep;

#[derive(Parser)]
#[command(name = "supervisor-cli")]
#[command(about = "Interactive CLI for Hero Supervisor - Dispatch jobs to actors")]
struct Args {
    /// Path to TOML configuration file
    #[arg(short, long)]
    config: PathBuf,

    /// Enable verbose logging
    #[arg(short, long)]
    verbose: bool,
}

#[derive(Debug, Clone)]
enum CliCommand {
    ListJobs,
    RunJob,
    GetJobStatus,
    GetJobOutput,
    GetJobLogs,
    StopJob,
    DeleteJob,
    ClearAllJobs,
    Quit,
}

impl CliCommand {
    fn all_commands() -> Vec<(CliCommand, &'static str, &'static str)> {
        vec![
            (CliCommand::ListJobs, "list_jobs", "List all jobs in the system"),
            (CliCommand::RunJob, "run_job", "Create and run a new job"),
            (CliCommand::GetJobStatus, "get_job_status", "Get status of a specific job"),
            (CliCommand::GetJobOutput, "get_job_output", "Get output of a completed job"),
            (CliCommand::GetJobLogs, "get_job_logs", "Get logs for a specific job"),
            (CliCommand::StopJob, "stop_job", "Stop a running job"),
            (CliCommand::DeleteJob, "delete_job", "Delete a specific job"),
            (CliCommand::ClearAllJobs, "clear_all_jobs", "Clear all jobs from the system"),
            (CliCommand::Quit, "quit", "Exit the CLI"),
        ]
    }

    fn from_index(index: usize) -> Option<CliCommand> {
        Self::all_commands().get(index).map(|(cmd, _, _)| cmd.clone())
    }
}

struct SupervisorCli {
    supervisor: Arc<Supervisor>,
}

impl SupervisorCli {
    fn new(supervisor: Arc<Supervisor>) -> Self {
        Self { supervisor }
    }

    async fn run(&self) -> Result<(), SupervisorError> {
        println!("{}", "=== Hero Supervisor CLI ===".bright_blue().bold());
        println!("{}", "Interactive job management interface".cyan());
        println!();

        loop {
            self.display_menu();

            match self.get_user_choice().await {
                Some(command) => {
                    match command {
                        CliCommand::Quit => {
                            println!("{}", "Goodbye!".bright_green());
                            break;
                        }
                        _ => {
                            if let Err(e) = self.execute_command(command).await {
                                eprintln!("{} {}", "Error:".bright_red(), e);
                            }
                        }
                    }
                }
                None => {
                    println!("{}", "Invalid selection. Please try again.".yellow());
                }
            }

            println!();
        }

        Ok(())
    }

    fn display_menu(&self) {
        println!("{}", "Available Commands:".bright_yellow().bold());
        for (index, (_, name, description)) in CliCommand::all_commands().iter().enumerate() {
            println!("  {}. {} - {}",
                (index + 1).to_string().bright_white().bold(),
                name.bright_cyan(),
                description
            );
        }
        print!("\n{} ", "Select a command (1-9):".bright_white());
        io::stdout().flush().unwrap();
    }

    async fn get_user_choice(&self) -> Option<CliCommand> {
        let mut input = String::new();
        if io::stdin().read_line(&mut input).is_ok() {
            if let Ok(choice) = input.trim().parse::<usize>() {
                if choice > 0 {
                    return CliCommand::from_index(choice - 1);
                }
            }
        }
        None
    }

    async fn execute_command(&self, command: CliCommand) -> Result<(), SupervisorError> {
        match command {
            CliCommand::ListJobs => self.list_jobs().await,
            CliCommand::RunJob => self.run_job().await,
            CliCommand::GetJobStatus => self.get_job_status().await,
            CliCommand::GetJobOutput => self.get_job_output().await,
            CliCommand::GetJobLogs => self.get_job_logs().await,
            CliCommand::StopJob => self.stop_job().await,
            CliCommand::DeleteJob => self.delete_job().await,
            CliCommand::ClearAllJobs => self.clear_all_jobs().await,
            CliCommand::Quit => Ok(()),
        }
    }

    async fn list_jobs(&self) -> Result<(), SupervisorError> {
        println!("{}", "Listing all jobs...".bright_blue());

        let jobs = self.supervisor.list_jobs().await?;

        if jobs.is_empty() {
            println!("{}", "No jobs found.".yellow());
        } else {
            println!("{} jobs found:", jobs.len().to_string().bright_white().bold());
            for job_id in jobs {
                let status = self.supervisor.get_job_status(&job_id).await?;
                let status_color = match status {
                    JobStatus::Dispatched => "blue",
                    JobStatus::Started => "yellow",
                    JobStatus::Finished => "green",
                    JobStatus::Error => "red",
                    JobStatus::WaitingForPrerequisites => "magenta",
                };

                println!("  {} - {}",
                    job_id.bright_white(),
                    format!("{:?}", status).color(status_color)
                );
            }
        }

        Ok(())
    }

    async fn run_job(&self) -> Result<(), SupervisorError> {
        println!("{}", "Creating a new job...".bright_blue());

        // Get caller
        print!("Enter caller name: ");
        io::stdout().flush().unwrap();
        let mut caller = String::new();
        io::stdin().read_line(&mut caller).unwrap();
        let caller = caller.trim().to_string();

        // Get context
        print!("Enter job context: ");
        io::stdout().flush().unwrap();
        let mut context = String::new();
        io::stdin().read_line(&mut context).unwrap();
        let context = context.trim().to_string();

        // Get script
        println!("Enter Rhai script (end with empty line):");
        let mut script_lines = Vec::new();
        loop {
            let mut line = String::new();
            io::stdin().read_line(&mut line).unwrap();
            let line = line.trim_end_matches('\n');
            if line.is_empty() {
                break;
            }
            script_lines.push(line.to_string());
        }
        let script = script_lines.join("\n");

        if script.is_empty() {
            println!("{}", "Script cannot be empty!".bright_red());
            return Ok(());
        }

        // For now, default to OSIS actor (ScriptType::OSIS)
        let script_type = ScriptType::OSIS;

        // Create the job
        let job = Job::new(caller, context, script, script_type);

        println!("{} Job ID: {}",
            "Created job with".bright_green(),
            job.id.bright_white().bold()
        );

        // Run the job and await result
        println!("{}", "Dispatching job and waiting for result...".bright_blue());

        match self.supervisor.run_job_and_await_result(&job).await {
            Ok(result) => {
                println!("{}", "Job completed successfully!".bright_green().bold());
                println!("{} {}", "Result:".bright_yellow(), result.bright_white());
            }
            Err(e) => {
                println!("{} {}", "Job failed:".bright_red().bold(), e);
            }
        }

        Ok(())
    }

    async fn get_job_status(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to check status: ")?;

        let status = self.supervisor.get_job_status(&job_id).await?;
        let status_color = match status {
            JobStatus::Dispatched => "blue",
            JobStatus::Started => "yellow",
            JobStatus::Finished => "green",
            JobStatus::Error => "red",
            JobStatus::WaitingForPrerequisites => "magenta",
        };

        println!("{} {} - {}",
            "Job".bright_white(),
            job_id.bright_white().bold(),
            format!("{:?}", status).color(status_color).bold()
        );

        Ok(())
    }

    async fn get_job_output(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to get output: ")?;

        match self.supervisor.get_job_output(&job_id).await? {
            Some(output) => {
                println!("{}", "Job Output:".bright_yellow().bold());
                println!("{}", output.bright_white());
            }
            None => {
                println!("{}", "No output available for this job.".yellow());
            }
        }

        Ok(())
    }

    async fn get_job_logs(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to get logs: ")?;

        match self.supervisor.get_job_logs(&job_id).await? {
            Some(logs) => {
                println!("{}", "Job Logs:".bright_yellow().bold());
                println!("{}", logs.bright_white());
            }
            None => {
                println!("{}", "No logs available for this job.".yellow());
            }
        }

        Ok(())
    }

    async fn stop_job(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to stop: ")?;

        self.supervisor.stop_job(&job_id).await?;
        println!("{} {}",
            "Stop signal sent for job".bright_green(),
            job_id.bright_white().bold()
        );

        Ok(())
    }

    async fn delete_job(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to delete: ")?;

        self.supervisor.delete_job(&job_id).await?;
        println!("{} {}",
            "Deleted job".bright_green(),
            job_id.bright_white().bold()
        );

        Ok(())
    }

    async fn clear_all_jobs(&self) -> Result<(), SupervisorError> {
        print!("Are you sure you want to clear ALL jobs? (y/N): ");
        io::stdout().flush().unwrap();

        let mut confirmation = String::new();
        io::stdin().read_line(&mut confirmation).unwrap();

        if confirmation.trim().to_lowercase() == "y" {
            let count = self.supervisor.clear_all_jobs().await?;
            println!("{} {} jobs",
                "Cleared".bright_green().bold(),
                count.to_string().bright_white().bold()
            );
        } else {
            println!("{}", "Operation cancelled.".yellow());
        }

        Ok(())
    }

    fn prompt_for_job_id(&self, prompt: &str) -> Result<String, SupervisorError> {
        print!("{}", prompt);
        io::stdout().flush().unwrap();

        let mut job_id = String::new();
        io::stdin().read_line(&mut job_id).unwrap();
        let job_id = job_id.trim().to_string();

        if job_id.is_empty() {
            return Err(SupervisorError::ConfigError("Job ID cannot be empty".to_string()));
        }

        Ok(job_id)
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::parse();

    // Setup logging
    if args.verbose {
        env_logger::Builder::from_default_env()
            .filter_level(log::LevelFilter::Debug)
            .init();
    } else {
        env_logger::Builder::from_default_env()
            .filter_level(log::LevelFilter::Info)
            .init();
    }

    info!("Starting Supervisor CLI with config: {:?}", args.config);

    // Build supervisor from TOML config
    let supervisor = Arc::new(
        SupervisorBuilder::from_toml(&args.config)?
            .build().await?
    );

    println!("{}", "Starting actors...".bright_blue());

    // Start the actors
    supervisor.start_actors().await?;

    // Give actors time to start up
    sleep(Duration::from_secs(2)).await;

    println!("{}", "Actors started successfully!".bright_green());
    println!();

    // Create and run the CLI
    let cli = SupervisorCli::new(supervisor.clone());

    // Setup cleanup on exit
    let supervisor_cleanup = supervisor.clone();
    tokio::spawn(async move {
        tokio::signal::ctrl_c().await.expect("Failed to listen for ctrl+c");
        println!("\n{}", "Shutting down...".bright_yellow());
        if let Err(e) = supervisor_cleanup.cleanup_and_shutdown().await {
            eprintln!("Error during cleanup: {}", e);
        }
        std::process::exit(0);
    });

    // Run the interactive CLI
    cli.run().await?;

    // Cleanup on normal exit
    supervisor.cleanup_and_shutdown().await?;

    Ok(())
}
core/supervisor/cmd/supervisor_tui.rs (new file, 1052 lines)
File diff suppressed because it is too large.
@ -45,13 +45,27 @@ Jobs can have dependencies on other jobs, which are stored in the `dependencies`

### Work Queues

- Jobs are queued for execution using Redis lists:
+ Jobs are queued for execution using Redis lists with the following naming convention:
```
- hero:work_queue:{actor_id}
+ hero:job:actor_queue:{script_type_suffix}
```

Where `{script_type_suffix}` corresponds to the script type:
- `osis` for OSIS actors (Rhai/HeroScript execution)
- `sal` for SAL actors (System Abstraction Layer)
- `v` for V actors (V language execution)
- `python` for Python actors

**Examples:**
- OSIS actor queue: `hero:job:actor_queue:osis`
- SAL actor queue: `hero:job:actor_queue:sal`
- V actor queue: `hero:job:actor_queue:v`
- Python actor queue: `hero:job:actor_queue:python`

Actors listen on their specific queue using `BLPOP` for job IDs to process (see the sketch below).

**Important:** Actors must use the same queue naming convention in their `actor_id()` method to ensure proper job dispatch. The actor should return `"actor_queue:{script_type_suffix}"` as its actor ID.
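As an illustration only (not part of this changeset), an actor's polling step with the `redis` crate could look roughly like this. The queue name follows the convention above; the timeout value is arbitrary, and note that the `blpop` timeout argument is a `usize` number of seconds in older `redis` crate releases and an `f64` in newer ones.

```rust
use redis::AsyncCommands;

/// Illustrative only: block on the OSIS actor queue until a job ID arrives.
async fn next_job_id(
    conn: &mut redis::aio::MultiplexedConnection,
) -> redis::RedisResult<Option<String>> {
    // BLPOP yields (queue_name, job_id), or None when the timeout expires.
    let popped: Option<(String, String)> = conn
        .blpop("hero:job:actor_queue:osis", 5.0)
        .await?;
    Ok(popped.map(|(_queue, job_id)| job_id))
}
```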
### Stop Queues

Job stop requests are sent through dedicated stop queues:

@ -63,12 +77,26 @@ Actors monitor these queues to receive stop requests for running jobs.

### Reply Queues

- For synchronous job execution, dedicated reply queues are used:
- ```
- hero:reply:{job_id}
- ```
+ Reply queues are used for responses to specific requests:

- Actors send results to these queues when jobs complete.
+ - `hero:reply:{request_id}`: Response to a specific request

### Result and Error Queues

When actors process jobs, they store results and errors in two places:

1. **Job Hash Storage**: Results are stored in the job hash fields:
   - `hero:job:{job_id}` hash with `output` field for results
   - `hero:job:{job_id}` hash with `error` field for errors

2. **Dedicated Queues**: Results and errors are also pushed to dedicated queues for asynchronous retrieval:
   - `hero:job:{job_id}:result`: Queue containing job result (use `LPOP` to retrieve)
   - `hero:job:{job_id}:error`: Queue containing job error (use `LPOP` to retrieve)

This dual storage approach allows clients to:
- Access results/errors directly from job hash for immediate retrieval
- Listen on result/error queues for asynchronous notification of job completion
- Use `BLPOP` on result/error queues for blocking waits on job completion
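To make the dual-storage access pattern concrete, here is a small client-side sketch (illustrative only, not part of this changeset); it uses the key names defined above, an arbitrary 30-second timeout, and the same caveat about the `blpop` timeout type as the earlier sketch.

```rust
use redis::AsyncCommands;

/// Illustrative only: check the job hash first, then block on the result queue.
async fn await_job_result(
    conn: &mut redis::aio::MultiplexedConnection,
    job_id: &str,
) -> redis::RedisResult<Option<String>> {
    // Fast path: the result may already be stored on the job hash.
    if let Some(output) = conn
        .hget::<_, _, Option<String>>(format!("hero:job:{}", job_id), "output")
        .await?
    {
        return Ok(Some(output));
    }
    // Otherwise block on the dedicated result queue.
    let popped: Option<(String, String)> = conn
        .blpop(format!("hero:job:{}:result", job_id), 30.0)
        .await?;
    Ok(popped.map(|(_queue, result)| result))
}
```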
## Job Lifecycle
core/supervisor/examples/cli_config.toml (new file, 20 lines)

@ -0,0 +1,20 @@

# Hero Supervisor CLI Configuration
# This configuration sets up the supervisor with an OSIS actor for job processing

[global]
redis_url = "redis://127.0.0.1/"

[actors]
# OSIS Actor configuration - handles Object Storage and Indexing System jobs
osis_actor = "/Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/target/debug/actor_osis"

# Optional: Other actors can be configured here
# sal_actor = "/path/to/sal_actor"
# v_actor = "/path/to/v_actor"
# python_actor = "/path/to/python_actor"

# Optional: WebSocket server configuration for remote API access
# [websocket]
# host = "127.0.0.1"
# port = 8443
# redis_url = "redis://127.0.0.1/"
interfaces/openrpc/client/Cargo.toml (new file, 42 lines)

@ -0,0 +1,42 @@

[package]
name = "hero-openrpc-client"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "hero-openrpc-client"
path = "cmd/main.rs"

[dependencies]
# Core dependencies
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
thiserror = "1.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
clap = { version = "4.0", features = ["derive"] }

# JSON-RPC dependencies
jsonrpsee = { version = "0.21", features = [
    "client",
    "macros"
] }
async-trait = "0.1"

# Hero dependencies
hero_job = { path = "../../../core/job" }

# Authentication and crypto
secp256k1 = { version = "0.28", features = ["rand", "recovery"] }
hex = "0.4"
sha2 = "0.10"
rand = "0.8"

# CLI utilities
dialoguer = "0.11"
colored = "2.0"

# Async utilities
futures = "0.3"
interfaces/openrpc/client/cmd/main.rs (new file, 472 lines)

@ -0,0 +1,472 @@

use anyhow::Result;
use clap::{Parser, Subcommand};
use colored::*;
use dialoguer::{Input, Select, Confirm, MultiSelect};
use hero_job::ScriptType;
use hero_openrpc_client::{
    AuthHelper, ClientTransport, HeroOpenRpcClient, JobParams,
};
use std::path::PathBuf;
use tracing::{error, info, Level};
use tracing_subscriber;

#[derive(Parser)]
#[command(name = "hero-openrpc-client")]
#[command(about = "Hero OpenRPC Client - Interactive JSON-RPC client")]
struct Cli {
    #[command(subcommand)]
    command: Commands,

    /// Private key for authentication (hex format)
    #[arg(long)]
    private_key: Option<String>,

    /// Generate a new private key and exit
    #[arg(long)]
    generate_key: bool,

    /// Log level
    #[arg(long, default_value = "info")]
    log_level: String,
}

#[derive(Subcommand)]
enum Commands {
    /// Connect to WebSocket server
    Websocket {
        /// Server URL
        #[arg(long, default_value = "ws://127.0.0.1:9944")]
        url: String,
    },
    /// Connect to Unix socket server
    Unix {
        /// Unix socket path
        #[arg(long, default_value = "/tmp/hero-openrpc.sock")]
        socket_path: PathBuf,
    },
}

/// Available RPC methods with descriptions
#[derive(Debug, Clone)]
struct RpcMethod {
    name: &'static str,
    description: &'static str,
    requires_auth: bool,
}

const RPC_METHODS: &[RpcMethod] = &[
    RpcMethod {
        name: "fetch_nonce",
        description: "Fetch a nonce for authentication",
        requires_auth: false,
    },
    RpcMethod {
        name: "authenticate",
        description: "Authenticate with public key and signature",
        requires_auth: false,
    },
    RpcMethod {
        name: "whoami",
        description: "Get authentication status and user information",
        requires_auth: true,
    },
    RpcMethod {
        name: "play",
        description: "Execute a Rhai script immediately",
        requires_auth: true,
    },
    RpcMethod {
        name: "create_job",
        description: "Create a new job without starting it",
        requires_auth: true,
    },
    RpcMethod {
        name: "start_job",
        description: "Start a previously created job",
        requires_auth: true,
    },
    RpcMethod {
        name: "run_job",
        description: "Create and run a job, returning result when complete",
        requires_auth: true,
    },
    RpcMethod {
        name: "get_job_status",
        description: "Get the current status of a job",
        requires_auth: true,
    },
    RpcMethod {
        name: "get_job_output",
        description: "Get the output of a completed job",
        requires_auth: true,
    },
    RpcMethod {
        name: "get_job_logs",
        description: "Get the logs of a job",
        requires_auth: true,
    },
    RpcMethod {
        name: "list_jobs",
        description: "List all jobs in the system",
        requires_auth: true,
    },
    RpcMethod {
        name: "stop_job",
        description: "Stop a running job",
        requires_auth: true,
    },
    RpcMethod {
        name: "delete_job",
        description: "Delete a job from the system",
        requires_auth: true,
    },
    RpcMethod {
        name: "clear_all_jobs",
        description: "Clear all jobs from the system",
        requires_auth: true,
    },
];

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Initialize tracing
    let log_level = match cli.log_level.to_lowercase().as_str() {
        "trace" => Level::TRACE,
        "debug" => Level::DEBUG,
        "info" => Level::INFO,
        "warn" => Level::WARN,
        "error" => Level::ERROR,
        _ => Level::INFO,
    };

    tracing_subscriber::fmt()
        .with_max_level(log_level)
        .init();

    // Handle key generation
    if cli.generate_key {
        let auth_helper = AuthHelper::generate()?;
        println!("{}", "Generated new private key:".green().bold());
        println!("Private Key: {}", auth_helper.private_key_hex().yellow());
        println!("Public Key: {}", auth_helper.public_key_hex().cyan());
        println!();
        println!("{}", "Save the private key securely and use it with --private-key".bright_yellow());
        return Ok(());
    }

    let transport = match cli.command {
        Commands::Websocket { url } => {
            println!("{} {}", "Connecting to WebSocket server:".green(), url.cyan());
            ClientTransport::WebSocket(url)
        }
        Commands::Unix { socket_path } => {
            println!("{} {:?}", "Connecting to Unix socket server:".green(), socket_path);
            ClientTransport::Unix(socket_path)
        }
    };

    // Connect to the server
    let client = HeroOpenRpcClient::connect(transport).await?;
    println!("{}", "Connected successfully!".green().bold());

    // Handle authentication if private key is provided
    let mut authenticated = false;
    if let Some(private_key) = cli.private_key {
        println!("{}", "Authenticating...".yellow());
        match client.authenticate_with_key(&private_key).await {
            Ok(true) => {
                println!("{}", "Authentication successful!".green().bold());
                authenticated = true;
            }
            Ok(false) => {
                println!("{}", "Authentication failed!".red().bold());
            }
            Err(e) => {
                error!("Authentication error: {}", e);
                println!("{} {}", "Authentication error:".red().bold(), e);
            }
        }
    } else {
        println!("{}", "No private key provided. Some methods will require authentication.".yellow());
        println!("{}", "Use --generate-key to create a new key or --private-key to use an existing one.".bright_yellow());
    }

    println!();

    // Interactive loop
    loop {
        // Filter methods based on authentication status
        let available_methods: Vec<&RpcMethod> = RPC_METHODS
            .iter()
            .filter(|method| !method.requires_auth || authenticated)
            .collect();

        if available_methods.is_empty() {
            println!("{}", "No methods available. Please authenticate first.".red());
            break;
        }

        // Display method selection
        let method_names: Vec<String> = available_methods
            .iter()
            .map(|method| {
                if method.requires_auth && !authenticated {
                    format!("{} {} (requires auth)", method.name.red(), method.description)
                } else {
                    format!("{} {}", method.name.green(), method.description)
                }
            })
            .collect();

        let selection = Select::new()
            .with_prompt("Select an RPC method to call")
            .items(&method_names)
            .default(0)
            .interact_opt()?;

        let Some(selection) = selection else {
            println!("{}", "Goodbye!".cyan());
            break;
        };

        let selected_method = available_methods[selection];
        println!();
        println!("{} {}", "Selected method:".bold(), selected_method.name.green());

        // Handle method-specific parameter collection and execution
        match execute_method(&client, selected_method.name).await {
            Ok(_) => {}
            Err(e) => {
                error!("Method execution failed: {}", e);
                println!("{} {}", "Error:".red().bold(), e);
            }
        }

        println!();

        // Ask if user wants to continue
        if !Confirm::new()
            .with_prompt("Do you want to call another method?")
            .default(true)
            .interact()?
        {
            break;
        }

        println!();
    }

    println!("{}", "Goodbye!".cyan().bold());
    Ok(())
}

async fn execute_method(client: &HeroOpenRpcClient, method_name: &str) -> Result<()> {
    match method_name {
        "fetch_nonce" => {
            let pubkey: String = Input::new()
                .with_prompt("Public key (hex)")
                .interact_text()?;

            let result = client.fetch_nonce(pubkey).await?;
            println!("{} {}", "Nonce:".green().bold(), result.yellow());
        }

        "authenticate" => {
            let pubkey: String = Input::new()
                .with_prompt("Public key (hex)")
                .interact_text()?;

            let signature: String = Input::new()
                .with_prompt("Signature (hex)")
                .interact_text()?;

            // The nonce returned by fetch_nonce (and signed above) must be supplied as well.
            let nonce: String = Input::new()
                .with_prompt("Nonce (as returned by fetch_nonce)")
                .interact_text()?;

            let result = client.authenticate(pubkey, signature, nonce).await?;
            println!("{} {}", "Authentication result:".green().bold(),
                if result { "Success".green() } else { "Failed".red() });
        }

        "whoami" => {
            let result = client.whoami().await?;
            println!("{} {}", "User info:".green().bold(),
                serde_json::to_string_pretty(&result)?.cyan());
        }

        "play" => {
            let script: String = Input::new()
                .with_prompt("Rhai script to execute")
                .interact_text()?;

            let result = client.play(script).await?;
            println!("{} {}", "Script output:".green().bold(), result.output.cyan());
        }

        "create_job" => {
            let script: String = Input::new()
                .with_prompt("Script content")
                .interact_text()?;

            let script_types = ["HeroScript", "RhaiSAL", "RhaiDSL"];
            let script_type_selection = Select::new()
                .with_prompt("Script type")
                .items(&script_types)
                .default(0)
                .interact()?;

            let script_type = match script_type_selection {
                0 => ScriptType::HeroScript,
                1 => ScriptType::RhaiSAL,
                2 => ScriptType::RhaiDSL,
                _ => ScriptType::HeroScript,
            };

            let add_prerequisites = Confirm::new()
                .with_prompt("Add prerequisites?")
                .default(false)
                .interact()?;

            let prerequisites = if add_prerequisites {
                let prereq_input: String = Input::new()
                    .with_prompt("Prerequisites (comma-separated job IDs)")
                    .interact_text()?;
                Some(prereq_input.split(',').map(|s| s.trim().to_string()).collect())
            } else {
                None
            };

            let job_params = JobParams {
                script,
                script_type,
                prerequisites,
            };

            let result = client.create_job(job_params).await?;
            println!("{} {}", "Created job ID:".green().bold(), result.yellow());
        }

        "start_job" => {
            let job_id: String = Input::new()
                .with_prompt("Job ID to start")
                .interact_text()?;

            let result = client.start_job(job_id).await?;
            println!("{} {}", "Start result:".green().bold(),
                if result.success { "Success".green() } else { "Failed".red() });
        }

        "run_job" => {
            let script: String = Input::new()
                .with_prompt("Script content")
                .interact_text()?;

            let script_types = ["HeroScript", "RhaiSAL", "RhaiDSL"];
            let script_type_selection = Select::new()
                .with_prompt("Script type")
                .items(&script_types)
                .default(0)
                .interact()?;

            let script_type = match script_type_selection {
                0 => ScriptType::HeroScript,
                1 => ScriptType::RhaiSAL,
                2 => ScriptType::RhaiDSL,
                _ => ScriptType::HeroScript,
            };

            let add_prerequisites = Confirm::new()
                .with_prompt("Add prerequisites?")
                .default(false)
                .interact()?;

            let prerequisites = if add_prerequisites {
                let prereq_input: String = Input::new()
                    .with_prompt("Prerequisites (comma-separated job IDs)")
                    .interact_text()?;
                Some(prereq_input.split(',').map(|s| s.trim().to_string()).collect())
            } else {
                None
            };

            let result = client.run_job(script, script_type, prerequisites).await?;
            println!("{} {}", "Job result:".green().bold(), result.cyan());
        }

        "get_job_status" => {
            let job_id: String = Input::new()
                .with_prompt("Job ID")
                .interact_text()?;

            let result = client.get_job_status(job_id).await?;
            println!("{} {:?}", "Job status:".green().bold(), result);
        }

        "get_job_output" => {
            let job_id: String = Input::new()
                .with_prompt("Job ID")
                .interact_text()?;

            let result = client.get_job_output(job_id).await?;
            println!("{} {}", "Job output:".green().bold(), result.cyan());
        }

        "get_job_logs" => {
            let job_id: String = Input::new()
                .with_prompt("Job ID")
                .interact_text()?;

            let result = client.get_job_logs(job_id).await?;
            println!("{} {}", "Job logs:".green().bold(), result.logs.cyan());
        }

        "list_jobs" => {
            let result = client.list_jobs().await?;
            println!("{}", "Jobs:".green().bold());
            for job in result {
                println!("  {} - {} ({:?})",
                    job.id().yellow(),
                    job.script_type(),
                    job.status()
                );
            }
        }

        "stop_job" => {
            let job_id: String = Input::new()
                .with_prompt("Job ID to stop")
                .interact_text()?;

            client.stop_job(job_id.clone()).await?;
            println!("{} {}", "Stopped job:".green().bold(), job_id.yellow());
        }

        "delete_job" => {
            let job_id: String = Input::new()
                .with_prompt("Job ID to delete")
                .interact_text()?;

            client.delete_job(job_id.clone()).await?;
            println!("{} {}", "Deleted job:".green().bold(), job_id.yellow());
        }

        "clear_all_jobs" => {
            let confirm = Confirm::new()
                .with_prompt("Are you sure you want to clear ALL jobs?")
                .default(false)
                .interact()?;

            if confirm {
                client.clear_all_jobs().await?;
                println!("{}", "Cleared all jobs".green().bold());
            } else {
                println!("{}", "Operation cancelled".yellow());
            }
        }

        _ => {
            println!("{} {}", "Unknown method:".red().bold(), method_name);
        }
    }

    Ok(())
}
interfaces/openrpc/client/src/auth.rs (new file, 81 lines)

@ -0,0 +1,81 @@

use anyhow::{anyhow, Result};
use secp256k1::{Message, PublicKey, ecdsa::Signature, Secp256k1, SecretKey};
use sha2::{Digest, Sha256};

/// Helper for authentication operations
pub struct AuthHelper {
    secret_key: SecretKey,
    public_key: PublicKey,
    secp: Secp256k1<secp256k1::All>,
}

impl AuthHelper {
    /// Create a new auth helper from a private key hex string
    pub fn new(private_key_hex: &str) -> Result<Self> {
        let secp = Secp256k1::new();

        let secret_key_bytes = hex::decode(private_key_hex)
            .map_err(|_| anyhow!("Invalid private key hex format"))?;

        let secret_key = SecretKey::from_slice(&secret_key_bytes)
            .map_err(|_| anyhow!("Invalid private key"))?;

        let public_key = PublicKey::from_secret_key(&secp, &secret_key);

        Ok(Self {
            secret_key,
            public_key,
            secp,
        })
    }

    /// Generate a new random private key
    pub fn generate() -> Result<Self> {
        let secp = Secp256k1::new();
        let (secret_key, public_key) = secp.generate_keypair(&mut rand::thread_rng());

        Ok(Self {
            secret_key,
            public_key,
            secp,
        })
    }

    /// Get the public key as a hex string
    pub fn public_key_hex(&self) -> String {
        hex::encode(self.public_key.serialize())
    }

    /// Get the private key as a hex string
    pub fn private_key_hex(&self) -> String {
        hex::encode(self.secret_key.secret_bytes())
    }

    /// Sign a message and return the signature as hex
    pub fn sign_message(&self, message: &str) -> Result<String> {
        let message_hash = Sha256::digest(message.as_bytes());
        let message = Message::from_slice(&message_hash)
            .map_err(|_| anyhow!("Failed to create message from hash"))?;

        let signature = self.secp.sign_ecdsa(&message, &self.secret_key);
        Ok(hex::encode(signature.serialize_compact()))
    }

    /// Verify a signature against a message
    pub fn verify_signature(&self, message: &str, signature_hex: &str) -> Result<bool> {
        let message_hash = Sha256::digest(message.as_bytes());
        let message = Message::from_slice(&message_hash)
            .map_err(|_| anyhow!("Failed to create message from hash"))?;

        let signature_bytes = hex::decode(signature_hex)
            .map_err(|_| anyhow!("Invalid signature hex format"))?;

        let signature = Signature::from_compact(&signature_bytes)
            .map_err(|_| anyhow!("Invalid signature format"))?;

        match self.secp.verify_ecdsa(&message, &signature, &self.public_key) {
            Ok(_) => Ok(true),
            Err(_) => Ok(false),
        }
    }
}
212
interfaces/openrpc/client/src/lib.rs
Normal file
@ -0,0 +1,212 @@
use anyhow::Result;
use async_trait::async_trait;
use hero_job::{Job, JobStatus, ScriptType};
use jsonrpsee::core::client::ClientT;
use jsonrpsee::core::ClientError;
use jsonrpsee::proc_macros::rpc;
use jsonrpsee::rpc_params;
use jsonrpsee::ws_client::{WsClient, WsClientBuilder};
use std::path::PathBuf;
use tracing::{error, info};

mod auth;
mod types;

pub use auth::*;
pub use types::*;

/// Transport configuration for the client
#[derive(Debug, Clone)]
pub enum ClientTransport {
    WebSocket(String),
}

/// OpenRPC client trait defining all available methods
#[rpc(client)]
pub trait OpenRpcClient {
    // Authentication methods
    #[method(name = "fetch_nonce")]
    async fn fetch_nonce(&self, pubkey: String) -> Result<String, ClientError>;

    #[method(name = "authenticate")]
    async fn authenticate(
        &self,
        pubkey: String,
        signature: String,
        nonce: String,
    ) -> Result<bool, ClientError>;

    #[method(name = "whoami")]
    async fn whoami(&self) -> Result<serde_json::Value, ClientError>;

    // Script execution
    #[method(name = "play")]
    async fn play(&self, script: String) -> Result<PlayResult, ClientError>;

    // Job management
    #[method(name = "create_job")]
    async fn create_job(&self, job: JobParams) -> Result<String, ClientError>;

    #[method(name = "start_job")]
    async fn start_job(&self, job_id: String) -> Result<StartJobResult, ClientError>;

    #[method(name = "run_job")]
    async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        prerequisites: Option<Vec<String>>,
    ) -> Result<String, ClientError>;

    #[method(name = "get_job_status")]
    async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ClientError>;

    #[method(name = "get_job_output")]
    async fn get_job_output(&self, job_id: String) -> Result<String, ClientError>;

    #[method(name = "get_job_logs")]
    async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ClientError>;

    #[method(name = "list_jobs")]
    async fn list_jobs(&self) -> Result<Vec<Job>, ClientError>;

    #[method(name = "stop_job")]
    async fn stop_job(&self, job_id: String) -> Result<(), ClientError>;

    #[method(name = "delete_job")]
    async fn delete_job(&self, job_id: String) -> Result<(), ClientError>;

    #[method(name = "clear_all_jobs")]
    async fn clear_all_jobs(&self) -> Result<(), ClientError>;
}

/// Wrapper client that can use WebSocket transport
pub struct HeroOpenRpcClient {
    client: WsClient,
}

impl HeroOpenRpcClient {
    /// Connect to the OpenRPC server using the specified transport
    pub async fn connect(transport: ClientTransport) -> Result<Self> {
        match transport {
            ClientTransport::WebSocket(url) => {
                info!("Connecting to WebSocket server at {}", url);
                let client = WsClientBuilder::default()
                    .build(&url)
                    .await?;
                Ok(Self { client })
            }
        }
    }

    /// Get the underlying client for making RPC calls
    pub fn client(&self) -> &WsClient {
        &self.client
    }

    /// Authenticate with the server using a private key
    pub async fn authenticate_with_key(&self, private_key: &str) -> Result<bool> {
        let auth_helper = AuthHelper::new(private_key)?;

        // Get nonce
        let pubkey = auth_helper.public_key_hex();
        let nonce: String = self.client.fetch_nonce(pubkey.clone()).await?;

        // Sign nonce
        let signature = auth_helper.sign_message(&nonce)?;

        // Authenticate
        let result = self.client.authenticate(pubkey, signature, nonce).await?;

        if result {
            info!("Authentication successful");
        } else {
            error!("Authentication failed");
        }

        Ok(result)
    }
}

// Implement delegation methods on HeroOpenRpcClient to use the generated trait methods
impl HeroOpenRpcClient {
    /// Delegate to fetch_nonce on the underlying client
    pub async fn fetch_nonce(&self, pubkey: String) -> Result<String, ClientError> {
        self.client.fetch_nonce(pubkey).await
    }

    /// Delegate to authenticate on the underlying client
    pub async fn authenticate(
        &self,
        pubkey: String,
        signature: String,
        nonce: String,
    ) -> Result<bool, ClientError> {
        self.client.authenticate(pubkey, signature, nonce).await
    }

    /// Delegate to whoami on the underlying client
    pub async fn whoami(&self) -> Result<serde_json::Value, ClientError> {
        self.client.whoami().await
    }

    /// Delegate to play on the underlying client
    pub async fn play(&self, script: String) -> Result<PlayResult, ClientError> {
        self.client.play(script).await
    }

    /// Delegate to create_job on the underlying client
    pub async fn create_job(&self, job: JobParams) -> Result<String, ClientError> {
        self.client.create_job(job).await
    }

    /// Delegate to start_job on the underlying client
    pub async fn start_job(&self, job_id: String) -> Result<StartJobResult, ClientError> {
        self.client.start_job(job_id).await
    }

    /// Delegate to run_job on the underlying client
    pub async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        prerequisites: Option<Vec<String>>,
    ) -> Result<String, ClientError> {
        self.client.run_job(script, script_type, prerequisites).await
    }

    /// Delegate to get_job_status on the underlying client
    pub async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ClientError> {
        self.client.get_job_status(job_id).await
    }

    /// Delegate to get_job_output on the underlying client
    pub async fn get_job_output(&self, job_id: String) -> Result<String, ClientError> {
        self.client.get_job_output(job_id).await
    }

    /// Delegate to get_job_logs on the underlying client
    pub async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ClientError> {
        self.client.get_job_logs(job_id).await
    }

    /// Delegate to list_jobs on the underlying client
    pub async fn list_jobs(&self) -> Result<Vec<Job>, ClientError> {
        self.client.list_jobs().await
    }

    /// Delegate to stop_job on the underlying client
    pub async fn stop_job(&self, job_id: String) -> Result<(), ClientError> {
        self.client.stop_job(job_id).await
    }

    /// Delegate to delete_job on the underlying client
    pub async fn delete_job(&self, job_id: String) -> Result<(), ClientError> {
        self.client.delete_job(job_id).await
    }

    /// Delegate to clear_all_jobs on the underlying client
    pub async fn clear_all_jobs(&self) -> Result<(), ClientError> {
        self.client.clear_all_jobs().await
    }
}

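With the client above, a typical flow is connect, authenticate, then dispatch a job. The following is a minimal sketch using only the `HeroOpenRpcClient` and `AuthHelper` methods introduced in this diff; the crate name `hero_openrpc_client`, the WebSocket URL, and the freshly generated key are assumptions for illustration, not values from this changeset.

```rust
use hero_openrpc_client::{AuthHelper, ClientTransport, HeroOpenRpcClient};
use hero_job::ScriptType;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Assumes a hero-openrpc-server listening on the default WebSocket address.
    let client = HeroOpenRpcClient::connect(
        ClientTransport::WebSocket("ws://127.0.0.1:9944".to_string()),
    ).await?;

    // Generate a throwaway keypair and run the nonce/sign/authenticate handshake.
    let auth = AuthHelper::generate()?;
    client.authenticate_with_key(&auth.private_key_hex()).await?;

    // Dispatch a job and print its id.
    let job_id = client
        .run_job("print('hello');".to_string(), ScriptType::OSIS, None)
        .await?;
    println!("dispatched job {job_id}");
    Ok(())
}
```
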
28
interfaces/openrpc/client/src/types.rs
Normal file
@ -0,0 +1,28 @@
use hero_job::ScriptType;
use serde::{Deserialize, Serialize};

/// Parameters for creating a job
#[derive(Debug, Serialize, Deserialize)]
pub struct JobParams {
    pub script: String,
    pub script_type: ScriptType,
    pub prerequisites: Option<Vec<String>>,
}

/// Result of script execution
#[derive(Debug, Serialize, Deserialize)]
pub struct PlayResult {
    pub output: String,
}

/// Result of starting a job
#[derive(Debug, Serialize, Deserialize)]
pub struct StartJobResult {
    pub success: bool,
}

/// Result of getting job logs
#[derive(Debug, Serialize, Deserialize)]
pub struct JobLogsResult {
    pub logs: String,
}

47
interfaces/openrpc/server/Cargo.toml
Normal file
@ -0,0 +1,47 @@
[package]
name = "hero-openrpc-server"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "hero-openrpc-server"
path = "cmd/main.rs"

[dependencies]
# Core dependencies
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
thiserror = "1.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
clap = { version = "4.0", features = ["derive"] }

# JSON-RPC dependencies
jsonrpsee = { version = "0.21", features = [
    "server",
    "macros"
] }
jsonrpsee-types = "0.21"
uuid = { version = "1.6", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }

# Hero dependencies
hero_supervisor = { path = "../../../core/supervisor" }
hero_job = { path = "../../../core/job" }

# Authentication and crypto
secp256k1 = { version = "0.28", features = ["rand", "recovery"] }
hex = "0.4"
sha2 = "0.10"
rand = "0.8"

# Async utilities
futures = "0.3"

# Test dependencies
[dev-dependencies]
tokio-test = "0.4"
uuid = { version = "1.6", features = ["v4"] }

95
interfaces/openrpc/server/cmd/main.rs
Normal file
@ -0,0 +1,95 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use hero_openrpc_server::{OpenRpcServer, OpenRpcServerConfig, Transport};
use std::net::SocketAddr;
use std::path::PathBuf;
use tracing::{info, Level};
use tracing_subscriber;

#[derive(Parser)]
#[command(name = "hero-openrpc-server")]
#[command(about = "Hero OpenRPC Server - WebSocket and Unix socket JSON-RPC server")]
struct Cli {
    #[command(subcommand)]
    command: Commands,

    /// Path to supervisor configuration file
    #[arg(long)]
    supervisor_config: Option<PathBuf>,

    /// Database path for supervisor
    #[arg(long, default_value = "./supervisor.db")]
    db_path: PathBuf,

    /// Log level
    #[arg(long, default_value = "info")]
    log_level: String,
}

#[derive(Subcommand)]
enum Commands {
    /// Start WebSocket server
    Websocket {
        /// Address to bind to
        #[arg(long, default_value = "127.0.0.1:9944")]
        addr: SocketAddr,
    },
    /// Start Unix socket server
    Unix {
        /// Unix socket path
        #[arg(long, default_value = "/tmp/hero-openrpc.sock")]
        socket_path: PathBuf,
    },
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Initialize tracing
    let log_level = match cli.log_level.to_lowercase().as_str() {
        "trace" => Level::TRACE,
        "debug" => Level::DEBUG,
        "info" => Level::INFO,
        "warn" => Level::WARN,
        "error" => Level::ERROR,
        _ => Level::INFO,
    };

    tracing_subscriber::fmt()
        .with_max_level(log_level)
        .init();

    let transport = match cli.command {
        Commands::Websocket { addr } => {
            info!("Starting WebSocket server on {}", addr);
            Transport::WebSocket(addr)
        }
        Commands::Unix { socket_path } => {
            info!("Starting Unix socket server on {:?}", socket_path);
            // Remove existing socket file if it exists
            if socket_path.exists() {
                std::fs::remove_file(&socket_path)?;
            }
            Transport::Unix(socket_path)
        }
    };

    let config = OpenRpcServerConfig {
        transport: transport.clone(),
        supervisor_config_path: cli.supervisor_config,
        db_path: cli.db_path,
    };

    // Create and start the server
    let server = OpenRpcServer::new(config.clone()).await?;
    let handle = server.start(config).await?;

    info!("Server started successfully");

    // Wait for the server to finish
    handle.stopped().await;

    info!("Server stopped");
    Ok(())
}

131
interfaces/openrpc/server/src/auth.rs
Normal file
@ -0,0 +1,131 @@
use anyhow::{anyhow, Result};
use secp256k1::{Message, PublicKey, Secp256k1, ecdsa::Signature};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};

/// Nonce response structure
#[derive(Debug, Serialize, Deserialize)]
pub struct NonceResponse {
    pub nonce: String,
    pub timestamp: u64,
}

/// Authentication manager for handling nonces and signature verification
#[derive(Debug)]
pub struct AuthManager {
    nonces: HashMap<String, NonceResponse>,
    authenticated_keys: HashMap<String, u64>, // pubkey -> timestamp
}

impl AuthManager {
    /// Create a new authentication manager
    pub fn new() -> Self {
        Self {
            nonces: HashMap::new(),
            authenticated_keys: HashMap::new(),
        }
    }

    /// Generate a nonce for a given public key
    pub fn generate_nonce(&mut self, pubkey: &str) -> String {
        let timestamp = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        let nonce = format!("{}:{}", pubkey, timestamp);
        let nonce_hash = format!("{:x}", Sha256::digest(nonce.as_bytes()));

        self.nonces.insert(
            pubkey.to_string(),
            NonceResponse {
                nonce: nonce_hash.clone(),
                timestamp,
            },
        );

        nonce_hash
    }

    /// Verify a signature against a stored nonce
    pub fn verify_signature(&mut self, pubkey: &str, signature: &str) -> Result<bool> {
        // Get the nonce for this public key
        let nonce_response = self
            .nonces
            .get(pubkey)
            .ok_or_else(|| anyhow!("No nonce found for public key"))?;

        // Check if nonce is not too old (5 minutes)
        let current_time = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        if current_time - nonce_response.timestamp > 300 {
            return Err(anyhow!("Nonce expired"));
        }

        // Parse the public key
        let pubkey_bytes = hex::decode(pubkey)
            .map_err(|_| anyhow!("Invalid public key format"))?;

        let secp = Secp256k1::new();
        let public_key = PublicKey::from_slice(&pubkey_bytes)
            .map_err(|_| anyhow!("Invalid public key"))?;

        // Parse the signature
        let signature_bytes = hex::decode(signature)
            .map_err(|_| anyhow!("Invalid signature format"))?;

        let signature = Signature::from_compact(&signature_bytes)
            .map_err(|_| anyhow!("Invalid signature"))?;

        // Create message hash from nonce
        let message_hash = Sha256::digest(nonce_response.nonce.as_bytes());
        let message = Message::from_slice(&message_hash)
            .map_err(|_| anyhow!("Failed to create message"))?;

        // Verify the signature
        match secp.verify_ecdsa(&message, &signature, &public_key) {
            Ok(_) => {
                // Mark this key as authenticated
                self.authenticated_keys.insert(pubkey.to_string(), current_time);
                // Remove the used nonce
                self.nonces.remove(pubkey);
                Ok(true)
            }
            Err(_) => Ok(false),
        }
    }

    /// Check if a public key is currently authenticated
    pub fn is_authenticated(&self, pubkey: &str) -> bool {
        if let Some(&timestamp) = self.authenticated_keys.get(pubkey) {
            let current_time = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap()
                .as_secs();

            // Authentication is valid for 1 hour
            current_time - timestamp < 3600
        } else {
            false
        }
    }

    /// Remove expired authentications
    pub fn cleanup_expired(&mut self) {
        let current_time = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        // Remove expired nonces (older than 5 minutes)
        self.nonces.retain(|_, nonce| current_time - nonce.timestamp <= 300);

        // Remove expired authentications (older than 1 hour)
        self.authenticated_keys.retain(|_, &mut timestamp| current_time - timestamp <= 3600);
    }
}

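The client-side `AuthHelper` and this server-side `AuthManager` are designed to round-trip: the server hands out a hashed nonce, the client signs that nonce string, and the server verifies the signature against the nonce it stored. A minimal sketch of the handshake, independent of any transport, is shown below; it assumes both types are brought into one scope (in the real layout they live in the client and server crates respectively).

```rust
use anyhow::Result;

// Hypothetical test-style helper assuming AuthHelper and AuthManager are both in scope.
fn handshake_roundtrip() -> Result<()> {
    let mut manager = AuthManager::new();
    let helper = AuthHelper::generate()?;

    // Server issues a nonce for the client's compressed public key (hex).
    let pubkey = helper.public_key_hex();
    let nonce = manager.generate_nonce(&pubkey);

    // Client signs the nonce string; server verifies against the stored nonce
    // and marks the key as authenticated for one hour.
    let signature = helper.sign_message(&nonce)?;
    assert!(manager.verify_signature(&pubkey, &signature)?);
    Ok(())
}
```
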
471
interfaces/openrpc/server/src/lib.rs
Normal file
@ -0,0 +1,471 @@
use anyhow::Result;
use hero_job::{Job, JobBuilder, JobStatus, ScriptType};
use hero_supervisor::{Supervisor, SupervisorBuilder};
use jsonrpsee::core::async_trait;
use jsonrpsee::proc_macros::rpc;
use jsonrpsee::server::{ServerBuilder, ServerHandle};
use jsonrpsee::RpcModule;
use jsonrpsee_types::error::ErrorCode;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::error;

mod auth;
pub mod types;

pub use auth::*;
pub use types::*;

/// Transport type for the OpenRPC server
#[derive(Debug, Clone)]
pub enum Transport {
    WebSocket(SocketAddr),
    Unix(PathBuf),
}

/// OpenRPC server configuration
#[derive(Debug, Clone)]
pub struct OpenRpcServerConfig {
    pub transport: Transport,
    pub supervisor_config_path: Option<PathBuf>,
    pub db_path: PathBuf,
}

/// Main OpenRPC server state
#[derive(Clone)]
pub struct OpenRpcServer {
    supervisor: Arc<RwLock<Supervisor>>,
    auth_manager: Arc<RwLock<AuthManager>>,
}

/// OpenRPC trait defining all available methods
#[rpc(server)]
pub trait OpenRpcApi {
    // Authentication methods
    #[method(name = "fetch_nonce")]
    async fn fetch_nonce(&self, public_key: String) -> Result<String, ErrorCode>;

    #[method(name = "authenticate")]
    async fn authenticate(&self, public_key: String, signature: String, nonce: String) -> Result<bool, ErrorCode>;

    #[method(name = "whoami")]
    async fn whoami(&self) -> Result<String, ErrorCode>;

    // Script execution
    #[method(name = "play")]
    async fn play(&self, script: String) -> Result<PlayResult, ErrorCode>;

    // Job management
    #[method(name = "create_job")]
    async fn create_job(&self, job_params: JobParams) -> Result<String, ErrorCode>;

    #[method(name = "start_job")]
    async fn start_job(&self, job_id: String) -> Result<StartJobResult, ErrorCode>;

    #[method(name = "run_job")]
    async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        prerequisites: Option<Vec<String>>,
    ) -> Result<String, ErrorCode>;

    #[method(name = "get_job_status")]
    async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ErrorCode>;

    #[method(name = "get_job_output")]
    async fn get_job_output(&self, job_id: String) -> Result<String, ErrorCode>;

    #[method(name = "get_job_logs")]
    async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ErrorCode>;

    #[method(name = "list_jobs")]
    async fn list_jobs(&self) -> Result<Vec<Job>, ErrorCode>;

    #[method(name = "stop_job")]
    async fn stop_job(&self, job_id: String) -> Result<(), ErrorCode>;

    #[method(name = "delete_job")]
    async fn delete_job(&self, job_id: String) -> Result<(), ErrorCode>;

    #[method(name = "clear_all_jobs")]
    async fn clear_all_jobs(&self) -> Result<(), ErrorCode>;
}

impl OpenRpcServer {
    /// Create a new OpenRPC server instance
    pub async fn new(config: OpenRpcServerConfig) -> Result<Self> {
        let supervisor = if let Some(config_path) = config.supervisor_config_path {
            // Load supervisor from config file
            SupervisorBuilder::from_toml(&config_path)?
                .build().await?
        } else {
            // Create default supervisor with Redis URL
            SupervisorBuilder::new()
                .redis_url("redis://localhost:6379")
                .build().await?
        };

        Ok(Self {
            supervisor: Arc::new(RwLock::new(supervisor)),
            auth_manager: Arc::new(RwLock::new(AuthManager::new())),
        })
    }

    /// Start the OpenRPC server
    pub async fn start(self, config: OpenRpcServerConfig) -> Result<ServerHandle> {
        let mut module = RpcModule::new(());

        // Register all the RPC methods
        let server_clone = self.clone();
        module.register_async_method("fetch_nonce", move |params, _| {
            let server = server_clone.clone();
            async move {
                let public_key: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.fetch_nonce(public_key).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("authenticate", move |params, _| {
            let server = server_clone.clone();
            async move {
                let (public_key, signature, nonce): (String, String, String) = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.authenticate(public_key, signature, nonce).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("whoami", move |_params, _| {
            let server = server_clone.clone();
            async move {
                server.whoami().await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("play", move |params, _| {
            let server = server_clone.clone();
            async move {
                let script: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.play(script).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("create_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job: JobParams = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.create_job(job).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("start_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.start_job(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("run_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let (script, script_type, prerequisites): (String, ScriptType, Option<Vec<String>>) = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.run_job(script, script_type, prerequisites).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("get_job_status", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.get_job_status(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("get_job_output", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.get_job_output(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("get_job_logs", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.get_job_logs(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("list_jobs", move |params, _| {
            let server = server_clone.clone();
            async move {
                let _: () = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.list_jobs().await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("stop_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.stop_job(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("delete_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.delete_job(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("clear_all_jobs", move |params, _| {
            let server = server_clone.clone();
            async move {
                let _: () = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.clear_all_jobs().await
            }
        })?;

        match config.transport {
            Transport::WebSocket(addr) => {
                let server = ServerBuilder::default()
                    .build(addr)
                    .await?;
                let handle = server.start(module);
                Ok(handle)
            }
            Transport::Unix(_path) => {
                // Unix socket transport not yet implemented in jsonrpsee 0.21
                return Err(anyhow::anyhow!("Unix socket transport not yet supported").into());
            }
        }
    }
}

#[async_trait]
impl OpenRpcApiServer for OpenRpcServer {
    async fn fetch_nonce(&self, public_key: String) -> Result<String, ErrorCode> {
        let mut auth_manager = self.auth_manager.write().await;
        let nonce = auth_manager.generate_nonce(&public_key);
        Ok(nonce)
    }

    async fn authenticate(
        &self,
        public_key: String,
        signature: String,
        _nonce: String,
    ) -> Result<bool, ErrorCode> {
        let mut auth_manager = self.auth_manager.write().await;
        match auth_manager.verify_signature(&public_key, &signature) {
            Ok(is_valid) => Ok(is_valid),
            Err(e) => {
                error!("Authentication error: {}", e);
                Ok(false)
            }
        }
    }

    async fn whoami(&self) -> Result<String, ErrorCode> {
        let _auth_manager = self.auth_manager.read().await;
        // For now, return basic info - in a real implementation,
        // you'd track authenticated sessions
        Ok(serde_json::json!({
            "authenticated": true,
            "user_id": "anonymous"
        }).to_string())
    }

    async fn play(&self, script: String) -> Result<PlayResult, ErrorCode> {
        let _supervisor = self.supervisor.read().await;

        // For now, return a simple result since we need to implement execute_script method
        Ok(PlayResult {
            output: format!("Script executed: {}", script)
        })
    }

    async fn create_job(&self, job_params: JobParams) -> Result<String, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        // Use JobBuilder to create a Job instance
        let mut builder = hero_job::JobBuilder::new()
            .caller_id(&job_params.caller_id)
            .context_id(&job_params.context_id)
            .script(&job_params.script)
            .script_type(job_params.script_type);

        // Set timeout if provided
        if let Some(timeout_secs) = job_params.timeout {
            builder = builder.timeout(std::time::Duration::from_secs(timeout_secs));
        }

        // Set prerequisites if provided
        if let Some(prerequisites) = job_params.prerequisites {
            builder = builder.prerequisites(prerequisites);
        }

        // Build the job
        let job = match builder.build() {
            Ok(job) => job,
            Err(e) => {
                error!("Failed to build job: {}", e);
                return Err(ErrorCode::InvalidParams);
            }
        };

        let job_id = job.id.clone();

        // Create the job using the supervisor
        match supervisor.create_job(&job).await {
            Ok(_) => Ok(job_id),
            Err(e) => {
                error!("Failed to create job: {}", e);
                Err(ErrorCode::InternalError)
            }
        }
    }

    async fn start_job(&self, job_id: String) -> Result<StartJobResult, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.start_job(&job_id).await {
            Ok(_) => Ok(StartJobResult { success: true }),
            Err(e) => {
                error!("Failed to start job {}: {}", job_id, e);
                Ok(StartJobResult { success: false })
            }
        }
    }

    async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        _prerequisites: Option<Vec<String>>,
    ) -> Result<String, ErrorCode> {
        // For now, return a simple result
        Ok(format!("Job executed with script: {} (type: {:?})", script, script_type))
    }

    async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.get_job_status(&job_id).await {
            Ok(status) => Ok(status),
            Err(e) => {
                error!("Failed to get job status for {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn get_job_output(&self, job_id: String) -> Result<String, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.get_job_output(&job_id).await {
            Ok(output) => Ok(output.unwrap_or_else(|| "No output available".to_string())),
            Err(e) => {
                error!("Failed to get job output for {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ErrorCode> {
        // For now, return mock logs
        Ok(JobLogsResult {
            logs: format!("Logs for job {}", job_id),
        })
    }

    async fn list_jobs(&self) -> Result<Vec<Job>, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.list_jobs().await {
            Ok(job_ids) => {
                // For now, create minimal Job objects with just the IDs
                // In a real implementation, we'd need a supervisor.get_job() method
                let jobs: Vec<Job> = job_ids.into_iter().map(|job_id| {
                    // Create a minimal job object - this is a temporary solution
                    // until supervisor.get_job() is implemented
                    Job {
                        id: job_id,
                        caller_id: "unknown".to_string(),
                        context_id: "unknown".to_string(),
                        script: "unknown".to_string(),
                        script_type: ScriptType::OSIS,
                        timeout: std::time::Duration::from_secs(30),
                        retries: 0,
                        concurrent: false,
                        log_path: None,
                        env_vars: std::collections::HashMap::new(),
                        prerequisites: Vec::new(),
                        dependents: Vec::new(),
                        created_at: chrono::Utc::now(),
                        updated_at: chrono::Utc::now(),
                    }
                }).collect();
                Ok(jobs)
            },
            Err(e) => {
                error!("Failed to list jobs: {}", e);
                Err(ErrorCode::InternalError)
            }
        }
    }

    async fn stop_job(&self, job_id: String) -> Result<(), ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.stop_job(&job_id).await {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("Failed to stop job {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn delete_job(&self, job_id: String) -> Result<(), ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.delete_job(&job_id).await {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("Failed to delete job {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn clear_all_jobs(&self) -> Result<(), ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.clear_all_jobs().await {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("Failed to clear all jobs: {}", e);
                Err(ErrorCode::InternalError)
            }
        }
    }
}

31
interfaces/openrpc/server/src/types.rs
Normal file
@ -0,0 +1,31 @@
use hero_job::ScriptType;
use serde::{Deserialize, Serialize};

/// Parameters for creating a job
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JobParams {
    pub script: String,
    pub script_type: ScriptType,
    pub caller_id: String,
    pub context_id: String,
    pub timeout: Option<u64>, // timeout in seconds
    pub prerequisites: Option<Vec<String>>,
}

/// Result of script execution
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PlayResult {
    pub output: String,
}

/// Result of starting a job
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct StartJobResult {
    pub success: bool,
}

/// Result of getting job logs
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JobLogsResult {
    pub logs: String,
}

409
interfaces/openrpc/server/tests/integration_tests.rs
Normal file
@ -0,0 +1,409 @@
use hero_openrpc_server::{OpenRpcServer, OpenRpcServerConfig, OpenRpcApiServer, Transport, types::*};
use hero_supervisor::{Supervisor, SupervisorBuilder};
use hero_job::{JobBuilder, JobStatus, ScriptType};
use jsonrpsee_types::error::ErrorCode;
use std::sync::Arc;
use tokio::sync::RwLock;
use std::time::Duration;

/// Helper function to create a test supervisor
async fn create_test_supervisor() -> Arc<RwLock<Supervisor>> {
    let supervisor = SupervisorBuilder::new()
        .redis_url("redis://localhost:6379")
        .build()
        .await
        .expect("Failed to create test supervisor");

    Arc::new(RwLock::new(supervisor))
}

/// Helper function to create a test OpenRPC server
async fn create_test_server() -> OpenRpcServer {
    use std::net::SocketAddr;
    use std::path::PathBuf;

    let config = OpenRpcServerConfig {
        transport: Transport::WebSocket("127.0.0.1:0".parse::<SocketAddr>().unwrap()),
        supervisor_config_path: None,
        db_path: PathBuf::from("/tmp/test_openrpc.db"),
    };
    OpenRpcServer::new(config).await.expect("Failed to create OpenRPC server")
}

#[tokio::test]
async fn test_fetch_nonce() {
    let server = create_test_server().await;
    let public_key = "test_public_key".to_string();

    let result = server.fetch_nonce(public_key).await;
    assert!(result.is_ok());

    let nonce = result.unwrap();
    assert!(!nonce.is_empty());
    assert_eq!(nonce.len(), 64); // Should be a 32-byte hex string
}

#[tokio::test]
async fn test_create_job_success() {
    let server = create_test_server().await;

    let job_params = JobParams {
        script: "print('Hello, World!');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let result = server.create_job(job_params).await;
    assert!(result.is_ok());

    let job_id = result.unwrap();
    assert!(!job_id.is_empty());
    // Job ID should be a valid UUID format
    assert!(uuid::Uuid::parse_str(&job_id).is_ok());
}

#[tokio::test]
async fn test_create_job_with_prerequisites() {
    let server = create_test_server().await;

    let job_params = JobParams {
        script: "print('Job with prerequisites');".to_string(),
        script_type: ScriptType::SAL,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(120),
        prerequisites: Some(vec!["prereq_job_1".to_string(), "prereq_job_2".to_string()]),
    };

    let result = server.create_job(job_params).await;
    assert!(result.is_ok());

    let job_id = result.unwrap();
    assert!(!job_id.is_empty());
}

#[tokio::test]
async fn test_create_job_invalid_params() {
    let server = create_test_server().await;

    // Test with empty caller_id (should fail JobBuilder validation)
    let job_params = JobParams {
        script: "print('Test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "".to_string(), // Empty caller_id should fail
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let result = server.create_job(job_params).await;
    assert!(result.is_err());
    assert_eq!(result.unwrap_err(), ErrorCode::InvalidParams);
}

#[tokio::test]
async fn test_start_job() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Test job');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Then start the job
    let result = server.start_job(job_id).await;
    assert!(result.is_ok());

    let start_result = result.unwrap();
    assert!(start_result.success);
}

#[tokio::test]
async fn test_get_job_status() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Status test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Get job status
    let result = server.get_job_status(job_id).await;
    assert!(result.is_ok());

    let status = result.unwrap();
    // Status should be one of the valid JobStatus variants
    match status {
        JobStatus::Dispatched | JobStatus::WaitingForPrerequisites |
        JobStatus::Started | JobStatus::Error | JobStatus::Finished => {
            // Valid status
        }
    }
}

#[tokio::test]
async fn test_get_job_output() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Output test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Get job output
    let result = server.get_job_output(job_id).await;
    assert!(result.is_ok());

    let output = result.unwrap();
    assert!(!output.is_empty());
}

#[tokio::test]
async fn test_list_jobs() {
    let server = create_test_server().await;

    // Create a few jobs first
    for i in 0..3 {
        let job_params = JobParams {
            script: format!("print('Job {}');", i),
            script_type: ScriptType::OSIS,
            caller_id: "test_caller".to_string(),
            context_id: "test_context".to_string(),
            timeout: Some(60),
            prerequisites: None,
        };

        let _ = server.create_job(job_params).await.unwrap();
    }

    // List all jobs
    let result = server.list_jobs().await;
    assert!(result.is_ok());

    let jobs = result.unwrap();
    assert!(jobs.len() >= 3); // Should have at least the 3 jobs we created

    // Verify job structure
    for job in jobs {
        assert!(!job.id.is_empty());
        assert!(uuid::Uuid::parse_str(&job.id).is_ok());
    }
}

#[tokio::test]
async fn test_stop_job() {
    let server = create_test_server().await;

    // First create and start a job
    let job_params = JobParams {
        script: "print('Stop test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();
    let _ = server.start_job(job_id.clone()).await.unwrap();

    // Stop the job
    let result = server.stop_job(job_id).await;
    assert!(result.is_ok());
}

#[tokio::test]
async fn test_delete_job() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Delete test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Delete the job
    let result = server.delete_job(job_id).await;
    assert!(result.is_ok());
}

#[tokio::test]
async fn test_clear_all_jobs() {
    let server = create_test_server().await;

    // Create a few jobs first
    for i in 0..3 {
        let job_params = JobParams {
            script: format!("print('Clear test {}');", i),
            script_type: ScriptType::OSIS,
            caller_id: "test_caller".to_string(),
            context_id: "test_context".to_string(),
            timeout: Some(60),
            prerequisites: None,
        };

        let _ = server.create_job(job_params).await.unwrap();
    }

    // Clear all jobs
    let result = server.clear_all_jobs().await;
    assert!(result.is_ok());

    // Verify jobs are cleared
    let jobs = server.list_jobs().await.unwrap();
    assert_eq!(jobs.len(), 0);
}

#[tokio::test]
async fn test_run_job() {
    let server = create_test_server().await;

    let script = "print('Run job test');".to_string();
    let script_type = ScriptType::OSIS;
    let prerequisites = None;

    let result = server.run_job(script, script_type, prerequisites).await;
    assert!(result.is_ok());

    let output = result.unwrap();
    assert!(!output.is_empty());
    assert!(output.contains("Run job test"));
}

#[tokio::test]
async fn test_play_script() {
    let server = create_test_server().await;

    let script = "print('Play script test');".to_string();

    let result = server.play(script.clone()).await;
    assert!(result.is_ok());

    let play_result = result.unwrap();
    assert!(!play_result.output.is_empty());
    assert!(play_result.output.contains(&script));
}

#[tokio::test]
async fn test_get_job_logs() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Logs test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Get job logs
    let result = server.get_job_logs(job_id).await;
    assert!(result.is_ok());

    let logs_result = result.unwrap();
    assert!(!logs_result.logs.is_empty());
}

#[tokio::test]
async fn test_job_builder_integration() {
    // Test that JobBuilder is working correctly with all the fields
    let job_params = JobParams {
        script: "print('JobBuilder test');".to_string(),
        script_type: ScriptType::V,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(300),
        prerequisites: Some(vec!["prereq1".to_string(), "prereq2".to_string()]),
    };

    // Build job using JobBuilder (similar to what the server does)
    let mut builder = JobBuilder::new()
        .caller_id(&job_params.caller_id)
        .context_id(&job_params.context_id)
        .script(&job_params.script)
        .script_type(job_params.script_type);

    if let Some(timeout_secs) = job_params.timeout {
        builder = builder.timeout(Duration::from_secs(timeout_secs));
    }

    if let Some(prerequisites) = job_params.prerequisites {
        builder = builder.prerequisites(prerequisites);
    }

    let job = builder.build();
    assert!(job.is_ok());

    let job = job.unwrap();
    assert_eq!(job.caller_id, "test_caller");
    assert_eq!(job.context_id, "test_context");
    assert_eq!(job.script, "print('JobBuilder test');");
    assert_eq!(job.script_type, ScriptType::V);
    assert_eq!(job.timeout, Duration::from_secs(300));
    assert_eq!(job.prerequisites, vec!["prereq1".to_string(), "prereq2".to_string()]);
}

#[tokio::test]
async fn test_error_handling() {
    let server = create_test_server().await;

    // Test getting status for non-existent job
    let result = server.get_job_status("non_existent_job".to_string()).await;
    // Should return an error or handle gracefully
    match result {
        Ok(_) => {
            // Some implementations might return a default status
        },
        Err(error_code) => {
            assert_eq!(error_code, ErrorCode::InvalidParams);
        }
    }

    // Test getting output for non-existent job
    let result = server.get_job_output("non_existent_job".to_string()).await;
    match result {
        Ok(output) => {
            // Should return "No output available" or similar
            assert!(output.contains("No output available") || output.is_empty());
        },
        Err(error_code) => {
            assert_eq!(error_code, ErrorCode::InvalidParams);
        }
    }
}

@ -150,7 +150,6 @@ async fn main() -> std::io::Result<()> {
    }
    println!(" Authentication: {}", if config.auth { "ENABLED" } else { "DISABLED" });
    println!(" TLS/WSS: {}", if config.tls { "ENABLED" } else { "DISABLED" });
    println!(" Webhooks: {}", if config.webhooks { "ENABLED" } else { "DISABLED" });
    println!(" Circles configured: {}", config.circles.len());

    if config.tls {
@ -160,12 +159,6 @@ async fn main() -> std::io::Result<()> {
        }
    }

    if config.webhooks {
        println!(" Webhook secrets loaded from environment variables:");
        println!(" - STRIPE_WEBHOOK_SECRET");
        println!(" - IDENFY_WEBHOOK_SECRET");
    }

    if config.auth && !config.circles.is_empty() {
        println!(" Configured circles:");
        for (circle_name, members) in &config.circles {