forked from herocode/horus

Compare commits: 1208ef00a1...main

5 commits (author and date metadata lost in extraction): 9f6032e2e1, 5d8189a653, 4e3d7a815d, 8c33c73b3c, 7675dc2150
```diff
@@ -13,6 +13,7 @@ members = [
     "lib/clients/osiris",
     "lib/clients/supervisor",
     "lib/models/job",
+    "lib/models/context",
     "lib/osiris/core",
     "lib/osiris/derive",
     "lib/runner",
@@ -29,6 +30,7 @@ repository.workspace = true
 [dependencies]
 # Integration test dependencies - no library dependencies, tests spawn binaries
 hero-supervisor-openrpc-client = { path = "lib/clients/supervisor" }
+hero-coordinator-client = { path = "lib/clients/coordinator" }
 hero-job = { path = "lib/models/job" }
 hero-job-client = { path = "lib/clients/job" }
 tokio = { workspace = true }
```
```diff
@@ -70,6 +70,21 @@ struct Cli {
         help = "Bind port for WebSocket JSON-RPC server (default: 9653)"
     )]
     api_ws_port: u16,
+
+    #[arg(
+        long = "supervisor-transport",
+        env = "SUPERVISOR_TRANSPORT",
+        default_value = "mycelium",
+        help = "Transport to use for supervisor communication: 'mycelium' or 'http' (default: mycelium)"
+    )]
+    supervisor_transport: String,
+
+    #[arg(
+        long = "supervisor-http-url",
+        env = "SUPERVISOR_HTTP_URL",
+        help = "HTTP URL for supervisor when using HTTP transport (e.g., http://127.0.0.1:3031)"
+    )]
+    supervisor_http_url: Option<String>,
 }

 #[tokio::main]
@@ -99,8 +114,9 @@ async fn main() {
     // Shared application state
     let state = Arc::new(herocoordinator::rpc::AppState::new(service));

-    // Start router workers (auto-discovered contexts) using a single global SupervisorHub (no separate inbound listener)
-    {
+    // Start router workers (auto-discovered contexts) using a single global SupervisorHub
+    // Skip router if using HTTP transport (no mycelium needed)
+    if cli.supervisor_transport == "mycelium" {
         let base_url = format!("http://{}:{}", cli.mycelium_ip, cli.mycelium_port);
         let hub = herocoordinator::clients::SupervisorHub::new(
             base_url.clone(),
@@ -118,6 +134,9 @@ async fn main() {
         };
         // Per-context outbound delivery loops (replies handled by SupervisorHub)
         let _auto_handle = herocoordinator::router::start_router_auto(service_for_router, cfg);
+        info!("Router started with mycelium transport");
+    } else {
+        info!("Skipping router - using HTTP transport for supervisor communication");
     }

     // Build RPC modules for both servers
```
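The two new flags reuse the clap derive pattern already present on `api_ws_port`. A minimal, self-contained sketch of how such arguments parse, assuming clap 4 with the `env` feature enabled (everything outside the two new fields is abbreviated here for illustration):

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Cli {
    /// Transport to use for supervisor communication: 'mycelium' or 'http'
    #[arg(long = "supervisor-transport", env = "SUPERVISOR_TRANSPORT", default_value = "mycelium")]
    supervisor_transport: String,

    /// HTTP URL for the supervisor; only consulted when the transport is 'http'
    #[arg(long = "supervisor-http-url", env = "SUPERVISOR_HTTP_URL")]
    supervisor_http_url: Option<String>,
}

fn main() {
    // Accepts `--supervisor-transport http --supervisor-http-url http://127.0.0.1:3031`
    // or the SUPERVISOR_TRANSPORT / SUPERVISOR_HTTP_URL environment variables.
    let cli = Cli::parse();
    println!("{} {:?}", cli.supervisor_transport, cli.supervisor_http_url);
}
```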
```diff
@@ -70,6 +70,21 @@ struct Cli {
         help = "Bind port for WebSocket JSON-RPC server (default: 9653)"
     )]
     api_ws_port: u16,
+
+    #[arg(
+        long = "supervisor-transport",
+        env = "SUPERVISOR_TRANSPORT",
+        default_value = "mycelium",
+        help = "Transport to use for supervisor communication: 'mycelium' or 'http' (default: mycelium)"
+    )]
+    supervisor_transport: String,
+
+    #[arg(
+        long = "supervisor-http-url",
+        env = "SUPERVISOR_HTTP_URL",
+        help = "HTTP URL for supervisor when using HTTP transport (e.g., http://127.0.0.1:3031)"
+    )]
+    supervisor_http_url: Option<String>,
 }

 #[tokio::main]
@@ -99,8 +114,9 @@ async fn main() {
     // Shared application state
     let state = Arc::new(hero_coordinator::rpc::AppState::new(service));

-    // Start router workers (auto-discovered contexts) using a single global SupervisorHub (no separate inbound listener)
-    {
+    // Start router workers (auto-discovered contexts) using a single global SupervisorHub
+    // Skip router if using HTTP transport (no mycelium needed)
+    if cli.supervisor_transport == "mycelium" {
         let base_url = format!("http://{}:{}", cli.mycelium_ip, cli.mycelium_port);
         let mycelium = Arc::new(
             hero_supervisor_openrpc_client::transports::MyceliumClient::new(&base_url)
@@ -121,6 +137,9 @@ async fn main() {
         };
         // Per-context outbound delivery loops (replies handled by SupervisorHub)
         let _auto_handle = hero_coordinator::router::start_router_auto(service_for_router, cfg);
+        info!("Router started with mycelium transport");
+    } else {
+        info!("Skipping router - using HTTP transport for supervisor communication");
     }

     // Build RPC modules for both servers
```
```diff
@@ -200,41 +200,8 @@ impl FlowCreate {
     }
 }

-#[derive(Debug, Deserialize)]
-pub struct JobCreate {
-    pub id: u32,
-    pub caller_id: u32,
-    pub context_id: u32,
-    pub script: String,
-    pub runner: Option<String>,
-    pub timeout: u32,
-    pub retries: u8,
-    pub env_vars: HashMap<String, String>,
-    pub prerequisites: Vec<String>,
-    pub depends: Vec<u32>,
-}
-
-impl JobCreate {
-    pub fn into_domain(self) -> Job {
-        use chrono::Utc;
-
-        // Convert old format to hero_job::Job
-        // Note: depends and prerequisites are workflow fields that need separate storage
-        Job {
-            id: self.id.to_string(),
-            caller_id: self.caller_id.to_string(),
-            context_id: self.context_id.to_string(),
-            payload: self.script,
-            runner: self.runner.unwrap_or_else(|| "default-runner".to_string()),
-            timeout: self.timeout as u64,
-            env_vars: self.env_vars,
-            created_at: Utc::now(),
-            updated_at: Utc::now(),
-            signatures: Vec::new(),
-        }
-        // TODO: Store depends and prerequisites separately in JobSummary/DAG
-    }
-}
+// JobCreate removed - coordinator only manages flows, not individual jobs
+// Jobs should be created by the supervisor or other services

 #[derive(Debug, Deserialize)]
 pub struct MessageCreate {
```
```diff
@@ -247,40 +214,31 @@ pub struct MessageCreate {
     pub timeout: u32,
     pub timeout_ack: u32,
     pub timeout_result: u32,
-    pub job: Vec<JobCreate>,
+    // Jobs removed - use flow nodes instead
 }
 impl MessageCreate {
     pub fn into_domain(self) -> Message {
         use crate::time::current_timestamp;

         let ts = current_timestamp();

-        let MessageCreate {
-            id,
-            caller_id,
-            context_id,
-            message,
-            message_type,
-            message_format_type,
-            timeout,
-            timeout_ack,
-            timeout_result,
-            job,
-        } = self;
-
-        // Convert to Message
-        // Note: flow_id is set to 0 for now, should be set by the caller
         Message {
-            id,
-            caller_id,
-            context_id,
-            flow_id: 0, // TODO: MessageCreate should include flow_id
-            message,
-            message_type,
-            message_format_type,
-            timeout,
-            timeout_ack,
-            timeout_result,
+            id: self.id,
+            caller_id: self.caller_id,
+            context_id: self.context_id,
+            flow_id: 0, // TODO: Get from params or context
+            message: self.message,
+            message_type: self.message_type,
+            message_format_type: self.message_format_type,
+            timeout: self.timeout,
+            timeout_ack: self.timeout_ack,
+            timeout_result: self.timeout_result,
             transport_id: None,
             transport_status: None,
             nodes: Vec::new(), // TODO: MessageCreate should include nodes
-            job: job.into_iter().map(JobCreate::into_domain).collect(),
+            job: Vec::new(), // Jobs removed - coordinator only manages flows
             logs: Vec::new(),
             created_at: ts,
             updated_at: ts,
```
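For reference, a `MessageCreate` request body after this change would look roughly like the following; the field values are illustrative only, and `flow_id`, `nodes`, and `job` are filled in server-side per the TODOs above:

```rust
use serde_json::json;

fn main() {
    // Illustrative payload matching the slimmed-down MessageCreate:
    // the old `job` array is gone.
    let params = json!({
        "id": 1,
        "caller_id": 10,
        "context_id": 2,
        "message": "hello",
        "message_type": "chat",
        "message_format_type": "text",
        "timeout": 60,
        "timeout_ack": 10,
        "timeout_result": 120
    });
    println!("{params}");
}
```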
```diff
@@ -330,17 +288,7 @@ pub struct FlowLoadParams {
     pub id: u32,
 }

-#[derive(Debug, Deserialize)]
-pub struct JobCreateParams {
-    pub context_id: u32,
-    pub job: JobCreate,
-}
-#[derive(Debug, Deserialize)]
-pub struct JobLoadParams {
-    pub context_id: u32,
-    pub caller_id: u32,
-    pub id: u32,
-}
+// JobCreateParams and JobLoadParams removed - coordinator only manages flows

 #[derive(Debug, Deserialize)]
 pub struct MessageCreateParams {
```
```diff
@@ -506,42 +454,8 @@ pub fn build_module(state: Arc<AppState>) -> RpcModule<()> {
             .expect("register flow.start");
     }

-    // Job
-    {
-        let state = state.clone();
-        module
-            .register_async_method("job.create", move |params, _caller, _ctx| {
-                let state = state.clone();
-                async move {
-                    let p: JobCreateParams = params.parse().map_err(invalid_params_err)?;
-                    let job = p.job.into_domain();
-                    let job = state
-                        .service
-                        .create_job(p.context_id, job)
-                        .await
-                        .map_err(storage_err)?;
-                    Ok::<_, ErrorObjectOwned>(job)
-                }
-            })
-            .expect("register job.create");
-    }
-    {
-        let state = state.clone();
-        module
-            .register_async_method("job.load", move |params, _caller, _ctx| {
-                let state = state.clone();
-                async move {
-                    let p: JobLoadParams = params.parse().map_err(invalid_params_err)?;
-                    let job = state
-                        .service
-                        .load_job(p.context_id, p.caller_id, p.id)
-                        .await
-                        .map_err(storage_err)?;
-                    Ok::<_, ErrorObjectOwned>(job)
-                }
-            })
-            .expect("register job.load");
-    }
+    // Job endpoints removed - coordinator only manages flows
+    // Jobs should be created and managed by the supervisor

     // Message
     {
```
bin/runners/osiris/examples/engine.rs (new file, 197 lines)
```rust
//! Osiris Engine Example
//!
//! This example demonstrates how to:
//! 1. Create an Osiris Rhai engine with all registered functions
//! 2. Execute Rhai scripts using the actual Osiris API
//! 3. Test context creation, save, get, list, delete operations
//!
//! Run with: cargo run --example engine -p runner-osiris

use rhai::{Dynamic, Map};

// Import the actual engine creation function
mod engine_impl {
    include!("../src/engine.rs");
}

use engine_impl::create_osiris_engine;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🚀 Osiris Engine Example\n");
    println!("==========================================\n");

    // Create the engine with all Osiris functions registered
    let mut engine = create_osiris_engine()?;

    // Set up context tags (simulating what the runner does)
    let mut tag_map = Map::new();
    let signatories: rhai::Array = vec![
        Dynamic::from("pk1".to_string()),
        Dynamic::from("pk2".to_string()),
    ];
    tag_map.insert("SIGNATORIES".into(), Dynamic::from(signatories));
    tag_map.insert("CALLER_ID".into(), "test-caller".to_string().into());
    tag_map.insert("CONTEXT_ID".into(), "test-context".to_string().into());
    engine.set_default_tag(Dynamic::from(tag_map));

    // Test 1: Simple Rhai script
    println!("📝 Test 1: Simple Rhai Script");
    let script = r#"
        let x = 10;
        let y = 20;
        x + y
    "#;

    match engine.eval::<i64>(script) {
        Ok(result) => println!(" ✓ Result: {}\n", result),
        Err(e) => println!(" ✗ Error: {}\n", e),
    }

    // Test 2: Get context (Osiris function)
    println!("📝 Test 2: Get Context");
    let context_script = r#"
        // Get context with participants (must be signatories)
        let ctx = get_context(["pk1", "pk2"]);
        ctx.context_id()
    "#;

    match engine.eval::<String>(context_script) {
        Ok(result) => println!(" ✓ Context ID: {}\n", result),
        Err(e) => println!(" ✗ Error: {}\n", e),
    }

    // Test 3: Create a Note and save it
    println!("📝 Test 3: Create and Save a Note");
    let note_script = r#"
        let ctx = get_context(["pk1"]);
        // Use the builder-style API
        let my_note = note("test-note-123")
            .title("Test Note")
            .content("This is a test note");
        ctx.save(my_note);
        "Note saved successfully"
    "#;

    match engine.eval::<String>(note_script) {
        Ok(result) => println!(" ✓ {}\n", result),
        Err(e) => println!(" ✗ Error: {}\n", e),
    }

    // Test 4: Get from collection
    println!("📝 Test 4: Get from Collection");
    let get_script = r#"
        let ctx = get_context(["pk1"]);
        // Try to get a note (will fail if doesn't exist, but shows the API works)
        ctx.get("notes", "test-note-123")
    "#;

    match engine.eval::<Dynamic>(get_script) {
        Ok(result) => println!(" ✓ Result: {:?}\n", result),
        Err(e) => println!(" ⚠ Error (expected if note doesn't exist): {}\n", e),
    }

    // Test 5: List from collection
    println!("📝 Test 5: List from Collection");
    let list_script = r#"
        let ctx = get_context(["pk1"]);
        // List all notes in the context
        ctx.list("notes")
    "#;

    match engine.eval::<Dynamic>(list_script) {
        Ok(result) => println!(" ✓ Result: {:?}\n", result),
        Err(e) => println!(" ⚠ Error: {}\n", e),
    }

    // Test 6: Delete from collection
    println!("📝 Test 6: Delete from Collection");
    let delete_script = r#"
        let ctx = get_context(["pk1"]);
        // Try to delete a note
        ctx.delete("notes", "test-note-123")
    "#;

    match engine.eval::<Dynamic>(delete_script) {
        Ok(result) => println!(" ✓ Result: {:?}\n", result),
        Err(e) => println!(" ⚠ Error (expected if note doesn't exist): {}\n", e),
    }

    // Test 7: Create an Event
    println!("📝 Test 7: Create and Save an Event");
    let event_script = r#"
        let ctx = get_context(["pk1"]);
        // event() takes (namespace, title) in the module version
        let my_event = event("test-event-123", "Test Event")
            .description("This is a test event");
        ctx.save(my_event);
        "Event saved successfully"
    "#;

    match engine.eval::<String>(event_script) {
        Ok(result) => println!(" ✓ {}\n", result),
        Err(e) => println!(" ✗ Error: {}\n", e),
    }

    // Test 8: Create a User (HeroLedger)
    println!("📝 Test 8: Create and Save a User");
    let user_script = r#"
        let ctx = get_context(["pk1"]);
        let my_user = new_user()
            .username("testuser")
            .add_email("test@example.com")
            .pubkey("pk1");
        ctx.save(my_user);
        "User saved successfully"
    "#;

    match engine.eval::<String>(user_script) {
        Ok(result) => println!(" ✓ {}\n", result),
        Err(e) => println!(" ✗ Error: {}\n", e),
    }

    // Test 9: Create a Group (HeroLedger)
    println!("📝 Test 9: Create and Save a Group");
    let group_script = r#"
        let ctx = get_context(["pk1"]);
        let my_group = new_group()
            .name("Test Group")
            .description("A test group");
        ctx.save(my_group);
        "Group saved successfully"
    "#;

    match engine.eval::<String>(group_script) {
        Ok(result) => println!(" ✓ {}\n", result),
        Err(e) => println!(" ✗ Error: {}\n", e),
    }

    // Test 10: List users
    println!("📝 Test 10: List Users from Collection");
    let list_users_script = r#"
        let ctx = get_context(["pk1"]);
        ctx.list("users")
    "#;

    match engine.eval::<Dynamic>(list_users_script) {
        Ok(result) => println!(" ✓ Users: {:?}\n", result),
        Err(e) => println!(" ⚠ Error: {}\n", e),
    }

    println!("==========================================");
    println!("🎉 All tests completed!\n");
    println!("📚 Available Object Types:");
    println!(" - Note: note(id).title(...).content(...)");
    println!(" - Event: event(id, title).description(...)");
    println!(" - User: new_user().username(...).add_email(...).pubkey(...)");
    println!(" - Group: new_group().name(...).description(...)");
    println!(" - Account: new_account()...");
    println!(" - And many more: KycSession, FlowTemplate, FlowInstance, Contract, etc.");
    println!("\n📖 Available Operations:");
    println!(" - ctx.save(object) - Save an object");
    println!(" - ctx.get(collection, id) - Get an object by ID");
    println!(" - ctx.list(collection) - List all objects in collection");
    println!(" - ctx.delete(collection, id) - Delete an object");

    Ok(())
}
```
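A note on the `mod engine_impl { include!("../src/engine.rs"); }` pattern above: `include!` splices the source of `../src/engine.rs` into the example at compile time, so the example can call `create_osiris_engine` even though the runner binary does not export it as a library. The trade-off is that the included file is compiled a second time and must be self-contained at the include site.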
```diff
@@ -140,39 +140,8 @@ impl CoordinatorClient {
         }
     }

-    // ==================== Job Methods ====================
-
-    /// Create a new job in a context
-    pub async fn job_create(&self, context_id: u32, job: JobCreate) -> Result<Job> {
-        let params = serde_json::json!({
-            "context_id": context_id,
-            "job": job
-        });
-        self.call("job.create", params).await
-    }
-
-    /// Load an existing job from a context
-    pub async fn job_load(&self, context_id: u32, caller_id: u32, id: u32) -> Result<Job> {
-        let params = serde_json::json!({
-            "context_id": context_id,
-            "caller_id": caller_id,
-            "id": id
-        });
-        self.call("job.load", params).await
-    }
-
-    /// Try to create a job, or load it if it already exists
-    pub async fn job_create_or_load(&self, context_id: u32, job: JobCreate) -> Result<Job> {
-        let caller_id = job.caller_id;
-        let job_id = job.id;
-        match self.job_create(context_id, job).await {
-            Ok(j) => Ok(j),
-            Err(CoordinatorError::AlreadyExists | CoordinatorError::Storage(_)) => {
-                self.job_load(context_id, caller_id, job_id).await
-            }
-            Err(e) => Err(e),
-        }
-    }
+    // Job methods removed - coordinator only manages flows
+    // Jobs should be created and managed by the supervisor

     // ==================== Flow Methods ====================
```
```diff
@@ -290,13 +259,19 @@ impl CoordinatorClient {

     async fn call<T: serde::de::DeserializeOwned>(&self, method: &str, params: Value) -> Result<T> {
         use jsonrpsee::core::client::ClientT;
-        use jsonrpsee::core::params::ArrayParams;
+        use jsonrpsee::core::params::ObjectParams;

-        let mut array_params = ArrayParams::new();
-        array_params.insert(params).map_err(|e| CoordinatorError::Rpc(e.to_string()))?;
+        // Coordinator expects params as named parameters (object), not positional (array)
+        // Convert the Value object to ObjectParams
+        let mut object_params = ObjectParams::new();
+        if let Value::Object(map) = params {
+            for (key, value) in map {
+                object_params.insert(&key, value).map_err(|e| CoordinatorError::Rpc(e.to_string()))?;
+            }
+        }

-        self.client
-            .request(method, array_params)
+        let result: T = self.client
+            .request(method, object_params)
             .await
             .map_err(|e| {
                 let err_str = e.to_string();
@@ -311,7 +286,9 @@ impl CoordinatorClient {
             } else {
                 CoordinatorError::Rpc(err_str)
             }
-        })
+        })?;
+
+        Ok(result)
     }
 }
```
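The switch from `ArrayParams` to `ObjectParams` changes the wire format of every request this client sends. A sketch of the difference using only serde_json (the method name and values here are illustrative, not taken from the diff):

```rust
use serde_json::json;

fn main() {
    // Positional params (what ArrayParams produced): the whole object is
    // wrapped in a one-element array, so the server sees one positional argument.
    let positional = json!({
        "jsonrpc": "2.0", "id": 1, "method": "flow.load",
        "params": [{ "context_id": 2, "id": 7 }]
    });

    // Named params (what ObjectParams produces): each key becomes a named
    // parameter, which is what the coordinator's handlers parse.
    let named = json!({
        "jsonrpc": "2.0", "id": 1, "method": "flow.load",
        "params": { "context_id": 2, "id": 7 }
    });

    println!("{positional}\n{named}");
}
```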
```diff
@@ -103,24 +103,8 @@ pub enum ScriptType {

 // ==================== Job ====================

-/// Parameters for creating a job
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct JobCreate {
-    pub id: u32,
-    pub caller_id: u32,
-    pub context_id: u32,
-    pub script: String,
-    pub script_type: ScriptType,
-    pub timeout: u64,
-    #[serde(default)]
-    pub retries: u8,
-    #[serde(default)]
-    pub env_vars: HashMap<String, String>,
-    #[serde(default)]
-    pub prerequisites: Vec<u32>,
-    #[serde(default)]
-    pub depends: Vec<u32>,
-}
+// JobCreate removed - coordinator only manages flows, not individual jobs
+// Use hero_job::Job from lib/models/job for job operations

 // ==================== Flow ====================
```
```diff
@@ -5,13 +5,19 @@ edition.workspace = true
 description = "Osiris client library"
 license = "MIT OR Apache-2.0"

+[features]
+default = []
+zdfz = ["dep:zdfz-models"]
+
 [dependencies]
+zdfz-models = { path = "../../../../../zdfz/sdk/models", optional = true }
 # Core dependencies
 serde.workspace = true
 serde_json.workspace = true
 anyhow.workspace = true
 thiserror.workspace = true
 chrono.workspace = true
+paste = "1.0"

 # HTTP client
 reqwest = { version = "0.12", default-features = false, features = ["json"] }
```
```diff
@@ -13,10 +13,15 @@ use thiserror::Error;
 pub mod kyc;
 pub mod payment;
 pub mod communication;
+pub mod macros;
+
+#[cfg(feature = "zdfz")]
+pub mod zdfz_extensions;

 pub use kyc::*;
 pub use payment::*;
 pub use communication::*;
+pub use macros::*;

 #[derive(Debug, Error)]
 pub enum OsirisClientError {
@@ -34,6 +39,9 @@ pub enum OsirisClientError {

     #[error("Command execution failed: {0}")]
     CommandFailed(String),
+
+    #[error("Serialization failed: {0}")]
+    SerializationFailed(String),
 }

 /// Osiris client with CQRS support
```
lib/clients/osiris/src/macros.rs (new file, 204 lines)
```rust
//! Macros for generating CRUD methods on OsirisClient
//!
//! These macros allow you to quickly generate standard CRUD operations
//! and custom methods for your models.

/// Generate CRUD methods for a model on OsirisClient
///
/// This macro generates 5 standard methods:
/// - {collection}_create
/// - {collection}_get
/// - {collection}_update
/// - {collection}_delete
/// - {collection}_list
///
/// # Example
///
/// ```rust
/// use osiris_client::{OsirisClient, impl_osiris_crud};
///
/// #[derive(serde::Serialize, serde::Deserialize)]
/// struct User {
///     id: String,
///     name: String,
/// }
///
/// impl_osiris_crud!(User, "users", "id");
///
/// // Now you can use:
/// // client.users_create(&user).await?;
/// // client.users_get("123").await?;
/// // client.users_update("123", &user).await?;
/// // client.users_delete("123").await?;
/// // client.users_list().await?;
/// ```
#[macro_export]
macro_rules! impl_osiris_crud {
    ($model:ty, $collection:expr, $id_field:expr) => {
        paste::paste! {
            impl $crate::OsirisClient {
                /// Create a new instance
                #[doc = "Create a new " $collection " instance"]
                pub async fn [<$collection:snake _create>](&self, model: &$model) -> Result<$model, $crate::OsirisClientError> {
                    let json = serde_json::to_string(model)
                        .map_err(|e| $crate::OsirisClientError::SerializationFailed(e.to_string()))?;

                    // Create Rhai script that uses Osiris context API
                    // Note: The actual object creation depends on the model type
                    // For now, we serialize the data and would need model-specific constructors
                    let script = format!(
                        r#"
                        let ctx = get_context(["system"]);
                        let data = {};
                        // TODO: Model-specific object creation
                        // For now, this is a placeholder
                        data
                        "#,
                        json
                    );

                    let response = self.execute_script(&script).await?;
                    // TODO: Parse response from job result
                    Err($crate::OsirisClientError::CommandFailed("Not yet implemented".to_string()))
                }

                /// Get an instance by ID
                #[doc = "Get a " $collection " instance by ID"]
                pub async fn [<$collection:snake _get>](&self, id: &str) -> Result<$model, $crate::OsirisClientError> {
                    let script = format!(
                        r#"
                        let ctx = get_context(["system"]);
                        ctx.get("{}", "{}")
                        "#,
                        $collection, id
                    );

                    let response = self.execute_script(&script).await?;
                    // TODO: Parse response from job result
                    Err($crate::OsirisClientError::CommandFailed("Not yet implemented".to_string()))
                }

                /// Update an existing instance
                #[doc = "Update an existing " $collection " instance"]
                pub async fn [<$collection:snake _update>](&self, id: &str, model: &$model) -> Result<$model, $crate::OsirisClientError> {
                    let json = serde_json::to_string(model)
                        .map_err(|e| $crate::OsirisClientError::SerializationFailed(e.to_string()))?;

                    let script = format!(
                        r#"
                        let ctx = get_context(["system"]);
                        let obj = ctx.get("{}", "{}");
                        let data = {};
                        // TODO: Update object fields from data
                        ctx.save(obj);
                        obj
                        "#,
                        $collection, id, json
                    );

                    let response = self.execute_script(&script).await?;
                    // TODO: Parse response from job result
                    Err($crate::OsirisClientError::CommandFailed("Not yet implemented".to_string()))
                }

                /// Delete an instance
                #[doc = "Delete a " $collection " instance"]
                pub async fn [<$collection:snake _delete>](&self, id: &str) -> Result<(), $crate::OsirisClientError> {
                    let script = format!(
                        r#"
                        let ctx = get_context(["system"]);
                        ctx.delete("{}", "{}")
                        "#,
                        $collection, id
                    );

                    self.execute_script(&script).await?;
                    Ok(())
                }

                /// List all instances
                #[doc = "List all " $collection " instances"]
                pub async fn [<$collection:snake _list>](&self) -> Result<Vec<$model>, $crate::OsirisClientError> {
                    let script = format!(
                        r#"
                        let ctx = get_context(["system"]);
                        ctx.list("{}")
                        "#,
                        $collection
                    );

                    let response = self.execute_script(&script).await?;
                    // TODO: Parse response from job result
                    Err($crate::OsirisClientError::CommandFailed("Not yet implemented".to_string()))
                }
            }
        }
    };
}

/// Generate a custom method on a model
///
/// This macro generates a method that calls a custom Rhai function on the model.
///
/// # Example
///
/// ```rust
/// use osiris_client::{OsirisClient, impl_osiris_method};
///
/// #[derive(serde::Serialize, serde::Deserialize)]
/// struct CalendarEvent {
///     id: String,
///     start_time: i64,
/// }
///
/// impl_osiris_method!(CalendarEvent, "calendar_events", reschedule, new_start: i64, new_end: i64);
///
/// // Now you can use:
/// // client.calendar_events_reschedule("123", 1234567890, 1234567900).await?;
/// ```
#[macro_export]
macro_rules! impl_osiris_method {
    ($model:ty, $collection:expr, $method_name:ident $(, $param:ident: $param_type:ty)*) => {
        paste::paste! {
            impl $crate::OsirisClient {
                #[doc = "Call " $method_name " on a " $collection " instance"]
                pub async fn [<$collection:snake _ $method_name>](&self, id: &str $(, $param: $param_type)*) -> Result<$model, $crate::OsirisClientError> {
                    let params = serde_json::json!({
                        $(stringify!($param): $param),*
                    });

                    let script = format!(
                        r#"
                        let ctx = get_context(["system"]);
                        let obj = ctx.get("{}", "{}");
                        // TODO: Call custom method on object
                        // obj.{}({});
                        ctx.save(obj);
                        obj
                        "#,
                        $collection, id, stringify!($method_name), params
                    );

                    let response = self.execute_script(&script).await?;
                    // TODO: Parse response from job result
                    Err($crate::OsirisClientError::CommandFailed("Not yet implemented".to_string()))
                }
            }
        }
    };
}

#[cfg(test)]
mod tests {
    use super::*;

    // Example model for testing
    #[derive(serde::Serialize, serde::Deserialize)]
    struct TestModel {
        id: String,
        name: String,
    }

    // This would generate the methods (can't actually test async in doc tests easily)
    // impl_osiris_crud!(TestModel, "test_models", "id");
}
```
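The method names come from paste's identifier concatenation: `[<$collection:snake _create>]` pastes the string literal passed as `$collection` into a new identifier and snake_cases it, so `impl_osiris_crud!(User, "users", "id")` yields `users_create`, `users_get`, and so on (which is why the doc examples above use that naming). A stripped-down sketch of just that mechanism, assuming the `paste` crate is available:

```rust
// Minimal demonstration of the paste-based name generation used above.
struct Client;

macro_rules! make_getter {
    ($collection:expr) => {
        paste::paste! {
            impl Client {
                // Expands to a method literally named `users_get` for "users".
                pub fn [<$collection:snake _get>](&self, id: &str) -> String {
                    format!("get {} from {}", id, $collection)
                }
            }
        }
    };
}

make_getter!("users");

fn main() {
    let c = Client;
    println!("{}", c.users_get("123"));
}
```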
lib/clients/osiris/src/zdfz_extensions.rs (new file, 24 lines)
```rust
//! ZDFZ model extensions for OsirisClient
//!
//! This module generates CRUD and custom methods for ZDFZ models.
//! It must be in the osiris-client crate to satisfy Rust's orphan rules.

use crate::{impl_osiris_crud, impl_osiris_method};

// Import ZDFZ models - these will be available when zdfz-models is a dependency
#[cfg(feature = "zdfz")]
use zdfz_models::*;

// ========== Core Business Models ==========

// Digital Residents - Individual users of the freezone
#[cfg(feature = "zdfz")]
impl_osiris_crud!(ApiDigitalResident, "digital_residents", "resident_id");

// Free Zone Companies - Companies registered in the freezone
#[cfg(feature = "zdfz")]
impl_osiris_crud!(FreeZoneCompany, "free_zone_companies", "fzc_id");

// Invoices - Financial documents for companies
#[cfg(feature = "zdfz")]
impl_osiris_crud!(FreeZoneInvoice, "invoices", "fz_invoice_id");
```
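With the `zdfz` feature enabled, each `impl_osiris_crud!` invocation above expands into the five snake_case methods on `OsirisClient`. A hedged usage sketch (the client value, the invoice id, and the calling function are hypothetical; note the TODOs in the macro bodies mean most of these currently return `CommandFailed("Not yet implemented")` after submitting the script):

```rust
// Hypothetical usage, assuming an OsirisClient `client` built elsewhere
// and the crate compiled with `--features zdfz`.
async fn demo(client: &osiris_client::OsirisClient) -> Result<(), osiris_client::OsirisClientError> {
    // Generated by impl_osiris_crud!(FreeZoneInvoice, "invoices", "fz_invoice_id"):
    let _invoice = client.invoices_get("fz-2024-0001").await?; // hypothetical id
    let _all = client.invoices_list().await?;
    client.invoices_delete("fz-2024-0001").await?;
    Ok(())
}
```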
lib/models/context/Cargo.toml (new file, 18 lines)
```toml
[package]
name = "hero-context"
version.workspace = true
edition.workspace = true
description = "Context model for Hero platform"
license = "MIT OR Apache-2.0"

[dependencies]
serde.workspace = true
serde_json.workspace = true
chrono.workspace = true
rhai = { version = "1.19", features = ["sync"] }

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
uuid.workspace = true

[target.'cfg(target_arch = "wasm32")'.dependencies]
uuid = { workspace = true, features = ["js"] }
```
lib/models/context/src/access.rs (new file, 181 lines)
```rust
//! Access Control Logic for Context

use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// Unified ACL configuration for objects
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ObjectAcl {
    /// Per-user permissions for this object type
    /// Maps public key -> list of permissions
    pub permissions: HashMap<String, Vec<ObjectPermission>>,

    /// Multi-signature requirements (optional)
    pub multi_sig: Option<MultiSigRequirement>,
}

/// Permissions for object operations
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum ObjectPermission {
    /// Can create new objects of this type
    Create,

    /// Can read objects of this type
    Read,

    /// Can update existing objects of this type
    Update,

    /// Can delete objects of this type
    Delete,

    /// Can list all objects of this type
    List,
}

/// SAL access control - binary permission (can call or not)
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct SalAcl {
    /// List of public keys allowed to call this SAL
    pub allowed_callers: Vec<String>,

    /// Multi-signature requirements (optional)
    pub multi_sig: Option<MultiSigRequirement>,
}

/// Global permissions - simple RWX model
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum GlobalPermission {
    /// Can read data
    Read,

    /// Can write/modify data
    Write,

    /// Can execute operations
    Execute,
}

/// Multi-signature requirements
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum MultiSigRequirement {
    /// Require ALL specified signers to sign unanimously
    Unanimous {
        /// List of public keys that must ALL sign
        required_signers: Vec<String>,
    },

    /// Require a minimum number of signatures from a set
    Threshold {
        /// Minimum number of signatures required
        min_signatures: usize,

        /// Optional: specific set of allowed signers
        /// If None, any signers from the context are allowed
        allowed_signers: Option<Vec<String>>,
    },

    /// Require a percentage of signers from a set
    Percentage {
        /// Percentage required (0.0 to 1.0, e.g., 0.66 for 66%)
        percentage: f64,

        /// Optional: specific set of allowed signers
        /// If None, any signers from the context are allowed
        allowed_signers: Option<Vec<String>>,
    },
}

impl MultiSigRequirement {
    /// Check if signatories satisfy this multi-sig requirement
    pub fn check(&self, signatories: &[String], total_members: usize) -> bool {
        match self {
            MultiSigRequirement::Unanimous { required_signers } => {
                // ALL required signers must be present
                required_signers.iter().all(|signer| signatories.contains(signer))
            }
            MultiSigRequirement::Threshold { min_signatures, allowed_signers } => {
                // Check if we have enough signatures
                if signatories.len() < *min_signatures {
                    return false;
                }

                // If allowed_signers is specified, check all signatories are in the list
                if let Some(allowed) = allowed_signers {
                    signatories.iter().all(|sig| allowed.contains(sig))
                } else {
                    true
                }
            }
            MultiSigRequirement::Percentage { percentage, allowed_signers } => {
                if let Some(allowed) = allowed_signers {
                    // Filter signatories to only those in allowed list
                    let valid_sigs: Vec<_> = signatories
                        .iter()
                        .filter(|sig| allowed.contains(sig))
                        .collect();

                    let required_count = (allowed.len() as f64 * percentage).ceil() as usize;
                    valid_sigs.len() >= required_count
                } else {
                    // Use all context members
                    let required_count = (total_members as f64 * percentage).ceil() as usize;
                    signatories.len() >= required_count
                }
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_multi_sig_unanimous() {
        let multi_sig = MultiSigRequirement::Unanimous {
            required_signers: vec!["alice".to_string(), "bob".to_string()],
        };

        // Both signers present - should pass
        assert!(multi_sig.check(&["alice".to_string(), "bob".to_string()], 3));

        // Only one signer - should fail
        assert!(!multi_sig.check(&["alice".to_string()], 3));
    }

    #[test]
    fn test_multi_sig_threshold() {
        let multi_sig = MultiSigRequirement::Threshold {
            min_signatures: 2,
            allowed_signers: Some(vec!["alice".to_string(), "bob".to_string(), "charlie".to_string()]),
        };

        // 2 signatures - should pass
        assert!(multi_sig.check(&["alice".to_string(), "bob".to_string()], 3));

        // 1 signature - should fail
        assert!(!multi_sig.check(&["alice".to_string()], 3));
    }

    #[test]
    fn test_multi_sig_percentage() {
        let multi_sig = MultiSigRequirement::Percentage {
            percentage: 0.66, // 66%
            allowed_signers: Some(vec![
                "alice".to_string(),
                "bob".to_string(),
                "charlie".to_string(),
            ]),
        };

        // 2 out of 3 (66%) - should pass
        assert!(multi_sig.check(&["alice".to_string(), "bob".to_string()], 3));

        // 1 out of 3 (33%) - should fail
        assert!(!multi_sig.check(&["alice".to_string()], 3));
    }
}
```
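One subtlety in the `Percentage` arm worth spelling out: the threshold is computed with `ceil`, so fractional requirements round up. With three allowed signers and `percentage = 0.66`, the requirement is ceil(3 × 0.66) = ceil(1.98) = 2 signatures, which is why the test above passes with two signers and fails with one. A minimal check of that arithmetic:

```rust
fn main() {
    // Mirrors the required_count computation in MultiSigRequirement::check.
    let allowed = 3usize;
    let percentage = 0.66f64;
    let required = (allowed as f64 * percentage).ceil() as usize;
    assert_eq!(required, 2); // ceil(1.98) = 2
    println!("required signatures: {required}");
}
```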
lib/models/context/src/lib.rs (new file, 343 lines)
```rust
//! Context Model
//!
//! A Context represents an isolated instance/workspace where users can:
//! - Store and retrieve objects (via Osiris)
//! - Execute SALs (System Abstraction Layer functions)
//! - Collaborate with specific permissions
//!
//! The Context is the authorization boundary - all operations go through it
//! and are subject to ACL checks.

use serde::{Deserialize, Serialize};
use std::collections::HashMap;

pub mod access;
pub mod rhai;

pub use access::*;

/// A Context represents an isolated workspace with ACL-controlled access
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Context {
    /// Human-readable name
    pub name: String,

    /// Description of this context's purpose
    pub description: Option<String>,

    /// List of admin public keys - only admins can modify ACLs
    pub admins: Vec<String>,

    /// Global permissions (RWX) - what can users do in this context?
    /// Maps public key -> list of global permissions
    pub global_permissions: HashMap<String, Vec<GlobalPermission>>,

    /// Per-object-type ACLs - fine-grained control over data operations
    /// Maps object type (e.g., "notes", "events") -> ACL configuration
    pub object_acls: HashMap<String, ObjectAcl>,

    /// SAL ACLs - binary permission (can call or not)
    /// Maps SAL name (e.g., "launch_vm", "send_email") -> ACL configuration
    pub sal_acls: HashMap<String, SalAcl>,
}

impl Default for Context {
    fn default() -> Self {
        Self {
            name: String::new(),
            description: None,
            admins: Vec::new(),
            global_permissions: HashMap::new(),
            object_acls: HashMap::new(),
            sal_acls: HashMap::new(),
        }
    }
}

impl Context {
    /// Create a new context with a name and initial admin
    pub fn new(name: String, admin: String) -> Self {
        Self {
            name,
            description: None,
            admins: vec![admin],
            global_permissions: HashMap::new(),
            object_acls: HashMap::new(),
            sal_acls: HashMap::new(),
        }
    }

    /// Check if a user is an admin
    pub fn is_admin(&self, pubkey: &str) -> bool {
        self.admins.contains(&pubkey.to_string())
    }

    /// Check if a user has a global permission
    pub fn has_global_permission(&self, pubkey: &str, permission: &GlobalPermission) -> bool {
        self.global_permissions
            .get(pubkey)
            .map(|perms| perms.contains(permission))
            .unwrap_or(false)
    }

    /// Check if a user has permission for an object type
    pub fn has_object_permission(
        &self,
        pubkey: &str,
        object_type: &str,
        permission: &ObjectPermission,
    ) -> bool {
        self.object_acls
            .get(object_type)
            .and_then(|acl| acl.permissions.get(pubkey))
            .map(|perms| perms.contains(permission))
            .unwrap_or(false)
    }

    /// Check if a user can call a SAL
    pub fn can_call_sal(&self, pubkey: &str, sal_name: &str) -> bool {
        self.sal_acls
            .get(sal_name)
            .map(|acl| acl.allowed_callers.contains(&pubkey.to_string()))
            .unwrap_or(false)
    }

    /// Check if signatories satisfy multi-sig requirements for an object
    pub fn check_object_multi_sig(
        &self,
        signatories: &[String],
        object_type: &str,
    ) -> bool {
        if let Some(acl) = self.object_acls.get(object_type) {
            if let Some(multi_sig) = &acl.multi_sig {
                return multi_sig.check(signatories, self.global_permissions.len());
            }
        }
        // No multi-sig requirement
        true
    }

    /// Check if signatories satisfy multi-sig requirements for a SAL
    pub fn check_sal_multi_sig(
        &self,
        signatories: &[String],
        sal_name: &str,
    ) -> bool {
        if let Some(acl) = self.sal_acls.get(sal_name) {
            if let Some(multi_sig) = &acl.multi_sig {
                return multi_sig.check(signatories, self.global_permissions.len());
            }
        }
        // No multi-sig requirement
        true
    }

    /// Add an admin (only admins can call this)
    pub fn add_admin(&mut self, caller: &str, new_admin: String) -> Result<(), String> {
        if !self.is_admin(caller) {
            return Err("Only admins can add admins".to_string());
        }
        if !self.admins.contains(&new_admin) {
            self.admins.push(new_admin);
        }
        Ok(())
    }

    /// Grant a global permission to a user (only admins can call this)
    pub fn grant_global_permission(
        &mut self,
        caller: &str,
        pubkey: String,
        permission: GlobalPermission,
    ) -> Result<(), String> {
        if !self.is_admin(caller) {
            return Err("Only admins can grant permissions".to_string());
        }
        self.global_permissions
            .entry(pubkey)
            .or_insert_with(Vec::new)
            .push(permission);
        Ok(())
    }

    /// Grant an object permission to a user (only admins can call this)
    pub fn grant_object_permission(
        &mut self,
        caller: &str,
        pubkey: String,
        object_type: String,
        permission: ObjectPermission,
    ) -> Result<(), String> {
        if !self.is_admin(caller) {
            return Err("Only admins can grant permissions".to_string());
        }
        self.object_acls
            .entry(object_type)
            .or_insert_with(|| ObjectAcl {
                permissions: HashMap::new(),
                multi_sig: None,
            })
            .permissions
            .entry(pubkey)
            .or_insert_with(Vec::new)
            .push(permission);
        Ok(())
    }

    /// Grant SAL access to a user (only admins can call this)
    pub fn grant_sal_access(
        &mut self,
        caller: &str,
        pubkey: String,
        sal_name: String,
    ) -> Result<(), String> {
        if !self.is_admin(caller) {
            return Err("Only admins can grant SAL access".to_string());
        }
        self.sal_acls
            .entry(sal_name)
            .or_insert_with(|| SalAcl {
                allowed_callers: Vec::new(),
                multi_sig: None,
            })
            .allowed_callers
            .push(pubkey);
        Ok(())
    }

    /// Set multi-sig requirement for an object (only admins can call this)
    pub fn set_object_multi_sig(
        &mut self,
        caller: &str,
        object_type: String,
        multi_sig: MultiSigRequirement,
    ) -> Result<(), String> {
        if !self.is_admin(caller) {
            return Err("Only admins can set multi-sig requirements".to_string());
        }
        self.object_acls
            .entry(object_type)
            .or_insert_with(|| ObjectAcl {
                permissions: HashMap::new(),
                multi_sig: None,
            })
            .multi_sig = Some(multi_sig);
        Ok(())
    }

    /// Set multi-sig requirement for a SAL (only admins can call this)
    pub fn set_sal_multi_sig(
        &mut self,
        caller: &str,
        sal_name: String,
        multi_sig: MultiSigRequirement,
    ) -> Result<(), String> {
        if !self.is_admin(caller) {
            return Err("Only admins can set multi-sig requirements".to_string());
        }
        self.sal_acls
            .entry(sal_name)
            .or_insert_with(|| SalAcl {
                allowed_callers: Vec::new(),
                multi_sig: None,
            })
            .multi_sig = Some(multi_sig);
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_context_creation() {
        let ctx = Context::new("Test Context".to_string(), "admin_pk".to_string());
        assert_eq!(ctx.name, "Test Context");
        assert!(ctx.is_admin("admin_pk"));
    }

    #[test]
    fn test_admin_permissions() {
        let mut ctx = Context::new("Test".to_string(), "admin".to_string());

        // Admin can add another admin
        assert!(ctx.add_admin("admin", "admin2".to_string()).is_ok());
        assert!(ctx.is_admin("admin2"));

        // Non-admin cannot add admin
        assert!(ctx.add_admin("user1", "admin3".to_string()).is_err());
    }

    #[test]
    fn test_global_permissions() {
        let mut ctx = Context::new("Test".to_string(), "admin".to_string());

        // Admin can grant permissions
        assert!(ctx.grant_global_permission("admin", "user1".to_string(), GlobalPermission::Read).is_ok());
        assert!(ctx.has_global_permission("user1", &GlobalPermission::Read));
        assert!(!ctx.has_global_permission("user1", &GlobalPermission::Write));

        // Non-admin cannot grant permissions
        assert!(ctx.grant_global_permission("user1", "user2".to_string(), GlobalPermission::Read).is_err());
    }

    #[test]
    fn test_object_permissions() {
        let mut ctx = Context::new("Test".to_string(), "admin".to_string());

        // Admin can grant object permissions
        assert!(ctx.grant_object_permission("admin", "user1".to_string(), "notes".to_string(), ObjectPermission::Create).is_ok());
        assert!(ctx.has_object_permission("user1", "notes", &ObjectPermission::Create));
        assert!(!ctx.has_object_permission("user1", "notes", &ObjectPermission::Delete));
    }

    #[test]
    fn test_sal_permissions() {
        let mut ctx = Context::new("Test".to_string(), "admin".to_string());

        // Admin can grant SAL access
        assert!(ctx.grant_sal_access("admin", "user1".to_string(), "launch_vm".to_string()).is_ok());
        assert!(ctx.can_call_sal("user1", "launch_vm"));
        assert!(!ctx.can_call_sal("user1", "send_email"));
    }

    #[test]
    fn test_object_multi_sig_unanimous() {
        let mut ctx = Context::new("Test".to_string(), "admin".to_string());

        assert!(ctx.set_object_multi_sig(
            "admin",
            "sensitive_data".to_string(),
            MultiSigRequirement::Unanimous {
                required_signers: vec!["alice".to_string(), "bob".to_string()],
            },
        ).is_ok());

        // Both signers present - should pass
        assert!(ctx.check_object_multi_sig(&["alice".to_string(), "bob".to_string()], "sensitive_data"));

        // Only one signer - should fail
        assert!(!ctx.check_object_multi_sig(&["alice".to_string()], "sensitive_data"));
    }

    #[test]
    fn test_sal_multi_sig_threshold() {
        let mut ctx = Context::new("Test".to_string(), "admin".to_string());

        assert!(ctx.set_sal_multi_sig(
            "admin",
            "launch_vm".to_string(),
            MultiSigRequirement::Threshold {
                min_signatures: 2,
                allowed_signers: Some(vec!["alice".to_string(), "bob".to_string(), "charlie".to_string()]),
            },
        ).is_ok());

        // 2 signatures - should pass
        assert!(ctx.check_sal_multi_sig(&["alice".to_string(), "bob".to_string()], "launch_vm"));

        // 1 signature - should fail
        assert!(!ctx.check_sal_multi_sig(&["alice".to_string()], "launch_vm"));
    }
}
```
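Taken together, the Context API is meant to be driven by an admin key: create the context, grant permissions, optionally attach multi-sig rules, then query. A short end-to-end sketch using only the methods defined above (assuming the crate is imported as `hero_context`, per its Cargo.toml package name):

```rust
use hero_context::{Context, GlobalPermission, MultiSigRequirement, ObjectPermission};

fn main() -> Result<(), String> {
    let mut ctx = Context::new("Demo".to_string(), "admin_pk".to_string());

    // Admin grants a user read access plus note-creation rights.
    ctx.grant_global_permission("admin_pk", "user_pk".to_string(), GlobalPermission::Read)?;
    ctx.grant_object_permission("admin_pk", "user_pk".to_string(), "notes".to_string(), ObjectPermission::Create)?;

    // Operations on "notes" additionally require both named signers.
    ctx.set_object_multi_sig(
        "admin_pk",
        "notes".to_string(),
        MultiSigRequirement::Unanimous {
            required_signers: vec!["admin_pk".to_string(), "user_pk".to_string()],
        },
    )?;

    assert!(ctx.has_object_permission("user_pk", "notes", &ObjectPermission::Create));
    assert!(ctx.check_object_multi_sig(
        &["admin_pk".to_string(), "user_pk".to_string()],
        "notes",
    ));
    Ok(())
}
```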
lib/models/context/src/rhai.rs (new file, 327 lines; listing truncated below)
```rust
use ::rhai::plugin::*;
use ::rhai::{CustomType, Dynamic, Engine, EvalAltResult, Module, TypeBuilder};

use crate::Context;

// ============================================================================
// Context Module
// ============================================================================

type RhaiContext = Context;

#[export_module]
mod rhai_context_module {
    use super::RhaiContext;
    use crate::MultiSigRequirement;
    use ::rhai::{Dynamic, EvalAltResult};

    /// Create a new context with name and initial admin
    #[rhai_fn(name = "new_context", return_raw)]
    pub fn new_context(name: String, admin: String) -> Result<RhaiContext, Box<EvalAltResult>> {
        Ok(RhaiContext::new(name, admin))
    }

    /// Set context description
    #[rhai_fn(name = "description", return_raw)]
    pub fn set_description(
        ctx: &mut RhaiContext,
        description: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        ctx.description = Some(description);
        Ok(ctx.clone())
    }

    // ========== Admin Management ==========

    /// Check if a user is an admin
    #[rhai_fn(name = "is_admin")]
    pub fn is_admin(ctx: &mut RhaiContext, pubkey: String) -> bool {
        ctx.is_admin(&pubkey)
    }

    /// Add an admin (only admins can call this)
    #[rhai_fn(name = "add_admin", return_raw)]
    pub fn add_admin(
        ctx: &mut RhaiContext,
        caller: String,
        new_admin: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        ctx.add_admin(&caller, new_admin)
            .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    // ========== Global Permission Management (RWX) ==========

    /// Grant a global permission to a user (only admins can call this)
    #[rhai_fn(name = "grant_global_permission", return_raw)]
    pub fn grant_global_permission(
        ctx: &mut RhaiContext,
        caller: String,
        pubkey: String,
        permission: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let perm = parse_global_permission(&permission)?;
        ctx.grant_global_permission(&caller, pubkey, perm)
            .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Check if a user has a global permission
    #[rhai_fn(name = "has_global_permission", return_raw)]
    pub fn has_global_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        permission: String,
    ) -> Result<bool, Box<EvalAltResult>> {
        let perm = parse_global_permission(&permission)?;
        Ok(ctx.has_global_permission(&pubkey, &perm))
    }

    // ========== Object Permission Management ==========

    /// Grant an object permission to a user (only admins can call this)
    #[rhai_fn(name = "grant_object_permission", return_raw)]
    pub fn grant_object_permission(
        ctx: &mut RhaiContext,
        caller: String,
        pubkey: String,
        object_type: String,
        permission: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let perm = parse_object_permission(&permission)?;
        ctx.grant_object_permission(&caller, pubkey, object_type, perm)
            .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Check if a user has an object permission
    #[rhai_fn(name = "has_object_permission", return_raw)]
    pub fn has_object_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        object_type: String,
        permission: String,
    ) -> Result<bool, Box<EvalAltResult>> {
        let perm = parse_object_permission(&permission)?;
        Ok(ctx.has_object_permission(&pubkey, &object_type, &perm))
    }

    // ========== SAL Permission Management (Binary) ==========

    /// Grant SAL access to a user (only admins can call this)
    #[rhai_fn(name = "grant_sal_access", return_raw)]
    pub fn grant_sal_access(
        ctx: &mut RhaiContext,
        caller: String,
        pubkey: String,
        sal_name: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        ctx.grant_sal_access(&caller, pubkey, sal_name)
            .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Check if a user can call a SAL
    #[rhai_fn(name = "can_call_sal")]
    pub fn can_call_sal(ctx: &mut RhaiContext, pubkey: String, sal_name: String) -> bool {
        ctx.can_call_sal(&pubkey, &sal_name)
    }

    // ========== Multi-Sig Management for Objects ==========

    /// Set unanimous multi-sig requirement for an object (only admins can call this)
    #[rhai_fn(name = "set_object_multisig_unanimous", return_raw)]
    pub fn set_object_multisig_unanimous(
        ctx: &mut RhaiContext,
        caller: String,
        object_type: String,
        required_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers = parse_signers(required_signers)?;
        ctx.set_object_multi_sig(
            &caller,
            object_type,
            MultiSigRequirement::Unanimous { required_signers: signers },
        )
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Set threshold multi-sig requirement for an object (only admins can call this)
    #[rhai_fn(name = "set_object_multisig_threshold", return_raw)]
    pub fn set_object_multisig_threshold(
        ctx: &mut RhaiContext,
        caller: String,
        object_type: String,
        min_signatures: i64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers = parse_signers(allowed_signers)?;
        ctx.set_object_multi_sig(
            &caller,
            object_type,
            MultiSigRequirement::Threshold {
                min_signatures: min_signatures as usize,
                allowed_signers: Some(signers),
            },
        )
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Set percentage multi-sig requirement for an object (only admins can call this)
    #[rhai_fn(name = "set_object_multisig_percentage", return_raw)]
    pub fn set_object_multisig_percentage(
        ctx: &mut RhaiContext,
        caller: String,
        object_type: String,
        percentage: f64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        if percentage < 0.0 || percentage > 1.0 {
            return Err("Percentage must be between 0.0 and 1.0".into());
        }
        let signers = parse_signers(allowed_signers)?;
        ctx.set_object_multi_sig(
            &caller,
            object_type,
            MultiSigRequirement::Percentage {
                percentage,
                allowed_signers: Some(signers),
            },
        )
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    // ========== Multi-Sig Management for SALs ==========

    /// Set unanimous multi-sig requirement for a SAL (only admins can call this)
    #[rhai_fn(name = "set_sal_multisig_unanimous", return_raw)]
    pub fn set_sal_multisig_unanimous(
        ctx: &mut RhaiContext,
        caller: String,
        sal_name: String,
        required_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers = parse_signers(required_signers)?;
        ctx.set_sal_multi_sig(
            &caller,
            sal_name,
            MultiSigRequirement::Unanimous { required_signers: signers },
        )
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Set threshold multi-sig requirement for a SAL (only admins can call this)
    #[rhai_fn(name = "set_sal_multisig_threshold", return_raw)]
    pub fn set_sal_multisig_threshold(
        ctx: &mut RhaiContext,
        caller: String,
        sal_name: String,
        min_signatures: i64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers = parse_signers(allowed_signers)?;
        ctx.set_sal_multi_sig(
            &caller,
            sal_name,
            MultiSigRequirement::Threshold {
                min_signatures: min_signatures as usize,
                allowed_signers: Some(signers),
            },
        )
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    /// Set percentage multi-sig requirement for a SAL (only admins can call this)
    #[rhai_fn(name = "set_sal_multisig_percentage", return_raw)]
    pub fn set_sal_multisig_percentage(
        ctx: &mut RhaiContext,
        caller: String,
        sal_name: String,
        percentage: f64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        if percentage < 0.0 || percentage > 1.0 {
            return Err("Percentage must be between 0.0 and 1.0".into());
        }
        let signers = parse_signers(allowed_signers)?;
        ctx.set_sal_multi_sig(
            &caller,
            sal_name,
            MultiSigRequirement::Percentage {
                percentage,
                allowed_signers: Some(signers),
            },
        )
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;
        Ok(ctx.clone())
    }

    // ========== Getters ==========

    #[rhai_fn(name = "get_name")]
    pub fn get_name(ctx: &mut RhaiContext) -> String {
        ctx.name.clone()
    }

    #[rhai_fn(name = "get_description")]
    pub fn get_description(ctx: &mut RhaiContext) -> String {
        ctx.description.clone().unwrap_or_default()
    }
}

// Helper functions to parse permissions
fn parse_global_permission(permission: &str) -> Result<crate::GlobalPermission, Box<EvalAltResult>> {
    match permission {
        "read" => Ok(crate::GlobalPermission::Read),
        "write" => Ok(crate::GlobalPermission::Write),
        "execute" => Ok(crate::GlobalPermission::Execute),
        _ => Err(format!("Invalid global permission: {}", permission).into()),
    }
}

fn parse_object_permission(permission: &str) -> Result<crate::ObjectPermission, Box<EvalAltResult>> {
    match permission {
        "create" => Ok(crate::ObjectPermission::Create),
        "read" => Ok(crate::ObjectPermission::Read),
        "update" => Ok(crate::ObjectPermission::Update),
        "delete" => Ok(crate::ObjectPermission::Delete),
        "list" => Ok(crate::ObjectPermission::List),
        _ => Err(format!("Invalid object permission: {}", permission).into()),
    }
}

fn parse_signers(signers: Vec<Dynamic>) -> Result<Vec<String>, Box<EvalAltResult>> {
    signers
        .into_iter()
        .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
        .collect::<Result<Vec<String>, _>>()
        .map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))
}

impl CustomType for Context {
    fn build(mut builder: TypeBuilder<Self>) {
        builder.with_name("Context");
    }
}

/// Register the Context module with the Rhai engine
pub fn register_context_module(engine: &mut Engine) {
    let module = exported_module!(rhai_context_module);
    engine.register_static_module("context", module.into());
    engine.register_type::<Context>();
}

/// Register Context functions directly on the engine (for global access)
pub fn register_context_functions(engine: &mut Engine) {
```
|
||||
engine.register_type::<Context>();
|
||||
|
||||
// Register the module functions
|
||||
let module = exported_module!(rhai_context_module);
|
||||
engine.register_global_module(module.into());
|
||||
}
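For orientation, a minimal sketch of driving this exported API from a Rhai script (assuming `register_context_functions` is in scope as above, and that a `new_context(name, admin)` constructor is registered as in the legacy module below; the pubkeys and object type are illustrative):

```rust
use rhai::Engine;

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let mut engine = Engine::new();
    // Expose the Context API as global functions on this engine.
    register_context_functions(&mut engine);

    // Require 2-of-3 signatures on "payment" objects, then read the name back.
    // `new_context` is assumed to be registered (see the legacy module below).
    let name: String = engine.eval(
        r#"
            let ctx = new_context("prod", "admin_pk");
            ctx.set_object_multisig_threshold("admin_pk", "payment", 2, ["pk_a", "pk_b", "pk_c"]);
            ctx.get_name()
        "#,
    )?;
    assert_eq!(name, "prod");
    Ok(())
}
```

Note that the setters take the caller's pubkey as the first script argument, so the admin check is enforced inside `set_object_multi_sig` rather than by the scripting layer.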
333
lib/models/context/src/rhai_old.rs
Normal file
@@ -0,0 +1,333 @@
use ::rhai::plugin::*;
use ::rhai::{CustomType, Dynamic, Engine, EvalAltResult, Module, TypeBuilder};

use crate::Context;

// ============================================================================
// Context Module
// ============================================================================

type RhaiContext = Context;

#[export_module]
mod rhai_context_module {
    use super::RhaiContext;
    use crate::{GlobalPermission, MultiSigRequirement, ObjectPermission};
    use ::rhai::{Dynamic, EvalAltResult};

    /// Create a new context with name and initial admin
    #[rhai_fn(name = "new_context", return_raw)]
    pub fn new_context(name: String, admin: String) -> Result<RhaiContext, Box<EvalAltResult>> {
        Ok(RhaiContext::new(name, admin))
    }

    /// Set context description
    #[rhai_fn(name = "description", return_raw)]
    pub fn set_description(
        ctx: &mut RhaiContext,
        description: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        ctx.description = Some(description);
        Ok(ctx.clone())
    }

    // ========== Global Permission Management ==========

    /// Grant a global permission to a user
    #[rhai_fn(name = "grant_permission", return_raw)]
    pub fn grant_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        permission: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let perm = match permission.as_str() {
            "read" => Permission::Read,
            "write" => Permission::Write,
            "delete" => Permission::Delete,
            "execute" => Permission::Execute,
            "admin" => Permission::Admin,
            "invite" => Permission::Invite,
            _ => return Err(format!("Invalid permission: {}", permission).into()),
        };
        ctx.grant_permission(pubkey, perm);
        Ok(ctx.clone())
    }

    /// Check if a user has a global permission
    #[rhai_fn(name = "has_permission", return_raw)]
    pub fn has_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        permission: String,
    ) -> Result<bool, Box<EvalAltResult>> {
        let perm = match permission.as_str() {
            "read" => Permission::Read,
            "write" => Permission::Write,
            "delete" => Permission::Delete,
            "execute" => Permission::Execute,
            "admin" => Permission::Admin,
            "invite" => Permission::Invite,
            _ => return Err(format!("Invalid permission: {}", permission).into()),
        };
        Ok(ctx.has_permission(&pubkey, &perm))
    }

    // ========== Object Permission Management ==========

    /// Grant an object permission to a user
    #[rhai_fn(name = "grant_object_permission", return_raw)]
    pub fn grant_object_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        object_type: String,
        permission: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let perm = parse_resource_permission(&permission)?;
        ctx.grant_resource_permission(pubkey, object_type, perm, false);
        Ok(ctx.clone())
    }

    /// Check if a user has an object permission
    #[rhai_fn(name = "has_object_permission", return_raw)]
    pub fn has_object_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        object_type: String,
        permission: String,
    ) -> Result<bool, Box<EvalAltResult>> {
        let perm = parse_resource_permission(&permission)?;
        Ok(ctx.has_resource_permission(&pubkey, &object_type, &perm, false))
    }

    // ========== SAL Permission Management ==========

    /// Grant a SAL permission to a user
    #[rhai_fn(name = "grant_sal_permission", return_raw)]
    pub fn grant_sal_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        sal_name: String,
        permission: String,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let perm = parse_resource_permission(&permission)?;
        ctx.grant_resource_permission(pubkey, sal_name, perm, true);
        Ok(ctx.clone())
    }

    /// Check if a user has a SAL permission
    #[rhai_fn(name = "has_sal_permission", return_raw)]
    pub fn has_sal_permission(
        ctx: &mut RhaiContext,
        pubkey: String,
        sal_name: String,
        permission: String,
    ) -> Result<bool, Box<EvalAltResult>> {
        let perm = parse_resource_permission(&permission)?;
        Ok(ctx.has_resource_permission(&pubkey, &sal_name, &perm, true))
    }

    // ========== Multi-Sig Management ==========

    /// Set unanimous multi-sig requirement for an object
    #[rhai_fn(name = "set_object_multisig_unanimous", return_raw)]
    pub fn set_object_multisig_unanimous(
        ctx: &mut RhaiContext,
        object_type: String,
        required_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers: Result<Vec<String>, _> = required_signers
            .into_iter()
            .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
            .collect();

        let signers = signers.map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;

        ctx.set_multi_sig(
            object_type,
            MultiSigRequirement::Unanimous { required_signers: signers },
            false,
        );
        Ok(ctx.clone())
    }

    /// Set threshold multi-sig requirement for an object
    #[rhai_fn(name = "set_object_multisig_threshold", return_raw)]
    pub fn set_object_multisig_threshold(
        ctx: &mut RhaiContext,
        object_type: String,
        min_signatures: i64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers: Result<Vec<String>, _> = allowed_signers
            .into_iter()
            .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
            .collect();

        let signers = signers.map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;

        ctx.set_multi_sig(
            object_type,
            MultiSigRequirement::Threshold {
                min_signatures: min_signatures as usize,
                allowed_signers: Some(signers),
            },
            false,
        );
        Ok(ctx.clone())
    }

    /// Set percentage multi-sig requirement for an object
    #[rhai_fn(name = "set_object_multisig_percentage", return_raw)]
    pub fn set_object_multisig_percentage(
        ctx: &mut RhaiContext,
        object_type: String,
        percentage: f64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        if percentage < 0.0 || percentage > 1.0 {
            return Err("Percentage must be between 0.0 and 1.0".into());
        }

        let signers: Result<Vec<String>, _> = allowed_signers
            .into_iter()
            .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
            .collect();

        let signers = signers.map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;

        ctx.set_multi_sig(
            object_type,
            MultiSigRequirement::Percentage {
                percentage,
                allowed_signers: Some(signers),
            },
            false,
        );
        Ok(ctx.clone())
    }

    /// Set unanimous multi-sig requirement for a SAL
    #[rhai_fn(name = "set_sal_multisig_unanimous", return_raw)]
    pub fn set_sal_multisig_unanimous(
        ctx: &mut RhaiContext,
        sal_name: String,
        required_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers: Result<Vec<String>, _> = required_signers
            .into_iter()
            .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
            .collect();

        let signers = signers.map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;

        ctx.set_multi_sig(
            sal_name,
            MultiSigRequirement::Unanimous { required_signers: signers },
            true,
        );
        Ok(ctx.clone())
    }

    /// Set threshold multi-sig requirement for a SAL
    #[rhai_fn(name = "set_sal_multisig_threshold", return_raw)]
    pub fn set_sal_multisig_threshold(
        ctx: &mut RhaiContext,
        sal_name: String,
        min_signatures: i64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        let signers: Result<Vec<String>, _> = allowed_signers
            .into_iter()
            .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
            .collect();

        let signers = signers.map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;

        ctx.set_multi_sig(
            sal_name,
            MultiSigRequirement::Threshold {
                min_signatures: min_signatures as usize,
                allowed_signers: Some(signers),
            },
            true,
        );
        Ok(ctx.clone())
    }

    /// Set percentage multi-sig requirement for a SAL
    #[rhai_fn(name = "set_sal_multisig_percentage", return_raw)]
    pub fn set_sal_multisig_percentage(
        ctx: &mut RhaiContext,
        sal_name: String,
        percentage: f64,
        allowed_signers: Vec<Dynamic>,
    ) -> Result<RhaiContext, Box<EvalAltResult>> {
        if percentage < 0.0 || percentage > 1.0 {
            return Err("Percentage must be between 0.0 and 1.0".into());
        }

        let signers: Result<Vec<String>, _> = allowed_signers
            .into_iter()
            .map(|d| d.into_string().map_err(|e| format!("Invalid signer: {:?}", e)))
            .collect();

        let signers = signers.map_err(|e| Box::new(EvalAltResult::ErrorRuntime(e.into(), rhai::Position::NONE)))?;

        ctx.set_multi_sig(
            sal_name,
            MultiSigRequirement::Percentage {
                percentage,
                allowed_signers: Some(signers),
            },
            true,
        );
        Ok(ctx.clone())
    }

    // ========== Getters ==========

    #[rhai_fn(name = "get_name")]
    pub fn get_name(ctx: &mut RhaiContext) -> String {
        ctx.name.clone()
    }

    #[rhai_fn(name = "get_description")]
    pub fn get_description(ctx: &mut RhaiContext) -> String {
        ctx.description.clone().unwrap_or_default()
    }
}

// Helper function to parse resource permissions
fn parse_resource_permission(permission: &str) -> Result<crate::ResourcePermission, Box<EvalAltResult>> {
    match permission {
        "create" => Ok(crate::ResourcePermission::Create),
        "read" => Ok(crate::ResourcePermission::Read),
        "update" => Ok(crate::ResourcePermission::Update),
        "delete" => Ok(crate::ResourcePermission::Delete),
        "list" => Ok(crate::ResourcePermission::List),
        "execute" => Ok(crate::ResourcePermission::Execute),
        _ => Err(format!("Invalid resource permission: {}", permission).into()),
    }
}

impl CustomType for Context {
    fn build(mut builder: TypeBuilder<Self>) {
        builder.with_name("Context");
    }
}

/// Register the Context module with the Rhai engine
pub fn register_context_module(engine: &mut Engine) {
    let module = exported_module!(rhai_context_module);
    engine.register_static_module("context", module.into());
    engine.register_type::<Context>();
}

/// Register Context functions directly on the engine (for global access)
pub fn register_context_functions(engine: &mut Engine) {
    engine.register_type::<Context>();

    // Register the module functions
    let module = exported_module!(rhai_context_module);
    engine.register_global_module(module.into());
}
@@ -200,3 +200,213 @@ fn is_offsetdatetime_type(ty: &Type) -> bool {
    }
    false
}

/// Derive macro for generating CRUD client methods for Osiris models
///
/// This macro generates async CRUD methods (create, get, update, delete, list) for a model,
/// plus any custom methods defined on the model.
///
/// # Example
///
/// ```rust
/// #[derive(OsirisModel)]
/// #[osiris(
///     collection = "calendar_events",
///     id_field = "event_id",
///     methods = ["reschedule", "cancel"]
/// )]
/// pub struct CalendarEvent {
///     pub event_id: String,
///     pub title: String,
///     pub start_time: i64,
///     // ...
/// }
/// ```
///
/// This generates methods on OsirisClient:
/// - `create_calendar_event(&self, event: CalendarEvent) -> Result<CalendarEvent>`
/// - `get_calendar_event(&self, event_id: &str) -> Result<CalendarEvent>`
/// - `update_calendar_event(&self, event_id: &str, event: CalendarEvent) -> Result<CalendarEvent>`
/// - `delete_calendar_event(&self, event_id: &str) -> Result<()>`
/// - `list_calendar_events(&self) -> Result<Vec<CalendarEvent>>`
/// - `reschedule_calendar_event(&self, event_id: &str, new_time: i64) -> Result<CalendarEvent>`
/// - `cancel_calendar_event(&self, event_id: &str) -> Result<CalendarEvent>`
#[proc_macro_derive(OsirisModel, attributes(osiris))]
pub fn derive_osiris_model(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);

    let model_name = &input.ident;
    let model_name_snake = to_snake_case(&model_name.to_string());

    // Parse attributes
    let mut collection = model_name_snake.clone();
    let mut id_field = "id".to_string();
    let mut custom_methods: Vec<String> = Vec::new();

    for attr in &input.attrs {
        if attr.path().is_ident("osiris") {
            if let Ok(meta_list) = attr.parse_args::<syn::MetaList>() {
                // Parse nested attributes
                for nested in meta_list.tokens.clone() {
                    let nested_str = nested.to_string();
                    if nested_str.starts_with("collection") {
                        if let Some(val) = extract_string_value(&nested_str) {
                            collection = val;
                        }
                    } else if nested_str.starts_with("id_field") {
                        if let Some(val) = extract_string_value(&nested_str) {
                            id_field = val;
                        }
                    } else if nested_str.starts_with("methods") {
                        custom_methods = extract_array_values(&nested_str);
                    }
                }
            }
        }
    }

    // Generate method names
    let create_method = syn::Ident::new(&format!("create_{}", model_name_snake), model_name.span());
    let get_method = syn::Ident::new(&format!("get_{}", model_name_snake), model_name.span());
    let update_method = syn::Ident::new(&format!("update_{}", model_name_snake), model_name.span());
    let delete_method = syn::Ident::new(&format!("delete_{}", model_name_snake), model_name.span());
    let list_method = syn::Ident::new(&format!("list_{}s", model_name_snake), model_name.span());

    // Generate custom method implementations
    let custom_method_impls: Vec<_> = custom_methods.iter().map(|method_name| {
        let method_ident = syn::Ident::new(&format!("{}_{}", method_name, model_name_snake), model_name.span());
        let rhai_call = format!("{}_{}", model_name_snake, method_name);

        quote! {
            pub async fn #method_ident(&self, id: &str, params: serde_json::Value) -> Result<#model_name, OsirisClientError> {
                let script = format!(
                    r#"
                    let obj = {}::get("{}");
                    obj.{}(params);
                    obj.save();
                    obj
                    "#,
                    #collection, id, #method_name
                );

                let response = self.execute_script(&script).await?;
                // Parse response and return model
                // This is a simplified version - actual implementation would parse the job result
                Err(OsirisClientError::CommandFailed("Not yet implemented".to_string()))
            }
        }
    }).collect();

    let expanded = quote! {
        impl OsirisClient {
            /// Create a new instance of #model_name
            pub async fn #create_method(&self, model: &#model_name) -> Result<#model_name, OsirisClientError> {
                let json = serde_json::to_string(model)
                    .map_err(|e| OsirisClientError::SerializationFailed(e.to_string()))?;

                let script = format!(
                    r#"
                    let data = {};
                    let obj = {}::new(data);
                    obj.save();
                    obj
                    "#,
                    json, #collection
                );

                let response = self.execute_script(&script).await?;
                // Parse response - simplified for now
                Err(OsirisClientError::CommandFailed("Not yet implemented".to_string()))
            }

            /// Get an instance of #model_name by ID
            pub async fn #get_method(&self, id: &str) -> Result<#model_name, OsirisClientError> {
                let query = format!(r#"{{ "{}": "{}" }}"#, #id_field, id);
                self.query::<#model_name>(#collection, &query).await
            }

            /// Update an existing #model_name
            pub async fn #update_method(&self, id: &str, model: &#model_name) -> Result<#model_name, OsirisClientError> {
                let json = serde_json::to_string(model)
                    .map_err(|e| OsirisClientError::SerializationFailed(e.to_string()))?;

                let script = format!(
                    r#"
                    let obj = {}::get("{}");
                    let data = {};
                    obj.update(data);
                    obj.save();
                    obj
                    "#,
                    #collection, id, json
                );

                let response = self.execute_script(&script).await?;
                Err(OsirisClientError::CommandFailed("Not yet implemented".to_string()))
            }

            /// Delete an instance of #model_name
            pub async fn #delete_method(&self, id: &str) -> Result<(), OsirisClientError> {
                let script = format!(
                    r#"
                    let obj = {}::get("{}");
                    obj.delete();
                    "#,
                    #collection, id
                );

                self.execute_script(&script).await?;
                Ok(())
            }

            /// List all instances of #model_name
            pub async fn #list_method(&self) -> Result<Vec<#model_name>, OsirisClientError> {
                self.query_all::<#model_name>(#collection).await
            }

            #(#custom_method_impls)*
        }
    };

    TokenStream::from(expanded)
}

fn to_snake_case(s: &str) -> String {
    let mut result = String::new();
    for (i, ch) in s.chars().enumerate() {
        if ch.is_uppercase() {
            if i > 0 {
                result.push('_');
            }
            result.push(ch.to_lowercase().next().unwrap());
        } else {
            result.push(ch);
        }
    }
    result
}

fn extract_string_value(s: &str) -> Option<String> {
    // Extract value from "key = \"value\"" format
    if let Some(eq_pos) = s.find('=') {
        let value_part = s[eq_pos + 1..].trim();
        let cleaned = value_part.trim_matches(|c| c == '"' || c == ' ');
        return Some(cleaned.to_string());
    }
    None
}

fn extract_array_values(s: &str) -> Vec<String> {
    // Extract values from "methods = [\"method1\", \"method2\"]" format
    if let Some(start) = s.find('[') {
        if let Some(end) = s.find(']') {
            let array_content = &s[start + 1..end];
            return array_content
                .split(',')
                .map(|item| item.trim().trim_matches('"').to_string())
                .filter(|item| !item.is_empty())
                .collect();
        }
    }
    Vec::new()
}
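For a sense of the generated surface, here is a hypothetical call site. The `CalendarEvent` model and its `reschedule` custom method follow the doc comment above; `OsirisClient` construction and the concrete error plumbing are assumptions for illustration, not verified against the real client crate:

```rust
// Hypothetical usage of the methods the derive macro generates for
// `CalendarEvent` (collection = "calendar_events", id_field = "event_id",
// methods = ["reschedule", "cancel"]).
async fn demo(client: &OsirisClient) -> Result<(), OsirisClientError> {
    let event = client.get_calendar_event("evt-123").await?;   // generated getter
    let all = client.list_calendar_events().await?;            // generated list
    println!("loaded '{}', {} events total", event.title, all.len());

    // Generated custom method: signature is (&self, id: &str, params: serde_json::Value)
    let params = serde_json::json!({ "new_time": 1_700_000_000 });
    let _updated = client.reschedule_calendar_event("evt-123", params).await?;

    client.delete_calendar_event("evt-123").await?;            // generated delete
    Ok(())
}
```

Note that, per the macro body above, `create`, `update`, and the custom methods currently return `CommandFailed("Not yet implemented")` after executing the script; only `get`, `list`, and `delete` are wired end to end.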
1
priv_key.bin
Normal file
@@ -0,0 +1 @@
(binary data, not shown)
@@ -1,6 +1,5 @@
!!include configure.md
# Horus Installation

// Install all components
!!coordinator.install name:'development'
!!supervisor.install name:'development'
!!herorunner.install name:'development'

@@ -2,11 +2,9 @@

Starts all horus binaries

!!include install.md

// Start all services
!!herocoordinator.start name:'default'
!!supervisor.start name:'default'
!!herorunner.start name:'default'
!!osirisrunner.start name:'default'
!!salrunner.start name:'default'
!!herocoordinator.start name:'development'
!!supervisor.start name:'development'
!!herorunner.start name:'development'
!!osirisrunner.start name:'development'
!!salrunner.start name:'development'

170
tests/README.md
Normal file
@@ -0,0 +1,170 @@
# End-to-End Integration Tests

This directory contains end-to-end integration tests for the Horus system components. Each test file spawns the actual binary and tests it via its client library.

## Test Files

### `coordinator.rs`
End-to-end tests for the Hero Coordinator service.

**Tests:**
- Actor creation and loading
- Context creation and management
- Runner registration and configuration
- Job creation with dependencies
- Flow creation and DAG generation
- Flow execution (start)

**Prerequisites:**
- Redis server running on `127.0.0.1:6379`
- Ports `9652` (HTTP API) and `9653` (WebSocket API) available

**Run:**
```bash
cargo test --test coordinator -- --test-threads=1
```

### `supervisor.rs`
End-to-end tests for the Hero Supervisor service.

**Tests:**
- OpenRPC discovery
- Runner registration and management
- Job creation and execution
- Job status tracking
- API key generation and management
- Authentication verification
- Complete workflow integration

**Prerequisites:**
- Redis server running on `127.0.0.1:6379`
- Port `3031` available

**Run:**
```bash
cargo test --test supervisor -- --test-threads=1
```

### `runner_hero.rs`
End-to-end tests for the Hero (Python) runner.

**Prerequisites:**
- Python 3 installed
- Redis server running

**Run:**
```bash
cargo test --test runner_hero -- --test-threads=1
```

### `runner_osiris.rs`
End-to-end tests for the Osiris (V language) runner.

**Prerequisites:**
- V language compiler installed
- Redis server running

**Run:**
```bash
cargo test --test runner_osiris -- --test-threads=1
```

### `runner_sal.rs`
End-to-end tests for the Sal (Rhai scripting) runner.

**Prerequisites:**
- Redis server running

**Run:**
```bash
cargo test --test runner_sal -- --test-threads=1
```

## Running All Tests

To run all end-to-end tests sequentially:

```bash
cargo test --tests -- --test-threads=1
```

## Important Notes

### Sequential Execution Required

All tests **must** be run with `--test-threads=1` because:
1. Each test spawns a server process that binds to specific ports
2. Tests share Redis databases and may conflict if run in parallel
3. Process cleanup needs to happen sequentially

### Redis Requirement

All tests require a Redis server running on `127.0.0.1:6379`. You can start Redis with:

```bash
redis-server
```

Or using Docker:

```bash
docker run -d -p 6379:6379 redis:latest
```

### Port Conflicts

If tests fail to start, check that the required ports are not in use:

- **Coordinator**: 9652 (HTTP), 9653 (WebSocket)
- **Supervisor**: 3031
- **Runners**: various ports depending on configuration

You can check port usage with:

```bash
lsof -i :9652
lsof -i :3031
```

### Test Isolation

Each test file:
1. Builds the binary using `escargot`
2. Starts the process with test-specific configuration
3. Runs tests against the running instance
4. Cleans up the process at the end

Tests within a file may share state through Redis, so they are designed to be idempotent and to handle existing data gracefully, as sketched below.
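For example, setup helpers lean on create-or-load semantics so that a rerun against a warm Redis does not fail on pre-existing data. A sketch, assuming the client types re-exported by `hero_coordinator_client` as used in `tests/coordinator.rs` below (the IDs are illustrative):

```rust
use hero_coordinator_client::{CoordinatorClient, models::*};
use std::collections::HashMap;

// Sketch: create-or-load makes setup safe to repeat across test runs.
async fn setup_flow(client: &CoordinatorClient) -> Flow {
    let flow_create = FlowCreate {
        id: 13001, // illustrative; real tests use shared constants
        caller_id: 11001,
        context_id: 2,
        jobs: vec![20000],
        env_vars: HashMap::new(),
    };
    client
        .flow_create_or_load(2, flow_create)
        .await
        .expect("create-or-load tolerates pre-existing data")
}
```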

### Debugging

To see detailed logs during test execution:

```bash
RUST_LOG=debug cargo test --test coordinator -- --test-threads=1 --nocapture
```

To run a specific test:

```bash
cargo test --test coordinator test_01_flow_create_simple -- --test-threads=1 --nocapture
```

## Test Architecture

Each test file follows this pattern (sketched below):

1. **Global Process Management**: Uses `lazy_static` and `Once` to ensure the server process starts only once
2. **Setup Helper**: Common setup code (e.g., `setup_prerequisites()`) to reduce duplication
3. **Sequential Tests**: Tests are numbered (e.g., `test_01_`, `test_02_`) to indicate execution order
4. **Cleanup Test**: A final `test_zz_cleanup()` ensures the process is terminated and ports are freed
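Condensed, the shared skeleton looks roughly like this (following `tests/coordinator.rs` below; details vary per test file):

```rust
use std::process::Child;
use std::sync::{Mutex, Once};
use lazy_static::lazy_static;

lazy_static! {
    static ref SERVER: Mutex<Option<Child>> = Mutex::new(None);
}
static INIT: Once = Once::new();

fn init_server() {
    INIT.call_once(|| {
        // 1) build the binary with escargot, 2) spawn it with test config,
        // 3) poll its TCP port until it accepts connections, 4) stash the Child.
    });
}

#[tokio::test]
async fn test_01_first_case() {
    init_server();
    // ... exercise the running instance via its client library ...
}

#[tokio::test]
async fn test_zz_cleanup() {
    // Kill the stored child process and wait on it so the port is freed.
    if let Some(mut child) = SERVER.lock().unwrap().take() {
        let _ = child.kill();
        let _ = child.wait();
    }
}
```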

## Contributing

When adding new tests:

1. Follow the existing naming convention (`test_NN_description`)
2. Use the setup helpers to avoid duplication
3. Make tests idempotent (handle existing data gracefully)
4. Add cleanup in the `test_zz_cleanup()` function
5. Update this README with any new prerequisites or test descriptions
331
tests/coordinator.rs
Normal file
@@ -0,0 +1,331 @@
//! End-to-End Integration Tests for Hero Coordinator
//!
//! Tests coordinator flow management functionality against a running coordinator instance.
//! The coordinator binary is automatically started and stopped for each test run.
//!
//! **IMPORTANT**: Run with `--test-threads=1` to ensure tests run sequentially:
//! ```
//! cargo test --test coordinator -- --test-threads=1
//! ```

use hero_coordinator_client::{CoordinatorClient, models::*};
use std::collections::HashMap;
use std::sync::Once;
use std::process::Child;

/// Test configuration
const COORDINATOR_URL: &str = "http://127.0.0.1:9652";
const TEST_CONTEXT_ID: u32 = 2;
const TEST_CALLER_ID: u32 = 11001;
const TEST_FLOW_ID: u32 = 13001;
const BASE_JOB_ID: u32 = 20000;

use std::sync::Mutex;
use lazy_static::lazy_static;

lazy_static! {
    static ref COORDINATOR_PROCESS: Mutex<Option<Child>> = Mutex::new(None);
}

/// Global initialization flag
static INIT: Once = Once::new();

/// Initialize and start the coordinator binary (called once)
async fn init_coordinator() {
    INIT.call_once(|| {
        // Register cleanup handler
        let _ = std::panic::catch_unwind(|| {
            ctrlc::set_handler(move || {
                cleanup_coordinator();
                std::process::exit(0);
            }).ok();
        });

        // Use escargot to build and get the binary path
        let binary = escargot::CargoBuild::new()
            .bin("coordinator")
            .package("hero-coordinator")
            .run()
            .expect("Failed to build coordinator binary");

        // Start the coordinator binary with HTTP transport (no mycelium needed)
        let child = binary
            .command()
            .env("RUST_LOG", "info")
            .args(&[
                "--api-http-port",
                "9652",
                "--api-ws-port",
                "9653",
                "--redis-addr",
                "127.0.0.1:6379",
                "--supervisor-transport",
                "http",
            ])
            .spawn()
            .expect("Failed to start coordinator");

        *COORDINATOR_PROCESS.lock().unwrap() = Some(child);

        // Wait for server to be ready with simple TCP check
        use std::net::TcpStream;
        use std::time::Duration;

        println!("⏳ Waiting for coordinator to start...");

        for i in 0..30 {
            std::thread::sleep(Duration::from_millis(500));

            // Try to connect to the port
            if TcpStream::connect_timeout(
                &"127.0.0.1:9652".parse().unwrap(),
                Duration::from_millis(100)
            ).is_ok() {
                // Give it more time to fully initialize
                std::thread::sleep(Duration::from_secs(2));
                println!("✅ Coordinator ready after ~{}ms", (i * 500) + 2000);
                return;
            }
        }

        panic!("Coordinator failed to start within 15 seconds");
    });
}

/// Cleanup coordinator process
fn cleanup_coordinator() {
    if let Ok(mut guard) = COORDINATOR_PROCESS.lock() {
        if let Some(mut child) = guard.take() {
            println!("🧹 Cleaning up coordinator process...");
            let _ = child.kill();
            let _ = child.wait();
        }
    }
}

/// Helper to create a test client
async fn create_client() -> CoordinatorClient {
    // Ensure coordinator is running
    init_coordinator().await;

    CoordinatorClient::new(COORDINATOR_URL)
        .expect("Failed to create coordinator client")
}

#[tokio::test]
async fn test_01_flow_create_simple() {
    println!("\n🧪 Test: flow.create (simple flow)");

    let client = create_client().await;

    // Note: Jobs should be created by the supervisor, not the coordinator
    // For this test, we'll create a flow with job IDs that may not exist yet
    // In a real scenario, jobs would be created by the supervisor first
    let job_ids = vec![BASE_JOB_ID, BASE_JOB_ID + 1];

    // Create flow
    let flow_create = FlowCreate {
        id: TEST_FLOW_ID,
        caller_id: TEST_CALLER_ID,
        context_id: TEST_CONTEXT_ID,
        jobs: job_ids.clone(),
        env_vars: HashMap::new(),
    };

    let result = client.flow_create_or_load(TEST_CONTEXT_ID, flow_create).await;

    if let Err(ref e) = result {
        println!(" Error: {:?}", e);
    }
    assert!(result.is_ok(), "flow.create_or_load should succeed");
    let flow = result.unwrap();

    assert_eq!(flow.id, TEST_FLOW_ID);
    assert_eq!(flow.jobs, job_ids);
    println!("✅ flow.create works - flow: {}, jobs: {:?}", flow.id, flow.jobs);
}

#[tokio::test]
async fn test_02_flow_load() {
    println!("\n🧪 Test: flow.load");

    let client = create_client().await;

    // Create a flow first (reuse from test_01)
    let job_ids = vec![BASE_JOB_ID, BASE_JOB_ID + 1];

    let flow_create = FlowCreate {
        id: TEST_FLOW_ID,
        caller_id: TEST_CALLER_ID,
        context_id: TEST_CONTEXT_ID,
        jobs: job_ids.clone(),
        env_vars: HashMap::new(),
    };
    let _ = client.flow_create_or_load(TEST_CONTEXT_ID, flow_create).await;

    // Load the flow
    let result = client.flow_load(TEST_CONTEXT_ID, TEST_FLOW_ID).await;

    if let Err(ref e) = result {
        println!(" Error: {:?}", e);
    }
    assert!(result.is_ok(), "flow.load should succeed");
    let flow = result.unwrap();

    assert_eq!(flow.id, TEST_FLOW_ID);
    assert_eq!(flow.jobs, job_ids);
    println!("✅ flow.load works - loaded flow: {}", flow.id);
}

#[tokio::test]
async fn test_03_flow_dag() {
    println!("\n🧪 Test: flow.dag");

    let client = create_client().await;

    // Note: Jobs should be created by the supervisor
    let job_ids = vec![BASE_JOB_ID + 100, BASE_JOB_ID + 101, BASE_JOB_ID + 102];

    let flow_id = TEST_FLOW_ID + 1;
    let flow_create = FlowCreate {
        id: flow_id,
        caller_id: TEST_CALLER_ID,
        context_id: TEST_CONTEXT_ID,
        jobs: job_ids.clone(),
        env_vars: HashMap::new(),
    };
    let _ = client.flow_create_or_load(TEST_CONTEXT_ID, flow_create).await;

    // Get the DAG
    let result = client.flow_dag(TEST_CONTEXT_ID, flow_id).await;

    if let Err(ref e) = result {
        println!(" Error: {:?}", e);
    }
    assert!(result.is_ok(), "flow.dag should succeed");
    let dag = result.unwrap();

    assert_eq!(dag.flow_id, flow_id);
    assert_eq!(dag.nodes.len(), 3);
    assert_eq!(dag.edges.len(), 2); // Two edges for the chain
    println!("✅ flow.dag works - flow: {}, nodes: {}, edges: {}",
        dag.flow_id, dag.nodes.len(), dag.edges.len());
}

#[tokio::test]
async fn test_04_flow_start() {
    println!("\n🧪 Test: flow.start");

    let client = create_client().await;

    // Create a simple flow
    let job_id = BASE_JOB_ID + 200;

    let flow_id = TEST_FLOW_ID + 2;
    let flow_create = FlowCreate {
        id: flow_id,
        caller_id: TEST_CALLER_ID,
        context_id: TEST_CONTEXT_ID,
        jobs: vec![job_id],
        env_vars: HashMap::new(),
    };
    let _ = client.flow_create_or_load(TEST_CONTEXT_ID, flow_create).await;

    // Start the flow
    let result = client.flow_start(TEST_CONTEXT_ID, flow_id).await;

    match result {
        Ok(started) => {
            println!("✅ flow.start works - started: {}", started);
        }
        Err(e) => {
            println!("⚠️ flow.start: {:?} (runner may not be available)", e);
            // This is expected if no actual runner is listening
        }
    }
}

#[tokio::test]
async fn test_05_message_create() {
    println!("\n🧪 Test: message.create");

    let client = create_client().await;

    let message_create = MessageCreate {
        id: 1,
        context_id: TEST_CONTEXT_ID,
        runner_id: 12001,
        job_id: BASE_JOB_ID,
        message_type: MessageType::JobRun,
        format: MessageFormatType::JsonRpc,
        payload: r#"{"method":"job.run","params":{}}"#.to_string(),
    };

    let result = client.message_create(TEST_CONTEXT_ID, message_create).await;

    match result {
        Ok(message) => {
            assert_eq!(message.id, 1);
            assert_eq!(message.context_id, TEST_CONTEXT_ID);
            println!("✅ message.create works - message: {}", message.id);
        }
        Err(e) => {
            println!("⚠️ message.create: {:?} (may already exist)", e);
        }
    }
}

#[tokio::test]
async fn test_06_message_load() {
    println!("\n🧪 Test: message.load");

    let client = create_client().await;

    // Create a message first
    let message_create = MessageCreate {
        id: 2,
        context_id: TEST_CONTEXT_ID,
        runner_id: 12001,
        job_id: BASE_JOB_ID,
        message_type: MessageType::JobRun,
        format: MessageFormatType::JsonRpc,
        payload: r#"{"method":"job.run","params":{}}"#.to_string(),
    };
    let _ = client.message_create(TEST_CONTEXT_ID, message_create).await;

    // Load the message
    let result = client.message_load(TEST_CONTEXT_ID, 2).await;

    if let Err(ref e) = result {
        println!(" Error: {:?}", e);
    }

    match result {
        Ok(message) => {
            assert_eq!(message.id, 2);
            assert_eq!(message.context_id, TEST_CONTEXT_ID);
            println!("✅ message.load works - loaded message: {}", message.id);
        }
        Err(_) => {
            println!("⚠️ message.load failed (message may not exist)");
        }
    }
}

/// Final test that ensures cleanup happens
#[tokio::test]
async fn test_zz_cleanup() {
    println!("🧹 Running cleanup...");
    cleanup_coordinator();

    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;

    use std::net::TcpStream;
    let port_free = TcpStream::connect_timeout(
        &"127.0.0.1:9652".parse().unwrap(),
        std::time::Duration::from_millis(100)
    ).is_err();

    assert!(port_free, "Port 9652 should be free after cleanup");
    println!("✅ Cleanup complete - port 9652 is free");
}