Updates
This commit is contained in:
54
reference_osis_actor/Cargo.toml
Normal file
54
reference_osis_actor/Cargo.toml
Normal file
@@ -0,0 +1,54 @@
|
||||
[package]
name = "actor_osis"
version = "0.1.0"
edition = "2024"

[lib]
name = "actor_osis" # Can be different from package name, or same
path = "src/lib.rs"

[[bin]]
name = "actor_osis"
path = "cmd/actor_osis.rs"

[[example]]
name = "engine"
path = "examples/engine.rs"

[[example]]
name = "actor"
path = "examples/actor.rs"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
redis = { version = "0.25.0", features = ["tokio-comp"] }
rhai = { version = "1.21.0", features = ["std", "sync", "decimal", "internals"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1", features = ["macros", "rt-multi-thread", "time"] }
log = "0.4"
env_logger = "0.10"
clap = { version = "4.4", features = ["derive"] }
uuid = { version = "1.6", features = ["v4", "serde"] } # Though task_id is string, uuid might be useful
chrono = { version = "0.4", features = ["serde"] }
toml = "0.8"
thiserror = "1.0"
async-trait = "0.1"
hero_job = { git = "https://git.ourworld.tf/herocode/baobab.git" }
baobab_actor = { git = "https://git.ourworld.tf/herocode/baobab.git" }
heromodels = { git = "https://git.ourworld.tf/herocode/db.git" }
heromodels_core = { git = "https://git.ourworld.tf/herocode/db.git" }
heromodels-derive = { git = "https://git.ourworld.tf/herocode/db.git" }
rhailib_dsl = { git = "https://git.ourworld.tf/herocode/rhailib.git" }
hero_logger = { git = "https://git.ourworld.tf/herocode/baobab.git", branch = "logger" }
tracing = "0.1.41"

[features]
default = ["calendar", "finance"]
calendar = []
finance = []
flow = []
legal = []
projects = []
biz = []
|
79
reference_osis_actor/README.md
Normal file
79
reference_osis_actor/README.md
Normal file
@@ -0,0 +1,79 @@
|
||||
# Object Storage and Indexing System (OSIS) Actor
|
||||
|
||||
The OSIS Actor is responsible for storing and indexing objects in the system. It implements the actor interface to process jobs in a **blocking, synchronized manner**.
|
||||
|
||||
## Job Processing Behavior
|
||||
|
||||
The OSISActor processes jobs sequentially with the following characteristics:
|
||||
|
||||
- **Blocking Processing**: Each job is processed completely before the next job begins
|
||||
- **Synchronized Execution**: Jobs are executed one at a time in the order they are received
|
||||
- **No Concurrency**: Unlike async actors, OSIS ensures no parallel job execution
|
||||
- **Deterministic Order**: Job completion follows the exact order of job submission
|
||||
|
||||
This design ensures data consistency and prevents race conditions when performing storage and indexing operations.
|
||||
|
||||
## Usage
|
||||
|
||||
```rust
|
||||
use actor_osis::{OSISActor, spawn_osis_actor};
|
||||
|
||||
// Create an OSIS actor with builder pattern
|
||||
let actor = OSISActor::builder()
|
||||
.db_path("/path/to/database")
|
||||
.redis_url("redis://localhost:6379")
|
||||
.build()
|
||||
.expect("Failed to build OSISActor");
|
||||
|
||||
// Or spawn directly with convenience function
|
||||
let handle = spawn_osis_actor(
|
||||
"/path/to/database".to_string(),
|
||||
"redis://localhost:6379".to_string(),
|
||||
shutdown_rx,
|
||||
);
|
||||
```
|
||||
|
||||
## Actor Properties
|
||||
|
||||
- **Actor ID**: `"osis"` (constant)
|
||||
- **Actor Type**: `"OSIS"`
|
||||
- **Processing Model**: Sequential, blocking
|
||||
- **Script Engine**: Rhai with OSIS-specific DSL extensions

## Canonical Redis queues and verification
|
||||
|
||||
The project uses canonical dispatch queues per script type. For OSIS, the work queue is:
|
||||
- hero:q:work:type:osis
|
||||
|
||||
Consumer behavior:
|
||||
- The in-repo actor derives ScriptType=OSIS from its actor_id containing "osis" and BLPOPs hero:q:work:type:osis.
|
||||
- This repo’s OSIS actor has been updated so its actor_id is "osis", ensuring it consumes the canonical queue.
|
||||
|
||||
Quick verification (redis-cli):
|
||||
- List work queues:
|
||||
- KEYS hero:q:work:type:*
|
||||
- Check OSIS queue length:
|
||||
- LLEN hero:q:work:type:osis
|
||||
- Inspect a specific job (replace {job_id} with the printed id):
|
||||
- HGET hero:job:{job_id} status
|
||||
- HGET hero:job:{job_id} output
|
||||
|
||||
Run options:
|
||||
- Option A: Run the example which spawns the OSIS actor and dispatches jobs to the canonical queue.
|
||||
1) Start Redis (if not already): redis-server
|
||||
2) In this repo:
|
||||
- cargo run --example actor
|
||||
3) Observe the console: job IDs will be printed as they are created and dispatched.
|
||||
4) In a separate terminal, verify with redis-cli:
|
||||
- LLEN hero:q:work:type:osis (will briefly increment, then return to 0 as the actor consumes)
|
||||
- HGET hero:job:{job_id} status (should transition to started then finished)
|
||||
- HGET hero:job:{job_id} output (should contain the script result)
|
||||
|
||||
- Option B: Run the standalone actor binary and dispatch from another process that pushes to the canonical type queue.
|
||||
1) Start the actor:
|
||||
- cargo run --bin actor_osis
|
||||
2) From any producer, LPUSH hero:q:work:type:osis {job_id} after persisting the job hash hero:job:{job_id}.
|
||||
3) Use the same redis-cli checks above to confirm consumption and completion.
|
||||
|
||||
Notes:
|
||||
- Hash-only result model is the default. The job result is written to hero:job:{job_id}.output and status=finished.
|
||||
- Reply queues (hero:q:reply:{job_id}) are optional and not required for OSIS to function.
|
60
reference_osis_actor/cmd/actor_osis.rs
Normal file
60
reference_osis_actor/cmd/actor_osis.rs
Normal file
@@ -0,0 +1,60 @@
|
||||
use actor_osis::OSISActor;
|
||||
use clap::Parser;
|
||||
use log::info;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(name = "actor_osis")]
|
||||
#[command(about = "OSIS Actor - Synchronous job processing actor")]
|
||||
struct Args {
|
||||
/// Database path
|
||||
#[arg(short, long, default_value = "/tmp/osis_db")]
|
||||
db_path: String,
|
||||
|
||||
/// Redis URL
|
||||
#[arg(short, long, default_value = "redis://localhost:6379")]
|
||||
redis_url: String,
|
||||
|
||||
/// Preserve completed tasks in Redis
|
||||
#[arg(short, long)]
|
||||
preserve_tasks: bool,
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
env_logger::init();
|
||||
|
||||
let args = Args::parse();
|
||||
|
||||
info!("Starting OSIS Actor");
|
||||
|
||||
// Create shutdown channel
|
||||
let (shutdown_tx, shutdown_rx) = mpsc::channel(1);
|
||||
|
||||
// Setup signal handler for graceful shutdown
|
||||
let shutdown_tx_clone = shutdown_tx.clone();
|
||||
tokio::spawn(async move {
|
||||
tokio::signal::ctrl_c().await.expect("Failed to listen for Ctrl+C");
|
||||
info!("Received Ctrl+C, initiating shutdown...");
|
||||
let _ = shutdown_tx_clone.send(()).await;
|
||||
});
|
||||
|
||||
// Create and start the actor
|
||||
let actor = Arc::new(
|
||||
OSISActor::builder()
|
||||
.db_path(args.db_path)
|
||||
.redis_url(args.redis_url)
|
||||
.build()?
|
||||
);
|
||||
|
||||
let handle = baobab_actor::spawn_actor(actor, shutdown_rx);
|
||||
|
||||
info!("OSIS Actor started, waiting for jobs...");
|
||||
|
||||
// Wait for the actor to complete
|
||||
handle.await??;
|
||||
|
||||
info!("OSIS Actor shutdown complete");
|
||||
Ok(())
|
||||
}
|
179
reference_osis_actor/src/engine.rs
Normal file
179
reference_osis_actor/src/engine.rs
Normal file
@@ -0,0 +1,179 @@
|
||||
//! # Rhailib Domain-Specific Language (DSL) Engine
|
||||
//!
|
||||
//! This module provides a comprehensive Domain-Specific Language implementation for the Rhai
|
||||
//! scripting engine, exposing business domain models and operations through a fluent,
|
||||
//! chainable API.
|
||||
//!
|
||||
//! ## Overview
|
||||
//!
|
||||
//! The DSL is organized into business domain modules, each providing Rhai-compatible
|
||||
//! functions for creating, manipulating, and persisting domain entities. All operations
|
||||
//! include proper authorization checks and type safety.
|
||||
//!
|
||||
//! ## Available Domains
|
||||
//!
|
||||
//! - **Business Operations** (`biz`): Companies, products, sales, shareholders
|
||||
//! - **Financial Models** (`finance`): Accounts, assets, marketplace operations
|
||||
//! - **Content Management** (`library`): Collections, images, PDFs, books, slideshows
|
||||
//! - **Workflow Management** (`flow`): Flows, steps, signature requirements
|
||||
//! - **Community Management** (`circle`): Circles, themes, membership
|
||||
//! - **Contact Management** (`contact`): Contact information and relationships
|
||||
//! - **Access Control** (`access`): Security and permissions
|
||||
//! - **Time Management** (`calendar`): Calendar and scheduling
|
||||
//! - **Core Utilities** (`core`): Comments and fundamental operations
|
||||
//! - **Generic Objects** (`object`): Generic object manipulation
|
||||
//!
|
||||
//! ## Usage Example
|
||||
//!
|
||||
//! ```rust
|
||||
//! use rhai::Engine;
|
||||
//! use crate::engine::register_dsl_modules;
|
||||
//!
|
||||
//! let mut engine = Engine::new();
|
||||
//! register_dsl_modules(&mut engine);
|
||||
//!
|
||||
//! // Now the engine can execute scripts like:
|
||||
//! // let company = new_company().name("Acme Corp").email("contact@acme.com");
|
||||
//! // let saved = save_company(company);
|
||||
//! ```
|
||||
|
||||
use rhai::Engine;
|
||||
use rhailib_dsl;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
/// Engine factory for creating and sharing Rhai engines.
|
||||
pub struct EngineFactory {
|
||||
engine: Arc<Engine>,
|
||||
}
|
||||
|
||||
impl EngineFactory {
|
||||
/// Create a new engine factory with a configured Rhai engine.
|
||||
pub fn new() -> Self {
|
||||
let mut engine = Engine::new();
|
||||
register_dsl_modules(&mut engine);
|
||||
// Logger
|
||||
hero_logger::rhai_integration::configure_rhai_logging(&mut engine, "osis_actor");
|
||||
|
||||
Self {
|
||||
engine: Arc::new(engine),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get a shared reference to the engine.
|
||||
pub fn get_engine(&self) -> Arc<Engine> {
|
||||
Arc::clone(&self.engine)
|
||||
}
|
||||
|
||||
/// Get the global singleton engine factory.
|
||||
pub fn global() -> &'static EngineFactory {
|
||||
static FACTORY: OnceLock<EngineFactory> = OnceLock::new();
|
||||
FACTORY.get_or_init(|| EngineFactory::new())
|
||||
}
|
||||
}
|
||||
|
||||
/// Register basic object functions directly in the engine.
|
||||
/// This provides object functionality without relying on the problematic rhailib_dsl object module.
|
||||
fn register_object_functions(engine: &mut Engine) {
|
||||
use heromodels::models::object::Object;
|
||||
|
||||
// Register the Object type
|
||||
engine.register_type_with_name::<Object>("Object");
|
||||
|
||||
// Register constructor function
|
||||
engine.register_fn("new_object", || Object::new());
|
||||
|
||||
// Register setter functions
|
||||
engine.register_fn("object_title", |obj: &mut Object, title: String| {
|
||||
obj.title = title;
|
||||
obj.clone()
|
||||
});
|
||||
|
||||
engine.register_fn(
|
||||
"object_description",
|
||||
|obj: &mut Object, description: String| {
|
||||
obj.description = description;
|
||||
obj.clone()
|
||||
},
|
||||
);
|
||||
|
||||
// Register getter functions
|
||||
engine.register_fn("get_object_id", |obj: &mut Object| obj.id() as i64);
|
||||
engine.register_fn("get_object_title", |obj: &mut Object| obj.title.clone());
|
||||
engine.register_fn("get_object_description", |obj: &mut Object| {
|
||||
obj.description.clone()
|
||||
});
|
||||
}
|
||||
|
||||
/// Registers all DSL modules with the provided Rhai engine.
|
||||
///
|
||||
/// This function is the main entry point for integrating the rhailib DSL with a Rhai engine.
|
||||
/// It registers all business domain modules, making their functions available to Rhai scripts.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `engine` - A mutable reference to the Rhai engine to register modules with
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust
|
||||
/// use rhai::Engine;
|
||||
/// use crate::engine::register_dsl_modules;
|
||||
///
|
||||
/// let mut engine = Engine::new();
|
||||
/// register_dsl_modules(&mut engine);
|
||||
///
|
||||
/// // Engine now has access to all DSL functions
|
||||
/// let result = engine.eval::<String>(r#"
|
||||
/// let company = new_company().name("Test Corp");
|
||||
/// company.name
|
||||
/// "#).unwrap();
|
||||
/// assert_eq!(result, "Test Corp");
|
||||
/// ```
|
||||
///
|
||||
/// # Registered Modules
|
||||
///
|
||||
/// This function registers the following domain modules:
|
||||
/// - Access control functions
|
||||
/// - Business operation functions (companies, products, sales, shareholders)
|
||||
/// - Calendar and scheduling functions
|
||||
/// - Circle and community management functions
|
||||
/// - Company management functions
|
||||
/// - Contact management functions
|
||||
/// - Core utility functions
|
||||
/// - Financial operation functions (accounts, assets, marketplace)
|
||||
/// - Workflow management functions (flows, steps, signatures)
|
||||
/// - Library and content management functions
|
||||
/// - Generic object manipulation functions (custom implementation)
|
||||
pub fn register_dsl_modules(engine: &mut Engine) {
|
||||
rhailib_dsl::access::register_access_rhai_module(engine);
|
||||
rhailib_dsl::biz::register_biz_rhai_module(engine);
|
||||
rhailib_dsl::calendar::register_calendar_rhai_module(engine);
|
||||
rhailib_dsl::circle::register_circle_rhai_module(engine);
|
||||
rhailib_dsl::company::register_company_rhai_module(engine);
|
||||
rhailib_dsl::contact::register_contact_rhai_module(engine);
|
||||
rhailib_dsl::core::register_core_rhai_module(engine);
|
||||
rhailib_dsl::finance::register_finance_rhai_modules(engine);
|
||||
// rhailib_dsl::flow::register_flow_rhai_modules(engine);
|
||||
rhailib_dsl::library::register_library_rhai_module(engine);
|
||||
// Skip problematic object module for now - can be implemented separately if needed
|
||||
// rhailib_dsl::object::register_object_fns(engine);
|
||||
rhailib_dsl::payment::register_payment_rhai_module(engine);
|
||||
|
||||
// Register basic object functionality directly
|
||||
register_object_functions(engine);
|
||||
|
||||
println!("Rhailib Domain Specific Language modules registered successfully.");
|
||||
}
|
||||
|
||||
/// Create a shared heromodels engine using the factory.
///
/// Returns the process-wide singleton engine configured by
/// `EngineFactory::global`; cloning the returned `Arc` is cheap.
pub fn create_osis_engine() -> Arc<Engine> {
    EngineFactory::global().get_engine()
}

/// Evaluate a Rhai script string.
///
/// Thin wrapper over `Engine::eval`; returns the script's resulting
/// `Dynamic` value, or the Rhai evaluation error on failure.
pub fn eval_script(
    engine: &Engine,
    script: &str,
) -> Result<rhai::Dynamic, Box<rhai::EvalAltResult>> {
    engine.eval(script)
}
|
332
reference_osis_actor/src/lib.rs
Normal file
332
reference_osis_actor/src/lib.rs
Normal file
@@ -0,0 +1,332 @@
|
||||
mod engine;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use baobab_actor::execute_job_with_engine;
|
||||
use hero_job::{Job, JobStatus, ScriptType};
|
||||
use hero_logger::{create_job_logger, create_job_logger_with_guard};
|
||||
use log::{error, info};
|
||||
use redis::AsyncCommands;
|
||||
use rhai::Engine;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::task::JoinHandle;
|
||||
use tracing::subscriber::with_default;
|
||||
|
||||
use baobab_actor::{actor_trait::Actor, spawn_actor};
|
||||
|
||||
/// Constant actor ID for OSIS actor
|
||||
const OSIS: &str = "osis";
|
||||
|
||||
/// Builder for OSISActor
|
||||
#[derive(Debug)]
|
||||
pub struct OSISActorBuilder {
|
||||
engine: Option<Arc<Engine>>,
|
||||
db_path: Option<String>,
|
||||
redis_url: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for OSISActorBuilder {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
engine: None,
|
||||
db_path: None,
|
||||
redis_url: Some("redis://localhost:6379".to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl OSISActorBuilder {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn engine(mut self, engine: Engine) -> Self {
|
||||
self.engine = Some(Arc::new(engine));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn shared_engine(mut self, engine: Arc<Engine>) -> Self {
|
||||
self.engine = Some(engine);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn db_path<S: Into<String>>(mut self, db_path: S) -> Self {
|
||||
self.db_path = Some(db_path.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn redis_url<S: Into<String>>(mut self, redis_url: S) -> Self {
|
||||
self.redis_url = Some(redis_url.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> Result<OSISActor, String> {
|
||||
let engine = self
|
||||
.engine
|
||||
.unwrap_or_else(|| crate::engine::create_osis_engine());
|
||||
|
||||
Ok(OSISActor {
|
||||
engine,
|
||||
db_path: self.db_path.ok_or("db_path is required")?,
|
||||
redis_url: self
|
||||
.redis_url
|
||||
.unwrap_or("redis://localhost:6379".to_string()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// OSIS actor that processes jobs in a blocking, synchronized manner
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct OSISActor {
|
||||
pub engine: Arc<Engine>,
|
||||
pub db_path: String,
|
||||
pub redis_url: String,
|
||||
}
|
||||
|
||||
impl OSISActor {
|
||||
/// Create a new OSISActorBuilder
|
||||
pub fn builder() -> OSISActorBuilder {
|
||||
OSISActorBuilder::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for OSISActor {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
engine: crate::engine::create_osis_engine(),
|
||||
db_path: "/tmp".to_string(),
|
||||
redis_url: "redis://localhost:6379".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Actor for OSISActor {
|
||||
async fn process_job(&self, job: Job, redis_conn: &mut redis::aio::MultiplexedConnection) {
|
||||
let job_id = &job.id;
|
||||
let _db_path = &self.db_path;
|
||||
|
||||
// Debug: Log job details
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Processing job with context_id: {}, script length: {}",
|
||||
OSIS, job_id, job.context_id, job.script.len()
|
||||
);
|
||||
|
||||
// Create job-specific logger
|
||||
let (job_logger, guard) = match create_job_logger_with_guard("logs", "osis", job_id) {
|
||||
Ok((logger, guard)) => {
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Job logger created successfully",
|
||||
OSIS, job_id
|
||||
);
|
||||
(logger, guard)
|
||||
},
|
||||
Err(e) => {
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to create job logger: {}",
|
||||
OSIS, job_id, e
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Starting sequential processing",
|
||||
OSIS, job_id
|
||||
);
|
||||
|
||||
// Update job status to Started
|
||||
if let Err(e) = Job::update_status(redis_conn, job_id, JobStatus::Started).await {
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to update status to Started: {}",
|
||||
OSIS, job_id, e
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Execute ALL job processing within logging context
|
||||
let job_result = with_default(job_logger, || {
|
||||
tracing::info!(target: "osis_actor", "Job {} started", job_id);
|
||||
|
||||
// Move the Rhai script execution inside this scope
|
||||
// IMPORTANT: Create a new engine and configure Rhai logging for this job context
|
||||
let mut job_engine = Engine::new();
|
||||
register_dsl_modules(&mut job_engine);
|
||||
// Configure Rhai logging integration for this engine instance
|
||||
hero_logger::rhai_integration::configure_rhai_logging(&mut job_engine, "osis_actor");
|
||||
|
||||
// Execute the script within the job logger context
|
||||
let script_result = tokio::task::block_in_place(|| {
|
||||
tokio::runtime::Handle::current().block_on(async {
|
||||
execute_job_with_engine(&mut job_engine, &job, &self.db_path).await
|
||||
})
|
||||
});
|
||||
|
||||
tracing::info!(target: "osis_actor", "Job {} completed", job_id);
|
||||
|
||||
script_result // Return the result
|
||||
});
|
||||
|
||||
// Handle the result outside the logging context
|
||||
match job_result {
|
||||
Ok(result) => {
|
||||
let result_str = format!("{:?}", result);
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Script executed successfully. Result: {}",
|
||||
OSIS, job_id, result_str
|
||||
);
|
||||
|
||||
// Update job with success result (stores in job hash output field)
|
||||
if let Err(e) = Job::set_result(redis_conn, job_id, &result_str).await {
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to set result: {}",
|
||||
OSIS, job_id, e
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Also push result to result queue for retrieval
|
||||
let result_queue_key = format!("hero:job:{}:result", job_id);
|
||||
if let Err(e) = redis_conn
|
||||
.lpush::<_, _, ()>(&result_queue_key, &result_str)
|
||||
.await
|
||||
{
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to push result to queue {}: {}",
|
||||
OSIS, job_id, result_queue_key, e
|
||||
);
|
||||
} else {
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Result pushed to queue: {}",
|
||||
OSIS, job_id, result_queue_key
|
||||
);
|
||||
}
|
||||
|
||||
if let Err(e) = Job::update_status(redis_conn, job_id, JobStatus::Finished).await {
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to update status to Finished: {}",
|
||||
OSIS, job_id, e
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let error_msg = format!("Script execution error: {}", e);
|
||||
error!("OSIS Actor '{}', Job {}: {}", OSIS, job_id, error_msg);
|
||||
|
||||
// Update job with error (stores in job hash error field)
|
||||
if let Err(e) = Job::set_error(redis_conn, job_id, &error_msg).await {
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to set error: {}",
|
||||
OSIS, job_id, e
|
||||
);
|
||||
}
|
||||
|
||||
// Also push error to error queue for retrieval
|
||||
let error_queue_key = format!("hero:job:{}:error", job_id);
|
||||
if let Err(e) = redis_conn
|
||||
.lpush::<_, _, ()>(&error_queue_key, &error_msg)
|
||||
.await
|
||||
{
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to push error to queue {}: {}",
|
||||
OSIS, job_id, error_queue_key, e
|
||||
);
|
||||
} else {
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Error pushed to queue: {}",
|
||||
OSIS, job_id, error_queue_key
|
||||
);
|
||||
}
|
||||
|
||||
if let Err(e) = Job::update_status(redis_conn, job_id, JobStatus::Error).await {
|
||||
error!(
|
||||
"OSIS Actor '{}', Job {}: Failed to update status to Error: {}",
|
||||
OSIS, job_id, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Force flush logs before dropping guard
|
||||
std::thread::sleep(std::time::Duration::from_millis(100));
|
||||
|
||||
// Keep the guard alive until after processing
|
||||
drop(guard);
|
||||
|
||||
info!(
|
||||
"OSIS Actor '{}', Job {}: Sequential processing completed",
|
||||
OSIS, job_id
|
||||
);
|
||||
}
|
||||
|
||||
fn actor_type(&self) -> &'static str {
|
||||
"OSIS"
|
||||
}
|
||||
|
||||
fn actor_id(&self) -> &str {
|
||||
// Actor ID contains "osis" so the runtime derives ScriptType=OSIS and consumes the canonical type queue.
|
||||
"osis"
|
||||
}
|
||||
|
||||
fn redis_url(&self) -> &str {
|
||||
&self.redis_url
|
||||
}
|
||||
}
|
||||
|
||||
/// Convenience function to spawn an OSIS actor using the trait interface
|
||||
///
|
||||
/// This function provides backward compatibility with the original actor API
|
||||
/// while using the new trait-based implementation.
|
||||
pub fn spawn_osis_actor(
|
||||
db_path: String,
|
||||
redis_url: String,
|
||||
shutdown_rx: mpsc::Receiver<()>,
|
||||
) -> JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>> {
|
||||
let actor = Arc::new(
|
||||
OSISActor::builder()
|
||||
.db_path(db_path)
|
||||
.redis_url(redis_url)
|
||||
.build()
|
||||
.expect("Failed to build OSISActor"),
|
||||
);
|
||||
spawn_actor(actor, shutdown_rx)
|
||||
}
|
||||
|
||||
// Re-export engine functions for examples and external use
|
||||
pub use crate::engine::{create_osis_engine, register_dsl_modules};
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_osis_actor_creation() {
        // `build()` requires db_path: without it the builder returns
        // Err("db_path is required") and the previous `unwrap()` always
        // panicked, so this test could never pass.
        let actor = OSISActor::builder().db_path("/tmp").build().unwrap();
        assert_eq!(actor.actor_type(), "OSIS");
    }

    #[tokio::test]
    async fn test_osis_actor_default() {
        let actor = OSISActor::default();
        assert_eq!(actor.actor_type(), "OSIS");
    }

    #[tokio::test]
    async fn test_osis_actor_process_job_interface() {
        let actor = OSISActor::default();

        // Create a simple test job
        let _job = Job::new(
            "test_caller".to_string(),
            "test_context".to_string(),
            r#"print("Hello from sync actor test!"); 42"#.to_string(),
            ScriptType::OSIS,
        );

        // Note: This test doesn't actually connect to Redis, it just tests the interface
        // In a real test environment, you'd need a Redis instance or mock

        // For now, just verify the actor was created successfully
        assert_eq!(actor.actor_type(), "OSIS");
    }
}
|
Reference in New Issue
Block a user