Merge runners into a single project

This commit is contained in:
Timur Gordon
2025-09-09 15:42:20 +02:00
parent 89a3abee63
commit 629d59f7db
20 changed files with 2033 additions and 894 deletions

124
src/sync_runner.rs Normal file
View File

@@ -0,0 +1,124 @@
use hero_job::Job;
use log::{debug, error, info};
use rhai::{Dynamic, Engine};
use std::sync::Arc;
use crate::runner_trait::Runner;
/// Configuration for sync runner instances
#[derive(Debug, Clone)]
pub struct SyncRunnerConfig {
    /// Identifier for this runner instance; used in log messages and
    /// exposed via `Runner::runner_id`.
    pub runner_id: String,
    /// Path injected into each job's engine context under the "DB_PATH" key.
    pub db_path: String,
    /// Redis connection URL exposed via `Runner::redis_url`.
    pub redis_url: String,
    // NOTE(review): presumably controls whether finished jobs are retained
    // in Redis rather than cleaned up — confirm against runner_trait.
    pub preserve_tasks: bool,
}
/// Synchronous runner that processes jobs sequentially
pub struct SyncRunner {
    /// Runner settings (id, db path, redis URL, task retention flag).
    pub config: SyncRunnerConfig,
    /// Factory invoked once per job to produce a fresh Rhai `Engine`,
    /// so no engine state leaks between job executions.
    pub engine_factory: Arc<dyn Fn() -> Engine + Send + Sync>,
}
impl SyncRunner {
    /// Build a runner from its configuration and an engine factory.
    ///
    /// The factory is called once per job, so every script evaluation
    /// starts from a brand-new `Engine`.
    pub fn new<F>(config: SyncRunnerConfig, engine_factory: F) -> Self
    where
        F: Fn() -> Engine + Send + Sync + 'static,
    {
        let engine_factory = Arc::new(engine_factory);
        Self { config, engine_factory }
    }

    /// Evaluate a job's payload with the given engine.
    ///
    /// Stores the job context (DB_PATH, CALLER_ID, CONTEXT_ID) as a map in
    /// the engine's default tag before evaluation, then returns the
    /// script's result or the evaluation error.
    fn execute_job_with_engine(
        engine: &mut Engine,
        job: &Job,
        db_path: &str,
    ) -> Result<Dynamic, Box<rhai::EvalAltResult>> {
        // Expose the job context to the script through the engine tag.
        let mut db_config = rhai::Map::new();
        db_config.insert("DB_PATH".into(), db_path.to_string().into());
        db_config.insert("CALLER_ID".into(), job.caller_id.clone().into());
        db_config.insert("CONTEXT_ID".into(), job.context_id.clone().into());
        engine.set_default_tag(Dynamic::from(db_config));
        debug!("Sync Runner for Context ID '{}': Evaluating script with Rhai engine (job context set).", job.context_id);
        // Run the payload and hand the raw Dynamic result back to the caller.
        engine.eval::<Dynamic>(&job.payload)
    }
}
impl Runner for SyncRunner {
    /// Process a single job synchronously: build a fresh engine, evaluate
    /// the job's Rhai payload, and return the script output as a string.
    ///
    /// # Errors
    /// Returns the boxed Rhai evaluation error if the script fails.
    fn process_job(&self, job: Job) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
        let job_id = &job.id;
        let runner_id = &self.config.runner_id;
        debug!("Sync Runner '{}', Job {}: Processing started.", runner_id, job_id);
        // Bug fix: this line previously passed job.context_id into the
        // "Sync Runner '{}'" slot, mislabeling the context id as the runner
        // id. Use runner_id, matching the debug line above; the context id
        // is still reported in the completion/failure messages below.
        info!("Sync Runner '{}' processing job_id: {}. Script: {:.50}...", runner_id, job_id, job.payload);
        // A fresh engine per job keeps script state isolated between jobs.
        let mut engine = (self.engine_factory)();
        match Self::execute_job_with_engine(&mut engine, &job, &self.config.db_path) {
            Ok(result) => {
                // Unwrap String results directly so the output is the bare
                // string rather than Dynamic's Display form.
                let output_str = if result.is::<String>() {
                    result.into_string().unwrap()
                } else {
                    result.to_string()
                };
                info!("Sync Runner for Context ID '{}' job {} completed. Output: {}", job.context_id, job.id, output_str);
                Ok(output_str)
            }
            Err(e) => {
                let error_str = format!("{:?}", *e);
                error!("Sync Runner for Context ID '{}' job {} script evaluation failed. Error: {}", job.context_id, job.id, error_str);
                Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>)
            }
        }
    }

    /// Static label identifying the runner flavor.
    fn runner_type(&self) -> &'static str {
        "Sync"
    }

    /// Identifier of this runner instance.
    fn runner_id(&self) -> &str {
        &self.config.runner_id
    }

    /// Redis URL this runner was configured with.
    fn redis_url(&self) -> &str {
        &self.config.redis_url
    }
}
/// Convenience function to spawn a synchronous runner using the trait interface
///
/// Packs the arguments into a `SyncRunnerConfig`, wraps a new `SyncRunner`
/// in an `Arc`, and hands it to the generic `spawn_runner` loop, returning
/// the spawned task's join handle.
pub fn spawn_sync_runner<F>(
    runner_id: String,
    db_path: String,
    redis_url: String,
    shutdown_rx: tokio::sync::mpsc::Receiver<()>,
    preserve_tasks: bool,
    engine_factory: F,
) -> tokio::task::JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>>
where
    F: Fn() -> Engine + Send + Sync + 'static,
{
    let runner = SyncRunner::new(
        SyncRunnerConfig {
            runner_id,
            db_path,
            redis_url,
            preserve_tasks,
        },
        engine_factory,
    );
    crate::runner_trait::spawn_runner(Arc::new(runner), shutdown_rx)
}