Compare commits

5 commits:
- d7a7eae19e
- 2ab69f039c
- 0da7b9363c
- 78da9da539
- 9c4fa1a78b
Cargo.lock (generated): 213 changed lines
@@ -402,6 +402,28 @@ version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "async-trait"
 version = "0.1.88"
@@ -470,13 +492,12 @@ dependencies = [
  "async-trait",
  "chrono",
  "clap",
- "env_logger",
  "hero_job",
+ "hero_logger",
  "hero_supervisor",
  "heromodels 0.1.0 (git+https://git.ourworld.tf/herocode/db.git)",
  "heromodels-derive 0.1.0 (git+https://git.ourworld.tf/herocode/db.git)",
  "heromodels_core 0.1.0 (git+https://git.ourworld.tf/herocode/db.git)",
- "log",
  "redis 0.25.4",
  "rhai",
  "serde",
@@ -484,6 +505,8 @@ dependencies = [
  "thiserror 1.0.69",
  "tokio",
  "toml",
+ "tracing",
+ "tracing-subscriber",
  "uuid",
 ]
 
@@ -1639,16 +1662,16 @@ dependencies = [
  "anyhow",
  "chrono",
  "criterion",
- "env_logger",
+ "hero_logger",
  "hero_supervisor",
  "hero_websocket_server",
- "log",
  "redis 0.25.4",
  "rhai",
  "serde",
  "serde_json",
  "tempfile",
  "tokio",
+ "tracing",
  "uuid",
 ]
 
@@ -1712,6 +1735,25 @@ dependencies = [
  "uuid",
 ]
 
+[[package]]
+name = "hero_logger"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "chrono",
+ "rhai",
+ "serde",
+ "serde_json",
+ "tempfile",
+ "thiserror 1.0.69",
+ "tokio",
+ "tokio-test",
+ "tracing",
+ "tracing-appender",
+ "tracing-subscriber",
+ "tracing-test",
+]
+
 [[package]]
 name = "hero_supervisor"
 version = "0.1.0"
@@ -1721,9 +1763,7 @@ dependencies = [
  "clap",
  "colored",
  "crossterm",
- "env_logger",
  "hero_job",
- "log",
  "ratatui",
  "redis 0.25.4",
  "rhai",
@@ -1731,6 +1771,8 @@ dependencies = [
  "serde_json",
  "tokio",
  "toml",
+ "tracing",
+ "tracing-subscriber",
  "uuid",
  "zinit-client",
 ]
@@ -2737,6 +2779,15 @@ dependencies = [
  "hashbrown",
 ]
 
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
+
 [[package]]
 name = "md-5"
 version = "0.10.6"
@@ -2831,6 +2882,16 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
 [[package]]
 name = "num-conv"
 version = "0.1.0"
@@ -2950,6 +3011,12 @@ dependencies = [
  "thiserror 1.0.69",
 ]
 
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
 [[package]]
 name = "parking_lot"
 version = "0.12.4"
@@ -3392,8 +3459,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.9",
- "regex-syntax",
+ "regex-syntax 0.8.5",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
 ]
 
 [[package]]
@@ -3404,7 +3480,7 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.5",
 ]
 
 [[package]]
@@ -3413,6 +3489,12 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
 
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
+
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
@@ -3899,6 +3981,15 @@ dependencies = [
  "keccak",
 ]
 
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
+
 [[package]]
 name = "shlex"
 version = "1.3.0"
@@ -4206,6 +4297,15 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "thread_local"
+version = "1.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
+dependencies = [
+ "cfg-if",
+]
+
 [[package]]
 name = "time"
 version = "0.3.41"
@@ -4370,6 +4470,19 @@ dependencies = [
  "tokio-util",
 ]
 
+[[package]]
+name = "tokio-test"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
+dependencies = [
+ "async-stream",
+ "bytes",
+ "futures-core",
+ "tokio",
+ "tokio-stream",
+]
+
 [[package]]
 name = "tokio-tungstenite"
 version = "0.19.0"
@@ -4505,6 +4618,18 @@ dependencies = [
  "tracing-core",
 ]
 
+[[package]]
+name = "tracing-appender"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf"
+dependencies = [
+ "crossbeam-channel",
+ "thiserror 1.0.69",
+ "time",
+ "tracing-subscriber",
+]
+
 [[package]]
 name = "tracing-attributes"
 version = "0.1.30"
@@ -4523,6 +4648,70 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
 dependencies = [
  "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-serde"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1"
+dependencies = [
+ "serde",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
+dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "serde",
+ "serde_json",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+ "tracing-serde",
+]
+
+[[package]]
+name = "tracing-test"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68"
+dependencies = [
+ "tracing-core",
+ "tracing-subscriber",
+ "tracing-test-macro",
+]
+
+[[package]]
+name = "tracing-test-macro"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568"
+dependencies = [
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -4722,6 +4911,12 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "valuable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
+
 [[package]]
 name = "vcpkg"
 version = "0.2.15"
Cargo.toml: 11 changed lines
@@ -6,14 +6,14 @@ edition = "2024"
 [dependencies]
 anyhow = "1.0"
 chrono = { version = "0.4", features = ["serde"] }
-env_logger = "0.10"
+hero_logger = { path = "core/logger" }
 hero_supervisor = { path = "core/supervisor" }
 hero_websocket_server = { path = "interfaces/websocket/server" }
-log = "0.4"
 redis = { version = "0.25.0", features = ["tokio-comp"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 tokio = { version = "1", features = ["macros", "rt-multi-thread", "time", "sync", "signal"] }
+tracing = "0.1"
 rhai = "1.21.0"
 
 [dev-dependencies]
@@ -48,6 +48,9 @@ serde_json = "1.0"
 sha3 = "0.10"
 thiserror = "1.0"
 tokio = { version = "1", features = ["macros", "rt-multi-thread", "time", "sync", "signal"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "registry", "fmt"] }
+tracing-appender = "0.2"
 url = "2.5"
 uuid = { version = "1.6", features = ["v4", "serde"] }
 
@@ -59,7 +62,9 @@ members = [
     "interfaces/websocket/server",
     "core/supervisor",
     "core/actor",
-    "core/job", "interfaces/websocket/examples",
+    "core/job",
+    "core/logger",
+    "interfaces/websocket/examples",
     "proxies/http",
 ]
 resolver = "2" # Recommended for new workspaces
@ -22,7 +22,7 @@ Both examples demonstrate the ping/pong functionality built into the Hero actors
|
|||||||
|
|
||||||
2. **Rust Environment**: Make sure you can build the actor binaries
|
2. **Rust Environment**: Make sure you can build the actor binaries
|
||||||
```bash
|
```bash
|
||||||
cd /path/to/herocode/baobab/core/actor
|
cd /path/to/herocode/hero/core/actor
|
||||||
cargo build --bin osis --bin system
|
cargo build --bin osis --bin system
|
||||||
```
|
```
|
||||||
|
|
||||||
|
cmd/main.rs: 19 changed lines
@@ -4,17 +4,11 @@ use std::time::Duration;
 use hero_supervisor::{SupervisorBuilder, SupervisorError};
 use hero_websocket_server::ServerBuilder;
 use tokio::signal;
-use log::{info, error};
-use env_logger::Builder;
+use tracing::{info, error};
 
 /// The main entry point of the Hero Supervisor.
 #[tokio::main]
 async fn main() -> Result<(), Box<dyn std::error::Error>> {
-    // Initialize logging
-    env_logger::Builder::from_default_env()
-        .filter_level(log::LevelFilter::Info)
-        .init();
-
     info!("Hero Supervisor starting up...");
 
     // Get config path from command line arguments or use default
@@ -41,6 +35,17 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     let actor_configs = supervisor.get_actor_configs()?;
     info!("Loaded {} actor configurations from TOML", actor_configs.len());
 
+    // Initialize the system logger with all components
+    let mut system_components = vec!["supervisor".to_string()];
+    for config in &actor_configs {
+        system_components.push(config.name.clone()); // e.g., "osis_actor_1"
+    }
+
+    // Initialize the logger for all system components
+    let _logger_guards = hero_logger::init_system_logger("logs", &system_components)?;
+
+    info!(target: "supervisor", "System logger initialized with {} components", system_components.len());
+
     // Spawn the background lifecycle manager with 5-minute health check interval
     let health_check_interval = Duration::from_secs(5 * 60); // 5 minutes
     let mut lifecycle_handle = supervisor.clone().spawn_lifecycle_manager(actor_configs, health_check_interval);
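Taken together, these two hunks swap the env_logger setup for the hero_logger system logger. A condensed sketch of the resulting startup flow, assembled from the added lines above (supervisor construction elided, names as they appear in the diff):

```rust
use tracing::info;

// Inside async fn main(), once the supervisor and its actor configs exist:
// let actor_configs = supervisor.get_actor_configs()?;
let mut system_components = vec!["supervisor".to_string()];
for config in &actor_configs {
    system_components.push(config.name.clone());
}

// The returned guards flush buffered log lines on drop, so they must be
// held for the lifetime of the process (hence `_logger_guards` rather than
// a bare `_`, which would drop them immediately).
let _logger_guards = hero_logger::init_system_logger("logs", &system_components)?;
info!(target: "supervisor", "System logger initialized");
```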
@@ -6,11 +6,6 @@ edition = "2021"
 [lib]
 name = "baobab_actor" # Can be different from package name, or same
 path = "src/lib.rs"
-crate-type = ["cdylib", "rlib"]
-
-[[bin]]
-name = "baobab-actor-tui"
-path = "cmd/terminal_ui_main.rs"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
@@ -20,26 +15,21 @@ rhai = { version = "1.21.0", features = ["std", "sync", "decimal", "internals"]
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 tokio = { version = "1", features = ["macros", "rt-multi-thread", "time"] }
-log = "0.4"
-env_logger = "0.10"
+tracing = { version = "0.1", features = ["log"] }
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 clap = { version = "4.4", features = ["derive"] }
-uuid = { version = "1.6", features = ["v4", "serde"] }
+uuid = { version = "1.6", features = ["v4", "serde"] } # Though task_id is string, uuid might be useful
 chrono = { version = "0.4", features = ["serde"] }
 toml = "0.8"
 thiserror = "1.0"
 async-trait = "0.1"
-# TUI dependencies
-anyhow = "1.0"
-crossterm = "0.28"
-ratatui = "0.28"
 hero_supervisor = { path = "../supervisor" }
 hero_job = { path = "../job" }
+hero_logger = { path = "../logger" }
 heromodels = { git = "https://git.ourworld.tf/herocode/db.git" }
 heromodels_core = { git = "https://git.ourworld.tf/herocode/db.git" }
 heromodels-derive = { git = "https://git.ourworld.tf/herocode/db.git" }
-
-
 
 [features]
 default = ["calendar", "finance"]
 calendar = []
@@ -48,4 +38,3 @@ flow = []
 legal = []
 projects = []
 biz = []
-
@@ -73,12 +73,3 @@ Key dependencies include:
 - `clap`: For command-line argument parsing.
 - `tokio`: For the asynchronous runtime.
 - `log`, `env_logger`: For logging.
-
-## TUI Example
-
-```bash
-cargo run --example baobab-actor-tui -- --id osis --path /Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/target/debug/actor_osis --example-dir /Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/examples/scripts
-```
-
-The TUI will allow you to monitor the actor's job queue and dispatch new jobs to it.
-
File diff suppressed because it is too large.
@@ -1,146 +0,0 @@
-//! Simplified main function for Baobab Actor TUI
-//!
-//! This binary provides a clean entry point for the actor monitoring and job dispatch interface.
-
-use anyhow::{Result, Context};
-use baobab_actor::terminal_ui::{App, setup_and_run_tui};
-use clap::Parser;
-use log::{info, warn, error};
-use std::path::PathBuf;
-use std::process::{Child, Command};
-use tokio::signal;
-
-#[derive(Parser)]
-#[command(name = "baobab-actor-tui")]
-#[command(about = "Terminal UI for Baobab Actor - Monitor and dispatch jobs to a single actor")]
-struct Args {
-    /// Actor ID to monitor
-    #[arg(short, long)]
-    id: String,
-
-    /// Path to actor binary
-    #[arg(short, long)]
-    path: PathBuf,
-
-    /// Directory containing example .rhai scripts
-    #[arg(short, long)]
-    example_dir: Option<PathBuf>,
-
-    /// Redis URL for job queue
-    #[arg(short, long, default_value = "redis://localhost:6379")]
-    redis_url: String,
-
-    /// Enable verbose logging
-    #[arg(short, long)]
-    verbose: bool,
-}
-
-/// Initialize logging based on verbosity level
-fn init_logging(verbose: bool) {
-    if verbose {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Debug)
-            .init();
-    } else {
-        env_logger::Builder::from_default_env()
-            .filter_level(log::LevelFilter::Info)
-            .init();
-    }
-}
-
-/// Create and configure the TUI application
-fn create_app(args: &Args) -> Result<App> {
-    App::new(
-        args.id.clone(),
-        args.path.clone(),
-        args.redis_url.clone(),
-        args.example_dir.clone(),
-    )
-}
-
-/// Spawn the actor binary as a background process
-fn spawn_actor_process(args: &Args) -> Result<Child> {
-    info!("🎬 Spawning actor process: {}", args.path.display());
-
-    let mut cmd = Command::new(&args.path);
-
-    // Redirect stdout and stderr to null to prevent logs from interfering with TUI
-    cmd.stdout(std::process::Stdio::null())
-        .stderr(std::process::Stdio::null());
-
-    // Spawn the process
-    let child = cmd
-        .spawn()
-        .with_context(|| format!("Failed to spawn actor process: {}", args.path.display()))?;
-
-    info!("✅ Actor process spawned with PID: {}", child.id());
-    Ok(child)
-}
-
-/// Cleanup function to terminate actor process
-fn cleanup_actor_process(mut actor_process: Child) {
-    info!("🧹 Cleaning up actor process...");
-
-    match actor_process.try_wait() {
-        Ok(Some(status)) => {
-            info!("Actor process already exited with status: {}", status);
-        }
-        Ok(None) => {
-            info!("Terminating actor process...");
-            if let Err(e) = actor_process.kill() {
-                error!("Failed to kill actor process: {}", e);
-            } else {
-                match actor_process.wait() {
-                    Ok(status) => info!("Actor process terminated with status: {}", status),
-                    Err(e) => error!("Failed to wait for actor process: {}", e),
-                }
-            }
-        }
-        Err(e) => {
-            error!("Failed to check actor process status: {}", e);
-        }
-    }
-}
-
-#[tokio::main]
-async fn main() -> Result<()> {
-    let args = Args::parse();
-
-    // Initialize logging
-    init_logging(args.verbose);
-
-    info!("🚀 Starting Baobab Actor TUI...");
-    info!("Actor ID: {}", args.id);
-    info!("Actor Path: {}", args.path.display());
-    info!("Redis URL: {}", args.redis_url);
-
-    if let Some(ref example_dir) = args.example_dir {
-        info!("Example Directory: {}", example_dir.display());
-    }
-
-    // Spawn the actor process first
-    let actor_process = spawn_actor_process(&args)?;
-
-    // Give the actor a moment to start up
-    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
-
-    // Create app and run TUI
-    let app = create_app(&args)?;
-
-    // Set up signal handling for graceful shutdown
-    let result = tokio::select! {
-        tui_result = setup_and_run_tui(app) => {
-            info!("TUI exited");
-            tui_result
-        }
-        _ = signal::ctrl_c() => {
-            info!("Received Ctrl+C, shutting down...");
-            Ok(())
-        }
-    };
-
-    // Clean up the actor process
-    cleanup_actor_process(actor_process);
-
-    result
-}
@@ -28,9 +28,9 @@
 //! ```
 
 use hero_job::Job;
-use log::{debug, error, info};
+use tracing::{debug, error, info};
 use redis::AsyncCommands;
+use rhai::Engine;
 use std::sync::Arc;
 use std::time::Duration;
 use tokio::sync::mpsc;
@@ -1,5 +1,5 @@
 use hero_job::{Job, JobStatus};
-use log::{debug, error, info};
+use tracing::log::{debug, error, info};
 use redis::AsyncCommands;
 use rhai::{Dynamic, Engine};
 use tokio::sync::mpsc; // For shutdown signal
|
|||||||
/// Actor trait abstraction for unified actor interface
|
/// Actor trait abstraction for unified actor interface
|
||||||
pub mod actor_trait;
|
pub mod actor_trait;
|
||||||
|
|
||||||
/// Terminal UI module for actor monitoring and job dispatch
|
|
||||||
pub mod terminal_ui;
|
|
||||||
|
|
||||||
const NAMESPACE_PREFIX: &str = "hero:job:";
|
const NAMESPACE_PREFIX: &str = "hero:job:";
|
||||||
const BLPOP_TIMEOUT_SECONDS: usize = 5;
|
const BLPOP_TIMEOUT_SECONDS: usize = 5;
|
||||||
|
|
||||||
@ -36,7 +33,7 @@ pub async fn initialize_redis_connection(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Load job from Redis using Job struct
|
/// Load job from Redis using Job struct
|
||||||
pub(crate) async fn load_job_from_redis(
|
pub async fn load_job_from_redis(
|
||||||
redis_conn: &mut redis::aio::MultiplexedConnection,
|
redis_conn: &mut redis::aio::MultiplexedConnection,
|
||||||
job_id: &str,
|
job_id: &str,
|
||||||
actor_id: &str,
|
actor_id: &str,
|
||||||
@ -116,29 +113,6 @@ async fn execute_script_and_update_status(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Execute a job with the given engine, setting proper job context
|
|
||||||
///
|
|
||||||
/// This function sets up the engine with job context (DB_PATH, CALLER_ID, CONTEXT_ID)
|
|
||||||
/// and evaluates the script. It returns the result or error without updating Redis.
|
|
||||||
/// This allows actors to handle Redis updates according to their own patterns.
|
|
||||||
pub async fn execute_job_with_engine(
|
|
||||||
engine: &mut Engine,
|
|
||||||
job: &Job,
|
|
||||||
db_path: &str,
|
|
||||||
) -> Result<Dynamic, Box<rhai::EvalAltResult>> {
|
|
||||||
// Set up job context in the engine
|
|
||||||
let mut db_config = rhai::Map::new();
|
|
||||||
db_config.insert("DB_PATH".into(), db_path.to_string().into());
|
|
||||||
db_config.insert("CALLER_ID".into(), job.caller_id.clone().into());
|
|
||||||
db_config.insert("CONTEXT_ID".into(), job.context_id.clone().into());
|
|
||||||
engine.set_default_tag(Dynamic::from(db_config));
|
|
||||||
|
|
||||||
debug!("Actor for Context ID '{}': Evaluating script with Rhai engine (job context set).", job.context_id);
|
|
||||||
|
|
||||||
// Execute the script with the configured engine
|
|
||||||
engine.eval::<Dynamic>(&job.script)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clean up job from Redis if preserve_tasks is false
|
/// Clean up job from Redis if preserve_tasks is false
|
||||||
async fn cleanup_job(
|
async fn cleanup_job(
|
||||||
redis_conn: &mut redis::aio::MultiplexedConnection,
|
redis_conn: &mut redis::aio::MultiplexedConnection,
|
||||||
|
@@ -1,38 +0,0 @@
-#[cfg(feature = "wasm")]
-use baobab_actor::ui::App;
-#[cfg(feature = "wasm")]
-use yew::prelude::*;
-
-#[cfg(feature = "wasm")]
-fn main() {
-    console_log::init_with_level(log::Level::Debug).expect("Failed to initialize logger");
-
-    // Get configuration from URL parameters or local storage
-    let window = web_sys::window().expect("No global window exists");
-    let location = window.location();
-    let search = location.search().unwrap_or_default();
-
-    // Parse URL parameters for actor configuration
-    let url_params = web_sys::UrlSearchParams::new_with_str(&search).unwrap();
-
-    let actor_id = url_params.get("id").unwrap_or_else(|| "default_actor".to_string());
-    let actor_path = url_params.get("path").unwrap_or_else(|| "/path/to/actor".to_string());
-    let example_dir = url_params.get("example_dir");
-    let redis_url = url_params.get("redis_url").unwrap_or_else(|| "redis://localhost:6379".to_string());
-
-    log::info!("Starting Baobab Actor UI with actor_id: {}", actor_id);
-
-    yew::Renderer::<App>::with_props(baobab_actor::ui::app::AppProps {
-        actor_id,
-        actor_path,
-        example_dir,
-        redis_url,
-    }).render();
-}
-
-#[cfg(not(feature = "wasm"))]
-fn main() {
-    eprintln!("This binary is only available with the 'wasm' feature enabled.");
-    eprintln!("Please compile with: cargo build --features wasm --target wasm32-unknown-unknown");
-    std::process::exit(1);
-}
File diff suppressed because it is too large.
@@ -76,11 +76,6 @@ impl JobBuilder {
         self
     }
 
-    pub fn caller_id(mut self, caller_id: &str) -> Self {
-        self.caller_id = caller_id.to_string();
-        self
-    }
-
     pub fn script(mut self, script: &str) -> Self {
         self.script = script.to_string();
         self
@@ -7,7 +7,6 @@ use redis::AsyncCommands;
 use thiserror::Error;
 
 mod builder;
-pub use builder::JobBuilder;
 
 /// Redis namespace prefix for all Hero job-related keys
 pub const NAMESPACE_PREFIX: &str = "hero:job:";
core/logger/Cargo.toml (new file): 23 lines
[package]
name = "hero_logger"
version = "0.1.0"
edition = "2021"
description = "Hierarchical logging system for the Hero project with system and per-job isolation"
authors = ["Hero Team"]

[dependencies]
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "registry", "fmt"] }
tracing-appender = "0.2"
tokio = { version = "1", features = ["fs", "time", "rt", "rt-multi-thread", "macros"] }
chrono = { version = "0.4", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
anyhow = "1.0"
rhai = "1.21.0"

[dev-dependencies]
tempfile = "3.0"
tokio-test = "0.4"
tracing-test = "0.2"
core/logger/README.md (new file): 292 lines
# Hero Logger

A hierarchical logging system for the Hero project that provides system-level and per-job logging with complete isolation using the `tracing` ecosystem.

## Features

- **Hierarchical Organization**: Physical separation of logs by component and job
- **System Logger**: Global logging for all non-job-specific events
- **Per-Job Logger**: Isolated logging for individual job execution
- **Custom Log Format**: Readable format with precise formatting rules
- **Hourly Rotation**: Automatic log file rotation every hour
- **Rhai Integration**: Capture Rhai script `print()` and `debug()` calls
- **High Performance**: Async logging with efficient filtering
- **Structured Logging**: Rich context and metadata support

## Custom Log Format

Hero Logger uses a custom format designed for readability and consistency:

```
21:23:42
  system     - This is a normal log message
  system     - This is a multi-line message
               second line with proper indentation
               third line maintaining alignment
E error_cat  - This is an error message
E              second line of error
E              third line of error
```

### Format Rules

- **Time stamps (HH:MM:SS)** are written once per second, when the log time changes
- **Categories** are:
  - Limited to 10 characters maximum
  - Padded with spaces to exactly 10 characters
  - Any `-` in category names is converted to `_`
- **Each line starts with either:**
  - ` ` (space) for normal logs (INFO, WARN, DEBUG, TRACE)
  - `E` for error logs
- **Multi-line messages** maintain consistent indentation (14 spaces after the prefix)
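As a quick sketch of the category rule, the normalization looks roughly like this (it mirrors `format_category` in `core/logger/src/custom_formatter.rs`, shown later in this diff):

```rust
/// Dashes to underscores, truncate to 10 bytes, pad to exactly 10 characters.
fn normalize_category(target: &str) -> String {
    let processed = target.replace('-', "_");
    let truncated = if processed.len() > 10 {
        &processed[..10]
    } else {
        processed.as_str()
    };
    format!("{:<10}", truncated)
}

// "osis-actor" -> "osis_actor", "very-long-category-name" -> "very_long_"
```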
## Architecture

The logging system uses a hybrid approach with two main components:

### System Logger (Global)
- Long-lived logger initialized at application startup
- Routes logs to different files based on tracing targets
- Supports multiple components simultaneously

### Per-Job Logger (Dynamic)
- Created on-demand for each job execution
- Provides complete isolation for job-specific logs
- Automatically disposed after job completion

## Directory Structure

```
logs/
├── supervisor/                  # System logs for supervisor
│   └── 2025-08-06-11.log
└── actor/
    ├── osis/
    │   ├── 2025-08-06-11.log    # General OSIS actor logs
    │   ├── job-a1b2c3d4/        # Job-specific logs
    │   │   └── 2025-08-06-11.log
    │   └── job-9a8b7c6d/
    │       └── 2025-08-06-12.log
    └── sal/
        ├── 2025-08-06-13.log    # General SAL actor logs
        └── job-f1e2d3c4/
            └── 2025-08-06-13.log
```

## Quick Start

### 1. Initialize System Logger

```rust
use hero_logger;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Define your system components
    let components = vec![
        "supervisor".to_string(),
        "osis_actor".to_string(),
        "sal_actor".to_string(),
    ];

    // Initialize the system logger
    let _guards = hero_logger::init_system_logger("logs", &components)?;

    // Now you can use tracing macros with targets
    tracing::info!(target: "supervisor", "System started");
    tracing::info!(target: "osis_actor", "Actor ready");

    Ok(())
}
```

### 2. Per-Job Logging

```rust
use hero_logger::create_job_logger;
use tracing::subscriber::with_default;

async fn process_job(job_id: &str, actor_type: &str) -> Result<(), Box<dyn std::error::Error>> {
    // Create job-specific logger
    let job_logger = create_job_logger("logs", actor_type, job_id)?;

    // Execute job within logging context
    with_default(job_logger, || {
        tracing::info!(target: "osis_actor", "Job {} started", job_id);

        // All tracing calls here go to the job-specific log
        tracing::debug!(target: "osis_actor", "Processing data...");
        tracing::info!(target: "osis_actor", "Job {} completed", job_id);
    });

    Ok(())
}
```

### 3. Rhai Script Integration

```rust
use hero_logger::rhai_integration::configure_rhai_logging;
use rhai::Engine;

fn setup_rhai_engine() -> Engine {
    let mut engine = Engine::new();

    // Configure Rhai to capture print/debug calls
    configure_rhai_logging(&mut engine, "osis_actor");

    engine
}

// Now Rhai scripts can use print() and debug()
let script = r#"
    print("Hello from Rhai!");
    debug("Debug information");
    42
"#;

let result = engine.eval::<i64>(script)?;
```

## API Reference

### Core Functions

#### `init_system_logger(logs_root, components)`
Initialize the global system logger with component-based filtering.

**Parameters:**
- `logs_root`: Root directory for log files
- `components`: List of component names for dedicated logging

**Returns:** Vector of `WorkerGuard`s that must be kept alive

#### `create_job_logger(logs_root, actor_type, job_id)`
Create a per-job logger for isolated logging.

**Parameters:**
- `logs_root`: Root directory for log files
- `actor_type`: Type of actor (e.g., "osis", "sal")
- `job_id`: Unique job identifier

**Returns:** Boxed subscriber for use with `with_default()`

### Rhai Integration

#### `configure_rhai_logging(engine, target)`
Configure a Rhai engine to capture print/debug output.

#### `add_custom_logging_functions(engine, target)`
Add custom logging functions (`log_info`, `log_debug`, etc.) to Rhai.

#### `create_logging_enabled_engine(target, include_custom)`
Create a new Rhai engine with full logging integration.
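A hypothetical call shape for the engine helper, inferred only from the parameter list above (the actual signature and return type live in `rhai_integration`):

```rust
// Assumed usage: "osis_actor" is the tracing target, and `true` asks for the
// custom log_* functions as well. The Result type here is an assumption.
let engine = hero_logger::rhai_integration::create_logging_enabled_engine("osis_actor", true)?;
```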
### Utilities

#### `ensure_log_directories(logs_root, components)`
Ensure the log directory structure exists.

#### `extract_actor_type(component)`
Extract actor type from component name.

#### `cleanup_old_logs(directory, pattern, max_age_days)`
Clean up old log files based on age.

## Configuration

### Log Levels
The system supports standard tracing log levels:
- `ERROR`: Critical errors
- `WARN`: Warning messages
- `INFO`: Informational messages
- `DEBUG`: Debug information
- `TRACE`: Detailed trace information

### Environment Variables
- `RUST_LOG`: Set log level filtering (e.g., `RUST_LOG=debug`)

### File Rotation
- **Hourly**: Default rotation every hour
- **Daily**: Optional daily rotation
- **Never**: Single file (no rotation)
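These rotation modes presumably map onto `tracing-appender`'s rolling writers (the crate this change adds as a dependency); a minimal sketch, assuming the standard `tracing-appender` 0.2 API:

```rust
use tracing_appender::rolling;

// Hourly (the default here), daily, or a single never-rotated file:
let hourly = rolling::hourly("logs/supervisor", "supervisor.log");
let _daily = rolling::daily("logs/supervisor", "supervisor.log");
let _single = rolling::never("logs/supervisor", "supervisor.log");

// Wrap in a non-blocking writer; keep the guard alive or buffered lines are lost.
let (_writer, _guard) = tracing_appender::non_blocking(hourly);
```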
## Examples

### Basic Usage
```bash
cargo run --example logging_demo
```

### Custom Format Demo
```bash
cargo run --example custom_format_demo
```

### Integration with Actor System
```rust
// In your actor implementation
async fn process_job(&self, job: &Job) {
    let job_logger = hero_logger::create_job_logger(
        "logs",
        &self.actor_type,
        &job.id
    ).unwrap();

    let job_task = async move {
        tracing::info!(target: &self.actor_type, "Job processing started");

        // Configure Rhai engine for this job
        let mut engine = Engine::new();
        hero_logger::rhai_integration::configure_rhai_logging(
            &mut engine,
            &self.actor_type
        );

        // Execute Rhai script - print/debug calls captured
        let result = engine.eval::<String>(&job.script)?;

        tracing::info!(target: &self.actor_type, "Job finished: {}", result);
        Ok(result)
    };

    // Execute with job-specific logging
    tracing::subscriber::with_default(job_logger, job_task).await;
}
```

## Performance Considerations

- **Async Logging**: All file I/O is asynchronous
- **Efficient Filtering**: Target-based filtering minimizes overhead
- **Memory Usage**: Per-job loggers are short-lived and automatically cleaned up
- **File Handles**: Automatic rotation prevents excessive file handle usage

## Troubleshooting

### Common Issues

1. **Logs not appearing**: Ensure `WorkerGuard`s are kept alive
2. **Permission errors**: Check write permissions on log directory
3. **Missing directories**: Use `ensure_log_directories()` before logging
4. **Rhai output not captured**: Verify `configure_rhai_logging()` is called

### Debug Mode
Enable debug logging to see internal logger operations:
```bash
RUST_LOG=hero_logger=debug cargo run
```

## Testing

Run the test suite:
```bash
cargo test
```

Run the demo example:
```bash
cargo run --example logging_demo
```

## License

This project is part of the Hero ecosystem and follows the same licensing terms.
core/logger/examples/logging_demo.rs (new file): 142 lines
//! Logging System Demo
//!
//! This example demonstrates the Hero logging system functionality including:
//! - System logger initialization
//! - Per-job logger creation
//! - Rhai script integration with logging
//! - Directory structure creation

use hero_logger::{
    init_system_logger, create_job_logger, rhai_integration::configure_rhai_logging,
};
use tracing::{info, debug, warn, error};
use tracing::subscriber::with_default;
use rhai::Engine;
use std::time::Duration;
use tokio::time::sleep;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🚀 Hero Logging System Demo");
    println!("============================");

    // 1. Initialize the system logger
    println!("\n📋 Step 1: Initializing system logger...");
    let components = vec![
        "supervisor".to_string(),
        "osis_actor".to_string(),
        "sal_actor".to_string(),
    ];

    let _guards = init_system_logger("demo_logs", &components)?;
    println!("✅ System logger initialized with {} components", components.len());

    // 2. Test system-level logging
    println!("\n📝 Step 2: Testing system-level logging...");
    info!(target: "supervisor", "Supervisor started successfully");
    info!(target: "osis_actor", "OSIS actor is ready");
    info!(target: "sal_actor", "SAL actor is ready");
    warn!(target: "supervisor", "This is a warning message");
    error!(target: "supervisor", "This is an error message for testing");

    // Give time for async logging
    sleep(Duration::from_millis(100)).await;
    println!("✅ System logs written to demo_logs/supervisor/ and demo_logs/actor/*/");

    // 3. Test per-job logging
    println!("\n🔄 Step 3: Testing per-job logging...");

    // Create job loggers for different jobs
    let job1_logger = create_job_logger("demo_logs", "osis", "demo-job-001")?;
    let job2_logger = create_job_logger("demo_logs", "sal", "demo-job-002")?;

    // Execute logging within job contexts
    with_default(job1_logger, || {
        info!(target: "osis_actor", "Job demo-job-001 started");
        debug!(target: "osis_actor", "Processing OSIS data");
        info!(target: "osis_actor", "Job demo-job-001 completed successfully");
    });

    with_default(job2_logger, || {
        info!(target: "sal_actor", "Job demo-job-002 started");
        debug!(target: "sal_actor", "Processing SAL data");
        warn!(target: "sal_actor", "Minor issue detected but continuing");
        info!(target: "sal_actor", "Job demo-job-002 completed successfully");
    });

    sleep(Duration::from_millis(100)).await;
    println!("✅ Per-job logs written to demo_logs/actor/*/job-*/");

    // 4. Test Rhai integration
    println!("\n🔧 Step 4: Testing Rhai script logging integration...");

    let job3_logger = create_job_logger("demo_logs", "osis", "rhai-demo-003")?;

    with_default(job3_logger, || {
        let mut engine = Engine::new();
        configure_rhai_logging(&mut engine, "osis_actor");

        info!(target: "osis_actor", "Starting Rhai script execution");

        // Execute a Rhai script that uses print and debug
        let script = r#"
            print("Hello from Rhai script!");
            debug("This is a debug message from Rhai");

            let result = 42 + 8;
            print("Calculation result: " + result);

            result
        "#;

        match engine.eval::<i64>(script) {
            Ok(result) => {
                info!(target: "osis_actor", "Rhai script completed with result: {}", result);
            }
            Err(e) => {
                error!(target: "osis_actor", "Rhai script failed: {:?}", e);
            }
        }
    });

    sleep(Duration::from_millis(100)).await;
    println!("✅ Rhai script logs captured in per-job logger");

    // 5. Display directory structure
    println!("\n📁 Step 5: Generated directory structure:");
    display_directory_structure("demo_logs", 0)?;

    println!("\n🎉 Demo completed successfully!");
    println!("Check the 'demo_logs' directory to see the generated log files.");
    println!("Each component and job has its own isolated log files with hourly rotation.");

    Ok(())
}

/// Recursively display directory structure
fn display_directory_structure(path: &str, depth: usize) -> Result<(), Box<dyn std::error::Error>> {
    let path = std::path::Path::new(path);
    if !path.exists() {
        return Ok(());
    }

    let indent = "  ".repeat(depth);

    if path.is_dir() {
        println!("{}📁 {}/", indent, path.file_name().unwrap_or_default().to_string_lossy());

        let mut entries: Vec<_> = std::fs::read_dir(path)?.collect::<Result<Vec<_>, _>>()?;
        entries.sort_by_key(|entry| entry.file_name());

        for entry in entries {
            let entry_path = entry.path();
            if entry_path.is_dir() {
                display_directory_structure(&entry_path.to_string_lossy(), depth + 1)?;
            } else {
                println!("{}📄 {}", "  ".repeat(depth + 1), entry.file_name().to_string_lossy());
            }
        }
    }

    Ok(())
}
core/logger/src/custom_formatter.rs (new file): 234 lines
//! Custom Hero Logger Formatter
//!
//! This module implements a custom formatter for the Hero logging system that provides:
//! - Time stamps (HH:MM:SS) written once per second when the log time changes
//! - Categories limited to 10 characters maximum, padded with spaces, dashes converted to underscores
//! - Each line starts with either space (normal logs) or E (error logs)
//! - Multi-line messages maintain consistent indentation (14 spaces after the prefix)

use std::fmt;
use std::io::{self, Write};
use std::sync::{Arc, Mutex};
use tracing::{Event, Level, Subscriber};
use tracing_subscriber::fmt::{format::Writer, FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use chrono::{DateTime, Local};

/// Custom formatter for Hero logging system
pub struct HeroFormatter {
    /// Tracks the last written timestamp to avoid duplicate timestamps
    last_timestamp: Arc<Mutex<Option<String>>>,
}

impl HeroFormatter {
    /// Create a new Hero formatter
    pub fn new() -> Self {
        Self {
            last_timestamp: Arc::new(Mutex::new(None)),
        }
    }

    /// Format a category name according to Hero rules:
    /// - Convert dashes to underscores
    /// - Limit to 10 characters maximum
    /// - Pad with spaces to exactly 10 characters
    fn format_category(&self, target: &str) -> String {
        let processed = target.replace('-', "_");
        let truncated = if processed.len() > 10 {
            &processed[..10]
        } else {
            &processed
        };
        format!("{:<10}", truncated)
    }

    /// Get the log level prefix (space for normal, E for error)
    fn get_level_prefix(&self, level: &Level) -> char {
        match *level {
            Level::ERROR => 'E',
            _ => ' ',
        }
    }

    /// Get current timestamp in HH:MM:SS format
    fn get_current_timestamp(&self) -> String {
        let now: DateTime<Local> = Local::now();
        now.format("%H:%M:%S").to_string()
    }

    /// Check if we need to write a timestamp and update the last timestamp
    fn should_write_timestamp(&self, current_timestamp: &str) -> bool {
        let mut last_ts = self.last_timestamp.lock().unwrap();
        match last_ts.as_ref() {
            Some(last) if last == current_timestamp => false,
            _ => {
                *last_ts = Some(current_timestamp.to_string());
                true
            }
        }
    }

    /// Format a multi-line message with proper indentation
    fn format_message(&self, prefix: char, category: &str, message: &str) -> String {
        let lines: Vec<&str> = message.lines().collect();
        if lines.is_empty() {
            return format!("{} {} - \n", prefix, category);
        }

        let mut result = String::new();

        // First line: prefix + category + " - " + message
        result.push_str(&format!("{} {} - {}\n", prefix, category, lines[0]));

        // Subsequent lines: prefix + 14 spaces + message
        for line in lines.iter().skip(1) {
            result.push_str(&format!("{}              {}\n", prefix, line));
        }

        result
    }
}

impl Default for HeroFormatter {
    fn default() -> Self {
        Self::new()
    }
}

impl<S, N> FormatEvent<S, N> for HeroFormatter
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        _ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        // Get current timestamp
        let current_timestamp = self.get_current_timestamp();

        // Write timestamp if it has changed
        if self.should_write_timestamp(&current_timestamp) {
            writeln!(writer, "{}", current_timestamp)?;
        }

        // Get event metadata
        let metadata = event.metadata();
        let level = metadata.level();
        let target = metadata.target();

        // Format category and get prefix
        let category = self.format_category(target);
        let prefix = self.get_level_prefix(level);

        // Capture the message
        let mut message_visitor = MessageVisitor::new();
        event.record(&mut message_visitor);
        let message = message_visitor.message;

        // Format and write the message
        let formatted = self.format_message(prefix, &category, &message);
        write!(writer, "{}", formatted)?;

        Ok(())
    }
}

/// Visitor to extract the message from tracing events
struct MessageVisitor {
    message: String,
}

impl MessageVisitor {
    fn new() -> Self {
        Self {
            message: String::new(),
        }
    }
}

impl tracing::field::Visit for MessageVisitor {
    fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn fmt::Debug) {
        if field.name() == "message" {
            self.message = format!("{:?}", value);
            // Remove surrounding quotes if present
            if self.message.starts_with('"') && self.message.ends_with('"') {
                self.message = self.message[1..self.message.len() - 1].to_string();
            }
        }
    }

    fn record_str(&mut self, field: &tracing::field::Field, value: &str) {
        if field.name() == "message" {
            self.message = value.to_string();
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tracing::{info, error};
    use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
    use std::sync::{Arc, Mutex};
    use std::io::Cursor;

    #[test]
    fn test_format_category() {
        let formatter = HeroFormatter::new();

        // Test normal category
        assert_eq!(formatter.format_category("system"), "system    ");

        // Test category with dashes
        assert_eq!(formatter.format_category("osis-actor"), "osis_actor");

        // Test long category (truncation)
        assert_eq!(formatter.format_category("very-long-category-name"), "very_long_");

        // Test exact 10 characters
        assert_eq!(formatter.format_category("exactly10c"), "exactly10c");
    }

    #[test]
    fn test_get_level_prefix() {
        let formatter = HeroFormatter::new();

        assert_eq!(formatter.get_level_prefix(&Level::ERROR), 'E');
        assert_eq!(formatter.get_level_prefix(&Level::WARN), ' ');
        assert_eq!(formatter.get_level_prefix(&Level::INFO), ' ');
        assert_eq!(formatter.get_level_prefix(&Level::DEBUG), ' ');
        assert_eq!(formatter.get_level_prefix(&Level::TRACE), ' ');
    }

    #[test]
    fn test_format_message() {
        let formatter = HeroFormatter::new();

        // Test single line message
        let result = formatter.format_message(' ', "system    ", "Hello world");
        assert_eq!(result, "  system     - Hello world\n");

        // Test multi-line message
        let result = formatter.format_message('E', "error_cat ", "Line 1\nLine 2\nLine 3");
        let expected = "E error_cat  - Line 1\nE              Line 2\nE              Line 3\n";
|
||||||
|
assert_eq!(result, expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_timestamp_tracking() {
|
||||||
|
let formatter = HeroFormatter::new();
|
||||||
|
let timestamp = "12:34:56";
|
||||||
|
|
||||||
|
// First call should return true (write timestamp)
|
||||||
|
assert!(formatter.should_write_timestamp(timestamp));
|
||||||
|
|
||||||
|
// Second call with same timestamp should return false
|
||||||
|
assert!(!formatter.should_write_timestamp(timestamp));
|
||||||
|
|
||||||
|
// Call with different timestamp should return true
|
||||||
|
assert!(formatter.should_write_timestamp("12:34:57"));
|
||||||
|
}
|
||||||
|
}
|
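For orientation, a minimal usage sketch (not part of this diff) showing how the formatter above plugs into a tracing fmt layer; the output comments follow the formatting rules implemented in custom_formatter.rs:

use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, Registry};

fn main() {
    // Install HeroFormatter on a layer; the formatter instance owns the shared
    // last_timestamp state, so at most one HH:MM:SS line is written per second.
    let layer = fmt::layer()
        .event_format(HeroFormatter::new())
        .with_ansi(false);
    Registry::default().with(layer).init();

    tracing::info!(target: "supervisor", "started"); // "  supervisor - started"
    tracing::error!(target: "osis-actor", "boom");   // "E osis_actor - boom"
}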
285
core/logger/src/file_appender.rs
Normal file
@@ -0,0 +1,285 @@
//! Custom File Appender Implementation
//!
//! This module provides custom file appender functionality with enhanced
//! rotation and directory management capabilities.

use crate::{LoggerError, Result};
use std::path::{Path, PathBuf};
use tracing_appender::rolling::{RollingFileAppender, Rotation};

/// Create a custom rolling file appender with enhanced configuration
pub fn create_rolling_appender<P: AsRef<Path>>(
    directory: P,
    file_name_prefix: &str,
    rotation: AppenderRotation,
) -> Result<RollingFileAppender> {
    let directory = directory.as_ref();

    // Ensure directory exists
    std::fs::create_dir_all(directory)
        .map_err(|e| LoggerError::DirectoryCreation(
            format!("Failed to create directory {}: {}", directory.display(), e)
        ))?;

    let rotation = match rotation {
        AppenderRotation::Hourly => Rotation::HOURLY,
        AppenderRotation::Daily => Rotation::DAILY,
        AppenderRotation::Never => Rotation::NEVER,
    };

    let appender = RollingFileAppender::builder()
        .rotation(rotation)
        .filename_prefix(file_name_prefix)
        .filename_suffix("log")
        .build(directory)
        .map_err(|e| LoggerError::Config(format!("Failed to create rolling appender: {}", e)))?;

    Ok(appender)
}

/// Enhanced rotation configuration
#[derive(Debug, Clone, Copy)]
pub enum AppenderRotation {
    /// Rotate files every hour
    Hourly,
    /// Rotate files every day
    Daily,
    /// Never rotate (single file)
    Never,
}

/// File appender builder for more complex configurations
pub struct FileAppenderBuilder {
    directory: PathBuf,
    file_prefix: String,
    file_suffix: String,
    rotation: AppenderRotation,
    max_files: Option<usize>,
}

impl FileAppenderBuilder {
    /// Create a new file appender builder
    pub fn new<P: AsRef<Path>>(directory: P) -> Self {
        Self {
            directory: directory.as_ref().to_path_buf(),
            file_prefix: "log".to_string(),
            file_suffix: "log".to_string(),
            rotation: AppenderRotation::Hourly,
            max_files: None,
        }
    }

    /// Set the file prefix
    pub fn file_prefix<S: Into<String>>(mut self, prefix: S) -> Self {
        self.file_prefix = prefix.into();
        self
    }

    /// Set the file suffix
    pub fn file_suffix<S: Into<String>>(mut self, suffix: S) -> Self {
        self.file_suffix = suffix.into();
        self
    }

    /// Set the rotation policy
    pub fn rotation(mut self, rotation: AppenderRotation) -> Self {
        self.rotation = rotation;
        self
    }

    /// Set maximum number of files to keep (for cleanup)
    pub fn max_files(mut self, max: usize) -> Self {
        self.max_files = Some(max);
        self
    }

    /// Build the file appender
    pub fn build(self) -> Result<RollingFileAppender> {
        // Ensure directory exists
        std::fs::create_dir_all(&self.directory)
            .map_err(|e| LoggerError::DirectoryCreation(
                format!("Failed to create directory {}: {}", self.directory.display(), e)
            ))?;

        let rotation = match self.rotation {
            AppenderRotation::Hourly => Rotation::HOURLY,
            AppenderRotation::Daily => Rotation::DAILY,
            AppenderRotation::Never => Rotation::NEVER,
        };

        let appender = RollingFileAppender::builder()
            .rotation(rotation)
            .filename_prefix(&self.file_prefix)
            .filename_suffix(&self.file_suffix)
            .build(&self.directory)
            .map_err(|e| LoggerError::Config(format!("Failed to create rolling appender: {}", e)))?;

        // Perform cleanup if max_files is set
        if let Some(max_files) = self.max_files {
            if let Err(e) = cleanup_old_files(&self.directory, &self.file_prefix, max_files) {
                tracing::warn!("Failed to cleanup old log files: {}", e);
            }
        }

        Ok(appender)
    }
}

/// Clean up old log files, keeping only the most recent ones
fn cleanup_old_files<P: AsRef<Path>>(
    directory: P,
    file_prefix: &str,
    max_files: usize,
) -> Result<()> {
    let directory = directory.as_ref();

    let mut log_files = Vec::new();

    // Read directory and collect log files
    let entries = std::fs::read_dir(directory)
        .map_err(LoggerError::Io)?;

    for entry in entries {
        let entry = entry.map_err(LoggerError::Io)?;
        let path = entry.path();

        if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
            if file_name.starts_with(file_prefix) && file_name.ends_with(".log") {
                if let Ok(metadata) = entry.metadata() {
                    if let Ok(modified) = metadata.modified() {
                        log_files.push((path, modified));
                    }
                }
            }
        }
    }

    // Sort by modification time (newest first)
    log_files.sort_by(|a, b| b.1.cmp(&a.1));

    // Remove old files if we exceed max_files
    if log_files.len() > max_files {
        for (old_file, _) in log_files.iter().skip(max_files) {
            if let Err(e) = std::fs::remove_file(old_file) {
                tracing::warn!("Failed to remove old log file {}: {}", old_file.display(), e);
            } else {
                tracing::debug!("Removed old log file: {}", old_file.display());
            }
        }
    }

    Ok(())
}

/// Utility function to get the current log file path for a given configuration
pub fn get_current_log_file<P: AsRef<Path>>(
    directory: P,
    file_prefix: &str,
    rotation: AppenderRotation,
) -> PathBuf {
    let directory = directory.as_ref();

    match rotation {
        AppenderRotation::Hourly => {
            let now = chrono::Utc::now();
            let timestamp = now.format("%Y-%m-%d-%H");
            directory.join(format!("{}.{}.log", file_prefix, timestamp))
        }
        AppenderRotation::Daily => {
            let now = chrono::Utc::now();
            let timestamp = now.format("%Y-%m-%d");
            directory.join(format!("{}.{}.log", file_prefix, timestamp))
        }
        AppenderRotation::Never => {
            directory.join(format!("{}.log", file_prefix))
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;
    use tempfile::TempDir;

    #[test]
    fn test_create_rolling_appender() {
        let temp_dir = TempDir::new().unwrap();
        let directory = temp_dir.path().join("logs");

        let _appender = create_rolling_appender(&directory, "test", AppenderRotation::Hourly).unwrap();

        // Verify directory was created
        assert!(directory.exists());
    }

    #[test]
    fn test_file_appender_builder() {
        let temp_dir = TempDir::new().unwrap();
        let directory = temp_dir.path().join("logs");

        let _appender = FileAppenderBuilder::new(&directory)
            .file_prefix("custom")
            .file_suffix("txt")
            .rotation(AppenderRotation::Daily)
            .max_files(5)
            .build()
            .unwrap();

        assert!(directory.exists());
    }

    #[test]
    fn test_get_current_log_file() {
        let temp_dir = TempDir::new().unwrap();
        let directory = temp_dir.path();

        // Test hourly rotation
        let hourly_file = get_current_log_file(directory, "test", AppenderRotation::Hourly);
        assert!(hourly_file.to_string_lossy().contains("test."));
        assert!(hourly_file.extension().unwrap() == "log");

        // Test daily rotation
        let daily_file = get_current_log_file(directory, "test", AppenderRotation::Daily);
        assert!(daily_file.to_string_lossy().contains("test."));
        assert!(daily_file.extension().unwrap() == "log");

        // Test never rotation
        let never_file = get_current_log_file(directory, "test", AppenderRotation::Never);
        assert_eq!(never_file, directory.join("test.log"));
    }

    #[test]
    fn test_cleanup_old_files() {
        let temp_dir = TempDir::new().unwrap();
        let directory = temp_dir.path();

        // Create some test log files
        for i in 0..10 {
            let file_path = directory.join(format!("test.{}.log", i));
            std::fs::write(&file_path, "test content").unwrap();

            // Sleep briefly to ensure different modification times
            std::thread::sleep(Duration::from_millis(10));
        }

        // Cleanup, keeping only 5 files
        cleanup_old_files(directory, "test", 5).unwrap();

        // Count remaining files
        let remaining_files: Vec<_> = std::fs::read_dir(directory)
            .unwrap()
            .filter_map(|entry| {
                let entry = entry.ok()?;
                let name = entry.file_name().to_string_lossy().to_string();
                if name.starts_with("test.") && name.ends_with(".log") {
                    Some(name)
                } else {
                    None
                }
            })
            .collect();

        assert_eq!(remaining_files.len(), 5);
    }
}
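A short sketch of the intended wiring (assumed usage, not shown in this diff): the builder's appender is typically wrapped in tracing-appender's non-blocking writer before being attached to a layer. Note that cleanup only matches files ending in ".log", so the default suffix is kept here.

// Daily-rotated "system.*.log" files, keeping at most 24 of them.
let appender = FileAppenderBuilder::new("logs/system")
    .file_prefix("system")
    .rotation(AppenderRotation::Daily)
    .max_files(24)
    .build()?;

// The returned guard must stay alive for buffered lines to be flushed.
let (writer, _guard) = tracing_appender::non_blocking(appender);
let layer = tracing_subscriber::fmt::layer().with_writer(writer);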
306
core/logger/src/job_logger.rs
Normal file
@@ -0,0 +1,306 @@
//! Per-Job Logger Implementation
//!
//! This module implements the per-job logging functionality that creates
//! temporary, isolated loggers for individual job execution.

use crate::{LoggerError, Result, custom_formatter::HeroFormatter};
use std::path::Path;
use tracing_subscriber::{
    filter::{EnvFilter, LevelFilter},
    fmt,
    layer::SubscriberExt,
    Layer, Registry,
};
use tracing_appender::{non_blocking::WorkerGuard, rolling};

/// Create a per-job logger for isolated job logging
///
/// This creates a temporary tracing subscriber that writes exclusively
/// to a job-specific directory. The subscriber is designed to be used
/// with `tracing::subscriber::with_default()` to scope all logging within a job.
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `actor_type` - Type of actor (e.g., "osis", "sal")
/// * `job_id` - Unique job identifier
///
/// # Returns
///
/// Returns a boxed subscriber that can be used with `with_default()`.
/// The WorkerGuard is dropped when this function returns, so buffered output
/// may be lost for slow jobs; prefer `create_job_logger_with_guard` when
/// reliable flushing matters.
pub fn create_job_logger<P: AsRef<Path>>(
    logs_root: P,
    actor_type: &str,
    job_id: &str,
) -> Result<Box<dyn tracing::Subscriber + Send + Sync>> {
    let (subscriber, _guard) = create_job_logger_with_guard(logs_root, actor_type, job_id)?;

    // Note: The guard is intentionally dropped here because the job logger
    // is meant to be short-lived. In practice, the job execution should be
    // fast enough that logs are flushed before the guard is dropped.
    // For longer-running jobs, use create_job_logger_with_guard instead.

    Ok(subscriber)
}

/// Create a job logger that returns both the subscriber and the guard
///
/// This variant returns both the subscriber and the worker guard, giving
/// the caller control over the guard's lifetime for proper log flushing.
pub fn create_job_logger_with_guard<P: AsRef<Path>>(
    logs_root: P,
    actor_type: &str,
    job_id: &str,
) -> Result<(Box<dyn tracing::Subscriber + Send + Sync>, WorkerGuard)> {
    let logs_root = logs_root.as_ref();

    // Create job-specific directory: logs/actor/<type>/job-<job_id>/
    let job_dir = logs_root
        .join("actor")
        .join(actor_type)
        .join(format!("job-{}", job_id));

    // Ensure the job directory exists
    std::fs::create_dir_all(&job_dir)
        .map_err(|e| LoggerError::DirectoryCreation(format!("Failed to create job directory {}: {}", job_dir.display(), e)))?;

    // Create hourly rolling file appender for the job
    let file_appender = rolling::hourly(&job_dir, "log");
    let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);

    // Create a formatted layer for the job with the custom Hero formatter
    let layer = fmt::layer()
        .with_writer(non_blocking)
        .event_format(HeroFormatter::new())
        .with_ansi(false) // No ANSI colors in log files
        .with_filter(
            EnvFilter::new("trace") // Capture all logs within the job context
                .add_directive(LevelFilter::TRACE.into())
        );

    // Create a registry with the job layer
    let subscriber = Registry::default()
        .with(layer);

    tracing::debug!(
        target: "hero_logger",
        "Created job logger for actor_type={}, job_id={}, log_dir={}",
        actor_type,
        job_id,
        job_dir.display()
    );

    Ok((Box::new(subscriber), guard))
}

/// Create a job logger with custom configuration
///
/// This allows for more fine-grained control over the job logger configuration.
pub fn create_job_logger_with_config<P: AsRef<Path>>(
    logs_root: P,
    actor_type: &str,
    job_id: &str,
    config: JobLoggerConfig,
) -> Result<(Box<dyn tracing::Subscriber + Send + Sync>, WorkerGuard)> {
    let logs_root = logs_root.as_ref();

    // Create job-specific directory
    let job_dir = logs_root
        .join("actor")
        .join(actor_type)
        .join(format!("job-{}", job_id));

    std::fs::create_dir_all(&job_dir)
        .map_err(|e| LoggerError::DirectoryCreation(format!("Failed to create job directory {}: {}", job_dir.display(), e)))?;

    // Create file appender based on config
    let file_appender = match config.rotation {
        RotationConfig::Hourly => rolling::hourly(&job_dir, &config.file_prefix),
        RotationConfig::Daily => rolling::daily(&job_dir, &config.file_prefix),
        RotationConfig::Never => rolling::never(&job_dir, format!("{}.log", config.file_prefix)),
    };

    let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);

    // Create layer with custom configuration and the Hero formatter.
    // Note: the include_* flags are currently not applied because the custom
    // event formatter controls the output layout.
    let layer = fmt::layer()
        .with_writer(non_blocking)
        .event_format(HeroFormatter::new())
        .with_ansi(false);

    // Apply level filter
    let layer = layer.with_filter(
        EnvFilter::new(&config.level_filter)
            .add_directive(config.max_level.into())
    );

    let subscriber = Registry::default()
        .with(layer);

    Ok((Box::new(subscriber), guard))
}

/// Configuration for job logger creation
#[derive(Debug, Clone)]
pub struct JobLoggerConfig {
    /// File prefix for log files
    pub file_prefix: String,
    /// Log rotation configuration
    pub rotation: RotationConfig,
    /// Maximum log level to capture
    pub max_level: LevelFilter,
    /// Level filter string (e.g., "debug", "info", "trace")
    pub level_filter: String,
    /// Include target in log output
    pub include_target: bool,
    /// Include thread IDs in log output
    pub include_thread_ids: bool,
    /// Include file location in log output
    pub include_file_location: bool,
    /// Include line numbers in log output
    pub include_line_numbers: bool,
}

impl Default for JobLoggerConfig {
    fn default() -> Self {
        Self {
            file_prefix: "job".to_string(),
            rotation: RotationConfig::Hourly,
            max_level: LevelFilter::TRACE,
            level_filter: "trace".to_string(),
            include_target: true,
            include_thread_ids: true,
            include_file_location: true,
            include_line_numbers: true,
        }
    }
}

/// Log file rotation configuration
#[derive(Debug, Clone)]
pub enum RotationConfig {
    /// Rotate logs hourly
    Hourly,
    /// Rotate logs daily
    Daily,
    /// Never rotate logs (single file)
    Never,
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use tracing::{info, debug, error};
    use std::time::Duration;
    use tokio::time::sleep;

    #[tokio::test]
    async fn test_job_logger_creation() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let job_logger = create_job_logger(logs_root, "osis", "test-job-123").unwrap();

        // Verify job directory was created
        let job_dir = logs_root.join("actor/osis/job-test-job-123");
        assert!(job_dir.exists());

        // Test logging within the job context
        tracing::subscriber::with_default(job_logger, || {
            info!(target: "osis_actor", "Job started");
            debug!(target: "osis_actor", "Processing data");
            info!(target: "osis_actor", "Job completed");
        });

        // Give some time for async writing
        sleep(Duration::from_millis(100)).await;
    }

    #[tokio::test]
    async fn test_job_logger_with_guard() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let (job_logger, _guard) = create_job_logger_with_guard(logs_root, "sal", "test-job-456").unwrap();

        // Verify job directory was created
        let job_dir = logs_root.join("actor/sal/job-test-job-456");
        assert!(job_dir.exists());

        // Test logging
        tracing::subscriber::with_default(job_logger, || {
            error!(target: "sal_actor", "Job failed with error");
        });

        sleep(Duration::from_millis(100)).await;
    }

    #[tokio::test]
    async fn test_job_logger_with_custom_config() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let config = JobLoggerConfig {
            file_prefix: "custom".to_string(),
            rotation: RotationConfig::Never,
            max_level: LevelFilter::INFO,
            level_filter: "info".to_string(),
            include_target: false,
            include_thread_ids: false,
            include_file_location: false,
            include_line_numbers: false,
        };

        let (job_logger, _guard) = create_job_logger_with_config(
            logs_root,
            "python",
            "custom-job",
            config
        ).unwrap();

        // Verify job directory was created
        let job_dir = logs_root.join("actor/python/job-custom-job");
        assert!(job_dir.exists());

        // Test logging
        tracing::subscriber::with_default(job_logger, || {
            info!(target: "python_actor", "Custom job logging");
        });

        sleep(Duration::from_millis(100)).await;
    }

    #[tokio::test]
    async fn test_multiple_job_loggers() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        // Create multiple job loggers
        let job1 = create_job_logger(logs_root, "osis", "job-1").unwrap();
        let job2 = create_job_logger(logs_root, "osis", "job-2").unwrap();
        let job3 = create_job_logger(logs_root, "sal", "job-3").unwrap();

        // Verify all directories were created
        assert!(logs_root.join("actor/osis/job-job-1").exists());
        assert!(logs_root.join("actor/osis/job-job-2").exists());
        assert!(logs_root.join("actor/sal/job-job-3").exists());

        // Test isolated logging
        tracing::subscriber::with_default(job1, || {
            info!(target: "osis_actor", "Job 1 message");
        });

        tracing::subscriber::with_default(job2, || {
            info!(target: "osis_actor", "Job 2 message");
        });

        tracing::subscriber::with_default(job3, || {
            info!(target: "sal_actor", "Job 3 message");
        });

        sleep(Duration::from_millis(100)).await;
    }
}
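To make the guard-lifetime note above concrete, a sketch of the pattern recommended for longer-running jobs (hypothetical call site, not part of this diff):

// Keep the guard alive for the whole job so buffered lines are flushed.
let (subscriber, guard) = create_job_logger_with_guard("logs", "osis", "job-42")?;
tracing::subscriber::with_default(subscriber, || {
    tracing::info!(target: "osis_actor", "job running");
});
// Dropping the guard shuts down the background writer and flushes output.
drop(guard);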
234
core/logger/src/lib.rs
Normal file
@@ -0,0 +1,234 @@
//! # Hero Logger
//!
//! A hierarchical logging system for the Hero project that provides:
//! - System-level logging with component-based filtering
//! - Per-job logging with complete isolation
//! - Hourly log rotation
//! - Integration with the tracing ecosystem
//!
//! ## Architecture
//!
//! The logging system uses a hybrid approach:
//! - **System Logger**: Long-lived, captures all non-job-specific logs
//! - **Per-Job Logger**: Short-lived, captures all logs for a single job
//!
//! ## Usage
//!
//! ```rust,ignore
//! use hero_logger;
//!
//! // Initialize system logger (once at startup)
//! let components = vec!["supervisor".to_string(), "osis_actor".to_string()];
//! hero_logger::init_system_logger("logs", &components)?;
//!
//! // Use system logging
//! tracing::info!(target: "supervisor", "System started");
//!
//! // Create per-job logger for isolated logging
//! let job_logger = hero_logger::create_job_logger("logs", "osis", "job-123")?;
//! tracing::subscriber::with_default(job_logger, || {
//!     tracing::info!(target: "osis_actor", "Job processing started");
//! });
//! ```

use std::path::Path;
use tracing_appender::non_blocking::WorkerGuard;

mod system_logger;
mod job_logger;
mod file_appender;
mod utils;
mod custom_formatter;
pub mod rhai_integration;

pub use system_logger::*;
pub use job_logger::*;
pub use file_appender::*;
pub use utils::*;

/// Errors that can occur during logging operations
#[derive(thiserror::Error, Debug)]
pub enum LoggerError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    #[error("Tracing error: {0}")]
    Tracing(String),

    #[error("Invalid configuration: {0}")]
    Config(String),

    #[error("Directory creation failed: {0}")]
    DirectoryCreation(String),
}

/// Result type for logger operations
pub type Result<T> = std::result::Result<T, LoggerError>;

/// Initialize the system logger with component-based filtering
///
/// This function sets up the global tracing subscriber with multiple file appenders,
/// each filtered by component target. It should be called once at application startup.
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `components` - List of component names that will have dedicated log directories
///
/// # Returns
///
/// Returns a vector of `WorkerGuard`s that must be kept alive for the duration
/// of the application to ensure proper log flushing.
///
/// # Example
///
/// ```rust,ignore
/// let components = vec![
///     "supervisor".to_string(),
///     "osis_actor".to_string(),
///     "sal_actor".to_string(),
/// ];
/// let _guards = hero_logger::init_system_logger("logs", &components)?;
/// ```
pub fn init_system_logger<P: AsRef<Path>>(
    logs_root: P,
    components: &[String],
) -> Result<Vec<WorkerGuard>> {
    system_logger::init_system_logger(logs_root, components)
}

/// Create a per-job logger for isolated job logging
///
/// This function creates a temporary tracing subscriber that writes exclusively
/// to a job-specific directory. The subscriber should be used with
/// `tracing::subscriber::with_default()` to scope all logging within a job.
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `actor_type` - Type of actor (e.g., "osis", "sal")
/// * `job_id` - Unique job identifier
///
/// # Returns
///
/// Returns a boxed subscriber that can be used with `with_default()`
///
/// # Example
///
/// ```rust,ignore
/// let job_logger = hero_logger::create_job_logger("logs", "osis", "job-abc123")?;
///
/// tracing::subscriber::with_default(job_logger, || {
///     tracing::info!(target: "osis_actor", "Job started");
///     // All tracing calls here go to the job-specific log
/// });
/// ```
pub fn create_job_logger<P: AsRef<Path>>(
    logs_root: P,
    actor_type: &str,
    job_id: &str,
) -> Result<Box<dyn tracing::Subscriber + Send + Sync>> {
    job_logger::create_job_logger(logs_root, actor_type, job_id)
}

/// Create a job logger that returns both the subscriber and the guard
///
/// This variant returns both the subscriber and the worker guard, giving
/// the caller control over the guard's lifetime.
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `actor_type` - Type of actor (e.g., "osis", "sal")
/// * `job_id` - Unique job identifier
///
/// # Returns
///
/// Returns a tuple of (subscriber, guard) where the guard must be kept alive
/// for proper log flushing.
pub fn create_job_logger_with_guard<P: AsRef<Path>>(
    logs_root: P,
    actor_type: &str,
    job_id: &str,
) -> Result<(Box<dyn tracing::Subscriber + Send + Sync>, WorkerGuard)> {
    job_logger::create_job_logger_with_guard(logs_root, actor_type, job_id)
}

/// Ensure the log directory structure exists
///
/// Creates the necessary directory structure for the logging system:
/// - `logs/supervisor/`
/// - `logs/actor/osis/`
/// - `logs/actor/sal/`
/// - etc.
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `components` - List of component names
pub fn ensure_log_directories<P: AsRef<Path>>(
    logs_root: P,
    components: &[String],
) -> Result<()> {
    utils::ensure_log_directories(logs_root, components)
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use tracing::info;

    #[tokio::test]
    async fn test_system_logger_initialization() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let components = vec![
            "supervisor".to_string(),
            "test_actor".to_string(),
        ];

        let _guards = init_system_logger(logs_root, &components).unwrap();

        // Verify directories were created
        assert!(logs_root.join("supervisor").exists());
        assert!(logs_root.join("actor/test_actor").exists());
    }

    #[tokio::test]
    async fn test_job_logger_creation() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let job_logger = create_job_logger(logs_root, "test", "job-123").unwrap();

        // Verify job directory was created
        assert!(logs_root.join("actor/test/job-job-123").exists());

        // Test that we can use the logger
        tracing::subscriber::with_default(job_logger, || {
            info!(target: "test_actor", "Test log message");
        });
    }

    #[tokio::test]
    async fn test_directory_creation() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let components = vec![
            "supervisor".to_string(),
            "osis_actor".to_string(),
            "sal_actor".to_string(),
        ];

        ensure_log_directories(logs_root, &components).unwrap();

        // Verify all directories exist
        assert!(logs_root.join("supervisor").exists());
        assert!(logs_root.join("actor/osis_actor").exists());
        assert!(logs_root.join("actor/sal_actor").exists());
    }
}
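As a sketch of the startup sequence these docs imply (a hypothetical main, not part of this diff), note that the guards are bound to a variable that lives until the process exits:

fn main() -> hero_logger::Result<()> {
    let components = vec!["supervisor".to_string(), "osis_actor".to_string()];
    // Dropping these guards would stop log flushing, so hold them in main.
    let _guards = hero_logger::init_system_logger("logs", &components)?;

    tracing::info!(target: "supervisor", "System started");
    Ok(())
}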
411
core/logger/src/rhai_integration.rs
Normal file
@@ -0,0 +1,411 @@
//! Rhai Engine Integration for Logging
//!
//! This module provides integration between Rhai scripts and the tracing logging system,
//! allowing Rhai print() and debug() calls to be captured in the logging infrastructure.

use rhai::Engine;
use tracing::{info, debug, warn, error};

/// Configure a Rhai engine to capture print and debug output through tracing
///
/// This function sets up custom print and debug hooks that route Rhai script
/// output through the tracing system, allowing it to be captured by both
/// system and per-job loggers.
///
/// # Arguments
///
/// * `engine` - Mutable reference to the Rhai engine to configure
/// * `target` - Target name for tracing (e.g., "osis_actor", "sal_actor")
///
/// # Example
///
/// ```rust
/// use rhai::Engine;
/// use hero_logger::rhai_integration::configure_rhai_logging;
///
/// let mut engine = Engine::new();
/// configure_rhai_logging(&mut engine, "osis_actor");
///
/// // Now when Rhai scripts call print() or debug(), they will be logged
/// engine.eval::<()>(r#"print("Hello from Rhai!");"#).unwrap();
/// ```
pub fn configure_rhai_logging(engine: &mut Engine, target: &str) {
    // The tracing macros require `target:` to be a constant, so match on the
    // known component names and install hooks with literal targets.
    match target {
        "supervisor" => {
            engine.on_print(|text| {
                info!(target: "supervisor", "[Rhai Script] {}", text);
            });
            engine.on_debug(|text, source, pos| {
                if let Some(source) = source {
                    if pos.is_none() {
                        debug!(target: "supervisor", "[Rhai Debug] {} (from {})", text, source);
                    } else {
                        debug!(target: "supervisor", "[Rhai Debug] {} (from {} at {:?})", text, source, pos);
                    }
                } else {
                    debug!(target: "supervisor", "[Rhai Debug] {}", text);
                }
            });
        }
        "osis_actor" => {
            engine.on_print(|text| {
                info!(target: "osis_actor", "[Rhai Script] {}", text);
            });
            engine.on_debug(|text, source, pos| {
                if let Some(source) = source {
                    if pos.is_none() {
                        debug!(target: "osis_actor", "[Rhai Debug] {} (from {})", text, source);
                    } else {
                        debug!(target: "osis_actor", "[Rhai Debug] {} (from {} at {:?})", text, source, pos);
                    }
                } else {
                    debug!(target: "osis_actor", "[Rhai Debug] {}", text);
                }
            });
        }
        "sal_actor" => {
            engine.on_print(|text| {
                info!(target: "sal_actor", "[Rhai Script] {}", text);
            });
            engine.on_debug(|text, source, pos| {
                if let Some(source) = source {
                    if pos.is_none() {
                        debug!(target: "sal_actor", "[Rhai Debug] {} (from {})", text, source);
                    } else {
                        debug!(target: "sal_actor", "[Rhai Debug] {} (from {} at {:?})", text, source, pos);
                    }
                } else {
                    debug!(target: "sal_actor", "[Rhai Debug] {}", text);
                }
            });
        }
        "v_actor" => {
            engine.on_print(|text| {
                info!(target: "v_actor", "[Rhai Script] {}", text);
            });
            engine.on_debug(|text, source, pos| {
                if let Some(source) = source {
                    if pos.is_none() {
                        debug!(target: "v_actor", "[Rhai Debug] {} (from {})", text, source);
                    } else {
                        debug!(target: "v_actor", "[Rhai Debug] {} (from {} at {:?})", text, source, pos);
                    }
                } else {
                    debug!(target: "v_actor", "[Rhai Debug] {}", text);
                }
            });
        }
        "python_actor" => {
            engine.on_print(|text| {
                info!(target: "python_actor", "[Rhai Script] {}", text);
            });
            engine.on_debug(|text, source, pos| {
                if let Some(source) = source {
                    if pos.is_none() {
                        debug!(target: "python_actor", "[Rhai Debug] {} (from {})", text, source);
                    } else {
                        debug!(target: "python_actor", "[Rhai Debug] {} (from {} at {:?})", text, source, pos);
                    }
                } else {
                    debug!(target: "python_actor", "[Rhai Debug] {}", text);
                }
            });
        }
        _ => {
            // Default fallback: log under this module's own target
            engine.on_print(|text| {
                info!("[Rhai Script] {}", text);
            });
            engine.on_debug(|text, source, pos| {
                if let Some(source) = source {
                    if pos.is_none() {
                        debug!("[Rhai Debug] {} (from {})", text, source);
                    } else {
                        debug!("[Rhai Debug] {} (from {} at {:?})", text, source, pos);
                    }
                } else {
                    debug!("[Rhai Debug] {}", text);
                }
            });
        }
    }
}

/// Configure a Rhai engine with enhanced logging capabilities
///
/// This function provides more advanced logging configuration, including
/// custom log levels and structured logging support.
///
/// # Arguments
///
/// * `engine` - Mutable reference to the Rhai engine to configure
/// * `config` - Configuration for Rhai logging behavior
pub fn configure_rhai_logging_advanced(engine: &mut Engine, config: RhaiLoggingConfig) {
    // For now, use the basic configuration since tracing requires constant targets
    configure_rhai_logging(engine, &config.target);
}

/// Configuration for Rhai logging behavior
#[derive(Debug, Clone)]
pub struct RhaiLoggingConfig {
    /// Target name for tracing
    pub target: String,
    /// Log level for print() calls ("error", "warn", "info", "debug")
    pub print_level: String,
    /// Log level for debug() calls ("error", "warn", "info", "debug")
    pub debug_level: String,
    /// Whether to include source file and position information
    pub include_source_info: bool,
    /// Prefix for all Rhai log messages
    pub message_prefix: Option<String>,
}

impl Default for RhaiLoggingConfig {
    fn default() -> Self {
        Self {
            target: "rhai_script".to_string(),
            print_level: "info".to_string(),
            debug_level: "debug".to_string(),
            include_source_info: true,
            message_prefix: None,
        }
    }
}

impl RhaiLoggingConfig {
    /// Create a new configuration with the specified target
    pub fn new(target: &str) -> Self {
        Self {
            target: target.to_string(),
            ..Default::default()
        }
    }

    /// Set the log level for print() calls
    pub fn print_level(mut self, level: &str) -> Self {
        self.print_level = level.to_string();
        self
    }

    /// Set the log level for debug() calls
    pub fn debug_level(mut self, level: &str) -> Self {
        self.debug_level = level.to_string();
        self
    }

    /// Set whether to include source information
    pub fn include_source_info(mut self, include: bool) -> Self {
        self.include_source_info = include;
        self
    }

    /// Set a prefix for all log messages
    pub fn message_prefix(mut self, prefix: &str) -> Self {
        self.message_prefix = Some(prefix.to_string());
        self
    }
}

/// Add custom logging functions to a Rhai engine
///
/// This function adds custom logging functions (log_info, log_debug, log_warn, log_error)
/// that Rhai scripts can call directly for more granular logging control.
///
/// # Arguments
///
/// * `engine` - Mutable reference to the Rhai engine
/// * `target` - Target name for tracing
pub fn add_custom_logging_functions(engine: &mut Engine, target: &str) {
    // Use match to handle different targets with constant strings
    match target {
        "supervisor" => {
            engine.register_fn("log_info", |message: &str| {
                info!(target: "supervisor", "[Rhai] {}", message);
            });
            engine.register_fn("log_debug", |message: &str| {
                debug!(target: "supervisor", "[Rhai] {}", message);
            });
            engine.register_fn("log_warn", |message: &str| {
                warn!(target: "supervisor", "[Rhai] {}", message);
            });
            engine.register_fn("log_error", |message: &str| {
                error!(target: "supervisor", "[Rhai] {}", message);
            });
        }
        "osis_actor" => {
            engine.register_fn("log_info", |message: &str| {
                info!(target: "osis_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_debug", |message: &str| {
                debug!(target: "osis_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_warn", |message: &str| {
                warn!(target: "osis_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_error", |message: &str| {
                error!(target: "osis_actor", "[Rhai] {}", message);
            });
        }
        "sal_actor" => {
            engine.register_fn("log_info", |message: &str| {
                info!(target: "sal_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_debug", |message: &str| {
                debug!(target: "sal_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_warn", |message: &str| {
                warn!(target: "sal_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_error", |message: &str| {
                error!(target: "sal_actor", "[Rhai] {}", message);
            });
        }
        "v_actor" => {
            engine.register_fn("log_info", |message: &str| {
                info!(target: "v_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_debug", |message: &str| {
                debug!(target: "v_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_warn", |message: &str| {
                warn!(target: "v_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_error", |message: &str| {
                error!(target: "v_actor", "[Rhai] {}", message);
            });
        }
        "python_actor" => {
            engine.register_fn("log_info", |message: &str| {
                info!(target: "python_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_debug", |message: &str| {
                debug!(target: "python_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_warn", |message: &str| {
                warn!(target: "python_actor", "[Rhai] {}", message);
            });
            engine.register_fn("log_error", |message: &str| {
                error!(target: "python_actor", "[Rhai] {}", message);
            });
        }
        _ => {
            // Default fallback
            engine.register_fn("log_info", |message: &str| {
                info!("[Rhai] {}", message);
            });
            engine.register_fn("log_debug", |message: &str| {
                debug!("[Rhai] {}", message);
            });
            engine.register_fn("log_warn", |message: &str| {
                warn!("[Rhai] {}", message);
            });
            engine.register_fn("log_error", |message: &str| {
                error!("[Rhai] {}", message);
            });
        }
    }
}

/// Create a Rhai engine with full logging integration
///
/// This is a convenience function that creates a new Rhai engine and configures
/// it with comprehensive logging support.
///
/// # Arguments
///
/// * `target` - Target name for tracing
/// * `include_custom_functions` - Whether to include custom logging functions
///
/// # Returns
///
/// Returns a configured Rhai engine ready for use with logging
pub fn create_logging_enabled_engine(target: &str, include_custom_functions: bool) -> Engine {
    let mut engine = Engine::new();

    // Configure basic logging
    configure_rhai_logging(&mut engine, target);

    // Add custom logging functions if requested
    if include_custom_functions {
        add_custom_logging_functions(&mut engine, target);
    }

    engine
}

#[cfg(test)]
mod tests {
    use super::*;
    use tracing_test::traced_test;

    #[traced_test]
    #[test]
    fn test_configure_rhai_logging() {
        let mut engine = Engine::new();
        configure_rhai_logging(&mut engine, "test_actor");

        // Test print output
        engine.eval::<()>(r#"print("Hello from Rhai!");"#).unwrap();

        // Verify that the log was captured (tracing_test will capture it)
        // In a real test, you would check the captured logs
    }

    #[traced_test]
    #[test]
    fn test_configure_rhai_logging_advanced() {
        let mut engine = Engine::new();
        let config = RhaiLoggingConfig::new("test_actor")
            .print_level("warn")
            .debug_level("info")
            .include_source_info(false);

        configure_rhai_logging_advanced(&mut engine, config);

        // Test print and debug output
        engine.eval::<()>(r#"
            print("This is a print message");
            debug("This is a debug message");
        "#).unwrap();
    }

    #[traced_test]
    #[test]
    fn test_add_custom_logging_functions() {
        let mut engine = Engine::new();
        add_custom_logging_functions(&mut engine, "test_actor");

        // Test custom logging functions
        engine.eval::<()>(r#"
            log_info("Info message");
            log_debug("Debug message");
            log_warn("Warning message");
            log_error("Error message");
        "#).unwrap();
    }

    #[test]
    fn test_create_logging_enabled_engine() {
        let engine = create_logging_enabled_engine("test_actor", true);

        // Verify engine was created successfully
        // In a real test, you would verify the logging configuration
        assert!(engine.eval::<i64>("1 + 1").unwrap() == 2);
    }

    #[test]
    fn test_rhai_logging_config() {
        let config = RhaiLoggingConfig::new("test")
            .print_level("error")
            .debug_level("warn")
            .include_source_info(false)
            .message_prefix("TEST");

        assert_eq!(config.target, "test");
        assert_eq!(config.print_level, "error");
        assert_eq!(config.debug_level, "warn");
        assert!(!config.include_source_info);
        assert_eq!(config.message_prefix, Some("TEST".to_string()));
    }
}
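One way the pieces compose (a sketch assuming an actor pairs this engine integration with the per-job logger from job_logger.rs):

// Route a script's print()/log_*() output into an isolated per-job log.
let (subscriber, _guard) = create_job_logger_with_guard("logs", "osis", "job-7")?;
let engine = create_logging_enabled_engine("osis_actor", true);
tracing::subscriber::with_default(subscriber, || {
    engine.eval::<()>(r#"print("step 1"); log_warn("low disk");"#).unwrap();
});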
170
core/logger/src/system_logger.rs
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
//! System Logger Implementation
|
||||||
|
//!
|
||||||
|
//! This module implements the system-wide logging functionality that captures
|
||||||
|
//! all non-job-specific logs from every component with target-based filtering.
|
||||||
|
|
||||||
|
use crate::{LoggerError, Result, custom_formatter::HeroFormatter};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use tracing_subscriber::{
|
||||||
|
filter::{EnvFilter, LevelFilter},
|
||||||
|
fmt,
|
||||||
|
layer::SubscriberExt,
|
||||||
|
util::SubscriberInitExt,
|
||||||
|
Layer,
|
||||||
|
};
|
||||||
|
use tracing_appender::{non_blocking::WorkerGuard, rolling};
|
||||||
|
|
||||||
|
/// Initialize the system logger with component-based filtering
|
||||||
|
///
|
||||||
|
/// This creates multiple file appenders, each filtered by a specific tracing target:
/// - `tracing::info!(target: "supervisor", ...)` -> `logs/supervisor/`
/// - `tracing::info!(target: "osis_actor", ...)` -> `logs/actor/osis/`
/// - etc.
pub fn init_system_logger<P: AsRef<Path>>(
    logs_root: P,
    components: &[String],
) -> Result<Vec<WorkerGuard>> {
    let logs_root = logs_root.as_ref();

    // Ensure log directories exist
    crate::utils::ensure_log_directories(logs_root, components)?;

    let mut guards = Vec::new();
    let mut layers = Vec::new();

    // Create a layer for each component
    for component in components {
        let (layer, guard) = create_component_layer(logs_root, component)?;
        layers.push(layer);
        guards.push(guard);
    }

    // Create the registry and add all component layers
    let registry = tracing_subscriber::registry().with(layers);

    // Add console output for development
    let console_layer = fmt::layer()
        .with_target(true)
        .with_thread_ids(true)
        .with_file(true)
        .with_line_number(true)
        .with_filter(EnvFilter::from_default_env().add_directive(LevelFilter::INFO.into()));

    // Set as global default
    registry.with(console_layer).init();

    tracing::info!(target: "hero_logger", "System logger initialized with {} components", components.len());

    Ok(guards)
}

/// Create a filtered layer for a specific component
fn create_component_layer<P: AsRef<Path>>(
    logs_root: P,
    component: &str,
) -> Result<(Box<dyn Layer<tracing_subscriber::Registry> + Send + Sync>, WorkerGuard)> {
    let logs_root = logs_root.as_ref();

    // Determine the log directory based on component type
    let log_dir = if component == "supervisor" {
        logs_root.join("supervisor")
    } else {
        // Extract actor type from component name (e.g., "osis_actor" -> "osis")
        let actor_type = component.strip_suffix("_actor").unwrap_or(component);
        logs_root.join("actor").join(actor_type)
    };

    // Create hourly rolling file appender
    let file_appender = rolling::hourly(&log_dir, "log");
    let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);

    // Create a formatted layer with custom Hero formatter and target filtering
    let layer = fmt::layer()
        .with_writer(non_blocking)
        .event_format(HeroFormatter::new())
        .with_ansi(false) // No ANSI colors in log files
        .with_filter(
            EnvFilter::new(format!("{}=trace", component))
                .add_directive(LevelFilter::INFO.into())
        );

    Ok((layer.boxed(), guard))
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;
    use tracing::info;
    use std::time::Duration;
    use tokio::time::sleep;

    #[tokio::test]
    async fn test_system_logger_initialization() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let components = vec![
            "supervisor".to_string(),
            "osis_actor".to_string(),
            "sal_actor".to_string(),
        ];

        let _guards = init_system_logger(logs_root, &components).unwrap();

        // Test logging to different targets
        info!(target: "supervisor", "Supervisor started");
        info!(target: "osis_actor", "OSIS actor ready");
        info!(target: "sal_actor", "SAL actor ready");

        // Give some time for async writing
        sleep(Duration::from_millis(100)).await;

        // Verify directories were created
        assert!(logs_root.join("supervisor").exists());
        assert!(logs_root.join("actor/osis").exists());
        assert!(logs_root.join("actor/sal").exists());
    }

    #[tokio::test]
    async fn test_component_layer_creation() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        // Create supervisor layer
        let (_supervisor_layer, _guard1) = create_component_layer(logs_root, "supervisor").unwrap();
        assert!(logs_root.join("supervisor").exists());

        // Create actor layer
        let (_actor_layer, _guard2) = create_component_layer(logs_root, "osis_actor").unwrap();
        assert!(logs_root.join("actor/osis").exists());
    }

    #[tokio::test]
    async fn test_multiple_components() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let components = vec![
            "supervisor".to_string(),
            "osis_actor".to_string(),
            "sal_actor".to_string(),
            "v_actor".to_string(),
            "python_actor".to_string(),
        ];

        let guards = init_system_logger(logs_root, &components).unwrap();
        assert_eq!(guards.len(), components.len());

        // Test that all directories were created
        assert!(logs_root.join("supervisor").exists());
        assert!(logs_root.join("actor/osis").exists());
        assert!(logs_root.join("actor/sal").exists());
        assert!(logs_root.join("actor/v").exists());
        assert!(logs_root.join("actor/python").exists());
    }
}
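For reference, wiring this into a binary amounts to one call plus keeping the guards alive. A minimal sketch, assuming `init_system_logger` is re-exported at the root of the `hero_logger` crate and that `LoggerError` converts into `anyhow::Error` (both assumptions about the crate surface, not shown in this diff):

```rust
use std::path::Path;

fn main() -> anyhow::Result<()> {
    let components = vec!["supervisor".to_string(), "osis_actor".to_string()];

    // Keep the guards alive for the whole program: dropping a WorkerGuard
    // flushes and shuts down its non-blocking file writer.
    let _guards = hero_logger::init_system_logger(Path::new("logs"), &components)?;

    // Events are routed to files by their `target`.
    tracing::info!(target: "supervisor", "supervisor online"); // -> logs/supervisor/
    tracing::info!(target: "osis_actor", "actor ready");       // -> logs/actor/osis/
    Ok(())
}
```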
468 core/logger/src/utils.rs Normal file
@@ -0,0 +1,468 @@
//! Utility functions for the Hero Logger
//!
//! This module provides common utility functions used throughout the logging system.

use crate::{LoggerError, Result};
use std::path::{Path, PathBuf};

/// Ensure the log directory structure exists
///
/// Creates the necessary directory structure for the logging system:
/// - `logs/supervisor/`
/// - `logs/actor/osis/`
/// - `logs/actor/sal/`
/// - etc.
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `components` - List of component names
pub fn ensure_log_directories<P: AsRef<Path>>(
    logs_root: P,
    components: &[String],
) -> Result<()> {
    let logs_root = logs_root.as_ref();

    // Create the root logs directory
    std::fs::create_dir_all(logs_root)
        .map_err(|e| LoggerError::DirectoryCreation(
            format!("Failed to create logs root directory {}: {}", logs_root.display(), e)
        ))?;

    // Create directories for each component
    for component in components {
        let component_dir = get_component_log_directory(logs_root, component);
        std::fs::create_dir_all(&component_dir)
            .map_err(|e| LoggerError::DirectoryCreation(
                format!("Failed to create component directory {}: {}", component_dir.display(), e)
            ))?;

        tracing::debug!(
            target: "hero_logger",
            "Created log directory for component '{}': {}",
            component,
            component_dir.display()
        );
    }

    tracing::info!(
        target: "hero_logger",
        "Log directory structure created at: {}",
        logs_root.display()
    );

    Ok(())
}

/// Get the log directory path for a specific component
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `component` - Component name (e.g., "supervisor", "osis_actor")
///
/// # Returns
///
/// Returns the appropriate directory path:
/// - "supervisor" -> `logs/supervisor/`
/// - "osis_actor" -> `logs/actor/osis/`
/// - etc.
pub fn get_component_log_directory<P: AsRef<Path>>(
    logs_root: P,
    component: &str,
) -> PathBuf {
    let logs_root = logs_root.as_ref();

    if component == "supervisor" {
        logs_root.join("supervisor")
    } else {
        // Extract actor type from component name (e.g., "osis_actor" -> "osis")
        let actor_type = component.strip_suffix("_actor").unwrap_or(component);
        logs_root.join("actor").join(actor_type)
    }
}

/// Get the job log directory path for a specific job
///
/// # Arguments
///
/// * `logs_root` - Root directory for all log files
/// * `actor_type` - Type of actor (e.g., "osis", "sal")
/// * `job_id` - Unique job identifier
///
/// # Returns
///
/// Returns the job-specific directory path: `logs/actor/<type>/job-<job_id>/`
pub fn get_job_log_directory<P: AsRef<Path>>(
    logs_root: P,
    actor_type: &str,
    job_id: &str,
) -> PathBuf {
    logs_root
        .as_ref()
        .join("actor")
        .join(actor_type)
        .join(format!("job-{}", job_id))
}

/// Extract actor type from component name
///
/// # Arguments
///
/// * `component` - Component name (e.g., "osis_actor_1", "sal_actor")
///
/// # Returns
///
/// Returns the actor type (e.g., "osis", "sal")
pub fn extract_actor_type(component: &str) -> &str {
    // Handle patterns like "osis_actor" -> "osis"
    if let Some(actor_part) = component.strip_suffix("_actor") {
        return actor_part;
    }

    // Handle patterns like "osis_actor_1" -> "osis"
    if let Some(pos) = component.find("_actor_") {
        return &component[..pos];
    }

    // No "_actor" marker: return the component name unchanged
    component
}

/// Generate a timestamp string for log file naming
///
/// # Arguments
///
/// * `format` - Timestamp format ("hourly", "daily", or a custom chrono format string)
///
/// # Returns
///
/// Returns a formatted timestamp string
pub fn generate_timestamp(format: &str) -> String {
    let now = chrono::Utc::now();

    match format {
        "hourly" => now.format("%Y-%m-%d-%H").to_string(),
        "daily" => now.format("%Y-%m-%d").to_string(),
        custom => now.format(custom).to_string(),
    }
}

/// Clean up old log files in a directory
///
/// # Arguments
///
/// * `directory` - Directory to clean up
/// * `file_pattern` - Pattern to match files (e.g., "*.log")
/// * `max_age_days` - Maximum age in days for files to keep
pub fn cleanup_old_logs<P: AsRef<Path>>(
    directory: P,
    file_pattern: &str,
    max_age_days: u64,
) -> Result<usize> {
    let directory = directory.as_ref();

    if !directory.exists() {
        return Ok(0);
    }

    let cutoff_time = std::time::SystemTime::now()
        .checked_sub(std::time::Duration::from_secs(max_age_days * 24 * 60 * 60))
        .ok_or_else(|| LoggerError::Config("Invalid max_age_days value".to_string()))?;

    let mut removed_count = 0;

    let entries = std::fs::read_dir(directory).map_err(LoggerError::Io)?;

    for entry in entries {
        let entry = entry.map_err(LoggerError::Io)?;
        let path = entry.path();

        if path.is_file() {
            if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
                // Simple pattern matching (could be enhanced with regex)
                let matches_pattern = if file_pattern == "*" {
                    true
                } else if let Some(extension) = file_pattern.strip_prefix("*.") {
                    file_name.ends_with(extension)
                } else {
                    file_name.contains(file_pattern)
                };

                if matches_pattern {
                    if let Ok(metadata) = entry.metadata() {
                        if let Ok(modified) = metadata.modified() {
                            if modified < cutoff_time {
                                if let Err(e) = std::fs::remove_file(&path) {
                                    tracing::warn!(
                                        target: "hero_logger",
                                        "Failed to remove old log file {}: {}",
                                        path.display(),
                                        e
                                    );
                                } else {
                                    tracing::debug!(
                                        target: "hero_logger",
                                        "Removed old log file: {}",
                                        path.display()
                                    );
                                    removed_count += 1;
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    if removed_count > 0 {
        tracing::info!(
            target: "hero_logger",
            "Cleaned up {} old log files from {}",
            removed_count,
            directory.display()
        );
    }

    Ok(removed_count)
}

/// Get disk usage information for the logs directory
pub fn get_logs_disk_usage<P: AsRef<Path>>(logs_root: P) -> Result<LogsDiskUsage> {
    let logs_root = logs_root.as_ref();

    if !logs_root.exists() {
        return Ok(LogsDiskUsage {
            total_size_bytes: 0,
            file_count: 0,
            directories: Vec::new(),
        });
    }

    let mut total_size = 0u64;
    let mut file_count = 0usize;
    let mut directories = Vec::new();

    fn scan_directory(
        dir: &Path,
        total_size: &mut u64,
        file_count: &mut usize,
    ) -> Result<DirectoryUsage> {
        let mut dir_size = 0u64;
        let mut dir_file_count = 0usize;

        let entries = std::fs::read_dir(dir).map_err(LoggerError::Io)?;

        for entry in entries {
            let entry = entry.map_err(LoggerError::Io)?;
            let path = entry.path();

            if path.is_file() {
                if let Ok(metadata) = entry.metadata() {
                    let size = metadata.len();
                    dir_size += size;
                    *total_size += size;
                    dir_file_count += 1;
                    *file_count += 1;
                }
            } else if path.is_dir() {
                let sub_usage = scan_directory(&path, total_size, file_count)?;
                dir_size += sub_usage.size_bytes;
                dir_file_count += sub_usage.file_count;
            }
        }

        Ok(DirectoryUsage {
            path: dir.to_path_buf(),
            size_bytes: dir_size,
            file_count: dir_file_count,
        })
    }

    let root_usage = scan_directory(logs_root, &mut total_size, &mut file_count)?;
    directories.push(root_usage);

    Ok(LogsDiskUsage {
        total_size_bytes: total_size,
        file_count,
        directories,
    })
}

/// Information about disk usage of logs
#[derive(Debug, Clone)]
pub struct LogsDiskUsage {
    pub total_size_bytes: u64,
    pub file_count: usize,
    pub directories: Vec<DirectoryUsage>,
}

/// Information about disk usage of a specific directory
#[derive(Debug, Clone)]
pub struct DirectoryUsage {
    pub path: PathBuf,
    pub size_bytes: u64,
    pub file_count: usize,
}

impl LogsDiskUsage {
    /// Get total size in human-readable format
    pub fn total_size_human(&self) -> String {
        format_bytes(self.total_size_bytes)
    }
}

impl DirectoryUsage {
    /// Get size in human-readable format
    pub fn size_human(&self) -> String {
        format_bytes(self.size_bytes)
    }
}

/// Format bytes in human-readable format
fn format_bytes(bytes: u64) -> String {
    const UNITS: &[&str] = &["B", "KB", "MB", "GB", "TB"];
    let mut size = bytes as f64;
    let mut unit_index = 0;

    while size >= 1024.0 && unit_index < UNITS.len() - 1 {
        size /= 1024.0;
        unit_index += 1;
    }

    if unit_index == 0 {
        format!("{} {}", bytes, UNITS[unit_index])
    } else {
        format!("{:.2} {}", size, UNITS[unit_index])
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_ensure_log_directories() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        let components = vec![
            "supervisor".to_string(),
            "osis_actor".to_string(),
            "sal_actor".to_string(),
        ];

        ensure_log_directories(logs_root, &components).unwrap();

        assert!(logs_root.join("supervisor").exists());
        assert!(logs_root.join("actor/osis").exists());
        assert!(logs_root.join("actor/sal").exists());
    }

    #[test]
    fn test_get_component_log_directory() {
        let logs_root = Path::new("/logs");

        assert_eq!(
            get_component_log_directory(logs_root, "supervisor"),
            logs_root.join("supervisor")
        );

        assert_eq!(
            get_component_log_directory(logs_root, "osis_actor"),
            logs_root.join("actor/osis")
        );

        // "sal_actor_1" has no "_actor" suffix, so it is used as-is
        assert_eq!(
            get_component_log_directory(logs_root, "sal_actor_1"),
            logs_root.join("actor/sal_actor_1")
        );
    }

    #[test]
    fn test_get_job_log_directory() {
        let logs_root = Path::new("/logs");

        // The "job-" prefix is always added, even if the ID already carries one
        assert_eq!(
            get_job_log_directory(logs_root, "osis", "job-123"),
            logs_root.join("actor/osis/job-job-123")
        );
    }

    #[test]
    fn test_extract_actor_type() {
        assert_eq!(extract_actor_type("osis_actor"), "osis");
        assert_eq!(extract_actor_type("sal_actor_1"), "sal");
        assert_eq!(extract_actor_type("python_actor"), "python");
        assert_eq!(extract_actor_type("supervisor"), "supervisor");
        assert_eq!(extract_actor_type("custom"), "custom");
    }

    #[test]
    fn test_generate_timestamp() {
        let hourly = generate_timestamp("hourly");
        let daily = generate_timestamp("daily");

        // Basic format validation
        assert!(hourly.len() >= 13); // YYYY-MM-DD-HH
        assert!(daily.len() >= 10); // YYYY-MM-DD

        // Custom format
        let custom = generate_timestamp("%Y%m%d");
        assert_eq!(custom.len(), 8); // YYYYMMDD
    }

    #[test]
    fn test_cleanup_old_logs() {
        let temp_dir = TempDir::new().unwrap();
        let logs_dir = temp_dir.path();

        // Create some test log files
        for i in 0..5 {
            let file_path = logs_dir.join(format!("test{}.log", i));
            std::fs::write(&file_path, "test content").unwrap();
        }

        // Create a non-log file
        std::fs::write(logs_dir.join("not_a_log.txt"), "content").unwrap();

        // Cleanup with 0 days (should remove all matching files)
        let removed = cleanup_old_logs(logs_dir, "*.log", 0).unwrap();
        assert_eq!(removed, 5);

        // Verify non-log file still exists
        assert!(logs_dir.join("not_a_log.txt").exists());
    }

    #[test]
    fn test_format_bytes() {
        assert_eq!(format_bytes(0), "0 B");
        assert_eq!(format_bytes(1023), "1023 B");
        assert_eq!(format_bytes(1024), "1.00 KB");
        assert_eq!(format_bytes(1024 * 1024), "1.00 MB");
        assert_eq!(format_bytes(1024 * 1024 * 1024), "1.00 GB");
    }

    #[test]
    fn test_get_logs_disk_usage() {
        let temp_dir = TempDir::new().unwrap();
        let logs_root = temp_dir.path();

        // Create some test files
        std::fs::create_dir_all(logs_root.join("supervisor")).unwrap();
        std::fs::write(logs_root.join("supervisor/test.log"), "test content").unwrap();

        let usage = get_logs_disk_usage(logs_root).unwrap();
        assert!(usage.total_size_bytes > 0);
        assert!(usage.file_count > 0);
        assert!(!usage.directories.is_empty());
    }
}
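Taken together, `cleanup_old_logs` and `get_logs_disk_usage` support basic log housekeeping. A minimal sketch of a periodic maintenance pass; the seven-day retention, the `hero_logger::utils` re-export path, and the `hero_logger::Result` alias are assumptions for illustration:

```rust
use std::path::Path;

fn housekeeping(logs_root: &Path) -> hero_logger::Result<()> {
    // Drop *.log files older than seven days (retention period is illustrative).
    let removed = hero_logger::utils::cleanup_old_logs(logs_root, "*.log", 7)?;

    // Report what remains on disk.
    let usage = hero_logger::utils::get_logs_disk_usage(logs_root)?;
    println!(
        "removed {} old files; {} files remain ({})",
        removed,
        usage.file_count,
        usage.total_size_human()
    );
    Ok(())
}
```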
@@ -3,24 +3,15 @@ name = "hero_supervisor"
 version = "0.1.0"
 edition = "2021"
 
-[[bin]]
-name = "supervisor-cli"
-path = "cmd/supervisor_cli.rs"
-
-[[bin]]
-name = "supervisor-tui"
-path = "cmd/supervisor_tui.rs"
-
 [dependencies]
 clap = { version = "4.4", features = ["derive"] }
-env_logger = "0.10"
 redis = { version = "0.25.0", features = ["tokio-comp"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 toml = "0.8"
 uuid = { version = "1.6", features = ["v4", "serde"] }
 chrono = { version = "0.4", features = ["serde"] }
-log = "0.4"
+tracing = "0.1"
 tokio = { version = "1", features = ["macros", "rt-multi-thread"] } # For async main in examples, and general async
 colored = "2.0"
 hero_job = { path = "../job" }
@@ -30,5 +21,5 @@ crossterm = "0.28"
 anyhow = "1.0"
 
 [dev-dependencies] # For examples later
-env_logger = "0.10"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 rhai = "1.18.0" # For examples that might need to show engine setup
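The dependency swap above (`log` and `env_logger` out, `tracing` and `tracing-subscriber` in) implies a matching change in binary setup. A minimal sketch of the replacement initialization, assuming the `env-filter` feature enabled in this diff; the fallback level is illustrative:

```rust
use tracing::{info, warn};
use tracing_subscriber::EnvFilter;

fn main() {
    // Replaces env_logger::init(): the level is driven by RUST_LOG,
    // falling back to `info` when the variable is unset.
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
        )
        .init();

    // The macro names are unchanged, so most `log::` call sites only
    // need their import switched to `tracing::`.
    info!("supervisor starting");
    warn!("this event is routed through tracing now");
}
```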
@@ -1,117 +0,0 @@
# Supervisor CLI

Interactive command-line interface for the Hero Supervisor that allows you to dispatch jobs to actors and manage the job lifecycle.

## Features

- **Interactive Menu**: Easy-to-use menu system for all supervisor operations
- **Job Management**: Create, run, monitor, and manage jobs
- **OSIS Actor Integration**: Dispatch Rhai scripts to the OSIS actor
- **Real-time Results**: Get immediate feedback from job execution
- **Colorized Output**: Clear visual feedback with colored status indicators

## Usage

### 1. Build the OSIS Actor

First, ensure the OSIS actor is built:

```bash
cd /Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis
cargo build
```

### 2. Configure the Supervisor

Create or use the example configuration file at `examples/cli_config.toml`:

```toml
[global]
redis_url = "redis://127.0.0.1/"

[actors]
osis_actor = "/Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/target/debug/actor_osis"
```

### 3. Run the CLI

```bash
cd /Users/timurgordon/code/git.ourworld.tf/herocode/baobab/core/supervisor
cargo run --bin supervisor-cli -- --config examples/cli_config.toml
```

Or with verbose logging:

```bash
cargo run --bin supervisor-cli -- --config examples/cli_config.toml --verbose
```

## Available Commands

1. **list_jobs** - List all jobs in the system
2. **run_job** - Create and run a new job interactively
3. **get_job_status** - Get status of a specific job
4. **get_job_output** - Get output of a completed job
5. **get_job_logs** - Get logs for a specific job
6. **stop_job** - Stop a running job
7. **delete_job** - Delete a specific job
8. **clear_all_jobs** - Clear all jobs from the system
9. **quit** - Exit the CLI

## Example Workflow

1. Start the CLI with your configuration
2. Select option `2` (run_job)
3. Enter job details:
   - **Caller**: Your name or identifier
   - **Context**: Description of what the job does
   - **Script**: Rhai script to execute (end with empty line)
4. The job is automatically dispatched to the OSIS actor
5. View the real-time result

### Example Rhai Script

```rhai
// Simple calculation
let result = 10 + 20 * 3;
print("Calculation result: " + result);
result
```

```rhai
// Working with strings
let message = "Hello from OSIS Actor!";
print(message);
message.to_upper()
```

## Job Status Colors

- **Created** - Cyan
- **Dispatched** - Blue
- **Started** - Yellow
- **Finished** - Green
- **Error** - Red

## Prerequisites

- Redis server running on localhost:6379 (or configured URL)
- OSIS actor binary built and accessible
- Proper permissions to start/stop processes via Zinit

## Troubleshooting

### Actor Not Starting
- Verify the OSIS actor binary path in the TOML config
- Check that the binary exists and is executable
- Ensure Redis is running and accessible

### Connection Issues
- Verify the Redis URL in the configuration
- Check network connectivity to the Redis server
- Ensure no firewall is blocking connections

### Job Execution Failures
- Check job logs using the `get_job_logs` command
- Verify the Rhai script syntax
- Check actor logs for detailed error information
@@ -1,178 +0,0 @@
# Supervisor Terminal UI (TUI)

A modern, interactive Terminal User Interface for the Hero Supervisor that provides intuitive job management with real-time updates and visual navigation.

## Features

### 🎯 **Intuitive Interface**
- **Split-pane Layout**: Job list on the left, details on the right
- **Real-time Updates**: Auto-refreshes every 2 seconds
- **Color-coded Status**: Visual job status indicators
- **Keyboard Navigation**: Vim-style and arrow key support

### 📋 **Job Management**
- **Create Jobs**: Interactive form with tab navigation
- **Monitor Jobs**: Real-time status updates with color coding
- **View Details**: Detailed job information and output
- **View Logs**: Access job execution logs
- **Stop/Delete**: Job lifecycle management
- **Bulk Operations**: Clear all jobs with confirmation

### 🎨 **Visual Design**
- **Status Colors**:
  - 🔵 **Blue**: Dispatched
  - 🟡 **Yellow**: Started
  - 🟢 **Green**: Finished
  - 🔴 **Red**: Error
  - 🟣 **Magenta**: Waiting for Prerequisites
- **Highlighted Selection**: Clear visual feedback
- **Popup Messages**: Status and error notifications
- **Confirmation Dialogs**: Safe bulk operations

## Usage

### 1. Start the TUI

```bash
cd /Users/timurgordon/code/git.ourworld.tf/herocode/baobab/core/supervisor
cargo run --bin supervisor-tui -- --config examples/cli_config.toml
```

### 2. Navigation

#### Main View
- **↑/↓ or j/k**: Navigate job list
- **Enter/Space**: View job details
- **n/c**: Create new job
- **r**: Manual refresh
- **d**: Delete selected job (with confirmation)
- **s**: Stop selected job
- **C**: Clear all jobs (with confirmation)
- **q**: Quit application

#### Job Creation Form
- **Tab**: Next field
- **Shift+Tab**: Previous field
- **Enter**: Next field (or newline in script field)
- **F5**: Submit job
- **Esc**: Cancel and return to main view

#### Job Details/Logs View
- **Esc/q**: Return to main view
- **l**: Switch to logs view
- **d**: Switch to details view

## Interface Layout

```
┌─────────────────────────────────────────────────────────────┐
│ Hero Supervisor TUI - Job Management                        │
├─────────────────────┬───────────────────────────────────────┤
│ Jobs                │ Job Details                           │
│                     │                                       │
│ >> 1a2b3c4d - ✅ Fi │ Job ID: 1a2b3c4d5e6f7g8h              │
│    2b3c4d5e - 🟡 St │ Status: Finished                      │
│    3c4d5e6f - 🔴 Er │                                       │
│    4d5e6f7g - 🔵 Di │ Output:                               │
│                     │ Calculation result: 70                │
│                     │ 70                                    │
├─────────────────────┴───────────────────────────────────────┤
│ q: Quit | n: New Job | ↑↓: Navigate | Enter: Details        │
└─────────────────────────────────────────────────────────────┘
```

## Job Creation Workflow

1. **Press 'n'** to create a new job
2. **Fill in the form**:
   - **Caller**: Your name or identifier
   - **Context**: Job description
   - **Script**: Rhai script (supports multi-line)
3. **Press F5** to submit
4. **Watch real-time execution** in the main view

### Example Rhai Scripts

```rhai
// Simple calculation
let result = 10 + 20 * 3;
print("Calculation result: " + result);
result
```

```rhai
// String manipulation
let message = "Hello from OSIS Actor!";
print(message);
message.to_upper()
```

```rhai
// Loop example
let sum = 0;
for i in 1..=10 {
    sum += i;
}
print("Sum of 1-10: " + sum);
sum
```

## Key Improvements over CLI

### ✅ **Better UX**
- **Visual Navigation**: No need to remember numbers
- **Real-time Updates**: See job progress immediately
- **Split-pane Design**: View list and details simultaneously
- **Form Validation**: Clear error messages

### ✅ **Enhanced Productivity**
- **Auto-refresh**: Always up-to-date information
- **Keyboard Shortcuts**: Fast navigation and actions
- **Confirmation Dialogs**: Prevent accidental operations
- **Multi-line Script Input**: Better script editing

### ✅ **Professional Interface**
- **Color-coded Status**: Quick visual assessment
- **Consistent Layout**: Predictable interface elements
- **Popup Notifications**: Non-intrusive feedback
- **Graceful Error Handling**: User-friendly error messages

## Prerequisites

- Redis server running (default: localhost:6379)
- OSIS actor binary built and configured
- Terminal with color support
- Minimum terminal size: 80x24

## Troubleshooting

### Display Issues
- Ensure the terminal supports colors and Unicode
- Resize the terminal if the layout appears broken
- Use a modern terminal emulator (iTerm2, Alacritty, etc.)

### Performance
- The TUI auto-refreshes every 2 seconds
- Large job lists may impact performance
- Use 'r' for manual refresh if needed

### Navigation Issues
- Use arrow keys if vim keys (j/k) don't work
- Ensure the terminal is in focus
- Try Esc to reset state if stuck

## Advanced Features

### Bulk Operations
- **Clear All Jobs**: Press 'C' with confirmation
- **Safe Deletion**: Confirmation required for destructive operations

### Real-time Monitoring
- **Auto-refresh**: Updates every 2 seconds
- **Status Tracking**: Watch job progression
- **Immediate Feedback**: See results as they complete

### Multi-line Scripts
- **Rich Text Input**: Full script editing in TUI
- **Syntax Awareness**: Better than single-line CLI input
- **Preview**: See script before submission
@@ -1,398 +0,0 @@
use clap::Parser;
use colored::*;
use hero_supervisor::{Supervisor, SupervisorBuilder, SupervisorError, Job, JobStatus, ScriptType};
use log::{error, info};
use std::io::{self, Write};
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tokio::time::sleep;

#[derive(Parser)]
#[command(name = "supervisor-cli")]
#[command(about = "Interactive CLI for Hero Supervisor - Dispatch jobs to actors")]
struct Args {
    /// Path to TOML configuration file
    #[arg(short, long)]
    config: PathBuf,

    /// Enable verbose logging
    #[arg(short, long)]
    verbose: bool,
}

#[derive(Debug, Clone)]
enum CliCommand {
    ListJobs,
    RunJob,
    GetJobStatus,
    GetJobOutput,
    GetJobLogs,
    StopJob,
    DeleteJob,
    ClearAllJobs,
    Quit,
}

impl CliCommand {
    fn all_commands() -> Vec<(CliCommand, &'static str, &'static str)> {
        vec![
            (CliCommand::ListJobs, "list_jobs", "List all jobs in the system"),
            (CliCommand::RunJob, "run_job", "Create and run a new job"),
            (CliCommand::GetJobStatus, "get_job_status", "Get status of a specific job"),
            (CliCommand::GetJobOutput, "get_job_output", "Get output of a completed job"),
            (CliCommand::GetJobLogs, "get_job_logs", "Get logs for a specific job"),
            (CliCommand::StopJob, "stop_job", "Stop a running job"),
            (CliCommand::DeleteJob, "delete_job", "Delete a specific job"),
            (CliCommand::ClearAllJobs, "clear_all_jobs", "Clear all jobs from the system"),
            (CliCommand::Quit, "quit", "Exit the CLI"),
        ]
    }

    fn from_index(index: usize) -> Option<CliCommand> {
        Self::all_commands().get(index).map(|(cmd, _, _)| cmd.clone())
    }
}

struct SupervisorCli {
    supervisor: Arc<Supervisor>,
}

impl SupervisorCli {
    fn new(supervisor: Arc<Supervisor>) -> Self {
        Self { supervisor }
    }

    async fn run(&self) -> Result<(), SupervisorError> {
        println!("{}", "=== Hero Supervisor CLI ===".bright_blue().bold());
        println!("{}", "Interactive job management interface".cyan());
        println!();

        loop {
            self.display_menu();

            match self.get_user_choice().await {
                Some(command) => {
                    match command {
                        CliCommand::Quit => {
                            println!("{}", "Goodbye!".bright_green());
                            break;
                        }
                        _ => {
                            if let Err(e) = self.execute_command(command).await {
                                eprintln!("{} {}", "Error:".bright_red(), e);
                            }
                        }
                    }
                }
                None => {
                    println!("{}", "Invalid selection. Please try again.".yellow());
                }
            }

            println!();
        }

        Ok(())
    }

    fn display_menu(&self) {
        println!("{}", "Available Commands:".bright_yellow().bold());
        for (index, (_, name, description)) in CliCommand::all_commands().iter().enumerate() {
            println!("  {}. {} - {}",
                (index + 1).to_string().bright_white().bold(),
                name.bright_cyan(),
                description
            );
        }
        print!("\n{} ", "Select a command (1-9):".bright_white());
        io::stdout().flush().unwrap();
    }

    async fn get_user_choice(&self) -> Option<CliCommand> {
        let mut input = String::new();
        if io::stdin().read_line(&mut input).is_ok() {
            if let Ok(choice) = input.trim().parse::<usize>() {
                if choice > 0 {
                    return CliCommand::from_index(choice - 1);
                }
            }
        }
        None
    }

    async fn execute_command(&self, command: CliCommand) -> Result<(), SupervisorError> {
        match command {
            CliCommand::ListJobs => self.list_jobs().await,
            CliCommand::RunJob => self.run_job().await,
            CliCommand::GetJobStatus => self.get_job_status().await,
            CliCommand::GetJobOutput => self.get_job_output().await,
            CliCommand::GetJobLogs => self.get_job_logs().await,
            CliCommand::StopJob => self.stop_job().await,
            CliCommand::DeleteJob => self.delete_job().await,
            CliCommand::ClearAllJobs => self.clear_all_jobs().await,
            CliCommand::Quit => Ok(()),
        }
    }

    async fn list_jobs(&self) -> Result<(), SupervisorError> {
        println!("{}", "Listing all jobs...".bright_blue());

        let jobs = self.supervisor.list_jobs().await?;

        if jobs.is_empty() {
            println!("{}", "No jobs found.".yellow());
        } else {
            println!("{} jobs found:", jobs.len().to_string().bright_white().bold());
            for job_id in jobs {
                let status = self.supervisor.get_job_status(&job_id).await?;
                let status_color = match status {
                    JobStatus::Dispatched => "blue",
                    JobStatus::Started => "yellow",
                    JobStatus::Finished => "green",
                    JobStatus::Error => "red",
                    JobStatus::WaitingForPrerequisites => "magenta",
                };

                println!("  {} - {}",
                    job_id.bright_white(),
                    format!("{:?}", status).color(status_color)
                );
            }
        }

        Ok(())
    }

    async fn run_job(&self) -> Result<(), SupervisorError> {
        println!("{}", "Creating a new job...".bright_blue());

        // Get caller
        print!("Enter caller name: ");
        io::stdout().flush().unwrap();
        let mut caller = String::new();
        io::stdin().read_line(&mut caller).unwrap();
        let caller = caller.trim().to_string();

        // Get context
        print!("Enter job context: ");
        io::stdout().flush().unwrap();
        let mut context = String::new();
        io::stdin().read_line(&mut context).unwrap();
        let context = context.trim().to_string();

        // Get script
        println!("Enter Rhai script (end with empty line):");
        let mut script_lines = Vec::new();
        loop {
            let mut line = String::new();
            io::stdin().read_line(&mut line).unwrap();
            let line = line.trim_end_matches('\n');
            if line.is_empty() {
                break;
            }
            script_lines.push(line.to_string());
        }
        let script = script_lines.join("\n");

        if script.is_empty() {
            println!("{}", "Script cannot be empty!".bright_red());
            return Ok(());
        }

        // For now, default to OSIS actor (ScriptType::OSIS)
        let script_type = ScriptType::OSIS;

        // Create the job
        let job = Job::new(caller, context, script, script_type);

        println!("{} Job ID: {}",
            "Created job with".bright_green(),
            job.id.bright_white().bold()
        );

        // Run the job and await result
        println!("{}", "Dispatching job and waiting for result...".bright_blue());

        match self.supervisor.run_job_and_await_result(&job).await {
            Ok(result) => {
                println!("{}", "Job completed successfully!".bright_green().bold());
                println!("{} {}", "Result:".bright_yellow(), result.bright_white());
            }
            Err(e) => {
                println!("{} {}", "Job failed:".bright_red().bold(), e);
            }
        }

        Ok(())
    }

    async fn get_job_status(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to check status: ")?;

        let status = self.supervisor.get_job_status(&job_id).await?;
        let status_color = match status {
            JobStatus::Dispatched => "blue",
            JobStatus::Started => "yellow",
            JobStatus::Finished => "green",
            JobStatus::Error => "red",
            JobStatus::WaitingForPrerequisites => "magenta",
        };

        println!("{} {} - {}",
            "Job".bright_white(),
            job_id.bright_white().bold(),
            format!("{:?}", status).color(status_color).bold()
        );

        Ok(())
    }

    async fn get_job_output(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to get output: ")?;

        match self.supervisor.get_job_output(&job_id).await? {
            Some(output) => {
                println!("{}", "Job Output:".bright_yellow().bold());
                println!("{}", output.bright_white());
            }
            None => {
                println!("{}", "No output available for this job.".yellow());
            }
        }

        Ok(())
    }

    async fn get_job_logs(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to get logs: ")?;

        match self.supervisor.get_job_logs(&job_id).await? {
            Some(logs) => {
                println!("{}", "Job Logs:".bright_yellow().bold());
                println!("{}", logs.bright_white());
            }
            None => {
                println!("{}", "No logs available for this job.".yellow());
            }
        }

        Ok(())
    }

    async fn stop_job(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to stop: ")?;

        self.supervisor.stop_job(&job_id).await?;
        println!("{} {}",
            "Stop signal sent for job".bright_green(),
            job_id.bright_white().bold()
        );

        Ok(())
    }

    async fn delete_job(&self) -> Result<(), SupervisorError> {
        let job_id = self.prompt_for_job_id("Enter job ID to delete: ")?;

        self.supervisor.delete_job(&job_id).await?;
        println!("{} {}",
            "Deleted job".bright_green(),
            job_id.bright_white().bold()
        );

        Ok(())
    }

    async fn clear_all_jobs(&self) -> Result<(), SupervisorError> {
        print!("Are you sure you want to clear ALL jobs? (y/N): ");
        io::stdout().flush().unwrap();

        let mut confirmation = String::new();
        io::stdin().read_line(&mut confirmation).unwrap();

        if confirmation.trim().to_lowercase() == "y" {
            let count = self.supervisor.clear_all_jobs().await?;
            println!("{} {} jobs",
                "Cleared".bright_green().bold(),
                count.to_string().bright_white().bold()
            );
        } else {
            println!("{}", "Operation cancelled.".yellow());
        }

        Ok(())
    }

    fn prompt_for_job_id(&self, prompt: &str) -> Result<String, SupervisorError> {
        print!("{}", prompt);
        io::stdout().flush().unwrap();

        let mut job_id = String::new();
        io::stdin().read_line(&mut job_id).unwrap();
        let job_id = job_id.trim().to_string();

        if job_id.is_empty() {
            return Err(SupervisorError::ConfigError("Job ID cannot be empty".to_string()));
        }

        Ok(job_id)
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = Args::parse();

    // Setup logging
    if args.verbose {
        env_logger::Builder::from_default_env()
            .filter_level(log::LevelFilter::Debug)
            .init();
    } else {
        env_logger::Builder::from_default_env()
            .filter_level(log::LevelFilter::Info)
            .init();
    }

    info!("Starting Supervisor CLI with config: {:?}", args.config);

    // Build supervisor from TOML config
    let supervisor = Arc::new(
        SupervisorBuilder::from_toml(&args.config)?
            .build().await?
    );

    println!("{}", "Starting actors...".bright_blue());

    // Start the actors
    supervisor.start_actors().await?;

    // Give actors time to start up
    sleep(Duration::from_secs(2)).await;

    println!("{}", "Actors started successfully!".bright_green());
    println!();

    // Create and run the CLI
    let cli = SupervisorCli::new(supervisor.clone());

    // Setup cleanup on exit
    let supervisor_cleanup = supervisor.clone();
    tokio::spawn(async move {
        tokio::signal::ctrl_c().await.expect("Failed to listen for ctrl+c");
        println!("\n{}", "Shutting down...".bright_yellow());
        if let Err(e) = supervisor_cleanup.cleanup_and_shutdown().await {
            eprintln!("Error during cleanup: {}", e);
        }
        std::process::exit(0);
    });

    // Run the interactive CLI
    cli.run().await?;

    // Cleanup on normal exit
    supervisor.cleanup_and_shutdown().await?;

    Ok(())
}
File diff suppressed because it is too large
@@ -45,27 +45,13 @@ Jobs can have dependencies on other jobs, which are stored in the `dependencies`
 
 ### Work Queues
 
-Jobs are queued for execution using Redis lists with the following naming convention:
+Jobs are queued for execution using Redis lists:
 ```
-hero:job:actor_queue:{script_type_suffix}
+hero:work_queue:{actor_id}
 ```
 
-Where `{script_type_suffix}` corresponds to the script type:
-- `osis` for OSIS actors (Rhai/HeroScript execution)
-- `sal` for SAL actors (System Abstraction Layer)
-- `v` for V actors (V language execution)
-- `python` for Python actors
-
-**Examples:**
-- OSIS actor queue: `hero:job:actor_queue:osis`
-- SAL actor queue: `hero:job:actor_queue:sal`
-- V actor queue: `hero:job:actor_queue:v`
-- Python actor queue: `hero:job:actor_queue:python`
-
 Actors listen on their specific queue using `BLPOP` for job IDs to process.
 
-**Important:** Actors must use the same queue naming convention in their `actor_id()` method to ensure proper job dispatch. The actor should return `"actor_queue:{script_type_suffix}"` as its actor ID.
-
 ### Stop Queues
 
 Job stop requests are sent through dedicated stop queues:
@@ -77,26 +63,12 @@ Actors monitor these queues to receive stop requests for running jobs.
 
 ### Reply Queues
 
-Reply queues are used for responses to specific requests:
-
-- `hero:reply:{request_id}`: Response to a specific request
+For synchronous job execution, dedicated reply queues are used:
+```
+hero:reply:{job_id}
+```
+Actors send results to these queues when jobs complete.
-
-### Result and Error Queues
-
-When actors process jobs, they store results and errors in two places:
-
-1. **Job Hash Storage**: Results are stored in the job hash fields:
-   - `hero:job:{job_id}` hash with `output` field for results
-   - `hero:job:{job_id}` hash with `error` field for errors
-
-2. **Dedicated Queues**: Results and errors are also pushed to dedicated queues for asynchronous retrieval:
-   - `hero:job:{job_id}:result`: Queue containing job result (use `LPOP` to retrieve)
-   - `hero:job:{job_id}:error`: Queue containing job error (use `LPOP` to retrieve)
-
-This dual storage approach allows clients to:
-- Access results/errors directly from job hash for immediate retrieval
-- Listen on result/error queues for asynchronous notification of job completion
-- Use `BLPOP` on result/error queues for blocking waits on job completion
 
 ## Job Lifecycle
 
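The renamed queues are easy to exercise directly. A minimal sketch against the `redis` crate, assuming version 0.25 (as pinned in the lockfile), where blocking timeouts are `f64`; the actor id `osis`, the job id `job-123`, and the timeouts are placeholders:

```rust
use redis::AsyncCommands;

#[tokio::main]
async fn main() -> redis::RedisResult<()> {
    let client = redis::Client::open("redis://127.0.0.1/")?;
    let mut conn = client.get_multiplexed_async_connection().await?;

    // Dispatcher side: enqueue a job ID on the actor's work queue
    // (RPUSH + BLPOP gives FIFO ordering).
    conn.rpush::<_, _, ()>("hero:work_queue:osis", "job-123").await?;

    // Actor side: block (up to 5s) waiting for the next job ID.
    let job: Option<(String, String)> = conn.blpop("hero:work_queue:osis", 5.0).await?;
    println!("dequeued: {:?}", job);

    // Synchronous caller: block on the job's reply queue for the result.
    let reply: Option<(String, String)> = conn.blpop("hero:reply:job-123", 5.0).await?;
    println!("reply: {:?}", reply);

    Ok(())
}
```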
@@ -1,20 +0,0 @@
# Hero Supervisor CLI Configuration
# This configuration sets up the supervisor with an OSIS actor for job processing

[global]
redis_url = "redis://127.0.0.1/"

[actors]
# OSIS Actor configuration - handles Object Storage and Indexing System jobs
osis_actor = "/Users/timurgordon/code/git.ourworld.tf/herocode/actor_osis/target/debug/actor_osis"

# Optional: Other actors can be configured here
# sal_actor = "/path/to/sal_actor"
# v_actor = "/path/to/v_actor"
# python_actor = "/path/to/python_actor"

# Optional: WebSocket server configuration for remote API access
# [websocket]
# host = "127.0.0.1"
# port = 8443
# redis_url = "redis://127.0.0.1/"
@@ -1,4 +1,4 @@
-use log::{debug, error, info, warn};
+use tracing::{debug, error, info, warn};
 use redis::AsyncCommands;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
@@ -3,7 +3,7 @@
 //! This module provides actor process lifecycle management using Zinit as the process manager.
 //! All functionality is implemented as methods on the Supervisor struct for a clean API.
 
-use log::{debug, error, info, warn};
+use tracing::{debug, error, info, warn};
 use serde_json::json;
 use std::collections::HashMap;
 use std::path::PathBuf;
@@ -1,42 +0,0 @@
[package]
name = "hero-openrpc-client"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "hero-openrpc-client"
path = "cmd/main.rs"

[dependencies]
# Core dependencies
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
thiserror = "1.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
clap = { version = "4.0", features = ["derive"] }

# JSON-RPC dependencies
jsonrpsee = { version = "0.21", features = [
    "client",
    "macros"
] }
async-trait = "0.1"

# Hero dependencies
hero_job = { path = "../../../core/job" }

# Authentication and crypto
secp256k1 = { version = "0.28", features = ["rand", "recovery"] }
hex = "0.4"
sha2 = "0.10"
rand = "0.8"

# CLI utilities
dialoguer = "0.11"
colored = "2.0"

# Async utilities
futures = "0.3"
@@ -1,472 +0,0 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use colored::*;
use dialoguer::{Input, Select, Confirm, MultiSelect};
use hero_job::ScriptType;
use hero_openrpc_client::{
    AuthHelper, ClientTransport, HeroOpenRpcClient, JobParams,
};
use std::path::PathBuf;
use tracing::{error, info, Level};
use tracing_subscriber;

#[derive(Parser)]
#[command(name = "hero-openrpc-client")]
#[command(about = "Hero OpenRPC Client - Interactive JSON-RPC client")]
struct Cli {
    #[command(subcommand)]
    command: Commands,

    /// Private key for authentication (hex format)
    #[arg(long)]
    private_key: Option<String>,

    /// Generate a new private key and exit
    #[arg(long)]
    generate_key: bool,

    /// Log level
    #[arg(long, default_value = "info")]
    log_level: String,
}

#[derive(Subcommand)]
enum Commands {
    /// Connect to WebSocket server
    Websocket {
        /// Server URL
        #[arg(long, default_value = "ws://127.0.0.1:9944")]
        url: String,
    },
    /// Connect to Unix socket server
    Unix {
        /// Unix socket path
        #[arg(long, default_value = "/tmp/hero-openrpc.sock")]
        socket_path: PathBuf,
    },
}

/// Available RPC methods with descriptions
#[derive(Debug, Clone)]
struct RpcMethod {
    name: &'static str,
    description: &'static str,
    requires_auth: bool,
}

const RPC_METHODS: &[RpcMethod] = &[
    RpcMethod {
        name: "fetch_nonce",
        description: "Fetch a nonce for authentication",
        requires_auth: false,
    },
    RpcMethod {
        name: "authenticate",
        description: "Authenticate with public key and signature",
        requires_auth: false,
    },
    RpcMethod {
        name: "whoami",
        description: "Get authentication status and user information",
        requires_auth: true,
    },
    RpcMethod {
        name: "play",
        description: "Execute a Rhai script immediately",
        requires_auth: true,
    },
    RpcMethod {
        name: "create_job",
        description: "Create a new job without starting it",
        requires_auth: true,
    },
    RpcMethod {
        name: "start_job",
        description: "Start a previously created job",
        requires_auth: true,
    },
    RpcMethod {
        name: "run_job",
        description: "Create and run a job, returning result when complete",
        requires_auth: true,
    },
    RpcMethod {
        name: "get_job_status",
        description: "Get the current status of a job",
        requires_auth: true,
    },
    RpcMethod {
        name: "get_job_output",
        description: "Get the output of a completed job",
        requires_auth: true,
    },
    RpcMethod {
        name: "get_job_logs",
        description: "Get the logs of a job",
        requires_auth: true,
    },
    RpcMethod {
        name: "list_jobs",
        description: "List all jobs in the system",
        requires_auth: true,
    },
    RpcMethod {
        name: "stop_job",
        description: "Stop a running job",
        requires_auth: true,
    },
    RpcMethod {
        name: "delete_job",
        description: "Delete a job from the system",
        requires_auth: true,
    },
    RpcMethod {
        name: "clear_all_jobs",
        description: "Clear all jobs from the system",
        requires_auth: true,
    },
];

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Initialize tracing
    let log_level = match cli.log_level.to_lowercase().as_str() {
        "trace" => Level::TRACE,
        "debug" => Level::DEBUG,
        "info" => Level::INFO,
        "warn" => Level::WARN,
        "error" => Level::ERROR,
        _ => Level::INFO,
    };

    tracing_subscriber::fmt()
        .with_max_level(log_level)
        .init();

    // Handle key generation
    if cli.generate_key {
        let auth_helper = AuthHelper::generate()?;
        println!("{}", "Generated new private key:".green().bold());
        println!("Private Key: {}", auth_helper.private_key_hex().yellow());
        println!("Public Key: {}", auth_helper.public_key_hex().cyan());
        println!();
        println!("{}", "Save the private key securely and use it with --private-key".bright_yellow());
        return Ok(());
    }

    let transport = match cli.command {
        Commands::Websocket { url } => {
            println!("{} {}", "Connecting to WebSocket server:".green(), url.cyan());
            ClientTransport::WebSocket(url)
        }
        Commands::Unix { socket_path } => {
            println!("{} {:?}", "Connecting to Unix socket server:".green(), socket_path);
            ClientTransport::Unix(socket_path)
        }
    };

    // Connect to the server
    let client = HeroOpenRpcClient::connect(transport).await?;
    println!("{}", "Connected successfully!".green().bold());

    // Handle authentication if private key is provided
    let mut authenticated = false;
    if let Some(private_key) = cli.private_key {
        println!("{}", "Authenticating...".yellow());
        match client.authenticate_with_key(&private_key).await {
            Ok(true) => {
                println!("{}", "Authentication successful!".green().bold());
                authenticated = true;
            }
            Ok(false) => {
                println!("{}", "Authentication failed!".red().bold());
            }
            Err(e) => {
                error!("Authentication error: {}", e);
                println!("{} {}", "Authentication error:".red().bold(), e);
            }
        }
    } else {
        println!("{}", "No private key provided. Some methods will require authentication.".yellow());
        println!("{}", "Use --generate-key to create a new key or --private-key to use an existing one.".bright_yellow());
    }

    println!();

    // Interactive loop
    loop {
        // Filter methods based on authentication status
        let available_methods: Vec<&RpcMethod> = RPC_METHODS
            .iter()
            .filter(|method| !method.requires_auth || authenticated)
            .collect();

        if available_methods.is_empty() {
            println!("{}", "No methods available. Please authenticate first.".red());
            break;
        }

        // Display method selection
        let method_names: Vec<String> = available_methods
            .iter()
            .map(|method| {
                if method.requires_auth && !authenticated {
                    format!("{} {} (requires auth)", method.name.red(), method.description)
|
|
||||||
} else {
|
|
||||||
format!("{} {}", method.name.green(), method.description)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let selection = Select::new()
|
|
||||||
.with_prompt("Select an RPC method to call")
|
|
||||||
.items(&method_names)
|
|
||||||
.default(0)
|
|
||||||
.interact_opt()?;
|
|
||||||
|
|
||||||
let Some(selection) = selection else {
|
|
||||||
println!("{}", "Goodbye!".cyan());
|
|
||||||
break;
|
|
||||||
};
|
|
||||||
|
|
||||||
let selected_method = available_methods[selection];
|
|
||||||
println!();
|
|
||||||
println!("{} {}", "Selected method:".bold(), selected_method.name.green());
|
|
||||||
|
|
||||||
// Handle method-specific parameter collection and execution
|
|
||||||
match execute_method(&client, selected_method.name).await {
|
|
||||||
Ok(_) => {}
|
|
||||||
Err(e) => {
|
|
||||||
error!("Method execution failed: {}", e);
|
|
||||||
println!("{} {}", "Error:".red().bold(), e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
println!();
|
|
||||||
|
|
||||||
// Ask if user wants to continue
|
|
||||||
if !Confirm::new()
|
|
||||||
.with_prompt("Do you want to call another method?")
|
|
||||||
.default(true)
|
|
||||||
.interact()?
|
|
||||||
{
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
println!();
|
|
||||||
}
|
|
||||||
|
|
||||||
println!("{}", "Goodbye!".cyan().bold());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn execute_method(client: &HeroOpenRpcClient, method_name: &str) -> Result<()> {
|
|
||||||
match method_name {
|
|
||||||
"fetch_nonce" => {
|
|
||||||
let pubkey: String = Input::new()
|
|
||||||
.with_prompt("Public key (hex)")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.fetch_nonce(pubkey).await?;
|
|
||||||
println!("{} {}", "Nonce:".green().bold(), result.yellow());
|
|
||||||
}
|
|
||||||
|
|
||||||
"authenticate" => {
|
|
||||||
let pubkey: String = Input::new()
|
|
||||||
.with_prompt("Public key (hex)")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let signature: String = Input::new()
|
|
||||||
.with_prompt("Signature (hex)")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.authenticate(pubkey, signature, nonce).await?;
|
|
||||||
println!("{} {}", "Authentication result:".green().bold(),
|
|
||||||
if result { "Success".green() } else { "Failed".red() });
|
|
||||||
}
|
|
||||||
|
|
||||||
"whoami" => {
|
|
||||||
let result = client.whoami().await?;
|
|
||||||
println!("{} {}", "User info:".green().bold(),
|
|
||||||
serde_json::to_string_pretty(&result)?.cyan());
|
|
||||||
}
|
|
||||||
|
|
||||||
"play" => {
|
|
||||||
let script: String = Input::new()
|
|
||||||
.with_prompt("Rhai script to execute")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.play(script).await?;
|
|
||||||
println!("{} {}", "Script output:".green().bold(), result.output.cyan());
|
|
||||||
}
|
|
||||||
|
|
||||||
"create_job" => {
|
|
||||||
let script: String = Input::new()
|
|
||||||
.with_prompt("Script content")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let script_types = ["HeroScript", "RhaiSAL", "RhaiDSL"];
|
|
||||||
let script_type_selection = Select::new()
|
|
||||||
.with_prompt("Script type")
|
|
||||||
.items(&script_types)
|
|
||||||
.default(0)
|
|
||||||
.interact()?;
|
|
||||||
|
|
||||||
let script_type = match script_type_selection {
|
|
||||||
0 => ScriptType::HeroScript,
|
|
||||||
1 => ScriptType::RhaiSAL,
|
|
||||||
2 => ScriptType::RhaiDSL,
|
|
||||||
_ => ScriptType::HeroScript,
|
|
||||||
};
|
|
||||||
|
|
||||||
let add_prerequisites = Confirm::new()
|
|
||||||
.with_prompt("Add prerequisites?")
|
|
||||||
.default(false)
|
|
||||||
.interact()?;
|
|
||||||
|
|
||||||
let prerequisites = if add_prerequisites {
|
|
||||||
let prereq_input: String = Input::new()
|
|
||||||
.with_prompt("Prerequisites (comma-separated job IDs)")
|
|
||||||
.interact_text()?;
|
|
||||||
Some(prereq_input.split(',').map(|s| s.trim().to_string()).collect())
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let job_params = JobParams {
|
|
||||||
script,
|
|
||||||
script_type,
|
|
||||||
prerequisites,
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = client.create_job(job_params).await?;
|
|
||||||
println!("{} {}", "Created job ID:".green().bold(), result.yellow());
|
|
||||||
}
|
|
||||||
|
|
||||||
"start_job" => {
|
|
||||||
let job_id: String = Input::new()
|
|
||||||
.with_prompt("Job ID to start")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.start_job(job_id).await?;
|
|
||||||
println!("{} {}", "Start result:".green().bold(),
|
|
||||||
if result.success { "Success".green() } else { "Failed".red() });
|
|
||||||
}
|
|
||||||
|
|
||||||
"run_job" => {
|
|
||||||
let script: String = Input::new()
|
|
||||||
.with_prompt("Script content")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let script_types = ["HeroScript", "RhaiSAL", "RhaiDSL"];
|
|
||||||
let script_type_selection = Select::new()
|
|
||||||
.with_prompt("Script type")
|
|
||||||
.items(&script_types)
|
|
||||||
.default(0)
|
|
||||||
.interact()?;
|
|
||||||
|
|
||||||
let script_type = match script_type_selection {
|
|
||||||
0 => ScriptType::HeroScript,
|
|
||||||
1 => ScriptType::RhaiSAL,
|
|
||||||
2 => ScriptType::RhaiDSL,
|
|
||||||
_ => ScriptType::HeroScript,
|
|
||||||
};
|
|
||||||
|
|
||||||
let add_prerequisites = Confirm::new()
|
|
||||||
.with_prompt("Add prerequisites?")
|
|
||||||
.default(false)
|
|
||||||
.interact()?;
|
|
||||||
|
|
||||||
let prerequisites = if add_prerequisites {
|
|
||||||
let prereq_input: String = Input::new()
|
|
||||||
.with_prompt("Prerequisites (comma-separated job IDs)")
|
|
||||||
.interact_text()?;
|
|
||||||
Some(prereq_input.split(',').map(|s| s.trim().to_string()).collect())
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = client.run_job(script, script_type, prerequisites).await?;
|
|
||||||
println!("{} {}", "Job result:".green().bold(), result.cyan());
|
|
||||||
}
|
|
||||||
|
|
||||||
"get_job_status" => {
|
|
||||||
let job_id: String = Input::new()
|
|
||||||
.with_prompt("Job ID")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.get_job_status(job_id).await?;
|
|
||||||
println!("{} {:?}", "Job status:".green().bold(), result);
|
|
||||||
}
|
|
||||||
|
|
||||||
"get_job_output" => {
|
|
||||||
let job_id: String = Input::new()
|
|
||||||
.with_prompt("Job ID")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.get_job_output(job_id).await?;
|
|
||||||
println!("{} {}", "Job output:".green().bold(), result.cyan());
|
|
||||||
}
|
|
||||||
|
|
||||||
"get_job_logs" => {
|
|
||||||
let job_id: String = Input::new()
|
|
||||||
.with_prompt("Job ID")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
let result = client.get_job_logs(job_id).await?;
|
|
||||||
println!("{} {}", "Job logs:".green().bold(), result.logs.cyan());
|
|
||||||
}
|
|
||||||
|
|
||||||
"list_jobs" => {
|
|
||||||
let result = client.list_jobs().await?;
|
|
||||||
println!("{}", "Jobs:".green().bold());
|
|
||||||
for job in result {
|
|
||||||
println!(" {} - {} ({:?})",
|
|
||||||
job.id().yellow(),
|
|
||||||
job.script_type(),
|
|
||||||
job.status()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
"stop_job" => {
|
|
||||||
let job_id: String = Input::new()
|
|
||||||
.with_prompt("Job ID to stop")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
client.stop_job(job_id.clone()).await?;
|
|
||||||
println!("{} {}", "Stopped job:".green().bold(), job_id.yellow());
|
|
||||||
}
|
|
||||||
|
|
||||||
"delete_job" => {
|
|
||||||
let job_id: String = Input::new()
|
|
||||||
.with_prompt("Job ID to delete")
|
|
||||||
.interact_text()?;
|
|
||||||
|
|
||||||
client.delete_job(job_id.clone()).await?;
|
|
||||||
println!("{} {}", "Deleted job:".green().bold(), job_id.yellow());
|
|
||||||
}
|
|
||||||
|
|
||||||
"clear_all_jobs" => {
|
|
||||||
let confirm = Confirm::new()
|
|
||||||
.with_prompt("Are you sure you want to clear ALL jobs?")
|
|
||||||
.default(false)
|
|
||||||
.interact()?;
|
|
||||||
|
|
||||||
if confirm {
|
|
||||||
client.clear_all_jobs().await?;
|
|
||||||
println!("{}", "Cleared all jobs".green().bold());
|
|
||||||
} else {
|
|
||||||
println!("{}", "Operation cancelled".yellow());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => {
|
|
||||||
println!("{} {}", "Unknown method:".red().bold(), method_name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
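// Example invocations (a sketch; the binary name follows the clap
// `#[command(name = ...)]` attribute above, and top-level flags come
// before the subcommand):
//
//   hero-openrpc-client --generate-key
//   hero-openrpc-client --private-key <HEX> websocket --url ws://127.0.0.1:9944
//   hero-openrpc-client --private-key <HEX> unix --socket-path /tmp/hero-openrpc.sock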
@ -1,81 +0,0 @@
use anyhow::{anyhow, Result};
use secp256k1::{Message, PublicKey, ecdsa::Signature, Secp256k1, SecretKey};
use sha2::{Digest, Sha256};

/// Helper for authentication operations
pub struct AuthHelper {
    secret_key: SecretKey,
    public_key: PublicKey,
    secp: Secp256k1<secp256k1::All>,
}

impl AuthHelper {
    /// Create a new auth helper from a private key hex string
    pub fn new(private_key_hex: &str) -> Result<Self> {
        let secp = Secp256k1::new();

        let secret_key_bytes = hex::decode(private_key_hex)
            .map_err(|_| anyhow!("Invalid private key hex format"))?;

        let secret_key = SecretKey::from_slice(&secret_key_bytes)
            .map_err(|_| anyhow!("Invalid private key"))?;

        let public_key = PublicKey::from_secret_key(&secp, &secret_key);

        Ok(Self {
            secret_key,
            public_key,
            secp,
        })
    }

    /// Generate a new random private key
    pub fn generate() -> Result<Self> {
        let secp = Secp256k1::new();
        let (secret_key, public_key) = secp.generate_keypair(&mut rand::thread_rng());

        Ok(Self {
            secret_key,
            public_key,
            secp,
        })
    }

    /// Get the public key as a hex string
    pub fn public_key_hex(&self) -> String {
        hex::encode(self.public_key.serialize())
    }

    /// Get the private key as a hex string
    pub fn private_key_hex(&self) -> String {
        hex::encode(self.secret_key.secret_bytes())
    }

    /// Sign a message and return the signature as hex
    pub fn sign_message(&self, message: &str) -> Result<String> {
        let message_hash = Sha256::digest(message.as_bytes());
        let message = Message::from_slice(&message_hash)
            .map_err(|_| anyhow!("Failed to create message from hash"))?;

        let signature = self.secp.sign_ecdsa(&message, &self.secret_key);
        Ok(hex::encode(signature.serialize_compact()))
    }

    /// Verify a signature against a message
    pub fn verify_signature(&self, message: &str, signature_hex: &str) -> Result<bool> {
        let message_hash = Sha256::digest(message.as_bytes());
        let message = Message::from_slice(&message_hash)
            .map_err(|_| anyhow!("Failed to create message from hash"))?;

        let signature_bytes = hex::decode(signature_hex)
            .map_err(|_| anyhow!("Invalid signature hex format"))?;

        let signature = Signature::from_compact(&signature_bytes)
            .map_err(|_| anyhow!("Invalid signature format"))?;

        match self.secp.verify_ecdsa(&message, &signature, &self.public_key) {
            Ok(_) => Ok(true),
            Err(_) => Ok(false),
        }
    }
}
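// A minimal sign/verify roundtrip for AuthHelper, sketched as a unit test.
// It relies only on items defined in this module plus the crate's existing
// secp256k1/sha2/hex/rand dependencies.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn sign_and_verify_roundtrip() {
        // Fresh keypair, sign a nonce-like message, then verify it.
        let helper = AuthHelper::generate().expect("keypair generation");
        let signature = helper.sign_message("nonce-1234").expect("signing");
        assert!(helper.verify_signature("nonce-1234", &signature).expect("verify"));
        // A different message must not verify against the same signature.
        assert!(!helper.verify_signature("nonce-5678", &signature).expect("verify"));
    }
}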
@ -1,212 +0,0 @@
use anyhow::Result;
use async_trait::async_trait;
use hero_job::{Job, JobStatus, ScriptType};
use jsonrpsee::core::client::ClientT;
use jsonrpsee::core::ClientError;
use jsonrpsee::proc_macros::rpc;
use jsonrpsee::rpc_params;
use jsonrpsee::ws_client::{WsClient, WsClientBuilder};
use std::path::PathBuf;
use tracing::{error, info};

mod auth;
mod types;

pub use auth::*;
pub use types::*;

/// Transport configuration for the client
#[derive(Debug, Clone)]
pub enum ClientTransport {
    WebSocket(String),
    /// Unix socket transport (selected by the `unix` subcommand); not yet
    /// wired up in `connect`, which currently only supports WebSocket.
    Unix(PathBuf),
}

/// OpenRPC client trait defining all available methods
#[rpc(client)]
pub trait OpenRpcClient {
    // Authentication methods
    #[method(name = "fetch_nonce")]
    async fn fetch_nonce(&self, pubkey: String) -> Result<String, ClientError>;

    #[method(name = "authenticate")]
    async fn authenticate(
        &self,
        pubkey: String,
        signature: String,
        nonce: String,
    ) -> Result<bool, ClientError>;

    #[method(name = "whoami")]
    async fn whoami(&self) -> Result<serde_json::Value, ClientError>;

    // Script execution
    #[method(name = "play")]
    async fn play(&self, script: String) -> Result<PlayResult, ClientError>;

    // Job management
    #[method(name = "create_job")]
    async fn create_job(&self, job: JobParams) -> Result<String, ClientError>;

    #[method(name = "start_job")]
    async fn start_job(&self, job_id: String) -> Result<StartJobResult, ClientError>;

    #[method(name = "run_job")]
    async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        prerequisites: Option<Vec<String>>,
    ) -> Result<String, ClientError>;

    #[method(name = "get_job_status")]
    async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ClientError>;

    #[method(name = "get_job_output")]
    async fn get_job_output(&self, job_id: String) -> Result<String, ClientError>;

    #[method(name = "get_job_logs")]
    async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ClientError>;

    #[method(name = "list_jobs")]
    async fn list_jobs(&self) -> Result<Vec<Job>, ClientError>;

    #[method(name = "stop_job")]
    async fn stop_job(&self, job_id: String) -> Result<(), ClientError>;

    #[method(name = "delete_job")]
    async fn delete_job(&self, job_id: String) -> Result<(), ClientError>;

    #[method(name = "clear_all_jobs")]
    async fn clear_all_jobs(&self) -> Result<(), ClientError>;
}

/// Wrapper client that can use WebSocket transport
pub struct HeroOpenRpcClient {
    client: WsClient,
}

impl HeroOpenRpcClient {
    /// Connect to the OpenRPC server using the specified transport
    pub async fn connect(transport: ClientTransport) -> Result<Self> {
        match transport {
            ClientTransport::WebSocket(url) => {
                info!("Connecting to WebSocket server at {}", url);
                let client = WsClientBuilder::default()
                    .build(&url)
                    .await?;
                Ok(Self { client })
            }
            ClientTransport::Unix(path) => {
                // Unix socket transport is not implemented on the client yet;
                // fail with a clear error instead of silently ignoring the path.
                Err(anyhow::anyhow!(
                    "Unix socket transport not yet supported (requested {:?})",
                    path
                ))
            }
        }
    }

    /// Get the underlying client for making RPC calls
    pub fn client(&self) -> &WsClient {
        &self.client
    }

    /// Authenticate with the server using a private key
    pub async fn authenticate_with_key(&self, private_key: &str) -> Result<bool> {
        let auth_helper = AuthHelper::new(private_key)?;

        // Get nonce
        let pubkey = auth_helper.public_key_hex();
        let nonce: String = self.client.fetch_nonce(pubkey.clone()).await?;

        // Sign nonce
        let signature = auth_helper.sign_message(&nonce)?;

        // Authenticate
        let result = self.client.authenticate(pubkey, signature, nonce).await?;

        if result {
            info!("Authentication successful");
        } else {
            error!("Authentication failed");
        }

        Ok(result)
    }
}

// Implement delegation methods on HeroOpenRpcClient to use the generated trait methods
impl HeroOpenRpcClient {
    /// Delegate to fetch_nonce on the underlying client
    pub async fn fetch_nonce(&self, pubkey: String) -> Result<String, ClientError> {
        self.client.fetch_nonce(pubkey).await
    }

    /// Delegate to authenticate on the underlying client
    pub async fn authenticate(
        &self,
        pubkey: String,
        signature: String,
        nonce: String,
    ) -> Result<bool, ClientError> {
        self.client.authenticate(pubkey, signature, nonce).await
    }

    /// Delegate to whoami on the underlying client
    pub async fn whoami(&self) -> Result<serde_json::Value, ClientError> {
        self.client.whoami().await
    }

    /// Delegate to play on the underlying client
    pub async fn play(&self, script: String) -> Result<PlayResult, ClientError> {
        self.client.play(script).await
    }

    /// Delegate to create_job on the underlying client
    pub async fn create_job(&self, job: JobParams) -> Result<String, ClientError> {
        self.client.create_job(job).await
    }

    /// Delegate to start_job on the underlying client
    pub async fn start_job(&self, job_id: String) -> Result<StartJobResult, ClientError> {
        self.client.start_job(job_id).await
    }

    /// Delegate to run_job on the underlying client
    pub async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        prerequisites: Option<Vec<String>>,
    ) -> Result<String, ClientError> {
        self.client.run_job(script, script_type, prerequisites).await
    }

    /// Delegate to get_job_status on the underlying client
    pub async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ClientError> {
        self.client.get_job_status(job_id).await
    }

    /// Delegate to get_job_output on the underlying client
    pub async fn get_job_output(&self, job_id: String) -> Result<String, ClientError> {
        self.client.get_job_output(job_id).await
    }

    /// Delegate to get_job_logs on the underlying client
    pub async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ClientError> {
        self.client.get_job_logs(job_id).await
    }

    /// Delegate to list_jobs on the underlying client
    pub async fn list_jobs(&self) -> Result<Vec<Job>, ClientError> {
        self.client.list_jobs().await
    }

    /// Delegate to stop_job on the underlying client
    pub async fn stop_job(&self, job_id: String) -> Result<(), ClientError> {
        self.client.stop_job(job_id).await
    }

    /// Delegate to delete_job on the underlying client
    pub async fn delete_job(&self, job_id: String) -> Result<(), ClientError> {
        self.client.delete_job(job_id).await
    }

    /// Delegate to clear_all_jobs on the underlying client
    pub async fn clear_all_jobs(&self) -> Result<(), ClientError> {
        self.client.clear_all_jobs().await
    }
}
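// A minimal end-to-end sketch of the challenge/response flow implemented by
// authenticate_with_key (fetch_nonce -> sign nonce -> authenticate). Kept as a
// comment because this file is a library; the URL and key are placeholders.
//
// #[tokio::main]
// async fn main() -> anyhow::Result<()> {
//     let transport = ClientTransport::WebSocket("ws://127.0.0.1:9944".to_string());
//     let client = HeroOpenRpcClient::connect(transport).await?;
//     let ok = client.authenticate_with_key("<PRIVATE_KEY_HEX>").await?;
//     println!("authenticated: {}", ok);
//     Ok(())
// }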
@ -1,28 +0,0 @@
use hero_job::ScriptType;
use serde::{Deserialize, Serialize};

/// Parameters for creating a job
#[derive(Debug, Serialize, Deserialize)]
pub struct JobParams {
    pub script: String,
    pub script_type: ScriptType,
    pub prerequisites: Option<Vec<String>>,
}

/// Result of script execution
#[derive(Debug, Serialize, Deserialize)]
pub struct PlayResult {
    pub output: String,
}

/// Result of starting a job
#[derive(Debug, Serialize, Deserialize)]
pub struct StartJobResult {
    pub success: bool,
}

/// Result of getting job logs
#[derive(Debug, Serialize, Deserialize)]
pub struct JobLogsResult {
    pub logs: String,
}
@ -1,47 +0,0 @@
[package]
name = "hero-openrpc-server"
version = "0.1.0"
edition = "2021"

[[bin]]
name = "hero-openrpc-server"
path = "cmd/main.rs"

[dependencies]
# Core dependencies
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
thiserror = "1.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
clap = { version = "4.0", features = ["derive"] }

# JSON-RPC dependencies
jsonrpsee = { version = "0.21", features = [
    "server",
    "macros"
] }
jsonrpsee-types = "0.21"
uuid = { version = "1.6", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }

# Hero dependencies
hero_supervisor = { path = "../../../core/supervisor" }
hero_job = { path = "../../../core/job" }

# Authentication and crypto
secp256k1 = { version = "0.28", features = ["rand", "recovery"] }
hex = "0.4"
sha2 = "0.10"
rand = "0.8"

# Async utilities
futures = "0.3"

# Test dependencies
[dev-dependencies]
tokio-test = "0.4"
uuid = { version = "1.6", features = ["v4"] }
@ -1,95 +0,0 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use hero_openrpc_server::{OpenRpcServer, OpenRpcServerConfig, Transport};
use std::net::SocketAddr;
use std::path::PathBuf;
use tracing::{info, Level};

#[derive(Parser)]
#[command(name = "hero-openrpc-server")]
#[command(about = "Hero OpenRPC Server - WebSocket and Unix socket JSON-RPC server")]
struct Cli {
    #[command(subcommand)]
    command: Commands,

    /// Path to supervisor configuration file
    #[arg(long)]
    supervisor_config: Option<PathBuf>,

    /// Database path for supervisor
    #[arg(long, default_value = "./supervisor.db")]
    db_path: PathBuf,

    /// Log level
    #[arg(long, default_value = "info")]
    log_level: String,
}

#[derive(Subcommand)]
enum Commands {
    /// Start WebSocket server
    Websocket {
        /// Address to bind to
        #[arg(long, default_value = "127.0.0.1:9944")]
        addr: SocketAddr,
    },
    /// Start Unix socket server
    Unix {
        /// Unix socket path
        #[arg(long, default_value = "/tmp/hero-openrpc.sock")]
        socket_path: PathBuf,
    },
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Initialize tracing
    let log_level = match cli.log_level.to_lowercase().as_str() {
        "trace" => Level::TRACE,
        "debug" => Level::DEBUG,
        "info" => Level::INFO,
        "warn" => Level::WARN,
        "error" => Level::ERROR,
        _ => Level::INFO,
    };

    tracing_subscriber::fmt()
        .with_max_level(log_level)
        .init();

    let transport = match cli.command {
        Commands::Websocket { addr } => {
            info!("Starting WebSocket server on {}", addr);
            Transport::WebSocket(addr)
        }
        Commands::Unix { socket_path } => {
            info!("Starting Unix socket server on {:?}", socket_path);
            // Remove existing socket file if it exists
            if socket_path.exists() {
                std::fs::remove_file(&socket_path)?;
            }
            Transport::Unix(socket_path)
        }
    };

    let config = OpenRpcServerConfig {
        transport: transport.clone(),
        supervisor_config_path: cli.supervisor_config,
        db_path: cli.db_path,
    };

    // Create and start the server
    let server = OpenRpcServer::new(config.clone()).await?;
    let handle = server.start(config).await?;

    info!("Server started successfully");

    // Wait for the server to finish
    handle.stopped().await;

    info!("Server stopped");
    Ok(())
}
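// Example invocations (binary name from the [[bin]] entry in Cargo.toml;
// top-level flags come before the subcommand):
//
//   hero-openrpc-server websocket --addr 127.0.0.1:9944
//   hero-openrpc-server --log-level debug unix --socket-path /tmp/hero-openrpc.sock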
@ -1,131 +0,0 @@
use anyhow::{anyhow, Result};
use secp256k1::{Message, PublicKey, Secp256k1, ecdsa::Signature};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};

/// Nonce response structure
#[derive(Debug, Serialize, Deserialize)]
pub struct NonceResponse {
    pub nonce: String,
    pub timestamp: u64,
}

/// Authentication manager for handling nonces and signature verification
#[derive(Debug)]
pub struct AuthManager {
    nonces: HashMap<String, NonceResponse>,
    authenticated_keys: HashMap<String, u64>, // pubkey -> timestamp
}

impl AuthManager {
    /// Create a new authentication manager
    pub fn new() -> Self {
        Self {
            nonces: HashMap::new(),
            authenticated_keys: HashMap::new(),
        }
    }

    /// Generate a nonce for a given public key
    pub fn generate_nonce(&mut self, pubkey: &str) -> String {
        let timestamp = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        let nonce = format!("{}:{}", pubkey, timestamp);
        let nonce_hash = format!("{:x}", Sha256::digest(nonce.as_bytes()));

        self.nonces.insert(
            pubkey.to_string(),
            NonceResponse {
                nonce: nonce_hash.clone(),
                timestamp,
            },
        );

        nonce_hash
    }

    /// Verify a signature against a stored nonce
    pub fn verify_signature(&mut self, pubkey: &str, signature: &str) -> Result<bool> {
        // Get the nonce for this public key
        let nonce_response = self
            .nonces
            .get(pubkey)
            .ok_or_else(|| anyhow!("No nonce found for public key"))?;

        // Check if nonce is not too old (5 minutes)
        let current_time = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        if current_time - nonce_response.timestamp > 300 {
            return Err(anyhow!("Nonce expired"));
        }

        // Parse the public key
        let pubkey_bytes = hex::decode(pubkey)
            .map_err(|_| anyhow!("Invalid public key format"))?;

        let secp = Secp256k1::new();
        let public_key = PublicKey::from_slice(&pubkey_bytes)
            .map_err(|_| anyhow!("Invalid public key"))?;

        // Parse the signature
        let signature_bytes = hex::decode(signature)
            .map_err(|_| anyhow!("Invalid signature format"))?;

        let signature = Signature::from_compact(&signature_bytes)
            .map_err(|_| anyhow!("Invalid signature"))?;

        // Create message hash from nonce
        let message_hash = Sha256::digest(nonce_response.nonce.as_bytes());
        let message = Message::from_slice(&message_hash)
            .map_err(|_| anyhow!("Failed to create message"))?;

        // Verify the signature
        match secp.verify_ecdsa(&message, &signature, &public_key) {
            Ok(_) => {
                // Mark this key as authenticated
                self.authenticated_keys.insert(pubkey.to_string(), current_time);
                // Remove the used nonce
                self.nonces.remove(pubkey);
                Ok(true)
            }
            Err(_) => Ok(false),
        }
    }

    /// Check if a public key is currently authenticated
    pub fn is_authenticated(&self, pubkey: &str) -> bool {
        if let Some(&timestamp) = self.authenticated_keys.get(pubkey) {
            let current_time = SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap()
                .as_secs();

            // Authentication is valid for 1 hour
            current_time - timestamp < 3600
        } else {
            false
        }
    }

    /// Remove expired authentications
    pub fn cleanup_expired(&mut self) {
        let current_time = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        // Remove expired nonces (older than 5 minutes)
        self.nonces.retain(|_, nonce| current_time - nonce.timestamp <= 300);

        // Remove expired authentications (older than 1 hour)
        self.authenticated_keys.retain(|_, &mut timestamp| current_time - timestamp <= 3600);
    }
}
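// A sketch of the intended nonce lifecycle around AuthManager. The signing
// step happens on the client (see AuthHelper::sign_message in the client
// crate); `sign_nonce_with_client_key` below is a hypothetical stand-in.
//
// let mut auth = AuthManager::new();
// let nonce = auth.generate_nonce(pubkey_hex);                  // challenge issued
// let signature_hex = sign_nonce_with_client_key(&nonce);       // hypothetical client step
// let ok = auth.verify_signature(pubkey_hex, &signature_hex)?;  // nonce consumed on success
// assert_eq!(auth.is_authenticated(pubkey_hex), ok);            // valid for one hour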
@ -1,471 +0,0 @@
use anyhow::Result;
use hero_job::{Job, JobBuilder, JobStatus, ScriptType};
use hero_supervisor::{Supervisor, SupervisorBuilder};
use jsonrpsee::core::async_trait;
use jsonrpsee::proc_macros::rpc;
use jsonrpsee::server::{ServerBuilder, ServerHandle};
use jsonrpsee::RpcModule;
use jsonrpsee_types::error::ErrorCode;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::error;

mod auth;
pub mod types;

pub use auth::*;
pub use types::*;

/// Transport type for the OpenRPC server
#[derive(Debug, Clone)]
pub enum Transport {
    WebSocket(SocketAddr),
    Unix(PathBuf),
}

/// OpenRPC server configuration
#[derive(Debug, Clone)]
pub struct OpenRpcServerConfig {
    pub transport: Transport,
    pub supervisor_config_path: Option<PathBuf>,
    pub db_path: PathBuf,
}

/// Main OpenRPC server state
#[derive(Clone)]
pub struct OpenRpcServer {
    supervisor: Arc<RwLock<Supervisor>>,
    auth_manager: Arc<RwLock<AuthManager>>,
}

/// OpenRPC trait defining all available methods
#[rpc(server)]
pub trait OpenRpcApi {
    // Authentication methods
    #[method(name = "fetch_nonce")]
    async fn fetch_nonce(&self, public_key: String) -> Result<String, ErrorCode>;

    #[method(name = "authenticate")]
    async fn authenticate(&self, public_key: String, signature: String, nonce: String) -> Result<bool, ErrorCode>;

    #[method(name = "whoami")]
    async fn whoami(&self) -> Result<String, ErrorCode>;

    // Script execution
    #[method(name = "play")]
    async fn play(&self, script: String) -> Result<PlayResult, ErrorCode>;

    // Job management
    #[method(name = "create_job")]
    async fn create_job(&self, job_params: JobParams) -> Result<String, ErrorCode>;

    #[method(name = "start_job")]
    async fn start_job(&self, job_id: String) -> Result<StartJobResult, ErrorCode>;

    #[method(name = "run_job")]
    async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        prerequisites: Option<Vec<String>>,
    ) -> Result<String, ErrorCode>;

    #[method(name = "get_job_status")]
    async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ErrorCode>;

    #[method(name = "get_job_output")]
    async fn get_job_output(&self, job_id: String) -> Result<String, ErrorCode>;

    #[method(name = "get_job_logs")]
    async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ErrorCode>;

    #[method(name = "list_jobs")]
    async fn list_jobs(&self) -> Result<Vec<Job>, ErrorCode>;

    #[method(name = "stop_job")]
    async fn stop_job(&self, job_id: String) -> Result<(), ErrorCode>;

    #[method(name = "delete_job")]
    async fn delete_job(&self, job_id: String) -> Result<(), ErrorCode>;

    #[method(name = "clear_all_jobs")]
    async fn clear_all_jobs(&self) -> Result<(), ErrorCode>;
}

impl OpenRpcServer {
    /// Create a new OpenRPC server instance
    pub async fn new(config: OpenRpcServerConfig) -> Result<Self> {
        let supervisor = if let Some(config_path) = config.supervisor_config_path {
            // Load supervisor from config file
            SupervisorBuilder::from_toml(&config_path)?
                .build().await?
        } else {
            // Create default supervisor with Redis URL
            SupervisorBuilder::new()
                .redis_url("redis://localhost:6379")
                .build().await?
        };

        Ok(Self {
            supervisor: Arc::new(RwLock::new(supervisor)),
            auth_manager: Arc::new(RwLock::new(AuthManager::new())),
        })
    }

    /// Start the OpenRPC server
    pub async fn start(self, config: OpenRpcServerConfig) -> Result<ServerHandle> {
        let mut module = RpcModule::new(());

        // Register all the RPC methods
        let server_clone = self.clone();
        module.register_async_method("fetch_nonce", move |params, _| {
            let server = server_clone.clone();
            async move {
                let public_key: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.fetch_nonce(public_key).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("authenticate", move |params, _| {
            let server = server_clone.clone();
            async move {
                let (public_key, signature, nonce): (String, String, String) = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.authenticate(public_key, signature, nonce).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("whoami", move |_params, _| {
            let server = server_clone.clone();
            async move {
                server.whoami().await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("play", move |params, _| {
            let server = server_clone.clone();
            async move {
                let script: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.play(script).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("create_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job: JobParams = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.create_job(job).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("start_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.start_job(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("run_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let (script, script_type, prerequisites): (String, ScriptType, Option<Vec<String>>) = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.run_job(script, script_type, prerequisites).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("get_job_status", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.get_job_status(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("get_job_output", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.get_job_output(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("get_job_logs", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.get_job_logs(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("list_jobs", move |params, _| {
            let server = server_clone.clone();
            async move {
                let _: () = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.list_jobs().await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("stop_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.stop_job(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("delete_job", move |params, _| {
            let server = server_clone.clone();
            async move {
                let job_id: String = params.one().map_err(|_| ErrorCode::InvalidParams)?;
                server.delete_job(job_id).await
            }
        })?;

        let server_clone = self.clone();
        module.register_async_method("clear_all_jobs", move |params, _| {
            let server = server_clone.clone();
            async move {
                let _: () = params.parse().map_err(|_| ErrorCode::InvalidParams)?;
                server.clear_all_jobs().await
            }
        })?;

        match config.transport {
            Transport::WebSocket(addr) => {
                let server = ServerBuilder::default()
                    .build(addr)
                    .await?;
                let handle = server.start(module);
                Ok(handle)
            }
            Transport::Unix(_path) => {
                // Unix socket transport not yet implemented in jsonrpsee 0.21
                Err(anyhow::anyhow!("Unix socket transport not yet supported"))
            }
        }
    }
}

#[async_trait]
impl OpenRpcApiServer for OpenRpcServer {
    async fn fetch_nonce(&self, public_key: String) -> Result<String, ErrorCode> {
        let mut auth_manager = self.auth_manager.write().await;
        let nonce = auth_manager.generate_nonce(&public_key);
        Ok(nonce)
    }

    async fn authenticate(
        &self,
        public_key: String,
        signature: String,
        _nonce: String,
    ) -> Result<bool, ErrorCode> {
        let mut auth_manager = self.auth_manager.write().await;
        match auth_manager.verify_signature(&public_key, &signature) {
            Ok(is_valid) => Ok(is_valid),
            Err(e) => {
                error!("Authentication error: {}", e);
                Ok(false)
            }
        }
    }

    async fn whoami(&self) -> Result<String, ErrorCode> {
        let _auth_manager = self.auth_manager.read().await;
        // For now, return basic info - in a real implementation,
        // you'd track authenticated sessions
        Ok(serde_json::json!({
            "authenticated": true,
            "user_id": "anonymous"
        }).to_string())
    }

    async fn play(&self, script: String) -> Result<PlayResult, ErrorCode> {
        let _supervisor = self.supervisor.read().await;

        // For now, return a simple result since we still need to implement an execute_script method
        Ok(PlayResult {
            output: format!("Script executed: {}", script)
        })
    }

    async fn create_job(&self, job_params: JobParams) -> Result<String, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        // Use JobBuilder to create a Job instance
        let mut builder = JobBuilder::new()
            .caller_id(&job_params.caller_id)
            .context_id(&job_params.context_id)
            .script(&job_params.script)
            .script_type(job_params.script_type);

        // Set timeout if provided
        if let Some(timeout_secs) = job_params.timeout {
            builder = builder.timeout(std::time::Duration::from_secs(timeout_secs));
        }

        // Set prerequisites if provided
        if let Some(prerequisites) = job_params.prerequisites {
            builder = builder.prerequisites(prerequisites);
        }

        // Build the job
        let job = match builder.build() {
            Ok(job) => job,
            Err(e) => {
                error!("Failed to build job: {}", e);
                return Err(ErrorCode::InvalidParams);
            }
        };

        let job_id = job.id.clone();

        // Create the job using the supervisor
        match supervisor.create_job(&job).await {
            Ok(_) => Ok(job_id),
            Err(e) => {
                error!("Failed to create job: {}", e);
                Err(ErrorCode::InternalError)
            }
        }
    }

    async fn start_job(&self, job_id: String) -> Result<StartJobResult, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.start_job(&job_id).await {
            Ok(_) => Ok(StartJobResult { success: true }),
            Err(e) => {
                error!("Failed to start job {}: {}", job_id, e);
                Ok(StartJobResult { success: false })
            }
        }
    }

    async fn run_job(
        &self,
        script: String,
        script_type: ScriptType,
        _prerequisites: Option<Vec<String>>,
    ) -> Result<String, ErrorCode> {
        // For now, return a simple result
        Ok(format!("Job executed with script: {} (type: {:?})", script, script_type))
    }

    async fn get_job_status(&self, job_id: String) -> Result<JobStatus, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.get_job_status(&job_id).await {
            Ok(status) => Ok(status),
            Err(e) => {
                error!("Failed to get job status for {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn get_job_output(&self, job_id: String) -> Result<String, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.get_job_output(&job_id).await {
            Ok(output) => Ok(output.unwrap_or_else(|| "No output available".to_string())),
            Err(e) => {
                error!("Failed to get job output for {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn get_job_logs(&self, job_id: String) -> Result<JobLogsResult, ErrorCode> {
        // For now, return mock logs
        Ok(JobLogsResult {
            logs: format!("Logs for job {}", job_id),
        })
    }

    async fn list_jobs(&self) -> Result<Vec<Job>, ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.list_jobs().await {
            Ok(job_ids) => {
                // For now, create minimal Job objects with just the IDs.
                // In a real implementation, we'd need a supervisor.get_job() method.
                let jobs: Vec<Job> = job_ids.into_iter().map(|job_id| {
                    // Create a minimal job object - this is a temporary solution
                    // until supervisor.get_job() is implemented
                    Job {
                        id: job_id,
                        caller_id: "unknown".to_string(),
                        context_id: "unknown".to_string(),
                        script: "unknown".to_string(),
                        script_type: ScriptType::OSIS,
                        timeout: std::time::Duration::from_secs(30),
                        retries: 0,
                        concurrent: false,
                        log_path: None,
                        env_vars: std::collections::HashMap::new(),
                        prerequisites: Vec::new(),
                        dependents: Vec::new(),
                        created_at: chrono::Utc::now(),
                        updated_at: chrono::Utc::now(),
                    }
                }).collect();
                Ok(jobs)
            },
            Err(e) => {
                error!("Failed to list jobs: {}", e);
                Err(ErrorCode::InternalError)
            }
        }
    }

    async fn stop_job(&self, job_id: String) -> Result<(), ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.stop_job(&job_id).await {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("Failed to stop job {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn delete_job(&self, job_id: String) -> Result<(), ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.delete_job(&job_id).await {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("Failed to delete job {}: {}", job_id, e);
                Err(ErrorCode::InvalidParams)
            }
        }
    }

    async fn clear_all_jobs(&self) -> Result<(), ErrorCode> {
        let supervisor = self.supervisor.read().await;

        match supervisor.clear_all_jobs().await {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("Failed to clear all jobs: {}", e);
                Err(ErrorCode::InternalError)
            }
        }
    }
}
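// Wire-level shape of a few of the calls registered above, matching the
// params.one()/params.parse() positional decoding. These are illustrative
// JSON-RPC 2.0 bodies; the exact string form of ScriptType variants depends
// on hero_job's serde derive and is assumed here.
//
// {"jsonrpc":"2.0","id":1,"method":"fetch_nonce","params":["<PUBKEY_HEX>"]}
// {"jsonrpc":"2.0","id":2,"method":"authenticate","params":["<PUBKEY_HEX>","<SIGNATURE_HEX>","<NONCE>"]}
// {"jsonrpc":"2.0","id":3,"method":"run_job","params":["print(1);","OSIS",null]}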
@ -1,31 +0,0 @@
use hero_job::ScriptType;
use serde::{Deserialize, Serialize};

/// Parameters for creating a job
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JobParams {
    pub script: String,
    pub script_type: ScriptType,
    pub caller_id: String,
    pub context_id: String,
    pub timeout: Option<u64>, // timeout in seconds
    pub prerequisites: Option<Vec<String>>,
}

/// Result of script execution
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PlayResult {
    pub output: String,
}

/// Result of starting a job
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct StartJobResult {
    pub success: bool,
}

/// Result of getting job logs
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JobLogsResult {
    pub logs: String,
}
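// Example JSON accepted for JobParams by create_job. The ScriptType string
// ("OSIS" here) assumes hero_job serializes unit variants by name.
//
// {
//   "script": "print('Hello');",
//   "script_type": "OSIS",
//   "caller_id": "cli",
//   "context_id": "demo",
//   "timeout": 60,
//   "prerequisites": null
// }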
@ -1,409 +0,0 @@
|
|||||||
use hero_openrpc_server::{OpenRpcServer, OpenRpcServerConfig, OpenRpcApiServer, Transport, types::*};
|
|
||||||
use hero_supervisor::{Supervisor, SupervisorBuilder};
|
|
||||||
use hero_job::{JobBuilder, JobStatus, ScriptType};
|
|
||||||
use jsonrpsee_types::error::ErrorCode;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::RwLock;
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
/// Helper function to create a test supervisor
|
|
||||||
async fn create_test_supervisor() -> Arc<RwLock<Supervisor>> {
|
|
||||||
let supervisor = SupervisorBuilder::new()
|
|
||||||
.redis_url("redis://localhost:6379")
|
|
||||||
.build()
|
|
||||||
.await
|
|
||||||
.expect("Failed to create test supervisor");
|
|
||||||
|
|
||||||
Arc::new(RwLock::new(supervisor))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Helper function to create a test OpenRPC server
|
|
||||||
async fn create_test_server() -> OpenRpcServer {
|
|
||||||
use std::net::SocketAddr;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
let config = OpenRpcServerConfig {
|
|
||||||
transport: Transport::WebSocket("127.0.0.1:0".parse::<SocketAddr>().unwrap()),
|
|
||||||
supervisor_config_path: None,
|
|
||||||
db_path: PathBuf::from("/tmp/test_openrpc.db"),
|
|
||||||
};
|
|
||||||
OpenRpcServer::new(config).await.expect("Failed to create OpenRPC server")
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_fetch_nonce() {
|
|
||||||
let server = create_test_server().await;
|
|
||||||
let public_key = "test_public_key".to_string();
|
|
||||||
|
|
||||||
let result = server.fetch_nonce(public_key).await;
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let nonce = result.unwrap();
|
|
||||||
assert!(!nonce.is_empty());
|
|
||||||
assert_eq!(nonce.len(), 64); // Should be a 32-byte hex string
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_create_job_success() {
|
|
||||||
let server = create_test_server().await;
|
|
||||||
|
|
||||||
let job_params = JobParams {
|
|
||||||
script: "print('Hello, World!');".to_string(),
|
|
||||||
script_type: ScriptType::OSIS,
|
|
||||||
caller_id: "test_caller".to_string(),
|
|
||||||
context_id: "test_context".to_string(),
|
|
||||||
timeout: Some(60),
|
|
||||||
prerequisites: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = server.create_job(job_params).await;
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let job_id = result.unwrap();
|
|
||||||
assert!(!job_id.is_empty());
|
|
||||||
// Job ID should be a valid UUID format
|
|
||||||
assert!(uuid::Uuid::parse_str(&job_id).is_ok());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_create_job_with_prerequisites() {
|
|
||||||
let server = create_test_server().await;
|
|
||||||
|
|
||||||
let job_params = JobParams {
|
|
||||||
script: "print('Job with prerequisites');".to_string(),
|
|
||||||
script_type: ScriptType::SAL,
|
|
||||||
caller_id: "test_caller".to_string(),
|
|
||||||
context_id: "test_context".to_string(),
|
|
||||||
timeout: Some(120),
|
|
||||||
prerequisites: Some(vec!["prereq_job_1".to_string(), "prereq_job_2".to_string()]),
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = server.create_job(job_params).await;
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let job_id = result.unwrap();
|
|
||||||
assert!(!job_id.is_empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
async fn test_create_job_invalid_params() {
    let server = create_test_server().await;

    // Test with empty caller_id (should fail JobBuilder validation)
    let job_params = JobParams {
        script: "print('Test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "".to_string(), // Empty caller_id should fail
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let result = server.create_job(job_params).await;
    assert!(result.is_err());
    assert_eq!(result.unwrap_err(), ErrorCode::InvalidParams);
}

#[tokio::test]
async fn test_start_job() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Test job');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Then start the job
    let result = server.start_job(job_id).await;
    assert!(result.is_ok());

    let start_result = result.unwrap();
    assert!(start_result.success);
}

#[tokio::test]
async fn test_get_job_status() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Status test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Get job status
    let result = server.get_job_status(job_id).await;
    assert!(result.is_ok());

    let status = result.unwrap();
    // Status should be one of the valid JobStatus variants
    match status {
        JobStatus::Dispatched | JobStatus::WaitingForPrerequisites |
        JobStatus::Started | JobStatus::Error | JobStatus::Finished => {
            // Valid status
        }
    }
}

#[tokio::test]
async fn test_get_job_output() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Output test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Get job output
    let result = server.get_job_output(job_id).await;
    assert!(result.is_ok());

    let output = result.unwrap();
    assert!(!output.is_empty());
}

#[tokio::test]
async fn test_list_jobs() {
    let server = create_test_server().await;

    // Create a few jobs first
    for i in 0..3 {
        let job_params = JobParams {
            script: format!("print('Job {}');", i),
            script_type: ScriptType::OSIS,
            caller_id: "test_caller".to_string(),
            context_id: "test_context".to_string(),
            timeout: Some(60),
            prerequisites: None,
        };

        let _ = server.create_job(job_params).await.unwrap();
    }

    // List all jobs
    let result = server.list_jobs().await;
    assert!(result.is_ok());

    let jobs = result.unwrap();
    assert!(jobs.len() >= 3); // Should have at least the 3 jobs we created

    // Verify job structure
    for job in jobs {
        assert!(!job.id.is_empty());
        assert!(uuid::Uuid::parse_str(&job.id).is_ok());
    }
}

#[tokio::test]
async fn test_stop_job() {
    let server = create_test_server().await;

    // First create and start a job
    let job_params = JobParams {
        script: "print('Stop test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();
    let _ = server.start_job(job_id.clone()).await.unwrap();

    // Stop the job
    let result = server.stop_job(job_id).await;
    assert!(result.is_ok());
}

#[tokio::test]
async fn test_delete_job() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Delete test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Delete the job
    let result = server.delete_job(job_id).await;
    assert!(result.is_ok());
}

#[tokio::test]
async fn test_clear_all_jobs() {
    let server = create_test_server().await;

    // Create a few jobs first
    for i in 0..3 {
        let job_params = JobParams {
            script: format!("print('Clear test {}');", i),
            script_type: ScriptType::OSIS,
            caller_id: "test_caller".to_string(),
            context_id: "test_context".to_string(),
            timeout: Some(60),
            prerequisites: None,
        };

        let _ = server.create_job(job_params).await.unwrap();
    }

    // Clear all jobs
    let result = server.clear_all_jobs().await;
    assert!(result.is_ok());

    // Verify jobs are cleared
    let jobs = server.list_jobs().await.unwrap();
    assert_eq!(jobs.len(), 0);
}

#[tokio::test]
async fn test_run_job() {
    let server = create_test_server().await;

    let script = "print('Run job test');".to_string();
    let script_type = ScriptType::OSIS;
    let prerequisites = None;

    let result = server.run_job(script, script_type, prerequisites).await;
    assert!(result.is_ok());

    let output = result.unwrap();
    assert!(!output.is_empty());
    assert!(output.contains("Run job test"));
}

#[tokio::test]
async fn test_play_script() {
    let server = create_test_server().await;

    let script = "print('Play script test');".to_string();

    let result = server.play(script.clone()).await;
    assert!(result.is_ok());

    let play_result = result.unwrap();
    assert!(!play_result.output.is_empty());
    assert!(play_result.output.contains(&script));
}

#[tokio::test]
async fn test_get_job_logs() {
    let server = create_test_server().await;

    // First create a job
    let job_params = JobParams {
        script: "print('Logs test');".to_string(),
        script_type: ScriptType::OSIS,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(60),
        prerequisites: None,
    };

    let job_id = server.create_job(job_params).await.unwrap();

    // Get job logs
    let result = server.get_job_logs(job_id).await;
    assert!(result.is_ok());

    let logs_result = result.unwrap();
    assert!(!logs_result.logs.is_empty());
}

#[tokio::test]
async fn test_job_builder_integration() {
    // Test that JobBuilder is working correctly with all the fields
    let job_params = JobParams {
        script: "print('JobBuilder test');".to_string(),
        script_type: ScriptType::V,
        caller_id: "test_caller".to_string(),
        context_id: "test_context".to_string(),
        timeout: Some(300),
        prerequisites: Some(vec!["prereq1".to_string(), "prereq2".to_string()]),
    };

    // Build job using JobBuilder (similar to what the server does)
    let mut builder = JobBuilder::new()
        .caller_id(&job_params.caller_id)
        .context_id(&job_params.context_id)
        .script(&job_params.script)
        .script_type(job_params.script_type);

    if let Some(timeout_secs) = job_params.timeout {
        builder = builder.timeout(Duration::from_secs(timeout_secs));
    }

    if let Some(prerequisites) = job_params.prerequisites {
        builder = builder.prerequisites(prerequisites);
    }

    let job = builder.build();
    assert!(job.is_ok());

    let job = job.unwrap();
    assert_eq!(job.caller_id, "test_caller");
    assert_eq!(job.context_id, "test_context");
    assert_eq!(job.script, "print('JobBuilder test');");
    assert_eq!(job.script_type, ScriptType::V);
    assert_eq!(job.timeout, Duration::from_secs(300));
    assert_eq!(job.prerequisites, vec!["prereq1".to_string(), "prereq2".to_string()]);
}

#[tokio::test]
async fn test_error_handling() {
    let server = create_test_server().await;

    // Test getting status for non-existent job
    let result = server.get_job_status("non_existent_job".to_string()).await;
    // Should return an error or handle gracefully
    match result {
        Ok(_) => {
            // Some implementations might return a default status
        },
        Err(error_code) => {
            assert_eq!(error_code, ErrorCode::InvalidParams);
        }
    }

    // Test getting output for non-existent job
    let result = server.get_job_output("non_existent_job".to_string()).await;
    match result {
        Ok(output) => {
            // Should return "No output available" or similar
            assert!(output.contains("No output available") || output.is_empty());
        },
        Err(error_code) => {
            assert_eq!(error_code, ErrorCode::InvalidParams);
        }
    }
}

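// Note (a sketch, not in the original file): these are integration tests —
// they expect a Redis server at redis://localhost:6379 and share the
// /tmp/test_openrpc.db database — so parallel runs can interfere with each
// other (e.g. test_clear_all_jobs racing test_list_jobs). Running the
// libtest harness single-threaded avoids that:
//
//     cargo test -- --test-threads=1
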
@@ -150,6 +150,7 @@ async fn main() -> std::io::Result<()> {
     }
     println!(" Authentication: {}", if config.auth { "ENABLED" } else { "DISABLED" });
     println!(" TLS/WSS: {}", if config.tls { "ENABLED" } else { "DISABLED" });
+    println!(" Webhooks: {}", if config.webhooks { "ENABLED" } else { "DISABLED" });
     println!(" Circles configured: {}", config.circles.len());

     if config.tls {
@@ -159,6 +160,12 @@ async fn main() -> std::io::Result<()> {
         }
     }

+    if config.webhooks {
+        println!(" Webhook secrets loaded from environment variables:");
+        println!("   - STRIPE_WEBHOOK_SECRET");
+        println!("   - IDENFY_WEBHOOK_SECRET");
+    }
+
     if config.auth && !config.circles.is_empty() {
         println!(" Configured circles:");
         for (circle_name, members) in &config.circles {
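Sketch (an assumption, not part of this diff): consuming the secrets named
above would typically go through std::env::var, e.g.

    let stripe_secret = std::env::var("STRIPE_WEBHOOK_SECRET")
        .expect("STRIPE_WEBHOOK_SECRET must be set when webhooks are enabled");
    let idenfy_secret = std::env::var("IDENFY_WEBHOOK_SECRET")
        .expect("IDENFY_WEBHOOK_SECRET must be set when webhooks are enabled");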