fix ourtime, logging, some missing installers, ...

This commit is contained in:
2024-12-31 09:32:56 +01:00
parent a59a66dd71
commit f8ab2f855a
35 changed files with 1925 additions and 217 deletions

View File

@@ -21,6 +21,7 @@ mut:
params_ ?&paramsparser.Params
dbcollection_ ?&dbfs.DBCollection @[skip; str: skip]
redis_ ?&redisclient.Redis @[skip; str: skip]
path_ ?pathlib.Path
pub mut:
// snippets map[string]string
config ContextConfig
@@ -185,3 +186,13 @@ pub fn (mut self Context) secret_set(secret_ string) ! {
self.config.secret = secret2
self.save()!
}
// path returns the directory for this context, lazily resolved.
// Falls back to ~/hero/context/<name> when no explicit path was set.
pub fn (mut self Context) path() !pathlib.Path {
	return self.path_ or {
		// NOTE(review): fixed redeclaration — the original bound `path` twice
		// (immutable string, then `mut` Path) in one scope, which V rejects.
		dir := '${os.home_dir()}/hero/context/${self.config.name}'
		mut p := pathlib.get_dir(path: dir, create: false)!
		// NOTE(review): the resolved path is not cached into self.path_ —
		// confirm whether caching is intended here.
		p
	}
}

View File

@@ -1,39 +1,105 @@
## context & sessions
## Context & Sessions
Everything we do in hero lives in a context, each context has a unique name.
Everything we do in hero lives in a context, each context has a unique name and ID. A context can have multiple sessions, where each session represents a specific execution environment.
Redis is used to manage the contexts and the sessions.
### Context
- redis db 0
- `context:current` current id of the context; also used as the redis DB number when redis is used
- redis db X, x is nr of context
- `context:name` name of this context
- `context:secret` secret as is used in context (is md5 of original config secret)
- `context:privkey` secp256k1 privkey as is used in context (encrypted by secret)
- `context:params` params for a context, can have metadata
- `context:lastid` last id for our db
- `session:$id` the location of session
- `session:$id:params` params for the session, can have metadata
Each context has:
- A unique ID and name
- Secret management (encrypted)
- Database collection
- Configuration storage
- Code root path
- Parameters
Session id is $contextid:$sessionid (e.g. 10:111)
### Sessions
**The SessionNewArgs:**
Sessions exist within a context and provide:
- Unique name within the context
- Interactive mode control
- Environment variables
- Start/End time tracking
- Parameter storage
- Database access
- Logging
- context_name string = 'default'
- session_name string //default will be an incremental nr if not filled in
- interactive bool = true //can ask questions, default on true
- coderoot string //this will define where all code is checked out
### Storage Structure
Redis is used to manage contexts and sessions:
- Redis DB X (where X is context ID):
- `context:config` - JSON encoded context configuration
- `sessions:config:$name` - JSON encoded session configuration for each session
### Database Structure
Each context has a database collection located at `~/hero/db/$contextid/`. Within this:
- Each session gets its own database named `session_$name`
- A shared `config` database exists for context-wide configuration
### Hero Configuration
Contexts support hero-specific configuration files:
- Stored at `~/hero/context/$contextname/$category__$name.yaml`
- Supports categories for organization
- Automatically handles shell expansions
### Example Usage
```v
import freeflowuniverse.herolib.core.base
mut session:=context_new(
// Create a new context
mut context := context_new(
id: 1
name: 'mycontext'
coderoot: '/tmp/code'
interactive: true
)!
mut session:=session_new(context:'default',session:'mysession1')!
mut session:=session_new()! //returns default context & incremental new session
// Create a new session in the context
mut session := session_new(
context: context
name: 'mysession1'
interactive: true
)!
// Work with environment variables
session.env_set('KEY', 'value')!
value := session.env_get('KEY')!
// Work with hero config
context.hero_config_set('category', 'name', 'content')!
content := context.hero_config_get('category', 'name')!
// Access session database
mut db := session.db_get()!
// Access context-wide config database
mut config_db := session.db_config_get()!
```
### Security
- Context secrets are stored as MD5 hashes
- Support for encryption of sensitive data
- Interactive secret configuration available
### File Structure
Each context and session has its own directory structure:
- Context root: `~/hero/context/$contextname/`
- Session directory: `~/hero/context/$contextname/$sessionname/`
This structure helps organize configuration files, logs, and other session-specific data.
### Logging
Each session has its own logger:
```v
mut logger := session.logger()!
logger.log(log:'My log message')
```
For detailed logging capabilities and options, see the logger documentation in `lib/core/logger/readme.md`.

View File

@@ -4,6 +4,7 @@ import freeflowuniverse.herolib.data.ourtime
// import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.data.dbfs
import freeflowuniverse.herolib.core.logger
import json
// import freeflowuniverse.herolib.core.pathlib
// import freeflowuniverse.herolib.develop.gittools
@@ -11,6 +12,9 @@ import json
@[heap]
pub struct Session {
mut:
path_ ?pathlib.Path
logger_ ?logger.Logger
pub mut:
name string // unique id for session (session id), can be more than one per context
interactive bool = true
@@ -20,6 +24,7 @@ pub mut:
context &Context @[skip; str: skip]
config SessionConfig
env map[string]string
}
///////// LOAD & SAVE
@@ -88,6 +93,14 @@ pub fn (self Session) guid() string {
return '${self.context.guid()}:${self.name}'
}
// path returns the directory for this session under its context directory,
// creating it on first use.
pub fn (mut self Session) path() !pathlib.Path {
	return self.path_ or {
		// NOTE(review): fixed redeclaration of `path` (string then `mut` Path
		// under one name — a V compile error) and added the missing `!` on
		// context.path(), which returns a result.
		dir := '${self.context.path()!.path}/${self.name}'
		mut p := pathlib.get_dir(path: dir, create: true)!
		p
	}
}
fn (self Session) str2() string {
mut out := 'session:${self.guid()}'
out += ' start:\'${self.start}\''

View File

@@ -1,61 +1,10 @@
module base
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.logger
@[heap]
pub struct Logger {
pub mut:
session string
pub fn (session Session) logger() !logger.Logger {
return session.logger_ or {
mut l2 := logger.new("${session.path()!.path}/logs")!
l2
}
pub struct LogItem {
pub mut:
time ourtime.OurTime
cat string
log string
logtype LogType
session string
}
pub enum LogType {
stdout
error
}
pub fn (session Session) logger_new() !Logger {
// mut l:=log.Log{}
// l.set_full_logpath('./info.log')
// l.log_to_console_too()
return Logger{}
}
@[params]
pub struct LogArgs {
pub mut:
cat string
log string @[required]
logtype LogType
}
// cat & log are the arguments .
// category can be any well chosen category e.g. vm
pub fn (mut session Session) log(args_ LogArgs) !LogItem {
mut args := args_
args.cat = texttools.name_fix(args.cat)
mut l := LogItem{
cat: args.cat
log: args.log
time: ourtime.now()
// session: session.guid()
}
// TODO: get string output and put to redis
return l
}
pub fn (li LogItem) str() string {
return '${li.session}'
}

View File

@@ -1,4 +0,0 @@
#redis-cli SCRIPT LOAD "$(cat logger.lua)"
export LOGGER_ADD=$(redis-cli SCRIPT LOAD "$(cat logger_add.lua)")
export LOGGER_DEL=$(redis-cli SCRIPT LOAD "$(cat logger_del.lua)")
export STATS_ADD=$(redis-cli SCRIPT LOAD "$(cat stats_add.lua)")

View File

@@ -1,51 +0,0 @@
-- logger_add: append one log entry for a source.
-- ARGV[1] = source, ARGV[2] = category, ARGV[3] = message.
-- Entries are stored in hash "logs:<src>" keyed by an auto-incremented id,
-- with value "<category>:<epoch>:<message>". Returns the stored entry.
-- Normalize: lowercase, whitespace runs become underscores.
local function normalize(str)
return string.gsub(string.lower(str), "%s+", "_")
end
local src = normalize(ARGV[1])
local category = normalize(ARGV[2])
local message = ARGV[3]
local logHashKey = "logs:" .. src
local lastIdKey = "logs:" .. src .. ":lastid"
-- redis.log(redis.LOG_NOTICE, "...")
-- Increment the log ID using Redis INCR command
local logId = redis.call('INCR', lastIdKey)
-- Get the current epoch time (TIME returns {seconds, microseconds})
local epoch = redis.call('TIME')[1]
-- Prepare the log entry with a unique ID, epoch time, and message
local logEntry = category .. ":" .. epoch .. ":" .. message
-- Add the new log entry to the hash set
redis.call('HSET', logHashKey, logId, logEntry)
-- Trim once the hash exceeds 5000 entries (an earlier comment said 2000,
-- but the code uses 5000)
local hlen = redis.call('HLEN', logHashKey)
if hlen > 5000 then
-- Find the smallest logId by scanning the whole hash
local smallestId = logId
local cursor = "0"
repeat
local scanResult = redis.call('HSCAN', logHashKey, cursor, "COUNT", 5)
cursor = scanResult[1]
local entries = scanResult[2]
for i = 1, #entries, 2 do
local currentId = tonumber(entries[i])
if currentId < smallestId then
smallestId = currentId
end
end
until cursor == "0"
-- redis.log(redis.LOG_NOTICE, "smallest id: " .. smallestId)
-- Remove the 500 oldest ids starting at the smallest.
-- NOTE(review): assumes ids are dense; gaps mean fewer real deletions.
for i = smallestId, smallestId + 500 do
redis.call('HDEL', logHashKey, i)
end
end
return logEntry

View File

@@ -1,22 +0,0 @@
-- logger_del: delete log data from redis.
-- ARGV[1] (optional): a source name. When given, only that source's log hash
-- and its lastid counter are removed; otherwise every "logs:*" key is wiped.
-- Returns the literal string "Logs deleted".

-- Normalize: lowercase, whitespace runs become underscores.
local function normalize(str)
    return string.gsub(string.lower(str), "%s+", "_")
end

local src = nil
if ARGV[1] then
    src = normalize(ARGV[1])
end

if src then
    -- Targeted delete: the entry hash plus its id counter.
    local logHashKey = "logs:" .. src
    redis.call('DEL', logHashKey)
    redis.call('DEL', logHashKey .. ":lastid")
else
    -- Full wipe of everything that looks like a log key.
    for _, key in ipairs(redis.call('KEYS', "logs:*")) do
        redis.call('DEL', key)
    end
end

return "Logs deleted"

View File

@@ -1,18 +0,0 @@
#!/bin/bash
# Smoke test for the redis logger lua scripts: wipe all logs, then add 200
# entries for source BBB / category CAT2 using the preloaded script SHAs.
set -x
cd "$(dirname "$0")"
# load.sh exports LOGGER_ADD / LOGGER_DEL (EVALSHA handles) into the env
source load.sh
# for i in $(seq 1 1000)
# do
# redis-cli EVALSHA $LOGHASH 0 "AAA" "CAT1" "Example log message"
# redis-cli EVALSHA $LOGHASH 0 "AAA" "CAT2" "Example log message"
# done
redis-cli EVALSHA $LOGGER_DEL 0
for i in $(seq 1 200)
do
redis-cli EVALSHA $LOGGER_ADD 0 "BBB" "CAT2" "Example log message"
done

12
lib/core/logger/factory.v Normal file
View File

@@ -0,0 +1,12 @@
module logger
import freeflowuniverse.herolib.core.pathlib
// new creates a Logger that writes hourly log files under `path`.
// The directory is created when it does not exist yet.
pub fn new(path string)! Logger {
	mut logdir := pathlib.get_dir(path: path, create: true)!
	return Logger{
		path:         logdir
		lastlog_time: 0
	}
}

66
lib/core/logger/log.v Normal file
View File

@@ -0,0 +1,66 @@
module logger
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime
// Arguments for Logger.log.
@[params]
pub struct LogItemArgs {
pub mut:
timestamp ?ourtime.OurTime // defaults to ourtime.now() when unset
cat string // category label; must fit in 10 chars after name_fix
log string // message; may be multi-line, is dedented and trimmed
logtype LogType // .error lines get an 'E' prefix, .stdout a space
}
// log appends one formatted entry to the hourly file "<dayhour>.log" under
// l.path. Line format: 1-char type prefix (' ' or 'E'), category padded to
// 10 chars, ' - ', then the message; continuation lines repeat the prefix.
// A bare timestamp line is written whenever a new second starts.
pub fn (mut l Logger) log(args_ LogItemArgs)! {
mut args := args_
// default the timestamp to now when the caller did not supply one
t := args.timestamp or {
t2:=ourtime.now()
t2
}
// Format category (max 10 chars, ascii only)
args.cat = texttools.name_fix(args.cat)
if args.cat.len > 10 {
return error('category cannot be longer than 10 chars')
}
args.cat = texttools.expand(args.cat,10," ")
args.log = texttools.dedent(args.log).trim_space()
mut logfile_path:="${l.path.path}/${t.dayhour()}.log"
// Create log file if it doesn't exist
if !os.exists(logfile_path) {
os.write_file(logfile_path, '')!
l.lastlog_time = 0 //make sure we put time again
}
mut f:= os.open_append(logfile_path)!
mut content := ''
// Add timestamp if we're in a new second
if t.unix() > l.lastlog_time {
content += '\n${t.time().format_ss()}\n'
l.lastlog_time = t.unix()
}
// Format log lines
error_prefix := if args.logtype == .error { 'E' } else { ' ' }
lines := args.log.split('\n')
for i, line in lines {
if i == 0 {
content += '${error_prefix} ${args.cat} - ${line}\n'
} else {
content += '${error_prefix} ${line}\n'
}
}
// trailing whitespace stripped so files do not accumulate blank lines
f.writeln(content.trim_space_right())!
f.close()
}

101
lib/core/logger/log_test.v Normal file
View File

@@ -0,0 +1,101 @@
module logger
import os
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib
// testsuite_begin removes leftovers from a previous run so the tests start
// with a clean /tmp/testlogs.
fn testsuite_begin() {
	if os.exists('/tmp/testlogs') {
		// NOTE(review): the enclosing fn returns no result, so `!` cannot
		// propagate — panic explicitly instead (original did not compile).
		os.rmdir_all('/tmp/testlogs') or { panic(err) }
	}
}
// test_logger writes a mix of stdout/error entries across three distinct
// hours (20:xx and 22:xx on 2022-12-05), then checks the files and search().
fn test_logger() {
mut logger := new('/tmp/testlogs')!
// Test stdout logging
logger.log(LogItemArgs{
cat: 'test-app'
log: 'This is a test message\nWith a second line\nAnd a third line'
logtype: .stdout
timestamp:ourtime.new('2022-12-05 20:14:35')!
})!
// Test error logging
logger.log(LogItemArgs{
cat: 'error-test'
log: 'This is an error\nWith details'
logtype: .error
timestamp: ourtime.new('2022-12-05 20:14:35')!
})!
logger.log(LogItemArgs{
cat: 'test-app'
log: 'This is a test message\nWith a second line\nAnd a third line'
logtype: .stdout
timestamp: ourtime.new('2022-12-05 20:14:36')!
})!
// multi-line message given in indented form; log() dedents and trims it
logger.log(LogItemArgs{
cat: 'error-test'
log: '
This is an error
With details
'
logtype: .error
timestamp: ourtime.new('2022-12-05 20:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
aaa
bbb
'
logtype: .error
timestamp: ourtime.new('2022-12-05 22:14:36')!
})!
logger.log(LogItemArgs{
cat: 'error-test'
log: '
aaa2
bbb2
'
logtype: .error
timestamp: ourtime.new('2022-12-05 22:14:36')!
})!
// Verify log directory exists
assert os.exists('/tmp/testlogs'), 'Log directory should exist'
// Get log file — one file per dayhour, so hours 20 and 22 give two files
files := os.ls('/tmp/testlogs')!
assert files.len == 2
mut file := pathlib.get_file(
path: '/tmp/testlogs/${files[0]}'
create: false
)!
println('/tmp/testlogs/${files[0]}')
content:=file.read()!.trim_space()
items := logger.search()!
assert items.len == 6 //still wrong: TODO
}
// testsuite_end cleans up the temporary log directory after the run.
fn testsuite_end() {
	if os.exists('/tmp/testlogs') {
		// NOTE(review): the enclosing fn returns no result, so `!` cannot
		// propagate — panic explicitly instead (original did not compile).
		os.rmdir_all('/tmp/testlogs') or { panic(err) }
	}
}

28
lib/core/logger/model.v Normal file
View File

@@ -0,0 +1,28 @@
module logger
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.pathlib
// Logger writes and searches hourly log files in a single directory.
@[heap]
pub struct Logger {
pub mut:
path pathlib.Path // directory holding the "<dayhour>.log" files
lastlog_time i64 //to see in log format, every second we put a time down, we need to know if we are in a new second (logs can come in much faster)
}
// One parsed log entry, as written by Logger.log / returned by search.
pub struct LogItem {
pub mut:
timestamp ourtime.OurTime
cat string // category, max 10 chars
log string // message text, possibly multi-line
logtype LogType
}
// Kind of entry: stdout lines are prefixed with ' ', error lines with 'E'.
pub enum LogType {
stdout
error
}

64
lib/core/logger/readme.md Normal file
View File

@@ -0,0 +1,64 @@
# Logger Module
A simple logging system that provides structured logging with search capabilities.
Logs are stored in hourly files with a consistent format that makes them both human-readable and machine-parseable.
## Features
- Structured logging with categories and error types
- Automatic timestamp management
- Multi-line message support
- Search functionality with filtering options
- Human-readable log format
## Usage
```v
import freeflowuniverse.herolib.core.logger
import freeflowuniverse.herolib.data.ourtime
// Create a new logger
mut l := logger.new('/var/logs')!
// Log a message
l.log(
cat: 'system',
log: 'System started successfully',
logtype: .stdout
)!
// Log an error
l.log(
cat: 'system',
log: 'Failed to connect\nRetrying in 5 seconds...',
logtype: .error
)!
// Search logs
results := l.search(
timestamp_from: ourtime.now().warp("-24h"), // Last 24 hours
cat: 'system', // Filter by category
log: 'failed', // Search in message content
logtype: .error, // Only error messages
maxitems: 100 // Limit results
)!
```
## Log Format
```
$time
$cat - $msg
$cat - first line of message
second line of message
third line ...
E $cat - first line of message
E second line of message
E third line ...
```
- time is expressed in '1980-07-11 21:23:42' == time_to_test.format_ss()
- if cat has '-' inside it will be converted to '_'
- $cat max 10 chars, and always takes the 10 chars so that the logs are nicely formatted
- the first char is ' ' or 'E' , E means its the logtype error

128
lib/core/logger/search.v Normal file
View File

@@ -0,0 +1,128 @@
module logger
import os
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.ourtime
// Filters for Logger.search; unset timestamps mean an open bound.
// NOTE(review): the defaults use ourtime.OurTime{} — its unix() value is not
// visible here; confirm an empty OurTime really produces the intended bound.
@[params]
pub struct SearchArgs {
pub mut:
timestamp_from ?ourtime.OurTime
timestamp_to ?ourtime.OurTime
cat string //can be empty
log string //any content in here will be looked for
logtype LogType
maxitems int = 10000
}
// search scans the hourly log files under l.path and returns the entries
// that match the filters in args, up to args.maxitems.
// NOTE(review): the [2..12] and [15..] slices index into the *trimmed* line
// while log() pads the raw line — the companion test marks the result count
// as wrong ("still wrong: TODO"), so these offsets need verification.
pub fn (mut l Logger) search(args_ SearchArgs)! []LogItem {
mut args := args_
// Format category (max 10 chars, ascii only)
args.cat = texttools.name_fix(args.cat)
if args.cat.len > 10 {
return error('category cannot be longer than 10 chars')
}
mut timestamp_from := args.timestamp_from or { ourtime.OurTime{} }
mut timestamp_to := args.timestamp_to or { ourtime.OurTime{} }
// Get time range
from_time := timestamp_from.unix()
to_time := timestamp_to.unix()
if from_time > to_time {
// NOTE(review): message prints '<' but the condition checked is '>'
return error('from_time cannot be after to_time: ${from_time} < ${to_time}')
}
mut result := []LogItem{}
// Find log files in time range
mut files := os.ls(l.path.path)!
files.sort()
for file in files {
if !file.ends_with('.log') {
continue
}
// Parse dayhour from filename
dayhour := file[..file.len-4] // remove .log
file_time := ourtime.new(dayhour)!
mut current_time:=ourtime.OurTime{}
mut current_item := LogItem{}
mut collecting := false
// Skip if file is outside time range
if file_time.unix() < from_time || file_time.unix() > to_time {
continue
}
// Read and parse log file
content := os.read_file('${l.path.path}/${file}')!
lines := content.split('\n')
for line in lines {
if result.len >= args.maxitems {
return result
}
line_trim := line.trim_space()
if line_trim == '' {
continue
}
// Check if this is a timestamp line (data lines start with ' ' or 'E')
if !(line.starts_with(" ") || line.starts_with("E")){
current_time = ourtime.new(line_trim)!
// flush the item collected under the previous timestamp
if collecting {
process(mut result,current_item,current_time, args , from_time, to_time)!
}
collecting = false
continue
}
// Parse log line
is_error := line.starts_with('E')
if !collecting {
// Start new item
current_item = LogItem{
timestamp: current_time
cat: line_trim[2..12].trim_space()
log: line_trim[15..].trim_space()
logtype: if is_error { .error } else { .stdout }
}
collecting = true
} else {
// Continuation line
current_item.log += '\n' + line_trim[15..]
}
}
// Add last item if collecting
if collecting {
process(mut result,current_item,current_time, args , from_time, to_time)!
}
}
return result
}
// process appends current_item to result when its timestamp falls inside the
// [from_time, to_time] window and it matches the category, content and
// logtype filters from args.
fn process(mut result []LogItem, current_item LogItem, current_time ourtime.OurTime, args SearchArgs, from_time i64, to_time i64) ! {
	item_epoch := current_item.timestamp.unix()
	if item_epoch < from_time || item_epoch > to_time {
		return
	}
	cat_ok := args.cat == '' || current_item.cat.trim_space() == args.cat
	log_ok := args.log == '' || current_item.log.contains(args.log)
	if cat_ok && log_ok && args.logtype == current_item.logtype {
		result << current_item
	}
}

View File

@@ -92,6 +92,12 @@ pub fn (ot OurTime) day() string {
return ot.time().ymmdd()
}
// returns a date-time string in "YYYY-MM-DD HH" format (24h).
pub fn (ot OurTime) dayhour() string {
// drop the last ':' segment of the formatted time
// NOTE(review): assumes time().format() ends in ":mm" — confirm format().
return ot.time().format().all_before_last(":")
}
// returns as epoch (seconds)
pub fn (ot OurTime) int() int {
return int(ot.time().unix())

View File

@@ -106,6 +106,13 @@ fn get_unix_from_relative(timestr string) !i64 {
return time_unix
}
// Supported date formats:
// - `YYYY-MM-DD HH:mm:ss`
// - `YYYY-MM-DD HH:mm`
// - `YYYY-MM-DD HH`
// - `YYYY-MM-DD`
// - `DD-MM-YYYY` (YYYY must be 4 digits)
pub fn get_unix_from_absolute(timestr_ string) !i64 {
timestr := timestr_.trim_space()
split_time_hour := timestr.split(' ')
@@ -138,6 +145,9 @@ pub fn get_unix_from_absolute(timestr_ string) !i64 {
return error("unrecognized time format, time must either be YYYY/MM/DD or DD/MM/YYYY, or : in stead of /. Input was:'${timestr_}'")
}
if timepart.trim_space() == ""{
timepart='00:00:00'
}
timparts := timepart.split(':')
if timparts.len > 3 {
return error("format of date/time not correct, in time part: '${timepart}'")
@@ -146,17 +156,15 @@ pub fn get_unix_from_absolute(timestr_ string) !i64 {
if timparts.len == 2 {
timepart = '${timepart}:00'
} else if timparts.len == 1 {
if timepart.len == 0 {
timepart = '00:00:00'
} else {
timepart = '${timepart}:00:00'
}
}
full_string := '${datepart} ${timepart}'
time_struct := time.parse(full_string) or {
return error("could not parse date/time string '${timestr_}': ${err}")
return error("could not parse date/time string '${full_string}': ${err}")
}
//println(" ${timparts} ${time_struct}")
return time_struct.unix()
}

View File

@@ -57,8 +57,10 @@ t2.warp('+1h')! // Move 1 hour forward
#### Absolute Time Format
Supported date formats:
- `YYYY-MM-DD HH:mm:ss`
- `YYYY-MM-DD HH:mm`
- `YYYY-MM-DD HH`
- `YYYY-MM-DD`
- `DD-MM-YYYY` (YYYY must be 4 digits)
- Also supports '/' instead of '-' for dates
@@ -81,6 +83,8 @@ now := ourtime.now()
// Create from string
t := ourtime.new('2022-12-05 20:14:35')!
t := ourtime.new('2022-12-05 20:14')!
t := ourtime.new('2022-12-05 20')!
// Create from unix timestamp
t2 := ourtime.new_from_epoch(1670271275)

View File

@@ -89,15 +89,34 @@ fn test_input_variations() {
// check that standard formats can be inputted
fn test_absolute_time() {
input_strings := {
'2022-12-5 20:14:35': 1670271275
'2022-12-5': 1670198400
// '2022-12': 1669842000 // Should be the beginning of december
// '2022': 1640984400 // should be beginning of 2022
' 2022-12-05 ': 1670198400
'2022-12-5 1': 1670198400 + 3600
'2022-12-5 20': 1670198400 + 3600 * 20
'2022-12-5 20:14': 1670198400 + 3600 * 20 + 14 * 60
'2022-12-5 20:14:35': 1670198400 + 3600 * 20 + 14 * 60 + 35
}
for key, value in input_strings {
thetime := new(key) or { panic('cannot get expiration for ${key}') }
println(" ===== ${key} ${value}")
thetime := new(key) or { panic('cannot get ourtime for ${key}.\n$err') }
assert value == get_unix_from_absolute(key)!
assert thetime.unix() == value, 'expiration was incorrect for ${key}'
}
a := get_unix_from_absolute('2022-12-5')!
a2 := get_unix_from_absolute('2022-12-05')!
b := get_unix_from_absolute('2022-12-5 1')!
c := get_unix_from_absolute('2022-12-5 1:00')!
d := get_unix_from_absolute('2022-12-5 01:00')!
e := get_unix_from_absolute('2022-12-5 01:1')!
assert a==a2
assert b==a+3600
assert b==c
assert b==d
assert e==d+60
}
fn test_from_epoch() {
@@ -120,34 +139,3 @@ fn test_parse_date() {
}
}
// fn test_parse_time() {
// input_strings := {
// '12:20': {
// 'hour': 12
// 'minute': 20
// }
// '15;30': {
// 'hour': 15
// 'minute': 30
// }
// '12:30pm': {
// 'hour': 12
// 'minute': 30
// }
// '3pm': {
// 'hour': 15
// 'minute': 0
// }
// '8.40 pm': {
// 'hour': 20
// 'minute': 40
// }
// }
// for key, value in input_strings {
// test_value := parse(key) or {
// panic('parse_time failed for ${key}, with error ${err}')
// }
// // assert test_value == value, 'hour, minute was incorrect for ${key}'
// }
// }

View File

@@ -0,0 +1,52 @@
module coredns
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.develop.gittools
import os
// configure renders templates/Corefile and writes it to args.config_path.
// When config_url / dnszones_url are set, those repos are pulled first and
// their checkout paths override config_path / dnszones_path.
pub fn configure(args_ InstallArgs) ! {
mut args := args_
mut gs := gittools.get()!
mut repo_path := ''
if args.config_url.len > 0 {
mut repo := gs.get_repo(
url: args.config_url
)!
repo_path = repo.get_path()!
// NOTE(review): get_path() looks like a repo directory, but config_path
// is used as the Corefile *file* path below — confirm this is intended.
args.config_path = repo_path
}
if args.config_path.len == 0 {
args.config_path = '${os.home_dir()}/hero/cfg/Corefile'
}
if args.dnszones_url.len > 0 {
mut repo := gs.get_repo(
url: args.dnszones_url
)!
repo_path = repo.get_path()!
args.dnszones_path = repo_path
}
if args.dnszones_path.len == 0 {
args.dnszones_path = '${os.home_dir()}/hero/cfg/dnszones'
}
// $tmpl interpolates the local `args` values into templates/Corefile
mycorefile := $tmpl('templates/Corefile')
mut path := pathlib.get_file(path: args.config_path, create: true)!
path.write(mycorefile)!
}
// example_configure renders templates/db.example.org and writes it into the
// configured dnszones directory, so a fresh CoreDNS has a sample zone.
pub fn example_configure(args_ InstallArgs) ! {
	// NOTE(review): removed the unused `mut args := args_` local — the body
	// only ever read args_, and V rejects unused variables.
	exampledbfile := $tmpl('templates/db.example.org')
	mut path_testzone := pathlib.get_file(
		path:   '${args_.dnszones_path}/db.example.org'
		create: true
	)!
	path_testzone.template_write(exampledbfile, true)!
}

View File

@@ -0,0 +1,138 @@
module coredns
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.screen
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.clients.httpconnection
import os
// Arguments shared by install/configure/start/stop/restart for CoreDNS.
@[params]
pub struct InstallArgs {
pub mut:
reset bool // this means we re-install and forgot what we did before
start bool = true
stop bool
restart bool // this means we stop if started, otherwise just start
homedir string // not sure what this is?
config_path string // path to Corefile, if empty will install default one
config_url string // path to Corefile through e.g. git url, will pull it if it is not local yet
dnszones_path string // path to where all the dns zones are
dnszones_url string // path on git url pull if needed
plugins []string // list of plugins to build CoreDNS with
example bool // if true we will install examples
}
// install downloads the CoreDNS release binary for the current platform when
// it is missing or older than the pinned version, registers the `coredns`
// command, writes the Corefile, then optionally starts/restarts the daemon.
pub fn install(args_ InstallArgs) ! {
	mut args := args_
	version := '1.11.1'
	res := os.execute('${osal.profile_path_source_and()} coredns version')
	if res.exit_code == 0 {
		// version output contains a single line starting with 'CoreDNS-'
		r := res.output.split_into_lines().filter(it.trim_space().starts_with('CoreDNS-'))
		if r.len != 1 {
			return error("couldn't parse coredns version.\n${res.output}")
		}
		// installed version older than pinned one -> reinstall
		if texttools.version(version) > texttools.version(r[0].all_after_first('CoreDNS-')) {
			args.reset = true
		}
	} else {
		// binary not present at all
		args.reset = true
	}
	if args.reset {
		console.print_header('install coredns')
		mut url := ''
		if osal.is_linux_arm() {
			url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_linux_arm64.tgz'
		} else if osal.is_linux_intel() {
			url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_linux_amd64.tgz'
		} else if osal.is_osx_arm() {
			url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_darwin_arm64.tgz'
		} else if osal.is_osx_intel() {
			url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_darwin_amd64.tgz'
		} else {
			// NOTE(review): fixed typo 'unsported' in the error message
			return error('unsupported platform')
		}
		mut dest := osal.download(
			url:        url
			minsize_kb: 13000
			expand_dir: '/tmp/coredns'
		)!
		mut binpath := dest.file_get('coredns')!
		osal.cmd_add(
			cmdname: 'coredns'
			source:  binpath.path
		)!
	}
	configure(args)!
	if args.example {
		example_configure(args)!
	}
	if args.restart {
		restart(args)!
		return
	}
	if args.start {
		start(args)!
	}
}
// restart stops the coredns screen session (if any) and starts it again.
pub fn restart(args_ InstallArgs) ! {
stop(args_)!
start(args_)!
}
// stop kills the 'coredns' screen session if one is running.
pub fn stop(args_ InstallArgs) ! {
console.print_header('coredns stop')
name := 'coredns'
// use startup manager, see caddy
mut scr := screen.new()!
scr.kill(name)!
}
// start (re)writes the configuration and launches coredns in a screen
// session named 'coredns'. Does nothing when a healthy instance already
// answers on the health port.
pub fn start(args_ InstallArgs) ! {
	mut args := args_
	configure(args)!
	if check()! {
		// already running and healthy
		return
	}
	console.print_header('coredns start')
	name := 'coredns'
	mut scr := screen.new()!
	mut s := scr.add(name: name, reset: true)!
	cmd2 := "coredns -conf '${args.config_path}'"
	s.cmd_send(cmd2)!
	// NOTE(review): no delay between cmd_send and the health probe — if
	// startup is slow this may need a retry loop; confirm.
	if !check()! {
		// NOTE(review): fixed message — this is a start failure, not install,
		// and 'propertly' was a typo.
		return error("coredns did not start properly, check: curl 'http://localhost:3334/health'")
	}
	console.print_header('coredns running')
}
// check probes the coredns health endpoint on localhost:3334 and reports
// whether it answers 'OK'.
pub fn check() !bool {
	mut conn := httpconnection.new(name: 'coredns', url: 'http://localhost:3334')!
	resp := conn.get(prefix: 'health')!
	return resp.trim_space() == 'OK'
}

View File

@@ -0,0 +1,49 @@
module coredns
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.installers.base
import os
// play processes `coredns.install` actions from the playbook, mapping each
// action's parameters onto InstallArgs and invoking install().
pub fn play(mut plbook playbook.PlayBook) ! {
	// NOTE(review): was `base.play(playbook)!`, which referenced the module
	// name instead of the `plbook` variable and could not compile.
	base.play(mut plbook)!
	coredns_actions := plbook.find(filter: 'coredns.')!
	if coredns_actions.len == 0 {
		return
	}
	mut install_actions := plbook.find(filter: 'coredns.install')!
	if install_actions.len > 0 {
		for install_action in install_actions {
			mut p := install_action.params
			// CoreDNS parameters; defaults mirror the InstallArgs defaults
			reset := p.get_default_false('reset')
			start := p.get_default_true('start')
			stop := p.get_default_false('stop')
			restart := p.get_default_false('restart')
			homedir := p.get_default('homedir', '${os.home_dir()}/hero/var/coredns')!
			config_path := p.get_default('config_path', '${os.home_dir()}/hero/cfg/Corefile')!
			config_url := p.get_default('config_url', '')!
			dnszones_path := p.get_default('dnszones_path', '${os.home_dir()}/hero/var/coredns/zones')!
			dnszones_url := p.get_default('dnszones_url', '')!
			plugins := p.get_list_default('plugins', [])!
			example := p.get_default_false('example')
			install(
				reset:         reset
				start:         start
				stop:          stop
				restart:       restart
				homedir:       homedir
				config_path:   config_path
				config_url:    config_url
				dnszones_path: dnszones_path
				dnszones_url:  dnszones_url
				plugins:       plugins
				example:       example
			)!
		}
	}
}

View File

@@ -0,0 +1,7 @@
.:53 {
forward . 8.8.8.8 9.9.9.9
log
errors
health :3334
import '${args.dnszones_path}/*'
}

View File

@@ -0,0 +1,14 @@
$ORIGIN example.org.
@ 3600 IN SOA sns.dns.icann.org. noc.dns.icann.org. (
2017042745 ; serial
7200 ; refresh (2 hours)
3600 ; retry (1 hour)
1209600 ; expire (2 weeks)
3600 ; minimum (1 hour)
)
3600 IN NS a.iana-servers.net.
3600 IN NS b.iana-servers.net.
www IN A 127.0.0.1
IN AAAA ::1

View File

View File

@@ -0,0 +1,77 @@
module gitea
import freeflowuniverse.herolib.installers.db.postgresql as postgresinstaller
import freeflowuniverse.herolib.installers.base
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
// install downloads the pinned gitea release (linux amd64), installs the
// base + postgresql dependencies, registers the `gitea` command, and records
// completion via osal.done_set so a second call is a no-op.
pub fn install() ! {
	// NOTE(review): was `!= .ubuntu || != .arch`, which is true on every
	// platform (a platform can never equal both) and made install always
	// fail — the intent is "neither ubuntu nor arch".
	if osal.platform() != .ubuntu && osal.platform() != .arch {
		return error('only support ubuntu and arch for now')
	}
	if osal.done_exists('gitea_install') {
		console.print_header('gitea binaries already installed')
		return
	}
	// make sure we install base on the node
	base.install()!
	postgresinstaller.install()!
	version := '1.22.0'
	url := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}-linux-amd64.xz'
	console.print_debug(' download ${url}')
	mut dest := osal.download(
		url:         url
		minsize_kb:  40000
		reset:       true
		expand_file: '/tmp/download/gitea'
	)!
	binpath := pathlib.get_file(path: '/tmp/download/gitea', create: false)!
	osal.cmd_add(
		cmdname: 'gitea'
		source:  binpath.path
	)!
	osal.done_set('gitea_install', 'OK')!
	console.print_header('gitea installed properly.')
}
// start currently performs the same binary installation as install().
// TODO(review): the body is a verbatim copy of install() and never launches
// the daemon — confirm the intended start behavior and deduplicate.
pub fn start() ! {
	// NOTE(review): fixed `||` -> `&&` — the original condition was true on
	// every platform and always errored.
	if osal.platform() != .ubuntu && osal.platform() != .arch {
		return error('only support ubuntu and arch for now')
	}
	if osal.done_exists('gitea_install') {
		console.print_header('gitea binaries already installed')
		return
	}
	// make sure we install base on the node
	base.install()!
	postgresinstaller.install()!
	version := '1.22.0'
	url := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}-linux-amd64.xz'
	console.print_debug(' download ${url}')
	mut dest := osal.download(
		url:         url
		minsize_kb:  40000
		reset:       true
		expand_file: '/tmp/download/gitea'
	)!
	binpath := pathlib.get_file(path: '/tmp/download/gitea', create: false)!
	osal.cmd_add(
		cmdname: 'gitea'
		source:  binpath.path
	)!
	osal.done_set('gitea_install', 'OK')!
	console.print_header('gitea installed properly.')
}

View File

@@ -0,0 +1,210 @@
module gitea
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.zinit
import freeflowuniverse.herolib.data.dbfs
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.installers.postgresql
import json
import rand
import os
import time
import freeflowuniverse.herolib.ui.console
// Persistent configuration for a gitea instance; stored JSON-encoded in the
// 'config' dbfs collection under key 'gitea_config_<name>'.
@[params]
pub struct Config {
pub mut:
name string = 'default'
reset bool
path string = '/data/gitea'
passwd string // admin password; random 12 chars generated when empty
postgresql_name string = 'default'
mail_from string = 'git@meet.tf'
smtp_addr string = 'smtp-relay.brevo.com'
smtp_login string @[required]
smpt_port int = 587 // NOTE(review): typo 'smpt' kept — renaming would break JSON-decoded stored configs; migrate carefully
smtp_passwd string
domain string @[required]
jwt_secret string // generated via `gitea generate secret` when empty
lfs_jwt_secret string // generated when empty
internal_token string // generated when empty
secret_key string // generated when empty
}
// Runtime handle for a gitea instance: its persisted Config plus the zinit
// process, when one named 'gitea_<name>' is registered.
pub struct Server {
pub mut:
name string
config Config
process ?zinit.ZProcess // none when zinit has no such process yet
path_config pathlib.Path // <config.path>/cfg
}
// get the gitea server
//```js
// name string = 'default'
// path string = '/data/gitea'
// passwd string
//```
// if name exists already in the config DB, it will load for that name
// new ensures gitea is installed, persists the config on first use, and
// returns the Server via get().
pub fn new(args_ Config) !Server {
install()! // make sure it has been build & ready to be used
mut args := args_
if args.passwd == '' {
args.passwd = rand.string(12)
}
args.name = texttools.name_fix(args.name)
key := 'gitea_config_${args.name}'
mut kvs := dbfs.new(name: 'config')!
// Config is written only when the key does not exist yet; on later calls
// the stored config wins and the args passed here are ignored.
if !kvs.exists(key) {
// jwt_secret string
// lfs_jwt_secret string
// internal_token string
// secret_key string
// empty secrets are generated through the gitea CLI
if args.jwt_secret == '' {
r := os.execute_or_panic('gitea generate secret JWT_SECRET')
args.jwt_secret = r.output.trim_space()
}
if args.lfs_jwt_secret == '' {
r := os.execute_or_panic('gitea generate secret LFS_JWT_SECRET')
args.lfs_jwt_secret = r.output.trim_space()
}
if args.internal_token == '' {
r := os.execute_or_panic('gitea generate secret INTERNAL_TOKEN')
args.internal_token = r.output.trim_space()
}
if args.secret_key == '' {
r := os.execute_or_panic('gitea generate secret SECRET_KEY')
args.secret_key = r.output.trim_space()
}
data := json.encode(args)
kvs.set(key, data)!
}
return get(args.name)!
}
// get loads the Server for a previously configured name from the config DB,
// attaches the zinit process handle if one exists, and starts the server.
// NOTE(review): get() has the side effect of starting the server -- callers
// that only want to inspect state should be aware of this.
// Returns an error when no config exists for the given name.
pub fn get(name_ string) !Server {
console.print_header('get gitea server ${name_}')
name := texttools.name_fix(name_)
key := 'gitea_config_${name}'
mut kvs := dbfs.new(name: 'config')!
if kvs.exists(key) {
data := kvs.get(key)!
args := json.decode(Config, data)!
mut server := Server{
name: name
config: args
path_config: pathlib.get_dir(path: '${args.path}/cfg', create: true)!
}
mut z := zinit.new()!
processname := 'gitea_${name}'
if z.process_exists(processname) {
server.process = z.process_get(processname)!
}
// console.print_debug(" - server get ok")
server.start()!
return server
}
return error("can't find server gitea with name ${name}")
}
// status reports the zinit state of the gitea process.
// Possible values of zinit.ZProcessStatus:
//   unknown, init, ok, error, blocked, spawned
// When no process handle is attached, or zinit cannot be queried,
// .unknown is returned instead of an error.
pub fn (mut server Server) status() zinit.ZProcessStatus {
	mut proc := server.process or { return .unknown }
	st := proc.status() or { return .unknown }
	return st
}
// start brings up the gitea instance under zinit:
//  1. creates the 'gitea' database on the configured postgresql instance
//  2. renders app.ini from the template into ${path}/cfg
//  3. ensures the 'git' system user exists and owns the data path
//  4. launches `gitea web` via zinit and waits (120s) for the web server
//     log line before running check().
pub fn (mut server Server) start() ! {
// if server.ok(){
// return
// }
console.print_header('start gitea: ${server.name}')
mut db := postgresql.get(server.config.postgresql_name)!
// now create the DB
db.db_create('gitea')!
// if true{
// panic("sd")
// }
// TODO: postgresql can be on other server, need to fill in all arguments in template
t1 := $tmpl('templates/app.ini')
mut config_path := server.path_config.file_get_new('app.ini')!
config_path.write(t1)!
osal.user_add(name: 'git')!
osal.exec(
cmd: '
chown -R git:root ${server.config.path}
chmod -R 777 /usr/local/bin
'
)!
mut z := zinit.new()!
processname := 'gitea_${server.name}'
mut p := z.process_new(
name: processname
cmd: '
cd /tmp
sudo -u git bash -c \'gitea web --config ${config_path.path} --verbose\'
'
)!
// block until gitea reports its web listener is up (or 120s timeout)
p.output_wait('Starting new Web server: tcp:0.0.0.0:3000', 120)!
o := p.log()!
console.print_debug(o)
server.check()!
console.print_header('gitea start ok.')
}
// restart fully cycles the gitea server: stop, then start again.
pub fn (mut server Server) restart() ! {
server.stop()!
server.start()!
}
// stop halts the running gitea zinit process for this server.
// A missing process handle is treated as already stopped (no error).
pub fn (mut server Server) stop() ! {
	// fixed: removed stray print_backtrace() debug leftover
	console.print_header('stop gitea: ${server.name}')
	mut process := server.process or { return }
	return process.stop()
}
// check verifies the gitea instance is healthy; returns an error otherwise.
// Currently only the zinit process state is checked.
pub fn (mut server Server) check() ! {
mut p := server.process or { return error("can't find process for server.") }
p.check()!
// TODO: need to do some other checks to gitea e.g. rest calls
}
// check health, return true if ok
// Boolean convenience wrapper around check(): any error maps to false.
pub fn (mut server Server) ok() bool {
server.check() or { return false }
return true
}
// remove all data
// NOTE(review): only the cfg directory is deleted here; the gitea data under
// server.config.path and the postgres 'gitea' DB are left in place -- confirm
// whether "all data" is really the intent.
pub fn (mut server Server) destroy() ! {
server.stop()!
server.path_config.delete()!
}

View File

@@ -0,0 +1,108 @@
APP_NAME = ${server.config.name}
RUN_MODE = prod
RUN_USER = git
WORK_PATH = ${server.config.path}
[repository]
ROOT = ${server.config.path}/gitrepo
[repository.local]
LOCAL_COPY_PATH = ${server.config.path}/localrepo
[repository.upload]
TEMP_PATH = ${server.config.path}/uploads
[server]
APP_DATA_PATH = ${server.config.path}/data
DOMAIN = ${server.config.domain}
SSH_DOMAIN = ${server.config.domain}
SSH_PORT = 22
SSH_LISTEN_PORT = 22
HTTP_PORT = 3000
ROOT_URL = https://${server.config.domain}
DISABLE_SSH = false
LFS_START_SERVER = true
LFS_JWT_SECRET = ${server.config.lfs_jwt_secret}
OFFLINE_MODE = false
[database]
PATH = ${server.config.path}/gitea.db
DB_TYPE = postgres
HOST = localhost:5432
NAME = gitea
USER = root
PASSWD = ${db.config.passwd}
LOG_SQL = false
SCHEMA =
SSL_MODE = disable
[indexer]
ISSUE_INDEXER_PATH = ${server.config.path}/indexers/issues.bleve
[session]
PROVIDER_CONFIG = ${server.config.path}/sessions
PROVIDER = file
[picture]
AVATAR_UPLOAD_PATH = ${server.config.path}/avatars
REPOSITORY_AVATAR_UPLOAD_PATH = ${server.config.path}/repo-avatars
[attachment]
PATH = ${server.config.path}/attachments
[log]
MODE = console
LEVEL = info
ROOT_PATH = ${server.config.path}/log
[security]
INSTALL_LOCK = true
SECRET_KEY = ${server.config.secret_key}
REVERSE_PROXY_LIMIT = 1
REVERSE_PROXY_TRUSTED_PROXIES = *
INTERNAL_TOKEN = ${server.config.internal_token}
PASSWORD_HASH_ALGO = pbkdf2
[service]
DISABLE_REGISTRATION = false
REQUIRE_SIGNIN_VIEW = false
REGISTER_EMAIL_CONFIRM = false
ENABLE_NOTIFY_MAIL = true
ALLOW_ONLY_EXTERNAL_REGISTRATION = false
ENABLE_CAPTCHA = false
DEFAULT_KEEP_EMAIL_PRIVATE = false
DEFAULT_ALLOW_CREATE_ORGANIZATION = true
DEFAULT_ENABLE_TIMETRACKING = true
NO_REPLY_ADDRESS = noreply.localhost
[lfs]
PATH = ${server.config.path}/lfs
[mailer]
ENABLED = true
FROM = ${server.config.mail_from}
; PROTOCOL = smtps
SMTP_ADDR = ${server.config.smtp_addr}
SMTP_PORT = ${server.config.smpt_port}
USER = ${server.config.smtp_login}
PASSWD = ${server.config.smtp_passwd}
[openid]
ENABLE_OPENID_SIGNIN = true
ENABLE_OPENID_SIGNUP = true
[cron.update_checker]
ENABLED = false
[repository.pull-request]
DEFAULT_MERGE_STYLE = merge
[repository.signing]
DEFAULT_TRUST_MODEL = committer
[oauth2]
JWT_SECRET = ${server.config.jwt_secret}
[actions]
ENABLED=true

View File

@@ -0,0 +1,13 @@
!!hero_code.generate_installer
name:'livekit'
classname:'LivekitServer'
singleton:0
templates:1
default:1
title:''
supported_platforms:''
reset:0
startupmanager:1
hasconfig:1
build:0

View File

@@ -0,0 +1,110 @@
module livekit

import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.zinit
import freeflowuniverse.herolib.ui.console
import json
import net.http
import os
// checks if a certain version or above is installed
// Runs `livekit-server -v` through the profile-sourced shell; returns false
// when the binary is missing or its version differs from the module's
// `version` const, and an error when the output cannot be parsed.
fn installed() !bool {
res := os.execute('${osal.profile_path_source_and()} livekit-server -v')
if res.exit_code != 0 {
// binary not found / not runnable
return false
}
r := res.output.split_into_lines().filter(it.contains("version"))
if r.len != 1 {
return error("couldn't parse livekit version.\n${res.output}")
}
installedversion:=r[0].all_after_first('version')
// texttools.version normalizes both sides before comparing
if texttools.version(version) != texttools.version(installedversion) {
return false
}
return true
}
// install downloads and installs livekit-server via the official install script.
fn install() ! {
	console.print_header('install livekit')
	// fixed: removed unused `mut installer := get()!` local (V rejects unused vars)
	osal.execute_silent("
	curl -s https://livekit.io/install.sh | bash
	")!
}
// startupcmd returns the zinit process definition(s) needed to run this
// livekit instance under the startup manager: a single `livekit-server`
// process bound to 0.0.0.0, using the instance's generated config file.
fn startupcmd () ![]zinit.ZProcessNewArgs{
mut res := []zinit.ZProcessNewArgs{}
mut installer := get()!
res << zinit.ZProcessNewArgs{
name: 'livekit'
cmd: 'livekit-server --config ${installer.configpath} --bind 0.0.0.0'
}
return res
}
// running checks the local livekit health endpoint for the current instance.
// Each instance listens on its own port (nr*2 + 7880, see templates/config.yaml).
// Returns false on connection failure, non-200 status, or a non-ok health body.
fn running() !bool {
	mut installer := get()!
	myport := installer.nr * 2 + 7880
	// fixed: original interpolation '${http://localhost:${myport}/...' was malformed
	endpoint := 'http://localhost:${myport}/api/v1/health'
	response := http.get(endpoint) or {
		println('Error connecting to LiveKit server: ${err}')
		return false
	}
	if response.status_code != 200 {
		println('LiveKit server returned non-200 status code: ${response.status_code}')
		return false
	}
	health_info := json.decode(map[string]string, response.body) or {
		println('Error decoding LiveKit server response: ${err}')
		return false
	}
	if health_info['status'] != 'ok' {
		println('LiveKit server health check failed: ${health_info['status']}')
		return false
	}
	return true
}
// lifecycle hooks: called by the factory around start()/stop();
// intentionally empty for livekit (no extra work needed).
fn start_pre()!{
}
fn start_post()!{
}
fn stop_pre()!{
}
fn stop_post()!{
}
// destroy removes the instance config file and the installed binary.
fn destroy() ! {
	mut installer := get()!
	// fixed: os.rm takes a single path; the original passed a multi-line
	// string containing two paths, which can never match a real file
	if os.exists(installer.configpath) {
		os.rm(installer.configpath)!
	}
	// remove the binary wherever the install script placed it (if on PATH)
	binpath := os.find_abs_path_of_executable('livekit-server') or { return }
	os.rm(binpath)!
}

View File

@@ -0,0 +1,229 @@
module livekit
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.ui.console
import time
// module-level registry of configured livekit instances (keyed by name),
// plus the name of the currently selected default instance (see switch()).
__global (
livekit_global map[string]&LivekitServer
livekit_default string
)
/////////FACTORY
// ArgsGet selects which livekit instance to operate on; empty name
// falls back to the module default (see args_get).
@[params]
pub struct ArgsGet{
pub mut:
name string = "default"
}
// args_get normalizes ArgsGet: an empty name falls back to the module-wide
// default instance (livekit_default), and finally to 'default'.
fn args_get (args_ ArgsGet) ArgsGet {
	mut result := args_
	if result.name.len == 0 {
		result.name = livekit_default
	}
	if result.name.len == 0 {
		result.name = 'default'
	}
	return result
}
// get returns the LivekitServer for args.name, loading its heroscript
// config on first access (and creating it first when `default` is enabled).
pub fn get(args_ ArgsGet) !&LivekitServer {
	mut args := args_get(args_)
	if args.name !in livekit_global {
		// fixed: config_exists/config_save/config_load were called without
		// args, so they always acted on 'default' instead of args.name
		if !config_exists(args) {
			if default {
				config_save(args)!
			}
		}
		config_load(args)!
	}
	return livekit_global[args.name] or {
		println(livekit_global)
		panic('bug in get from factory: ')
	}
}
// config_exists reports whether a heroscript config is stored in the hero
// context for the named livekit instance.
fn config_exists(args_ ArgsGet) bool {
mut args := args_get(args_)
mut context:=base.context() or { panic("bug") }
return context.hero_config_exists("livekit",args.name)
}
// config_load fetches the stored heroscript for the named instance from the
// hero context and replays it (which populates livekit_global via play/set).
fn config_load(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context:=base.context()!
mut heroscript := context.hero_config_get("livekit",args.name)!
play(heroscript:heroscript)!
}
// config_save writes the module's default heroscript config into the hero
// context under the given instance name.
fn config_save(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context:=base.context()!
context.hero_config_set("livekit",args.name,heroscript_default()!)!
}
// set initializes a LivekitServer config (obj_init fills paths/keys) and
// registers it in the module registry under its own name.
fn set(o LivekitServer)! {
	mut o2:=obj_init(o)!
	// fixed: was always stored under 'default', clobbering named instances
	livekit_global[o2.name] = &o2
}
// PlayArgs drives play(): either a raw heroscript string or an existing
// playbook; the lifecycle flags are reserved for the generic installer flow.
@[params]
pub struct PlayArgs {
pub mut:
name string = 'default'
heroscript string //if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
start bool
stop bool
restart bool
delete bool
configure bool //make sure there is at least one installed
}
// play executes `!!livekit.configure` actions from a heroscript/playbook:
// each action's params are converted into a LivekitServer config (cfg_play)
// and registered in the module registry (set).
// When no heroscript/playbook is given, the module default heroscript is used.
pub fn play(args_ PlayArgs) ! {
mut args:=args_
if args.heroscript == "" {
args.heroscript = heroscript_default()!
}
mut plbook := args.plbook or {
playbook.new(text: args.heroscript)!
}
mut install_actions := plbook.find(filter: 'livekit.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
mut p := install_action.params
mycfg:=cfg_play(p)!
set(mycfg)!
}
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
//load from disk and make sure is properly intialized
// reload re-runs obj_init on this instance (regenerating missing paths/keys)
// and makes it the module default.
pub fn (mut self LivekitServer) reload() ! {
switch(self.name)
self=obj_init(self)!
}
// start brings the livekit instance up: writes the config file (configure),
// runs the start hooks, launches all startupcmd processes via the startup
// manager, then polls the health endpoint for up to ~5s.
pub fn (mut self LivekitServer) start() ! {
	switch(self.name)
	if self.running()! {
		// already healthy, nothing to do
		return
	}
	console.print_header('livekit start')
	configure()!
	start_pre()!
	mut sm := startupmanager.get()!
	for zprocess in startupcmd()! {
		sm.start(zprocess.name)!
	}
	start_post()!
	// poll up to 50 x 100ms for the service to report healthy
	for _ in 0 .. 50 {
		if self.running()! {
			return
		}
		time.sleep(100 * time.millisecond)
	}
	// fixed: message said "install" but this is the start path
	return error('livekit did not start properly.')
}
// install_start installs (honoring args.reset) and then starts the instance.
// fixed: parameter type was the undefined `RestartArgs`; install() takes InstallArgs.
pub fn (mut self LivekitServer) install_start(args InstallArgs) ! {
	switch(self.name)
	self.install(args)!
	self.start()!
}
// stop halts every startup-manager process belonging to this instance,
// running the stop_pre/stop_post hooks around the shutdown.
pub fn (mut self LivekitServer) stop() ! {
	switch(self.name)
	stop_pre()!
	mut manager := startupmanager.get()!
	for proc in startupcmd()! {
		manager.stop(proc.name)!
	}
	stop_post()!
}
// restart fully cycles the instance: stop, then start.
pub fn (mut self LivekitServer) restart() ! {
switch(self.name)
self.stop()!
self.start()!
}
// running returns true only when every startup-manager process for this
// instance is running AND the module-level health check (running()) passes.
pub fn (mut self LivekitServer) running() !bool {
switch(self.name)
mut sm := startupmanager.get()!
//walk over the generic processes, if not running return
for zprocess in startupcmd()!{
r:=sm.running(zprocess.name)!
if r==false{
return false
}
}
return running()!
}
// InstallArgs controls install(): reset forces a reinstall even when a
// matching version is already present.
@[params]
pub struct InstallArgs{
pub mut:
reset bool
}
// install (re)installs the livekit binary: always when args.reset is set,
// otherwise only when no matching version is currently installed.
pub fn (mut self LivekitServer) install(args InstallArgs) ! {
	switch(self.name)
	needed := args.reset || (!installed()!)
	if needed {
		install()!
	}
}
// destroy stops the instance and removes its config file and binary
// (see the module-level destroy()).
pub fn (mut self LivekitServer) destroy() ! {
switch(self.name)
self.stop()!
destroy()!
}
//switch instance to be used for livekit
// Sets the module-wide default instance name used when ArgsGet.name is empty.
// NOTE(review): no validation that an instance with this name exists.
pub fn switch(name string) {
livekit_default = name
}

View File

@@ -0,0 +1,89 @@
module livekit

import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.ui.console
import os
// expected livekit-server version; compared against `livekit-server -v`
pub const version = '1.7.2'
// only one instance allowed on a host?
const singleton = false
// auto-create a default config when none exists
const default = true
//TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
// heroscript_default returns the heroscript used to seed a fresh config
// (replayed through play() -> cfg_play() -> set()).
pub fn heroscript_default() !string {
heroscript:="
!!livekit.configure
name:'default'
apikey: ''
apisecret: ''
nr: 1 // each specific instance onto this server needs to have a unique nr
"
return heroscript
}
//THIS THE THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
pub struct LivekitServer {
pub mut:
// instance name, key into the module registry
name string = 'default'
// API key; generated via `livekit-server generate-keys` when empty (obj_init)
apikey string
apisecret string @[secret]
// path of the rendered config.yaml; defaulted in obj_init when empty
configpath string
nr int = 0 // each specific instance onto this server needs to have a unique nr
}
// cfg_play builds a LivekitServer config from heroscript params.
// All fields are optional; obj_init later fills configpath and generates
// API credentials when they are left empty.
fn cfg_play(p paramsparser.Params) !LivekitServer {
	mut mycfg := LivekitServer{
		name: p.get_default('name', 'default')!
		apikey: p.get_default('apikey', '')!
		apisecret: p.get_default('apisecret', '')!
		// generalized: allow an explicit configpath override from heroscript
		// (empty keeps the previous behavior of a generated default path)
		configpath: p.get_default('configpath', '')!
		nr: p.get_default_int('nr', 0)!
	}
	return mycfg
}
// obj_init finalizes a LivekitServer config: fills in the default config
// path and generates the API key/secret via livekit-server when missing.
// fixed: the original referenced the undefined names `myconfig`, `server`
// and `obj` (compile errors); all now use the local `mycfg`.
fn obj_init(obj_ LivekitServer)!LivekitServer{
	mut mycfg:=obj_
	if mycfg.configpath == ''{
		mycfg.configpath = '${os.home_dir()}/hero/cfg/livekit_${mycfg.name}.yaml'
	}
	if mycfg.apikey == '' || mycfg.apisecret == '' {
		// Execute the livekit-server generate-keys command
		result := os.execute('livekit-server generate-keys')
		if result.exit_code != 0 {
			return error('Failed to generate LiveKit keys')
		}
		// Extract API Key and API Secret from the command output
		lines := result.output.split_into_lines()
		for line in lines {
			if line.starts_with('API Key:') {
				mycfg.apikey = line.all_after('API Key:').trim_space()
			} else if line.starts_with('API Secret:') {
				mycfg.apisecret = line.all_after('API Secret:').trim_space()
			}
		}
		// Verify that both keys were extracted
		if mycfg.apikey == '' || mycfg.apisecret == '' {
			return error('Failed to extract API Key or API Secret')
		}
	}
	return mycfg
}
//called before start if done
// configure renders templates/config.yaml with this instance's settings
// (port = nr*2+7880, keys, etc.) and writes it to installer.configpath.
fn configure() ! {
mut installer := get()!
mut mycode := $tmpl('templates/config.yaml')
mut path := pathlib.get_file(path: installer.configpath, create: true)!
path.write(mycode)!
console.print_debug(mycode)
}

View File

@@ -0,0 +1,22 @@
# livekit
To get started
```vlang
import freeflowuniverse.herolib.installers.something.livekit
mut installer:= livekit.get()!
installer.start()!
```
livekit once installed will have generated the secret keys

View File

@@ -0,0 +1,26 @@
port: ${installer.nr*2+7880}
log_level: info
rtc:
tcp_port: ${installer.nr*2+7881}
port_range_start: ${installer.nr*1000+50000}
port_range_end: ${installer.nr*1000+999+50000}
# use_external_ip should be set to true for most cloud environments where
# the host has a public IP address, but is not exposed to the process.
# LiveKit will attempt to use STUN to discover the true IP, and advertise
# that IP with its clients
use_external_ip: true
redis:
# redis is recommended for production deploys
address: localhost:6379
keys:
# key-value pairs
${installer.apikey}: ${installer.apisecret}
# When enabled, LiveKit will expose prometheus metrics on :6789/metrics
#prometheus_port: ${installer.nr+6789}
# turn:
# enabled: true
# # domain must match tls certificate
# domain: <turn.myhost.com>
# # defaults to 3478. If not using a load balancer, must be set to 443.
# tls_port: 3478

View File

@@ -0,0 +1,191 @@
module installers
import freeflowuniverse.herolib.installers.base
import freeflowuniverse.herolib.installers.develapps.vscode
import freeflowuniverse.herolib.installers.develapps.chrome
import freeflowuniverse.herolib.installers.virt.podman as podman_installer
import freeflowuniverse.herolib.installers.virt.buildah as buildah_installer
import freeflowuniverse.herolib.installers.virt.lima
import freeflowuniverse.herolib.installers.net.mycelium
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.installers.lang.rust
import freeflowuniverse.herolib.installers.lang.golang
import freeflowuniverse.herolib.installers.lang.vlang
import freeflowuniverse.herolib.installers.lang.herolib
import freeflowuniverse.herolib.installers.lang.nodejs
import freeflowuniverse.herolib.installers.lang.python
import freeflowuniverse.herolib.installers.web.zola
import freeflowuniverse.herolib.installers.web.tailwind
import freeflowuniverse.herolib.installers.hero.heroweb
import freeflowuniverse.herolib.installers.hero.herodev
import freeflowuniverse.herolib.installers.sysadmintools.daguserver
import freeflowuniverse.herolib.installers.sysadmintools.rclone
import freeflowuniverse.herolib.installers.sysadmintools.prometheus
import freeflowuniverse.herolib.installers.sysadmintools.grafana
import freeflowuniverse.herolib.installers.sysadmintools.fungistor
import freeflowuniverse.herolib.installers.sysadmintools.garage_s3
import freeflowuniverse.herolib.installers.infra.zinit
// InstallArgs drives install_multi(): `names` is a comma-separated list of
// component names (see names()); the flags are passed through to the
// individual installers where supported.
@[params]
pub struct InstallArgs {
pub mut:
names string
reset bool
uninstall bool
gitpull bool
gitreset bool
start bool
}
// names returns the sorted list of component names install_multi() accepts.
// fixed: 'hero' appeared twice; 'python' was handled by install_multi but
// missing from this list.
pub fn names(args_ InstallArgs) []string {
	names := '
	base
	caddy
	chrome
	dagu
	develop
	fungistor
	garage_s3
	golang
	grafana
	hero
	herocontainers
	herodev
	heroweb
	lima
	mycelium
	nodejs
	prometheus
	python
	rclone
	rust
	tailwind
	vlang
	vscode
	zinit
	zola
	'
	mut ns := texttools.to_array(names)
	ns.sort()
	return ns
}
// install_multi installs a comma-separated list of components (args.names).
// Duplicate names are installed once; an unknown name returns an error.
// fixed: the match had two `'hero'` branches (the second was unreachable
// and V rejects duplicate match values); they are merged into one.
pub fn install_multi(args_ InstallArgs) ! {
	mut args := args_
	// dedupe requested names while keeping their order
	mut items := []string{}
	for item in args.names.split(',').map(it.trim_space()) {
		if item !in items {
			items << item
		}
	}
	for item in items {
		match item {
			'base' {
				base.install(reset: args.reset)!
			}
			'develop' {
				base.install(reset: args.reset, develop: true)!
			}
			'rclone' {
				// rclone.install(reset: args.reset)!
				mut rc := rclone.get()!
				rc.install(reset: args.reset)!
			}
			'rust' {
				rust.install(reset: args.reset)!
			}
			'golang' {
				mut g := golang.get()!
				g.install(reset: args.reset)!
			}
			'vlang' {
				vlang.install(reset: args.reset)!
			}
			'hero' {
				// merged duplicate branch: install herolib from git
				// (the old second branch `herolib.hero_install` was dead code)
				herolib.install(
					reset: args.reset
					git_pull: args.gitpull
					git_reset: args.gitreset
				)!
			}
			'caddy' {
				//caddy.install(reset: args.reset)!
				// caddy.configure_examples()!
			}
			'chrome' {
				chrome.install(reset: args.reset, uninstall: args.uninstall)!
			}
			'mycelium' {
				mycelium.install(reset: args.reset)!
				mycelium.start()!
			}
			'garage_s3' {
				garage_s3.install(reset: args.reset, config_reset: args.reset, restart: true)!
			}
			'fungistor' {
				fungistor.install(reset: args.reset)!
			}
			'lima' {
				lima.install(reset: args.reset, uninstall: args.uninstall)!
			}
			'herocontainers' {
				// podman + buildah together form the container toolchain
				mut podman_installer0 := podman_installer.get()!
				mut buildah_installer0 := buildah_installer.get()!
				if args.reset {
					podman_installer0.destroy()! // will remove all
					buildah_installer0.destroy()! // will remove all
				}
				podman_installer0.install()!
				buildah_installer0.install()!
			}
			'prometheus' {
				prometheus.install(reset: args.reset, uninstall: args.uninstall)!
			}
			'grafana' {
				grafana.install(reset: args.reset, uninstall: args.uninstall)!
			}
			'vscode' {
				vscode.install(reset: args.reset)!
			}
			'nodejs' {
				nodejs.install(reset: args.reset)!
			}
			'python' {
				python.install()!
			}
			'herodev' {
				herodev.install()!
			}
			// 'heroweb' {
			// heroweb.install()!
			// }
			'dagu' {
				// will call the installer underneith
				mut dserver := daguserver.get()!
				dserver.install()!
				dserver.restart()!
				// mut dagucl:=dserver.client()!
			}
			'zola' {
				mut i2 := zola.get()!
				i2.install()! // will also install tailwind
			}
			'tailwind' {
				mut i := tailwind.get()!
				i.install()!
			}
			'zinit' {
				mut i := zinit.get()!
				i.install()!
			}
			else {
				return error('cannot find installer for: ${item}')
			}
		}
	}
}

View File

@@ -0,0 +1,24 @@
module ulist
// import freeflowuniverse.herolib.core.pathlib
// import freeflowuniverse.herolib.develop.gittools
// U stands for Upload
// UList is a collection of files staged for upload, all sharing one root.
pub struct UList {
pub mut:
root string // common base for all UFiles
items []UFile
}
// UFile is one file in a UList, with an optional upload alias and a category.
pub struct UFile {
pub mut:
// path relative to (or under) UList.root -- TODO confirm relative vs absolute
path string
alias string // if other name used for upload, otherwise is the filename
cat UCat
}
// UCat categorizes an upload file: generic file, binary, or configuration.
pub enum UCat {
file
bin
config
}