From a59a66dd7104a6482bea3f1c38ab088330c84fb1 Mon Sep 17 00:00:00 2001 From: despiegk Date: Tue, 31 Dec 2024 05:39:23 +0100 Subject: [PATCH 1/3] ... --- examples/threefold/grid/deployment_state.vsh | 1 + examples/threefold/grid/vm_example.vsh | 1 + examples/threefold/grid/vm_query_example.vsh | 1 + examples/threefold/grid/webgw_example.vsh | 1 + examples/threefold/grid/zdb_example.vsh | 1 + lib/vpkg.json | 12 ------------ 6 files changed, 5 insertions(+), 12 deletions(-) delete mode 100644 lib/vpkg.json diff --git a/examples/threefold/grid/deployment_state.vsh b/examples/threefold/grid/deployment_state.vsh index cb7a0b72..d7fb5f08 100644 --- a/examples/threefold/grid/deployment_state.vsh +++ b/examples/threefold/grid/deployment_state.vsh @@ -1,4 +1,5 @@ +#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run struct DeploymentStateDB{ secret ... //to encrypt symmetric diff --git a/examples/threefold/grid/vm_example.vsh b/examples/threefold/grid/vm_example.vsh index e91a2bf7..eaacfff5 100644 --- a/examples/threefold/grid/vm_example.vsh +++ b/examples/threefold/grid/vm_example.vsh @@ -1,3 +1,4 @@ +#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run struct VMSpecs{ diff --git a/examples/threefold/grid/vm_query_example.vsh b/examples/threefold/grid/vm_query_example.vsh index 8145b790..ee827ca7 100644 --- a/examples/threefold/grid/vm_query_example.vsh +++ b/examples/threefold/grid/vm_query_example.vsh @@ -1,3 +1,4 @@ +#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run struct NodeQuery{ diff --git a/examples/threefold/grid/webgw_example.vsh b/examples/threefold/grid/webgw_example.vsh index 29316058..f1a4934e 100644 --- a/examples/threefold/grid/webgw_example.vsh +++ b/examples/threefold/grid/webgw_example.vsh @@ -1,3 +1,4 @@ +#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run diff --git a/examples/threefold/grid/zdb_example.vsh b/examples/threefold/grid/zdb_example.vsh index 9486e6af..bf2fa16f 100644 --- a/examples/threefold/grid/zdb_example.vsh +++ b/examples/threefold/grid/zdb_example.vsh @@ -1,3 +1,4 @@ +#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run struct ZDBSpecs{ diff --git a/lib/vpkg.json b/lib/vpkg.json deleted file mode 100644 index 5f7b2f81..00000000 --- a/lib/vpkg.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "herolib", - "version": "0.1.0", - "author": [ - "despiegk " - ], - "repo": "https://github.com/herouniverse/herolib/herolib", - "sources": [ - "https://vpkg-project.github.io/registry/src/" - ], - "dependencies": [] -} \ No newline at end of file From f8ab2f855a1da76339dbac1af192bc7040f1f5bf Mon Sep 17 00:00:00 2001 From: despiegk Date: Tue, 31 Dec 2024 09:32:56 +0100 Subject: [PATCH 2/3] fix ourtime, logging, some missing installers, ... 
--- lib/core/base/context.v | 11 + lib/core/base/readme.md | 114 +++++++-- lib/core/base/session.v | 13 + lib/core/base/session_logger.v | 61 +---- lib/core/base/templates/load.sh | 4 - lib/core/base/templates/logger_add.lua | 51 ---- lib/core/base/templates/logger_del.lua | 22 -- lib/core/base/templates/logger_example.sh | 18 -- lib/core/logger/factory.v | 12 + lib/core/logger/log.v | 66 +++++ lib/core/logger/log_test.v | 101 ++++++++ lib/core/logger/model.v | 28 +++ lib/core/logger/readme.md | 64 +++++ lib/core/logger/search.v | 128 ++++++++++ lib/data/ourtime/ourtime.v | 6 + lib/data/ourtime/parser.v | 20 +- lib/data/ourtime/readme.md | 4 + lib/data/ourtime/timetools_test.v | 60 ++--- lib/installers/infra/coredns/cdns_configure.v | 52 ++++ lib/installers/infra/coredns/cdns_install.v | 138 +++++++++++ lib/installers/infra/coredns/cdns_play.v | 49 ++++ .../infra/coredns/templates/Corefile | 7 + .../infra/coredns/templates/db.example.org | 14 ++ lib/installers/infra/gitea/.heroscript | 0 lib/installers/infra/gitea/installer.v | 77 ++++++ lib/installers/infra/gitea/server.v | 210 ++++++++++++++++ lib/installers/infra/gitea/templates/app.ini | 108 +++++++++ lib/installers/infra/livekit/.heroscript | 13 + .../infra/livekit/livekit_actions.v | 110 +++++++++ .../infra/livekit/livekit_factory_.v | 229 ++++++++++++++++++ lib/installers/infra/livekit/livekit_model.v | 89 +++++++ lib/installers/infra/livekit/readme.md | 22 ++ .../infra/livekit/templates/config.yaml | 26 ++ lib/installers/install_multi.v | 191 +++++++++++++++ lib/installers/ulist/ulist.v | 24 ++ 35 files changed, 1925 insertions(+), 217 deletions(-) delete mode 100644 lib/core/base/templates/load.sh delete mode 100644 lib/core/base/templates/logger_add.lua delete mode 100644 lib/core/base/templates/logger_del.lua delete mode 100755 lib/core/base/templates/logger_example.sh create mode 100644 lib/core/logger/factory.v create mode 100644 lib/core/logger/log.v create mode 100644 lib/core/logger/log_test.v create mode 100644 lib/core/logger/model.v create mode 100644 lib/core/logger/readme.md create mode 100644 lib/core/logger/search.v create mode 100644 lib/installers/infra/coredns/cdns_configure.v create mode 100644 lib/installers/infra/coredns/cdns_install.v create mode 100644 lib/installers/infra/coredns/cdns_play.v create mode 100644 lib/installers/infra/coredns/templates/Corefile create mode 100644 lib/installers/infra/coredns/templates/db.example.org create mode 100644 lib/installers/infra/gitea/.heroscript create mode 100644 lib/installers/infra/gitea/installer.v create mode 100644 lib/installers/infra/gitea/server.v create mode 100644 lib/installers/infra/gitea/templates/app.ini create mode 100644 lib/installers/infra/livekit/.heroscript create mode 100644 lib/installers/infra/livekit/livekit_actions.v create mode 100644 lib/installers/infra/livekit/livekit_factory_.v create mode 100644 lib/installers/infra/livekit/livekit_model.v create mode 100644 lib/installers/infra/livekit/readme.md create mode 100644 lib/installers/infra/livekit/templates/config.yaml create mode 100644 lib/installers/install_multi.v create mode 100644 lib/installers/ulist/ulist.v diff --git a/lib/core/base/context.v b/lib/core/base/context.v index 2ec058da..3ed47031 100644 --- a/lib/core/base/context.v +++ b/lib/core/base/context.v @@ -21,6 +21,7 @@ mut: params_ ?¶msparser.Params dbcollection_ ?&dbfs.DBCollection @[skip; str: skip] redis_ ?&redisclient.Redis @[skip; str: skip] + path_ ?pathlib.Path pub mut: // snippets map[string]string config 
ContextConfig @@ -185,3 +186,13 @@ pub fn (mut self Context) secret_set(secret_ string) ! { self.config.secret = secret2 self.save()! } + + + +pub fn (mut self Context) path() !pathlib.Path { + return self.path_ or { + path := '${os.home_dir()}/hero/context/${self.config.name}' + mut path := pathlib.get_dir(path: path,create: false)! + path + } +} diff --git a/lib/core/base/readme.md b/lib/core/base/readme.md index be64e276..83ddbbab 100644 --- a/lib/core/base/readme.md +++ b/lib/core/base/readme.md @@ -1,39 +1,105 @@ -## context & sessions +## Context & Sessions -Everything we do in hero lives in a context, each context has a unique name. +Everything we do in hero lives in a context, each context has a unique name and ID. A context can have multiple sessions, where each session represents a specific execution environment. -Redis is used to manage the contexts and the sessions. +### Context -- redis db 0 - - `context:current` curent id of the context, is also the DB if redis if redis is used -- redis db X, x is nr of context - - `context:name` name of this context - - `context:secret` secret as is used in context (is md5 of original config secret) - - `context:privkey` secp256k1 privkey as is used in context (encrypted by secret) - - `context:params` params for a context, can have metadata - - `context:lastid` last id for our db - - `session:$id` the location of session - - `session:$id:params` params for the session, can have metadata +Each context has: +- A unique ID and name +- Secret management (encrypted) +- Database collection +- Configuration storage +- Code root path +- Parameters -Session id is $contextid:$sessionid (e.g. 10:111) +### Sessions -**The SessionNewArgs:** +Sessions exist within a context and provide: +- Unique name within the context +- Interactive mode control +- Environment variables +- Start/End time tracking +- Parameter storage +- Database access +- Logging -- context_name string = 'default' -- session_name string //default will be an incremental nr if not filled in -- interactive bool = true //can ask questions, default on true -- coderoot string //this will define where all code is checked out +### Storage Structure + +Redis is used to manage contexts and sessions: + +- Redis DB X (where X is context ID): + - `context:config` - JSON encoded context configuration + - `sessions:config:$name` - JSON encoded session configuration for each session + +### Database Structure + +Each context has a database collection located at `~/hero/db/$contextid/`. Within this: +- Each session gets its own database named `session_$name` +- A shared `config` database exists for context-wide configuration + +### Hero Configuration + +Contexts support hero-specific configuration files: +- Stored at `~/hero/context/$contextname/$category__$name.yaml` +- Supports categories for organization +- Automatically handles shell expansions + +### Example Usage ```v import freeflowuniverse.herolib.core.base -mut session:=context_new( - coderoot:'/tmp/code' - interactive:true +// Create a new context +mut context := context_new( + id: 1 + name: 'mycontext' + coderoot: '/tmp/code' + interactive: true )! -mut session:=session_new(context:'default',session:'mysession1')! -mut session:=session_new()! //returns default context & incremental new session +// Create a new session in the context +mut session := session_new( + context: context + name: 'mysession1' + interactive: true +)! +// Work with environment variables +session.env_set('KEY', 'value')! +value := session.env_get('KEY')! 
+ +// Work with hero config +context.hero_config_set('category', 'name', 'content')! +content := context.hero_config_get('category', 'name')! + +// Access session database +mut db := session.db_get()! + +// Access context-wide config database +mut config_db := session.db_config_get()! ``` +### Security + +- Context secrets are stored as MD5 hashes +- Support for encryption of sensitive data +- Interactive secret configuration available + +### File Structure + +Each context and session has its own directory structure: +- Context root: `~/hero/context/$contextname/` +- Session directory: `~/hero/context/$contextname/$sessionname/` + +This structure helps organize configuration files, logs, and other session-specific data. + +### Logging + +Each session has its own logger: + +```v +mut logger := session.logger()! +logger.log(log:'My log message') +``` + +For detailed logging capabilities and options, see the logger documentation in `lib/core/logger/readme.md`. diff --git a/lib/core/base/session.v b/lib/core/base/session.v index cbe4428c..786a30ad 100644 --- a/lib/core/base/session.v +++ b/lib/core/base/session.v @@ -4,6 +4,7 @@ import freeflowuniverse.herolib.data.ourtime // import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.data.paramsparser import freeflowuniverse.herolib.data.dbfs +import freeflowuniverse.herolib.core.logger import json // import freeflowuniverse.herolib.core.pathlib // import freeflowuniverse.herolib.develop.gittools @@ -11,6 +12,9 @@ import json @[heap] pub struct Session { +mut: + path_ ?pathlib.Path + logger_ ?logger.Logger pub mut: name string // unique id for session (session id), can be more than one per context interactive bool = true @@ -20,6 +24,7 @@ pub mut: context &Context @[skip; str: skip] config SessionConfig env map[string]string + } ///////// LOAD & SAVE @@ -88,6 +93,14 @@ pub fn (self Session) guid() string { return '${self.context.guid()}:${self.name}' } +pub fn (mut self Session) path() !pathlib.Path { + return self.path_ or { + path := '${self.context.path().path}/${self.name}' + mut path := pathlib.get_dir(path: path,create: true)! + path + } +} + fn (self Session) str2() string { mut out := 'session:${self.guid()}' out += ' start:\'${self.start}\'' diff --git a/lib/core/base/session_logger.v b/lib/core/base/session_logger.v index 8bf7f665..9293b386 100644 --- a/lib/core/base/session_logger.v +++ b/lib/core/base/session_logger.v @@ -1,61 +1,10 @@ module base -import freeflowuniverse.herolib.data.ourtime -import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.core.logger -@[heap] -pub struct Logger { -pub mut: - session string -} - -pub struct LogItem { -pub mut: - time ourtime.OurTime - cat string - log string - logtype LogType - session string -} - -pub enum LogType { - stdout - error -} - -pub fn (session Session) logger_new() !Logger { - // mut l:=log.Log{} - // l.set_full_logpath('./info.log') - // l.log_to_console_too() - return Logger{} -} - -@[params] -pub struct LogArgs { -pub mut: - cat string - log string @[required] - logtype LogType -} - -// cat & log are the arguments . -// category can be any well chosen category e.g. vm -pub fn (mut session Session) log(args_ LogArgs) !LogItem { - mut args := args_ - args.cat = texttools.name_fix(args.cat) - - mut l := LogItem{ - cat: args.cat - log: args.log - time: ourtime.now() - // session: session.guid() +pub fn (session Session) logger() !logger.Logger { + return session.logger_ or { + mut l2 := logger.new("${session.path()!.path}/logs")! 
+ l2 } - - // TODO: get string output and put to redis - - return l -} - -pub fn (li LogItem) str() string { - return '${li.session}' } diff --git a/lib/core/base/templates/load.sh b/lib/core/base/templates/load.sh deleted file mode 100644 index ed25e865..00000000 --- a/lib/core/base/templates/load.sh +++ /dev/null @@ -1,4 +0,0 @@ -#redis-cli SCRIPT LOAD "$(cat logger.lua)" -export LOGGER_ADD=$(redis-cli SCRIPT LOAD "$(cat logger_add.lua)") -export LOGGER_DEL=$(redis-cli SCRIPT LOAD "$(cat logger_del.lua)") -export STATS_ADD=$(redis-cli SCRIPT LOAD "$(cat stats_add.lua)") diff --git a/lib/core/base/templates/logger_add.lua b/lib/core/base/templates/logger_add.lua deleted file mode 100644 index f1cdb450..00000000 --- a/lib/core/base/templates/logger_add.lua +++ /dev/null @@ -1,51 +0,0 @@ - -local function normalize(str) - return string.gsub(string.lower(str), "%s+", "_") -end - -local src = normalize(ARGV[1]) -local category = normalize(ARGV[2]) -local message = ARGV[3] -local logHashKey = "logs:" .. src -local lastIdKey = "logs:" .. src .. ":lastid" - --- redis.log(redis.LOG_NOTICE, "...") - --- Increment the log ID using Redis INCR command -local logId = redis.call('INCR', lastIdKey) - --- Get the current epoch time -local epoch = redis.call('TIME')[1] - --- Prepare the log entry with a unique ID, epoch time, and message -local logEntry = category .. ":" .. epoch .. ":" .. message - --- Add the new log entry to the hash set -redis.call('HSET', logHashKey, logId, logEntry) - --- Optionally manage the size of the hash to keep the latest 2000 entries only -local hlen = redis.call('HLEN', logHashKey) -if hlen > 5000 then - -- Find the smallest logId - local smallestId = logId - local cursor = "0" - repeat - local scanResult = redis.call('HSCAN', logHashKey, cursor, "COUNT", 5) - cursor = scanResult[1] - local entries = scanResult[2] - for i = 1, #entries, 2 do - local currentId = tonumber(entries[i]) - if currentId < smallestId then - smallestId = currentId - end - end - until cursor == "0" - -- redis.log(redis.LOG_NOTICE, "smallest id: " .. smallestId) - - -- Remove the oldest entries - for i = smallestId, smallestId + 500 do - redis.call('HDEL', logHashKey, i) - end -end - -return logEntry diff --git a/lib/core/base/templates/logger_del.lua b/lib/core/base/templates/logger_del.lua deleted file mode 100644 index 10253d6c..00000000 --- a/lib/core/base/templates/logger_del.lua +++ /dev/null @@ -1,22 +0,0 @@ --- Function to normalize strings (convert to lower case and replace spaces with underscores) -local function normalize(str) - return string.gsub(string.lower(str), "%s+", "_") -end - -local src = ARGV[1] and normalize(ARGV[1]) or nil - -if src then - -- Delete logs for specified source and category - local logHashKey = "logs:" .. src - local lastIdKey = logHashKey .. 
":lastid" - redis.call('DEL', logHashKey) - redis.call('DEL', lastIdKey) -else - -- Delete all logs for all sources and categories - local keys = redis.call('KEYS', "logs:*") - for i, key in ipairs(keys) do - redis.call('DEL', key) - end -end - -return "Logs deleted" diff --git a/lib/core/base/templates/logger_example.sh b/lib/core/base/templates/logger_example.sh deleted file mode 100755 index 5a399d98..00000000 --- a/lib/core/base/templates/logger_example.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -set -x -cd "$(dirname "$0")" -source load.sh - -# for i in $(seq 1 1000) -# do -# redis-cli EVALSHA $LOGHASH 0 "AAA" "CAT1" "Example log message" -# redis-cli EVALSHA $LOGHASH 0 "AAA" "CAT2" "Example log message" -# done - -redis-cli EVALSHA $LOGGER_DEL 0 - -for i in $(seq 1 200) -do - redis-cli EVALSHA $LOGGER_ADD 0 "BBB" "CAT2" "Example log message" -done - diff --git a/lib/core/logger/factory.v b/lib/core/logger/factory.v new file mode 100644 index 00000000..c6d2305e --- /dev/null +++ b/lib/core/logger/factory.v @@ -0,0 +1,12 @@ + +module logger +import freeflowuniverse.herolib.core.pathlib + + +pub fn new(path string)! Logger { + mut p := pathlib.get_dir(path:path,create:true)! + return Logger{ + path: p + lastlog_time: 0 + } +} diff --git a/lib/core/logger/log.v b/lib/core/logger/log.v new file mode 100644 index 00000000..3556376f --- /dev/null +++ b/lib/core/logger/log.v @@ -0,0 +1,66 @@ + +module logger + +import os +import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.data.ourtime + + +@[params] +pub struct LogItemArgs { +pub mut: + timestamp ?ourtime.OurTime + cat string + log string + logtype LogType +} + +pub fn (mut l Logger) log(args_ LogItemArgs)! { + mut args := args_ + + t := args.timestamp or { + t2:=ourtime.now() + t2 + } + + // Format category (max 10 chars, ascii only) + args.cat = texttools.name_fix(args.cat) + if args.cat.len > 10 { + return error('category cannot be longer than 10 chars') + } + args.cat = texttools.expand(args.cat,10," ") + + args.log = texttools.dedent(args.log).trim_space() + + mut logfile_path:="${l.path.path}/${t.dayhour()}.log" + + // Create log file if it doesn't exist + if !os.exists(logfile_path) { + os.write_file(logfile_path, '')! + l.lastlog_time = 0 //make sure we put time again + } + + mut f:= os.open_append(logfile_path)! + + mut content := '' + + // Add timestamp if we're in a new second + if t.unix() > l.lastlog_time { + content += '\n${t.time().format_ss()}\n' + l.lastlog_time = t.unix() + } + + // Format log lines + error_prefix := if args.logtype == .error { 'E' } else { ' ' } + lines := args.log.split('\n') + + for i, line in lines { + if i == 0 { + content += '${error_prefix} ${args.cat} - ${line}\n' + } else { + content += '${error_prefix} ${line}\n' + } + } + f.writeln(content.trim_space_right())! + f.close() +} diff --git a/lib/core/logger/log_test.v b/lib/core/logger/log_test.v new file mode 100644 index 00000000..fc67bacc --- /dev/null +++ b/lib/core/logger/log_test.v @@ -0,0 +1,101 @@ +module logger + +import os +import freeflowuniverse.herolib.data.ourtime +import freeflowuniverse.herolib.core.pathlib + +fn testsuite_begin() { + if os.exists('/tmp/testlogs') { + os.rmdir_all('/tmp/testlogs')! + } +} + +fn test_logger() { + mut logger := new('/tmp/testlogs')! + + // Test stdout logging + logger.log(LogItemArgs{ + cat: 'test-app' + log: 'This is a test message\nWith a second line\nAnd a third line' + logtype: .stdout + timestamp:ourtime.new('2022-12-05 20:14:35')! + })! 
+ + // Test error logging + logger.log(LogItemArgs{ + cat: 'error-test' + log: 'This is an error\nWith details' + logtype: .error + timestamp: ourtime.new('2022-12-05 20:14:35')! + })! + + logger.log(LogItemArgs{ + cat: 'test-app' + log: 'This is a test message\nWith a second line\nAnd a third line' + logtype: .stdout + timestamp: ourtime.new('2022-12-05 20:14:36')! + })! + + + logger.log(LogItemArgs{ + cat: 'error-test' + log: ' + This is an error + + With details + ' + logtype: .error + timestamp: ourtime.new('2022-12-05 20:14:36')! + })! + + logger.log(LogItemArgs{ + cat: 'error-test' + log: ' + aaa + + bbb + ' + logtype: .error + timestamp: ourtime.new('2022-12-05 22:14:36')! + })! + + logger.log(LogItemArgs{ + cat: 'error-test' + log: ' + aaa2 + + bbb2 + ' + logtype: .error + timestamp: ourtime.new('2022-12-05 22:14:36')! + })! + + + + + // Verify log directory exists + assert os.exists('/tmp/testlogs'), 'Log directory should exist' + + // Get log file + files := os.ls('/tmp/testlogs')! + assert files.len == 2 + + mut file := pathlib.get_file( + path: '/tmp/testlogs/${files[0]}' + create: false + )! + + println('/tmp/testlogs/${files[0]}') + + content:=file.read()!.trim_space() + + items := logger.search()! + assert items.len == 6 //still wrong: TODO + +} + +fn testsuite_end() { + if os.exists('/tmp/testlogs') { + os.rmdir_all('/tmp/testlogs')! + } +} diff --git a/lib/core/logger/model.v b/lib/core/logger/model.v new file mode 100644 index 00000000..0995dc05 --- /dev/null +++ b/lib/core/logger/model.v @@ -0,0 +1,28 @@ +module logger + +import freeflowuniverse.herolib.data.ourtime + +import freeflowuniverse.herolib.core.pathlib + +@[heap] +pub struct Logger { +pub mut: + path pathlib.Path + lastlog_time i64 //to see in log format, every second we put a time down, we need to know if we are in a new second (logs can come in much faster) +} + + +pub struct LogItem { +pub mut: + timestamp ourtime.OurTime + cat string + log string + logtype LogType +} + +pub enum LogType { + stdout + error +} + + diff --git a/lib/core/logger/readme.md b/lib/core/logger/readme.md new file mode 100644 index 00000000..d917fe6d --- /dev/null +++ b/lib/core/logger/readme.md @@ -0,0 +1,64 @@ +# Logger Module + +A simple logging system that provides structured logging with search capabilities. + +Logs are stored in hourly files with a consistent format that makes them both human-readable and machine-parseable. + +## Features + +- Structured logging with categories and error types +- Automatic timestamp management +- Multi-line message support +- Search functionality with filtering options +- Human-readable log format + +## Usage + +```v +import freeflowuniverse.herolib.core.logger +import freeflowuniverse.herolib.data.ourtime + +// Create a new logger +mut l := logger.new(path: '/var/logs')! + +// Log a message +l.log( + cat: 'system', + log: 'System started successfully', + logtype: .stdout +)! + +// Log an error +l.log( + cat: 'system', + log: 'Failed to connect\nRetrying in 5 seconds...', + logtype: .error +)! + +// Search logs +results := l.search( + timestamp_from: ourtime.now().warp("-24h"), // Last 24 hours + cat: 'system', // Filter by category + log: 'failed', // Search in message content + logtype: .error, // Only error messages + maxitems: 100 // Limit results +)! +``` + +## Log Format + +``` +$time + $cat - $msg + $cat - first line of message + second line of message + third line ... +E $cat - first line of message +E second line of message +E third line ... 
+``` + +- time is expressed in '1980-07-11 21:23:42' == time_to_test.format_ss() +- if cat has '-' inside it will be converted to '_' +- $cat max 10 chars, and always takes the 10 chars so that the logs are nicely formatted +- the first char is ' ' or 'E' , E means its the logtype error diff --git a/lib/core/logger/search.v b/lib/core/logger/search.v new file mode 100644 index 00000000..4dd671da --- /dev/null +++ b/lib/core/logger/search.v @@ -0,0 +1,128 @@ + +module logger + +import os +import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.data.ourtime + + +@[params] +pub struct SearchArgs { +pub mut: + timestamp_from ?ourtime.OurTime + timestamp_to ?ourtime.OurTime + cat string //can be empty + log string //any content in here will be looked for + logtype LogType + maxitems int = 10000 +} + + +pub fn (mut l Logger) search(args_ SearchArgs)! []LogItem { + mut args := args_ + + // Format category (max 10 chars, ascii only) + args.cat = texttools.name_fix(args.cat) + if args.cat.len > 10 { + return error('category cannot be longer than 10 chars') + } + + mut timestamp_from := args.timestamp_from or { ourtime.OurTime{} } + mut timestamp_to := args.timestamp_to or { ourtime.OurTime{} } + + // Get time range + from_time := timestamp_from.unix() + to_time := timestamp_to.unix() + + if from_time > to_time { + return error('from_time cannot be after to_time: ${from_time} < ${to_time}') + } + + mut result := []LogItem{} + + // Find log files in time range + mut files := os.ls(l.path.path)! + files.sort() + + for file in files { + if !file.ends_with('.log') { + continue + } + + // Parse dayhour from filename + dayhour := file[..file.len-4] // remove .log + file_time := ourtime.new(dayhour)! + mut current_time:=ourtime.OurTime{} + mut current_item := LogItem{} + mut collecting := false + + + // Skip if file is outside time range + if file_time.unix() < from_time || file_time.unix() > to_time { + continue + } + + // Read and parse log file + content := os.read_file('${l.path.path}/${file}')! + lines := content.split('\n') + + for line in lines { + + if result.len >= args.maxitems { + return result + } + + line_trim := line.trim_space() + if line_trim == '' { + continue + } + + // Check if this is a timestamp line + if !(line.starts_with(" ") || line.starts_with("E")){ + current_time = ourtime.new(line_trim)! + if collecting { + process(mut result,current_item,current_time, args , from_time, to_time)! + } + collecting = false + continue + } + + // Parse log line + is_error := line.starts_with('E') + if !collecting { + // Start new item + current_item = LogItem{ + timestamp: current_time + cat: line_trim[2..12].trim_space() + log: line_trim[15..].trim_space() + logtype: if is_error { .error } else { .stdout } + } + collecting = true + } else { + // Continuation line + current_item.log += '\n' + line_trim[15..] + } + } + + // Add last item if collecting + if collecting { + process(mut result,current_item,current_time, args , from_time, to_time)! + } + } + + return result +} + +fn process(mut result []LogItem, current_item LogItem, current_time ourtime.OurTime, args SearchArgs, from_time i64, to_time i64) ! 
{ + // Add previous item if it matches filters + log_epoch:= current_item.timestamp.unix() + if log_epoch < from_time || log_epoch > to_time { + return + } + if (args.cat == '' || current_item.cat.trim_space() == args.cat) && + (args.log == '' || current_item.log.contains(args.log)) && + (args.logtype == current_item.logtype) { + result << current_item + } + +} \ No newline at end of file diff --git a/lib/data/ourtime/ourtime.v b/lib/data/ourtime/ourtime.v index a4b4c846..86513ee3 100644 --- a/lib/data/ourtime/ourtime.v +++ b/lib/data/ourtime/ourtime.v @@ -92,6 +92,12 @@ pub fn (ot OurTime) day() string { return ot.time().ymmdd() } +// returns a date-time string in "YYYY-MM-DD HH" format (24h). +pub fn (ot OurTime) dayhour() string { + return ot.time().format().all_before_last(":") +} + + // returns as epoch (seconds) pub fn (ot OurTime) int() int { return int(ot.time().unix()) diff --git a/lib/data/ourtime/parser.v b/lib/data/ourtime/parser.v index 122159c9..c39bec24 100644 --- a/lib/data/ourtime/parser.v +++ b/lib/data/ourtime/parser.v @@ -106,6 +106,13 @@ fn get_unix_from_relative(timestr string) !i64 { return time_unix } +// Supported date formats: + +// - `YYYY-MM-DD HH:mm:ss` +// - `YYYY-MM-DD HH:mm` +// - `YYYY-MM-DD HH` +// - `YYYY-MM-DD` +// - `DD-MM-YYYY` (YYYY must be 4 digits) pub fn get_unix_from_absolute(timestr_ string) !i64 { timestr := timestr_.trim_space() split_time_hour := timestr.split(' ') @@ -138,6 +145,9 @@ pub fn get_unix_from_absolute(timestr_ string) !i64 { return error("unrecognized time format, time must either be YYYY/MM/DD or DD/MM/YYYY, or : in stead of /. Input was:'${timestr_}'") } + if timepart.trim_space() == ""{ + timepart='00:00:00' + } timparts := timepart.split(':') if timparts.len > 3 { return error("format of date/time not correct, in time part: '${timepart}'") @@ -146,17 +156,15 @@ pub fn get_unix_from_absolute(timestr_ string) !i64 { if timparts.len == 2 { timepart = '${timepart}:00' } else if timparts.len == 1 { - if timepart.len == 0 { - timepart = '00:00:00' - } else { - timepart = '${timepart}:00:00' - } + timepart = '${timepart}:00:00' } full_string := '${datepart} ${timepart}' time_struct := time.parse(full_string) or { - return error("could not parse date/time string '${timestr_}': ${err}") + return error("could not parse date/time string '${full_string}': ${err}") } + + //println(" ${timparts} ${time_struct}") return time_struct.unix() } diff --git a/lib/data/ourtime/readme.md b/lib/data/ourtime/readme.md index f53a9347..556a03fa 100644 --- a/lib/data/ourtime/readme.md +++ b/lib/data/ourtime/readme.md @@ -57,8 +57,10 @@ t2.warp('+1h')! // Move 1 hour forward #### Absolute Time Format Supported date formats: + - `YYYY-MM-DD HH:mm:ss` - `YYYY-MM-DD HH:mm` +- `YYYY-MM-DD HH` - `YYYY-MM-DD` - `DD-MM-YYYY` (YYYY must be 4 digits) - Also supports '/' instead of '-' for dates @@ -81,6 +83,8 @@ now := ourtime.now() // Create from string t := ourtime.new('2022-12-05 20:14:35')! +t := ourtime.new('2022-12-05 20:14')! +t := ourtime.new('2022-12-05 20')! 
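+// A date without a time part is also accepted; the parser fills in 00:00:00
+t := ourtime.new('2022-12-05')!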
// Create from unix timestamp t2 := ourtime.new_from_epoch(1670271275) diff --git a/lib/data/ourtime/timetools_test.v b/lib/data/ourtime/timetools_test.v index 26564f9b..35f0572b 100644 --- a/lib/data/ourtime/timetools_test.v +++ b/lib/data/ourtime/timetools_test.v @@ -89,15 +89,34 @@ fn test_input_variations() { // check that standard formats can be inputted fn test_absolute_time() { input_strings := { - '2022-12-5 20:14:35': 1670271275 - '2022-12-5': 1670198400 - // '2022-12': 1669842000 // Should be the beginning of december - // '2022': 1640984400 // should be beginning of 2022 + '2022-12-5': 1670198400 + ' 2022-12-05 ': 1670198400 + '2022-12-5 1': 1670198400 + 3600 + '2022-12-5 20': 1670198400 + 3600 * 20 + '2022-12-5 20:14': 1670198400 + 3600 * 20 + 14 * 60 + '2022-12-5 20:14:35': 1670198400 + 3600 * 20 + 14 * 60 + 35 } for key, value in input_strings { - thetime := new(key) or { panic('cannot get expiration for ${key}') } + println(" ===== ${key} ${value}") + thetime := new(key) or { panic('cannot get ourtime for ${key}.\n$err') } + assert value == get_unix_from_absolute(key)! assert thetime.unix() == value, 'expiration was incorrect for ${key}' + } + + a := get_unix_from_absolute('2022-12-5')! + a2 := get_unix_from_absolute('2022-12-05')! + b := get_unix_from_absolute('2022-12-5 1')! + c := get_unix_from_absolute('2022-12-5 1:00')! + d := get_unix_from_absolute('2022-12-5 01:00')! + e := get_unix_from_absolute('2022-12-5 01:1')! + + assert a==a2 + assert b==a+3600 + assert b==c + assert b==d + assert e==d+60 + } fn test_from_epoch() { @@ -120,34 +139,3 @@ fn test_parse_date() { } } -// fn test_parse_time() { -// input_strings := { -// '12:20': { -// 'hour': 12 -// 'minute': 20 -// } -// '15;30': { -// 'hour': 15 -// 'minute': 30 -// } -// '12:30pm': { -// 'hour': 12 -// 'minute': 30 -// } -// '3pm': { -// 'hour': 15 -// 'minute': 0 -// } -// '8.40 pm': { -// 'hour': 20 -// 'minute': 40 -// } -// } - -// for key, value in input_strings { -// test_value := parse(key) or { -// panic('parse_time failed for ${key}, with error ${err}') -// } -// // assert test_value == value, 'hour, minute was incorrect for ${key}' -// } -// } diff --git a/lib/installers/infra/coredns/cdns_configure.v b/lib/installers/infra/coredns/cdns_configure.v new file mode 100644 index 00000000..d2a4c85d --- /dev/null +++ b/lib/installers/infra/coredns/cdns_configure.v @@ -0,0 +1,52 @@ +module coredns + +import freeflowuniverse.herolib.core.pathlib +import freeflowuniverse.herolib.develop.gittools +import os + +pub fn configure(args_ InstallArgs) ! { + mut args := args_ + mut gs := gittools.get()! + mut repo_path := '' + + if args.config_url.len > 0 { + mut repo := gs.get_repo( + url: args.config_url + )! + repo_path = repo.get_path()! + + args.config_path = repo_path + } + + if args.config_path.len == 0 { + args.config_path = '${os.home_dir()}/hero/cfg/Corefile' + } + + if args.dnszones_url.len > 0 { + mut repo := gs.get_repo( + url: args.dnszones_url + )! + repo_path = repo.get_path()! + args.dnszones_path = repo_path + } + + if args.dnszones_path.len == 0 { + args.dnszones_path = '${os.home_dir()}/hero/cfg/dnszones' + } + + mycorefile := $tmpl('templates/Corefile') + mut path := pathlib.get_file(path: args.config_path, create: true)! + path.write(mycorefile)! +} + +pub fn example_configure(args_ InstallArgs) ! { + mut args := args_ + + exampledbfile := $tmpl('templates/db.example.org') + + mut path_testzone := pathlib.get_file( + path: '${args_.dnszones_path}/db.example.org' + create: true + )! 
+ path_testzone.template_write(exampledbfile, true)! +} diff --git a/lib/installers/infra/coredns/cdns_install.v b/lib/installers/infra/coredns/cdns_install.v new file mode 100644 index 00000000..149e95da --- /dev/null +++ b/lib/installers/infra/coredns/cdns_install.v @@ -0,0 +1,138 @@ +module coredns + +import freeflowuniverse.herolib.osal +import freeflowuniverse.herolib.osal.screen +import freeflowuniverse.herolib.ui.console +import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.clients.httpconnection +import os + +@[params] +pub struct InstallArgs { +pub mut: + reset bool // this means we re-install and forgot what we did before + start bool = true + stop bool + restart bool // this means we stop if started, otherwise just start + homedir string // not sure what this is? + config_path string // path to Corefile, if empty will install default one + config_url string // path to Corefile through e.g. git url, will pull it if it is not local yet + dnszones_path string // path to where all the dns zones are + dnszones_url string // path on git url pull if needed + plugins []string // list of plugins to build CoreDNS with + example bool // if true we will install examples +} + +pub fn install(args_ InstallArgs) ! { + mut args := args_ + version := '1.11.1' + + res := os.execute('${osal.profile_path_source_and()} coredns version') + if res.exit_code == 0 { + r := res.output.split_into_lines().filter(it.trim_space().starts_with('CoreDNS-')) + if r.len != 1 { + return error("couldn't parse coredns version.\n${res.output}") + } + if texttools.version(version) > texttools.version(r[0].all_after_first('CoreDNS-')) { + args.reset = true + } + } else { + args.reset = true + } + + if args.reset { + console.print_header('install coredns') + + mut url := '' + if osal.is_linux_arm() { + url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_linux_arm64.tgz' + } else if osal.is_linux_intel() { + url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_linux_amd64.tgz' + } else if osal.is_osx_arm() { + url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_darwin_arm64.tgz' + } else if osal.is_osx_intel() { + url = 'https://github.com/coredns/coredns/releases/download/v${version}/coredns_${version}_darwin_amd64.tgz' + } else { + return error('unsported platform') + } + + mut dest := osal.download( + url: url + minsize_kb: 13000 + expand_dir: '/tmp/coredns' + )! + + mut binpath := dest.file_get('coredns')! + osal.cmd_add( + cmdname: 'coredns' + source: binpath.path + )! + } + + configure(args)! + + if args.example { + example_configure(args)! + } + + if args.restart { + restart(args)! + return + } + + if args.start { + start(args)! + } +} + +pub fn restart(args_ InstallArgs) ! { + stop(args_)! + start(args_)! +} + +pub fn stop(args_ InstallArgs) ! { + console.print_header('coredns stop') + + name := 'coredns' + + // use startup manager, see caddy + mut scr := screen.new()! + scr.kill(name)! +} + +pub fn start(args_ InstallArgs) ! { + mut args := args_ + configure(args)! + + if check()! { + return + } + + console.print_header('coredns start') + + name := 'coredns' + + mut scr := screen.new()! + + mut s := scr.add(name: name, reset: true)! + + cmd2 := "coredns -conf '${args.config_path}'" + + s.cmd_send(cmd2)! + + if !check()! 
{ + return error("coredns did not install propertly, do: curl 'http://localhost:3334/health'") + } + + console.print_header('coredns running') +} + +pub fn check() !bool { + // this checks health of coredns + mut conn := httpconnection.new(name: 'coredns', url: 'http://localhost:3334')! + r := conn.get(prefix: 'health')! + if r.trim_space() == 'OK' { + return true + } + return false +} diff --git a/lib/installers/infra/coredns/cdns_play.v b/lib/installers/infra/coredns/cdns_play.v new file mode 100644 index 00000000..034122a5 --- /dev/null +++ b/lib/installers/infra/coredns/cdns_play.v @@ -0,0 +1,49 @@ +module coredns + +import freeflowuniverse.herolib.core.playbook +import freeflowuniverse.herolib.installers.base +import os + +pub fn play(mut plbook playbook.PlayBook) ! { + base.play(playbook)! + + coredns_actions := plbook.find(filter: 'coredns.')! + if coredns_actions.len == 0 { + return + } + + mut install_actions := plbook.find(filter: 'coredns.install')! + + if install_actions.len > 0 { + for install_action in install_actions { + mut p := install_action.params + + // CoreDNS parameters + reset := p.get_default_false('reset') + start := p.get_default_true('start') + stop := p.get_default_false('stop') + restart := p.get_default_false('restart') + homedir := p.get_default('homedir', '${os.home_dir()}/hero/var/coredns')! + config_path := p.get_default('config_path', '${os.home_dir()}/hero/cfg/Corefile')! + config_url := p.get_default('config_url', '')! + dnszones_path := p.get_default('dnszones_path', '${os.home_dir()}/hero/var/coredns/zones')! + dnszones_url := p.get_default('dnszones_url', '')! + plugins := p.get_list_default('plugins', [])! + example := p.get_default_false('example') + + install( + reset: reset + start: start + stop: stop + restart: restart + homedir: homedir + config_path: config_path + config_url: config_url + dnszones_path: dnszones_path + dnszones_url: dnszones_url + plugins: plugins + example: example + )! + } + } +} diff --git a/lib/installers/infra/coredns/templates/Corefile b/lib/installers/infra/coredns/templates/Corefile new file mode 100644 index 00000000..110c192b --- /dev/null +++ b/lib/installers/infra/coredns/templates/Corefile @@ -0,0 +1,7 @@ +.:53 { + forward . 8.8.8.8 9.9.9.9 + log + errors + health :3334 + import '${args.dnszones_path}/*' +} diff --git a/lib/installers/infra/coredns/templates/db.example.org b/lib/installers/infra/coredns/templates/db.example.org new file mode 100644 index 00000000..e316525e --- /dev/null +++ b/lib/installers/infra/coredns/templates/db.example.org @@ -0,0 +1,14 @@ +??ORIGIN example.org. +^^ 3600 IN SOA sns.dns.icann.org. noc.dns.icann.org. ( + 2017042745 ; serial + 7200 ; refresh (2 hours) + 3600 ; retry (1 hour) + 1209600 ; expire (2 weeks) + 3600 ; minimum (1 hour) + ) + + 3600 IN NS a.iana-servers.net. + 3600 IN NS b.iana-servers.net. 
+
+www IN A 127.0.0.1
+    IN AAAA ::1
\ No newline at end of file
diff --git a/lib/installers/infra/gitea/.heroscript b/lib/installers/infra/gitea/.heroscript
new file mode 100644
index 00000000..e69de29b
diff --git a/lib/installers/infra/gitea/installer.v b/lib/installers/infra/gitea/installer.v
new file mode 100644
index 00000000..7ee1104a
--- /dev/null
+++ b/lib/installers/infra/gitea/installer.v
@@ -0,0 +1,77 @@
+module gitea
+
+import freeflowuniverse.herolib.installers.db.postgresql as postgresinstaller
+import freeflowuniverse.herolib.installers.base
+import freeflowuniverse.herolib.osal
+import freeflowuniverse.herolib.core.pathlib
+import freeflowuniverse.herolib.ui.console
+
+pub fn install() ! {
+	if osal.platform() != .ubuntu && osal.platform() != .arch {
+		return error('only support ubuntu and arch for now')
+	}
+
+	if osal.done_exists('gitea_install') {
+		console.print_header('gitea binaries already installed')
+		return
+	}
+
+	// make sure we install base on the node
+	base.install()!
+	postgresinstaller.install()!
+
+	version := '1.22.0'
+	url := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}-linux-amd64.xz'
+	console.print_debug(' download ${url}')
+	mut dest := osal.download(
+		url: url
+		minsize_kb: 40000
+		reset: true
+		expand_file: '/tmp/download/gitea'
+	)!
+
+	binpath := pathlib.get_file(path: '/tmp/download/gitea', create: false)!
+	osal.cmd_add(
+		cmdname: 'gitea'
+		source: binpath.path
+	)!
+
+	osal.done_set('gitea_install', 'OK')!
+
+	console.print_header('gitea installed properly.')
+}
+
+pub fn start() ! {
+	if osal.platform() != .ubuntu && osal.platform() != .arch {
+		return error('only support ubuntu and arch for now')
+	}
+
+	if osal.done_exists('gitea_install') {
+		console.print_header('gitea binaries already installed')
+		return
+	}
+
+	// make sure we install base on the node
+	base.install()!
+	postgresinstaller.install()!
+
+	version := '1.22.0'
+	url := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}-linux-amd64.xz'
+	console.print_debug(' download ${url}')
+	mut dest := osal.download(
+		url: url
+		minsize_kb: 40000
+		reset: true
+		expand_file: '/tmp/download/gitea'
+	)!
+
+	binpath := pathlib.get_file(path: '/tmp/download/gitea', create: false)!
+	osal.cmd_add(
+		cmdname: 'gitea'
+		source: binpath.path
+	)!
+
+	osal.done_set('gitea_install', 'OK')!
+ + console.print_header('gitea installed properly.') +} diff --git a/lib/installers/infra/gitea/server.v b/lib/installers/infra/gitea/server.v new file mode 100644 index 00000000..c864ffe4 --- /dev/null +++ b/lib/installers/infra/gitea/server.v @@ -0,0 +1,210 @@ +module gitea + +import freeflowuniverse.herolib.osal +import freeflowuniverse.herolib.osal.zinit +import freeflowuniverse.herolib.data.dbfs +import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.core.pathlib +import freeflowuniverse.herolib.installers.postgresql +import json +import rand +import os +import time +import freeflowuniverse.herolib.ui.console + +@[params] +pub struct Config { +pub mut: + name string = 'default' + reset bool + path string = '/data/gitea' + passwd string + postgresql_name string = 'default' + mail_from string = 'git@meet.tf' + smtp_addr string = 'smtp-relay.brevo.com' + smtp_login string @[required] + smpt_port int = 587 + smtp_passwd string + domain string @[required] + jwt_secret string + lfs_jwt_secret string + internal_token string + secret_key string +} + +pub struct Server { +pub mut: + name string + config Config + process ?zinit.ZProcess + path_config pathlib.Path +} + +// get the gitea server +//```js +// name string = 'default' +// path string = '/data/gitea' +// passwd string +//``` +// if name exists already in the config DB, it will load for that name +pub fn new(args_ Config) !Server { + install()! // make sure it has been build & ready to be used + mut args := args_ + if args.passwd == '' { + args.passwd = rand.string(12) + } + args.name = texttools.name_fix(args.name) + key := 'gitea_config_${args.name}' + mut kvs := dbfs.new(name: 'config')! + if !kvs.exists(key) { + // jwt_secret string + // lfs_jwt_secret string + // internal_token string + // secret_key string + + if args.jwt_secret == '' { + r := os.execute_or_panic('gitea generate secret JWT_SECRET') + args.jwt_secret = r.output.trim_space() + } + if args.lfs_jwt_secret == '' { + r := os.execute_or_panic('gitea generate secret LFS_JWT_SECRET') + args.lfs_jwt_secret = r.output.trim_space() + } + if args.internal_token == '' { + r := os.execute_or_panic('gitea generate secret INTERNAL_TOKEN') + args.internal_token = r.output.trim_space() + } + if args.secret_key == '' { + r := os.execute_or_panic('gitea generate secret SECRET_KEY') + args.secret_key = r.output.trim_space() + } + + data := json.encode(args) + kvs.set(key, data)! + } + return get(args.name)! +} + +pub fn get(name_ string) !Server { + console.print_header('get gitea server ${name_}') + name := texttools.name_fix(name_) + key := 'gitea_config_${name}' + mut kvs := dbfs.new(name: 'config')! + if kvs.exists(key) { + data := kvs.get(key)! + args := json.decode(Config, data)! + + mut server := Server{ + name: name + config: args + path_config: pathlib.get_dir(path: '${args.path}/cfg', create: true)! + } + + mut z := zinit.new()! + processname := 'gitea_${name}' + if z.process_exists(processname) { + server.process = z.process_get(processname)! + } + // console.print_debug(" - server get ok") + server.start()! 
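+		// the server was started above, so callers of get() always receive a running instance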
+ return server + } + return error("can't find server gitea with name ${name}") +} + +// return status +// ``` +// pub enum ZProcessStatus { +// unknown +// init +// ok +// error +// blocked +// spawned +// } +// ``` +pub fn (mut server Server) status() zinit.ZProcessStatus { + mut process := server.process or { return .unknown } + return process.status() or { return .unknown } +} + +// run gitea as docker compose +pub fn (mut server Server) start() ! { + // if server.ok(){ + // return + // } + + console.print_header('start gitea: ${server.name}') + mut db := postgresql.get(server.config.postgresql_name)! + + // now create the DB + db.db_create('gitea')! + + // if true{ + // panic("sd") + // } + + // TODO: postgresql can be on other server, need to fill in all arguments in template + t1 := $tmpl('templates/app.ini') + mut config_path := server.path_config.file_get_new('app.ini')! + config_path.write(t1)! + + osal.user_add(name: 'git')! + + osal.exec( + cmd: ' + chown -R git:root ${server.config.path} + chmod -R 777 /usr/local/bin + ' + )! + + mut z := zinit.new()! + processname := 'gitea_${server.name}' + mut p := z.process_new( + name: processname + cmd: ' + cd /tmp + sudo -u git bash -c \'gitea web --config ${config_path.path} --verbose\' + ' + )! + + p.output_wait('Starting new Web server: tcp:0.0.0.0:3000', 120)! + + o := p.log()! + console.print_debug(o) + + server.check()! + + console.print_header('gitea start ok.') +} + +pub fn (mut server Server) restart() ! { + server.stop()! + server.start()! +} + +pub fn (mut server Server) stop() ! { + print_backtrace() + console.print_header('stop gitea: ${server.name}') + mut process := server.process or { return } + return process.stop() +} + +// check health, return true if ok +pub fn (mut server Server) check() ! { + mut p := server.process or { return error("can't find process for server.") } + p.check()! + // TODO: need to do some other checks to gitea e.g. rest calls +} + +// check health, return true if ok +pub fn (mut server Server) ok() bool { + server.check() or { return false } + return true +} + +// remove all data +pub fn (mut server Server) destroy() ! { + server.stop()! + server.path_config.delete()! 
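+	// note: this removes the config directory only; repository data under config.path is left in place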
+} diff --git a/lib/installers/infra/gitea/templates/app.ini b/lib/installers/infra/gitea/templates/app.ini new file mode 100644 index 00000000..d823e925 --- /dev/null +++ b/lib/installers/infra/gitea/templates/app.ini @@ -0,0 +1,108 @@ +APP_NAME = ${server.config.name} +RUN_MODE = prod +RUN_USER = git +WORK_PATH = ${server.config.path} + +[repository] +ROOT = ${server.config.path}/gitrepo + +[repository.local] +LOCAL_COPY_PATH = ${server.config.path}/localrepo + +[repository.upload] +TEMP_PATH = ${server.config.path}/uploads + +[server] +APP_DATA_PATH = ${server.config.domain} +DOMAIN = ${server.config.domain} +SSH_DOMAIN = ${server.config.domain} +SSH_PORT = 22 +SSH_LISTEN_PORT = 22 +HTTP_PORT = 3000 +ROOT_URL = https://${server.config.domain} +DISABLE_SSH = false +LFS_START_SERVER = true +LFS_JWT_SECRET = ${server.config.lfs_jwt_secret} +OFFLINE_MODE = false + +[database] +PATH = ${server.config.path}/gitea.db +DB_TYPE = postgres +HOST = localhost:5432 +NAME = gitea +USER = root +PASSWD = ${db.config.passwd} +LOG_SQL = false +SCHEMA = +SSL_MODE = disable + +[indexer] +ISSUE_INDEXER_PATH = ${server.config.path}/indexers/issues.bleve + +[session] +PROVIDER_CONFIG = ${server.config.path}/sessions +PROVIDER = file + +[picture] +AVATAR_UPLOAD_PATH = ${server.config.path}/avatars +REPOSITORY_AVATAR_UPLOAD_PATH = ${server.config.path}/repo-avatars + +[attachment] +PATH = ${server.config.path}/attachments + +[log] +MODE = console +LEVEL = info +ROOT_PATH = ${server.config.path}/log + +[security] +INSTALL_LOCK = true +SECRET_KEY = ${server.config.secret_key} +REVERSE_PROXY_LIMIT = 1 +REVERSE_PROXY_TRUSTED_PROXIES = * +INTERNAL_TOKEN = ${server.config.internal_token} +PASSWORD_HASH_ALGO = pbkdf2 + +[service] +DISABLE_REGISTRATION = false +REQUIRE_SIGNIN_VIEW = false +REGISTER_EMAIL_CONFIRM = false +ENABLE_NOTIFY_MAIL = true +ALLOW_ONLY_EXTERNAL_REGISTRATION = false +ENABLE_CAPTCHA = false +DEFAULT_KEEP_EMAIL_PRIVATE = false +DEFAULT_ALLOW_CREATE_ORGANIZATION = true +DEFAULT_ENABLE_TIMETRACKING = true +NO_REPLY_ADDRESS = noreply.localhost + +[lfs] +PATH = ${server.config.path}/lfs + +[mailer] +ENABLED = true +FROM = ${server.config.mail_from} +; PROTOCOL = smtps +SMTP_ADDR = ${server.config.smtp_addr} +SMTP_PORT = ${server.config.smpt_port} +USER = ${server.config.smtp_login} +PASSWD = ${server.config.smtp_passwd} + +[openid] +ENABLE_OPENID_SIGNIN = true +ENABLE_OPENID_SIGNUP = true + +[cron.update_checker] +ENABLED = false + +[repository.pull-request] +DEFAULT_MERGE_STYLE = merge + +[repository.signing] +DEFAULT_TRUST_MODEL = committer + +[oauth2] +JWT_SECRET = ${server.config.jwt_secret} + +[actions] +ENABLED=true + diff --git a/lib/installers/infra/livekit/.heroscript b/lib/installers/infra/livekit/.heroscript new file mode 100644 index 00000000..510f4a67 --- /dev/null +++ b/lib/installers/infra/livekit/.heroscript @@ -0,0 +1,13 @@ + +!!hero_code.generate_installer + name:'livekit' + classname:'LivekitServer' + singleton:0 + templates:1 + default:1 + title:'' + supported_platforms:'' + reset:0 + startupmanager:1 + hasconfig:1 + build:0 \ No newline at end of file diff --git a/lib/installers/infra/livekit/livekit_actions.v b/lib/installers/infra/livekit/livekit_actions.v new file mode 100644 index 00000000..cf906d54 --- /dev/null +++ b/lib/installers/infra/livekit/livekit_actions.v @@ -0,0 +1,110 @@ +module livekit + +import freeflowuniverse.herolib.osal +import freeflowuniverse.herolib.ui.console +import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.core.pathlib + + 
+import freeflowuniverse.herolib.osal.zinit
+import net.http +import json + + +import os + + +// checks if a certain version or above is installed +fn installed() !bool { + + res := os.execute('${osal.profile_path_source_and()} livekit-server -v') + if res.exit_code != 0 { + return false + } + r := res.output.split_into_lines().filter(it.contains("version")) + if r.len != 1 { + return error("couldn't parse livekit version.\n${res.output}") + } + installedversion:=r[0].all_after_first('version') + if texttools.version(version) != texttools.version(installedversion) { + return false + } + return true +} + +fn install() ! { + console.print_header('install livekit') + mut installer := get()! + osal.execute_silent(" + curl -s https://livekit.io/install.sh | bash + ")! +} + + + + +fn startupcmd () ![]zinit.ZProcessNewArgs{ + mut res := []zinit.ZProcessNewArgs{} + mut installer := get()! + res << zinit.ZProcessNewArgs{ + name: 'livekit' + cmd: 'livekit-server --config ${installer.configpath} --bind 0.0.0.0' + } + + return res + +} + +fn running() !bool { + mut installer := get()! + + myport:=installer.nr*2+7880 + endpoint := '${http://localhost:${myport}/api/v1/health' + + response := http.get(endpoint) or { + println('Error connecting to LiveKit server: $err') + return false + } + + if response.status_code != 200 { + println('LiveKit server returned non-200 status code: ${response.status_code}') + return false + } + + health_info := json.decode(map[string]string, response.body) or { + println('Error decoding LiveKit server response: $err') + return false + } + + if health_info['status'] != 'ok' { + println('LiveKit server health check failed: ${health_info["status"]}') + return false + } + + return true +} + +fn start_pre()!{ + +} + +fn start_post()!{ + +} + +fn stop_pre()!{ + +} + +fn stop_post()!{ + +} + + +fn destroy() ! { + mut installer := get()! + os.rm(" + ${installer.configpath} + livekit-server + ")! +} + diff --git a/lib/installers/infra/livekit/livekit_factory_.v b/lib/installers/infra/livekit/livekit_factory_.v new file mode 100644 index 00000000..5087834f --- /dev/null +++ b/lib/installers/infra/livekit/livekit_factory_.v @@ -0,0 +1,229 @@ + +module livekit + +import freeflowuniverse.herolib.core.base +import freeflowuniverse.herolib.core.playbook + + +import freeflowuniverse.herolib.sysadmin.startupmanager +import freeflowuniverse.herolib.ui.console +import time + +__global ( + livekit_global map[string]&LivekitServer + livekit_default string +) + +/////////FACTORY + +@[params] +pub struct ArgsGet{ +pub mut: + name string = "default" +} + + +fn args_get (args_ ArgsGet) ArgsGet { + mut args:=args_ + if args.name == ""{ + args.name = livekit_default + } + if args.name == ""{ + args.name = "default" + } + return args +} + +pub fn get(args_ ArgsGet) !&LivekitServer { + mut args := args_get(args_) + if !(args.name in livekit_global) { + if ! config_exists(){ + if default{ + config_save()! + } + } + config_load()! + } + return livekit_global[args.name] or { + println(livekit_global) + panic("bug in get from factory: ") + } +} + + + + +fn config_exists(args_ ArgsGet) bool { + mut args := args_get(args_) + mut context:=base.context() or { panic("bug") } + return context.hero_config_exists("livekit",args.name) +} + +fn config_load(args_ ArgsGet) ! { + mut args := args_get(args_) + mut context:=base.context()! + mut heroscript := context.hero_config_get("livekit",args.name)! + play(heroscript:heroscript)! +} + +fn config_save(args_ ArgsGet) ! { + mut args := args_get(args_) + mut context:=base.context()! 
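+	// stores the default heroscript in the context's hero config (documented in the base readme as ~/hero/context/$contextname/livekit__$name.yaml)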
+ context.hero_config_set("livekit",args.name,heroscript_default()!)! +} + + +fn set(o LivekitServer)! { + mut o2:=obj_init(o)! + livekit_global["default"] = &o2 +} + + +@[params] +pub struct PlayArgs { +pub mut: + name string = 'default' + heroscript string //if filled in then plbook will be made out of it + plbook ?playbook.PlayBook + reset bool + + start bool + stop bool + restart bool + delete bool + configure bool //make sure there is at least one installed +} + +pub fn play(args_ PlayArgs) ! { + + mut args:=args_ + + + if args.heroscript == "" { + args.heroscript = heroscript_default()! + } + mut plbook := args.plbook or { + playbook.new(text: args.heroscript)! + } + + + mut install_actions := plbook.find(filter: 'livekit.configure')! + if install_actions.len > 0 { + for install_action in install_actions { + mut p := install_action.params + mycfg:=cfg_play(p)! + set(mycfg)! + } + } + +} + + + + +//////////////////////////////////////////////////////////////////////////////////////////////////// +//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS /////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////////////////////////// + + +//load from disk and make sure is properly intialized +pub fn (mut self LivekitServer) reload() ! { + switch(self.name) + self=obj_init(self)! +} + + +pub fn (mut self LivekitServer) start() ! { + switch(self.name) + if self.running()!{ + return + } + + console.print_header('livekit start') + + configure()! + + start_pre()! + + mut sm := startupmanager.get()! + + for zprocess in startupcmd()!{ + sm.start(zprocess.name)! + } + + start_post()! + + for _ in 0 .. 50 { + if self.running()! { + return + } + time.sleep(100 * time.millisecond) + } + return error('livekit did not install properly.') + +} + +pub fn (mut self LivekitServer) install_start(args RestartArgs) ! { + switch(self.name) + self.install(args)! + self.start()! +} + +pub fn (mut self LivekitServer) stop() ! { + switch(self.name) + stop_pre()! + mut sm := startupmanager.get()! + for zprocess in startupcmd()!{ + sm.stop(zprocess.name)! + } + stop_post()! +} + +pub fn (mut self LivekitServer) restart() ! { + switch(self.name) + self.stop()! + self.start()! +} + +pub fn (mut self LivekitServer) running() !bool { + switch(self.name) + mut sm := startupmanager.get()! + + //walk over the generic processes, if not running return + for zprocess in startupcmd()!{ + r:=sm.running(zprocess.name)! + if r==false{ + return false + } + } + return running()! +} + +@[params] +pub struct InstallArgs{ +pub mut: + reset bool +} + +pub fn (mut self LivekitServer) install(args InstallArgs) ! { + switch(self.name) + if args.reset || (!installed()!) { + install()! + } +} + + + +pub fn (mut self LivekitServer) destroy() ! { + switch(self.name) + + self.stop()! + destroy()! 
+}
+
+// switch instance to be used for livekit
+pub fn switch(name string) {
+	livekit_default = name
+}
diff --git a/lib/installers/infra/livekit/livekit_model.v b/lib/installers/infra/livekit/livekit_model.v
new file mode 100644
index 00000000..b24a18d2
--- /dev/null
+++ b/lib/installers/infra/livekit/livekit_model.v
@@ -0,0 +1,89 @@
+module livekit
+
+import freeflowuniverse.herolib.data.paramsparser
+import freeflowuniverse.herolib.core.pathlib
+import freeflowuniverse.herolib.ui.console
+import os
+
+pub const version = '1.7.2'
+const singleton = false
+const default = true
+
+// TODO: example heroscript, keep it in line with the struct below (is structured as heroscript)
+pub fn heroscript_default() !string {
+	heroscript := "
+	!!livekit.configure
+		name:'default'
+		apikey: ''
+		apisecret: ''
+		nr: 1 // each specific instance onto this server needs to have a unique nr
+	"
+	return heroscript
+}
+
+// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE: THE CONFIG OBJECT AS CONFIGURED AND MODELLED
+pub struct LivekitServer {
+pub mut:
+	name       string = 'default'
+	apikey     string
+	apisecret  string @[secret]
+	configpath string
+	nr         int // each specific instance onto this server needs to have a unique nr
+}
+
+fn cfg_play(p paramsparser.Params) !LivekitServer {
+	mut mycfg := LivekitServer{
+		name:      p.get_default('name', 'default')!
+		apikey:    p.get_default('apikey', '')!
+		apisecret: p.get_default('apisecret', '')!
+		nr:        p.get_default_int('nr', 0)!
+	}
+	return mycfg
+}
+
+fn obj_init(obj_ LivekitServer) !LivekitServer {
+	mut mycfg := obj_
+	if mycfg.configpath == '' {
+		mycfg.configpath = '${os.home_dir()}/hero/cfg/livekit_${mycfg.name}.yaml'
+	}
+	if mycfg.apikey == '' || mycfg.apisecret == '' {
+		// Execute the livekit-server generate-keys command
+		result := os.execute('livekit-server generate-keys')
+		if result.exit_code != 0 {
+			return error('Failed to generate LiveKit keys')
+		}
+		// Split the output into lines
+		lines := result.output.split_into_lines()
+		// Extract API Key and API Secret
+		for line in lines {
+			if line.starts_with('API Key:') {
+				mycfg.apikey = line.all_after('API Key:').trim_space()
+			} else if line.starts_with('API Secret:') {
+				mycfg.apisecret = line.all_after('API Secret:').trim_space()
+			}
+		}
+		// Verify that both keys were extracted
+		if mycfg.apikey == '' || mycfg.apisecret == '' {
+			return error('Failed to extract API Key or API Secret')
+		}
+	}
+	return mycfg
+}
+
+// called before start, writes the config file from the template
+fn configure() ! {
+	mut installer := get()!
+
+	mut mycode := $tmpl('templates/config.yaml')
+	mut path := pathlib.get_file(path: installer.configpath, create: true)!
+	path.write(mycode)!
+	console.print_debug(mycode)
+}
diff --git a/lib/installers/infra/livekit/readme.md b/lib/installers/infra/livekit/readme.md
new file mode 100644
index 00000000..61c79442
--- /dev/null
+++ b/lib/installers/infra/livekit/readme.md
@@ -0,0 +1,22 @@
+# livekit
+
+To get started
+
+```vlang
+import freeflowuniverse.herolib.installers.infra.livekit
+
+mut installer := livekit.get()!
+
+installer.start()!
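+
+// Optional: configure the instance through heroscript before starting it.
+// This is a minimal sketch; the action name and parameters mirror
+// heroscript_default() in livekit_model.v, fill in your own values.
+livekit.play(
+	heroscript: "
+	!!livekit.configure
+		name:'default'
+		apikey: ''
+		apisecret: ''
+		nr: 1
+	"
+)!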
+ + + + +``` + +livekit once installed will have generated the secret keys \ No newline at end of file diff --git a/lib/installers/infra/livekit/templates/config.yaml b/lib/installers/infra/livekit/templates/config.yaml new file mode 100644 index 00000000..fafc4e47 --- /dev/null +++ b/lib/installers/infra/livekit/templates/config.yaml @@ -0,0 +1,26 @@ +port: ${installer.nr*2+7880} +log_level: info +rtc: + tcp_port: ${installer.nr*2+7881} + port_range_start: ${installer.nr*1000+50000} + port_range_end: ${installer.nr*1000+999+50000} + # use_external_ip should be set to true for most cloud environments where + # the host has a public IP address, but is not exposed to the process. + # LiveKit will attempt to use STUN to discover the true IP, and advertise + # that IP with its clients + use_external_ip: true +redis: + # redis is recommended for production deploys + address: localhost:6379 +keys: + # key-value pairs + ${installer.apikey}: ${installer.apisecret} + +# When enabled, LiveKit will expose prometheus metrics on :6789/metrics +#prometheus_port: ${installer.nr+6789} +# turn: +# enabled: true +# # domain must match tls certificate +# domain: +# # defaults to 3478. If not using a load balancer, must be set to 443. +# tls_port: 3478 \ No newline at end of file diff --git a/lib/installers/install_multi.v b/lib/installers/install_multi.v new file mode 100644 index 00000000..4bc316b8 --- /dev/null +++ b/lib/installers/install_multi.v @@ -0,0 +1,191 @@ +module installers + +import freeflowuniverse.herolib.installers.base +import freeflowuniverse.herolib.installers.develapps.vscode +import freeflowuniverse.herolib.installers.develapps.chrome +import freeflowuniverse.herolib.installers.virt.podman as podman_installer +import freeflowuniverse.herolib.installers.virt.buildah as buildah_installer + +import freeflowuniverse.herolib.installers.virt.lima +import freeflowuniverse.herolib.installers.net.mycelium +import freeflowuniverse.herolib.core.texttools +import freeflowuniverse.herolib.installers.lang.rust +import freeflowuniverse.herolib.installers.lang.golang +import freeflowuniverse.herolib.installers.lang.vlang +import freeflowuniverse.herolib.installers.lang.herolib +import freeflowuniverse.herolib.installers.lang.nodejs +import freeflowuniverse.herolib.installers.lang.python +import freeflowuniverse.herolib.installers.web.zola +import freeflowuniverse.herolib.installers.web.tailwind +import freeflowuniverse.herolib.installers.hero.heroweb +import freeflowuniverse.herolib.installers.hero.herodev +import freeflowuniverse.herolib.installers.sysadmintools.daguserver +import freeflowuniverse.herolib.installers.sysadmintools.rclone +import freeflowuniverse.herolib.installers.sysadmintools.prometheus +import freeflowuniverse.herolib.installers.sysadmintools.grafana +import freeflowuniverse.herolib.installers.sysadmintools.fungistor +import freeflowuniverse.herolib.installers.sysadmintools.garage_s3 +import freeflowuniverse.herolib.installers.infra.zinit + +@[params] +pub struct InstallArgs { +pub mut: + names string + reset bool + uninstall bool + gitpull bool + gitreset bool + start bool +} + +pub fn names(args_ InstallArgs) []string { + names := ' + base + caddy + chrome + hero + dagu + develop + fungistor + garage_s3 + golang + grafana + hero + herodev + heroweb + lima + mycelium + nodejs + herocontainers + prometheus + rclone + rust + tailwind + vlang + vscode + zinit + zola + ' + mut ns := texttools.to_array(names) + ns.sort() + return ns +} + +pub fn install_multi(args_ InstallArgs) ! 
{
+	mut args := args_
+	mut items := []string{}
+	for item in args.names.split(',').map(it.trim_space()) {
+		if item !in items {
+			items << item
+		}
+	}
+	for item in items {
+		match item {
+			'base' {
+				base.install(reset: args.reset)!
+			}
+			'develop' {
+				base.install(reset: args.reset, develop: true)!
+			}
+			'rclone' {
+				// rclone.install(reset: args.reset)!
+				mut rc := rclone.get()!
+				rc.install(reset: args.reset)!
+			}
+			'rust' {
+				rust.install(reset: args.reset)!
+			}
+			'golang' {
+				mut g := golang.get()!
+				g.install(reset: args.reset)!
+			}
+			'vlang' {
+				vlang.install(reset: args.reset)!
+			}
+			'hero' {
+				herolib.install(
+					reset:     args.reset
+					git_pull:  args.gitpull
+					git_reset: args.gitreset
+				)!
+				herolib.hero_install(reset: args.reset)!
+			}
+			'caddy' {
+				// caddy.install(reset: args.reset)!
+				// caddy.configure_examples()!
+			}
+			'chrome' {
+				chrome.install(reset: args.reset, uninstall: args.uninstall)!
+			}
+			'mycelium' {
+				mycelium.install(reset: args.reset)!
+				mycelium.start()!
+			}
+			'garage_s3' {
+				garage_s3.install(reset: args.reset, config_reset: args.reset, restart: true)!
+			}
+			'fungistor' {
+				fungistor.install(reset: args.reset)!
+			}
+			'lima' {
+				lima.install(reset: args.reset, uninstall: args.uninstall)!
+			}
+			'herocontainers' {
+				mut podman_installer0 := podman_installer.get()!
+				mut buildah_installer0 := buildah_installer.get()!
+
+				if args.reset {
+					podman_installer0.destroy()! // will remove all
+					buildah_installer0.destroy()! // will remove all
+				}
+				podman_installer0.install()!
+				buildah_installer0.install()!
+			}
+			'prometheus' {
+				prometheus.install(reset: args.reset, uninstall: args.uninstall)!
+			}
+			'grafana' {
+				grafana.install(reset: args.reset, uninstall: args.uninstall)!
+			}
+			'vscode' {
+				vscode.install(reset: args.reset)!
+			}
+			'nodejs' {
+				nodejs.install(reset: args.reset)!
+			}
+			'python' {
+				python.install()!
+			}
+			'herodev' {
+				herodev.install()!
+			}
+			// 'heroweb' {
+			// 	heroweb.install()!
+			// }
+			'dagu' {
+				// will call the installer underneath
+				mut dserver := daguserver.get()!
+				dserver.install()!
+				dserver.restart()!
+				// mut dagucl:=dserver.client()!
+			}
+			'zola' {
+				mut i2 := zola.get()!
+				i2.install()! // will also install tailwind
+			}
+			'tailwind' {
+				mut i := tailwind.get()!
+				i.install()!
+			}
+			'zinit' {
+				mut i := zinit.get()!
+				i.install()!
+ } + else { + return error('cannot find installer for: ${item}') + } + } + } +} diff --git a/lib/installers/ulist/ulist.v b/lib/installers/ulist/ulist.v new file mode 100644 index 00000000..5ef70a3c --- /dev/null +++ b/lib/installers/ulist/ulist.v @@ -0,0 +1,24 @@ +module ulist + +// import freeflowuniverse.herolib.core.pathlib +// import freeflowuniverse.herolib.develop.gittools + +// U stands for Upload +pub struct UList { +pub mut: + root string // common base for all UFiles + items []UFile +} + +pub struct UFile { +pub mut: + path string + alias string // if other name used for upload, otherwise is the filename + cat UCat +} + +pub enum UCat { + file + bin + config +} From 0821c052e7d181ff59f90894920b38b764aa8634 Mon Sep 17 00:00:00 2001 From: despiegk Date: Tue, 31 Dec 2024 10:20:37 +0100 Subject: [PATCH 3/3] prob still broken, but don\t have time right now --- .../templates/objname_factory_.vtemplate | 51 +++--- lib/core/base/baseconfig.v | 164 ------------------ lib/core/base/configurator.v | 121 ------------- lib/core/base/context.v | 15 +- lib/core/base/session.v | 8 +- lib/core/base/session_logger.v | 2 +- lib/data/encoderhero/decoder_test.v | 2 +- lib/data/encoderhero/encoder_test.v | 2 +- 8 files changed, 43 insertions(+), 322 deletions(-) delete mode 100644 lib/core/base/baseconfig.v delete mode 100644 lib/core/base/configurator.v diff --git a/lib/code/generator/installer_client/templates/objname_factory_.vtemplate b/lib/code/generator/installer_client/templates/objname_factory_.vtemplate index ff21aaf8..f88f7943 100644 --- a/lib/code/generator/installer_client/templates/objname_factory_.vtemplate +++ b/lib/code/generator/installer_client/templates/objname_factory_.vtemplate @@ -4,6 +4,9 @@ module ${model.name} import freeflowuniverse.herolib.core.base import freeflowuniverse.herolib.core.playbook import freeflowuniverse.herolib.ui.console +@if model.hasconfig +import freeflowuniverse.herolib.data.encoderhero +@end @if model.cat == .installer import freeflowuniverse.herolib.sysadmin.startupmanager @@ -37,24 +40,24 @@ fn args_get (args_ ArgsGet) ArgsGet { } pub fn get(args_ ArgsGet) !&${model.classname} { - mut model := args_get(args_) - if !(model.name in ${model.name}_global) { - if model.name=="default"{ - if ! config_exists(model){ + mut args := args_get(args_) + if !(args.name in ${args.name}_global) { + if args.name=="default"{ + if ! config_exists(args){ if default{ - config_save(model)! + mut context:=base.context() or { panic("bug") } + context.hero_config_set("${model.name}",model.name,heroscript_default()!)! } } - config_load(model)! + load(args)! } } - return ${model.name}_global[model.name] or { - println(${model.name}_global) - panic("could not get config for ${model.name} with name:??{model.name}") + return ${args.name}_global[args.name] or { + println(${args.name}_global) + panic("could not get config for ${args.name} with name:??{model.name}") } } - @else pub fn get(args_ ArgsGet) !&${model.classname} { return &${model.classname}{} @@ -62,32 +65,38 @@ pub fn get(args_ ArgsGet) !&${model.classname} { @end @if model.hasconfig -fn config_exists(args_ ArgsGet) bool { + +//set the model in mem and the config on the filesystem +fn set(o ${model.classname})! { + mut o2:=obj_init(o)! 
+ ${model.name}_global[o.name] = &o2 + ${model.name}_default = o.name +} + +//check we find the config on the filesystem +fn exists(args_ ArgsGet) bool { mut model := args_get(args_) mut context:=base.context() or { panic("bug") } return context.hero_config_exists("${model.name}",model.name) } -fn config_load(args_ ArgsGet) ! { +//load the config error if it doesn't exist +fn load(args_ ArgsGet) ! { mut model := args_get(args_) mut context:=base.context()! mut heroscript := context.hero_config_get("${model.name}",model.name)! play(heroscript:heroscript)! } -fn config_save(args_ ArgsGet) ! { - mut model := args_get(args_) +//save the config to the filesystem in the context +fn save(o ${model.classname})! { + mut model := args_get(args_) mut context:=base.context()! - context.hero_config_set("${model.name}",model.name,heroscript_default()!)! + heroscript := encoderhero.encode[${model.classname}](o2)! + context.hero_config_set("${model.name}",model.name,heroscript)! } -fn set(o ${model.classname})! { - mut o2:=obj_init(o)! - ${model.name}_global[o.name] = &o2 - ${model.name}_default = o.name -} - ^^[params] pub struct PlayArgs { diff --git a/lib/core/base/baseconfig.v b/lib/core/base/baseconfig.v deleted file mode 100644 index 41202666..00000000 --- a/lib/core/base/baseconfig.v +++ /dev/null @@ -1,164 +0,0 @@ -module base - -import json -// import freeflowuniverse.herolib.ui.console - -// is an object which has a configurator, session and config object which is unique for the model -// T is the Config Object - -pub struct BaseConfig[T] { -mut: - configurator_ ?Configurator[T] @[skip; str: skip] - config_ ?&T - session_ ?&Session @[skip; str: skip] - configtype string -pub mut: - instance string -} - -pub fn (mut self BaseConfig[T]) session() !&Session { - mut mysession := self.session_ or { - mut c := context()! - mut r := c.redis()! - incrkey := 'sessions:base:latest:${self.configtype}:${self.instance}' - latestid := r.incr(incrkey)! - name := '${self.configtype}_${self.instance}_${latestid}' - mut s := c.session_new(name: name)! - self.session_ = &s - &s - } - - return mysession -} - -// management class of the configs of this obj -pub fn (mut self BaseConfig[T]) configurator() !&Configurator[T] { - if self.configurator_ == none { - mut c := configurator_new[T]( - instance: self.instance - )! - self.configurator_ = c - } - return &(self.configurator_ or { return error('configurator not initialized') }) -} - -// will overwrite the config -pub fn (mut self BaseConfig[T]) config_set(myconfig T) ! { - self.config_ = &myconfig - self.config_save()! -} - -pub fn (mut self BaseConfig[T]) config_new() !&T { - config := self.config_ or { - mut configurator := self.configurator()! - mut c := configurator.new()! - self.config_ = &c - &c - } - - self.config_save()! - return config -} - -pub fn (mut self BaseConfig[T]) config() !&T { - mut config := self.config_ or { return error('config was not initialized yet') } - - return config -} - -pub fn (mut self BaseConfig[T]) config_get() !&T { - mut mycontext := context()! - mut config := self.config_ or { - mut configurator := self.configurator()! - if !(configurator.exists()!) { - mut mycfg := self.config_new()! - return mycfg - } - - mut db := mycontext.db_config_get()! - if !db.exists(key: configurator.config_key())! { - return error("can't find configuration with name: ${configurator.config_key()} in context:'${mycontext.config.name}'") - } - data := db.get(key: configurator.config_key())! - - mut c := json.decode(T, data)! 
- $for field in T.fields { - field_attrs := attrs_get(field.attrs) - if 'secret' in field_attrs { - // QUESTION: is it ok if we only support encryption for string fields - $if field.typ is string { - v := c.$(field.name) - c.$(field.name) = mycontext.secret_decrypt(v)! - // console.print_debug('FIELD DECRYPTED: ${field} ${field.name}') - } - } - } - self.config_ = &c - &c - } - - return config -} - -pub fn (mut self BaseConfig[T]) config_save() ! { - mut config2 := *self.config()! // dereference so we don't modify the original - mut mycontext := context()! - // //walk over the properties see where they need to be encrypted, if yes encrypt - $for field in T.fields { - field_attrs := attrs_get(field.attrs) - if 'secret' in field_attrs { - // QUESTION: is it ok if we only support encryption for string fields - $if field.typ is string { - v := config2.$(field.name) - config2.$(field.name) = mycontext.secret_encrypt(v)! - } - // console.print_debug('FIELD ENCRYPTED: ${field.name}') - } - } - mut configurator := self.configurator()! - configurator.set(config2)! -} - -pub fn (mut self BaseConfig[T]) config_delete() ! { - mut configurator := self.configurator()! - configurator.delete()! - self.config_ = none -} - -pub enum Action { - set - get - new - delete -} - -// init our class with the base session_args -pub fn (mut self BaseConfig[T]) init(configtype string, instance string, action Action, myconfig T) ! { - self.instance = instance - self.configtype = configtype - if action == .get { - self.config_get()! - } else if action == .new { - self.config_new()! - } else if action == .delete { - self.config_delete()! - } else if action == .set { - self.config_set(myconfig)! - } else { - panic('bug') - } -} - -// will return {'name': 'teststruct', 'params': ''} -fn attrs_get(attrs []string) map[string]string { - mut out := map[string]string{} - for i in attrs { - if i.contains('=') { - kv := i.split('=') - out[kv[0].trim_space().to_lower()] = kv[1].trim_space().to_lower() - } else { - out[i.trim_space().to_lower()] = '' - } - } - return out -} diff --git a/lib/core/base/configurator.v b/lib/core/base/configurator.v deleted file mode 100644 index db27b64d..00000000 --- a/lib/core/base/configurator.v +++ /dev/null @@ -1,121 +0,0 @@ -module base - -import json -import freeflowuniverse.herolib.ui.console - -@[heap] -pub struct Configurator[T] { -pub mut: - // context &Context @[skip; str: skip] - instance string - description string - configured bool - configtype string // e.g. sshclient -} - -@[params] -pub struct ConfiguratorArgs { -pub mut: - // context &Context // optional context for the configurator - instance string @[required] -} - -// name is e.g. mailclient (the type of configuration setting) -// instance is the instance of the config e.g. kds -// the context defines the context in which we operate, is optional will get the default one if not set -pub fn configurator_new[T](args ConfiguratorArgs) !Configurator[T] { - return Configurator[T]{ - // context: args.context - configtype: T.name.to_lower() - instance: args.instance - } -} - -fn (mut self Configurator[T]) config_key() string { - return '${self.configtype}_config_${self.instance}' -} - -// set the full configuration as one object to dbconfig -pub fn (mut self Configurator[T]) set(args T) ! { - mut mycontext := context()! - mut db := mycontext.db_config_get()! - data := json.encode_pretty(args) - db.set(key: self.config_key(), value: data)! -} - -pub fn (mut self Configurator[T]) exists() !bool { - mut mycontext := context()! 
- mut db := mycontext.db_config_get()! - return db.exists(key: self.config_key()) -} - -pub fn (mut self Configurator[T]) new() !T { - return T{ - instance: self.instance - description: self.description - } -} - -pub fn (mut self Configurator[T]) get() !T { - mut mycontext := context()! - mut db := mycontext.db_config_get()! - if !db.exists(key: self.config_key())! { - return error("can't find configuration with name: ${self.config_key()} in context:'${mycontext.config.name}'") - } - data := db.get(key: self.config_key())! - return json.decode(T, data)! -} - -pub fn (mut self Configurator[T]) delete() ! { - mut mycontext := context()! - mut db := mycontext.db_config_get()! - db.delete(key: self.config_key())! -} - -pub fn (mut self Configurator[T]) getset(args T) !T { - mut mycontext := context()! - mut db := mycontext.db_config_get()! - if db.exists(key: self.config_key())! { - return self.get()! - } - self.set(args)! - return self.get()! -} - -@[params] -pub struct PrintArgs { -pub mut: - name string -} - -pub fn (mut self Configurator[T]) list() ![]string { - panic('implement') -} - -pub fn (mut self Configurator[T]) configprint(args PrintArgs) ! { - mut mycontext := context()! - mut db := mycontext.db_config_get()! - if args.name.len > 0 { - if db.exists(key: self.config_key())! { - data := db.get(key: self.config_key())! - c := json.decode(T, data)! - console.print_debug('${c}') - console.print_debug('') - } else { - return error("Can't find connection with name: ${args.name}") - } - } else { - panic('implement') - // for item in list()! { - // // console.print_debug(" ==== $item") - // configprint(name: item)! - // } - } -} - -// init our class with the base session_args -// pub fn (mut self Configurator[T]) init(session_args_ SessionNewArgs) ! { -// self.session_=session_args.session or { -// session_new(session_args)! -// } -// } diff --git a/lib/core/base/context.v b/lib/core/base/context.v index 3ed47031..6e04fc9a 100644 --- a/lib/core/base/context.v +++ b/lib/core/base/context.v @@ -3,7 +3,6 @@ module base import freeflowuniverse.herolib.data.paramsparser import freeflowuniverse.herolib.core.redisclient import freeflowuniverse.herolib.data.dbfs -// import freeflowuniverse.herolib.crypt.secp256k1 import freeflowuniverse.herolib.crypt.aes_symmetric import freeflowuniverse.herolib.ui import freeflowuniverse.herolib.ui.console @@ -134,18 +133,18 @@ pub fn (mut self Context) db_config_get() !dbfs.DB { pub fn (mut self Context) hero_config_set(cat string, name string, content_ string) ! { mut content := texttools.dedent(content_) content = rootpath.shell_expansion(content) - path := '${os.home_dir()}/hero/context/${self.config.name}/${cat}__${name}.yaml' + path := '${self.path()!.path}/${cat}__${name}.yaml' mut config_file := pathlib.get_file(path: path)! config_file.write(content)! } -pub fn (mut self Context) hero_config_exists(cat string, name string) bool { - path := '${os.home_dir()}/hero/context/${self.config.name}/${cat}__${name}.yaml' +pub fn (mut self Context) hero_config_exists(cat string, name string) !bool { + path := '${self.path()!.path}/${cat}__${name}.yaml' return os.exists(path) } pub fn (mut self Context) hero_config_get(cat string, name string) !string { - path := '${os.home_dir()}/hero/context/${self.config.name}/${cat}__${name}.yaml' + path := '${self.path()!.path}/${cat}__${name}.yaml' mut config_file := pathlib.get_file(path: path, create: false)! return config_file.read()! } @@ -187,12 +186,10 @@ pub fn (mut self Context) secret_set(secret_ string) ! 
{ self.save()! } - - pub fn (mut self Context) path() !pathlib.Path { return self.path_ or { - path := '${os.home_dir()}/hero/context/${self.config.name}' - mut path := pathlib.get_dir(path: path,create: false)! + path2 := '${os.home_dir()}/hero/context/${self.config.name}' + mut path := pathlib.get_dir(path: path2,create: false)! path } } diff --git a/lib/core/base/session.v b/lib/core/base/session.v index 786a30ad..d31108bf 100644 --- a/lib/core/base/session.v +++ b/lib/core/base/session.v @@ -6,14 +6,14 @@ import freeflowuniverse.herolib.data.paramsparser import freeflowuniverse.herolib.data.dbfs import freeflowuniverse.herolib.core.logger import json -// import freeflowuniverse.herolib.core.pathlib +import freeflowuniverse.herolib.core.pathlib // import freeflowuniverse.herolib.develop.gittools // import freeflowuniverse.herolib.ui.console @[heap] pub struct Session { mut: - path_ ?pathlib.Path + path_ ?pathlib.Path logger_ ?logger.Logger pub mut: name string // unique id for session (session id), can be more than one per context @@ -95,8 +95,8 @@ pub fn (self Session) guid() string { pub fn (mut self Session) path() !pathlib.Path { return self.path_ or { - path := '${self.context.path().path}/${self.name}' - mut path := pathlib.get_dir(path: path,create: true)! + path2 := '${self.context.path()!.path}/${self.name}' + mut path := pathlib.get_dir(path: path2,create: true)! path } } diff --git a/lib/core/base/session_logger.v b/lib/core/base/session_logger.v index 9293b386..17f31083 100644 --- a/lib/core/base/session_logger.v +++ b/lib/core/base/session_logger.v @@ -2,7 +2,7 @@ module base import freeflowuniverse.herolib.core.logger -pub fn (session Session) logger() !logger.Logger { +pub fn (mut session Session) logger() !logger.Logger { return session.logger_ or { mut l2 := logger.new("${session.path()!.path}/logs")! l2 diff --git a/lib/data/encoderhero/decoder_test.v b/lib/data/encoderhero/decoder_test.v index e9fa6ffd..aa9e9732 100644 --- a/lib/data/encoderhero/decoder_test.v +++ b/lib/data/encoderhero/decoder_test.v @@ -110,7 +110,7 @@ const person = Person{ id: 1 name: 'Bob' age: 21 - birthday: time.new_time( + birthday: time.new( day: 12 month: 12 year: 2012 diff --git a/lib/data/encoderhero/encoder_test.v b/lib/data/encoderhero/encoder_test.v index 29e47de5..52ed38f1 100644 --- a/lib/data/encoderhero/encoder_test.v +++ b/lib/data/encoderhero/encoder_test.v @@ -52,7 +52,7 @@ const person = Person{ id: 1 name: 'Bob' age: 21 - birthday: time.new_time( + birthday: time.new( day: 12 month: 12 year: 2012