Compare commits

...

25 Commits

Author SHA1 Message Date
eb38bc5e60 bump version to 1.0.6 2025-02-08 14:07:15 +01:00
b0da6d1bd2 ... 2025-02-08 09:54:29 +03:00
1377953dcf ... 2025-02-07 16:40:44 +03:00
aa85172700 Merge branch 'development_hetzner' into development_webdav 2025-02-07 13:00:16 +03:00
eff269e911 Merge branch 'development' into development_hetzner 2025-02-07 12:55:30 +03:00
65ec6ee1a3 Merge branch 'development_hetzner' of https://github.com/freeflowuniverse/herolib into development_hetzner 2025-02-07 12:55:21 +03:00
a86b23b2e9 release 2025-02-07 12:52:49 +03:00
bcccd5f247 Merge branch 'development' 2025-02-07 12:52:29 +03:00
cb8c550ed1 chore: bump version to 1.0.5 2025-02-07 12:38:49 +03:00
5fc7019dcc chore: bump version to 1.0.4 2025-02-07 12:30:56 +03:00
8c9248fd94 ... 2025-02-07 12:25:15 +03:00
d1a5f1c268 ... 2025-02-07 12:22:23 +03:00
96bc5c9e5d ... 2025-02-07 12:17:34 +03:00
20927485d9 ... 2025-02-07 12:13:39 +03:00
a034708f21 ... 2025-02-07 12:08:25 +03:00
19a2577564 ... 2025-02-07 12:07:32 +03:00
e34d804dda ... 2025-02-07 11:59:52 +03:00
cd6c899661 ... 2025-02-07 11:45:28 +03:00
4a2753d32c refactor(webdav): remove lock_manager from webdav app
- Remove unused `lock_manager` from the `App` struct.
- Comment out the lock and unlock handlers.
- Improve `propfind` response XML generation.
- Fix path handling in `generate_response_element`.
- Update content type handling for files.
- Improve XML generation for resource responses.

Co-authored-by: mahmmoud.hassanein <mahmmoud.hassanein@gmail.com>
2025-02-06 15:37:21 +02:00
1c0535a8b4 WIP: Implement WebDAV server
- Add a WebDAV server implementation using the `vweb` framework.
- The server supports basic authentication, request logging, and essential WebDAV methods.
- Implements file operations, authentication, and request logging.

Co-authored-by: mahmmoud.hassanein <mahmmoud.hassanein@gmail.com>
2025-02-03 18:02:16 +02:00
a0c253fa05 refactor(tfgrid3deployer): simplify Hetzner node filtering
- Remove `get_hetzner_farm_ids` function.
- Directly filter nodes using the `features` field.

Co-authored-by: mahmmoud.hassanein <mahmmoud.hassanein@gmail.com>
2025-02-03 13:57:46 +02:00
17a67870ef WIP(tfgrid3deployer): Support deploying on hetzner nodes
- Add support for deploying VMs, ZDBs, and gateways on Hetzner nodes.
- Introduce `use_hetzner_node` flag to VM, ZDB, and WebName.
- Update `filter_nodes` to filter by Hetzner farm IDs if `on_hetzner` flag is set.
- Implement `get_hetzner_farm_ids` function (currently a placeholder).

Co-authored-by: mahmmoud.hassanein <mahmmoud.hassanein@gmail.com>
2025-02-02 18:25:04 +02:00
54cfd4c353 feat: add ip-api client
- Add a new ip-api client to the project.
- This client uses the ip-api.com API to get IP information.
- An example is provided in `examples/develop/ipapi`.

Co-authored-by: mahmmoud.hassanein <mahmmoud.hassanein@gmail.com>
2025-02-02 18:04:00 +02:00
717eb1e7d8 WIP: feat: add Hetzner deployment example
- Added a new example demonstrating deployment on Hetzner using the `tfgrid3deployer`.
- The example creates a VM and adds a webname.

Co-authored-by: mahmmoud.hassanein <mahmmoud.hassanein@gmail.com>
2025-02-02 17:29:53 +02:00
03f5885980 Merge pull request #38 from freeflowuniverse/development
dev main
2025-01-28 09:49:18 +03:00
54 changed files with 2147 additions and 694 deletions

View File

@@ -17,6 +17,7 @@ concurrency:
jobs:
deploy-documentation:
#if: startsWith(github.ref, 'refs/tags/')
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
@@ -26,12 +27,11 @@ jobs:
uses: actions/checkout@v4
- name: Setup Vlang
run: ./install_v.sh --github-actions
run: ./install_v.sh
- name: Generate documentation
run: |
./doc.vsh
# ls /home/runner/work/herolib/docs
./doc.vsh
find .
- name: Setup Pages

View File

@@ -5,11 +5,7 @@ permissions:
on:
push:
branches: ["main","development"]
tags:
- 'v*'
workflow_dispatch:
branches: ["main","development"]
jobs:
build:

2
.gitignore vendored
View File

@@ -7,6 +7,7 @@ vls.*
vls.log
node_modules/
docs/
vdocs/
photonwrapper.so
x
.env
@@ -25,7 +26,6 @@ dump.rdb
output/
*.db
.stellar
vdocs/
data.ms/
test_basic
cli/hero

View File

@@ -6,7 +6,7 @@
## hero install for users
```bash
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development_kristof10/install_hero.sh > /tmp/install_hero.sh
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh > /tmp/install_hero.sh
bash /tmp/install_hero.sh
```
@@ -16,7 +16,7 @@ this tool can be used to work with git, build books, play with hero AI, ...
## automated install for developers
```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/main/install_v.sh' > /tmp/install_v.sh
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
bash /tmp/install_v.sh --analyzer --herolib
#DONT FORGET TO START A NEW SHELL (otherwise the paths will not be set)
```

View File

@@ -31,7 +31,7 @@ fn do() ! {
mut cmd := Command{
name: 'hero'
description: 'Your HERO toolset.'
version: '2.0.6'
version: '1.0.6'
}
// herocmds.cmd_run_add_flags(mut cmd)
@@ -94,4 +94,4 @@ fn main() {
fn pre_func(cmd Command) ! {
herocmds.plbook_run(cmd)!
}
}

38
doc.vsh
View File

@@ -26,9 +26,9 @@ os.chdir(herolib_path) or {
panic('Failed to change directory to herolib: ${err}')
}
os.rmdir_all('_docs') or {}
os.rmdir_all('docs') or {}
os.rmdir_all('vdocs') or {}
os.mkdir_all('_docs') or {}
os.mkdir_all('docs') or {}
os.mkdir_all('vdocs') or {}
// Generate HTML documentation
println('Generating HTML documentation...')
@@ -42,13 +42,12 @@ os.chdir(abs_dir_of_script) or {
// Generate Markdown documentation
println('Generating Markdown documentation...')
os.rmdir_all('vdocs') or {}
// if os.system('v doc -m -no-color -f md -o ../vdocs/v/') != 0 {
// panic('Failed to generate V markdown documentation')
// }
if os.system('v doc -m -no-color -f md -o vdocs/herolib/') != 0 {
if os.system('v doc -m -no-color -f md -o vdocs/') != 0 {
panic('Failed to generate Hero markdown documentation')
}
@@ -62,4 +61,33 @@ $if !linux {
}
}
// Create Jekyll required files
println('Creating Jekyll files...')
os.mkdir_all('docs/assets/css') or {}
// Create style.scss
style_content := '---\n---\n\n@import "{{ site.theme }}";'
os.write_file('docs/assets/css/style.scss', style_content) or {
panic('Failed to create style.scss: ${err}')
}
// Create _config.yml
config_content := 'title: HeroLib Documentation
description: Documentation for the HeroLib project
theme: jekyll-theme-primer
baseurl: /herolib
exclude:
- Gemfile
- Gemfile.lock
- node_modules
- vendor/bundle/
- vendor/cache/
- vendor/gems/
- vendor/ruby/'
os.write_file('docs/_config.yml', config_content) or {
panic('Failed to create _config.yml: ${err}')
}
println('Documentation generation completed successfully!')

View File

@@ -1,11 +1,9 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.mailclient
import freeflowuniverse.herolib.clients. mailclient
//remove the previous one, otherwise the env variables are not read
mailclient.config_delete(name:"test")!
// remove the previous one, otherwise the env variables are not read
mailclient.config_delete(name: 'test')!
// env variables which need to be set are:
// - MAIL_FROM=...
@@ -14,11 +12,14 @@ mailclient.config_delete(name:"test")!
// - MAIL_SERVER=...
// - MAIL_USERNAME=...
mut client:= mailclient.get(name:"test")!
mut client := mailclient.get(name: 'test')!
println(client)
client.send(subject:'this is a test',to:'kristof@incubaid.com',body:'
client.send(
subject: 'this is a test'
to: 'kristof@incubaid.com'
body: '
this is my email content
')!
'
)!

View File

@@ -3,7 +3,6 @@
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.clients.postgresql_client
// Configure PostgreSQL client
heroscript := "
!!postgresql_client.configure
@@ -19,7 +18,7 @@ heroscript := "
postgresql_client.play(heroscript: heroscript)!
// Get the configured client
mut db_client := postgresql_client.get(name: "test")!
mut db_client := postgresql_client.get(name: 'test')!
// Check if test database exists, create if not
if !db_client.db_exists('test')! {
@@ -31,15 +30,14 @@ if !db_client.db_exists('test')! {
db_client.dbname = 'test'
// Create table if not exists
create_table_sql := "CREATE TABLE IF NOT EXISTS users (
create_table_sql := 'CREATE TABLE IF NOT EXISTS users (
id SERIAL PRIMARY KEY,
name VARCHAR(100) NOT NULL,
email VARCHAR(255) UNIQUE NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)"
)'
println('Creating table users if not exists...')
db_client.exec(create_table_sql)!
println('Database and table setup completed successfully!')

View File

@@ -12,8 +12,8 @@ mut:
}
mut person := Person{
name: 'Bob'
birthday: time.now()
name: 'Bob'
birthday: time.now()
}
heroscript := encoderhero.encode[Person](person)!
@@ -22,9 +22,8 @@ println(heroscript)
person2 := encoderhero.decode[Person](heroscript)!
println(person2)
//show that it doesn't matter which action & method is used
heroscript2:="!!a.b name:Bob age:20 birthday:'2025-02-06 09:57:30'"
// show that it doesn't matter which action & method is used
heroscript2 := "!!a.b name:Bob age:20 birthday:'2025-02-06 09:57:30'"
person3 := encoderhero.decode[Person](heroscript)!
println(person3)

View File

@@ -1,23 +1,22 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import json
enum JobTitle {
manager
executive
worker
manager
executive
worker
}
struct Employee {
mut:
name string
family string @[json: '-'] // this field will be skipped
age int
salary f32
title JobTitle @[json: 'ETitle'] // the key for this field will be 'ETitle', not 'title'
notes string @[omitempty] // the JSON property is not created if the string is equal to '' (an empty string).
// TODO: document @[raw]
name string
family string @[json: '-'] // this field will be skipped
age int
salary f32
title JobTitle @[json: 'ETitle'] // the key for this field will be 'ETitle', not 'title'
notes string @[omitempty] // the JSON property is not created if the string is equal to '' (an empty string).
// TODO: document @[raw]
}
x := Employee{'Peter', 'Begins', 28, 95000.5, .worker, ''}
@@ -34,4 +33,3 @@ println(y)
ss := json.encode(y)
println('JSON encoding of employee y: ${ss}')
assert ss == s

View File

@@ -18,8 +18,7 @@ heroscript := "
postgresql_client.play(heroscript: heroscript)!
// Get the configured client
mut db_client := postgresql_client.get(name: "test")!
mut db_client := postgresql_client.get(name: 'test')!
// Create a new location instance
mut loc := location.new(mut db_client, false) or { panic(err) }

View File

@@ -18,8 +18,7 @@ heroscript := "
postgresql_client.play(heroscript: heroscript)!
// Get the configured client
mut db_client := postgresql_client.get(name: "test")!
mut db_client := postgresql_client.get(name: 'test')!
// Create a new location instance
mut loc := location.new(mut db_client, false) or { panic(err) }

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.ipapi
import os
mut ip_api_client := ipapi.get()!
info := ip_api_client.get_ip_info('37.27.132.46')!
println('info: ${info}')

View File

@@ -2,14 +2,15 @@
import freeflowuniverse.herolib.installers.infra.gitea as gitea_installer
mut installer := gitea_installer.get(name: 'test')!
mut installer:= gitea_installer.get(name:'test')!
//if you want to configure using heroscript
gitea_installer.play(heroscript:"
// if you want to configure using heroscript
gitea_installer.play(
heroscript: "
!!gitea.configure name:test
passwd:'something'
domain: 'docs.info.com'
")!
"
)!
installer.start()!

View File

@@ -4,19 +4,38 @@ set -e
os_name="$(uname -s)"
arch_name="$(uname -m)"
version='1.0.6'
# Base URL for GitHub releases
base_url="https://github.com/freeflowuniverse/herolib/releases/download/v${version}"
# Select the URL based on the platform
if [[ "$os_name" == "Linux" && "$arch_name" == "x86_64" ]]; then
url="https://f003.backblazeb2.com/file/threefold/linux-i64/hero"
url="$base_url/hero-x86_64-unknown-linux-musl"
elif [[ "$os_name" == "Linux" && "$arch_name" == "aarch64" ]]; then
url="$base_url/hero-aarch64-unknown-linux-musl"
elif [[ "$os_name" == "Darwin" && "$arch_name" == "arm64" ]]; then
url="https://f003.backblazeb2.com/file/threefold/macos-arm64/hero"
# elif [[ "$os_name" == "Darwin" && "$arch_name" == "x86_64" ]]; then
# url="https://f003.backblazeb2.com/file/threefold/macos-i64/hero"
url="$base_url/hero-aarch64-apple-darwin"
elif [[ "$os_name" == "Darwin" && "$arch_name" == "x86_64" ]]; then
url="$base_url/hero-x86_64-apple-darwin"
else
echo "Unsupported platform."
echo "Unsupported platform: $os_name $arch_name"
exit 1
fi
# # Select the URL based on the platform
# if [[ "$os_name" == "Linux" && "$arch_name" == "x86_64" ]]; then
# url="https://f003.backblazeb2.com/file/threefold/linux-i64/hero"
# elif [[ "$os_name" == "Darwin" && "$arch_name" == "arm64" ]]; then
# url="https://f003.backblazeb2.com/file/threefold/macos-arm64/hero"
# # elif [[ "$os_name" == "Darwin" && "$arch_name" == "x86_64" ]]; then
# # url="https://f003.backblazeb2.com/file/threefold/macos-i64/hero"
# else
# echo "Unsupported platform."
# exit 1
# fi
# Check for existing hero installations
existing_hero=$(which hero 2>/dev/null || true)
if [ ! -z "$existing_hero" ]; then
@@ -122,4 +141,4 @@ if [ "$file_size" -ge 2 ]; then
else
echo "Downloaded file is less than 10 MB. Process aborted."
exit 1
fi
fi

View File

@@ -0,0 +1,8 @@
!!hero_code.generate_client
name:'ipapi'
classname:'IPApi'
singleton:0
default:1
hasconfig:1
reset:0

View File

@@ -0,0 +1,29 @@
module ipapi
import json
pub struct IPInfo {
pub:
query string
status string
country string
country_code string @[json: 'countryCode']
region string
region_name string @[json: 'regionName']
city string
zip string
lat f32
lon f32
timezone string
isp string
org string
as string
}
pub fn (mut a IPApi) get_ip_info(ip string) !IPInfo {
mut conn := a.connection()!
res := conn.get_json(prefix: 'json/${ip}')!
info := json.decode(IPInfo, res)!
return info
}

View File

@@ -0,0 +1,102 @@
module ipapi
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
__global (
ipapi_global map[string]&IPApi
ipapi_default string
)
/////////FACTORY
@[params]
pub struct ArgsGet {
pub mut:
name string
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = ipapi_default
}
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&IPApi {
mut args := args_get(args_)
if args.name !in ipapi_global {
if args.name == 'default' {
if !config_exists(args) {
if default {
config_save(args)!
}
}
config_load(args)!
}
}
return ipapi_global[args.name] or {
println(ipapi_global)
panic('could not get config for ipapi with name:${args.name}')
}
}
fn config_exists(args_ ArgsGet) bool {
mut args := args_get(args_)
mut context := base.context() or { panic('bug') }
return context.hero_config_exists('ipapi', args.name)
}
fn config_load(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
mut heroscript := context.hero_config_get('ipapi', args.name)!
play(heroscript: heroscript)!
}
fn config_save(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
context.hero_config_set('ipapi', args.name, heroscript_default()!)!
}
fn set(o IPApi) ! {
mut o2 := obj_init(o)!
ipapi_global[o.name] = &o2
ipapi_default = o.name
}
@[params]
pub struct PlayArgs {
pub mut:
heroscript string // if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
}
pub fn play(args_ PlayArgs) ! {
mut args := args_
if args.heroscript == '' {
args.heroscript = heroscript_default()!
}
mut plbook := args.plbook or { playbook.new(text: args.heroscript)! }
mut install_actions := plbook.find(filter: 'ipapi.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
mut p := install_action.params
cfg_play(p)!
}
}
}
// switch instance to be used for ipapi
pub fn switch(name string) {
ipapi_default = name
}

View File

@@ -0,0 +1,58 @@
module ipapi
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.core.httpconnection
import os
pub const version = '1.14.3'
const singleton = false
const default = true
// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
pub fn heroscript_default() !string {
heroscript := "
!!ipapi.configure
name:'default'
"
return heroscript
}
// THIS THE THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@[heap]
pub struct IPApi {
pub mut:
name string = 'default'
conn ?&httpconnection.HTTPConnection @[str: skip]
}
fn cfg_play(p paramsparser.Params) ! {
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
mut mycfg := IPApi{
name: p.get_default('name', 'default')!
}
set(mycfg)!
}
fn obj_init(obj_ IPApi) !IPApi {
// never call get here, only thing we can do here is work on object itself
mut obj := obj_
return obj
}
pub fn (mut client IPApi) connection() !&httpconnection.HTTPConnection {
mut c := client.conn or {
mut c2 := httpconnection.new(
name: 'ipapi_${client.name}'
url: 'http://ip-api.com'
cache: false
retry: 20
)!
c2
}
client.conn = c
return c
}

View File

@@ -0,0 +1,30 @@
# ipapi
To get started
```vlang
import freeflowuniverse.herolib.clients. ipapi
mut client:= ipapi.get()!
client...
```
## example heroscript
```hero
!!ipapi.configure
secret: '...'
host: 'localhost'
port: 8888
```

View File

@@ -1,127 +1,112 @@
module mailclient
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
__global (
mailclient_global map[string]&MailClient
mailclient_default string
mailclient_global map[string]&MailClient
mailclient_default string
)
/////////FACTORY
@[params]
pub struct ArgsGet{
pub struct ArgsGet {
pub mut:
name string
name string
}
fn args_get (args_ ArgsGet) ArgsGet {
mut args:=args_
if args.name == ""{
args.name = mailclient_default
}
if args.name == ""{
args.name = "default"
}
return args
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = mailclient_default
}
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&MailClient {
mut args := args_get(args_)
if !(args.name in mailclient_global) {
if ! config_exists(args){
config_save(args)!
}
config_load(args)!
}
return mailclient_global[args.name] or {
println(mailclient_global)
//bug if we get here because should be in globals
panic("could not get config for mailclient with name, is bug:${args.name}")
}
pub fn get(args_ ArgsGet) !&MailClient {
mut args := args_get(args_)
if args.name !in mailclient_global {
if !config_exists(args) {
config_save(args)!
}
config_load(args)!
}
return mailclient_global[args.name] or {
println(mailclient_global)
// bug if we get here because should be in globals
panic('could not get config for mailclient with name, is bug:${args.name}')
}
}
pub fn config_exists(args_ ArgsGet) bool {
mut args := args_get(args_)
mut context:=base.context() or { panic("bug") }
return context.hero_config_exists("mailclient",args.name)
mut args := args_get(args_)
mut context := base.context() or { panic('bug') }
return context.hero_config_exists('mailclient', args.name)
}
pub fn config_load(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context:=base.context()!
mut heroscript := context.hero_config_get("mailclient",args.name)!
play(heroscript:heroscript)!
mut args := args_get(args_)
mut context := base.context()!
mut heroscript := context.hero_config_get('mailclient', args.name)!
play(heroscript: heroscript)!
}
pub fn config_save(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context:=base.context()!
context.hero_config_set("mailclient",args.name,heroscript_default(instance:args.name)!)!
mut args := args_get(args_)
mut context := base.context()!
context.hero_config_set('mailclient', args.name, heroscript_default(instance: args.name)!)!
}
pub fn config_delete(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context:=base.context()!
context.hero_config_delete("mailclient",args.name)!
mut args := args_get(args_)
mut context := base.context()!
context.hero_config_delete('mailclient', args.name)!
}
fn set(o MailClient)! {
mut o2:=obj_init(o)!
mailclient_global[o.name] = &o2
mailclient_default = o.name
fn set(o MailClient) ! {
mut o2 := obj_init(o)!
mailclient_global[o.name] = &o2
mailclient_default = o.name
}
@[params]
pub struct PlayArgs {
pub mut:
heroscript string //if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
heroscript string // if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
}
pub fn play(args_ PlayArgs) ! {
mut args:=args_
if args.heroscript == "" {
args.heroscript = heroscript_default()!
}
mut plbook := args.plbook or {
playbook.new(text: args.heroscript)!
}
mut install_actions := plbook.find(filter: 'mailclient.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
mut p := install_action.params
cfg_play(p)!
}
}
mut args := args_
if args.heroscript == '' {
args.heroscript = heroscript_default()!
}
mut plbook := args.plbook or { playbook.new(text: args.heroscript)! }
mut install_actions := plbook.find(filter: 'mailclient.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
mut p := install_action.params
cfg_play(p)!
}
}
}
//switch instance to be used for mailclient
// switch instance to be used for mailclient
pub fn switch(name string) {
mailclient_default = name
mailclient_default = name
}
//helpers
// helpers
@[params]
pub struct DefaultConfigArgs{
instance string = 'default'
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -1,4 +1,5 @@
module mailclient
import freeflowuniverse.herolib.data.paramsparser
import os
@@ -6,7 +7,6 @@ pub const version = '0.0.0'
const singleton = false
const default = true
pub fn heroscript_default(args DefaultConfigArgs) !string {
mail_from := os.getenv_opt('MAIL_FROM') or { 'info@example.com' }
mail_password := os.getenv_opt('MAIL_PASSWORD') or { 'secretpassword' }
@@ -23,7 +23,7 @@ pub fn heroscript_default(args DefaultConfigArgs) !string {
mail_username: '${mail_username}'
"
return heroscript
return heroscript
}
@[heap]
@@ -40,22 +40,18 @@ pub mut:
}
fn cfg_play(p paramsparser.Params) ! {
mut mycfg := MailClient{
mut mycfg := MailClient{
name: p.get_default('name', 'default')!
mail_from: p.get('mail_from')!
mail_password: p.get('mail_password')!
mail_port: p.get_int_default('mail_port', 465)!
mail_server: p.get('mail_server')!
mail_username: p.get('mail_username')!
}
set(mycfg)!
}
fn obj_init(obj_ MailClient)!MailClient{
mut obj:=obj_
return obj
}
set(mycfg)!
}
fn obj_init(obj_ MailClient) !MailClient {
mut obj := obj_
return obj
}

View File

@@ -144,7 +144,6 @@ pub fn (mut self Context) hero_config_delete(cat string, name string) ! {
config_file.delete()!
}
pub fn (mut self Context) hero_config_exists(cat string, name string) bool {
path := '${os.home_dir()}/hero/context/${self.config.name}/${cat}__${name}.yaml'
return os.exists(path)

View File

@@ -53,7 +53,6 @@ pub fn cmd_docusaurus(mut cmdroot Command) {
description: 'update your environment the template and the repo you are working on (git pull).'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
@@ -84,29 +83,29 @@ fn cmd_docusaurus_execute(cmd Command) ! {
// exit(1)
// }
mut docs := docusaurus.new(update:update)!
mut docs := docusaurus.new(update: update)!
if build {
// Create a new docusaurus site
_ := docs.build(
url: url
update:update
url: url
update: update
)!
}
if builddev {
// Create a new docusaurus site
_ := docs.build_dev(
url: url
update:update
url: url
update: update
)!
}
if dev {
// Create a new docusaurus site
_ := docs.dev(
url: url
update:update
url: url
update: update
)!
}
}

View File

@@ -10,16 +10,16 @@ import freeflowuniverse.herolib.clients.postgresql_client
// LocationDB handles all database operations for locations
pub struct LocationDB {
pub mut:
db pg.DB
db pg.DB
db_client postgresql_client.PostgresClient
tmp_dir pathlib.Path
db_dir pathlib.Path
tmp_dir pathlib.Path
db_dir pathlib.Path
}
// new_location_db creates a new LocationDB instance
pub fn new_location_db(mut db_client postgresql_client.PostgresClient, reset bool) !LocationDB {
mut db_dir := pathlib.get_dir(path:'${os.home_dir()}/hero/var/db/location.db',create: true)!
mut db_dir := pathlib.get_dir(path: '${os.home_dir()}/hero/var/db/location.db', create: true)!
// Create locations database if it doesn't exist
if !db_client.db_exists('locations')! {
db_client.db_create('locations')!
@@ -27,15 +27,15 @@ pub fn new_location_db(mut db_client postgresql_client.PostgresClient, reset boo
// Switch to locations database
db_client.dbname = 'locations'
// Get the underlying pg.DB connection
db := db_client.db()!
mut loc_db := LocationDB{
db: db
db: db
db_client: db_client
tmp_dir: pathlib.get_dir(path: '/tmp/location/',create: true)!
db_dir: db_dir
tmp_dir: pathlib.get_dir(path: '/tmp/location/', create: true)!
db_dir: db_dir
}
loc_db.init_tables(reset)!
return loc_db
@@ -50,7 +50,7 @@ fn (mut l LocationDB) init_tables(reset bool) ! {
drop table Country
}!
}
sql l.db {
create table Country
create table City

View File

@@ -5,7 +5,7 @@ import freeflowuniverse.herolib.clients.postgresql_client
// Location represents the main API for location operations
pub struct Location {
mut:
db LocationDB
db LocationDB
db_client postgresql_client.PostgresClient
}
@@ -13,7 +13,7 @@ mut:
pub fn new(mut db_client postgresql_client.PostgresClient, reset bool) !Location {
db := new_location_db(mut db_client, reset)!
return Location{
db: db
db: db
db_client: db_client
}
}
@@ -28,7 +28,7 @@ pub fn (mut l Location) download_and_import(redownload bool) ! {
fn main() ! {
// Configure and get PostgreSQL client
heroscript := "
!!postgresql_client.configure
!!postgresql_client.configure
name:'test'
user: 'postgres'
port: 5432

View File

@@ -1,4 +1,3 @@
module location
//https://www.geonames.org/export/codes.html
// https://www.geonames.org/export/codes.html

View File

@@ -7,27 +7,24 @@ import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
const (
geonames_url = 'https://download.geonames.org/export/dump'
)
const geonames_url = 'https://download.geonames.org/export/dump'
// download_and_import_data downloads and imports GeoNames data
pub fn (mut l LocationDB) download_and_import_data(redownload bool) ! {
// Download country info
if redownload{
if redownload {
l.reset_import_dates()!
}
country_file := osal.download(
url: '${geonames_url}/countryInfo.txt'
dest: '${l.tmp_dir.path}/country.txt'
url: '${geonames_url}/countryInfo.txt'
dest: '${l.tmp_dir.path}/country.txt'
minsize_kb: 10
)!
l.import_country_data(country_file.path)!
l.import_cities()!
}
// reset_import_dates sets all country import_dates to 0
@@ -41,9 +38,9 @@ pub fn (mut l LocationDB) reset_import_dates() ! {
// should_import_cities checks if a city should be imported based on its last import date on country level
fn (mut l LocationDB) should_import_cities(iso2 string) !bool {
console.print_debug('Checking if should import country: ${iso2}')
country := sql l.db {
select from Country where iso2 == "${iso2}" limit 1
select from Country where iso2 == '${iso2}' limit 1
} or { []Country{} }
console.print_debug('SQL query result: ${country.len} records found')
@@ -58,11 +55,11 @@ fn (mut l LocationDB) should_import_cities(iso2 string) !bool {
one_month := i64(30 * 24 * 60 * 60) // 30 days in seconds
last_import := country[0].import_date
time_since_import := now - last_import
console.print_debug('Last import: ${last_import}, Time since import: ${time_since_import} seconds (${time_since_import/86400} days)')
should_import := (time_since_import > one_month) || (last_import == 0)
console.print_debug('Last import: ${last_import}, Time since import: ${time_since_import} seconds (${time_since_import / 86400} days)')
should_import := time_since_import > one_month || last_import == 0
console.print_debug('Should import ${iso2}: ${should_import}')
return should_import
}
@@ -70,10 +67,10 @@ fn (mut l LocationDB) should_import_cities(iso2 string) !bool {
fn (mut l LocationDB) import_country_data(filepath string) ! {
console.print_header('Starting import from: ${filepath}')
l.db.exec('BEGIN TRANSACTION')!
mut file := os.open(filepath) or {
mut file := os.open(filepath) or {
console.print_stderr('Failed to open country file: ${err}')
return err
return err
}
defer { file.close() }
@@ -98,39 +95,34 @@ fn (mut l LocationDB) import_country_data(filepath string) ! {
} or { []Country{} }
country := Country{
iso2: iso2
iso3: fields[1]
name: fields[4]
continent: fields[8]
iso2: iso2
iso3: fields[1]
name: fields[4]
continent: fields[8]
population: fields[7].i64()
timezone: fields[17]
timezone: fields[17]
}
if existing_country.len > 0 {
// Update existing country
sql l.db {
update Country set
iso3 = country.iso3,
name = country.name,
continent = country.continent,
population = country.population,
timezone = country.timezone
where iso2 == iso2
update Country set iso3 = country.iso3, name = country.name, continent = country.continent,
population = country.population, timezone = country.timezone where iso2 == iso2
}!
//console.print_debug("Updated country: ${country}")
// console.print_debug("Updated country: ${country}")
} else {
// Insert new country
sql l.db {
insert country into Country
}!
//console.print_debug("Inserted country: ${country}")
// console.print_debug("Inserted country: ${country}")
}
count++
if count % 10 == 0 {
console.print_header('Processed ${count} countries')
}
}
l.db.exec('COMMIT')!
console.print_header('Finished importing countries. Total records: ${count}')
}
@@ -158,13 +150,13 @@ fn (mut l LocationDB) import_cities() ! {
// Download and process cities for this country
cities_file := osal.download(
url: '${geonames_url}/${iso2}.zip'
dest: '${l.tmp_dir.path}/${iso2}.zip'
url: '${geonames_url}/${iso2}.zip'
dest: '${l.tmp_dir.path}/${iso2}.zip'
expand_file: '${l.tmp_dir.path}/${iso2}'
minsize_kb: 2
minsize_kb: 2
)!
l.import_city_data("${l.tmp_dir.path}/${iso2}/${iso2}.txt")!
l.import_city_data('${l.tmp_dir.path}/${iso2}/${iso2}.txt')!
// Update the country's import date after successful city import
now := time.now().unix()
@@ -193,36 +185,34 @@ fn (mut l LocationDB) import_city_data(filepath string) ! {
// country code : ISO-3166 2-letter country code, 2 characters
// cc2 : alternate country codes, comma separated, ISO-3166 2-letter country code, 200 characters
// admin1 code : fipscode (subject to change to iso code), see exceptions below, see file admin1Codes.txt for display names of this code; varchar(20)
// admin2 code : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80)
// admin2 code : code for the second administrative division, a county in the US, see file admin2Codes.txt; varchar(80)
// admin3 code : code for third level administrative division, varchar(20)
// admin4 code : code for fourth level administrative division, varchar(20)
// population : bigint (8 byte int)
// population : bigint (8 byte int)
// elevation : in meters, integer
// dem : digital elevation model, srtm3 or gtopo30, average elevation of 3''x3'' (ca 90mx90m) or 30''x30'' (ca 900mx900m) area in meters, integer. srtm processed by cgiar/ciat.
// timezone : the iana timezone id (see file timeZone.txt) varchar(40)
// modification date : date of last modification in yyyy-MM-dd format
l.db.exec('BEGIN TRANSACTION')!
mut file := os.open(filepath) or {
mut file := os.open(filepath) or {
console.print_stderr('Failed to open city file: ${err}')
return err
return err
}
defer { file.close() }
mut reader := io.new_buffered_reader(reader:file)
mut reader := io.new_buffered_reader(reader: file)
defer { reader.free() }
mut count := 0
console.print_header('Start import ${filepath}')
for {
line := reader.read_line() or {
//console.print_debug('End of file reached')
break
line := reader.read_line() or {
// console.print_debug('End of file reached')
break
}
//console.print_debug(line)
// console.print_debug(line)
fields := line.split('\t')
if fields.len < 12 { // Need at least 12 fields for required data
console.print_stderr('fields < 12: ${line}')
@@ -234,65 +224,52 @@ fn (mut l LocationDB) import_city_data(filepath string) ! {
name := fields[1]
ascii_name := texttools.name_fix(fields[2])
country_iso2 := fields[8].to_upper()
// Check if city exists
existing_city := sql l.db {
select from City where id == geoname_id
} or { []City{} }
city := City{
id: geoname_id
name: name
ascii_name: ascii_name
country_iso2: country_iso2
postal_code: '' // Not provided in this format
state_name: '' // Will need separate admin codes file
state_code: fields[10]
county_name: ''
county_code: fields[11]
community_name: ''
community_code: ''
latitude: fields[4].f64()
longitude: fields[5].f64()
accuracy: 4 // Using geonameid, so accuracy is 4
population: fields[14].i64()
timezone: fields[17]
feature_class: fields[6]
feature_code: fields[7]
id: geoname_id
name: name
ascii_name: ascii_name
country_iso2: country_iso2
postal_code: '' // Not provided in this format
state_name: '' // Will need separate admin codes file
state_code: fields[10]
county_name: ''
county_code: fields[11]
community_name: ''
community_code: ''
latitude: fields[4].f64()
longitude: fields[5].f64()
accuracy: 4 // Using geonameid, so accuracy is 4
population: fields[14].i64()
timezone: fields[17]
feature_class: fields[6]
feature_code: fields[7]
search_priority: 0 // Default priority
}
if existing_city.len > 0 {
// Update existing city
sql l.db {
update City set
name = city.name,
ascii_name = city.ascii_name,
country_iso2 = city.country_iso2,
postal_code = city.postal_code,
state_name = city.state_name,
state_code = city.state_code,
county_name = city.county_name,
county_code = city.county_code,
community_name = city.community_name,
community_code = city.community_code,
latitude = city.latitude,
longitude = city.longitude,
accuracy = city.accuracy,
population = city.population,
timezone = city.timezone,
feature_class = city.feature_class,
feature_code = city.feature_code,
search_priority = city.search_priority
where id == geoname_id
update City set name = city.name, ascii_name = city.ascii_name, country_iso2 = city.country_iso2,
postal_code = city.postal_code, state_name = city.state_name, state_code = city.state_code,
county_name = city.county_name, county_code = city.county_code, community_name = city.community_name,
community_code = city.community_code, latitude = city.latitude, longitude = city.longitude,
accuracy = city.accuracy, population = city.population, timezone = city.timezone,
feature_class = city.feature_class, feature_code = city.feature_code,
search_priority = city.search_priority where id == geoname_id
}!
//console.print_debug("Updated city: ${city}")
// console.print_debug("Updated city: ${city}")
} else {
// Insert new city
sql l.db {
insert city into City
}!
//console.print_debug("Inserted city: ${city}")
// console.print_debug("Inserted city: ${city}")
}
count++
// if count % 1000 == 0 {
@@ -300,8 +277,8 @@ fn (mut l LocationDB) import_city_data(filepath string) ! {
// }
}
console.print_debug( 'Processed ${count} cities')
console.print_debug('Processed ${count} cities')
l.db.exec('COMMIT')!
console.print_header('Finished importing cities for ${filepath}. Total records: ${count}')
}

View File

@@ -2,44 +2,44 @@ module location
pub struct Country {
pub:
iso2 string @[primary; sql: 'iso2'; max_len: 2; unique; index]
name string @[required; unique; index]
iso3 string @[required; sql: 'iso3'; max_len: 3; unique; index]
iso2 string @[index; max_len: 2; primary; sql: 'iso2'; unique]
name string @[index; required; unique]
iso3 string @[index; max_len: 3; required; sql: 'iso3'; unique]
continent string @[max_len: 2]
population i64
timezone string @[max_len: 40]
import_date i64 // Epoch timestamp of last import
import_date i64 // Epoch timestamp of last import
}
pub struct City {
pub:
id int @[unique; index]
name string @[required; max_len: 200; index]
ascii_name string @[required; max_len: 200; index] // Normalized name without special characters
country_iso2 string @[required; fkey: 'Country.iso2']
postal_code string @[max_len: 20; index ] //postal code
state_name string @[max_len: 100] // State/Province name
state_code string @[max_len: 20] // State/Province code
county_name string @[max_len: 100]
county_code string @[max_len: 20]
community_name string @[max_len: 100]
community_code string @[max_len: 20]
latitude f64 @[index: 'idx_coords']
longitude f64 @[index: 'idx_coords']
population i64
timezone string @[max_len: 40]
feature_class string @[max_len: 1] // For filtering (P for populated places)
feature_code string @[max_len: 10] // Detailed type (PPL, PPLA, etc.)
id int @[index; unique]
name string @[index; max_len: 200; required]
ascii_name string @[index; max_len: 200; required] // Normalized name without special characters
country_iso2 string @[fkey: 'Country.iso2'; required]
postal_code string @[index; max_len: 20] // postal code
state_name string @[max_len: 100] // State/Province name
state_code string @[max_len: 20] // State/Province code
county_name string @[max_len: 100]
county_code string @[max_len: 20]
community_name string @[max_len: 100]
community_code string @[max_len: 20]
latitude f64 @[index: 'idx_coords']
longitude f64 @[index: 'idx_coords']
population i64
timezone string @[max_len: 40]
feature_class string @[max_len: 1] // For filtering (P for populated places)
feature_code string @[max_len: 10] // Detailed type (PPL, PPLA, etc.)
search_priority int
accuracy i16 = 1 //1=estimated, 4=geonameid, 6=centroid of addresses or shape
accuracy i16 = 1 // 1=estimated, 4=geonameid, 6=centroid of addresses or shape
}
pub struct AlternateName {
pub:
id int @[primary; sql: serial]
city_id int @[required; fkey: 'City.id']
name string @[required; max_len: 200; index]
language_code string @[max_len: 2]
id int @[primary; sql: serial]
city_id int @[fkey: 'City.id'; required]
name string @[index; max_len: 200; required]
language_code string @[max_len: 2]
is_preferred bool
is_short bool
}
@@ -47,9 +47,9 @@ pub:
// SearchResult represents a location search result with combined city and country info
pub struct SearchResult {
pub:
city City
country Country
similarity f64 // Search similarity score
city City
country Country
similarity f64 // Search similarity score
}
// Coordinates represents a geographic point

View File

@@ -51,11 +51,11 @@ import db.pg
// where_clause := if query_conditions.len > 0 { 'WHERE ' + query_conditions.join(' AND ') } else { '' }
// query := '
// SELECT c.*, co.*
// SELECT c.*, co.*
// FROM City c
// JOIN Country co ON c.country_iso2 = co.iso2
// ${where_clause}
// ORDER BY c.search_priority DESC, c.population DESC
// ORDER BY c.search_priority DESC, c.population DESC
// LIMIT ${opts.limit}
// '
@@ -111,8 +111,8 @@ import db.pg
// query := "
// WITH distances AS (
// SELECT c.*, co.*,
// (6371 * acos(cos(radians($1)) * cos(radians(latitude)) *
// cos(radians(longitude) - radians($2)) + sin(radians($1)) *
// (6371 * acos(cos(radians($1)) * cos(radians(latitude)) *
// cos(radians(longitude) - radians($2)) + sin(radians($1)) *
// sin(radians(latitude)))) AS distance
// FROM City c
// JOIN Country co ON c.country_iso2 = co.iso2
@@ -122,7 +122,7 @@ import db.pg
// ORDER BY distance
// LIMIT $4
// "
// params := [
// opts.coordinates.latitude.str(),
// opts.coordinates.longitude.str(),

View File

@@ -97,13 +97,13 @@ fn (mut repo GitRepo) load_tags() ! {
tags_result := repo.exec('git tag --list') or {
return error('Failed to list tags: ${err}. Please ensure git is installed and repository is accessible.')
}
//println(tags_result)
// println(tags_result)
for line in tags_result.split('\n') {
line_trimmed := line.trim_space()
if line_trimmed != '' {
parts := line_trimmed.split(' ')
if parts.len < 2 {
//console.print_debug('Skipping malformed tag line: ${line_trimmed}')
// console.print_debug('Skipping malformed tag line: ${line_trimmed}')
continue
}
commit_hash := parts[0].trim_space()

View File

@@ -25,22 +25,21 @@ fn installed() !bool {
}
fn install() ! {
console.print_header('install gitea')
baseurl:="https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}"
baseurl := 'https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-${version}'
mut url := ''
if core.is_linux_arm()! {
//https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-linux-arm64.xz
if core.is_linux_arm()! {
// https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-linux-arm64.xz
url = '${baseurl}-linux-arm64.xz'
} else if core.is_linux_intel()! {
// https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-linux-amd64.xz
url = '${baseurl}-linux-amd64.xz'
} else if core.is_osx_arm()! {
//https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-darwin-10.12-arm64.xz
// https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-darwin-10.12-arm64.xz
url = '${baseurl}-darwin-10.12-arm64.xz'
} else if core.is_osx_intel()! {
//https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-darwin-10.12-amd64.xz
// https://github.com/go-gitea/gitea/releases/download/v1.23.2/gitea-1.23.2-darwin-10.12-amd64.xz
url = '${baseurl}-darwin-10.12-amd64.xz'
} else {
return error('unsported platform')
@@ -104,68 +103,66 @@ fn startupcmd() ![]zinit.ZProcessNewArgs {
}
return res
// mut res := []zinit.ZProcessNewArgs{}
// cfg := get()!
// res << zinit.ZProcessNewArgs{
// name: 'gitea'
// // cmd: 'GITEA_WORK_DIR=${cfg.path} sudo -u git /var/lib/git/gitea web -c /etc/gitea_app.ini'
// cmd: '
// # Variables
// GITEA_USER="${cfg.run_user}"
// GITEA_HOME="${cfg.path}"
// GITEA_BINARY="/usr/local/bin/gitea"
// GITEA_CONFIG="/etc/gitea_app.ini"
// GITEA_DATA_PATH="\$GITEA_HOME/data"
// GITEA_CUSTOM_PATH="\$GITEA_HOME/custom"
// GITEA_LOG_PATH="\$GITEA_HOME/log"
// mut res := []zinit.ZProcessNewArgs{}
// cfg := get()!
// res << zinit.ZProcessNewArgs{
// name: 'gitea'
// // cmd: 'GITEA_WORK_DIR=${cfg.path} sudo -u git /var/lib/git/gitea web -c /etc/gitea_app.ini'
// cmd: '
// # Ensure the script is run as root
// if [[ \$EUID -ne 0 ]]; then
// echo "This script must be run as root."
// exit 1
// fi
// # Variables
// GITEA_USER="${cfg.run_user}"
// GITEA_HOME="${cfg.path}"
// GITEA_BINARY="/usr/local/bin/gitea"
// GITEA_CONFIG="/etc/gitea_app.ini"
// GITEA_DATA_PATH="\$GITEA_HOME/data"
// GITEA_CUSTOM_PATH="\$GITEA_HOME/custom"
// GITEA_LOG_PATH="\$GITEA_HOME/log"
// echo "Setting up Gitea..."
// # Ensure the script is run as root
// if [[ \$EUID -ne 0 ]]; then
// echo "This script must be run as root."
// exit 1
// fi
// # Create Gitea user if it doesn\'t exist
// if id -u "\$GITEA_USER" &>/dev/null; then
// echo "User \$GITEA_USER already exists."
// else
// echo "Creating Gitea user..."
// if ! sudo adduser --system --shell /bin/bash --group --disabled-password --home "/var/lib/\$GITEA_USER" "\$GITEA_USER"; then
// echo "Failed to create user \$GITEA_USER."
// exit 1
// fi
// fi
// echo "Setting up Gitea..."
// # Create necessary directories
// echo "Creating directories..."
// mkdir -p "\$GITEA_DATA_PATH" "\$GITEA_CUSTOM_PATH" "\$GITEA_LOG_PATH"
// chown -R "\$GITEA_USER:\$GITEA_USER" "\$GITEA_HOME"
// chmod -R 750 "\$GITEA_HOME"
// # Create Gitea user if it doesn\'t exist
// if id -u "\$GITEA_USER" &>/dev/null; then
// echo "User \$GITEA_USER already exists."
// else
// echo "Creating Gitea user..."
// if ! sudo adduser --system --shell /bin/bash --group --disabled-password --home "/var/lib/\$GITEA_USER" "\$GITEA_USER"; then
// echo "Failed to create user \$GITEA_USER."
// exit 1
// fi
// fi
// chown "\$GITEA_USER:\$GITEA_USER" "\$GITEA_CONFIG"
// chmod 640 "\$GITEA_CONFIG"
// # Create necessary directories
// echo "Creating directories..."
// mkdir -p "\$GITEA_DATA_PATH" "\$GITEA_CUSTOM_PATH" "\$GITEA_LOG_PATH"
// chown -R "\$GITEA_USER:\$GITEA_USER" "\$GITEA_HOME"
// chmod -R 750 "\$GITEA_HOME"
// chown "\$GITEA_USER:\$GITEA_USER" "\$GITEA_CONFIG"
// chmod 640 "\$GITEA_CONFIG"
// GITEA_WORK_DIR=\$GITEA_HOME sudo -u git gitea web -c \$GITEA_CONFIG
// '
// workdir: cfg.path
// }
// res << zinit.ZProcessNewArgs{
// name: 'restart_gitea'
// cmd: 'sleep 30 && zinit restart gitea && exit 1'
// after: ['gitea']
// oneshot: true
// workdir: cfg.path
// }
// return res
// GITEA_WORK_DIR=\$GITEA_HOME sudo -u git gitea web -c \$GITEA_CONFIG
// '
// workdir: cfg.path
// }
// res << zinit.ZProcessNewArgs{
// name: 'restart_gitea'
// cmd: 'sleep 30 && zinit restart gitea && exit 1'
// after: ['gitea']
// oneshot: true
// workdir: cfg.path
// }
// return res
}
fn running() !bool {
//TODO: extend with proper gitea client
// TODO: extend with proper gitea client
res := os.execute('curl -fsSL http://localhost:3000 || exit 1')
return res.exit_code == 0
}

View File

@@ -4,287 +4,277 @@ import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.osal.zinit
import time
__global (
gitea_global map[string]&GiteaServer
gitea_default string
gitea_global map[string]&GiteaServer
gitea_default string
)
/////////FACTORY
@[params]
pub struct ArgsGet{
pub struct ArgsGet {
pub mut:
name string
name string
}
fn args_get (args_ ArgsGet) ArgsGet {
mut args:=args_
if args.name == ""{
args.name = "default"
}
return args
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&GiteaServer {
mut context:=base.context()!
mut args := args_get(args_)
mut obj := GiteaServer{}
if !(args.name in gitea_global) {
if ! exists(args)!{
set(obj)!
}else{
heroscript := context.hero_config_get("gitea",args.name)!
mut obj2:=heroscript_loads(heroscript)!
set_in_mem(obj2)!
}
}
return gitea_global[args.name] or {
println(gitea_global)
//bug if we get here because should be in globals
panic("could not get config for gitea with name, is bug:${args.name}")
}
pub fn get(args_ ArgsGet) !&GiteaServer {
mut context := base.context()!
mut args := args_get(args_)
mut obj := GiteaServer{}
if args.name !in gitea_global {
if !exists(args)! {
set(obj)!
} else {
heroscript := context.hero_config_get('gitea', args.name)!
mut obj2 := heroscript_loads(heroscript)!
set_in_mem(obj2)!
}
}
return gitea_global[args.name] or {
println(gitea_global)
// bug if we get here because should be in globals
panic('could not get config for gitea with name, is bug:${args.name}')
}
}
//register the config for the future
pub fn set(o GiteaServer)! {
set_in_mem(o)!
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set("gitea", o.name, heroscript)!
// register the config for the future
pub fn set(o GiteaServer) ! {
set_in_mem(o)!
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('gitea', o.name, heroscript)!
}
//does the config exists?
// does the config exist?
pub fn exists(args_ ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists("gitea", args.name)
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('gitea', args.name)
}
pub fn delete(args_ ArgsGet)! {
mut args := args_get(args_)
mut context:=base.context()!
context.hero_config_delete("gitea",args.name)!
if args.name in gitea_global {
//del gitea_global[args.name]
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
context.hero_config_delete('gitea', args.name)!
if args.name in gitea_global {
// del gitea_global[args.name]
}
}
//only sets in mem, does not set as config
fn set_in_mem(o GiteaServer)! {
mut o2:=obj_init(o)!
gitea_global[o.name] = &o2
gitea_default = o.name
// only sets in mem, does not set as config
fn set_in_mem(o GiteaServer) ! {
mut o2 := obj_init(o)!
gitea_global[o.name] = &o2
gitea_default = o.name
}
@[params]
pub struct PlayArgs {
pub mut:
heroscript string //if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
heroscript string // if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
}
pub fn play(args_ PlayArgs) ! {
mut args:=args_
mut args := args_
mut plbook := args.plbook or {
playbook.new(text: args.heroscript)!
}
mut install_actions := plbook.find(filter: 'gitea.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
heroscript:=install_action.heroscript()
mut obj:=heroscript_loads(heroscript)!
set(obj)!
}
}
mut plbook := args.plbook or { playbook.new(text: args.heroscript)! }
mut other_actions := plbook.find(filter: 'gitea.')!
for other_action in other_actions {
if other_action.name in ["destroy","install","build"]{
mut p := other_action.params
reset:=p.get_default_false("reset")
if other_action.name == "destroy" || reset{
console.print_debug("install action gitea.destroy")
destroy()!
}
if other_action.name == "install"{
console.print_debug("install action gitea.install")
install()!
}
}
if other_action.name in ["start","stop","restart"]{
mut p := other_action.params
name := p.get('name')!
mut gitea_obj:=get(name:name)!
console.print_debug("action object:\n${gitea_obj}")
if other_action.name == "start"{
console.print_debug("install action gitea.${other_action.name}")
gitea_obj.start()!
}
mut install_actions := plbook.find(filter: 'gitea.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
heroscript := install_action.heroscript()
mut obj := heroscript_loads(heroscript)!
set(obj)!
}
}
if other_action.name == "stop"{
console.print_debug("install action gitea.${other_action.name}")
gitea_obj.stop()!
}
if other_action.name == "restart"{
console.print_debug("install action gitea.${other_action.name}")
gitea_obj.restart()!
}
}
}
mut other_actions := plbook.find(filter: 'gitea.')!
for other_action in other_actions {
if other_action.name in ['destroy', 'install', 'build'] {
mut p := other_action.params
reset := p.get_default_false('reset')
if other_action.name == 'destroy' || reset {
console.print_debug('install action gitea.destroy')
destroy()!
}
if other_action.name == 'install' {
console.print_debug('install action gitea.install')
install()!
}
}
if other_action.name in ['start', 'stop', 'restart'] {
mut p := other_action.params
name := p.get('name')!
mut gitea_obj := get(name: name)!
console.print_debug('action object:\n${gitea_obj}')
if other_action.name == 'start' {
console.print_debug('install action gitea.${other_action.name}')
gitea_obj.start()!
}
if other_action.name == 'stop' {
console.print_debug('install action gitea.${other_action.name}')
gitea_obj.stop()!
}
if other_action.name == 'restart' {
console.print_debug('install action gitea.${other_action.name}')
gitea_obj.restart()!
}
}
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManager {
// unknown
// screen
// zinit
// tmux
// systemd
match cat{
.zinit{
console.print_debug("startupmanager: zinit")
return startupmanager.get(cat:.zinit)!
}
.systemd{
console.print_debug("startupmanager: systemd")
return startupmanager.get(cat:.systemd)!
}else{
console.print_debug("startupmanager: auto")
return startupmanager.get()!
}
}
// unknown
// screen
// zinit
// tmux
// systemd
match cat {
.zinit {
console.print_debug('startupmanager: zinit')
return startupmanager.get(cat: .zinit)!
}
.systemd {
console.print_debug('startupmanager: systemd')
return startupmanager.get(cat: .systemd)!
}
else {
console.print_debug('startupmanager: auto')
return startupmanager.get()!
}
}
}
//load from disk and make sure is properly intialized
// load from disk and make sure it is properly initialized
pub fn (mut self GiteaServer) reload() ! {
switch(self.name)
self=obj_init(self)!
switch(self.name)
self = obj_init(self)!
}
pub fn (mut self GiteaServer) start() ! {
switch(self.name)
if self.running()!{
return
}
switch(self.name)
if self.running()! {
return
}
console.print_header('gitea start')
console.print_header('gitea start')
if ! installed()!{
install()!
}
if !installed()! {
install()!
}
configure()!
configure()!
start_pre()!
start_pre()!
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
for zprocess in startupcmd()! {
mut sm := startupmanager_get(zprocess.startuptype)!
console.print_debug('starting gitea with ${zprocess.startuptype}...')
console.print_debug('starting gitea with ${zprocess.startuptype}...')
sm.new(zprocess)!
sm.new(zprocess)!
sm.start(zprocess.name)!
}
sm.start(zprocess.name)!
}
start_post()!
for _ in 0 .. 50 {
if self.running()! {
return
}
time.sleep(100 * time.millisecond)
}
return error('gitea did not install properly.')
start_post()!
for _ in 0 .. 50 {
if self.running()! {
return
}
time.sleep(100 * time.millisecond)
}
return error('gitea did not install properly.')
}
pub fn (mut self GiteaServer) install_start(args InstallArgs) ! {
switch(self.name)
self.install(args)!
self.start()!
switch(self.name)
self.install(args)!
self.start()!
}
pub fn (mut self GiteaServer) stop() ! {
switch(self.name)
stop_pre()!
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
sm.stop(zprocess.name)!
}
stop_post()!
switch(self.name)
stop_pre()!
for zprocess in startupcmd()! {
mut sm := startupmanager_get(zprocess.startuptype)!
sm.stop(zprocess.name)!
}
stop_post()!
}
pub fn (mut self GiteaServer) restart() ! {
switch(self.name)
self.stop()!
self.start()!
switch(self.name)
self.stop()!
self.start()!
}
pub fn (mut self GiteaServer) running() !bool {
switch(self.name)
switch(self.name)
//walk over the generic processes, if not running return
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
r:=sm.running(zprocess.name)!
if r==false{
return false
}
}
return running()!
// walk over the generic processes, if not running return
for zprocess in startupcmd()! {
mut sm := startupmanager_get(zprocess.startuptype)!
r := sm.running(zprocess.name)!
if r == false {
return false
}
}
return running()!
}
@[params]
pub struct InstallArgs{
pub struct InstallArgs {
pub mut:
reset bool
reset bool
}
pub fn (mut self GiteaServer) install(args InstallArgs) ! {
switch(self.name)
if args.reset || (!installed()!) {
install()!
}
switch(self.name)
if args.reset || (!installed()!) {
install()!
}
}
pub fn (mut self GiteaServer) build() ! {
switch(self.name)
build()!
switch(self.name)
build()!
}
pub fn (mut self GiteaServer) destroy() ! {
switch(self.name)
self.stop() or {}
destroy()!
switch(self.name)
self.stop() or {}
destroy()!
}
//switch instance to be used for gitea
// switch instance to be used for gitea
pub fn switch(name string) {
gitea_default = name
gitea_default = name
}
//helpers
// helpers
@[params]
pub struct DefaultConfigArgs{
instance string = 'default'
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -1,4 +1,5 @@
module gitea
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.core.pathlib
@@ -16,30 +17,29 @@ const default = false
@[heap]
pub struct GiteaServer {
pub mut:
name string = 'default'
path string = '${os.home_dir()}/hero/var/gitea'
passwd string
domain string = "git.test.com"
jwt_secret string = rand.hex(12)
lfs_jwt_secret string
internal_token string
secret_key string
postgresql_client_name string = "default"
mail_client_name string = "default"
name string = 'default'
path string = '${os.home_dir()}/hero/var/gitea'
passwd string
domain string = 'git.test.com'
jwt_secret string = rand.hex(12)
lfs_jwt_secret string
internal_token string
secret_key string
postgresql_client_name string = 'default'
mail_client_name string = 'default'
}
pub fn (obj GiteaServer) config_path() string {
return '${obj.path}/config.ini'
return '${obj.path}/config.ini'
}
//your checking & initialization code if needed
fn obj_init(mycfg_ GiteaServer)!GiteaServer{
mut mycfg:=mycfg_
return mycfg
// your checking & initialization code if needed
fn obj_init(mycfg_ GiteaServer) !GiteaServer {
mut mycfg := mycfg_
return mycfg
}
//called before start if done
// called before start if done
fn configure() ! {
mut server := get()!
@@ -68,7 +68,7 @@ fn configure() ! {
return error('Failed to initialize mail client "${server.mail_client_name}": ${err}')
}
//TODO: check database exists
// TODO: check database exists
if !db_client.db_exists('gitea_${server.name}')! {
console.print_header('Creating database gitea_${server.name} for gitea.')
db_client.db_create('gitea_${server.name}')!
@@ -85,10 +85,10 @@ fn configure() ! {
/////////////NORMALLY NO NEED TO TOUCH
pub fn heroscript_dumps(obj GiteaServer) !string {
return encoderhero.encode[GiteaServer ](obj)!
return encoderhero.encode[GiteaServer](obj)!
}
pub fn heroscript_loads(heroscript string) !GiteaServer {
mut obj := encoderhero.decode[GiteaServer](heroscript)!
return obj
mut obj := encoderhero.decode[GiteaServer](heroscript)!
return obj
}

View File

@@ -10,9 +10,9 @@ fn testsuite_begin() {
muttmux := new() or { panic('Cannot create tmux: ${err}') }
// reset tmux for tests
is_running := tmux.is_running() or { panic('cannot check if tmux is running: ${err}') }
is_running := is_running() or { panic('cannot check if tmux is running: ${err}') }
if is_running {
tmux.stop() or { panic('Cannot stop tmux: ${err}') }
stop() or { panic('Cannot stop tmux: ${err}') }
}
}
@@ -41,27 +41,21 @@ fn test_window_new() ! {
// tests creating duplicate windows
fn test_window_new0() {
installer := get_install()!
installer := tmux.get_install(
mut tmux := Tmux {
mut tmux := Tmux{
node: node_ssh
}
window_args := WindowArgs {
window_args := WindowArgs{
name: 'TestWindow0'
}
// console.print_debug(tmux)
mut window := tmux.window_new(window_args) or {
panic("Can't create new window: $err")
}
mut window := tmux.window_new(window_args) or { panic("Can't create new window: ${err}") }
assert tmux.sessions.keys().contains('main')
mut window_dup := tmux.window_new(window_args) or {
panic("Can't create new window: $err")
}
console.print_debug(node_ssh.exec('tmux ls') or { panic("fail:$err")})
window.delete() or { panic("Cant delete window") }
mut window_dup := tmux.window_new(window_args) or { panic("Can't create new window: ${err}") }
console.print_debug(node_ssh.exec('tmux ls') or { panic('fail:${err}') })
window.delete() or { panic('Cant delete window') }
// console.print_debug(tmux)
}

View File

@@ -135,6 +135,7 @@ fn (mut self TFDeployment) set_nodes() ! {
has_ipv6: if vm.requirements.public_ip6 { vm.requirements.public_ip6 } else { none }
status: 'up'
features: if vm.requirements.public_ip4 { ['zmachine'] } else { [] }
on_hetzner: vm.requirements.use_hetzner_node
)!
if nodes.len == 0 {
@@ -160,6 +161,7 @@ fn (mut self TFDeployment) set_nodes() ! {
healthy: true
node_id: zdb.requirements.node_id
available_for: u64(self.deployer.twin_id)
on_hetzner: zdb.requirements.use_hetzner_node
)!
if nodes.len == 0 {
@@ -183,6 +185,7 @@ fn (mut self TFDeployment) set_nodes() ! {
node_id: webname.requirements.node_id
available_for: u64(self.deployer.twin_id)
features: ['zmachine']
on_hetzner: webname.requirements.use_hetzner_node
)!
if nodes.len == 0 {

View File

@@ -42,16 +42,36 @@ fn get_mycelium() grid_models.Mycelium {
}
}
pub fn filter_nodes(filter gridproxy_models.NodeFilter) ![]gridproxy_models.Node {
@[params]
pub struct FilterNodesArgs {
gridproxy_models.NodeFilter
pub:
on_hetzner bool
}
pub fn filter_nodes(args FilterNodesArgs) ![]gridproxy_models.Node {
// Resolve the network configuration
net := resolve_network()!
// Create grid proxy client and retrieve the matching nodes
mut gp_client := gridproxy.new(net: net, cache: true)!
mut filter := args.NodeFilter
if args.on_hetzner {
filter.features << ['zmachine-light']
}
nodes := gp_client.get_nodes(filter)!
return nodes
}
// fn get_hetzner_node_ids(nodes []gridproxy_models.Node) ![]u64 {
// // get farm ids that are know to be hetzner's
// // if we need to iterate over all nodes, maybe we should use multi-threading
// panic('Not Implemented')
// return []
// }
fn convert_to_gigabytes(bytes u64) u64 {
return bytes * 1024 * 1024 * 1024
}

View File

@@ -26,7 +26,10 @@ pub mut:
flist string = 'https://hub.grid.tf/tf-official-vms/ubuntu-24.04-latest.flist'
entrypoint string = '/sbin/zinit init'
env map[string]string
nodes []u32 // if set will chose a node from the list to deploy on
// if set, a node will be chosen from this list to deploy on
nodes []u32
// will deploy on one of hetzner nodes
use_hetzner_node bool
}
// MachineModel struct to represent a machine and its associated details

View File

@@ -8,9 +8,9 @@ pub mut:
name string @[required]
node_id ?u32
use_wireguard_network bool
use_hetzner_node bool
// must be in the format ip:port if tls_passthrough is set, otherwise the format should be http://ip[:port]
backend string @[required]
use_wireguard bool
tls_passthrough bool
}

153
lib/vfs/webdav/README.md Normal file
View File

@@ -0,0 +1,153 @@
# **WebDAV Server in V**
This project implements a WebDAV server using the `vweb` framework and modules from `herolib`. The server supports essential WebDAV file operations such as reading, writing, copying, moving, and deleting files and directories. It also includes **authentication** and **request logging** for better control and debugging.
---
## **Features**
- **File Operations**:
Supports standard WebDAV methods: `GET`, `PUT`, `DELETE`, `COPY`, `MOVE`, and `MKCOL` (create directory) for files and directories.
- **Authentication**:
Basic HTTP authentication using an in-memory user database (`username:password`).
- **Request Logging**:
Logs incoming requests for debugging and monitoring purposes.
- **WebDAV Compliance**:
Implements WebDAV HTTP methods with proper responses to ensure compatibility with WebDAV clients.
- **Customizable Middleware**:
Extend or modify middleware for custom logging, authentication, or request handling.
---
## **Usage**
### Running the Server
```v
module main
import freeflowuniverse.herolib.vfs.webdav
fn main() {
mut app := webdav.new_app(
root_dir: '/tmp/rootdir' // Directory to serve via WebDAV
user_db: {
'admin': 'admin' // Username and password for authentication
}
)!
app.run()
}
```
### **Mounting the Server**
Once the server is running, you can mount it as a WebDAV volume:
```bash
sudo mount -t davfs <server_url> <mount_point>
```
For example:
```bash
sudo mount -t davfs http://localhost:8080 /mnt/webdav
```
**Important Note**:
Ensure the `root_dir` is **not the same as the mount point** to avoid performance issues during operations like `ls`.
---
## **Supported Routes**
| **Method** | **Route** | **Description** |
|------------|--------------|----------------------------------------------------------|
| `GET` | `/:path...` | Retrieves the contents of a file. |
| `PUT` | `/:path...` | Creates a new file or updates an existing one. |
| `DELETE` | `/:path...` | Deletes a file or directory. |
| `COPY` | `/:path...` | Copies a file or directory to a new location. |
| `MOVE` | `/:path...` | Moves a file or directory to a new location. |
| `MKCOL` | `/:path...` | Creates a new directory. |
| `OPTIONS` | `/:path...` | Lists supported WebDAV methods. |
| `PROPFIND` | `/:path...` | Retrieves properties (e.g., size, date) of a file or directory. |
---
## **Authentication**
This WebDAV server uses **Basic Authentication**.
Set the `Authorization` header in your client to include your credentials in base64 format:
```http
Authorization: Basic <base64-encoded-credentials>
```
**Example**:
For the credentials `admin:admin`, the header would look like this:
```http
Authorization: Basic YWRtaW46YWRtaW4=
```
---
## **Configuration**
You can configure the WebDAV server using the following parameters when calling `new_app`:
| **Parameter** | **Type** | **Description** |
|-----------------|-------------------|---------------------------------------------------------------|
| `root_dir` | `string` | Root directory to serve files from. |
| `user_db` | `map[string]string` | A map containing usernames as keys and passwords as values. |
| `port` (optional) | `int` | The port on which the server will run. Defaults to `8080`. |
---
## **Example Workflow**
1. Start the server:
```bash
v run webdav_server.v
```
2. Mount the server using `davfs`:
```bash
sudo mount -t davfs http://localhost:8080 /mnt/webdav
```
3. Perform operations:
- Create a new file:
```bash
echo "Hello WebDAV!" > /mnt/webdav/hello.txt
```
- List files:
```bash
ls /mnt/webdav
```
- Delete a file:
```bash
rm /mnt/webdav/hello.txt
```
4. Check server logs for incoming requests and responses.
---
## **Performance Notes**
- Avoid mounting the WebDAV server directly into its own root directory (`root_dir`), as this can cause significant slowdowns for file operations like `ls`.
- Use tools like `cadaver`, `curl`, or `davfs` for interacting with the WebDAV server.
---
## **Dependencies**
- V Programming Language
- Herolib VFS Module (for WebDAV support)
---
## **Future Enhancements**
- Support for advanced WebDAV methods like `LOCK` and `UNLOCK`.
- Integration with persistent databases for user credentials.
- TLS/SSL support for secure connections.

69
lib/vfs/webdav/app.v Normal file
View File

@@ -0,0 +1,69 @@
module webdav
import vweb
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
// App is the vweb application that serves WebDAV requests.
@[heap]
struct App {
	vweb.Context
	// username -> password map used by auth_middleware for HTTP Basic auth
	user_db  map[string]string @[required]
	// directory served over WebDAV; every request path is resolved under it
	root_dir pathlib.Path @[vweb_global]
pub mut:
	// lock_manager LockManager
	server_port int
	// route prefix -> middleware chain; '/' entries run for every request
	middlewares map[string][]vweb.Middleware
}
// AppArgs are the parameters accepted by new_app.
@[params]
pub struct AppArgs {
pub mut:
	// TCP port the HTTP server listens on
	server_port int = 8080
	// directory to serve via WebDAV; created when missing
	root_dir string @[required]
	// username -> password map for HTTP Basic authentication
	user_db map[string]string @[required]
}
// new_app builds a WebDAV App from args: it ensures the root directory
// exists (create: true) and registers the logging and authentication
// middlewares for every route.
pub fn new_app(args AppArgs) !&App {
	root_dir := pathlib.get_dir(path: args.root_dir, create: true)!
	mut app := &App{
		user_db:     args.user_db.clone()
		root_dir:    root_dir
		server_port: args.server_port
	}
	// middlewares registered under '/' run for every incoming request
	app.middlewares['/'] << logging_middleware
	app.middlewares['/'] << app.auth_middleware
	return app
}
// RunArgs are the parameters accepted by App.run.
@[params]
pub struct RunArgs {
pub mut:
	// when true the server is spawned on a separate thread and run returns
	background bool
}
// run starts the vweb server on app.server_port. With background: true the
// server runs on a spawned thread and this call returns immediately;
// otherwise it blocks until the server stops.
pub fn (mut app App) run(args RunArgs) {
	console.print_green('Running the server on port: ${app.server_port}')
	if args.background {
		spawn vweb.run(app, app.server_port)
	} else {
		vweb.run(app, app.server_port)
	}
}
// not_found replies with HTTP 404 and a plain-text body.
pub fn (mut app App) not_found() vweb.Result {
	app.set_status(404, 'Not Found')
	return app.text('Not Found')
}
// server_error replies with HTTP 500 and a plain-text body.
pub fn (mut app App) server_error() vweb.Result {
	// fixed typo: the status reason previously read 'Inernal Server Error'
	app.set_status(500, 'Internal Server Error')
	return app.text('Internal Server Error')
}
// bad_request replies with HTTP 400 and the given message as plain text.
pub fn (mut app App) bad_request(message string) vweb.Result {
	app.set_status(400, 'Bad Request')
	return app.text(message)
}

43
lib/vfs/webdav/auth.v Normal file
View File

@@ -0,0 +1,43 @@
module webdav
import vweb
import encoding.base64
// reject_unauthorized writes a 401 challenge response and returns false so
// the middleware chain stops processing the request.
fn reject_unauthorized(mut ctx vweb.Context) bool {
	ctx.set_status(401, 'Unauthorized')
	ctx.add_header('WWW-Authenticate', 'Basic realm="WebDAV Server"')
	ctx.send_response_to_client('', '')
	return false
}

// auth_middleware enforces HTTP Basic authentication against app.user_db.
// Returns true when the request carries valid credentials, otherwise sends
// a 401 challenge and returns false.
fn (mut app App) auth_middleware(mut ctx vweb.Context) bool {
	auth_header := ctx.get_header('Authorization')
	if auth_header == '' {
		return reject_unauthorized(mut ctx)
	}
	if !auth_header.starts_with('Basic ') {
		return reject_unauthorized(mut ctx)
	}
	auth_decoded := base64.decode_str(auth_header[6..])
	// RFC 7617: only the first ':' separates user-id from password; a plain
	// split(':') rejected valid passwords that themselves contain ':'
	split_credentials := auth_decoded.split_nth(':', 2)
	if split_credentials.len != 2 {
		return reject_unauthorized(mut ctx)
	}
	username := split_credentials[0]
	// NOTE(review): despite the name this is compared as plain text against
	// user_db values — no hashing happens here
	hashed_pass := split_credentials[1]
	if app.user_db[username] != hashed_pass {
		return reject_unauthorized(mut ctx)
	}
	return true
}

67
lib/vfs/webdav/bin/main.v Normal file
View File

@@ -0,0 +1,67 @@
import freeflowuniverse.herolib.vfs.webdav
import cli { Command, Flag }
import os
// main defines and runs the `webdav` CLI command: it parses the port,
// directory, user and password flags and starts a WebDAV server from them.
fn main() {
	// NOTE: the original declared an extra unused `mut cmd := Command{...}`,
	// which V rejects as an unused variable; it has been removed.
	mut app := Command{
		name:        'webdav'
		description: 'Vlang Webdav Server'
		execute:     fn (cmd Command) ! {
			port := cmd.flags.get_int('port')!
			directory := cmd.flags.get_string('directory')!
			user := cmd.flags.get_string('user')!
			password := cmd.flags.get_string('password')!
			mut server := webdav.new_app(
				root_dir:    directory
				server_port: port
				user_db:     {
					user: password
				}
			)!
			// blocks serving requests until the process is stopped
			server.run()
			return
		}
	}
	app.add_flag(Flag{
		flag:          .int
		name:          'port'
		abbrev:        'p'
		description:   'server port'
		default_value: ['8000']
	})
	app.add_flag(Flag{
		flag:        .string
		required:    true
		name:        'directory'
		abbrev:      'd'
		description: 'server directory'
	})
	app.add_flag(Flag{
		flag:        .string
		required:    true
		name:        'user'
		abbrev:      'u'
		description: 'username'
	})
	app.add_flag(Flag{
		flag:        .string
		required:    true
		name:        'password'
		abbrev:      'pw'
		description: 'user password'
	})
	app.setup()
	app.parse(os.args)
}

87
lib/vfs/webdav/lock.v Normal file
View File

@@ -0,0 +1,87 @@
module webdav
import time
import rand
// Lock describes a WebDAV lock held on a single resource.
struct Lock {
	resource   string
	owner      string
	token      string // opaque token handed to the client, required to unlock
	depth      int    // 0 for a single resource, 1 for recursive
	timeout    int    // in seconds
	created_at time.Time
}

// LockManager keeps the in-memory map of resource path -> active Lock.
struct LockManager {
mut:
	locks map[string]Lock
}
// lock acquires a lock on resource for owner and returns the new lock token.
// An unexpired existing lock makes the call fail with an error (the LOCK
// handler maps this to 423 Locked); an expired lock is removed and replaced.
pub fn (mut lm LockManager) lock(resource string, owner string, depth int, timeout int) !string {
	if resource in lm.locks {
		// Check if the lock is still valid
		existing_lock := lm.locks[resource]
		if time.now().unix() - existing_lock.created_at.unix() < existing_lock.timeout {
			// Previously the existing token was returned here, which handed
			// another owner's lock token to any second caller and made the
			// lock impossible to contend; callers expect an error instead.
			return error('resource ${resource} is already locked')
		}
		// Expired lock, remove it
		lm.unlock(resource)
	}
	// Generate a new lock token
	token := rand.uuid_v4()
	lm.locks[resource] = Lock{
		resource:   resource
		owner:      owner
		token:      token
		depth:      depth
		timeout:    timeout
		created_at: time.now()
	}
	return token
}
// unlock removes any lock held on resource.
// Returns true when a lock existed and was removed, false otherwise.
pub fn (mut lm LockManager) unlock(resource string) bool {
	if resource !in lm.locks {
		return false
	}
	lm.locks.delete(resource)
	return true
}
// is_locked reports whether resource currently holds an unexpired lock.
pub fn (lm LockManager) is_locked(resource string) bool {
	if resource !in lm.locks {
		return false
	}
	held := lm.locks[resource]
	// a lock whose timeout has elapsed no longer counts as held
	return time.now().unix() - held.created_at.unix() < held.timeout
}
// unlock_with_token removes the lock on resource only when token matches
// the token the lock was created with; returns true on success.
pub fn (mut lm LockManager) unlock_with_token(resource string, token string) bool {
	if resource !in lm.locks {
		return false
	}
	if lm.locks[resource].token != token {
		return false
	}
	lm.locks.delete(resource)
	return true
}
// lock_recursive is a placeholder for depth-aware locking: depth 0 delegates
// to lock; recursive locking (depth 1, lock children too) is not implemented
// yet and currently returns an empty token.
fn (mut lm LockManager) lock_recursive(resource string, owner string, depth int, timeout int) !string {
	if depth == 0 {
		return lm.lock(resource, owner, depth, timeout)
	}
	// Implement logic to lock child resources if depth == 1
	return ''
}
// cleanup_expired_locks removes every lock whose timeout has elapsed.
// The previous body was entirely commented out, so expired locks piled up
// until something touched them via lock(); this implements the sweep.
pub fn (mut lm LockManager) cleanup_expired_locks() {
	now := time.now().unix()
	// collect first, then delete, to avoid mutating the map while iterating
	mut expired := []string{}
	for resource, lck in lm.locks {
		if now - lck.created_at.unix() >= lck.timeout {
			expired << resource
		}
	}
	for resource in expired {
		lm.locks.delete(resource)
	}
}

13
lib/vfs/webdav/logging.v Normal file
View File

@@ -0,0 +1,13 @@
module webdav
import vweb
import freeflowuniverse.herolib.ui.console
// logging_middleware prints a summary of every incoming request (method,
// URL, headers) and always lets the request continue.
fn logging_middleware(mut ctx vweb.Context) bool {
	req := ctx.req
	console.print_green('=== New Request ===')
	console.print_green('Method: ${req.method.str()}')
	console.print_green('Path: ${req.url}')
	console.print_green('Headers: ${req.header}')
	console.print_green('')
	return true
}

259
lib/vfs/webdav/methods.v Normal file
View File

@@ -0,0 +1,259 @@
module webdav
import vweb
import os
import freeflowuniverse.herolib.core.pathlib
import encoding.xml
import freeflowuniverse.herolib.ui.console
import net.urllib
// @['/:path...'; LOCK]
// fn (mut app App) lock_handler(path string) vweb.Result {
// // Not yet working
// // TODO: Test with multiple clients
// resource := app.req.url
// owner := app.get_header('Owner')
// if owner.len == 0 {
// return app.bad_request('Owner header is required.')
// }
// depth := if app.get_header('Depth').len > 0 { app.get_header('Depth').int() } else { 0 }
// timeout := if app.get_header('Timeout').len > 0 { app.get_header('Timeout').int() } else { 3600 }
// token := app.lock_manager.lock(resource, owner, depth, timeout) or {
// app.set_status(423, 'Locked')
// return app.text('Resource is already locked.')
// }
// app.set_status(200, 'OK')
// app.add_header('Lock-Token', token)
// return app.text('Lock granted with token: ${token}')
// }
// @['/:path...'; UNLOCK]
// fn (mut app App) unlock_handler(path string) vweb.Result {
// // Not yet working
// // TODO: Test with multiple clients
// resource := app.req.url
// token := app.get_header('Lock-Token')
// if token.len == 0 {
// console.print_stderr('Unlock failed: `Lock-Token` header required.')
// return app.bad_request('Unlock failed: `Lock-Token` header required.')
// }
// if app.lock_manager.unlock_with_token(resource, token) {
// app.set_status(204, 'No Content')
// return app.text('Lock successfully released')
// }
// console.print_stderr('Resource is not locked or token mismatch.')
// app.set_status(409, 'Conflict')
// return app.text('Resource is not locked or token mismatch')
// }
// get_file serves the file at path (resolved under root_dir), choosing the
// Content-Type from the file extension.
@['/:path...'; get]
fn (mut app App) get_file(path string) vweb.Result {
	mut file_path := pathlib.get_file(path: app.root_dir.path + path) or { return app.not_found() }
	if !file_path.exists() {
		return app.not_found()
	}
	file_data := file_path.read() or {
		console.print_stderr('failed to read file ${file_path.path}: ${err}')
		return app.server_error()
	}
	// reuse the module's shared extension -> MIME lookup (prop.v) instead of
	// duplicating it inline; this also makes the fallback type consistent
	// ('text/plain; charset=utf-8') with what PROPFIND reports
	content_type := app.get_file_content_type(file_path.path)
	app.set_status(200, 'Ok')
	app.send_response_to_client(content_type, file_data)
	return vweb.not_found() // dummy result; the response was already sent above
}
// delete removes the file or directory at path (resolved under root_dir)
// and replies 204 No Content.
@['/:path...'; delete]
fn (mut app App) delete(path string) vweb.Result {
	mut entry := pathlib.get(app.root_dir.path + path)
	if !entry.exists() {
		return app.not_found()
	}
	// a path is either a directory or a file, never both
	if entry.is_dir() {
		console.print_debug('deleting directory: ${entry.path}')
		os.rmdir_all(entry.path) or { return app.server_error() }
	} else if entry.is_file() {
		console.print_debug('deleting file: ${entry.path}')
		os.rm(entry.path) or { return app.server_error() }
	}
	console.print_debug('entry: ${entry.path} is deleted')
	app.set_status(204, 'No Content')
	return app.text('entry ${entry.path} is deleted')
}
// create_or_update handles PUT: writes the request body to the file at path,
// creating the file when it does not exist. PUT to a directory is rejected
// with 405 Method Not Allowed.
@['/:path...'; put]
fn (mut app App) create_or_update(path string) vweb.Result {
	mut p := pathlib.get(app.root_dir.path + path)
	if p.is_dir() {
		console.print_stderr('Cannot PUT to a directory: ${p.path}')
		app.set_status(405, 'Method Not Allowed')
		return app.text('HTTP 405: Method Not Allowed')
	}
	file_data := app.req.data
	// create: true makes the file when missing — TODO confirm whether
	// pathlib also creates missing parent directories here
	p = pathlib.get_file(path: p.path, create: true) or {
		console.print_stderr('failed to get file ${p.path}: ${err}')
		return app.server_error()
	}
	p.write(file_data) or {
		console.print_stderr('failed to write file data ${p.path}: ${err}')
		return app.server_error()
	}
	app.set_status(200, 'Successfully saved file: ${p.path}')
	return app.text('HTTP 200: Successfully saved file: ${p.path}')
}
// copy duplicates the entry at path to the location named by the
// Destination request header. Fails when the destination already exists.
@['/:path...'; copy]
fn (mut app App) copy(path string) vweb.Result {
	mut src := pathlib.get(app.root_dir.path + path)
	if !src.exists() {
		return app.not_found()
	}
	destination := app.get_header('Destination')
	// the Destination header carries a full URL; only its path part matters
	destination_url := urllib.parse(destination) or {
		return app.bad_request('Invalid Destination ${destination}: ${err}')
	}
	mut dst := pathlib.get(app.root_dir.path + destination_url.path)
	if dst.exists() {
		return app.bad_request('Destination ${dst.path} already exists')
	}
	os.cp_all(src.path, dst.path, false) or {
		console.print_stderr('failed to copy: ${err}')
		return app.server_error()
	}
	app.set_status(200, 'Successfully copied entry: ${src.path}')
	return app.text('HTTP 200: Successfully copied entry: ${src.path}')
}
// move relocates the entry at path to the location named by the Destination
// request header. Fails when the destination already exists.
@['/:path...'; move]
fn (mut app App) move(path string) vweb.Result {
	mut p := pathlib.get(app.root_dir.path + path)
	if !p.exists() {
		return app.not_found()
	}
	destination := app.get_header('Destination')
	destination_url := urllib.parse(destination) or {
		return app.bad_request('Invalid Destination ${destination}: ${err}')
	}
	destination_path_str := destination_url.path
	mut destination_path := pathlib.get(app.root_dir.path + destination_path_str)
	if destination_path.exists() {
		return app.bad_request('Destination ${destination_path.path} already exists')
	}
	os.mv(p.path, destination_path.path) or {
		// fixed: this log previously said 'failed to copy' in the MOVE handler
		console.print_stderr('failed to move: ${err}')
		return app.server_error()
	}
	app.set_status(200, 'Successfully moved entry: ${p.path}')
	return app.text('HTTP 200: Successfully moved entry: ${p.path}')
}
// mkcol handles MKCOL: creates a new collection (directory) at path and
// replies 201 Created. An existing entry at that path yields 400.
@['/:path...'; mkcol]
fn (mut app App) mkcol(path string) vweb.Result {
	mut p := pathlib.get(app.root_dir.path + path)
	if p.exists() {
		return app.bad_request('Another collection exists at ${p.path}')
	}
	p = pathlib.get_dir(path: p.path, create: true) or {
		console.print_stderr('failed to create directory ${p.path}: ${err}')
		return app.server_error()
	}
	app.set_status(201, 'Created')
	return app.text('HTTP 201: Created')
}
// options advertises the WebDAV capabilities of this server (DAV class 1,2)
// and the allowed/CORS-exposed methods.
@['/:path...'; options]
fn (mut app App) options(path string) vweb.Result {
	// the same method list is sent in Allow and in the CORS methods header
	allowed := 'OPTIONS, PROPFIND, MKCOL, GET, HEAD, POST, PUT, DELETE, COPY, MOVE'
	app.set_status(200, 'OK')
	app.add_header('DAV', '1,2')
	app.add_header('Allow', allowed)
	app.add_header('MS-Author-Via', 'DAV')
	app.add_header('Access-Control-Allow-Origin', '*')
	app.add_header('Access-Control-Allow-Methods', allowed)
	app.add_header('Access-Control-Allow-Headers', 'Authorization, Content-Type')
	app.send_response_to_client('text/plain', '')
	return vweb.not_found()
}
// propfind handles PROPFIND: replies 207 Multi-Status with an XML document
// describing the entry at path and, when the Depth header is non-zero, its
// direct children (see get_responses).
@['/:path...'; propfind]
fn (mut app App) propfind(path string) vweb.Result {
	mut p := pathlib.get(app.root_dir.path + path)
	if !p.exists() {
		return app.not_found()
	}
	// Depth: 0 = only this resource; non-zero = include direct children
	depth := app.get_header('Depth').int()
	responses := app.get_responses(p.path, depth) or {
		console.print_stderr('failed to get responses: ${err}')
		return app.server_error()
	}
	doc := xml.XMLDocument{
		root: xml.XMLNode{
			name:       'D:multistatus'
			children:   responses
			attributes: {
				'xmlns:D': 'DAV:'
			}
		}
	}
	// drop the first line of pretty_str output (presumably its own XML
	// declaration — TODO confirm) and prepend one with explicit UTF-8
	res := '<?xml version="1.0" encoding="UTF-8"?>${doc.pretty_str('').split('\n')[1..].join('')}'
	// println('res: ${res}')
	app.set_status(207, 'Multi-Status')
	app.send_response_to_client('application/xml', res)
	return vweb.not_found() // dummy result; the response was already sent above
}
// generate_element returns '<element>' prefixed by space_cnt spaces and
// followed by a newline.
fn (mut app App) generate_element(element string, space_cnt int) string {
	// string.repeat replaces the previous manual space-accumulation loop
	return '${' '.repeat(space_cnt)}<${element}>\n'
}
// TODO: implement
// @['/'; proppatch]
// fn (mut app App) prop_patch() vweb.Result {
// }
// TODO: implement, now it's used with PUT
// @['/'; post]
// fn (mut app App) post() vweb.Result {
// }

172
lib/vfs/webdav/prop.v Normal file
View File

@@ -0,0 +1,172 @@
module webdav
import freeflowuniverse.herolib.core.pathlib
import encoding.xml
import os
import time
import vweb
// generate_response_element builds a <D:response> node for path: a D:href
// relative to root_dir plus a D:propstat holding the entry's properties.
fn (mut app App) generate_response_element(path string, depth int) xml.XMLNode {
	// strip the served root so the href is relative to the WebDAV mount
	mut path_ := path.all_after(app.root_dir.path)
	if !path_.starts_with('/') {
		path_ = '/${path_}'
	}
	// collection hrefs get a trailing slash (except the root itself)
	if os.is_dir(path) && path_ != '/' {
		path_ = '${path_}/'
	}
	href := xml.XMLNode{
		name:     'D:href'
		children: [path_]
	}
	propstat := app.generate_propstat_element(path, depth)
	return xml.XMLNode{
		name:     'D:response'
		children: [href, propstat]
	}
}
// generate_propstat_element wraps the property list for path in a
// <D:propstat> with a 200 status; when reading the properties fails, a
// propstat holding only a 500 status node is returned instead.
fn (mut app App) generate_propstat_element(path string, depth int) xml.XMLNode {
	mut status := xml.XMLNode{
		name:     'D:status'
		children: ['HTTP/1.1 200 OK']
	}
	prop := app.generate_prop_element(path, depth) or {
		// TODO: status should be according to returned error
		return xml.XMLNode{
			name:     'D:propstat'
			children: [
				xml.XMLNode{
					name:     'D:status'
					children: ['HTTP/1.1 500 Internal Server Error']
				},
			]
		}
	}
	return xml.XMLNode{
		name:     'D:propstat'
		children: [prop, status]
	}
}
// generate_prop_element builds the <D:prop> node for path with the standard
// WebDAV properties: displayname, getlastmodified, getcontenttype,
// resourcetype, getcontentlength (files only) and creationdate.
// Returns an error when path does not exist.
fn (mut app App) generate_prop_element(path string, depth int) !xml.XMLNode {
	if !os.exists(path) {
		return error('not found')
	}
	stat := os.stat(path)!
	display_name := xml.XMLNode{
		name:     'D:displayname'
		children: ['${os.file_name(path)}']
	}
	// directories report a size of 0; files report their byte size
	content_length := if os.is_dir(path) { 0 } else { stat.size }
	get_content_length := xml.XMLNode{
		name:     'D:getcontentlength'
		children: ['${content_length}']
	}
	ctime := format_iso8601(time.unix(stat.ctime))
	creation_date := xml.XMLNode{
		name:     'D:creationdate'
		children: ['${ctime}']
	}
	mtime := format_iso8601(time.unix(stat.mtime))
	get_last_mod := xml.XMLNode{
		name:     'D:getlastmodified'
		children: ['${mtime}']
	}
	// directories use the conventional WebDAV directory MIME type; files are
	// mapped by extension via get_file_content_type
	content_type := match os.is_dir(path) {
		true {
			'httpd/unix-directory'
		}
		false {
			app.get_file_content_type(path)
		}
	}
	get_content_type := xml.XMLNode{
		name:     'D:getcontenttype'
		children: ['${content_type}']
	}
	// resourcetype is empty for files and contains <D:collection/> for dirs
	mut get_resource_type_children := []xml.XMLNodeContents{}
	if os.is_dir(path) {
		get_resource_type_children << xml.XMLNode{
			name: 'D:collection xmlns:D="DAV:"'
		}
	}
	get_resource_type := xml.XMLNode{
		name:     'D:resourcetype'
		children: get_resource_type_children
	}
	mut nodes := []xml.XMLNodeContents{}
	nodes << display_name
	nodes << get_last_mod
	nodes << get_content_type
	nodes << get_resource_type
	// getcontentlength is only meaningful for files
	if !os.is_dir(path) {
		nodes << get_content_length
	}
	nodes << creation_date
	mut res := xml.XMLNode{
		name:     'D:prop'
		children: nodes.clone()
	}
	return res
}
// get_file_content_type maps a file's extension to a MIME type via vweb's
// table, defaulting to 'text/plain; charset=utf-8' for unknown extensions.
fn (mut app App) get_file_content_type(path string) string {
	ext := os.file_ext(path)
	return vweb.mime_types[ext] or { 'text/plain; charset=utf-8' }
}
// format_iso8601 renders t as an ISO 8601 timestamp with a 'Z' suffix.
// NOTE(review): the 'Z' asserts UTC but t is formatted as-is; callers must
// pass UTC times (e.g. from time.unix) for the suffix to be truthful — confirm.
fn format_iso8601(t time.Time) string {
	return '${t.year:04d}-${t.month:02d}-${t.day:02d}T${t.hour:02d}:${t.minute:02d}:${t.second:02d}Z'
}
// get_responses collects the <D:response> nodes for a PROPFIND on path:
// always the entry itself, plus its direct (non-recursive) children when
// depth is non-zero and path is a directory.
fn (mut app App) get_responses(path string, depth int) ![]xml.XMLNodeContents {
	mut responses := []xml.XMLNodeContents{}
	responses << app.generate_response_element(path, depth)
	if depth == 0 {
		return responses
	}
	if os.is_dir(path) {
		mut dir := pathlib.get_dir(path: path) or {
			app.set_status(500, 'failed to get directory ${path}: ${err}')
			return error('failed to get directory ${path}: ${err}')
		}
		// recursive: false — only one level of children is listed
		entries := dir.list(recursive: false) or {
			app.set_status(500, 'failed to list directory ${path}: ${err}')
			return error('failed to list directory ${path}: ${err}')
		}
		for entry in entries.paths {
			responses << app.generate_response_element(entry.path, depth)
		}
	}
	return responses
}

View File

@@ -0,0 +1,216 @@
module webdav
import net.http
import freeflowuniverse.herolib.core.pathlib
import time
import encoding.base64
import rand
// test_run starts a WebDAV server for manual inspection.
// NOTE(review): app.run() without `background: true` blocks forever, so this
// test never completes — consider run(background: true) as the commented-out
// tests below do, or remove this from the automated suite.
fn test_run() {
	root_dir := '/tmp/webdav'
	mut app := new_app(
		root_dir: root_dir
		user_db:  {
			'mario': '123'
		}
	)!
	app.run()
}
// fn test_get() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// file_name := 'newfile.txt'
// mut p := pathlib.get_file(path: '${root_dir}/${file_name}', create: true)!
// p.write('my new file')!
// mut req := http.new_request(.get, 'http://localhost:${app.server_port}/${file_name}',
// '')
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// response := req.do()!
// assert response.body == 'my new file'
// }
// fn test_put() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// file_name := 'newfile_put.txt'
// mut data := 'my new put file'
// mut req := http.new_request(.put, 'http://localhost:${app.server_port}/${file_name}',
// data)
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// mut response := req.do()!
// mut p := pathlib.get_file(path: '${root_dir}/${file_name}')!
// assert p.exists()
// assert p.read()! == data
// data = 'updated data'
// req = http.new_request(.put, 'http://localhost:${app.server_port}/${file_name}', data)
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// response = req.do()!
// p = pathlib.get_file(path: '${root_dir}/${file_name}')!
// assert p.exists()
// assert p.read()! == data
// }
// fn test_copy() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// file_name1, file_name2 := 'newfile_copy1.txt', 'newfile_copy2.txt'
// mut p1 := pathlib.get_file(path: '${root_dir}/${file_name1}', create: true)!
// data := 'file copy data'
// p1.write(data)!
// mut req := http.new_request(.copy, 'http://localhost:${app.server_port}/${file_name1}',
// '')
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// req.add_custom_header('Destination', 'http://localhost:${app.server_port}/${file_name2}')!
// mut response := req.do()!
// assert p1.exists()
// mut p2 := pathlib.get_file(path: '${root_dir}/${file_name2}')!
// assert p2.exists()
// assert p2.read()! == data
// }
// fn test_move() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// file_name1, file_name2 := 'newfile_move1.txt', 'newfile_move2.txt'
// mut p := pathlib.get_file(path: '${root_dir}/${file_name1}', create: true)!
// data := 'file move data'
// p.write(data)!
// mut req := http.new_request(.move, 'http://localhost:${app.server_port}/${file_name1}',
// '')
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// req.add_custom_header('Destination', 'http://localhost:${app.server_port}/${file_name2}')!
// mut response := req.do()!
// p = pathlib.get_file(path: '${root_dir}/${file_name2}')!
// assert p.exists()
// assert p.read()! == data
// }
// fn test_delete() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// file_name := 'newfile_delete.txt'
// mut p := pathlib.get_file(path: '${root_dir}/${file_name}', create: true)!
// mut req := http.new_request(.delete, 'http://localhost:${app.server_port}/${file_name}',
// '')
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// mut response := req.do()!
// assert !p.exists()
// }
// fn test_mkcol() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// dir_name := 'newdir'
// mut req := http.new_request(.mkcol, 'http://localhost:${app.server_port}/${dir_name}',
// '')
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// mut response := req.do()!
// mut p := pathlib.get_dir(path: '${root_dir}/${dir_name}')!
// assert p.exists()
// }
// fn test_propfind() {
// root_dir := '/tmp/webdav'
// mut app := new_app(
// server_port: rand.int_in_range(8000, 9000)!
// root_dir: root_dir
// user_db: {
// 'mario': '123'
// }
// )!
// app.run(background: true)
// time.sleep(1 * time.second)
// dir_name := 'newdir'
// file1 := 'file1.txt'
// file2 := 'file2.html'
// dir1 := 'dir1'
// mut p := pathlib.get_dir(path: '${root_dir}/${dir_name}', create: true)!
// mut file1_p := pathlib.get_file(path: '${p.path}/${file1}', create: true)!
// mut file2_p := pathlib.get_file(path: '${p.path}/${file2}', create: true)!
// mut dir1_p := pathlib.get_dir(path: '${p.path}/${dir1}', create: true)!
// mut req := http.new_request(.propfind, 'http://localhost:${app.server_port}/${dir_name}',
// '')
// signature := base64.encode_str('mario:123')
// req.add_custom_header('Authorization', 'Basic ${signature}')!
// mut response := req.do()!
// assert response.status_code == 207
// }

View File

@@ -13,58 +13,58 @@ import freeflowuniverse.herolib.ui.console
@[heap]
pub struct DocSite {
pub mut:
name string
url string
path_src pathlib.Path
path_build pathlib.Path
name string
url string
path_src pathlib.Path
path_build pathlib.Path
// path_publish pathlib.Path
args DSiteNewArgs
errors []SiteError
args DSiteNewArgs
errors []SiteError
config Config
}
@[params]
pub struct DSiteNewArgs {
pub mut:
name string
nameshort string
path string
url string
name string
nameshort string
path string
url string
// publish_path string
build_path string
production bool
build_path string
production bool
watch_changes bool = true
update bool
update bool
}
pub fn (mut f DocusaurusFactory) build_dev(args_ DSiteNewArgs) !&DocSite {
mut s:=f.add(args_)!
mut s := f.add(args_)!
s.generate()!
osal.exec(
cmd: '
cmd: '
cd ${s.path_build.path}
bash build_dev.sh
'
retry: 0
)!
)!
return s
}
pub fn (mut f DocusaurusFactory) build(args_ DSiteNewArgs) !&DocSite {
mut s:=f.add(args_)!
mut s := f.add(args_)!
s.generate()!
osal.exec(
cmd: '
cmd: '
cd ${s.path_build.path}
bash build.sh
'
retry: 0
)!
)!
return s
}
pub fn (mut f DocusaurusFactory) dev(args_ DSiteNewArgs) !&DocSite {
mut s:=f.add(args_)!
mut s := f.add(args_)!
s.clean()!
s.generate()!
@@ -72,14 +72,14 @@ pub fn (mut f DocusaurusFactory) dev(args_ DSiteNewArgs) !&DocSite {
// Create screen session for docusaurus development server
mut screen_name := 'docusaurus'
mut sf := screen.new()!
// Add and start a new screen session
mut scr := sf.add(
name: screen_name
cmd: '/bin/bash'
start: true
name: screen_name
cmd: '/bin/bash'
start: true
attach: false
reset: true
reset: true
)!
// Send commands to the screen session
@@ -93,33 +93,29 @@ pub fn (mut f DocusaurusFactory) dev(args_ DSiteNewArgs) !&DocSite {
console.print_item(' 1. Attach to screen: screen -r ${screen_name}')
console.print_item(' 2. To detach from screen: Press Ctrl+A then D')
console.print_item(' 3. To list all screens: screen -ls')
console.print_item('The site content is on::')
console.print_item('The site content is on::')
console.print_item(' 1. location of documents: ${s.path_src.path}/docs')
if osal.cmd_exists("code"){
if osal.cmd_exists('code') {
console.print_item(' 2. We opened above dir in vscode.')
osal.exec(cmd:'code ${s.path_src.path}/docs')!
osal.exec(cmd: 'code ${s.path_src.path}/docs')!
}
// Start the watcher in a separate thread
//mut tf:=spawn watch_docs(docs_path, s.path_src.path, s.path_build.path)
//tf.wait()!
println("\n")
// mut tf:=spawn watch_docs(docs_path, s.path_src.path, s.path_build.path)
// tf.wait()!
println('\n')
if args_.watch_changes {
docs_path := '${s.path_src.path}/docs'
watch_docs(docs_path, s.path_src.path, s.path_build.path)!
}
}
return s
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
pub fn (mut f DocusaurusFactory) add(args_ DSiteNewArgs) !&DocSite {
console.print_header(' Docusaurus: ${args_.name}')
mut args := args_
@@ -129,62 +125,57 @@ pub fn (mut f DocusaurusFactory) add(args_ DSiteNewArgs) !&DocSite {
}
// if args.publish_path.len == 0 {
// args.publish_path = '${f.path_publish.path}/${args.name}'
if args.url.len>0{
if args.url.len > 0 {
mut gs := gittools.new()!
args.path = gs.get_path(url: args.url)!
}
if args.path.len==0{
if args.path.len == 0 {
return error("Can't get path from docusaurus site, its not specified.")
}
mut gs := gittools.new()!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git',pull:args.update)!
mut r := gs.get_repo(
url: 'https://github.com/freeflowuniverse/docusaurus_template.git'
pull: args.update
)!
mut template_path := r.patho()!
// First ensure cfg directory exists in src, if not copy from template
if !os.exists("${args.path}/cfg") {
mut template_cfg := template_path.dir_get("cfg")!
template_cfg.copy(dest:"${args.path}/cfg")!
if !os.exists('${args.path}/cfg') {
mut template_cfg := template_path.dir_get('cfg')!
template_cfg.copy(dest: '${args.path}/cfg')!
}
if !os.exists("${args.path}/docs") {
mut template_cfg := template_path.dir_get("docs")!
template_cfg.copy(dest:"${args.path}/docs")!
if !os.exists('${args.path}/docs') {
mut template_cfg := template_path.dir_get('docs')!
template_cfg.copy(dest: '${args.path}/docs')!
}
mut myconfig := load_config('${args.path}/cfg')!
mut myconfig:=load_config("${args.path}/cfg")!
if myconfig.main.name.len==0{
myconfig.main.name = myconfig.main.base_url.trim_space().trim("/").trim_space()
if myconfig.main.name.len == 0 {
myconfig.main.name = myconfig.main.base_url.trim_space().trim('/').trim_space()
}
if args.name == '' {
args.name = myconfig.main.name
}
}
if args.nameshort.len == 0 {
args.nameshort = args.name
}
}
args.nameshort = texttools.name_fix(args.nameshort)
mut ds := DocSite{
name: args.name
url: args.url
path_src: pathlib.get_dir(path: args.path, create: false)!
name: args.name
url: args.url
path_src: pathlib.get_dir(path: args.path, create: false)!
path_build: f.path_build
// path_publish: pathlib.get_dir(path: args.publish_path, create: true)!
args: args
config:myconfig
args: args
config: myconfig
}
f.sites << &ds
@@ -201,9 +192,9 @@ pub mut:
}
pub fn (mut site DocSite) error(args ErrorArgs) {
// path2 := pathlib.get(args.path)
e := SiteError{
path: args.path
// path2 := pathlib.get(args.path)
e := SiteError{
path: args.path
msg: args.msg
cat: args.cat
}
@@ -222,21 +213,19 @@ pub fn (mut site DocSite) generate() ! {
// retry: 0
// )!
// Now copy all directories that exist in src to build
for item in ["src","static","cfg"]{
if os.exists("${site.path_src.path}/${item}"){
mut aa:= site.path_src.dir_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")!
for item in ['src', 'static', 'cfg'] {
if os.exists('${site.path_src.path}/${item}') {
mut aa := site.path_src.dir_get(item)!
aa.copy(dest: '${site.path_build.path}/${item}')!
}
}
for item in ["docs"]{
if os.exists("${site.path_src.path}/${item}"){
mut aa:= site.path_src.dir_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}",delete:true)!
for item in ['docs'] {
if os.exists('${site.path_src.path}/${item}') {
mut aa := site.path_src.dir_get(item)!
aa.copy(dest: '${site.path_build.path}/${item}', delete: true)!
}
}
}
fn (mut site DocSite) template_install() ! {
@@ -245,22 +234,26 @@ fn (mut site DocSite) template_install() ! {
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut template_path := r.patho()!
//always start from template first
for item in ["src","static","cfg"]{
mut aa:= template_path.dir_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}",delete:true)!
// always start from template first
for item in ['src', 'static', 'cfg'] {
mut aa := template_path.dir_get(item)!
aa.copy(dest: '${site.path_build.path}/${item}', delete: true)!
}
for item in ['package.json', 'sidebars.ts', 'tsconfig.json','docusaurus.config.ts'] {
for item in ['package.json', 'sidebars.ts', 'tsconfig.json', 'docusaurus.config.ts'] {
src_path := os.join_path(template_path.path, item)
dest_path := os.join_path(site.path_build.path, item)
os.cp(src_path, dest_path) or { return error('Failed to copy ${item} to build path: ${err}') }
os.cp(src_path, dest_path) or {
return error('Failed to copy ${item} to build path: ${err}')
}
}
for item in ['.gitignore'] {
src_path := os.join_path(template_path.path, item)
dest_path := os.join_path(site.path_src.path, item)
os.cp(src_path, dest_path) or { return error('Failed to copy ${item} to source path: ${err}') }
os.cp(src_path, dest_path) or {
return error('Failed to copy ${item} to source path: ${err}')
}
}
cfg := site.config
@@ -269,30 +262,27 @@ fn (mut site DocSite) template_install() ! {
build := $tmpl('templates/build.sh')
build_dev := $tmpl('templates/build_dev.sh')
mut develop_ := site.path_build.file_get_new("develop.sh")!
develop_.template_write(develop,true)!
mut develop_ := site.path_build.file_get_new('develop.sh')!
develop_.template_write(develop, true)!
develop_.chmod(0o700)!
mut build_ := site.path_build.file_get_new("build.sh")!
build_.template_write(build,true)!
mut build_ := site.path_build.file_get_new('build.sh')!
build_.template_write(build, true)!
build_.chmod(0o700)!
mut build_dev_ := site.path_build.file_get_new("build_dev.sh")!
build_dev_.template_write(build_dev,true)!
mut build_dev_ := site.path_build.file_get_new('build_dev.sh')!
build_dev_.template_write(build_dev, true)!
build_dev_.chmod(0o700)!
mut develop2_ := site.path_src.file_get_new("develop.sh")!
develop2_.template_write(develop,true)!
mut develop2_ := site.path_src.file_get_new('develop.sh')!
develop2_.template_write(develop, true)!
develop2_.chmod(0o700)!
mut build2_ := site.path_src.file_get_new("build.sh")!
build2_.template_write(build,true)!
mut build2_ := site.path_src.file_get_new('build.sh')!
build2_.template_write(build, true)!
build2_.chmod(0o700)!
mut build_dev2_ := site.path_src.file_get_new("build_dev.sh")!
build_dev2_.template_write(build_dev,true)!
build_dev2_.chmod(0o700)!
mut build_dev2_ := site.path_src.file_get_new('build_dev.sh')!
build_dev2_.template_write(build_dev, true)!
build_dev2_.chmod(0o700)!
}

View File

@@ -22,7 +22,7 @@ pub mut:
// publish_path string
build_path string
production bool
update bool
update bool
}
pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {

View File

@@ -7,7 +7,10 @@ import freeflowuniverse.herolib.installers.web.bun
fn (mut site DocusaurusFactory) template_install(update bool) ! {
mut gs := gittools.new()!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git',pull:update)!
mut r := gs.get_repo(
url: 'https://github.com/freeflowuniverse/docusaurus_template.git'
pull: update
)!
mut template_path := r.patho()!
for item in ['package.json', 'sidebars.ts', 'tsconfig.json'] {

8
manual/create_tag.md Normal file
View File

@@ -0,0 +1,8 @@
## how to tag a version and push
```bash
cd ~/code/github/freeflowuniverse/herolib
git tag -a v1.0.4 -m "all CI is now working"
git add . -A ; git commit -m ... ; git pull ; git push origin v1.0.4
```

35
release.sh Executable file
View File

@@ -0,0 +1,35 @@
#!/bin/bash
# Release helper for herolib:
#   1. shows the latest published GitHub release,
#   2. asks for a new X.Y.Z version,
#   3. rewrites the version string in cli/hero.v,
#   4. commits, pushes, and creates/pushes the annotated tag.
set -e

# Fetch the tag name of the latest published GitHub release.
get_latest_release() {
    curl --silent "https://api.github.com/repos/freeflowuniverse/herolib/releases/latest" \
        | grep '"tag_name":' \
        | sed -E 's/.*"([^"]+)".*/\1/'
}

# Show current version
echo "Current latest release: $(get_latest_release)"

# Ask for new version
read -p "Enter new version (e.g., 1.0.4): " new_version

# Validate version format X.Y.Z
if [[ ! $new_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    echo "Error: Version must be in format X.Y.Z (e.g., 1.0.4)"
    exit 1
fi

# Update version in hero.v.
# NOTE: use [0-9][0-9]* instead of [0-9]\+ — BSD/macOS sed (basic regex)
# does not support \+, so the original pattern silently matched nothing
# on macOS even though the -i.bak form was chosen for macOS compatibility.
sed -i.bak "s/version: '[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*'/version: '$new_version'/" cli/hero.v
rm -f cli/hero.v.bak

# Commit changes (set -e aborts the script if any git step fails)
git add . -A
git commit -m "chore: bump version to $new_version"
git pull
git push

# Create and push tag
git tag -a "v$new_version" -m "Release version $new_version"
git push origin "v$new_version"

echo "Release v$new_version created and pushed!"

103
release.vsh Executable file
View File

@@ -0,0 +1,103 @@
#!/usr/bin/env -S v run
// Release helper for herolib:
//   1. shows the latest published GitHub release,
//   2. asks for a new X.Y.Z version,
//   3. rewrites the version line in cli/hero.v (preserving its indentation),
//   4. commits, pushes, and creates/pushes the annotated tag.

import os
import net.http
import x.json2 as json
import regex

struct GithubRelease {
	tag_name string
}

// get_latest_release fetches the tag name of the latest published GitHub release.
fn get_latest_release() !string {
	url := 'https://api.github.com/repos/freeflowuniverse/herolib/releases/latest'
	resp := http.get(url)!
	release := json.decode[GithubRelease](resp.body) or {
		return error('Failed to decode GitHub response: ${err}')
	}
	return release.tag_name
}

// run executes a shell command and aborts the script when it fails,
// so a failed commit/push can no longer be reported as a successful release.
fn run(cmd string) {
	res := os.execute(cmd)
	if res.exit_code != 0 {
		eprintln('Command failed (exit ${res.exit_code}): ${cmd}\n${res.output}')
		exit(1)
	}
}

// Show current version
latest_release := get_latest_release() or {
	eprintln('Error getting latest release: ${err}')
	exit(1)
}
println('Current latest release: ${latest_release}')

// Ask for new version
new_version := os.input('Enter new version (e.g., 1.0.4): ')

// Validate version format X.Y.Z
version_re := regex.regex_opt(r'^[0-9]+\.[0-9]+\.[0-9]+$') or {
	eprintln('Error creating regex: ${err}')
	exit(1)
}
if !version_re.matches_string(new_version) {
	eprintln('Error: Version must be in format X.Y.Z (e.g., 1.0.4)')
	exit(1)
}

ourdir := os.dir(@FILE)
hero_v_path := '${ourdir}/cli/hero.v'

// Read hero.v
content := os.read_file(hero_v_path) or {
	eprintln('Error reading ${hero_v_path}: ${err}')
	exit(1)
}

// Find the line holding the version declaration
mut version_line_idx := -1
mut lines := content.split_into_lines()
for i, line in lines {
	if line.contains('version:') {
		version_line_idx = i
		break
	}
}
if version_line_idx == -1 {
	eprintln('Error: Could not find version line in ${hero_v_path}')
	exit(1)
}

// Preserve the file's own indentation of the version line
old_line := lines[version_line_idx]
indent := old_line.all_before('version:')

// Create backup so a failed write can be rolled back
os.cp(hero_v_path, '${hero_v_path}.backup') or {
	eprintln('Error creating backup: ${err}')
	exit(1)
}

// Replace the version line, reusing the captured indentation.
// (The original hardcoded a tab here, leaving `indent` unused and
// potentially corrupting the formatting of hero.v.)
lines[version_line_idx] = "${indent}version: '${new_version}'"

// Write back to file; join_lines() does not append a trailing newline,
// so add one to keep the file POSIX-terminated.
os.write_file(hero_v_path, lines.join_lines() + '\n') or {
	eprintln('Error writing to ${hero_v_path}: ${err}')
	// Restore backup
	os.cp('${hero_v_path}.backup', hero_v_path) or {
		eprintln('Error restoring backup: ${err}')
	}
	exit(1)
}

// Clean up backup
os.rm('${hero_v_path}.backup') or {
	eprintln('Warning: Could not remove backup file: ${err}')
}

// Git operations — abort on the first failing command
run('git add ${hero_v_path}')
run('git commit -m "chore: bump version to ${new_version}"')
run('git pull')
run('git push')
run('git tag -a "v${new_version}" -m "Release version ${new_version}"')
run('git push origin "v${new_version}"')

println('Release v${new_version} created and pushed!')