Merge branch 'main' into development_mahmoud

* main:
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
  the base
This commit is contained in:
2024-12-25 12:31:03 +01:00
72 changed files with 3753 additions and 50 deletions

View File

@@ -31,7 +31,10 @@ jobs:
run: ./install_v.sh
- name: Generate documentation
run: ./doc.vsh
run: |
./doc.vsh
# ls /home/runner/work/herolib/docs
find .
- name: Setup Pages
uses: actions/configure-pages@v3
@@ -39,7 +42,7 @@ jobs:
- name: Upload artifact
uses: actions/upload-pages-artifact@v1
with:
path: "/home/runner/work/crystallib/crystallib/docs"
path: "/home/runner/work/herolib/herolib/docs"
- name: Deploy to GitHub Pages
id: deployment

View File

@@ -41,6 +41,22 @@ jobs:
- name: Setup Herolib
run: ./install_herolib.vsh
- name: Install and Start Redis
run: |
# Import Redis GPG key
curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
# Add Redis repository
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list
# Install Redis
sudo apt-get update
sudo apt-get install -y redis
# Print versions
redis-cli --version
redis-server --version
# Start Redis
sudo systemctl start redis-server
redis-cli ping
- name: Do all the basic tests
run: ./test_basic.vsh

View File

@@ -1,6 +1,8 @@
# herolib
a smaller version of crystallib with only the items we need for hero
a smaller version of herolib with only the items we need for hero
> [documentation here](https://freeflowuniverse.github.io/herolib/)
## automated install

View File

@@ -30,7 +30,7 @@ if additional_args.len > 0 {
}
// Change to the hero directory
hero_dir := os.join_path(os.home_dir(), 'code/github/freeflowuniverse/crystallib/cli/hero')
hero_dir := os.join_path(os.home_dir(), 'code/github/freeflowuniverse/herolib/cli')
os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}') }
// Set HEROPATH based on OS

93
cli/compile_upload.vsh Executable file
View File

@@ -0,0 +1,93 @@
#!/usr/bin/env -S v run
import os
// get_platform_id derives a platform identifier used as the upload
// bucket path segment (e.g. 'linux-arm64', 'macos-i64').
// Returns 'unknown' for any OS/arch combination not listed below.
fn get_platform_id() string {
	host_os := os.user_os()
	cpu_arch := os.uname().machine
	if host_os == 'linux' {
		if cpu_arch in ['aarch64', 'arm64'] {
			return 'linux-arm64'
		}
		if cpu_arch == 'x86_64' {
			return 'linux-i64'
		}
		return 'unknown'
	}
	if host_os == 'macos' {
		if cpu_arch == 'arm64' {
			return 'macos-arm64'
		}
		if cpu_arch == 'x86_64' {
			return 'macos-i64'
		}
		return 'unknown'
	}
	return 'unknown'
}
// read_secrets loads `export KEY=VALUE` lines from the user's secrets
// shell file into the process environment (overwriting existing vars).
// A missing file is silently ignored — this is best-effort.
// Fix: split only on the FIRST '=' so values that themselves contain
// '=' (common in base64 keys) are no longer dropped.
fn read_secrets() ! {
	secret_file := os.join_path(os.home_dir(), 'code/git.ourworld.tf/despiegk/hero_secrets/mysecrets.sh')
	if os.exists(secret_file) {
		println('Reading secrets from ${secret_file}')
		content := os.read_file(secret_file)!
		lines := content.split('\n')
		for line in lines {
			if line.contains('export') {
				// split_nth keeps everything after the first '=' intact
				parts := line.replace('export ', '').split_nth('=', 2)
				if parts.len == 2 {
					key := parts[0].trim_space()
					value := parts[1].trim_space().trim('"').trim("'")
					os.setenv(key, value, true)
				}
			}
		}
	}
}
// s3_configure writes an rclone remote named [b2] into ~/.config/rclone,
// using S3KEYID / S3APPID from the environment (populated via read_secrets).
// Errors if either variable is missing or any file operation fails.
fn s3_configure() ! {
	read_secrets()!
	// Both credentials must be present before we touch the filesystem.
	key_id := os.getenv_opt('S3KEYID') or { return error('S3KEYID is not set') }
	app_key := os.getenv_opt('S3APPID') or { return error('S3APPID is not set') }
	// Make sure the rclone config directory exists.
	rclone_dir := os.join_path(os.home_dir(), '.config/rclone')
	os.mkdir_all(rclone_dir) or { return error('Failed to create rclone directory: ${err}') }
	rclone_conf := os.join_path(rclone_dir, 'rclone.conf')
	config_content := '[b2]
type = b2
account = ${key_id}
key = ${app_key}
hard_delete = true'
	os.write_file(rclone_conf, config_content) or { return error('Failed to write rclone config: ${err}') }
	println('made S3 config on: ${rclone_conf}')
	// Read the file back and echo it so the CI log shows what was written.
	written := os.read_file(rclone_conf) or { return error('Failed to read rclone config: ${err}') }
	println(written)
}
// hero_upload uploads the locally built `hero` binary to the
// b2:threefold/<platform_id>/ bucket path via rclone.
// Requires: `hero` on PATH, and S3KEYID/S3APPID available to s3_configure.
// Note: execute_or_panic aborts the process if an rclone command fails.
fn hero_upload() ! {
	hero_path := os.find_abs_path_of_executable('hero') or { return error("Error: 'hero' command not found in PATH") }
	s3_configure()!
	platform_id := get_platform_id()
	rclone_conf := os.join_path(os.home_dir(), '.config/rclone/rclone.conf')
	println('Uploading hero binary for platform: ${platform_id}')
	// List contents (log visibility only; the listing is not used)
	os.execute_or_panic('rclone --config="${rclone_conf}" lsl b2:threefold/${platform_id}/')
	// Copy hero binary
	os.execute_or_panic('rclone --config="${rclone_conf}" copy "${hero_path}" b2:threefold/${platform_id}/')
}
// Entry point: run the upload and exit non-zero on failure so CI notices.
fn main() {
	hero_upload() or {
		eprintln(err)
		exit(1)
	}
}

View File

@@ -2,7 +2,7 @@ module main
import os
import cli { Command, Flag }
import freeflowuniverse.herolib.hero.cmds
// import freeflowuniverse.herolib.hero.cmds
// import freeflowuniverse.herolib.hero.publishing
import freeflowuniverse.herolib.installers.base
import freeflowuniverse.herolib.ui.console

33
doc.vsh
View File

@@ -9,47 +9,46 @@ println('Formatting code...')
if os.system('v fmt -w ${abs_dir_of_script}/examples') != 0 {
eprintln('Warning: Failed to format examples')
}
if os.system('v fmt -w ${abs_dir_of_script}/herolib') != 0 {
if os.system('v fmt -w ${abs_dir_of_script}/lib') != 0 {
eprintln('Warning: Failed to format herolib')
}
// Clean existing docs
println('Cleaning existing documentation...')
os.rmdir_all('${abs_dir_of_script}/docs') or {}
herolib_path := os.join_path(abs_dir_of_script, 'herolib')
os.rmdir_all('_docs') or {}
os.rmdir_all('docs') or {}
os.rmdir_all('vdocs') or {}
herolib_path := os.join_path(abs_dir_of_script, 'lib')
os.chdir(herolib_path) or {
panic('Failed to change directory to herolib: ${err}')
}
os.rmdir_all('_docs') or {}
os.rmdir_all('docs') or {}
os.rmdir_all('vdocs') or {}
// Generate HTML documentation
println('Generating HTML documentation...')
if os.system('v doc -m -f html . -readme -comments -no-timestamp') != 0 {
if os.system('v doc -m -f html . -readme -comments -no-timestamp -o ../docs') != 0 {
panic('Failed to generate HTML documentation')
}
// Move docs to parent directory
os.rename('_docs', '${abs_dir_of_script}/docs') or {
panic('Failed to move documentation to parent directory: ${err}')
os.chdir(abs_dir_of_script) or {
panic('Failed to change directory to abs_dir_of_script: ${err}')
}
// Generate Markdown documentation
println('Generating Markdown documentation...')
os.rmdir_all('vdocs') or {}
os.mkdir_all('vdocs/v') or {
panic('Failed to create v docs directory: ${err}')
}
os.mkdir_all('vdocs/crystal') or {
panic('Failed to create crystal docs directory: ${err}')
}
if os.system('v doc -m -no-color -f md -o vdocs/v/') != 0 {
panic('Failed to generate V markdown documentation')
}
if os.system('v doc -m -no-color -f md -o vdocs/crystal/') != 0 {
// if os.system('v doc -m -no-color -f md -o ../vdocs/v/') != 0 {
// panic('Failed to generate V markdown documentation')
// }
if os.system('v doc -m -no-color -f md -o vdocs/herolib/') != 0 {
panic('Failed to generate Crystal markdown documentation')
}

View File

@@ -0,0 +1,7 @@
// This is a config file, of course not correct
//the {CAPITALS} will be converted to the name of the argument
keyname : ${myconfig.keyname}
keyid : ''
appkey : ${myconfig.appkey}

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.base
pub struct MyClient[T] {
base.BaseConfig[T]
}
@[params]
pub struct MyConfig {
pub mut:
// the config items which are important to remember
keyname string
keyid string
appkey string @[secret]
}
// EXAMPLE USAGE
mut cl := new('testinstance', keyname: 'somekey', appkey: 'will be secret')!
println(cl.config_get()!)
// now get the client, will give error if it doesn't exist
mut cl2 := get('testinstance')!
println(cl2.config_get()!)
delete('testinstance')!

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.base
pub struct MyClient[T] {
base.BaseConfig[T]
}
@[params]
pub struct MyConfig {
pub mut:
// the config items which are important to remember
keyname string
keyid string
appkey string @[secret]
}
pub fn new(instance string, cfg MyConfig) !MyClient[MyConfig] {
mut self := MyClient[MyConfig]{
type_name: 'myclient'
}
self.init(instance: instance, action: .new)!
self.config_set(cfg)!
return self
}
pub fn get(instance string) !MyClient[MyConfig] {
mut self := MyClient[MyConfig]{
type_name: 'myclient'
}
self.init(instance: instance, action: .get)!
return self
}
pub fn delete(instance string) ! {
mut self := MyClient[MyConfig]{
type_name: 'myclient'
}
self.init(instance: instance, action: .delete)!
}
// EXAMPLE USAGE
mut cl := new('testinstance', keyname: 'somekey', appkey: 'will be secret')!
myconfig := cl.config_get()!
// it should show how the fields are normal, but at back there was encryption/decryption of the field marked secret
println(myconfig)
config_content := $tmpl('aconfigfile.txt')
mut myconfigfile := pathlib.get_file(path: '/tmp/myconfigfile.txt', create: false)!
myconfigfile.write(config_content)!

34
examples/core/base/core_1.vsh Executable file
View File

@@ -0,0 +1,34 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.develop.gittools
pub struct MyClass {
base.Base
pub mut:
descr string
}
// will fetch default context
mut c := base.context()!
mut s := c.session_new()!
println(s)
mut gs := gittools.configure(multibranch: true, root: '/tmp/code', name: 'test')!
// mut gs:=gittools.get(name:"test")!
mut s2 := c.session_latest()!
println(s2)
println(gs)
mut mc := MyClass{
type_name: 'mytype'
instance: 'first'
}
mut mysession := mc.session()!
println(mc)
println(mysession)

View File

@@ -0,0 +1,13 @@
module embedding

// Fix: `time` was referenced by the `date` field below but never
// imported, so the module did not compile.
import time

// Embedder demonstrates V struct embedding: it gains all fields of Embedded.
pub struct Embedder {
	Embedded
}

// Embedded holds the fields shared through embedding.
pub struct Embedded {
	id          int
	related_ids []int
	name        string
	tags        []string
	date        time.Time
}

View File

@@ -0,0 +1,13 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import os
import freeflowuniverse.herolib.core.codeparser
import freeflowuniverse.herolib.core.codemodel { Struct }
code_path := '${os.dir(@FILE)}/embedding.v'
code := codeparser.parse_v(code_path)!
assert code.len == 2
assert code[0] is Struct
embedder_struct := code[0] as Struct
println(embedder_struct.fields.map('${it.name}: ${it.typ.symbol}'))

143
examples/core/db/db_do.v Executable file
View File

@@ -0,0 +1,143 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import time
import freeflowuniverse.herolib.core.smartid
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.core.db
pub struct MyStruct {
db.Base
pub mut:
color string
nr int
}
fn create_struct() MyStruct {
mut m := MyStruct{
name: 'aname'
description: 'a description\ncan be multiline\n1'
gid: smartid.gid(oid_u32: 99, cid_name: 'test') or { panic(err) }
color: 'red'
nr: 8
}
author_gid := smartid.gid(oid_u32: 333, cid_name: 'test') or { panic(err) }
m.params_add('priority:urgent silver') or { panic(err) }
m.params_add('priority:low gold') or { panic(err) }
m.params_add('timing:now gold') or { panic(err) }
m.remark_add(
author: author_gid
content: '
lucky we did do this
can be multiline
'
rtype: .audit
) or { panic(err) }
m.remark_add(
content: '
another one
'
rtype: .log
params: 'color:red urgent'
) or { panic(err) }
m.remark_add(
content: 'hiii'
rtype: .log
params: 'color:red urgent'
) or { panic(err) }
return m
}
// example of how we should implement binary serialization
pub fn (o MyStruct) serialize_binary() ![]u8 {
mut b := o.bin_encoder()!
b.add_string(o.color)
b.add_int(o.nr)
return b.data
}
pub fn load(data []u8) !MyStruct {
mut d, base := db.base_decoder(data)!
mut o := MyStruct{
Base: base
}
o.color = d.get_string()
o.nr = d.get_int()
return o
}
fn do1() ! {
m := create_struct()
data := m.serialize_binary()!
m2 := load(data)!
assert m.gid == m2.gid
println(m2)
// assert m.params.params.len == m2.params.params.len
// for id, _ in m.params.params {
// assert m.params.params[id] == m2.params.params[id]
// }
// assert m.params.args.len == m2.params.args.len
// mut args_map := map[string]bool{}
// for id, _ in m.params.args {
// args_map[m.params.args[id]] = true
// }
// for a in m2.params.args {
// assert args_map[a] == true
// }
// assert m.version_base == m2.version_base
// assert m.serialization_type == m2.serialization_type
// assert m.name == m2.name
// assert m.description == m2.description
// assert m.remarks.remarks.len == m2.remarks.remarks.len
// for id, _ in m.remarks.remarks {
// assert m.remarks.remarks[id].content == m2.remarks.remarks[id].content
// assert m.remarks.remarks[id].time == m2.remarks.remarks[id].time
// assert m.remarks.remarks[id].rtype == m2.remarks.remarks[id].rtype
// a1 := m.remarks.remarks[id].author or {
// if _ := m2.remarks.remarks[id].author {
// panic('author is in original object, but not in deserialized object')
// }
// continue
// }
// a2 := m2.remarks.remarks[id].author or {
// panic('author is in deserialized object, but not in original object')
// }
// assert a1 == a2
// }
}
// fn test_find_remark() {
// m := create_struct()
// mut r := m.remarks.find_remark(time_to: ourtime.now())!
// assert r.len == 3
// r = m.remarks.find_remark(params_filter: 'color:red*')!
// assert r.len == 2
// r = m.remarks.find_remark(
// time_from: ourtime.OurTime{
// unix: i64(time.now().unix_time()) - time.second
// }
// )!
// assert r.len == 3
// a := smartid.gid(oid_u32: 333, cid_name: 'test')!
// r = m.remarks.find_remark(author: a)!
// assert r.len == 1
// }
do1()!

45
examples/core/dbfs/dbfs1.vsh Executable file
View File

@@ -0,0 +1,45 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.dbfs
import time
import os
data_dir := '/tmp/db'
os.rmdir_all(data_dir) or {}
mut dbcollection := dbfs.get(contextid: 1, dbpath: data_dir, secret: '123456')!
mut db := dbcollection.db_create(name: 'db_a', encrypted: true, withkeys: true)!
id := db.set(key: 'a', value: 'bbbb')!
assert 'bbbb' == db.get(key: 'a')!
id2 := db.set(key: 'a', value: 'bbbb2')!
assert 'bbbb2' == db.get(key: 'a')!
assert id == id2
assert id == 1
id3 := db.set(key: 'b', value: 'bbbb3')!
assert 'bbbb3' == db.get(key: 'b')!
assert id3 == id2 + 1
assert db.exists(key: 'a')!
assert db.exists(key: 'b')!
assert db.exists(id: id2)!
assert db.exists(id: id3)!
id3_exsts := db.exists(id: id3 + 1)!
println(id3 + 1)
assert id3_exsts == false
for i in 3 .. 100 {
id4 := db.set(key: 'a${i}', value: 'b${i}')!
println('${i} --> ${id4}')
assert i == id4
}
db.delete(key: 'a')!
assert db.exists(key: 'a')! == false
assert db.exists(id: id2)! == false
db.delete(id: 50)!
assert db.exists(key: 'a50')! == false
assert db.exists(id: 50)! == false

5
examples/core/generate.vsh Executable file
View File

@@ -0,0 +1,5 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.generator.installer
installer.scan('~/code/github/freeflowuniverse/herolib/herolib')!

View File

@@ -0,0 +1,4 @@
module gitea_client
struct GiteaClient {
}

View File

@@ -0,0 +1,83 @@
module dagu
// import os
import freeflowuniverse.herolib.clients.httpconnection
import os
struct GiteaClient[T] {
base.Base[T]
mut:
connection &httpconnection.HTTPConnection
}
struct Config {
play.ConfigBase[T]
url string
}
//
pub fn get(args PlayArgs) GiteaClient[Config] {
mut client := GiteaClient[Config]{}
client.init(args)!
return client
}
//
pub fn heroplay(args PlayBookAddArgs) ! {
// make session for configuring from heroscript
mut session := play.session_new(session_name: 'config')!
session.playbook_add(path: args.path, text: args.text, git_url: args.git_url)!
for mut action in session.plbook.find(filter: 'gitea_client.define')! {
mut p := action.params
instance := p.get_default('instance', 'default')!
mut cl := get(instance: instance)!
mut cfg := cl.config()!
mut config := p.decode[T]()!
cl.config_save()!
}
}
//
pub fn (self GiteaClient[T]) config_interactive() ! {
mut myui := ui.new()!
console.clear()
println('
## Configure B2 Client')
println('========================
')
mut cfg := self.config()!
self.instance = myui.ask_question(
question: 'name for B2 (backblaze) client'
default: self.instance
)!
cfg.description = myui.ask_question(
question: 'description'
minlen: 0
default: cfg.description
)!
cfg.keyid = myui.ask_question(
question: 'keyid e.g. 003e2a7be6357fb0000000001'
minlen: 5
default: cfg.keyid
)!
cfg.appkey = myui.ask_question(
question: 'appkey e.g. K008UsdrYOAou2ulBHA8p4KBe/dL2n4'
minlen: 5
default: cfg.appkey
)!
buckets := self.list_buckets()!
bucket_names := buckets.map(it.name)
cfg.bucketname = myui.ask_dropdown(
question: 'choose default bucket name'
items: bucket_names
)!
self.config_save()!
}

View File

@@ -0,0 +1,69 @@
module gitea_client
import json
import net.http
// Repository operations
// create_repo creates a repository for the authenticated user and
// returns the raw JSON response body.
// Fix: `private` is now emitted as a real JSON boolean. The previous
// map[string]string forced it through `.str()` into the string
// "true"/"false", which is not a JSON boolean on the wire.
pub fn (mut client GiteaClient) create_repo(name string, description string, private bool) !string {
	// json.encode on the string fields handles quoting/escaping of user input.
	payload := '{"name":${json.encode(name)},"description":${json.encode(description)},"private":${private}}'
	resp := client.connection.post('/api/v1/user/repos', payload)!
	return resp
}
// get_repo fetches a single repository (`owner`/`repo`) and returns
// the raw JSON response body.
pub fn (mut client GiteaClient) get_repo(owner string, repo string) !string {
	resp := client.connection.get('/api/v1/repos/${owner}/${repo}')!
	return resp
}
// list_repos lists the authenticated user's repositories (raw JSON).
pub fn (mut client GiteaClient) list_repos() !string {
	resp := client.connection.get('/api/v1/user/repos')!
	return resp
}
// User operations
// get_user returns the authenticated user's profile (raw JSON).
pub fn (mut client GiteaClient) get_user() !string {
	resp := client.connection.get('/api/v1/user')!
	return resp
}
// list_users lists all users (raw JSON).
// NOTE(review): this hits an /admin/ endpoint — presumably requires an
// admin token; confirm against the Gitea instance configuration.
pub fn (mut client GiteaClient) list_users() !string {
	resp := client.connection.get('/api/v1/admin/users')!
	return resp
}
// Organization operations
// create_org creates a new organization named `name` and returns the
// raw JSON response body.
pub fn (mut client GiteaClient) create_org(name string, description string) !string {
	data := {
		'username': name
		'description': description
	}
	resp := client.connection.post('/api/v1/orgs', json.encode(data))!
	return resp
}
// list_orgs lists all visible organizations (raw JSON).
pub fn (mut client GiteaClient) list_orgs() !string {
	resp := client.connection.get('/api/v1/orgs')!
	return resp
}
// Issue operations
// create_issue opens a new issue on `owner`/`repo` with the given
// title and body; returns the raw JSON response body.
pub fn (mut client GiteaClient) create_issue(owner string, repo string, title string, body string) !string {
	data := {
		'title': title
		'body': body
	}
	resp := client.connection.post('/api/v1/repos/${owner}/${repo}/issues', json.encode(data))!
	return resp
}
// list_issues lists issues for `owner`/`repo` (raw JSON).
pub fn (mut client GiteaClient) list_issues(owner string, repo string) !string {
	resp := client.connection.get('/api/v1/repos/${owner}/${repo}/issues')!
	return resp
}

View File

@@ -0,0 +1 @@
module gitea_client

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import os
import json
import freeflowuniverse.herolib.core.openapi.gen
const spec_path = '${os.dir(@FILE)}/openapi.json'
mod := gen.generate_client_module(
api_name: 'Gitea'
)!
mod.write_v('${os.dir(@FILE)}/gitea_client',
overwrite: true
)!

View File

@@ -0,0 +1,18 @@
module openrpc_client
import freeflowuniverse.herolib.data.jsonrpc { JsonRpcRequest }
import net.websocket
struct Client {
mut:
ws_client &websocket.Client
}
// new creates a websocket client targeting localhost:8000 and wraps it
// in a Client. Errors if the websocket client cannot be created.
// Fix: the original declared a non-result return type `Client` while
// using `!` propagation and returned nothing — it could not compile.
// This now matches the working petstore_client sibling implementation.
// NOTE(review): new_client builds the client; the connection itself is
// presumably established later by the caller — confirm.
pub fn new() !Client {
	address := 'localhost:8000'
	ws_client := websocket.new_client(address)!
	return Client{
		ws_client: ws_client
	}
}
// send_rpc serialises the request to JSON and writes it over the
// websocket, propagating any write error to the caller.
// Fix: the fallible write_string result was previously not propagated
// even though the function signature is `!`.
fn (mut client Client) send_rpc(rpc JsonRpcRequest) ! {
	client.ws_client.write_string(rpc.to_json())!
}

View File

@@ -0,0 +1,26 @@
module openrpc_client
// hello prints a greeting to stdout; mock smoke-test method.
pub fn (client Client) hello() {
	println('hello')
}
// ping returns the literal string 'pong'; mock liveness check.
pub fn (client Client) ping() string {
	return 'pong'
}
// AnimalArgs bundles the named parameters accepted by create_animal.
@[params]
pub struct AnimalArgs {
	name    string // name of the animal
	species string // species of the animal
}
// create_animal adds an animal with the provided arguments to the database
// NOTE(review): mock implementation — only prints; nothing is persisted.
pub fn (client Client) create_animal(args AnimalArgs) {
	println('Creating animal `${args.name}`')
}
// get_animal finds an animal in the database with the provided name
// returns the animal, an animal in the db with a matching name
// NOTE(review): mock — ignores `name` and returns a zero-value Animal.
pub fn (client Client) get_animal(name string) Animal {
	return Animal{}
}

View File

@@ -0,0 +1,7 @@
module openrpc_client
pub struct Animal {
name string
species string
created_at string
}

View File

@@ -0,0 +1,153 @@
{
"openrpc": "1.0.0-rc1",
"info": {
"version": "1.0.0",
"title": "Petstore Expanded",
"description": "A sample API that uses a petstore as an example to demonstrate features in the OpenRPC specification",
"termsOfService": "https://open-rpc.org",
"contact": {
"name": "OpenRPC Team",
"email": "doesntexist@open-rpc.org",
"url": "https://open-rpc.org"
},
"license": {
"name": "Apache 2.0",
"url": "https://www.apache.org/licenses/LICENSE-2.0.html"
}
},
"servers": [
{
"url": "http://petstore.open-rpc.org"
}
],
"methods": [
{
"name": "get_pets",
"description": "Returns all pets from the system that the user has access to\nNam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tincidunt varius justo. In hac habitasse platea dictumst. Integer at adipiscing ante, a sagittis ligula. Aenean pharetra tempor ante molestie imperdiet. Vivamus id aliquam diam.",
"params": [
{
"name": "tags",
"description": "tags to filter by",
"schema": {
"type": "array",
"items": {
"type": "string"
}
}
},
{
"name": "limit",
"description": "maximum number of results to return",
"schema": {
"type": "integer"
}
}
],
"result": {
"name": "pet",
"description": "pet response",
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Pet"
}
}
}
},
{
"name": "create_pet",
"description": "Creates a new pet in the store. Duplicates are allowed",
"params": [
{
"name": "newPet",
"description": "Pet to add to the store.",
"schema": {
"$ref": "#/components/schemas/NewPet"
}
}
],
"result": {
"name": "pet",
"description": "the newly created pet",
"schema": {
"$ref": "#/components/schemas/Pet"
}
}
},
{
"name": "get_pet_by_id",
"description": "Returns a user based on a single ID, if the user does not have access to the pet",
"params": [
{
"name": "id",
"description": "ID of pet to fetch",
"required": true,
"schema": {
"type": "integer"
}
}
],
"result": {
"name": "pet",
"description": "pet response",
"schema": {
"$ref": "#/components/schemas/Pet"
}
}
},
{
"name": "delete_pet_by_id",
"description": "deletes a single pet based on the ID supplied",
"params": [
{
"name": "id",
"description": "ID of pet to delete",
"required": true,
"schema": {
"type": "integer"
}
}
],
"result": {
"name": "pet",
"description": "pet deleted",
"schema": {}
}
}
],
"components": {
"schemas": {
"Pet": {
"allOf": [
{
"$ref": "#/components/schemas/NewPet"
},
{
"required": [
"id"
],
"properties": {
"id": {
"type": "integer"
}
}
}
]
},
"NewPet": {
"type": "object",
"required": [
"name"
],
"properties": {
"name": {
"type": "string"
},
"tag": {
"type": "string"
}
}
}
}
}
}

View File

@@ -0,0 +1,2 @@
This is a mock client to the Pet Store JSON-RPC API, described by the PetStore OpenRPC Document.
The client has comments that are copied from the PetStore OpenRPC Document to demonstrate that document generation from the client results in a similar OpenRPC Document.

View File

@@ -0,0 +1,19 @@
module petstore_client
import freeflowuniverse.herolib.data.jsonrpc { JsonRpcRequest }
import net.websocket
struct Client {
mut:
ws_client &websocket.Client
}
// new creates a websocket client targeting localhost:8000 and wraps it
// in a Client. Errors if the websocket client cannot be created.
// NOTE(review): new_client builds the client; connection establishment
// presumably happens later — confirm against callers.
pub fn new() !Client {
	address := 'localhost:8000'
	ws_client := websocket.new_client(address)!
	return Client{ws_client}
}
// send_rpc serialises the request to JSON and writes it to the websocket.
// NOTE(review): the result of write_string is not propagated despite the
// `!` signature — confirm whether write errors should surface here.
fn (mut client Client) send_rpc[T](rpc JsonRpcRequest[T]) ! {
	client.ws_client.write_string(rpc.to_json())
}

View File

@@ -0,0 +1,36 @@
module petstore_client
import freeflowuniverse.herolib.data.jsonrpc
// get_pets finds pets in the system that the user has access to by tags and within a limit
// - tags: tags to filter by
// - limit: maximum number of results to return
// returns pet_list, all pets from the system that matches the tags
// NOTE(review): mock — ignores both arguments and returns an empty list.
pub fn (mut client Client) get_pets(tags []string, limit int) []Pet {
	return []Pet{}
}
// NewPet bundles the named parameters accepted by create_pet.
@[params]
struct NewPet {
	name string @[required] // name of the pet (mandatory)
	tag  string // optional tag, helps finding pet
}
// create_pet creates a new pet in the store. Duplicates are allowed.
// - new_pet: Pet to add to the store.
// returns pet, the newly created pet
// NOTE(review): mock — ignores the argument, returns a zero-value Pet.
pub fn (mut client Client) create_pet(new_pet NewPet) Pet {
	return Pet{}
}
// get_pet_by_id gets a pet based on a single ID, if the user has access to the pet
// - id: ID of pet to fetch
// returns pet, pet response
// NOTE(review): mock — ignores `id`, returns a zero-value Pet.
pub fn (mut client Client) get_pet_by_id(id int) Pet {
	return Pet{}
}
// delete_pet_by_id deletes a single pet based on the ID supplied
// - id: ID of pet to delete
// returns pet, pet deleted
// NOTE(review): mock — no-op body.
pub fn (mut client Client) delete_pet_by_id(id int) {}

View File

@@ -0,0 +1,8 @@
module petstore_client
// a pet struct that represents a pet
struct Pet {
	name string // name of the pet
	tag  string // a tag of the pet, helps finding pet
	id   int // unique identifier
}

View File

@@ -0,0 +1,132 @@
{
"openrpc": "1.0.0",
"info": {
"title": "PetStore API",
"version": "1.0.0"
},
"methods": [
{
"name": "petstore_client.GetPets",
"description": "finds pets in the system that the user has access to by tags and within a limit",
"params": [
{
"name": "tags",
"description": "tags to filter by",
"schema": {
"type": "array",
"items": {
"type": "string"
}
}
},
{
"name": "limit",
"description": "maximum number of results to return",
"schema": {
"type": "integer"
}
}
],
"result": {
"name": "pet_list",
"description": "all pets from the system, that mathes the tags",
"schema": {
"$ref": "#\/components\/schemas\/Pet"
}
}
},
{
"name": "petstore_client.CreatePet",
"description": "creates a new pet in the store. Duplicates are allowed.",
"params": [
{
"name": "new_pet",
"description": "Pet to add to the store.",
"schema": {
"$ref": "#\/components\/schemas\/NewPet"
}
}
],
"result": {
"name": "pet",
"description": "the newly created pet",
"schema": {
"$ref": "#\/components\/schemas\/Pet"
}
}
},
{
"name": "petstore_client.GetPetById",
"description": "gets a pet based on a single ID, if the user has access to the pet",
"params": [
{
"name": "id",
"description": "ID of pet to fetch",
"schema": {
"type": "integer"
}
}
],
"result": {
"name": "pet",
"description": "pet response",
"schema": {
"$ref": "#\/components\/schemas\/Pet"
}
}
},
{
"name": "petstore_client.DeletePetById",
"description": "deletes a single pet based on the ID supplied",
"params": [
{
"name": "id",
"description": "ID of pet to delete",
"schema": {
"type": "integer"
}
}
],
"result": {
"name": "pet",
"description": "pet deleted",
"schema": {
"type": "null"
}
}
}
],
"components": {
"schemas": {
"NewPet": {
"title": "NewPet",
"properties": {
"name": {
"type": "string"
},
"tag": {
"type": "string"
}
}
},
"Pet": {
"title": "Pet",
"description": "a pet struct that represents a pet",
"properties": {
"name": {
"description": "name of the pet",
"type": "string"
},
"tag": {
"description": "a tag of the pet, helps finding pet",
"type": "string"
},
"id": {
"description": "unique indentifier",
"type": "integer"
}
}
}
}
}
}

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import os
const testpath3 = os.dir(@FILE) + '/../../..'
mut p := pathlib.get_dir(path: testpath3)!
// IMPORTANT TO HAVE r'... the r in front
pl := p.list(regex: [r'.*\.v$'])!
println(pl)

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.core.pathlib
import os

// Demonstrates pathlib file-metadata helpers using this script itself.
// Fix: removed the unused `const testpath4`, whose relative path was
// also malformed (missing the '/' separator: `os.dir(@FILE) + '../../'`).
mut p := pathlib.get_file(path: os.dir(@FILE) + '/paths_md5.vsh')!

md5hash := p.md5hex()!
println('file md5 hash: ${md5hash}')

file_size_bytes := p.size()!
println('size in bytes: ${file_size_bytes}')

file_size_kbytes := p.size_kb()!
println('size in kb: ${file_size_kbytes}')

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.data.paramsparser
import os
const testpath3 = os.dir(@FILE) + '/../..'
// if we return True then it means the dir or file is processed
fn filter_1(mut path pathlib.Path, mut params paramsparser.Params) !bool {
if path.is_dir() {
if path.path.ends_with('.dSYM') {
return false
}
return true
}
if path.path.ends_with('.vsh') {
return true
}
return false
}
fn executor_1(mut patho pathlib.Path, mut params paramsparser.Params) !paramsparser.Params {
if patho.is_file() {
// println( " - exec: $patho.path" )
params.add(patho.path)!
}
return params
}
mut p := pathlib.get_dir(path: testpath3)!
mut params := paramsparser.Params{}
mut params2 := p.scan(mut params, [filter_1], [executor_1])!
println(params2)
assert params2.args.len == 4

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.core.pathlib
import os
const testpath4 = os.dir(@FILE) + '/paths_sha256.vsh'
mut p := pathlib.get_file(path: testpath4)!
s := p.sha256()!
println(s)

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.crypt.secrets
secrets.delete_passwd()!
r := secrets.encrypt('aaa')!
println(r)
assert 'aaa' == secrets.decrypt(r)!

5
generate.vsh Executable file
View File

@@ -0,0 +1,5 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.code.generator.generic
generic.scan(path:"~/code/github/freeflowuniverse/herolib/lib/installers",force:true, add:true)!

View File

View File

@@ -19,4 +19,3 @@ fn (mut h HTTPConnection) header(req Request) Header {
return h.default_header.join(header)
}

View File

@@ -3,7 +3,6 @@ module httpconnection
import net.http
import freeflowuniverse.herolib.clients.redisclient { RedisURL }
@[params]
pub struct HTTPConnectionArgs {
pub:
@@ -34,6 +33,4 @@ pub fn new(args HTTPConnectionArgs) !&HTTPConnection {
base_url: args.url.trim('/')
}
return &conn
}

View File

@@ -42,7 +42,7 @@ pub fn install(args_ InstallArgs) ! {
if osal.is_linux() {
osal.package_install('redis-server')!
} else {
osal.package_install('redis')!/Users/despiegk1/code/github/freeflowuniverse/herolib/herolib/installers/db/redis/template
osal.package_install('redis')! / Users / despiegk1 / code / github / freeflowuniverse / herolib / herolib / installers / db / redis / template
}
}
osal.execute_silent('mkdir -p ${args.datadir}')!

View File

@@ -23,7 +23,6 @@ pub fn ensure_hero_dirs() string {
return herodir()
}
// root dir for our hero environment
pub fn herodir() string {
return shell_expansion('~/hero')
@@ -53,7 +52,6 @@ pub fn path_ensure(s string) string {
return path
}
// get path underneath the hero root directory
pub fn hero_path(s string) string {
path := shell_expansion(s).trim_left(' /')
@@ -61,7 +59,6 @@ pub fn hero_path(s string) string {
return full_path
}
// return path and ensure it exists and return the path
pub fn hero_path_ensure(s string) string {
path := hero_path(s)

View File

@@ -0,0 +1,214 @@
# Git Tools Module
A comprehensive Git management module for V that provides high-level abstractions for Git operations, repository management, and automation of common Git workflows.
## Features
- Repository management (clone, load, delete)
- Branch operations (create, switch, checkout)
- Tag management (create, switch, verify)
- Change tracking and commits
- Remote operations (push, pull)
- SSH key integration
- Submodule support
- Repository status tracking
- Light cloning option for large repositories
## Basic Usage
### Repository Management
```v
import freeflowuniverse.herolib.develop.gittools
// Initialize with code root directory
mut gs := gittools.new(coderoot: '~/code')!
// Clone a repository
mut repo := gs.clone(GitCloneArgs{
url: 'git@github.com:username/repo.git'
sshkey: 'deploy_key' // Optional SSH key name
})!
// Or get existing repository
mut repo := gs.get_repo(name: 'existing_repo')!
// Delete repository
repo.delete()!
```
### Branch Operations
```v
// Create and switch to new branch
repo.branch_create('feature-branch')!
repo.branch_switch('feature-branch')!
// Check status and commit changes
if repo.has_changes() {
repo.commit('feat: Add new feature')!
repo.push()!
}
// Pull latest changes
repo.pull()!
// Pull with submodules
repo.pull(submodules: true)!
```
### Tag Management
```v
// Create a new tag
repo.tag_create('v1.0.0')!
// Switch to tag
repo.tag_switch('v1.0.0')!
// Check if tag exists
exists := repo.tag_exists('v1.0.0')!
// Get tag information
if repo.status_local.tag == 'v1.0.0' {
// Currently on tag v1.0.0
}
```
## Advanced Features
### SSH Key Integration
```v
// Clone with SSH key
mut repo := gs.clone(GitCloneArgs{
url: 'git@github.com:username/repo.git'
sshkey: 'deploy_key'
})!
// Set SSH key for existing repository
repo.set_sshkey('deploy_key')!
```
### Repository Status
```v
// Update repository status
repo.status_update()!
// Check various status conditions
if repo.need_commit() {
// Has uncommitted changes
}
if repo.need_push_or_pull() {
// Has unpushed/unpulled changes
}
if repo.need_checkout() {
// Needs to checkout different branch/tag
}
```
### Change Management
```v
// Check for changes
if repo.has_changes() {
// Handle changes
}
// Reset all changes
repo.reset()!
// or
repo.remove_changes()!
// Update submodules
repo.update_submodules()!
```
## Repository Configuration
### GitRepo Structure
```v
pub struct GitRepo {
pub mut:
provider string // e.g., github.com
account string // Git account name
name string // Repository name
status_remote GitRepoStatusRemote // Remote repository status
status_local GitRepoStatusLocal // Local repository status
status_wanted GitRepoStatusWanted // Desired status
config GitRepoConfig // Repository configuration
deploysshkey string // SSH key for git operations
}
```
### Status Tracking
```v
// Remote Status
pub struct GitRepoStatusRemote {
pub mut:
ref_default string // Default branch hash
branches map[string]string // Branch name -> commit hash
tags map[string]string // Tag name -> commit hash
}
// Local Status
pub struct GitRepoStatusLocal {
pub mut:
branches map[string]string // Branch name -> commit hash
branch string // Current branch
tag string // Current tag
}
// Desired Status
pub struct GitRepoStatusWanted {
pub mut:
branch string
tag string
url string // Remote repository URL
readonly bool // Prevent push/commit operations
}
```
## Error Handling
The module provides comprehensive error handling:
```v
// Clone with error handling
mut repo := gs.clone(url: 'invalid_url') or {
println('Clone failed: ${err}')
return
}
// Commit with error handling
repo.commit('feat: New feature') or {
if err.msg().contains('nothing to commit') {
println('No changes to commit')
} else {
println('Commit failed: ${err}')
}
return
}
```
## Testing
Run the test suite:
```bash
v -enable-globals test herolib/develop/gittools/tests/
```
## Notes
- SSH keys should be properly configured in `~/.ssh/`
- For readonly repositories, all local changes will be reset on pull
- Light cloning option (`config.light: true`) creates shallow clones
- Repository status is automatically cached and updated
- Submodules are handled recursively when specified
- All operations maintain repository consistency

View File

@@ -0,0 +1,129 @@
module gittools
import os
import json
import freeflowuniverse.herolib.core.pathlib
__global (
gsinstances map[string]&GitStructure
)
// Clear the in-memory GitStructure instance cache (does not touch redis).
pub fn reset() {
	gsinstances = map[string]&GitStructure{} // the key is the redis_key (hash of coderoot)
}
@[params]
pub struct GitStructureArgsNew {
pub mut:
coderoot string
light bool = true // If true, clones only the last history for all branches (clone with only 1 level deep)
log bool = true // If true, logs git commands/statements
debug bool = true
ssh_key_name string // name of ssh key to be used when loading the gitstructure
reload bool
}
// Create (or re-create) a GitStructure for the given code root.
// The configuration is serialized and stored in redis (keyed by a hash of
// coderoot), then get() is delegated to so the instance cache is reused.
pub fn new(args_ GitStructureArgsNew) !&GitStructure {
	mut args := args_
	// default code root is ~/code
	if args.coderoot == '' {
		args.coderoot = '${os.home_dir()}/code'
	}
	mut cfg := GitStructureConfig{
		coderoot: args.coderoot
		light: args.light
		log: args.log
		debug: args.debug
		ssh_key_name: args.ssh_key_name
	}
	// Persist the configuration in redis so later get() calls can rebuild it.
	rediskey_ := rediskey(args.coderoot)
	mut redis := redis_get()
	datajson := json.encode(cfg)
	redis.set(rediskey_, datajson)!
	return get(coderoot: args.coderoot, reload: args.reload)
}
@[params]
pub struct GitStructureArgGet {
pub mut:
coderoot string
reload bool
}
// Retrieve a GitStructure instance based on the given arguments.
// Resolution order: in-memory instance cache -> redis-stored config -> new().
pub fn get(args_ GitStructureArgGet) !&GitStructure {
	mut args := args_
	// default code root is ~/code
	if args.coderoot == '' {
		args.coderoot = '${os.home_dir()}/code'
	}
	// reload wipes the redis repo/config caches first, so state is rebuilt from disk
	if args.reload {
		cachereset()!
	}
	rediskey_ := rediskey(args.coderoot)
	// Return existing instance if already created.
	if rediskey_ in gsinstances {
		mut gs := gsinstances[rediskey_] or {
			panic('Unexpected error: key not found in gsinstances')
		}
		if args.reload {
			gs.load()!
		}
		return gs
	}
	// Not cached in memory: rebuild the configuration from redis.
	mut redis := redis_get()
	mut datajson := redis.get(rediskey_) or { '' }
	if datajson == '' {
		// No stored config: only the default coderoot may fall back to new().
		if args_.coderoot == '' {
			return new()!
		}
		return error("can't find repostructure for coderoot: ${args.coderoot}")
	}
	// NOTE(review): a corrupt config silently decodes to the zero value — confirm intended.
	mut config := json.decode(GitStructureConfig, datajson) or { GitStructureConfig{} }
	// Create and load the GitStructure instance.
	mut gs := GitStructure{
		key: rediskey_
		config: config
		coderoot: pathlib.get_dir(path: args.coderoot, create: true)!
	}
	if args.reload {
		gs.load()!
	} else {
		gs.init()!
	}
	gsinstances[rediskey_] = &gs
	return gsinstances[rediskey_] or { panic('bug') }
}
// Remove every cached GitStructure configuration ('git:config:*') from redis.
pub fn configreset() ! {
	mut redis := redis_get()
	for cfg_key in redis.keys('git:config:*')! {
		redis.del(cfg_key)!
	}
}
// Drop all cached repository state ('git:repos:**') from redis, then drop
// the stored configurations as well.
pub fn cachereset() ! {
	mut redis := redis_get()
	for repo_key in redis.keys('git:repos:**')! {
		redis.del(repo_key)!
	}
	configreset()!
}

View File

@@ -0,0 +1,129 @@
module gittools
import freeflowuniverse.herolib.core.pathlib
// GitLocation uniquely identifies a Git repository, its online URL, and its location in the filesystem.
@[heap]
pub struct GitLocation {
pub mut:
provider string // Git provider (e.g., GitHub)
account string // Account name
name string // Repository name
branch_or_tag string // Branch name
path string // Path in the repository (not the filesystem)
anker string // Position in a file
}
//////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////
// Build a GitLocation from a filesystem path inside the code root.
// The path relative to the coderoot must look like provider/account/name[/...];
// anything deeper becomes the in-repo path.
pub fn (mut gs GitStructure) gitlocation_from_path(path string) !GitLocation {
	mut p := pathlib.get(path)
	relative := p.path_relative(gs.coderoot.path)!
	// Need at least the three leading segments to identify a repository.
	segments := relative.split('/')
	if segments.len < 3 {
		return error("git: path is not valid, should contain provider/account/repository: '${relative}'")
	}
	sub_path := if segments.len > 3 { segments[3..].join('/') } else { '' }
	return GitLocation{
		provider: segments[0]
		account: segments[1]
		name: segments[2]
		path: sub_path
	}
}
// Get GitLocation from a URL.
// After normalize_url() the expected segment layout is:
//   parts[0]=host, parts[1]=account, parts[2]=repo, parts[3]=branch/tag, parts[4..]=path[#anchor]
pub fn (mut gs GitStructure) gitlocation_from_url(url string) !GitLocation {
	mut urllower := url.trim_space()
	if urllower == '' {
		return error('url cannot be empty')
	}
	// Normalize URL (strip scheme/user prefix, unify separators to '/')
	urllower = normalize_url(urllower)
	// Split URL into parts
	mut parts := urllower.split('/')
	mut anchor := ''
	mut path := ''
	mut branch_or_tag := ''
	// Deal with path and anchor: everything after the branch segment is the
	// in-repo path, optionally followed by '#<anchor>' (a position in a file).
	if parts.len > 4 {
		path = parts[4..].join('/')
		if path.contains('#') {
			parts2 := path.split('#')
			if parts2.len == 2 {
				path = parts2[0]
				anchor = parts2[1]
			} else {
				return error("git: url badly formatted, more than 1 '#' in ${url}")
			}
		}
	}
	// Extract branch if available; the repo segment may still carry '.git'
	// here (prefixed URLs are not stripped by normalize_url), so drop it.
	if parts.len > 3 {
		branch_or_tag = parts[3]
		parts[2] = parts[2].replace('.git', '')
	}
	// Validate parts
	if parts.len < 3 {
		return error("git: url badly formatted, not enough parts in '${urllower}' \nparts:\n${parts}")
	}
	// Extract provider, account, and name ('github.com' is shortened to 'github')
	provider := if parts[0] == 'github.com' { 'github' } else { parts[0] }
	account := parts[1]
	name := parts[2].replace('.git', '')
	return GitLocation{
		provider: provider
		account: account
		name: name
		branch_or_tag: branch_or_tag
		path: path
		anker: anchor
	}
}
// Return a herolib path object on the filesystem pointing to the locator.
// NOTE(review): the directory 'provider/account/name' is resolved as given,
// not anchored at the GitStructure coderoot — presumably relative to the
// current working directory; confirm against callers.
pub fn (mut l GitLocation) patho() !pathlib.Path {
	mut addrpath := pathlib.get_dir(path: '${l.provider}/${l.account}/${l.name}', create: false)!
	// When the locator carries an in-repo path, point inside the repository.
	if l.path.len > 0 {
		return pathlib.get('${addrpath.path}/${l.path}')
	}
	return addrpath
}
// Normalize a git URL into 'host/account/repo[...]' form for parsing:
// strip a known scheme/user prefix (or a trailing '.git' when no prefix
// matched), then unify ':' to '/', collapse '//' and trim edge slashes.
fn normalize_url(url string) string {
	mut u := url
	if u.starts_with('ssh://') {
		u = u[6..]
	} else if u.starts_with('git@') {
		u = u[4..]
	} else if u.starts_with('http:/') {
		u = u[6..]
	} else if u.starts_with('https:/') {
		u = u[7..]
	} else if u.ends_with('.git') {
		// only unprefixed URLs lose the '.git' suffix here; prefixed ones
		// keep it and the caller strips it later
		u = u[0..u.len - 4]
	}
	return u.replace(':', '/').replace('//', '/').trim('/')
}

View File

@@ -0,0 +1,196 @@
module gittools
import crypto.md5
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.clients.redisclient
import os
import freeflowuniverse.herolib.ui.console
pub struct GitStructureConfig {
pub mut:
coderoot string
light bool = true // If true, clones only the last history for all branches (clone with only 1 level deep)
log bool = true // If true, logs git commands/statements
debug bool = true
ssh_key_name string
}
// Redis key under which the configuration for a code root is stored;
// the path is hashed so the key stays short and character-safe.
fn rediskey(coderoot string) string {
	return 'git:config:${md5.hexhash(coderoot)}'
}
// GitStructure holds information about repositories within a specific code root.
// This structure keeps track of loaded repositories, their configurations, and their status.
@[heap]
pub struct GitStructure {
pub mut:
key string // Unique key representing the git structure (default is hash of $home/code).
config GitStructureConfig // Configuration settings for the git structure.
coderoot pathlib.Path // Root directory where repositories are located.
repos map[string]&GitRepo // Map of repositories, keyed by their unique names.
loaded bool // Indicates if the repositories have been loaded into memory.
}
//////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////
// Loads all repository information from the filesystem and updates from remote if necessary.
// Use the reload argument to force reloading from the disk.
//
// Args:
// - args (StatusUpdateArgs): Arguments controlling the reload behavior.
pub fn (mut gitstructure GitStructure) load(args StatusUpdateArgs) ! {
	mut processed_paths := []string{}
	// walk the coderoot and register every directory that contains '.git'
	gitstructure.load_recursive(gitstructure.coderoot.path, mut processed_paths)!
	if args.reload {
		// refresh every repository status concurrently, one thread per repo
		mut ths := []thread !{}
		redisclient.reset()! // make sure redis is empty, we don't want to reuse
		for _, mut repo_ in gitstructure.repos {
			mut myfunction := fn (mut repo GitRepo) ! {
				// reset redis client state inside the thread (presumably to get
				// a per-thread connection — TODO confirm)
				redisclient.reset()!
				redisclient.checkempty()
				repo.status_update(reload: true)!
			}
			ths << spawn myfunction(mut repo_)
		}
		console.print_debug('loaded all threads for git on ${gitstructure.coderoot.path}')
		for th in ths {
			th.wait()!
		}
	}
	gitstructure.init()!
}
// Initialization mechanism: forces logging on in debug mode and triggers an
// initial load when no repositories are known yet.
// The `loaded` flag guards against infinite mutual recursion: load() calls
// init() again at its end, which would call load() forever when the coderoot
// contains no repositories at all.
pub fn (mut gitstructure GitStructure) init() ! {
	if gitstructure.config.debug {
		gitstructure.config.log = true
	}
	if gitstructure.repos.keys().len == 0 && !gitstructure.loaded {
		gitstructure.loaded = true
		gitstructure.load()!
	}
}
// Recursively loads repositories from the provided path, updating their statuses.
//
// Args:
// - path (string): The path to search for repositories.
// - processed_paths ([]string): List of already processed paths to avoid duplication.
fn (mut gitstructure GitStructure) load_recursive(path string, mut processed_paths []string) ! {
	path_object := pathlib.get(path)
	relpath := path_object.path_relative(gitstructure.coderoot.path)!
	// Limit the recursion depth to avoid deep directory traversal.
	if relpath.count('/') > 4 {
		return
	}
	items := os.ls(path) or {
		return error('Cannot load gitstructure because directory not found: ${path}')
	}
	for item in items {
		current_path := os.join_path(path, item)
		if os.is_dir(current_path) {
			// A directory with '.git' inside is a repository root: register it
			// and do not recurse further into it.
			if os.exists(os.join_path(current_path, '.git')) {
				// Initialize the repository from the current path.
				mut repo := gitstructure.repo_init_from_path_(current_path)!
				key_ := repo.get_key()
				path_ := repo.get_path()!
				// A repeated key or path means the same repo was seen twice: abort.
				if processed_paths.contains(key_) || processed_paths.contains(path_) {
					return error('Duplicate repository detected.\nPath: ${path_}\nKey: ${key_}')
				}
				processed_paths << path_
				processed_paths << key_
				gitstructure.repos[key_] = &repo
				continue
			}
			// Skip hidden and underscore-prefixed directories.
			if item.starts_with('.') || item.starts_with('_') {
				continue
			}
			// Recursively search in subdirectories.
			gitstructure.load_recursive(current_path, mut processed_paths)!
		}
	}
}
// Remove the cached redis data belonging to this Git structure only
// (keys under 'git:repos:<key>:**').
pub fn (mut gitstructure GitStructure) cachereset() ! {
	mut redis := redis_get()
	for cache_key in redis.keys('git:repos:${gitstructure.key}:**')! {
		redis.del(cache_key)!
	}
}
@[params]
pub struct RepoInitParams {
ssh_key_name string // name of ssh key to be used in repo
}
// Initializes a Git repository from a given path by locating the parent directory with `.git`.
//
// Args:
// - path (string): Path to initialize the repository from.
// - params (RepoInitParams): optional settings; ssh_key_name becomes the repo's deploy key.
//
// Returns:
// - GitRepo: Reference to the initialized repository.
//
// Raises:
// - Error: If `.git` is not found in the parent directories.
fn (mut gitstructure GitStructure) repo_init_from_path_(path string, params RepoInitParams) !GitRepo {
	mypath := pathlib.get_dir(path: path, create: false)!
	mut parent_path := mypath.parent_find('.git') or {
		return error('Cannot find .git in parent directories starting from: ${path}')
	}
	if parent_path.path == '' {
		return error('Cannot find .git in parent directories starting from: ${path}')
	}
	// Retrieve GitLocation (provider/account/name) from the path.
	gl := gitstructure.gitlocation_from_path(mypath.path)!
	// Initialize and return a GitRepo struct; statuses start empty and are
	// filled by status_update()/load() later.
	mut r := GitRepo{
		gs: &gitstructure
		status_remote: GitRepoStatusRemote{}
		status_local: GitRepoStatusLocal{}
		config: GitRepoConfig{}
		provider: gl.provider
		account: gl.account
		name: gl.name
		deploysshkey: params.ssh_key_name
	}
	return r
}
// Returns the git repository that contains the current working directory by
// locating the parent directory with `.git`, or none when the working
// directory is not inside a repository.
pub fn (mut gitstructure GitStructure) get_working_repo() ?GitRepo {
	working_dir := pathlib.get_wd()
	return gitstructure.repo_init_from_path_(working_dir.path) or { return none }
}

View File

@@ -0,0 +1,274 @@
module gittools
import freeflowuniverse.herolib.ui as gui
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.ui.generic
import freeflowuniverse.herolib.clients.redisclient
import os
pub const gitcmds = 'clone,commit,pull,push,delete,reload,list,edit,sourcetree,cd'
@[params]
pub struct ReposActionsArgs {
pub mut:
cmd string // clone,commit,pull,push,delete,reload,list,edit,sourcetree
filter string // if used will only show the repo's which have the filter string inside
repo string
account string
provider string
msg string
url string
branch string
recursive bool
pull bool
script bool = true // run non interactive
reset bool = true // means we will lose changes (only relevant for clone, pull)
}
// do group actions on repo
// args
//```
// cmd string // clone,commit,pull,push,delete,reload,list,edit,sourcetree,cd
// filter string // if used will only show the repo's which have the filter string inside
// repo string
// account string
// provider string
// msg string
// url string
// pull bool
// script bool = true // run non interactive
// reset bool = true // means we will lose changes (only relevant for clone, pull)
//```
pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
	mut args := args_
	console.print_debug('git do ${args.cmd}')
	// No selection criteria given: if the current working directory is inside
	// a repository, target that repository.
	if args.repo == '' && args.account == '' && args.provider == '' && args.filter == '' {
		curdir := os.getwd()
		mut curdiro := pathlib.get_dir(path: curdir, create: false)!
		mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
		if parentpath.path != '' {
			r0 := gs.repo_init_from_path_(parentpath.path)!
			args.repo = r0.name
			args.account = r0.account
			args.provider = r0.provider
		}
	}
	args.cmd = args.cmd.trim_space().to_lower()
	mut ui := gui.new()!
	// 'reload' and 'list' act on the whole structure and return immediately.
	if args.cmd == 'reload' {
		console.print_header(' - reload gitstructure ${gs.config.coderoot}')
		gs.load(reload: true)!
		return ''
	}
	if args.cmd == 'list' {
		gs.repos_print(
			filter: args.filter
			name: args.repo
			account: args.account
			provider: args.provider
		)!
		return ''
	}
	mut repos := gs.get_repos(
		filter: args.filter
		name: args.repo
		account: args.account
		provider: args.provider
	)!
	// URL given: operate on that single repository (cloning it if needed).
	if args.url.len > 0 {
		mut g := gs.get_repo(url: args.url)!
		g.load()!
		if args.cmd == 'cd' {
			// caller uses the returned path to change directory
			return g.get_path()!
		}
		if args.reset {
			g.remove_changes()!
		}
		if args.cmd == 'pull' || args.pull {
			g.pull()!
		}
		if args.cmd == 'push' {
			if g.need_commit()! {
				if args.msg.len == 0 {
					return error('please specify message with -m ...')
				}
				g.commit(args.msg)!
			}
			g.push()!
		}
		if args.cmd == 'pull' || args.cmd == 'clone' || args.cmd == 'push' {
			gpath := g.get_path()!
			console.print_debug('git do ok, on path ${gpath}')
			return gpath
		}
		repos = [g]
	}
	// 'sourcetree' and 'edit' open tooling; refuse on 0 or too many repos.
	if args.cmd in 'sourcetree,edit'.split(',') {
		if repos.len == 0 {
			return error('please specify at least 1 repo for cmd:${args.cmd}')
		}
		if repos.len > 4 {
			return error('more than 4 repo found for cmd:${args.cmd}')
		}
		for r in repos {
			if args.cmd == 'edit' {
				r.open_vscode()!
			}
			if args.cmd == 'sourcetree' {
				r.sourcetree()!
			}
		}
		return ''
	}
	// Batch commands: show the selection, compute what is needed, confirm,
	// then execute per repository on its own thread.
	if args.cmd in 'pull,push,commit,delete'.split(',') {
		gs.repos_print(
			filter: args.filter
			name: args.repo
			account: args.account
			provider: args.provider
		)!
		mut need_commit := false
		mut need_pull := false
		mut need_push := false
		if repos.len == 0 {
			console.print_header(' - nothing to do.')
			return ''
		}
		// check on repos who needs what
		for mut g in repos {
			g.load()!
			need_commit = g.need_commit()! || need_commit
			if args.cmd == 'push' && need_commit {
				need_push = true
			}
			need_pull = args.cmd in 'pull,push'.split(',') // always do pull when push and pull
			need_push = args.cmd == 'push' && (g.need_push_or_pull()! || need_push)
		}
		mut ok := false
		// Summarize the pending work and ask for confirmation unless scripted.
		if need_commit || need_pull || need_push {
			mut out := '\n ** NEED TO '
			if need_commit {
				out += 'COMMIT '
			}
			if need_pull {
				out += 'PULL '
			}
			if need_push {
				out += 'PUSH '
			}
			if args.reset {
				out += ' (changes will be lost!)'
			}
			console.print_debug(out + ' ** \n')
			if args.script {
				ok = true
			} else {
				ok = ui.ask_yesno(question: 'Is above ok?')!
			}
		}
		if args.cmd == 'delete' {
			if args.script {
				ok = true
			} else {
				ok = ui.ask_yesno(question: 'Is it ok to delete above repos? (DANGEROUS)')!
			}
		}
		if ok == false {
			return error('cannot continue with action, you asked me to stop.\n${args}')
		}
		// One worker thread per repository; each returns whether it changed anything.
		mut ths := []thread !bool{}
		for mut g in repos {
			ths << spawn fn (mut g GitRepo, args ReposActionsArgs, need_commit bool, need_push bool, shared ui generic.UserInterface) !bool {
				// reset redis client state inside the thread (presumably per-thread
				// connections — TODO confirm)
				redisclient.reset()!
				redisclient.checkempty()
				mut has_changed := false
				need_commit_repo := (g.need_commit()! || need_commit)
					&& args.cmd in 'commit,pull,push'.split(',')
				need_pull_repo := args.cmd in 'pull,push'.split(',') // always do pull when push and pull
				need_push_repo := args.cmd in 'push'.split(',')
					&& (g.need_push_or_pull()! || need_push)
				if need_commit_repo {
					mut msg := args.msg
					if msg.len == 0 {
						if args.script {
							return error('message needs to be specified for commit.')
						}
						// interactive: the shared UI must be locked before use
						lock ui {
							msg = ui.ask_question(
								question: 'commit message for repo: ${g.account}/${g.name} '
							)!
						}
					}
					console.print_header(' - commit ${g.account}/${g.name}')
					g.commit(msg)!
					has_changed = true
				}
				if need_pull_repo {
					if args.reset {
						console.print_header(' - remove changes ${g.account}/${g.name}')
						g.remove_changes()!
					}
					console.print_header(' - pull ${g.account}/${g.name}')
					g.pull()!
					has_changed = true
				}
				if need_push_repo {
					console.print_header(' - push ${g.account}/${g.name}')
					g.push()!
					has_changed = true
				}
				if args.cmd == 'delete' {
					g.delete()!
					has_changed = true
				}
				return has_changed
			}(mut g, args, need_commit, need_push, shared &ui)
		}
		for th in ths {
			has_changed := th.wait()!
			if has_changed {
				console.print_header('\nCompleted required actions.\n')
				gs.repos_print(
					filter: args.filter
					name: args.repo
					account: args.account
					provider: args.provider
				)!
			}
		}
		return ''
	}
	// end for the commit, pull, push, delete
	$if debug {
		print_backtrace()
	}
	return error('did not find cmd: ${args.cmd}')
}

View File

@@ -0,0 +1,145 @@
module gittools
import freeflowuniverse.herolib.clients.redisclient
import time
// ReposGetArgs defines arguments to retrieve repositories from the git structure.
// It includes filters by name, account, provider, and an option to clone a missing repo.
@[params]
pub struct ReposGetArgs {
pub mut:
filter string // Optional filter for repository names
name string // Specific repository name to retrieve.
account string // Git account associated with the repository.
provider string // Git provider (e.g., GitHub).
pull bool // Pull the last changes.
reset bool // Reset the changes.
reload bool // Reload the repo into redis cache
url string // Repository URL
}
// Retrieves a list of repositories from the git structure that match the provided arguments.
// if pull will force a pull, if it can't will be error, if reset will remove the changes
//
// Args:
//```
// ReposGetArgs {
//   filter string // Optional filter for repository names
//   name string // Specific repository name to retrieve.
//   account string // Git account associated with the repository.
//   provider string // Git provider (e.g., GitHub).
//   pull bool // Pull the last changes.
//   reset bool // Reset the changes.
//   reload bool // Reload the repo into redis cache
//   url string // Repository URL, used if cloning is needed.
//```
// Returns:
// - []&GitRepo: A list of repository references that match the criteria.
pub fn (mut gitstructure GitStructure) get_repos(args_ ReposGetArgs) ![]&GitRepo {
	mut args := args_
	// When a URL is given, resolve it once into name/account/provider criteria
	// (previously this was re-resolved on every loop iteration).
	if args.url.len > 0 {
		// if being matched from url load repo info
		git_location := gitstructure.gitlocation_from_url(args.url)!
		args.account = git_location.account
		args.provider = git_location.provider
		args.name = git_location.name
	}
	mut res := []&GitRepo{}
	for _, repo in gitstructure.repos {
		if args.filter != '' {
			// A filter is restrictive: only repos whose relative path contains
			// it are returned. (Previously a non-matching repo fell through to
			// repo_match_check, which matches everything when name/account/
			// provider are empty, so the filter did not actually filter.)
			relpath := repo.get_relative_path()!
			if relpath.contains(args.filter) {
				res << repo
			}
			continue
		}
		if repo_match_check(repo, args) {
			res << repo
		}
	}
	// operate per repo on thread based on args
	mut ths := []thread !{}
	for mut repo in res {
		// check redis cache outside, in threads is problematic
		repo.cache_get() or { return error('failed to get repo cache ${err}') }
		// force a reload when the cached data is older than a day
		if time.since(time.unix(repo.last_load)) > 24 * time.hour {
			args.reload = true
		}
		ths << spawn fn (mut repo GitRepo, args ReposGetArgs) ! {
			redisclient.reset()!
			redisclient.checkempty()
			if args.pull {
				repo.pull()!
			} else if args.reset {
				repo.reset()!
			} else if args.reload {
				repo.load()!
			}
		}(mut repo, args)
	}
	for th in ths {
		th.wait()!
	}
	return res
}
// Retrieves a single repository based on the provided arguments.
// if pull will force a pull, if it can't will be error, if reset will remove the changes
// If the repository does not exist, it will clone it
//
// Args:
//```
// ReposGetArgs {
//   filter string // Optional filter for repository names
//   name string // Specific repository name to retrieve.
//   account string // Git account associated with the repository.
//   provider string // Git provider (e.g., GitHub).
//   pull bool // Pull the last changes.
//   reset bool // Reset the changes.
//   reload bool // Reload the repo into redis cache
//   url string // Repository URL, used if cloning is needed.
//```
//
// Returns:
// - &GitRepo: Reference to the retrieved or cloned repository.
//
// Raises:
// - Error: If multiple repositories are found with similar names or if cloning fails.
pub fn (mut gitstructure GitStructure) get_repo(args_ ReposGetArgs) !&GitRepo {
	mut args := args_
	repositories := gitstructure.get_repos(args)!
	if repositories.len == 0 {
		// nothing matched: a URL is required to be able to clone
		if args.url.len == 0 {
			return error('Cannot clone the repository, no URL provided: ${args.url}')
		}
		return gitstructure.clone(url: args.url)!
	}
	if repositories.len > 1 {
		// NOTE(review): '${it}' stringifies the whole repo struct; the relative
		// path was possibly intended here — confirm.
		repos := repositories.map('- ${it} ${it.account}.${it.name}').join_lines()
		return error('Found more than one repository for \n${args}\n${repos}')
	}
	return repositories[0]
}
// Helper function to check if a repository matches the criteria (name, account, provider).
//
// Args:
// - repo (GitRepo): The repository to check.
// - args (ReposGetArgs): The criteria to match against.
//
// Returns:
// - bool: True if the repository matches, false otherwise.
// True when the repository satisfies every non-empty criterion
// (name, account, provider); empty criteria match anything.
fn repo_match_check(repo GitRepo, args ReposGetArgs) bool {
	if args.name.len > 0 && repo.name != args.name {
		return false
	}
	if args.account.len > 0 && repo.account != args.account {
		return false
	}
	if args.provider.len > 0 && repo.provider != args.provider {
		return false
	}
	return true
}

View File

@@ -0,0 +1,62 @@
module gittools
import freeflowuniverse.herolib.ui.console
// Summarize the pending work for a repository as a comma-separated string,
// e.g. 'COMMIT, PULL, PUSH'; empty when the repo is fully in sync.
fn get_repo_status(gr GitRepo) !string {
	mut repo := gr
	mut pending := []string{}
	if repo.need_commit()! {
		pending << 'COMMIT'
	}
	// out-of-sync with the remote implies both a pull and a push may be needed
	if repo.need_push_or_pull()! {
		pending << 'PULL'
		pending << 'PUSH'
	}
	return pending.join(', ')
}
// Format repository information for display, including path, tag/branch, and status.
// Returns three columns: relative path, '[[tag]]' or '[branch]', and pending actions.
fn format_repo_info(repo GitRepo) ![]string {
	status := get_repo_status(repo)!
	tag_or_branch := if repo.status_local.tag.len > 0 {
		'[[${repo.status_local.tag}]]' // Display tag if it exists
	} else {
		'[${repo.status_local.branch}]' // Otherwise, display branch
	}
	relative_path := repo.get_relative_path()!
	return [' - ${relative_path}', tag_or_branch, status]
}
// Print repositories based on the provided criteria, showing their statuses.
// Clears the console and renders one aligned row per matching repository.
pub fn (mut gitstructure GitStructure) repos_print(args ReposGetArgs) ! {
	console.print_debug('#### Overview of repositories:')
	console.print_debug('')
	mut repo_data := [][]string{}
	// Collect repository information based on the provided criteria
	for _, repo in gitstructure.get_repos(args)! {
		repo_data << format_repo_info(repo)!
	}
	// Clear the console and start printing the formatted repository information
	console.clear()
	console.print_lf(1)
	// Display header with optional argument filtering information
	header := if args.str().len > 0 {
		'Repositories: ${gitstructure.config.coderoot} [${args.str()}]'
	} else {
		'Repositories: ${gitstructure.config.coderoot}'
	}
	console.print_header(header)
	// Print the repository information in a formatted array
	console.print_lf(1)
	console.print_array(repo_data, '  ', true) // true -> aligned for better readability
	console.print_lf(5)
}

View File

@@ -0,0 +1,313 @@
module gittools
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal
import os
import time
// GitRepo holds information about a single Git repository.
@[heap]
pub struct GitRepo {
pub mut:
gs &GitStructure @[skip; str: skip] // Reference to the parent GitStructure
provider string // e.g., github.com, shortened to 'github'
account string // Git account name
name string // Repository name
status_remote GitRepoStatusRemote // Remote repository status
status_local GitRepoStatusLocal // Local repository status
status_wanted GitRepoStatusWanted // what is the status we want?
config GitRepoConfig // Repository-specific configuration
last_load int // Epoch timestamp of the last load from reality
deploysshkey string // to use with git
}
// this is the status we want, we need to work towards off
pub struct GitRepoStatusWanted {
pub mut:
branch string
tag string
url string // Remote repository URL, is basically the one we want
readonly bool // if read only then we cannot push or commit, all changes will be reset when doing pull
}
// GitRepoStatusRemote holds remote status information for a repository.
pub struct GitRepoStatusRemote {
pub mut:
ref_default string // is the default branch hash
branches map[string]string // Branch name -> commit hash
tags map[string]string // Tag name -> commit hash
}
// GitRepoStatusLocal holds local status information for a repository.
pub struct GitRepoStatusLocal {
pub mut:
branches map[string]string // Branch name -> commit hash
branch string // the current branch
tag string // If the local branch is not set, the tag may be set
}
// GitRepoConfig holds repository-specific configuration options.
pub struct GitRepoConfig {
pub mut:
remote_check_period int = 3600 * 24 * 3 // Seconds to wait between remote checks (0 = check every time), default 3 days
}
// Create a GitRepo from a GitLocation and register it in the structure.
// The repository is registered under get_key() so the map keying is
// consistent with load_recursive(); previously the bare name was used, which
// could register the same repo under two different keys.
pub fn (mut gitstructure GitStructure) repo_new_from_gitlocation(git_location GitLocation) !&GitRepo {
	mut repo := GitRepo{
		provider: git_location.provider
		name: git_location.name
		account: git_location.account
		gs: &gitstructure
		status_remote: GitRepoStatusRemote{}
		status_local: GitRepoStatusLocal{}
		status_wanted: GitRepoStatusWanted{}
	}
	gitstructure.repos[repo.get_key()] = &repo
	return &repo
}
//////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////
// Commit the staged changes with the provided commit message.
// Stages everything ('git add . -A') first; no-op when there is nothing to commit.
pub fn (mut repo GitRepo) commit(msg string) ! {
	repo.status_update()!
	if repo.need_commit()! {
		if msg == '' {
			return error('Commit message is empty.')
		}
		repo_path := repo.get_path()!
		repo.exec('git add . -A') or {
			return error('Cannot add to repo: ${repo_path}. Error: ${err}')
		}
		repo.exec('git commit -m "${msg}"') or {
			return error('Cannot commit repo: ${repo_path}. Error: ${err}')
		}
		console.print_green('Changes committed successfully.')
	} else {
		console.print_debug('No changes to commit.')
	}
	// refresh cached status from the actual repository state
	repo.load()!
}
// Push local changes to the remote repository.
// Sets the upstream so locally created branches are pushed too; no-op (with a
// cache refresh) when nothing needs pushing.
pub fn (mut repo GitRepo) push() ! {
	repo.status_update()!
	if repo.need_push_or_pull()! {
		url := repo.get_repo_url()!
		console.print_header('Pushing changes to ${url}')
		// We may need to push the locally created branches
		repo.exec('git push --set-upstream origin ${repo.status_local.branch}')!
		console.print_green('Changes pushed successfully.')
		repo.load()!
	} else {
		console.print_header('Everything is up to date.')
		repo.load()!
	}
}
@[params]
pub struct PullCheckoutArgs {
pub mut:
submodules bool // if we want to pull for submodules
}
// Pull remote content into the repository.
// First checks out the wanted branch/tag when the repo is not on it;
// optionally updates submodules afterwards.
pub fn (mut repo GitRepo) pull(args_ PullCheckoutArgs) ! {
	repo.status_update()!
	if repo.need_checkout() {
		repo.checkout()!
	}
	repo.exec('git pull') or {
		return error('Cannot pull repo: ${repo.get_path()!}. Error: ${err}')
	}
	if args_.submodules {
		repo.update_submodules()!
	}
	// refresh cached status from the actual repository state
	repo.load()!
	console.print_green('Changes pulled successfully.')
}
// Checkout the wanted branch or tag in the repository.
// Resets local changes first when the repo is marked readonly and
// refuses to switch while uncommitted changes are present.
pub fn (mut repo GitRepo) checkout() ! {
	repo.status_update()!
	if repo.status_wanted.readonly {
		repo.reset()!
	}
	if repo.need_commit()! {
		return error('Cannot checkout branch due to uncommitted changes in ${repo.get_path()!}.')
	}
	if repo.status_wanted.tag.len > 0 {
		repo.exec('git checkout tags/${repo.status_wanted.tag}')!
	}
	if repo.status_wanted.branch.len > 0 {
		// BUGFIX: this previously checked out ${repo.status_wanted.tag},
		// which is empty when a branch is wanted; use the wanted branch.
		repo.exec('git checkout ${repo.status_wanted.branch}')!
	}
}
// Create a new branch pointing at the current HEAD, without switching to it.
pub fn (mut repo GitRepo) branch_create(branchname string) ! {
	// Plain `git branch <name>` creates the branch. The previous `-c` flag
	// COPIES the current branch including its config and reflog, which is
	// not what a plain "create" should do.
	repo.exec('git branch ${branchname}') or {
		return error('Cannot Create branch: ${repo.get_path()!} to ${branchname}\nError: ${err}')
	}
	console.print_green('Branch ${branchname} created successfully.')
}
// Switch the working tree to an existing branch, then pull its latest state.
pub fn (mut repo GitRepo) branch_switch(branchname string) ! {
	repo_path := repo.get_path()!
	repo.exec('git switch ${branchname}') or {
		return error('Cannot switch branch: ${repo_path} to ${branchname}\nError: ${err}')
	}
	console.print_green('Branch ${branchname} switched successfully.')
	// a branch and a tag cannot be active at the same time
	repo.status_local.branch = branchname
	repo.status_local.tag = ''
	repo.pull()!
}
// Create a new tag at the current HEAD.
pub fn (mut repo GitRepo) tag_create(tagname string) ! {
	path := repo.get_path()!
	repo.exec('git tag ${tagname}') or {
		return error('Cannot create tag: ${path}. Error: ${err}')
	}
	console.print_green('Tag ${tagname} created successfully.')
}
// Checkout the given tag and record it as the active local state.
pub fn (mut repo GitRepo) tag_switch(tagname string) ! {
	repo.exec('git checkout ${tagname}') or {
		return error('Cannot switch to tag: ${tagname}. Error: ${err}')
	}
	console.print_green('Tag ${tagname} activated.')
	// a tag and a branch cannot be active at the same time
	repo.status_local.branch = ''
	repo.status_local.tag = tagname
	repo.pull()!
}
// Check whether a tag with the given name exists in the repository.
pub fn (mut repo GitRepo) tag_exists(tag string) !bool {
	// `git show <name>` also succeeds for branches and raw commit hashes;
	// verify against refs/tags so that only real tags match.
	repo.exec('git rev-parse -q --verify refs/tags/${tag}') or { return false }
	return true
}
// Deletes the Git repository from disk and drops its cached state.
pub fn (mut repo GitRepo) delete() ! {
	repo_path := repo.get_path()!
	repo.cache_delete()!
	osal.rm(repo_path)!
	// BUGFIX: do not call repo.load() here — load() runs `git fetch` inside
	// the just-removed path (and init() checks os.exists on it), so the old
	// trailing load() made delete() always return an error.
}
// Create a GitLocation for a path (relative to the repo root) inside this repo.
pub fn (mut gs GitRepo) gitlocation_from_path(path string) !GitLocation {
	if path.starts_with('/') || path.starts_with('~') {
		return error('Path must be relative, cannot start with / or ~')
	}
	mut git_path := gs.patho()!
	// BUGFIX: validate the requested path INSIDE the repo; the previous
	// check only verified that the repo root itself exists.
	full_path := '${git_path.path}/${path}'
	if !os.exists(full_path) {
		return error('Path does not exist inside the repository: ${full_path}')
	}
	// prefer the wanted tag over the wanted branch when both are set
	mut branch_or_tag := gs.status_wanted.branch
	if gs.status_wanted.tag.len > 0 {
		branch_or_tag = gs.status_wanted.tag
	}
	return GitLocation{
		provider: gs.provider
		account: gs.account
		name: gs.name
		branch_or_tag: branch_or_tag
		path: path // relative path in relation to git repo
	}
}
// Validate the repo: all identifying fields set, the working tree present
// on disk, and the deploy ssh key configured when one is requested.
pub fn (mut repo GitRepo) init() ! {
	repo_path := repo.get_path()!
	if repo.gs.coderoot.path == '' {
		return error('Coderoot cannot be empty')
	}
	if repo.provider == '' {
		return error('Provider cannot be empty')
	}
	if repo.account == '' {
		return error('Account cannot be empty')
	}
	if repo.name == '' {
		return error('Name cannot be empty')
	}
	if !os.exists(repo_path) {
		return error('Path does not exist: ${repo_path}')
	}
	// configure git to use the deploy key when one was requested
	if repo.deploysshkey.len > 0 {
		repo.set_sshkey(repo.deploysshkey)!
	}
	// TODO: check tag or branch set on wanted, and not both
}
// Configure the repo to use a named ssh deploy key from ~/.ssh via
// `git config core.sshCommand`.
fn (mut repo GitRepo) set_sshkey(key_name string) ! {
	// will use this dir to find and set key from
	ssh_dir := os.join_path(os.home_dir(), '.ssh')
	key := osal.get_ssh_key(key_name, directory: ssh_dir) or {
		return error('SSH Key with name ${key_name} not found.')
	}
	private_key := key.private_key_path()!
	// BUGFIX: the command was previously built into a discarded string and
	// never executed; also private_key.path is already a full path, so the
	// old '~/.ssh/' prefix produced an invalid location.
	repo.exec('git config core.sshCommand "ssh -i ${private_key.path}"')!
	repo.deploysshkey = key_name
}
// Removes all local changes from the repo (hard reset + clean); be cautious.
pub fn (mut repo GitRepo) remove_changes() ! {
	repo.status_update()!
	changed := repo.has_changes()!
	if !changed {
		return
	}
	repo_path := repo.get_path()!
	console.print_header('Removing changes in ${repo_path}')
	repo.exec('git reset HEAD --hard && git clean -xfd') or {
		return error("can't remove changes on repo: ${repo_path}.\n${err}")
		// TODO: fallback — re-clone the repo when the cleanup itself fails
	}
	repo.load()!
}
// Alias for remove_changes: drops all local modifications.
pub fn (mut repo GitRepo) reset() ! {
	repo.remove_changes()!
}
// Initialize and update all submodules of the repo, recursively.
fn (mut repo GitRepo) update_submodules() ! {
	repo_path := repo.get_path()!
	repo.exec('git submodule update --init --recursive') or {
		return error('Cannot update submodules for repo: ${repo_path}. Error: ${err}')
	}
}
// Run a shell command inside the repo directory and return its output;
// errors when the command exits with a non-zero status.
fn (repo GitRepo) exec(cmd_ string) !string {
	full_cmd := 'cd ${repo.get_path()!} && ${cmd_}'
	if repo.gs.config.log {
		console.print_green(full_cmd)
	}
	res := os.execute(full_cmd)
	if res.exit_code != 0 {
		return error('Repo failed to exec cmd: ${full_cmd}\n${res.output})')
	}
	return res.output
}

View File

@@ -0,0 +1,42 @@
module gittools
import json
import freeflowuniverse.herolib.clients.redisclient
// Return the shared core Redis client; panics when Redis is unreachable.
fn redis_get() redisclient.Redis {
	return redisclient.core_get() or { panic(err) }
}
// Serialize the repo to JSON and store it in Redis under its cache key.
fn (mut repo GitRepo) cache_set() ! {
	mut redis := redis_get()
	redis.set(repo.get_cache_key(), json.encode(repo))!
}
// Populate this repo from the Redis cache, when an entry exists.
// The gs back-pointer is re-attached from the live instance because a
// pointer cannot round-trip through JSON.
fn (mut repo GitRepo) cache_get() ! {
	mut repo_json := ''
	mut redis_client := redis_get()
	cache_key := repo.get_cache_key()
	// NOTE(review): assumes the client returns an empty string (not an error)
	// for a missing key — otherwise a never-cached repo could not pass
	// status_update(); confirm against the redisclient implementation.
	repo_json = redis_client.get(cache_key) or {
		return error('Failed to get redis key ${cache_key}\n${err}')
	}
	if repo_json.len > 0 {
		mut cached := json.decode(GitRepo, repo_json)!
		cached.gs = repo.gs
		// replace the receiver's value wholesale with the cached state
		repo = cached
	}
}
// Remove this repo's entry from the Redis cache.
fn (repo GitRepo) cache_delete() ! {
	mut redis_client := redis_get()
	cache_key := repo.get_cache_key()
	redis_client.del(cache_key) or { return error('Cannot delete the repo cache due to: ${err}') }
	// TODO: report v bug, function should work without return as well
	return
}

View File

@@ -0,0 +1,47 @@
module gittools
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal
import os
// Arguments for GitStructure.clone.
@[params]
pub struct GitCloneArgs {
pub mut:
	url    string // http(s) or ssh git URL of the repository to clone (required)
	sshkey string // optional name of the ssh deploy key to configure on the cloned repo
}
// Clones a new repository into the git structure based on the provided arguments.
pub fn (mut gitstructure GitStructure) clone(args GitCloneArgs) !&GitRepo {
	if args.url.len == 0 {
		return error('url needs to be specified when doing a clone.')
	}
	console.print_header('Git clone from the URL: ${args.url}.')
	git_location := gitstructure.gitlocation_from_url(args.url)!
	mut repo := gitstructure.repo_new_from_gitlocation(git_location)!
	repo.status_wanted.url = args.url
	if args.sshkey.len > 0 {
		repo.set_sshkey(args.sshkey)!
	}
	parent_dir := repo.get_parent_dir(create: true)!
	// use a shallow clone when the structure is configured as "light"
	mut extra := ''
	if gitstructure.config.light {
		extra = '--depth 1 --no-single-branch '
	}
	clone_cmd := 'cd ${parent_dir} && git clone ${extra} ${repo.get_repo_url()!} ${repo.name}'
	res := os.execute(clone_cmd)
	if res.exit_code != 0 {
		return error('Cannot clone the repository due to: \n${res.output}')
	}
	repo.load()!
	console.print_green("The repository '${repo.name}' cloned into ${parent_dir}.")
	return repo
}

View File

@@ -0,0 +1,110 @@
module gittools
// FUNCTIONS TO GET INFO FROM REALITY
// Retrieves a list of unstaged changes in the repository:
// files that are modified or untracked (respecting .gitignore).
//
// Returns:
// - An array of relative file paths; empty when the tree is clean.
// - An error when the git command fails.
pub fn (repo GitRepo) get_changes_unstaged() ![]string {
	out := repo.exec('git ls-files --other --modified --exclude-standard') or {
		return error('Failed to check for unstaged changes: ${err}')
	}
	// collect non-empty lines only
	mut files := []string{}
	for line in out.split('\n') {
		if line.len > 0 {
			files << line
		}
	}
	return files
}
// Retrieves a list of staged changes in the repository:
// files added to the index and ready to be committed.
//
// Returns:
// - An array of relative file paths; empty when nothing is staged.
// - An error when the git command fails.
pub fn (repo GitRepo) get_changes_staged() ![]string {
	out := repo.exec('git diff --name-only --staged') or {
		return error('Failed to check for staged changes: ${err}')
	}
	// drop empty lines from the command output
	return out.split('\n').filter(it != '')
}
// Check whether the repository has any staged, unstaged or untracked changes.
pub fn (mut repo GitRepo) has_changes() !bool {
	repo.status_update()!
	unstaged := repo.get_changes_unstaged()!
	staged := repo.get_changes_staged()!
	return unstaged.len > 0 || staged.len > 0
}
// A commit is needed whenever the working tree has any changes.
pub fn (mut repo GitRepo) need_commit() !bool {
	changed := repo.has_changes()!
	return changed
}
// Check whether the local and remote HEAD commits of the current branch
// differ, i.e. a push or a pull is required to get in sync.
pub fn (mut repo GitRepo) need_push_or_pull() !bool {
	repo.status_update()!
	last_remote_commit := repo.get_last_remote_commit() or {
		return error('Failed to get last remote commit: ${err}')
	}
	last_local_commit := repo.get_last_local_commit() or {
		return error('Failed to get last local commit: ${err}')
	}
	// removed leftover debug println's that spammed every status check
	return last_local_commit != last_remote_commit
}
// Determine whether the repo must check out another branch or tag to
// reach the wanted state. Both wanted fields are optional, so this
// returns false when neither a branch nor a tag is requested.
fn (mut repo GitRepo) need_checkout() bool {
	wanted := repo.status_wanted
	local := repo.status_local
	if wanted.branch.len > 0 {
		return wanted.branch != local.branch
	}
	if wanted.tag.len > 0 {
		return wanted.tag != local.tag
	}
	return false
}
// Name of the remote's default branch (the branch origin/HEAD points at).
// load_branches() stores the COMMIT HASH of origin/HEAD in ref_default,
// so the hash is resolved back to a branch name here; a plain branch name
// in ref_default is also accepted for robustness.
fn (mut repo GitRepo) get_remote_default_branchname() !string {
	if repo.status_remote.ref_default.len == 0 {
		return error('ref_default cannot be empty for ${repo}')
	}
	// ref_default already a branch name?
	if repo.status_remote.ref_default in repo.status_remote.branches {
		return repo.status_remote.ref_default
	}
	// otherwise treat ref_default as a commit hash and reverse-lookup the name
	for name, hash in repo.status_remote.branches {
		if hash == repo.status_remote.ref_default {
			return name
		}
	}
	return error("can't find ref_default in branches for ${repo}")
}
// Commit hash of the current branch as known REMOTELY; returns "" when
// the branch is not known on the remote.
pub fn (repo GitRepo) get_last_remote_commit() !string {
	if repo.status_local.branch in repo.status_remote.branches {
		// BUGFIX: this previously returned the LOCAL commit
		// (status_local.branches), defeating the remote comparison.
		return repo.status_remote.branches[repo.status_local.branch]
	}
	return ''
}
// Commit hash of the current branch as known locally; errors when the
// branch is missing from the local branch map.
pub fn (repo GitRepo) get_last_local_commit() !string {
	branch := repo.status_local.branch
	if branch in repo.status_local.branches {
		return repo.status_local.branches[branch]
	}
	return error("can't find branch: ${branch} in local branches:\n${repo.status_local.branches}")
}

View File

@@ -0,0 +1,91 @@
module gittools
import time
import freeflowuniverse.herolib.ui.console
// Arguments for GitRepo.status_update.
@[params]
pub struct StatusUpdateArgs {
	reload       bool // force a reload even if the cached state is still fresh
	ssh_key_name string // name of ssh key to be used when loading
	// NOTE(review): ssh_key_name is not read inside status_update — confirm intended.
}
// Refresh the repo status: pull cached state from Redis, validate the
// repo, and reload from git when the cached state is older than the
// configured remote check period (or when args.reload forces it).
pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
	// Check current time vs last check, if needed (check period) then load
	repo.cache_get()! // Ensure we have the situation from redis
	repo.init()!
	current_time := int(time.now().unix())
	// NOTE(review): reads repo.config.remote_check_period — confirm GitRepo
	// carries its own config field (as opposed to repo.gs.config).
	if args.reload || repo.last_load == 0
		|| current_time - repo.last_load >= repo.config.remote_check_period {
		console.print_debug('${repo.name} ${current_time}-${repo.last_load}: ${repo.config.remote_check_period} +++')
		repo.load()!
	}
}
// Reload repo information from git (fetch, branches, tags) and cache it.
// Does not consult the cache; callers decide when a reload is due.
fn (mut repo GitRepo) load() ! {
	console.print_debug('load ${repo.get_key()}')
	repo.init()!
	repo_path := repo.get_path()!
	repo.exec('git fetch --all') or {
		return error('Cannot fetch repo: ${repo_path}. Error: ${err}')
	}
	repo.load_branches()!
	repo.load_tags()!
	// remember when this reload happened so status_update can throttle
	repo.last_load = int(time.now().unix())
	repo.cache_set()!
}
// Parse local and remote branch heads into status_local / status_remote.
// Iterates `git for-each-ref` output over refs/heads and refs/remotes/origin;
// branch names containing '_archive' are skipped.
fn (mut repo GitRepo) load_branches() ! {
	tags_result := repo.exec("git for-each-ref --format='%(objectname) %(refname:short)' refs/heads refs/remotes/origin")!
	for line in tags_result.split('\n') {
		if line.trim_space() != '' {
			parts := line.split(' ')
			if parts.len == 2 {
				commit_hash := parts[0].trim_space()
				mut name := parts[1].trim_space()
				if name.contains('_archive') {
					continue
				} else if name == 'origin' {
					// 'origin' is the shortened refname of origin/HEAD.
					// NOTE(review): this stores the COMMIT HASH in ref_default,
					// not a branch name — consumers must resolve it.
					repo.status_remote.ref_default = commit_hash
				} else if name.starts_with('origin') {
					name = name.all_after('origin/').trim_space()
					// Update remote branch info (name -> commit hash)
					repo.status_remote.branches[name] = commit_hash
				} else {
					// local branch (refs/heads)
					repo.status_local.branches[name] = commit_hash
				}
			}
		}
	}
	mybranch := repo.exec('git branch --show-current')!.split_into_lines().filter(it.trim_space() != '')
	if mybranch.len == 1 {
		repo.status_local.branch = mybranch[0].trim_space()
	}
	// With a detached HEAD (e.g. on a tag) `git branch --show-current`
	// prints nothing, so status_local.branch keeps its previous value.
}
// Load all tags (name -> commit hash) into status_remote.tags.
fn (mut repo GitRepo) load_tags() ! {
	// BUGFIX: `git tag --list` prints only tag names (no hashes), so the
	// previous "<hash> <name>" parsing never matched and tags were never
	// loaded. for-each-ref yields both fields explicitly.
	tags_result := repo.exec("git for-each-ref --format='%(objectname) %(refname:short)' refs/tags")!
	for line in tags_result.split('\n') {
		trimmed := line.trim_space()
		if trimmed == '' {
			continue
		}
		parts := trimmed.split(' ')
		if parts.len == 2 {
			commit_hash := parts[0].trim_space()
			tag_name := parts[1].trim_space()
			repo.status_remote.tags[tag_name] = commit_hash
		}
	}
}

View File

@@ -0,0 +1,151 @@
module gittools
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.sshagent
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.develop.vscode
import freeflowuniverse.herolib.develop.sourcetree
import os
// Arguments for GitRepo.get_parent_dir.
@[params]
struct GetParentDir {
pub mut:
	create bool // create the parent directory when it does not yet exist
}
// Unique key of this repo inside its gitstructure:
// "<gs.key>:<provider>:<account>:<name>".
fn (repo GitRepo) get_key() string {
	return '${repo.gs.key}:${repo.provider}:${repo.account}:${repo.name}'
}
// Redis key under which this repo's cached state is stored.
fn (repo GitRepo) get_cache_key() string {
	// same suffix as get_key(), namespaced under 'git:repos:'
	return 'git:repos:' + repo.get_key()
}
// Absolute filesystem path of the repo checkout:
// "<coderoot>/<provider>/<account>/<name>".
pub fn (repo GitRepo) get_path() !string {
	return '${repo.gs.coderoot.path}/${repo.provider}/${repo.account}/${repo.name}'
}
// Gets the local path of a file/dir referenced by a repo web URL.
// ex: 'https://git.ourworld.tf/ourworld_holding/info_ourworld/src/branch/main/books/cocreation/SUMMARY.md'
// returns <repo_path>/books/cocreation/SUMMARY.md
pub fn (repo GitRepo) get_path_of_url(url string) !string {
	url_parts := url.split('/')
	// Gitea URLs: .../src/branch/<branch>/<path>  -> path starts 3 after 'src'
	// GitHub URLs: .../blob/<branch>/<path> or .../tree/<branch>/<path>
	//             -> path starts 2 after the marker
	mut repo_root_idx := url_parts.index('src')
	mut offset := 3
	if repo_root_idx == -1 {
		repo_root_idx = url_parts.index('blob')
		offset = 2
	}
	if repo_root_idx == -1 {
		repo_root_idx = url_parts.index('tree')
		offset = 2
	}
	if repo_root_idx == -1 {
		// maybe a plain repo URL (no src/blob/tree): resolve to the repo root
		return repo.get_path() or {
			return error('Invalid URL format: Cannot find repository path')
		}
	}
	// BUGFIX: require marker + branch segment(s) before slicing; the old
	// guard (< idx+2) allowed an out-of-range slice at idx+3.
	if url_parts.len < repo_root_idx + offset {
		return error('Invalid URL format: Missing branch or file path')
	}
	path_in_repo := url_parts[repo_root_idx + offset..].join('/')
	return '${repo.get_path()!}/${path_in_repo}'
}
// Path of the repo relative to the gitstructure's coderoot.
pub fn (repo GitRepo) get_relative_path() !string {
	mut repo_patho := repo.patho()!
	return repo_patho.path_relative(repo.gs.coderoot.path) or { panic("couldn't get relative path") }
}
// Parent directory that contains (or will contain) the repo checkout.
// Errors when the directory is missing and args.create is false.
pub fn (repo GitRepo) get_parent_dir(args GetParentDir) !string {
	parent_dir := os.dir(repo.get_path()!)
	if !os.exists(parent_dir) {
		if !args.create {
			return error('Parent directory does not exist: ${parent_dir}')
		}
	}
	// mkdir_all is a no-op when the directory already exists
	os.mkdir_all(parent_dir)!
	return parent_dir
}
// Arguments for get_repo_url.
@[params]
pub struct GetRepoUrlArgs {
pub mut:
	with_branch bool // if true, return the repo URL pointing at the exact branch
}
// URL of the repo: the explicitly wanted url when set, otherwise an
// ssh url when an ssh-agent is loaded, falling back to https.
fn (self GitRepo) get_repo_url(args GetRepoUrlArgs) !string {
	wanted_url := self.status_wanted.url
	if wanted_url.len != 0 {
		if args.with_branch {
			return '${wanted_url}/tree/${self.status_local.branch}'
		}
		return wanted_url
	}
	if sshagent.loaded() {
		return self.get_ssh_url()!
	}
	return self.get_http_url()!
}
// ssh clone url, e.g. git@github.com:account/name.git
fn (self GitRepo) get_ssh_url() !string {
	// 'github' is stored without the TLD; expand it for the url
	host := if self.provider == 'github' { 'github.com' } else { self.provider }
	return 'git@${host}:${self.account}/${self.name}.git'
}
// https browse/clone url, e.g. https://github.com/account/name
fn (self GitRepo) get_http_url() !string {
	// 'github' is stored without the TLD; expand it for the url
	host := if self.provider == 'github' { 'github.com' } else { self.provider }
	return 'https://${host}/${self.account}/${self.name}'
}
// Rich pathlib.Path object pointing at the repo directory (must exist).
pub fn (repo GitRepo) patho() !pathlib.Path {
	repo_path := repo.get_path()!
	return pathlib.get_dir(path: repo_path, create: false)!
}
// Print the repo's staged and unstaged changes to the console.
pub fn (mut repo GitRepo) display_current_status() ! {
	staged := repo.get_changes_staged()!
	unstaged := repo.get_changes_unstaged()!
	console.print_header('Staged changes:')
	for file in staged {
		console.print_green('\t- ${file}')
	}
	console.print_header('Unstaged changes:')
	if unstaged.len == 0 {
		console.print_stderr('No unstaged changes; the changes need to be committed.')
		return
	}
	for file in unstaged {
		console.print_stderr('\t- ${file}')
	}
}
// Open the repo directory in SourceTree.
pub fn (repo GitRepo) sourcetree() ! {
	repo_path := repo.get_path()!
	sourcetree.open(path: repo_path)!
}
// Open the repo directory in Visual Studio Code.
pub fn (repo GitRepo) open_vscode() ! {
	mut helper := vscode.new(repo.get_path()!)
	helper.open()!
}

View File

@@ -0,0 +1,241 @@
module tests
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal
import os
import time
// Globals shared across the test functions in this suite.
// NOTE(review): __global requires compiling with -enable-globals —
// confirm the test runner passes that flag.
__global (
	branch_name_tests string // unique branch name generated per test run
	tag_name_tests string // unique tag name generated per test run
	repo_path_tests string // local filesystem path of the cloned test repo
	repo_tests gittools.GitRepo // repo instance shared by the tests
	repo_setup_tests GittoolsTests // setup/teardown helper (coderoot, repo url)
)
// Initialize the run-unique globals (branch and tag names) before the
// tests execute; acts as a lightweight `before_all`.
fn setup_generate_globals() {
	now_unix := time.now().unix()
	branch_name_tests = 'branch_${now_unix}'
	tag_name_tests = 'tag_${now_unix}'
}
// Runs once before the suite: generate the unique branch/tag names used
// by the tests below.
fn testsuite_begin() {
	setup_generate_globals()
}
// Runs once after the suite: remove the temporary coderoot.
// NOTE(review): uses `!` propagation although the signature declares no
// result type — confirm this compiles with the project's V toolchain.
fn testsuite_end() {
	repo_setup_tests.clean()!
}
// Test cloning a Git repository and verifying that it exists locally:
// prepares a clean coderoot, clones via gittools, and checks the path.
@[test]
fn test_clone_repo() {
	repo_setup_tests = setup_repo()! // fresh /tmp/code coderoot
	mut gs := gittools.new(coderoot: repo_setup_tests.coderoot)!
	repo_tests = gs.get_repo(url: repo_setup_tests.repo_url)!
	repo_path_tests = repo_tests.get_path()!
	// the clone must now exist on disk
	assert os.exists(repo_path_tests) == true
}
// Test creating a new branch in the Git repository: the branch is
// created from the run-unique name but must NOT be checked out.
@[test]
fn test_branch_create() {
	repo_tests.branch_create(branch_name_tests)!
	// creation alone must not change the active branch
	assert repo_tests.status_local.branch != branch_name_tests
}
// Test switching to the branch created in test_branch_create and
// verifying the local status reflects it.
@[test]
fn test_switch() {
	repo_tests.branch_switch(branch_name_tests)!
	assert repo_tests.status_local.branch == branch_name_tests
}
// Test creating a tag (run-unique name) and verifying it exists.
@[test]
fn test_tag_create() {
	repo_tests.tag_create(tag_name_tests)!
	assert repo_tests.tag_exists(tag_name_tests)! == true
}
// Test detecting, staging and committing changes: a new file produces
// exactly one unstaged change; after commit the tree is clean again.
@[test]
fn test_has_changes_add_changes_commit_changes() {
	file_name := create_new_file(repo_path_tests)!
	assert repo_tests.has_changes()! == true
	// exactly the newly created file shows up as unstaged
	mut unstaged_changes := repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 1
	mut staged_changes := repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
	commit_msg := 'feat: Added ${file_name} file.'
	repo_tests.commit(commit_msg)!
	// commit() stages and commits in one go, so both lists are empty now
	staged_changes = repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
	unstaged_changes = repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 0
}
// Test committing and pushing changes: create a file, commit it, push,
// and verify the working tree is clean afterwards.
@[test]
fn test_push_changes() {
	file_name := create_new_file(repo_path_tests)!
	assert repo_tests.has_changes()! == true
	mut unstaged_changes := repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 1
	mut staged_changes := repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
	commit_msg := 'feat: Added ${file_name} file.'
	repo_tests.commit(commit_msg)!
	// NOTE(review): requires push credentials for the test repo in CI.
	repo_tests.push()!
	staged_changes = repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
	unstaged_changes = repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 0
}
// Test performing two consecutive commits followed by a single push;
// afterwards the working tree must report no changes.
@[test]
fn test_multiple_commits_and_push() {
	file_name_1 := create_new_file(repo_path_tests)!
	repo_tests.commit('feat: Added ${file_name_1} file.')!
	file_name_2 := create_new_file(repo_path_tests)!
	repo_tests.commit('feat: Added ${file_name_2} file.')!
	repo_tests.push()!
	assert repo_tests.has_changes()! == false
}
// Test committing with valid changes: create a file, verify a commit is
// needed, then commit it.
@[test]
fn test_commit_with_valid_changes() {
	// FIX: the created file name is not needed here; assigning it to a
	// named variable left it unused (an error in V) — discard it instead.
	_ := create_new_file(repo_path_tests)!
	assert repo_tests.need_commit()! == true
	repo_tests.commit('Initial commit')!
}
// Test committing when the tree is clean: commit() must be a no-op and
// must not return an error.
// NOTE(review): commit() succeeds (logs "No changes to commit") on a
// clean tree, so the or-branch below is effectively unreachable —
// confirm the intended expectation.
@[test]
fn test_commit_without_changes() {
	assert repo_tests.has_changes()! == false
	assert repo_tests.need_commit()! == false
	repo_tests.commit('Initial commit') or {
		assert false, 'Commit should be done with some changes'
	}
}
// Test pushing with nothing to push: push() must succeed and take no action.
@[test]
fn test_push_with_no_changes() {
	assert repo_tests.need_push_or_pull()! == false
	repo_tests.push() or {
		assert false, 'Push should not perform any action when no changes exist'
	}
}
// Test pulling remote changes: the pull must complete without error.
@[test]
fn test_pull_remote_changes() {
	repo_tests.pull() or { assert false, 'Pull should succeed' }
}
// Test creating a fixed-name branch and then switching to it.
@[test]
fn test_create_and_switch_new_branch() {
	repo_tests.branch_create('testing-branch') or { assert false, 'Branch creation should succeed' }
	repo_tests.branch_switch('testing-branch') or { assert false, 'Branch switch should succeed' }
}
// Test creating a fixed-name tag and then checking it exists.
@[test]
fn test_create_and_check_tag() {
	repo_tests.tag_create('v1.0.0') or { assert false, 'Tag creation should succeed' }
	repo_tests.tag_exists('v1.0.0') or { assert false, 'Tag switch should succeed' }
}
// Test removing changes: start clean, create a file (one unstaged
// change), then remove_changes() must restore a clean tree.
@[test]
fn test_remove_changes() {
	// precondition: clean working tree
	mut unstaged_changes := repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 0
	mut staged_changes := repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
	file_name := create_new_file(repo_path_tests)!
	assert repo_tests.has_changes()! == true
	unstaged_changes = repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 1
	staged_changes = repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
	repo_tests.remove_changes() or { assert false, 'Changes should be removed successfully' }
	// postcondition: clean again
	unstaged_changes = repo_tests.get_changes_unstaged()!
	assert unstaged_changes.len == 0
	staged_changes = repo_tests.get_changes_staged()!
	assert staged_changes.len == 0
}

View File

@@ -0,0 +1,54 @@
module tests
import freeflowuniverse.herolib.osal
import os
import time
// Test-environment configuration for the gittools test suite.
struct GittoolsTests {
	coderoot  string // root directory under which repos are cloned for the tests
	repo_dir  string // NOTE(review): appears unused in this suite — confirm
	repo_url  string // url of the test repository to clone
	repo_name string // NOTE(review): appears unused in this suite — confirm
}
// Create a uniquely named 'Hello, World!' Python file inside repo_path.
// The Unix timestamp in the name keeps file names unique between calls.
//
// Returns:
// - string: name of the newly created file (not the full path).
fn create_new_file(repo_path string) !string {
	stamp := time.now().unix()
	file_name := 'hello_world_${stamp}.py'
	target := '${repo_path}/${file_name}'
	osal.execute_silent("echo \"print('Hello, World!')\" > ${target}")!
	return file_name
}
// Prepare a clean coderoot for the gittools tests.
//
// Returns:
// - GittoolsTests: struct with the coderoot and the repo URL to clone.
fn setup_repo() !GittoolsTests {
	ts := GittoolsTests{
		coderoot: '/tmp/code'
		repo_url: 'https://github.com/freeflowuniverse/test_repo.git'
	}
	// start from a clean slate when a previous run left files behind
	if os.exists(ts.coderoot) {
		ts.clean()!
	}
	os.mkdir_all(ts.coderoot)!
	return ts
}
// Remove the coderoot directory tree created during repository setup.
//
// Raises:
// - Error: when the directory cannot be removed.
fn (ts GittoolsTests) clean() ! {
	os.rmdir_all(ts.coderoot)!
}

View File

@@ -0,0 +1,64 @@
# Performance Module
A simple V module for measuring and visualizing process performance using Redis for data storage.
## Features
- **Timestamp Management**: Record timestamps for specific events during a process.
- **Epoch Handling**: Start and end measurement phases using epochs.
- **Timeline Visualization**: Display detailed timelines with duration bars and color-coded performance indicators.
## Installation
Install the package and import the module:
`import performance`
## Usage
### Create a Timer
`mut timer := performance.new('my_process')`
### Add Timestamps
Record a timestamp for an event:
`timer.new_timestamp('event_name')`
### Manage Epochs
Start or end a measurement phase:
```
timer.epoch() // Start a new epoch
timer.epoch_end() // End the current epoch
```
### Visualize Timelines
Display the recorded timeline:
`timer.timeline()`
## Dependencies
- Redis: Requires a Redis server for data storage.
- Redis Client: Uses freeflowuniverse.herolib.clients.redisclient.
## Example
```
mut timer := performance.new('example_process')
timer.epoch()
timer.new_timestamp('start')
time.sleep(1 * time.second)
timer.new_timestamp('middle')
time.sleep(2 * time.second)
timer.new_timestamp('end')
timer.epoch_end()
timer.timeline()
```
This will output a detailed timeline with duration bars for each event.

View File

@@ -0,0 +1,130 @@
module performance
import arrays
import time
import sync
import term // For color coding
import freeflowuniverse.herolib.clients.redisclient
// Timer that records named timestamps per epoch in Redis so a process
// timeline can be reconstructed and printed later.
@[noinit]
pub struct ProcessTimer {
pub:
	name string // Name of the timer instance (suffixed with the thread id by new())
}
// Create a ProcessTimer; the thread id is appended to the name so
// timers created on different threads never collide in Redis.
pub fn new(name string) ProcessTimer {
	timer_name := '${name}_${sync.thread_id()}'
	return ProcessTimer{
		name: timer_name
	}
}
// Record a microsecond timestamp for an event in the current epoch.
// When the event name was already used in this epoch, a numeric suffix
// keeps the Redis hash field unique.
pub fn (p ProcessTimer) new_timestamp(name_ string) {
	mut name := name_
	mut redis := redisclient.core_get() or { panic(err) } // Get a Redis client
	epoch := redis.get('${p.name}_epoch') or { '0' } // current epoch; '0' before the first epoch() call
	all := redis.hgetall('${p.name}_${epoch}') or {
		map[string]string{}
	} // Get all timestamps for the current epoch
	// If a timestamp with the same name exists, make it unique.
	// NOTE(review): the prefix count also matches previously suffixed names
	// (e.g. 'x_1' starts with 'x'), so generated suffixes can collide with
	// other prefixed events — confirm this is acceptable.
	if name in all.keys() {
		i := all.keys().filter(it.starts_with(name)).len
		name = '${name}_${i}'
	}
	// Store the new timestamp (µs since epoch) in Redis
	redis.hset('${p.name}_${epoch}', name, time.now().unix_micro().str()) or { panic(err) }
}
// Start a new measurement phase by bumping the epoch counter in Redis.
pub fn (p ProcessTimer) epoch() {
	mut redis_client := redisclient.core_get() or { panic(err) }
	redis_client.incr('${p.name}_epoch') or { panic(err) }
}
// End the current measurement phase; like epoch(), this simply bumps
// the epoch counter so subsequent timestamps land in a fresh hash.
pub fn (p ProcessTimer) epoch_end() {
	mut redis_client := redisclient.core_get() or { panic(err) }
	redis_client.incr('${p.name}_epoch') or { panic(err) }
}
// Generate and display a timeline of events and their durations for
// every recorded epoch. Each event prints its timestamp plus a 40-char,
// color-coded duration bar to the next event (green/yellow/red by
// relative duration).
pub fn (p ProcessTimer) timeline() {
	mut redis := redisclient.core_get() or { panic(err) } // Get a Redis client
	epoch := redis.get('${p.name}_epoch') or { '0' } // Fetch the current epoch value
	println(term.cyan('\nTimelines:\n')) // Print header
	// Loop through all epochs
	for e in 0 .. epoch.int() {
		result := redis.hgetall('${p.name}_${e}') or { continue } // timestamps for this epoch
		if result.len == 0 {
			println(term.yellow('No timeline data found for process: ${p.name}_${e}'))
			continue
		}
		// Parse the results into a map of event names to µs timestamps.
		// NOTE(review): assumes hgetall preserves insertion order — confirm.
		mut timestamps := map[string]i64{}
		for key, value in result {
			timestamps[key] = value.i64()
		}
		// PERF: cache values() once — the old code rebuilt the list on
		// every loop iteration (O(n) per access).
		values := timestamps.values()
		// Durations between consecutive timestamps (first entry is 0).
		mut durations := []i64{}
		for i, timestamp in values {
			prev := if i == 0 { timestamp } else { values[i - 1] }
			durations << timestamp - prev
		}
		// Normalize bar lengths against the longest gap.
		max_duration := arrays.max(durations) or { 1 }
		scale := 40.0 / f64(max_duration)
		println(term.cyan('\nProcess Timeline:\n'))
		mut i := 0
		for key, timestamp in timestamps {
			// event name and time-of-day portion of the RFC3339 timestamp
			println('${key}: ${time.unix_micro(timestamp).format_rfc3339_micro()[10..]}')
			// the last event has no following gap, so no bar
			if i == timestamps.len - 1 {
				continue
			}
			duration := durations[i + 1]
			bar_length := int(duration * scale)
			// color by thirds of the maximum duration
			color := if duration < max_duration / 3 {
				term.green
			} else if duration < 2 * max_duration / 3 {
				term.yellow
			} else {
				term.red
			}
			bar := if duration == 0 {
				''
			} else {
				color('|') + color('-'.repeat(bar_length)) + color(' '.repeat(40 - bar_length)) +
					color('|')
			}
			println('${bar} (${duration}μs)')
			i++
		}
	}
	println('\n') // End with a newline
}

View File

@@ -0,0 +1,11 @@
## sourcetree
```v
import freeflowuniverse.herolib.develop.sourcetree
//will look for git in location if not found will give error
sourcetree.open(path:"/tmp/something")!
```
- if the path is not specified, the current path will be chosen

View File

@@ -0,0 +1,22 @@
module sourcetree
import freeflowuniverse.herolib.osal
import os
// import freeflowuniverse.herolib.ui.console
// Arguments for `open`. An empty `path` is documented to mean the
// current directory (see the comment on `open` below).
@[params]
pub struct OpenArgs {
pub mut:
	path string // filesystem location that should contain a git checkout
}
// open launches the SourceTree app on the given path.
// If `args.path` is empty, the current working directory is used
// (this was documented above but previously not implemented).
// Returns an error when the path does not exist.
pub fn open(args OpenArgs) ! {
	mut path := args.path
	if path == '' {
		// Documented default: fall back to the current working directory.
		path = os.getwd()
	}
	if !os.exists(path) {
		return error('Cannot open SourceTree: could not find path ${path}')
	}
	// Quote the path so locations containing spaces are passed as one argument.
	cmd4 := 'open -a SourceTree "${path}"'
	// console.print_debug(cmd4)
	osal.execute_interactive(cmd4)!
}

View File

@@ -0,0 +1,10 @@
## visual studio code
```v
import freeflowuniverse.herolib.develop.vscode
vscode.open(path:"/tmp/something")!
```
- if the path is not specified, the current path is used

View File

@@ -0,0 +1,63 @@
module vscode
import freeflowuniverse.herolib.osal
import os
// VSCodeHelper wraps locating and launching Visual Studio Code on a path.
pub struct VSCodeHelper {
pub mut:
	install_if_not_exists bool // intended to trigger an automatic install when VS Code is missing; currently only produces an error (install logic is still a TODO)
	path string // directory or file to open; '' is resolved to the cwd by new()
}
// new builds a VSCodeHelper for the given path.
// An empty path defaults to the current working directory.
pub fn new(path string) VSCodeHelper {
	mut target := path
	if target == '' {
		target = os.getwd()
	}
	return VSCodeHelper{
		path: target
	}
}
// Open Visual Studio Code at the specified path.
// If the path is not provided, it defaults to the current working directory.
// Returns an error when VS Code is not installed or the path does not exist.
pub fn (self VSCodeHelper) open() ! {
	self.check_installation()!
	if !os.exists(self.path) {
		return error('Cannot open Visual Studio Code: path not found: ${self.path}')
	}
	// Quote the path so directories containing spaces are passed as one argument.
	cmd := '${self.get_executable_binary()} "${self.path}"'
	osal.execute_interactive(cmd)!
}
// Resolve the command name used to launch Visual Studio Code:
// 'vscode' when that command exists, otherwise 'code',
// or an empty string when no installation is found.
fn (self VSCodeHelper) get_executable_binary() string {
	if !self.is_installed() {
		return ''
	}
	return if osal.cmd_exists('vscode') { 'vscode' } else { 'code' }
}
// Check if Visual Studio Code is installed.
// Either known binary name ('vscode' or 'code') counts as an installation.
pub fn (self VSCodeHelper) is_installed() bool {
	for candidate in ['vscode', 'code'] {
		if osal.cmd_exists(candidate) {
			return true
		}
	}
	return false
}
// Ensure Visual Studio Code is available, returning an error otherwise.
// NOTE(review): the original had two branches that returned the identical
// error; auto-install (install_if_not_exists) is still unimplemented, so the
// branches are collapsed here with the TODO preserved. Behavior is unchanged.
pub fn (self VSCodeHelper) check_installation() ! {
	if self.is_installed() {
		return
	}
	if self.install_if_not_exists {
		// TODO: implement installation, e.g. vscodeinstaller.install()!
	}
	return error('Visual Studio Code is not installed.\nPlease see https://code.visualstudio.com/download')
}

View File

@@ -150,7 +150,7 @@ pub fn secret() !string {
return secret
}
pub fn client() !zdb.ZDB {
pub fn client() !ZDB {
mut db := zdb.get('localhost:3355', secret()!, 'test')!
return db
}

View File

@@ -94,9 +94,9 @@ fn ulist_get() !ulist.UList {
fn upload() ! {
}
fn startupcmd() ![]zinit.ZProcessNewArgs {
fn startupcmd() ![]ZProcessNewArgs {
mut res := []zinit.ZProcessNewArgs{}
res << zinit.ZProcessNewArgs{
res << ZProcessNewArgs{
name: 'zinit'
cmd: '/usr/local/bin/zinit init'
startuptype: .systemd

View File

@@ -28,7 +28,7 @@ pub fn get(args_ ArgsGet) !&Zinit {
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManager {
fn startupmanager_get(cat StartupManagerType) !startupmanager.StartupManager {
// unknown
// screen
// zinit

View File

@@ -12,30 +12,22 @@ fn test_package_management() {
}
// First ensure wget is not installed
package_remove('wget') or {
assert true // Ignore error if package wasn't installed
}
package_remove('wget') or {}
// Verify wget is not installed
assert !cmd_exists('wget')
// Update package list
package_refresh() or {
assert false, 'Failed to refresh package list: ${err}'
}
package_refresh() or { assert false, 'Failed to refresh package list: ${err}' }
// Install wget
package_install('wget') or {
assert false, 'Failed to install wget: ${err}'
}
package_install('wget') or { assert false, 'Failed to install wget: ${err}' }
// Verify wget is now installed
assert cmd_exists('wget')
// Clean up - remove wget
package_remove('wget') or {
assert false, 'Failed to remove wget: ${err}'
}
package_remove('wget') or { assert false, 'Failed to remove wget: ${err}' }
// Verify wget is removed
assert !cmd_exists('wget')

83
lib/readme.md Normal file
View File

@@ -0,0 +1,83 @@
# herolib
An opinionated library, used mainly by ThreeFold to automate cloud environments. It is still very much a work in progress and we welcome any contribution.
Please also check our [cookbook](https://github.com/freeflowuniverse/herolib/tree/development/cookbook), which might give you some ideas on how to use it.
## Get started with hero
```bash
curl -sL https://raw.githubusercontent.com/freeflowuniverse/herolib/development/scripts/install_hero.sh | bash
```
## Get started with herolib
The following script will install V (vlang) and herolib (please report any bugs):
```bash
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/development/scripts/installer.sh > /tmp/install.sh
bash /tmp/install.sh
```
optional requirements
- ssh key loaded for access to github
### alternative with manual git checkout & v install
requirements
- v installed
- ssh key loaded for access to github
```bash
mkdir -p ~/code/github/freeflowuniverse
cd ~/code/github/freeflowuniverse
git clone git@github.com:freeflowuniverse/herolib.git
cd herolib
# checkout a branch with most recent changes
# git checkout development
bash install.sh
```
## Install Hero
hero is our "hero" tool to execute heroscript, deal with git, ...
hero will be installed in
- /usr/local/bin for linux
- ~/hero/bin for osx
```bash
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/development/scripts/install_hero.sh > /tmp/hero_install.sh
bash /tmp/hero_install.sh
#to debug
bash -x /tmp/hero_install.sh
#maybe you want to copy to your system bin dir
cp ~/hero/bin/hero /usr/local/bin
#to use hero make sure you restart your shell or you do (if osx)
source ~/.zprofile
#check how to use, can also do on each of the subcommands
hero -help
```
requirements
- ssh key loaded for access to github
## generating docs yourself
```bash
#cd in this directory
cd ~/code/github/freeflowuniverse/herolib
bash doc.sh
```
## build hero
- linux as done by github actions, nothing to do
- osx: ``

View File

@@ -8,7 +8,7 @@ import json
// client for telegram bot
struct TelegramClient {
bot vgram.Bot
baobab client.Client // Baobab client
baobab Client // Baobab client
waiting_qs map[string]RedisQueue // where string is user_id
}

12
lib/v.mod Normal file
View File

@@ -0,0 +1,12 @@
Module {
name: 'herolib'
author: 'freeflowuniverse'
description: 'Set of various libraries'
version: '0.1.0'
repo_url: 'https://github.com/freeflowuniverse/herolib'
deps: []
vcs: 'git'
license: 'apache2'
}

12
lib/vpkg.json Normal file
View File

@@ -0,0 +1,12 @@
{
"name": "herolib",
"version": "0.1.0",
"author": [
"despiegk <info@threefold.io>"
],
"repo": "https://github.com/herouniverse/herolib/herolib",
"sources": [
"https://vpkg-project.github.io/registry/src/"
],
"dependencies": []
}

View File

@@ -4,7 +4,7 @@ is a python tool to help us to get .md files into our manual
copies all readme.md files from the different lib directors to
- e.g. $crystallib/manual/libreadme/installers_sysadmintools_actrunner.md
- e.g. $herolib/manual/libreadme/installers_sysadmintools_actrunner.md
- note the name has the location inside of where info came from
this allows us to make manual and to copy information from the readme's which are in library
@@ -12,6 +12,6 @@ this allows us to make manual and to copy information from the readme's which ar
to run
```bash
~/code/github/freeflowuniverse/crystallib/tools/doc_extractor/extractor.sh
~/code/github/freeflowuniverse/herolib/tools/doc_extractor/extractor.sh
```

View File

@@ -125,7 +125,7 @@ fn dotest(path string, base_dir string, use_redis bool)! {
cmd := 'v -stats -enable-globals -n -w -gc none -no-retry-compilation -cc tcc test ${norm_path}'
println(cmd)
result := os.execute(cmd)
eprintln(result)
if result.exit_code != 0 {
eprintln('Test failed: ${path}')
eprintln(result.output)
@@ -168,6 +168,7 @@ tests_ignore := "
tests_error := "
net_test.v
osal/package_test.v
systemd_process_test.v
rpc_test.v
screen_test.v