Commit 43ffedfad8 (parent 71298a9704), 2025-08-15 08:52:46 +02:00
36 changed files with 517 additions and 550 deletions

View File

@@ -37,4 +37,3 @@ println('Found ${issues.len} issues.')
for issue in issues {
println(' #${issue.number}: ${issue.title}')
}

View File

@@ -20,7 +20,7 @@ pub mut:
fn (mut self GiteaClient) httpclient() !&httpconnection.HTTPConnection {
mut http_conn := httpconnection.new(
name: 'giteaclient_${self.name}'
url: self.url
url: self.url
)!
// Add authentication header if API key is provided
@@ -49,7 +49,7 @@ fn obj_init(mycfg_ GiteaClient) !GiteaClient {
if mycfg.url.ends_with('/api') {
mycfg.url = mycfg.url.replace('/api', '')
}
mycfg.url = "https://${mycfg.url}/api/v1"
mycfg.url = 'https://${mycfg.url}/api/v1'
if mycfg.secret.len == 0 {
return error('secret needs to be filled in for ${mycfg.name}')
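
For reference, a minimal standalone sketch (in V) of the URL normalization obj_init performs above; normalize_gitea_url is a hypothetical helper written for illustration, not part of the module:

// Hypothetical helper mirroring the obj_init logic above: strip a trailing '/api'
// the caller may have supplied, then point at the v1 REST API over https.
fn normalize_gitea_url(url_ string) string {
	mut url := url_
	if url.ends_with('/api') {
		url = url.replace('/api', '')
	}
	return 'https://${url}/api/v1'
}

fn main() {
	println(normalize_gitea_url('git.example.com/api')) // https://git.example.com/api/v1
}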

View File

@@ -12,7 +12,7 @@ pub fn (mut client GiteaClient) user_list_repos() ![]Repository {
prefix: '/user/repos'
}
mut http_client := client.httpclient()!
r:=http_client.get_json_list_generic[Repository](req)!
r := http_client.get_json_list_generic[Repository](req)!
return r
}
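
Mirroring the issue-listing snippet at the top of this commit, a hedged sketch of calling user_list_repos; how the GiteaClient instance is obtained is assumed and not shown in this diff:

// Hedged usage sketch; `client` is assumed to be an already-configured GiteaClient.
repos := client.user_list_repos()!
println('Found ${repos.len} repositories.')
for repo in repos {
	println('  ${repo.full_name}: ${repo.description}')
}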

View File

@@ -5,41 +5,41 @@ import time
pub struct APIError {
pub:
message string
url string
url string
}
pub struct AccessToken {
pub:
id i64
name string
scopes []string
sha1 string
id i64
name string
scopes []string
sha1 string
token_last_eight string
}
pub struct ActionVariable {
pub:
owner_id i64
repo_id i64
name string
data string
repo_id i64
name string
data string
}
pub struct Activity {
pub:
act_user User
act_user User
act_user_id i64
comment Comment
comment_id i64
content string
created time.Time
id i64
is_private bool
op_type string
ref_name string
repo Repository
repo_id i64
user_id i64
comment Comment
comment_id i64
content string
created time.Time
id i64
is_private bool
op_type string
ref_name string
repo Repository
repo_id i64
user_id i64
}
pub struct AddCollaboratorOption {
@@ -49,460 +49,460 @@ pub:
pub struct AddTimeOption {
pub:
time i64
created time.Time
time i64
created time.Time
user_name string
}
pub struct AnnotatedTagObject {
pub:
sha string
typ string @[json: 'type'] // `type` is a keyword in V
typ string @[json: 'type'] // `type` is a keyword in V
url string
}
pub struct AnnotatedTag {
pub:
message string
object AnnotatedTagObject
sha string
tag string
tagger CommitUser
url string
message string
object AnnotatedTagObject
sha string
tag string
tagger CommitUser
url string
verification PayloadCommitVerification
}
pub struct Attachment {
pub:
browser_download_url string
created_at time.Time
download_count i64
id i64
name string
size i64
uuid string
created_at time.Time
download_count i64
id i64
name string
size i64
uuid string
}
pub struct Badge {
pub:
id i64
slug string
id i64
slug string
description string
image_url string
image_url string
}
pub struct Branch {
pub:
commit PayloadCommit
commit PayloadCommit
effective_branch_protection_name string
enable_status_check bool
name string
protected bool
required_approvals i64
status_check_contexts []string
user_can_merge bool
user_can_push bool
enable_status_check bool
name string
protected bool
required_approvals i64
status_check_contexts []string
user_can_merge bool
user_can_push bool
}
pub struct BranchProtection {
pub:
branch_name string
rule_name string
enable_push bool
enable_push_whitelist bool
push_whitelist_usernames []string
push_whitelist_teams []string
push_whitelist_deploy_keys bool
enable_merge_whitelist bool
merge_whitelist_usernames []string
merge_whitelist_teams []string
enable_status_check bool
status_check_contexts []string
required_approvals i64
enable_approvals_whitelist bool
approvals_whitelist_username []string
approvals_whitelist_teams []string
block_on_rejected_reviews bool
rule_name string
enable_push bool
enable_push_whitelist bool
push_whitelist_usernames []string
push_whitelist_teams []string
push_whitelist_deploy_keys bool
enable_merge_whitelist bool
merge_whitelist_usernames []string
merge_whitelist_teams []string
enable_status_check bool
status_check_contexts []string
required_approvals i64
enable_approvals_whitelist bool
approvals_whitelist_username []string
approvals_whitelist_teams []string
block_on_rejected_reviews bool
block_on_official_review_requests bool
block_on_outdated_branch bool
dismiss_stale_approvals bool
ignore_stale_approvals bool
require_signed_commits bool
protected_file_patterns string
unprotected_file_patterns string
created_at time.Time
updated_at time.Time
block_on_outdated_branch bool
dismiss_stale_approvals bool
ignore_stale_approvals bool
require_signed_commits bool
protected_file_patterns string
unprotected_file_patterns string
created_at time.Time
updated_at time.Time
}
pub struct ChangeFileOperation {
pub:
operation string // "create", "update", "delete"
path string
content string // base64 encoded
path string
content string // base64 encoded
from_path string
sha string
sha string
}
pub struct ChangeFilesOptions {
pub:
author Identity
branch string
committer Identity
dates CommitDateOptions
files []ChangeFileOperation
message string
author Identity
branch string
committer Identity
dates CommitDateOptions
files []ChangeFileOperation
message string
new_branch string
signoff bool
signoff bool
}
pub struct ChangedFile {
pub:
additions i64
changes i64
contents_url string
deletions i64
filename string
html_url string
additions i64
changes i64
contents_url string
deletions i64
filename string
html_url string
previous_filename string
raw_url string
status string
raw_url string
status string
}
pub struct Commit {
pub:
author User
commit RepoCommit
author User
commit RepoCommit
committer User
created time.Time
files []CommitAffectedFiles
html_url string
parents []CommitMeta
sha string
stats CommitStats
url string
created time.Time
files []CommitAffectedFiles
html_url string
parents []CommitMeta
sha string
stats CommitStats
url string
}
pub struct CommitAffectedFiles {
pub:
filename string
status string
status string
}
pub struct CommitDateOptions {
pub:
author time.Time
author time.Time
committer time.Time
}
pub struct CommitMeta {
pub:
created time.Time
sha string
url string
sha string
url string
}
pub struct CommitStats {
pub:
additions i64
deletions i64
total i64
total i64
}
pub struct CommitUser {
pub:
date string
date string
email string
name string
name string
}
pub struct Comment {
pub:
assets []Attachment
body string
created_at time.Time
html_url string
id i64
issue_url string
original_author string
assets []Attachment
body string
created_at time.Time
html_url string
id i64
issue_url string
original_author string
original_author_id i64
pull_request_url string
updated_at time.Time
user User
pull_request_url string
updated_at time.Time
user User
}
pub struct CreateIssueOption {
pub:
title string
assignee string
assignees []string
body string
closed bool
due_date time.Time
labels []i64
title string
assignee string
assignees []string
body string
closed bool
due_date time.Time
labels []i64
milestone i64
ref string
ref string
}
pub struct CreateRepoOption {
pub:
name string
auto_init bool
default_branch string
description string
gitignores string
issue_labels string
license string
name string
auto_init bool
default_branch string
description string
gitignores string
issue_labels string
license string
object_format_name string // "sha1" or "sha256"
private bool
readme string
template bool
trust_model string // "default", "collaborator", "committer", "collaboratorcommitter"
private bool
readme string
template bool
trust_model string // "default", "collaborator", "committer", "collaboratorcommitter"
}
pub struct Identity {
pub:
email string
name string
name string
}
pub struct InternalTracker {
pub:
allow_only_contributors_to_track_time bool
enable_issue_dependencies bool
enable_time_tracker bool
enable_issue_dependencies bool
enable_time_tracker bool
}
pub struct Issue {
pub:
id i64
url string
html_url string
number i64
user User
original_author string
id i64
url string
html_url string
number i64
user User
original_author string
original_author_id i64
title string
body string
ref string
labels []Label
milestone Milestone
assignee User
assignees []User
state string // StateType
is_locked bool
comments i64
created_at time.Time
updated_at time.Time
closed_at time.Time
due_date time.Time
pull_request PullRequestMeta
repository RepositoryMeta
assets []Attachment
pin_order i64
title string
body string
ref string
labels []Label
milestone Milestone
assignee User
assignees []User
state string // StateType
is_locked bool
comments i64
created_at time.Time
updated_at time.Time
closed_at time.Time
due_date time.Time
pull_request PullRequestMeta
repository RepositoryMeta
assets []Attachment
pin_order i64
}
pub struct Label {
pub:
id i64
name string
exclusive bool
id i64
name string
exclusive bool
is_archived bool
color string
color string
description string
url string
url string
}
pub struct Milestone {
pub:
id i64
title string
description string
state string // StateType
open_issues i64
id i64
title string
description string
state string // StateType
open_issues i64
closed_issues i64
created_at time.Time
updated_at time.Time
closed_at time.Time
due_on time.Time
created_at time.Time
updated_at time.Time
closed_at time.Time
due_on time.Time
}
pub struct Organization {
pub:
avatar_url string
description string
email string
full_name string
id i64
location string
name string
avatar_url string
description string
email string
full_name string
id i64
location string
name string
repo_admin_change_team_access bool
username string
visibility string
website string
username string
visibility string
website string
}
pub struct PayloadCommitVerification {
pub:
payload string
reason string
payload string
reason string
signature string
signer PayloadUser
verified bool
signer PayloadUser
verified bool
}
pub struct PayloadCommit {
pub:
added []string
author PayloadUser
committer PayloadUser
id string
message string
modified []string
removed []string
timestamp time.Time
url string
added []string
author PayloadUser
committer PayloadUser
id string
message string
modified []string
removed []string
timestamp time.Time
url string
verification PayloadCommitVerification
}
pub struct PayloadUser {
pub:
email string
name string
email string
name string
username string
}
pub struct Permission {
pub:
admin bool
pull bool
push bool
pull bool
push bool
}
pub struct PullRequestMeta {
pub:
merged bool
merged bool
merged_at time.Time
draft bool
html_url string
draft bool
html_url string
}
pub struct RepoCommit {
pub:
author CommitUser
committer CommitUser
message string
tree CommitMeta
url string
author CommitUser
committer CommitUser
message string
tree CommitMeta
url string
verification PayloadCommitVerification
}
pub struct Repository {
pub:
id i64
owner User
name string
full_name string
description string
empty bool
private bool
fork bool
template bool
parent_id i64
mirror bool
size i64
language string
languages_url string
html_url string
url string
link string
ssh_url string
clone_url string
website string
stars_count i64
forks_count i64
watchers_count i64
open_issues_count i64
open_pr_counter i64
release_counter i64
default_branch string
archived bool
created_at time.Time
updated_at time.Time
archived_at time.Time
permissions Permission
has_issues bool
internal_tracker InternalTracker
has_wiki bool
has_pull_requests bool
has_projects bool
has_releases bool
has_packages bool
has_actions bool
ignore_whitespace_conflicts bool
allow_merge_commits bool
allow_rebase bool
allow_rebase_explicit bool
allow_squash_merge bool
allow_fast_forward_only_merge bool
allow_rebase_update bool
id i64
owner User
name string
full_name string
description string
empty bool
private bool
fork bool
template bool
parent_id i64
mirror bool
size i64
language string
languages_url string
html_url string
url string
link string
ssh_url string
clone_url string
website string
stars_count i64
forks_count i64
watchers_count i64
open_issues_count i64
open_pr_counter i64
release_counter i64
default_branch string
archived bool
created_at time.Time
updated_at time.Time
archived_at time.Time
permissions Permission
has_issues bool
internal_tracker InternalTracker
has_wiki bool
has_pull_requests bool
has_projects bool
has_releases bool
has_packages bool
has_actions bool
ignore_whitespace_conflicts bool
allow_merge_commits bool
allow_rebase bool
allow_rebase_explicit bool
allow_squash_merge bool
allow_fast_forward_only_merge bool
allow_rebase_update bool
default_delete_branch_after_merge bool
default_merge_style string
default_allow_maintainer_edit bool
avatar_url string
internal bool
mirror_interval string
mirror_updated time.Time
repo_transfer RepoTransfer
default_merge_style string
default_allow_maintainer_edit bool
avatar_url string
internal bool
mirror_interval string
mirror_updated time.Time
repo_transfer RepoTransfer
}
pub struct RepositoryMeta {
pub:
id i64
name string
owner string
id i64
name string
owner string
full_name string
}
pub struct Team {
pub:
can_create_org_repo bool
description string
id i64
can_create_org_repo bool
description string
id i64
includes_all_repositories bool
name string
organization Organization
permission string
units []string
units_map map[string]string
name string
organization Organization
permission string
units []string
units_map map[string]string
}
pub struct RepoTransfer {
pub:
doer User
doer User
recipient User
teams []Team
teams []Team
}
pub struct User {
pub:
id i64
login string
full_name string
email string
avatar_url string
language string
is_admin bool
last_login time.Time
created time.Time
restricted bool
active bool
prohibit_login bool
location string
website string
description string
visibility string
followers_count i64
following_count i64
id i64
login string
full_name string
email string
avatar_url string
language string
is_admin bool
last_login time.Time
created time.Time
restricted bool
active bool
prohibit_login bool
location string
website string
description string
visibility string
followers_count i64
following_count i64
starred_repos_count i64
username string
}
username string
}
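
These are plain data models; as an aside, a minimal sketch of decoding Gitea JSON into one of them with V's json module. The sample payload is illustrative, and the snippet assumes the Label struct above is in scope:

import json

fn main() {
	// Illustrative payload only; json.decode maps keys to struct fields by name,
	// with @[json: '...'] overriding the key, as AnnotatedTagObject.typ does above.
	data := '{"id": 1, "name": "bug", "color": "ee0701", "exclusive": false}'
	label := json.decode(Label, data) or { panic('decode failed: ${err}') }
	println('label #${label.id}: ${label.name} (color ${label.color})')
}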

View File

@@ -143,8 +143,7 @@ fn cmd_docusaurus_execute(cmd Command) ! {
)!
// TODO: We need to load the sitename instead, or maybe remove it
mut dsite := docusaurus.dsite_get("")!
mut dsite := docusaurus.dsite_get('')!
if buildpublish {
// Build and publish production-ready artifacts

View File

@@ -96,7 +96,6 @@ pub fn cmd_git(mut cmdroot Command) {
mut allcmdsref := [&list_command, &clone_command, &push_command, &pull_command, &commit_command,
&reload_command, &delete_command, &sourcetree_command, &editor_command]
for mut c in allcmdsref {
c.add_flag(Flag{
flag: .bool
@@ -113,6 +112,14 @@ pub fn cmd_git(mut cmdroot Command) {
abbrev: 'l'
description: 'reload the data in cache for selected repos.'
})
c.add_flag(Flag{
flag: .string
required: false
name: 'filter'
abbrev: 'f'
description: 'filter the repos by name or path.'
})
}
mut allcmdscommit := [&push_command, &pull_command, &commit_command]
@@ -129,13 +136,6 @@ pub fn cmd_git(mut cmdroot Command) {
mut urlcmds := [&clone_command, &pull_command, &push_command, &editor_command, &sourcetree_command]
for mut c in urlcmds {
c.add_flag(Flag{
flag: .string
required: false
name: 'url'
abbrev: 'u'
description: 'url for clone operation.'
})
c.add_flag(Flag{
flag: .bool
required: false
@@ -163,16 +163,6 @@ pub fn cmd_git(mut cmdroot Command) {
})
}
for mut c in allcmdsref {
c.add_flag(Flag{
flag: .string
required: false
name: 'filter'
abbrev: 'f'
description: 'Filter is part of path of repo e.g. threefoldtech/info_'
})
}
for mut c_ in allcmdsref {
mut c := *c_
c.add_flag(Flag{
@@ -211,7 +201,18 @@ fn cmd_git_execute(cmd Command) ! {
mut gs := gittools.new(coderoot: coderoot)!
// create the filter for doing group actions, or action on 1 repo
mut filter := cmd.flags.get_string('filter') or { '' }
mut filter := ''
mut url := ''
mut path := ''
if cmd.args.len > 0 {
arg1 := cmd.args[0]
if arg1.starts_with('git') || arg1.starts_with('http') {
url = arg1
} else {
path = arg1
}
}
if cmd.name in gittools.gitcmds.split(',') {
mut pull := cmd.flags.get_bool('pull') or { false }
@@ -223,7 +224,7 @@ fn cmd_git_execute(cmd Command) ! {
}
mypath := gs.do(
filter: filter
filter: cmd.flags.get_string('filter') or { '' }
reload: reload
recursive: recursive
cmd: cmd.name
@@ -231,7 +232,8 @@ fn cmd_git_execute(cmd Command) ! {
pull: pull
reset: reset
msg: cmd.flags.get_string('message') or { '' }
url: cmd.flags.get_string('url') or { '' }
url: url
path: path
)!
if cmd.name == 'cd' {
print('cd ${mypath}\n')
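
A standalone sketch (hypothetical helper) of the positional-argument classification this hunk introduces: an argument that looks like a git/http URL becomes `url`, anything else becomes `path`:

fn classify_arg(arg string) (string, string) {
	// returns (url, path), mirroring the arg1 handling in cmd_git_execute above
	if arg.starts_with('git') || arg.starts_with('http') {
		return arg, ''
	}
	return '', arg
}

fn main() {
	url, path := classify_arg('https://github.com/freeflowuniverse/herolib')
	println('url=${url} path=${path}')
}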

View File

@@ -31,7 +31,6 @@ pub mut:
pub fn new(args_ PlayBookNewArgs) !PlayBook {
mut args := args_
mut c := base.context() or { return error('failed to get context: ${err}') }
mut s := c.session_new()!

View File

@@ -148,7 +148,7 @@ pub fn (mut plbook PlayBook) get(args FindArgs) !&Action {
} else if res.len > 1 {
$if debug {
print_backtrace()
}
}
return error("found more than one action: '${args.filter}'")
}
return res[0] or { panic('bug') }

View File

@@ -12,7 +12,6 @@ enum State {
othertext
}
// pub struct PlayBookNewArgs {
// path string
// text string
@@ -37,23 +36,24 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
args.path = newpath.path
}
if plbook.path=="" && args.path!="" {
if plbook.path == '' && args.path != '' {
plbook.path = args.path
}
if args.text.len>0 && args.replace.len>0{
//now we need to replace any placeholders in the text
if args.text.len > 0 && args.replace.len > 0 {
// now we need to replace any placeholders in the text
for key, value in args.replace {
if key.starts_with('@') || key.starts_with('$') || key.starts_with('[') || key.starts_with('{') {
if key.starts_with('@') || key.starts_with('$') || key.starts_with('[')
|| key.starts_with('{') {
args.text = args.text.replace(key, value)
}else{
args.text = args.text.replace("@${key}", value)
args.text = args.text.replace("$\{${key}\}", value)
args.text = args.text.replace("\{${key}\}", value)
} else {
args.text = args.text.replace('@${key}', value)
args.text = args.text.replace('$\{${key}\}', value)
args.text = args.text.replace('\{${key}\}', value)
}
}
}
// walk over directory
if args.path.len > 0 {
// console.print_header("PLBOOK add path:'${args.path}'")
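
A hedged usage sketch of the replace handling above; it assumes playbook.new() can be called without arguments, that add() accepts the text/replace named parameters as used in this commit, and that a Hero context is available:

import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.core.texttools

fn main() {
	mut plbook := playbook.new()!
	// keys without a special prefix are substituted in all three spellings: @name, $ {name}, {name}
	replace := texttools.to_map('name:mysite, color:red')
	plbook.add(text: '!!site.define name:@name color:{color}', replace: replace)!
	println(plbook)
}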

View File

@@ -2,7 +2,7 @@ module playbook
import freeflowuniverse.herolib.develop.gittools // Added import for gittools
//REMARK: include is done in play_core
// REMARK: include is done in play_core
// // Include external playbook actions (from git repo or local path)
// // based on actions defined as `!!play.include`.

View File

@@ -53,9 +53,8 @@ pub fn run(args_ PlayArgs) ! {
giteaclient.play(mut plbook)!
if args.emptycheck{
if args.emptycheck {
// Ensure we did not leave any actions unprocessed
plbook.empty_check()!
}
}

View File

@@ -11,23 +11,20 @@ import os
// -------------------------------------------------------------------
fn play_core(mut plbook PlayBook) ! {
// ----------------------------------------------------------------
// 1. Include handling (play include / echo)
// ----------------------------------------------------------------
// ----------------------------------------------------------------
// 1. Include handling (play include / echo)
// ----------------------------------------------------------------
// Track included paths to prevent infinite recursion
mut included_paths := map[string]bool{}
for mut action_ in plbook.find(filter: 'play.*')! {
if action_.name == 'include' {
mut action := *action_
mut toreplace := action.params.get_default('replace', '')!
mut playrunpath := action.params.get_default('path', '')!
if playrunpath.len == 0 {
action.name = 'pull'
mypath:=gittools.path(
mypath := gittools.path(
path: playrunpath
git_url: action.params.get_default('git_url', '')!
git_reset: action.params.get_default_false('git_reset')
@@ -40,8 +37,8 @@ fn play_core(mut plbook PlayBook) ! {
}
// console.print_debug('play run:\n${action_}')
if ! playrunpath.starts_with('/') {
playrunpath=os.abs_path("${plbook.path}/${playrunpath}")
if !playrunpath.starts_with('/') {
playrunpath = os.abs_path('${plbook.path}/${playrunpath}')
}
console.print_debug('play run include path:${playrunpath}')
@@ -51,12 +48,11 @@ fn play_core(mut plbook PlayBook) ! {
console.print_debug('Skipping already included path: ${playrunpath}')
continue
}
toreplacedict:=texttools.to_map(toreplace)
toreplacedict := texttools.to_map(toreplace)
included_paths[playrunpath] = true
plbook.add(path: playrunpath,replace:toreplacedict)!
action.done = true
plbook.add(path: playrunpath, replace: toreplacedict)!
action.done = true
}
if action_.name == 'echo' {
content := action_.params.get_default('content', "didn't find content")!
@@ -64,38 +60,35 @@ fn play_core(mut plbook PlayBook) ! {
}
}
// ----------------------------------------------------------------
// 2. Session environment handling
// ----------------------------------------------------------------
// Guard make sure a session exists
mut session := plbook.session
// !!session.env_set / env_set_once
for mut action in plbook.find(filter: 'session.')! {
// ----------------------------------------------------------------
// 2. Session environment handling
// ----------------------------------------------------------------
// Guard make sure a session exists
mut session := plbook.session
mut p := action.params
match action.name {
'env_set' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
session.env_set(key, val)!
}
'env_set_once' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
// Use the dedicated setonce method
session.env_set_once(key, val)!
}
else { /* ignore unknown subaction */ }
}
action.done = true
}
// !!session.env_set / env_set_once
for mut action in plbook.find(filter: 'session.')! {
mut p := action.params
match action.name {
'env_set' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
session.env_set(key, val)!
}
'env_set_once' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
// Use the dedicated setonce method
session.env_set_once(key, val)!
}
else {}
}
action.done = true
}
// ----------------------------------------------------------------
// 3. Template replacement in action parameters
// ----------------------------------------------------------------
// ----------------------------------------------------------------
// 3. Template replacement in action parameters
// ----------------------------------------------------------------
// Apply template replacement from session environment variables
if session.env.len > 0 {
// Create a map with name_fix applied to keys for template replacement
@@ -143,5 +136,4 @@ fn play_core(mut plbook PlayBook) ! {
session.save()!
action.done = true
}
}

View File

@@ -11,8 +11,7 @@ import freeflowuniverse.herolib.ui.console // For verbose error reporting
// ---------------------------------------------------------------
fn play_git(mut plbook PlayBook) ! {
mut gs:=gittools.new()!
mut gs := gittools.new()!
define_actions := plbook.find(filter: 'git.define')!
if define_actions.len > 0 {
@@ -25,19 +24,18 @@ fn play_git(mut plbook PlayBook) ! {
ssh_key_path := p.get_default('ssh_key_path', '')!
reload := p.get_default_false('reload')
gs=gittools.new(
coderoot: coderoot
log: log
debug: debug
offline: offline
reload: reload
gs = gittools.new(
coderoot: coderoot
log: log
debug: debug
offline: offline
reload: reload
)!
if light || ssh_key_path.len > 0 {
gs.config_set(light: light, ssh_key_path: ssh_key_path)!
}
}
}
// -----------------------------------------------------------
// !!git.clone clone repositories

View File

@@ -23,7 +23,7 @@ pub fn to_array_int(r string) []int {
return r2
}
//convert a:b ,c:d,e:f to dict with keys a,c,e and corresponding values b,d,f
// convert a:b ,c:d,e:f to dict with keys a,c,e and corresponding values b,d,f
pub fn to_map(mapstring string) map[string]string {
mut result := map[string]string{}
mut mapstring_array := to_array(mapstring)
@@ -31,7 +31,7 @@ pub fn to_map(mapstring string) map[string]string {
if item.contains(':') {
parts := item.split(':')
if parts.len == 2 {
result[parts[0].trim_space()] = parts[1].trim_space().trim("'\"").trim_space()
result[parts[0].trim_space()] = parts[1].trim_space().trim('\'"').trim_space()
} else {
panic('to_map: expected key:value pairs, got: ${item}')
}
@@ -40,7 +40,6 @@ pub fn to_map(mapstring string) map[string]string {
}
}
return result
}
// intelligent way how to map a line to a map
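
A hedged example of to_map as described by the comment above, with the quote trimming shown in this hunk:

import freeflowuniverse.herolib.core.texttools

fn main() {
	m := texttools.to_map("name:'mysite', branch:main")
	println(m) // {'name': 'mysite', 'branch': 'main'}
}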

View File

@@ -24,7 +24,7 @@ fn decode_struct[T](_ T, data string) !T {
if !data.contains(action_name) {
action_name = '${obj_name}.configure'
if !data.contains(action_name) {
$if debug{
$if debug {
print_backtrace()
}
return error('Data does not contain action name: ${obj_name}.define or ${action_name}')

View File

@@ -12,9 +12,9 @@ pub fn (params &Params) get(key_ string) !string {
return p.value.trim(' ')
}
}
$if debug {
print_backtrace()
}
$if debug {
print_backtrace()
}
return error('Did not find key:${key} in ${params}')
}
@@ -158,9 +158,9 @@ pub fn (params &Params) get_int_default(key string, defval int) !int {
}
pub fn (params &Params) get_default_true(key string) bool {
mut r := ""
mut r := ''
if params.exists(key) {
r = params.get(key) or { panic("bug") }
r = params.get(key) or { panic('bug') }
}
r = texttools.name_fix_no_underscore(r)
if r == '' || r == '1' || r == 'true' || r == 'y' || r == 'yes' {
@@ -170,10 +170,11 @@ pub fn (params &Params) get_default_true(key string) bool {
}
pub fn (params &Params) get_default_false(key string) bool {
mut r := ""
mut r := ''
if params.exists(key) {
r = params.get(key) or { panic("bug") }
} r = texttools.name_fix_no_underscore(r)
r = params.get(key) or { panic('bug') }
}
r = texttools.name_fix_no_underscore(r)
if r == '' || r == '0' || r == 'false' || r == 'n' || r == 'no' {
return false
}
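
For clarity, a standalone sketch (hypothetical helper) of the truthy parsing get_default_true applies, assuming name_fix_no_underscore lowercases its input:

import freeflowuniverse.herolib.core.texttools

fn is_truthy(v string) bool {
	r := texttools.name_fix_no_underscore(v)
	return r == '' || r == '1' || r == 'true' || r == 'y' || r == 'yes'
}

fn main() {
	println(is_truthy('Yes')) // true
	println(is_truthy('0')) // false
}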

View File

@@ -1,7 +1,6 @@
module gittools
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
@@ -16,11 +15,11 @@ pub fn reset() {
@[params]
pub struct GitStructureArgsNew {
pub mut:
coderoot string
log bool = true // If true, logs git commands/statements
debug bool = true
reload bool
offline bool
coderoot string
log bool = true // If true, logs git commands/statements
debug bool = true
reload bool
offline bool
}
// Retrieve or create a new GitStructure instance with the given configuration.
@@ -38,8 +37,8 @@ pub fn new(args_ GitStructureArgsNew) !&GitStructure {
panic('Unexpected error: key not found in gsinstances')
}
return gs
}else{
console.print_debug("Loading GitStructure for ${args.coderoot}")
} else {
console.print_debug('Loading GitStructure for ${args.coderoot}')
}
// Create and load the GitStructure instance.
@@ -47,8 +46,8 @@ pub fn new(args_ GitStructureArgsNew) !&GitStructure {
key: rediskey_
coderoot: pathlib.get_dir(path: args.coderoot, create: true)!
log: args.log
debug: args.debug
offline: args.offline
debug: args.debug
offline: args.offline
}
if 'OFFLINE' in os.environ() {
@@ -59,14 +58,13 @@ pub fn new(args_ GitStructureArgsNew) !&GitStructure {
if args.reload {
gs.load(true)!
}else{
} else {
gs.load(false)!
}
gsinstances[rediskey_] = &gs
return gsinstances[rediskey_] or { panic('bug') }
}
@[params]
@@ -90,15 +88,15 @@ pub mut:
pub fn path(args_ GitPathGetArgs) !pathlib.Path {
mut args := args_
if args.path!=""{
if args.path != '' {
if os.exists(args.path) {
return pathlib.get(args.path)
}else{
if args.git_url == "" {
} else {
if args.git_url == '' {
return error("can't resolve git repo path without url or existing path, ${args.path} does not exist.")
}
}
}
}
if args.git_url.len > 0 {
mut gs := new(coderoot: args.git_root)!
@@ -114,4 +112,3 @@ pub fn path(args_ GitPathGetArgs) !pathlib.Path {
}
return pathlib.get(args.path)
}
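
A hedged usage sketch of gittools.path as reworked above: given a git_url it resolves to a local pathlib.Path (cloning under the coderoot if needed), while an existing local path is returned as-is:

import freeflowuniverse.herolib.develop.gittools

fn main() {
	p := gittools.path(git_url: 'https://github.com/freeflowuniverse/herolib')!
	println(p.path)
}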

View File

@@ -32,7 +32,7 @@ pub fn (mut gs GitStructure) gitlocation_from_path(path string) !GitLocation {
provider := parts[0]
account := parts[1]
name := parts[2]
mut repo_path := if parts.len > 3 { parts[3..].join('/') } else { "" } //this is for relative path in repo
mut repo_path := if parts.len > 3 { parts[3..].join('/') } else { '' } // this is for relative path in repo
return GitLocation{
provider: provider

View File

@@ -6,7 +6,6 @@ import freeflowuniverse.herolib.ui.console
import os
import json
// GitStructure holds information about repositories within a specific code root.
// This structure keeps track of loaded repositories, their configurations, and their status.
@[heap]
@@ -17,9 +16,9 @@ pub mut:
key string // Unique key representing the git structure (default is hash of $home/code).
repos map[string]&GitRepo // Map of repositories
coderoot pathlib.Path
log bool = true // If true, logs git commands/statements
debug bool = true
offline bool
log bool = true // If true, logs git commands/statements
debug bool = true
offline bool
}
//////////////////////////////////////////////////////////////////////////////////////
@@ -52,8 +51,6 @@ pub fn (mut gitstructure GitStructure) load(reset bool) ! {
// - path (string): The path to search for repositories.
// - processed_paths ([]string): List of already processed paths to avoid duplication.
fn (mut gitstructure GitStructure) load_recursive(path string, mut processed_paths []string) ! {
path_object := pathlib.get(path)
relpath := path_object.path_relative(gitstructure.coderoot.path)!
@@ -72,7 +69,8 @@ fn (mut gitstructure GitStructure) load_recursive(path string, mut processed_pat
current_path := os.join_path(path, item)
if os.is_dir(current_path) {
excluded_dirs := ['node_modules', 'vendor', 'dist', 'build', 'bin', 'obj', 'target', 'tmp', 'temp']
excluded_dirs := ['node_modules', 'vendor', 'dist', 'build', 'bin', 'obj', 'target',
'tmp', 'temp']
if item.starts_with('.') || item.starts_with('_') || excluded_dirs.contains(item) {
continue
}
@@ -131,13 +129,13 @@ fn (mut gitstructure GitStructure) repo_init_from_path_(path string, params Repo
// console.print_debug("Initializing GitRepo from path: ${mypath.path}")
// Initialize and return a GitRepo struct.
mut r := GitRepo{
gs: &gitstructure
status: GitStatus{}
config: GitRepoConfig{}
provider: gl.provider
account: gl.account
name: gl.name
deploysshkey: params.ssh_key_name
gs: &gitstructure
status: GitStatus{}
config: GitRepoConfig{}
provider: gl.provider
account: gl.account
name: gl.name
deploysshkey: params.ssh_key_name
}
return r

View File

@@ -2,7 +2,6 @@ module gittools
import json
@[params]
pub struct GitStructureConfig {
pub mut:
@@ -11,8 +10,6 @@ pub mut:
ssh_key_path string
}
// Load config from redis
pub fn (mut self GitStructure) config() !GitStructureConfig {
mut config := self.config_ or {
@@ -24,10 +21,10 @@ pub fn (mut self GitStructure) config() !GitStructureConfig {
}
c
}
return config
}
pub fn (mut self GitStructure) config_set(args GitStructureConfig) ! {
mut redis := redis_get()
redis.set('${self.cache_key()}:config', json.encode(args))!

View File

@@ -44,17 +44,44 @@ pub mut:
pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
mut args := args_
console.print_debug('git do ${args.cmd}')
// println(args)
// $dbg;
// if args.path == '' && args.url == '' && args.repo == '' && args.account == ''
// && args.provider == '' && args.filter == '' {
// args.path = os.getwd()
// }
if args.path.len > 0 && args.url.len > 0 {
panic('bug')
}
if args.path.len > 0 && args.filter.len > 0 {
panic('bug')
}
if args.url.len > 0 && args.filter.len > 0 {
panic('bug')
}
if args.path != '' {
mut curdiro := pathlib.get_dir(path: args.path, create: false)!
mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
if parentpath.path != '' {
r0 := gs.repo_init_from_path_(parentpath.path)!
if args.path.contains('*') {
panic('bug')
}
if args.path == '.' {
// means current dir
args.path = os.getwd()
mut curdiro := pathlib.get_dir(path: args.path, create: false)!
mut parentpath := curdiro.parent_find('.git') or { pathlib.Path{} }
args.path = curdiro.path
}
if !os.exists(args.path) {
return error('Path does not exist: ${args.path}')
}
r0 := gs.repo_init_from_path_(args.path)!
args.repo = r0.name
args.account = r0.account
args.provider = r0.provider
} else {
if args.url.len > 0 {
if !(args.repo == '' && args.account == '' && args.provider == '' && args.filter == '') {
return error('when specify url cannot specify repo, account, profider or filter')
}
mut r0 := gs.get_repo(url: args.url)!
args.repo = r0.name
args.account = r0.account
args.provider = r0.provider
@@ -72,15 +99,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
provider: args.provider
)!
// MODIFIED: Remove the global reload.
// The reload flag will now be handled inside the loop.
// if args.reload || args.cmd == 'reload' {
// for mut repo in repos {
// repo.cache_last_load_clear()!
// }
// gs.load(true)! // <-- REMOVED
// }
for mut repo in repos {
repo.status_update(reset: args.reload || args.cmd == 'reload')!
}
@@ -122,18 +140,6 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
return ''
}
// see if a url was used means we are in 1 repo
if args.url.len > 0 {
if !(args.repo == '' && args.account == '' && args.provider == '' && args.filter == '') {
return error('when specify url cannot specify repo, account, profider or filter')
}
mut r0 := gs.get_repo(url: args.url)!
args.repo = r0.name
args.account = r0.account
args.provider = r0.provider
}
if args.cmd in 'pull,push,commit,delete'.split(',') {
gs.repos_print(
filter: args.filter
@@ -166,10 +172,9 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
}
console.print_debug(" --- status repo ${g.name}'s\n need_commit0:${need_commit0} \n need_pull0:${need_pull0} \n need_push0:${need_push0}")
}
console.print_debug(" --- status all repo's\n need_commit0:${need_commit0} \n need_pull0:${need_pull0} \n need_push0:${need_push0}")
console.print_debug(" --- status all repo's\n need_commit0:${need_commit0} \n need_pull0:${need_pull0} \n need_push0:${need_push0}")
mut ok := false
if need_commit0 || need_pull0 || need_push0 {

View File

@@ -119,7 +119,7 @@ pub fn (mut gitstructure GitStructure) get_repo(args_ ReposGetArgs) !&GitRepo {
// repos := repositories.map('- ${it.account}.${it.name}').join_lines()
$if debug {
print_backtrace()
}
}
return error('Found more than one repository for \n${args}')
}

View File

@@ -5,8 +5,6 @@ import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal.core as osal
import os
// commit stages all changes and commits them with the provided message.
pub fn (mut repo GitRepo) commit(msg string) ! {
repo.status_update()!

View File

@@ -26,13 +26,13 @@ pub fn (mut gitstructure GitStructure) clone(args GitCloneArgs) !&GitRepo {
// Initialize a new GitRepo instance
mut repo := GitRepo{
gs: &gitstructure
provider: git_location.provider
account: git_location.account
name: git_location.name
deploysshkey: args.sshkey // Use the sshkey from args
config: GitRepoConfig{} // Initialize with default config
status: GitStatus{} // Initialize with default status
gs: &gitstructure
provider: git_location.provider
account: git_location.account
name: git_location.name
deploysshkey: args.sshkey // Use the sshkey from args
config: GitRepoConfig{} // Initialize with default config
status: GitStatus{} // Initialize with default status
}
// Add the new repo to the gitstructure's repos map
@@ -70,7 +70,7 @@ pub fn (mut gitstructure GitStructure) clone(args GitCloneArgs) !&GitRepo {
if result.exit_code != 0 {
return error('Cannot clone the repository due to: \n${result.output}')
}
// The repo is now cloned. Load its initial status.
repo.load_internal()!

View File

@@ -30,7 +30,6 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Decide if a full load is needed.
if args.reset || repo.last_load == 0
|| current_time - repo.last_load >= repo.config.remote_check_period {
$dbg;
repo.load_internal() or {
// Persist the error state to the cache
console.print_stderr('Failed to load repository ${repo.name} at ${repo.path()}: ${err}')
@@ -55,7 +54,7 @@ fn (mut repo GitRepo) load_internal() ! {
return error('Failed to fetch updates for ${repo.name} at ${repo.path()}: ${err}. Please check network connection and repository access.')
}
repo.load_branches()!
repo.load_tags()!
repo.load_tags()!
// Reset ahead/behind counts before recalculating
repo.status.ahead = 0
@@ -89,7 +88,6 @@ fn (mut repo GitRepo) load_internal() ! {
// Persist the newly loaded state to the cache.
repo.cache_set()!
}
// Helper to load remote tags
@@ -195,4 +193,4 @@ pub fn (mut repo GitRepo) detect_changes() !bool {
return true
}
return false
}
}

View File

@@ -1,12 +1,11 @@
module gittools
// GitRepo represents a single git repository.
@[heap]
pub struct GitRepo {
// a git repo is always part of a git structure
mut:
gs &GitStructure @[skip; str: skip]
gs &GitStructure @[skip; str: skip]
last_load int // epoch when last loaded
pub mut:
provider string // e.g., github.com
@@ -26,10 +25,10 @@ pub mut:
tags map[string]string // All tag names -> commit hash
// Current local state
branch string // The current checked-out branch.
tag string // The current checked-out tag (if any).
ahead int // Commits ahead of remote.
behind int // Commits behind remote.
branch string // The current checked-out branch.
tag string // The current checked-out tag (if any).
ahead int // Commits ahead of remote.
behind int // Commits behind remote.
// Combined status
has_changes bool // True if there are uncommitted local changes.

View File

@@ -4,9 +4,9 @@ import os
import freeflowuniverse.herolib.core.pathlib
__global (
docusaurus_sites map[string]&DocSite
docusaurus_sites map[string]&DocSite
docusaurus_config []DocusaurusConfigParams
docusaurus_last string //the last one we worked with
docusaurus_last string // the last one we worked with
)
pub struct DocusaurusConfig {
@@ -16,7 +16,7 @@ pub mut:
install bool
reset bool
template_update bool
coderoot string
coderoot string
}
@[params]
@@ -27,16 +27,16 @@ pub mut:
install bool
reset bool
template_update bool
coderoot string
coderoot string
}
//return the last know config
// return the last know config
pub fn config() !DocusaurusConfig {
if docusaurus_config.len == 0 {
docusaurus_config << DocusaurusConfigParams{}
}
mut args:= docusaurus_config[0] or { panic("bug in docusaurus config") }
if args.path_build == '' {
mut args := docusaurus_config[0] or { panic('bug in docusaurus config') }
if args.path_build == '' {
args.path_build = '${os.home_dir()}/hero/var/docusaurus/build'
}
if args.path_publish == '' {
@@ -47,16 +47,16 @@ pub fn config() !DocusaurusConfig {
}
mut c := DocusaurusConfig{
path_publish: pathlib.get_dir(path: args.path_publish, create: true)!
path_build: pathlib.get_dir(path: args.path_build, create: true)!
coderoot: args.coderoot
install: args.install
reset: args.reset
path_publish: pathlib.get_dir(path: args.path_publish, create: true)!
path_build: pathlib.get_dir(path: args.path_build, create: true)!
coderoot: args.coderoot
install: args.install
reset: args.reset
template_update: args.template_update
}
if c.install {
install(c)!
c.install=true
c.install = true
}
return c
}

View File

@@ -8,8 +8,8 @@ import freeflowuniverse.herolib.ui.console
@[heap]
pub struct DocSite {
pub mut:
name string
url string
name string
url string
// path_src pathlib.Path
path_publish pathlib.Path
path_build pathlib.Path
@@ -17,7 +17,7 @@ pub mut:
config Configuration
website sitemodule.Site
generated bool
}
}
pub fn (mut s DocSite) build() ! {
s.generate()!
@@ -51,20 +51,19 @@ pub fn (mut s DocSite) build_publish() ! {
retry: 0
)!
for item in s.website.siteconfig.build_dest {
if item.path.trim_space().trim("/ ") == "" {
$if debug{
if item.path.trim_space().trim('/ ') == '' {
$if debug {
print_backtrace()
}
return error("build destination path is empty for docusaurus.")
return error('build destination path is empty for docusaurus.')
}
osal.exec(
cmd: '
cmd: '
cd ${s.path_build.path}
rsync -avz --delete -e "ssh -p 22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" build/ ${item.path}
'
)!
}
}
@[params]

View File

@@ -1,6 +1,5 @@
module docusaurus
import freeflowuniverse.herolib.core.pathlib
import json
import os
@@ -16,8 +15,8 @@ pub fn (mut docsite DocSite) generate() ! {
console.print_header(' docsite generate: ${docsite.name} on ${c.path_build.path}')
osal.rm('${c.path_build.path}/docs')!
cfg_path:="${c.path_build.path}/cfg"
cfg_path := '${c.path_build.path}/cfg'
osal.rm(cfg_path)!
mut main_file := pathlib.get_file(path: '${cfg_path}/main.json', create: true)!
@@ -32,5 +31,4 @@ pub fn (mut docsite DocSite) generate() ! {
docsite.generate_docs()!
docsite.import()!
}

View File

@@ -20,10 +20,9 @@ mut:
// Generate docs from site configuration
pub fn (mut docsite DocSite) generate_docs() ! {
c := config()!
//we generate the docs in the build path
// we generate the docs in the build path
docs_path := '${c.path_build.path}/docs'
mut gen := SiteGenerator{

View File

@@ -6,13 +6,10 @@ import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools.regext
pub fn (mut docsite DocSite) import() ! {
for importparams in docsite.website.siteconfig.imports {
console.print_header('Importing: path:${importparams.path} or url:${importparams.url}')
// pub struct ImportItem {
// name string // will normally be empty
// url string // http git url can be to specific path
@@ -22,10 +19,10 @@ pub fn (mut docsite DocSite) import() ! {
// visible bool = true
// }
c:=config()!
c := config()!
if importparams.path == "" && importparams.url != "" {
return error("in import for docusaurus need to specify url or path")
if importparams.path == '' && importparams.url != '' {
return error('in import for docusaurus need to specify url or path')
}
// Use gittools to get path of what we want to import
@@ -37,11 +34,11 @@ pub fn (mut docsite DocSite) import() ! {
path: importparams.path
)!
if import_path.path == "" {
return error("import path not found for url:${importparams.url} and path:${importparams.path}")
if import_path.path == '' {
return error('import path not found for url:${importparams.url} and path:${importparams.path}')
}
if importparams.dest.starts_with("/") {
return error("Import path ${importparams.dest} must be relative, will be relative in relation to the build dir.")
if importparams.dest.starts_with('/') {
return error('Import path ${importparams.dest} must be relative, will be relative in relation to the build dir.')
}
import_path.copy(dest: '${c.path_build.path}/${importparams.dest}', delete: false)!

View File

@@ -24,7 +24,7 @@ pub fn dsite_define(sitename string) ! {
// Create the DocSite instance
mut dsite := &DocSite{
name: sitename
path_publish: pathlib.get_dir(path: "${path_build_}/build", create: true)!
path_publish: pathlib.get_dir(path: '${path_build_}/build', create: true)!
path_build: pathlib.get_dir(path: path_build_, create: true)!
config: new_configuration(website.siteconfig)!
website: website
@@ -36,7 +36,7 @@ pub fn dsite_define(sitename string) ! {
pub fn dsite_get(name_ string) !&DocSite {
mut name := texttools.name_fix(name_)
if name=="" {
if name == '' {
name = docusaurus_last
}
return docusaurus_sites[name] or {
@@ -46,9 +46,9 @@ pub fn dsite_get(name_ string) !&DocSite {
pub fn dsite_exists(name_ string) !bool {
mut name := texttools.name_fix(name_)
if name=="" {
if name == '' {
name = docusaurus_last
}
}
_ := docusaurus_sites[name] or { return false }
return true
}

View File

@@ -6,7 +6,7 @@ import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.installers.web.bun
fn install( c DocusaurusConfig) ! {
fn install(c DocusaurusConfig) ! {
mut gs := gittools.new()!
if c.reset {
@@ -22,7 +22,7 @@ fn install( c DocusaurusConfig) ! {
mut template_path0 := pathlib.get_dir(path: template_path, create: false)!
template_path0.copy(dest: c.path_build.path, delete: false)! //the dir has already been deleted so no point to delete again
template_path0.copy(dest: c.path_build.path, delete: false)! // the dir has already been deleted so no point to delete again
// install bun
mut installer := bun.get()!
@@ -36,5 +36,4 @@ fn install( c DocusaurusConfig) ! {
bun install
'
)!
}

View File

@@ -9,7 +9,7 @@ pub fn play(mut plbook PlayBook) ! {
return
}
//there should be 1 define section
// there should be 1 define section
mut action_define := plbook.ensure_once(filter: 'docusaurus.define')!
mut param_define := action_define.params
@@ -24,10 +24,9 @@ pub fn play(mut plbook PlayBook) ! {
site_name := param_define.get('name') or {
return error('In docusaurus.define, param "name" is required.')
}
dsite_define(site_name)!
action_define.done = true
mut dsite := dsite_get(site_name)!
@@ -47,7 +46,6 @@ pub fn play(mut plbook PlayBook) ! {
action.done = true
}
mut actions_build := plbook.find(filter: 'docusaurus.build')!
if actions_build.len > 1 {
return error('Multiple "docusaurus.build" actions found. Only one is allowed.')

View File

@@ -1,6 +1,6 @@
module docusaurus
//not longer working because is coming from doctree
// not longer working because is coming from doctree
// import freeflowuniverse.herolib.osal.notifier
// import os
@@ -97,9 +97,6 @@ module docusaurus
// }
// }
// pub fn (mut s DocSite) dev_watch(args DevArgs) ! {
// s.generate()!

View File

@@ -1,4 +1,5 @@
module site
import os
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.core.texttools
@@ -73,7 +74,7 @@ fn play_import(mut plbook PlayBook, mut config SiteConfig) ! {
mut importpath := p.get_default('path', '')!
if importpath != '' {
if ! importpath.starts_with('/') {
if !importpath.starts_with('/') {
importpath = os.abs_path('${plbook.path}/${importpath}')
}
}
@@ -182,7 +183,7 @@ fn play_publish(mut plbook PlayBook, mut config SiteConfig) ! {
for mut action in build_dest_actions {
mut p := action.params
mut dest := BuildDest{
path: p.get_default('path', '')! //can be url
path: p.get_default('path', '')! // can be url
ssh_name: p.get_default('ssh_name', '')!
}
config.build_dest << dest
@@ -190,13 +191,12 @@ fn play_publish(mut plbook PlayBook, mut config SiteConfig) ! {
}
}
fn play_publish_dev(mut plbook PlayBook, mut config SiteConfig) ! {
mut build_dest_actions := plbook.find(filter: 'site.publish_dev')!
for mut action in build_dest_actions {
mut p := action.params
mut dest := BuildDest{
path: p.get_default('path', '')! //can be url
path: p.get_default('path', '')! // can be url
ssh_name: p.get_default('ssh_name', '')!
}
config.build_dest_dev << dest