docusaurus

2025-02-02 19:24:10 +03:00
parent cfa9f877b3
commit 0fd5062408
29 changed files with 1290 additions and 78 deletions

View File

@@ -81,6 +81,7 @@ fn do() ! {
// herocmds.cmd_zola(mut cmd)
// herocmds.cmd_juggler(mut cmd)
herocmds.cmd_generator(mut cmd)
herocmds.cmd_docusaurus(mut cmd)
// herocmds.cmd_docsorter(mut cmd)
// cmd.add_command(publishing.cmd_publisher(pre_func))
cmd.setup()

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.virt.docker
mut engine := docker.new(prefix: '', localonly: true)!
// Check if dev_tools image exists
if !engine.image_exists(repo: 'dev_tools')! {
eprintln("image dev_tools doesn't exist, build it")
exit(1)
}
// Check if container exists and get its status
mut container := engine.container_get(
name: 'dev_tools'
) or {
// Container doesn't exist, create it
println('Creating dev_tools container...')
engine.container_create(
name: 'dev_tools'
image_repo: 'dev_tools'
remove_when_done: false
)!
}
// Start container if not running
if container.status != .up {
println('Starting dev_tools container...')
container.start()!
}
// Open shell to container
println('Opening shell to dev_tools container...')
container.shell()!

View File

@@ -8,7 +8,7 @@ mut r := engine.recipe_new(name: 'dev_tools', platform: .alpine)
r.add_from(image: 'alpine', tag: 'latest')!
r.add_package(name: 'git,vim')!
r.add_package(name: 'git,mc')!
r.add_zinit()!

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.web.docusaurus
// import freeflowuniverse.herolib.data.doctree
// Create a new docusaurus factory
mut docs := docusaurus.new(
// build_path: '/tmp/docusaurus_build'
)!
// Create a new docusaurus site
mut site := docs.dev(
url:'https://git.ourworld.tf/despiegk/docs_kristof'
)!
// FUTURE: ADD CONTENT FROM A DOCTREE
// Create a doctree for content
// mut tree := doctree.new(name: 'content')!
// // Add some content from a git repository
// tree.scan(
// git_url: 'https://github.com/yourusername/your-docs-repo'
// git_pull: true
// )!
// // Export the content to the docusaurus site
// tree.export(
// destination: '${site.path_build.path}/docs'
// reset: true
// keep_structure: true
// exclude_errors: false
// )!
// Build the docusaurus site
//site.build()!
// Generate the static site
//site.generate()!
// Optionally open the site in a browser
// site.open()!

View File

@@ -9,9 +9,9 @@ import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.osal.systemd
import freeflowuniverse.herolib.osal.zinit
@end
import freeflowuniverse.herolib.installers.ulist
@if args.build
import freeflowuniverse.herolib.installers.ulist
import freeflowuniverse.herolib.installers.lang.golang
import freeflowuniverse.herolib.installers.lang.rust
import freeflowuniverse.herolib.installers.lang.python
@@ -82,7 +82,7 @@ fn stop_post()!{
// checks if a certain version or above is installed
fn installed() !bool {
//THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
// res := os.execute('??{osal.profile_path_source_and()} ${args.name} version')
// res := os.execute('??{osal.profile_path_source_and()!} ${args.name} version')
// if res.exit_code != 0 {
// return false
// }

View File

@@ -0,0 +1,105 @@
module herocmds
import freeflowuniverse.herolib.web.docusaurus
import os
import cli { Command, Flag }
pub fn cmd_docusaurus(mut cmdroot Command) {
mut cmd_run := Command{
name: 'docusaurus'
description: 'Generate, build, run docusaurus sites.'
required_args: 0
execute: cmd_docusaurus_execute
}
// cmd_run.add_flag(Flag{
// flag: .bool
// required: false
// name: 'reset'
// abbrev: 'r'
// description: 'will reset.'
// })
cmd_run.add_flag(Flag{
flag: .string
required: false
name: 'url'
abbrev: 'u'
// default: ''
description: 'Url where docusaurus source is.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
name: 'build'
abbrev: 'b'
description: 'build and publish.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
name: 'builddev'
abbrev: 'bd'
description: 'build dev version and publish.'
})
cmd_run.add_flag(Flag{
flag: .bool
required: false
name: 'dev'
abbrev: 'd'
description: 'Run your dev environment in a local browser.'
})
cmdroot.add_command(cmd_run)
}
fn cmd_docusaurus_execute(cmd Command) ! {
// mut reset := cmd.flags.get_bool('reset') or { false }
mut url := cmd.flags.get_string('url') or { '' }
// mut path := cmd.flags.get_string('path') or { '' }
// if path == '' {
// path = os.getwd()
// }
// path = path.replace('~', os.home_dir())
mut build := cmd.flags.get_bool('build') or { false }
mut builddev := cmd.flags.get_bool('builddev') or { false }
mut dev := cmd.flags.get_bool('dev') or { false }
// if build == false && builddev == false && dev == false {
// eprintln("specify build, builddev or dev")
// exit(1)
// }
mut docs := docusaurus.new(
// build_path: '/tmp/docusaurus_build'
)!
if build{
// Create a new docusaurus site
mut site := docs.build(
url:url
)!
}
if builddev{
// Create a new docusaurus site
mut site := docs.build_dev(
url:url
)!
}
if dev{
// Create a new docusaurus site
mut site := docs.dev(
url:url
)!
}
}

View File

@@ -49,7 +49,7 @@ pub fn (mut gitstructure GitStructure) load(reload bool) ! {
}
// mut ths := []thread !{}
// need to make sure redis is empty before doing the threads
// need to make sure redis is empty before starting the threads; this does not remove the cache
redisclient.reset()!
redisclient.checkempty()

View File

@@ -143,3 +143,32 @@ fn repo_match_check(repo GitRepo, args ReposGetArgs) bool {
&& (args.account.len == 0 || repo.account == args.account)
&& (args.provider.len == 0 || repo.provider == args.provider)
}
// Retrieves a single repository path based on the provided arguments (goes inside repo).
// if pull is set, a pull is forced (an error is returned if it fails); if reset is set, local changes are removed
// If the repository does not exist, it will be cloned
//
// Args:
//```
// ReposGetArgs {
// name string // Specific repository name to retrieve.
// account string // Git account associated with the repository.
// provider string // Git provider (e.g., GitHub).
// pull bool // Pull the last changes.
// reset bool // Reset the changes.
// reload bool // Reload the repo into redis cache
// url string // Repository URL, used if cloning is needed.
//```
//
// Returns:
// - &GitRepo: Reference to the retrieved or cloned repository.
//
// Raises:
// - Error: If multiple repositories are found with similar names or if cloning fails.
pub fn (mut gitstructure GitStructure) get_path(args_ ReposGetArgs) !string {
mut r:=gitstructure.get_repo(args_)!
mut mypath:=r.get_path_of_url(args_.url)!
return mypath
}
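A minimal usage sketch for `get_path` (hedged: the URL below is illustrative and a configured gittools environment, as used elsewhere in this commit, is assumed):

```vlang
import freeflowuniverse.herolib.develop.gittools

// get the local path of a repo by URL, cloning it first if it does not exist yet
mut gs := gittools.new()!
mypath := gs.get_path(
	url:  'https://github.com/freeflowuniverse/docusaurus_template.git'
	pull: true
)!
println(mypath)
```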

View File

@@ -35,7 +35,6 @@ pub fn (repo GitRepo) get_changes_staged() ![]string {
// Check if there are any unstaged or untracked changes in the repository.
pub fn (mut repo GitRepo) detect_changes() !bool {
repo.status_update()!
r0 := repo.get_changes_unstaged()!
r1 := repo.get_changes_staged()!
if r0.len + r1.len > 0 {

View File

@@ -11,16 +11,20 @@ pub struct StatusUpdateArgs {
pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Check current time vs last check, if needed (check period) then load
// println("${repo.name} ++")
repo.cache_get()! // Ensure we have the situation from redis
repo.init()!
repo.cache_get() or {
return error('Failed to get cache for repo ${repo.name}: ${err}')
} // Ensure we have the situation from redis
repo.init() or {
return error('Failed to initialize repo ${repo.name}: ${err}')
}
current_time := int(time.now().unix())
if args.reload || repo.last_load == 0
|| current_time - repo.last_load >= repo.config.remote_check_period {
console.print_debug('${repo.name} ${current_time}-${repo.last_load}: ${repo.config.remote_check_period} +++')
//console.print_debug('${repo.name} ${current_time}-${repo.last_load} (${current_time - repo.last_load >= repo.config.remote_check_period}): ${repo.config.remote_check_period} +++')
// if true{exit(0)}
repo.load()!
// println("${repo.name} ++++")
repo.load() or {
return error('Failed to load repository ${repo.name}: ${err}')
}
}
}
@@ -28,68 +32,97 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
// Does not check cache, it is the caller's responsibility to check cache and load accordingly.
fn (mut repo GitRepo) load() ! {
console.print_debug('load ${repo.cache_key()}')
repo.init()!
if os.exists('${repo.path()}/.git') == false {
return error("Can't find git in repo ${repo.path()}")
repo.init() or {
return error('Failed to initialize repo during load operation: ${err}')
}
git_path := '${repo.path()}/.git'
if os.exists(git_path) == false {
return error('Repository not found: ${repo.path()} is not a valid git repository (missing .git directory)')
}
repo.exec('git fetch --all') or {
return error('Cannot fetch repo: ${repo.path()}. Error: ${err}')
return error('Failed to fetch updates for ${repo.name} at ${repo.path()}: ${err}. Please check network connection and repository access.')
}
repo.load_branches()!
repo.load_tags()!
repo.load_branches() or {
return error('Failed to load branches for ${repo.name}: ${err}')
}
repo.load_tags() or {
return error('Failed to load tags for ${repo.name}: ${err}')
}
repo.last_load = int(time.now().unix())
repo.has_changes = repo.detect_changes()!
repo.cache_set()!
repo.has_changes = repo.detect_changes() or {
return error('Failed to detect changes in repository ${repo.name}: ${err}')
}
repo.cache_set() or {
return error('Failed to update cache for repository ${repo.name}: ${err}')
}
}
// Helper to load remote tags
fn (mut repo GitRepo) load_branches() ! {
tags_result := repo.exec("git for-each-ref --format='%(objectname) %(refname:short)' refs/heads refs/remotes/origin")!
tags_result := repo.exec("git for-each-ref --format='%(objectname) %(refname:short)' refs/heads refs/remotes/origin") or {
return error('Failed to get branch references: ${err}. Command: git for-each-ref')
}
for line in tags_result.split('\n') {
if line.trim_space() != '' {
parts := line.split(' ')
if parts.len == 2 {
commit_hash := parts[0].trim_space()
mut name := parts[1].trim_space()
if name.contains('_archive') {
continue
} else if name == 'origin' {
repo.status_remote.ref_default = commit_hash
} else if name.starts_with('origin') {
name = name.all_after('origin/').trim_space()
// Update remote tags info
repo.status_remote.branches[name] = commit_hash
} else {
repo.status_local.branches[name] = commit_hash
}
line_trimmed := line.trim_space()
//println(line_trimmed)
if line_trimmed != '' {
parts := line_trimmed.split(' ')
if parts.len < 2 {
//console.print_debug('Info: skipping malformed branch/tag line: ${line_trimmed}')
continue
}
commit_hash := parts[0].trim_space()
mut name := parts[1].trim_space()
if name.contains('_archive') {
continue
} else if name == 'origin' {
repo.status_remote.ref_default = commit_hash
} else if name.starts_with('origin') {
name = name.all_after('origin/').trim_space()
// Update remote tags info
repo.status_remote.branches[name] = commit_hash
} else {
repo.status_local.branches[name] = commit_hash
}
}
}
mybranch := repo.exec('git branch --show-current')!.split_into_lines().filter(it.trim_space() != '')
mybranch := repo.exec('git branch --show-current') or {
return error('Failed to get current branch: ${err}')
}.split_into_lines().filter(it.trim_space() != '')
if mybranch.len == 1 {
repo.status_local.branch = mybranch[0].trim_space()
}else{
return error("bug: git branch does not give branchname.\n${mybranch}")
}
// Could be a tag.
// else{
// panic("bug: git branch does not give branchname")
// }
}
// Helper to load remote tags
fn (mut repo GitRepo) load_tags() ! {
tags_result := repo.exec('git tag --list')!
tags_result := repo.exec('git tag --list') or {
return error('Failed to list tags: ${err}. Please ensure git is installed and repository is accessible.')
}
for line in tags_result.split('\n') {
if line.trim_space() != '' {
parts := line.split(' ')
if parts.len == 2 {
commit_hash := parts[0].trim_space()
tag_name := parts[1].all_after('refs/tags/').trim_space()
// Update remote tags info
repo.status_remote.tags[tag_name] = commit_hash
line_trimmed := line.trim_space()
if line_trimmed != '' {
parts := line_trimmed.split(' ')
if parts.len < 2 {
console.print_debug('Skipping malformed tag line: ${line_trimmed}')
continue
}
commit_hash := parts[0].trim_space()
tag_name := parts[1].all_after('refs/tags/').trim_space()
// Update remote tags info
repo.status_remote.tags[tag_name] = commit_hash
}
}
}

View File

@@ -0,0 +1,13 @@
!!hero_code.generate_installer
name:'bun'
classname:'Bun'
singleton:1
templates:0
default:1
title:''
supported_platforms:''
reset:0
startupmanager:0
hasconfig:0
build:0

View File

@@ -0,0 +1,72 @@
module bun
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.installers.ulist
import os
//////////////////// following actions are not specific to an instance of the object
// checks if a certain version or above is installed
fn installed() !bool {
res := os.execute('${osal.profile_path_source_and()!} bun -version')
if res.exit_code != 0 {
return false
}
r := res.output.split_into_lines().filter(it.trim_space().len > 0)
if r.len != 1 {
return error("couldn't parse bun version.\n${res.output}")
}
// println(" ${texttools.version(version)} <= ${texttools.version(r[0])}")
if texttools.version(version) <= texttools.version(r[0]) {
return true
}
return false
}
//get the Upload List of the files
fn ulist_get() !ulist.UList {
//optionally build a UList with all paths that result from the build; used e.g. for upload
return ulist.UList{}
}
//uploads to S3 server if configured
fn upload() ! {
// installers.upload(
// cmdname: 'bun'
// source: '${gitpath}/target/x86_64-unknown-linux-musl/release/bun'
// )!
}
fn install() ! {
console.print_header('install bun')
osal.exec(cmd: 'curl -fsSL https://bun.sh/install | bash')!
}
fn destroy() ! {
// osal.process_kill_recursive(name:'bun')!
osal.cmd_delete('bun')!
osal.package_remove('
bun
')!
//will remove all paths where bun is found
osal.profile_path_add_remove(paths2delete:"bun")!
osal.rm("
~/.bun
")!
}

View File

@@ -0,0 +1,82 @@
module bun
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.sysadmin.startupmanager
import freeflowuniverse.herolib.osal.zinit
import time
__global (
bun_global map[string]&Bun
bun_default string
)
/////////FACTORY
@[params]
pub struct ArgsGet{
pub mut:
name string
}
pub fn get(args_ ArgsGet) !&Bun {
return &Bun{}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIFE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManager {
// unknown
// screen
// zinit
// tmux
// systemd
match cat{
.zinit{
console.print_debug("startupmanager: zinit")
return startupmanager.get(cat:.zinit)!
}
.systemd{
console.print_debug("startupmanager: systemd")
return startupmanager.get(cat:.systemd)!
}else{
console.print_debug("startupmanager: auto")
return startupmanager.get()!
}
}
}
@[params]
pub struct InstallArgs{
pub mut:
reset bool
}
pub fn (mut self Bun) install(args InstallArgs) ! {
switch(self.name)
if args.reset || (!installed()!) {
install()!
}
}
pub fn (mut self Bun) destroy() ! {
switch(self.name)
destroy()!
}
//switch instance to be used for bun
pub fn switch(name string) {
bun_default = name
}

View File

@@ -0,0 +1,29 @@
module bun
import freeflowuniverse.herolib.data.paramsparser
import os
pub const version = '1.2.2'
const singleton = true
const default = true
//THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@[heap]
pub struct Bun {
pub mut:
name string = 'default'
}
fn obj_init(obj_ Bun)!Bun{
//never call get here, only thing we can do here is work on object itself
mut obj:=obj_
return obj
}
//called before start if done
fn configure() ! {
//mut installer := get()!
}

View File

@@ -0,0 +1,44 @@
# bun
To get started
```vlang
import freeflowuniverse.herolib.installers.web.bun as bun_installer
heroscript:="
!!bun.configure name:'test'
password: '1234'
port: 7701
!!bun.start name:'test' reset:1
"
bun_installer.play(heroscript: heroscript)!
//or we can call the default and do a start with reset
//mut installer:= bun_installer.get()!
//installer.start(reset:true)!
```
## example heroscript
```hero
!!bun.configure
homedir: '/home/user/bun'
username: 'admin'
password: 'secretpassword'
title: 'Some Title'
host: 'localhost'
port: 8888
```
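As an alternative to heroscript, the installer object can be used directly (a hedged sketch; it assumes the `installers.web.bun` module path used elsewhere in this commit):

```vlang
import freeflowuniverse.herolib.installers.web.bun

mut installer := bun.get()!
// force a (re)install even if bun is already present
installer.install(reset: true)!
```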

View File

@@ -37,20 +37,6 @@ pub mut:
command string
}
@[params]
pub struct DockerContainerCreateArgs {
name string
hostname string
forwarded_ports []string // ["80:9000/tcp", "1000, 10000/udp"]
mounted_volumes []string // ["/root:/root", ]
env map[string]string // map of environment variables that will be passed to the container
privileged bool
remove_when_done bool = true // remove the container when it shuts down
pub mut:
image_repo string
image_tag string
command string = '/bin/bash'
}
// create/start container (first need to get a dockercontainer before we can start)
pub fn (mut container DockerContainer) start() ! {

View File

@@ -3,48 +3,146 @@ module docker
import freeflowuniverse.herolib.osal { exec }
import freeflowuniverse.herolib.virt.utils
@[params]
pub struct DockerContainerCreateArgs {
pub mut:
name string
hostname string
forwarded_ports []string // ["80:9000/tcp", "1000, 10000/udp"]
mounted_volumes []string // ["/root:/root", ]
env map[string]string // map of environment variables that will be passed to the container
privileged bool
remove_when_done bool = true // remove the container when it shuts down
image_repo string
image_tag string
command string = '/bin/bash'
}
pub fn (mut e DockerEngine) container_create(args DockerContainerCreateArgs) !&DockerContainer {
// Validate required parameters
if args.name.trim_space() == '' {
return error('Container name cannot be empty')
}
// Set default hostname if not provided
mut hostname := args.hostname
if hostname.trim_space() == '' {
hostname = args.name.replace('_', '-')
}
mut ports := ''
mut mounts := ''
mut env := ''
mut command := args.command
// Build environment variables string with proper spacing
for var, value in args.env {
env += '-e ${var}="${value}"'
if env != '' {
env += ' '
}
env += '-e "${var}=${value}"'
}
// Build ports string
for port in args.forwarded_ports {
ports = ports + '-p ${port} '
if ports != '' {
ports += ' '
}
ports += '-p ${port}'
}
// Build mounts string
for mount in args.mounted_volumes {
mounts += '-v ${mount} '
if mounts != '' {
mounts += ' '
}
mounts += '-v ${mount}'
}
mut image := '${args.image_repo}'
// Build image string
mut image := args.image_repo
if args.image_tag != '' {
image = image + ':${args.image_tag}'
image += ':${args.image_tag}'
} else {
// Check if image exists with 'local' tag first
mut local_check := exec(cmd: 'docker images ${args.image_repo}:local -q',debug:true)!
if local_check.output != '' {
image += ':local'
} else {
// Default to latest if no tag specified
image += ':latest'
}
}
// Set default image and command for threefold
if image == 'threefold' || image == 'threefold:latest' || image == '' {
image = 'threefoldtech/grid3_ubuntu_dev'
command = '/usr/local/bin/boot.sh'
}
// Verify image exists locally
mut image_check := exec(cmd: 'docker images ${image} -q')!
if image_check.output == '' {
return error('Docker image not found: ${image}. Please ensure the image exists locally or can be pulled from a registry.')
}
privileged := if args.privileged { '--privileged' } else { '' }
// if the forwarded ports passed in the args do not contain a mapping to ssh (22), create one
// Add SSH port if not present
if !utils.contains_ssh_port(args.forwarded_ports) {
// find random free port in the node
mut port := e.get_free_port() or { panic('No free port.') }
mut port := e.get_free_port() or { return error('No free port available for SSH') }
if ports != '' {
ports += ' '
}
ports += '-p ${port}:22/tcp'
}
exec(
cmd: 'docker run --hostname ${args.hostname} ${privileged} --sysctl net.ipv6.conf.all.disable_ipv6=0 --name ${args.name} ${ports} ${env} ${mounts} -d -t ${image} ${command}'
)!
// Have to reload the containers as container_get works from memory
e.containers_load()!
mut container := e.container_get(name: args.name)!
// Construct docker run command with proper spacing and escaping
mut mycmd := 'docker run'
if hostname != '' {
mycmd += ' --hostname "${hostname}"'
}
if privileged != '' {
mycmd += ' ${privileged}'
}
mycmd += ' --sysctl net.ipv6.conf.all.disable_ipv6=0'
mycmd += ' --name "${args.name}"'
if ports != '' {
mycmd += ' ${ports}'
}
if env != '' {
mycmd += ' ${env}'
}
if mounts != '' {
mycmd += ' ${mounts}'
}
mycmd += ' -d -t ${image}'
if command != '' {
mycmd += ' ${command}'
}
// Execute docker run command
exec(cmd: mycmd) or {
return error('Failed to create Docker container:
Command: ${mycmd}
Error: ${err}
Possible causes:
- Invalid image name or tag
- Container name already in use
- Port conflicts
- Insufficient permissions
Please check the error message and try again.')
}
// Verify container was created successfully
e.containers_load() or {
return error('Container created but failed to reload container list: ${err}')
}
mut container := e.container_get(name: args.name) or {
return error('Container created but not found in container list. This may indicate the container failed to start properly. Check container logs with: docker logs ${args.name}')
}
return container
}
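A hedged usage sketch for `container_create` with ports, a mount and environment variables (the names and values are illustrative; the engine setup mirrors the dev_tools script earlier in this commit):

```vlang
import freeflowuniverse.herolib.virt.docker

mut engine := docker.new(prefix: '', localonly: true)!
mut container := engine.container_create(
	name:             'dev_tools'
	image_repo:       'dev_tools'
	forwarded_ports:  ['8080:80/tcp']
	mounted_volumes:  ['/root:/root']
	env:              {
		'EDITOR': 'vim'
	}
	remove_when_done: false
)!
container.start()!
```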

View File

@@ -0,0 +1,40 @@
{
"style": "dark",
"links": [
{
"title": "Docs",
"items": [
{
"label": "Introduction",
"to": "/docs"
},
{
"label": "TFGrid V4 Docs",
"href": "https://docs.threefold.io/"
}
]
},
{
"title": "Community",
"items": [
{
"label": "Telegram",
"href": "https://t.me/threefold"
},
{
"label": "X",
"href": "https://x.com/threefold_io"
}
]
},
{
"title": "Links",
"items": [
{
"label": "ThreeFold.io",
"href": "https://threefold.io"
}
]
}
]
}

View File

@@ -0,0 +1,16 @@
{
"title": "Internet Geek",
"tagline": "Internet Geek",
"favicon": "img/favicon.png",
"url": "https://friends.threefold.info",
"url_home": "docs/",
"baseUrl": "/kristof/",
"image": "img/tf_graph.png",
"metadata": {
"description": "ThreeFold is laying the foundation for a geo aware Web 4, the next generation of the Internet.",
"image": "https://threefold.info/kristof/img/tf_graph.png",
"title": "ThreeFold Technology Vision"
},
"buildDest":"root@info.ourworld.tf:/root/hero/www/info",
"buildDestDev":"root@info.ourworld.tf:/root/hero/www/infodev"
}

View File

@@ -0,0 +1,15 @@
{
"title": "Kristof = Chief Executive Geek",
"items": [
{
"href": "https://threefold.info/kristof/",
"label": "ThreeFold Technology",
"position": "right"
},
{
"href": "https://threefold.io",
"label": "ThreeFold.io",
"position": "right"
}
]
}

View File

@@ -0,0 +1,94 @@
module docusaurus
import os
import strings
pub fn (mut site DocSite) clean(args ErrorArgs) ! {
toclean := "
/node_modules
# Production
/build
# Generated files
.docusaurus
.cache-loader
# Misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
bun.lockb
bun.lock
yarn.lock
build.sh
build_dev.sh
develop.sh
docusaurus.config.ts
sidebars.ts
tsconfig.json
"
mut sb := strings.new_builder(200)
for line in toclean.split_into_lines() {
clean_line := line.trim_space()
if clean_line == '' || clean_line.starts_with('#') {
continue
}
// Remove leading slash if present to make path relative
path_to_clean := if clean_line.starts_with('/') {
clean_line[1..]
} else {
clean_line
}
full_path := os.join_path(site.path_src.path, path_to_clean)
// Handle glob patterns (files ending with *)
if path_to_clean.ends_with('*') {
base_pattern := path_to_clean#[..-1] // Remove the * at the end
base_dir := os.dir(full_path)
if os.exists(base_dir) {
files := os.ls(base_dir) or {
sb.writeln('Failed to list directory ${base_dir}: ${err}')
continue
}
for file in files {
if file.starts_with(base_pattern) {
file_path := os.join_path(base_dir, file)
os.rm(file_path) or {
sb.writeln('Failed to remove ${file_path}: ${err}')
}
}
}
}
continue
}
// Handle regular files and directories
if os.exists(full_path) {
if os.is_dir(full_path) {
os.rmdir_all(full_path) or {
sb.writeln('Failed to remove directory ${full_path}: ${err}')
}
} else {
os.rm(full_path) or {
sb.writeln('Failed to remove file ${full_path}: ${err}')
}
}
}
}
}

View File

@@ -0,0 +1,95 @@
module docusaurus
import json
import os
// Footer config structures
pub struct FooterItem {
pub mut:
label string
to string
href string
}
pub struct FooterLink {
pub mut:
title string
items []FooterItem
}
pub struct Footer {
pub mut:
style string
links []FooterLink
}
// Main config structure
pub struct MainMetadata {
pub mut:
description string
image string
title string
}
pub struct Main {
pub mut:
name string
title string
tagline string
favicon string
url string
url_home string
base_url string @[json: 'baseUrl']
image string
metadata MainMetadata
build_dest string @[json: 'buildDest']
build_dest_dev string @[json: 'buildDestDev']
}
// Navbar config structures
pub struct NavbarItem {
pub mut:
href string
label string
position string
}
pub struct Navbar {
pub mut:
title string
items []NavbarItem
}
// Combined config structure
pub struct Config {
pub mut:
footer Footer
main Main
navbar Navbar
}
// load_config loads all configuration from the specified directory
pub fn load_config(cfg_dir string) !Config {
// Ensure the config directory exists
if !os.exists(cfg_dir) {
return error('Config directory ${cfg_dir} does not exist')
}
// Load and parse footer config
footer_content := os.read_file(os.join_path(cfg_dir, 'footer.json'))!
footer := json.decode(Footer, footer_content)!
// Load and parse main config
main_content := os.read_file(os.join_path(cfg_dir, 'main.json'))!
main := json.decode(Main, main_content)!
// Load and parse navbar config
navbar_content := os.read_file(os.join_path(cfg_dir, 'navbar.json'))!
navbar := json.decode(Navbar, navbar_content)!
return Config{
footer: footer
main: main
navbar: navbar
}
}
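A hedged sketch of calling `load_config` on a site's `cfg/` directory (the path is illustrative; the JSON files are the footer.json / main.json / navbar.json shown earlier in this commit):

```vlang
import freeflowuniverse.herolib.web.docusaurus
import os

cfg := docusaurus.load_config(os.join_path(os.home_dir(), 'code/docs_kristof/cfg'))!
println(cfg.main.title)       // e.g. 'Internet Geek'
println(cfg.main.base_url)    // e.g. '/kristof/'
println(cfg.navbar.items.len) // number of navbar entries
```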

lib/web/docusaurus/dsite.v — new file, 230 lines added
View File

@@ -0,0 +1,230 @@
module docusaurus
import freeflowuniverse.herolib.osal
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.develop.gittools
import json
@[heap]
pub struct DocSite {
pub mut:
name string
url string
path_src pathlib.Path
path_build pathlib.Path
// path_publish pathlib.Path
args DSiteNewArgs
errors []SiteError
config Config
}
@[params]
pub struct DSiteNewArgs {
pub mut:
name string
nameshort string
path string
url string
// publish_path string
build_path string
production bool
}
pub fn (mut f DocusaurusFactory) build_dev(args_ DSiteNewArgs) !&DocSite {
mut s:=f.add(args_)!
s.generate()!
osal.exec(
cmd: '
cd ${s.path_build.path}
bash build_dev.sh
'
retry: 0
)!
return s
}
pub fn (mut f DocusaurusFactory) build(args_ DSiteNewArgs) !&DocSite {
mut s:=f.add(args_)!
s.generate()!
osal.exec(
cmd: '
cd ${s.path_build.path}
bash build.sh
'
retry: 0
)!
return s
}
pub fn (mut f DocusaurusFactory) dev(args_ DSiteNewArgs) !&DocSite {
mut s:=f.add(args_)!
s.generate()!
osal.exec(
cmd: '
cd ${s.path_build.path}
bash develop.sh
'
retry: 0
)!
return s
}
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
pub fn (mut f DocusaurusFactory) add(args_ DSiteNewArgs) !&DocSite {
console.print_header(' Docusaurus: ${args_.name}')
mut args := args_
if args.build_path.len == 0 {
args.build_path = '${f.path_build.path}'
}
// if args.publish_path.len == 0 {
// args.publish_path = '${f.path_publish.path}/${args.name}'
if args.url.len>0{
mut gs := gittools.new()!
args.path = gs.get_path(url: args.url)!
}
if args.path.len==0{
return error("Can't get path from docusaurus site, its not specified.")
}
mut myconfig:=load_config("${args.path}/cfg")!
if myconfig.main.name.len==0{
myconfig.main.name = myconfig.main.base_url.trim_space().trim("/").trim_space()
}
if args.name == '' {
args.name = myconfig.main.name
}
if args.nameshort.len == 0 {
args.nameshort = args.name
}
args.nameshort = texttools.name_fix(args.nameshort)
mut ds := DocSite{
name: args.name
url: args.url
path_src: pathlib.get_dir(path: args.path, create: false)!
path_build: f.path_build
// path_publish: pathlib.get_dir(path: args.publish_path, create: true)!
args: args
config:myconfig
}
f.sites << &ds
return &ds
}
@[params]
pub struct ErrorArgs {
pub mut:
path string
msg string
cat ErrorCat
}
pub fn (mut site DocSite) error(args ErrorArgs) {
path2 := pathlib.get(args.path)
e := SiteError{
path: args.path
msg: args.msg
cat: args.cat
}
site.errors << e
console.print_stderr(args.msg)
}
pub fn (mut site DocSite) generate() ! {
console.print_header(' site generate: ${site.name} on ${site.path_build.path}')
site.template_install()!
// osal.exec(
// cmd: '
// cd ${site.path_build.path}
// #Docusaurus build --dest-dir ${site.path_publish.path}
// '
// retry: 0
// )!
for item in ["src","static","cfg","docs"]{
if os.exists("${site.path_src.path}/${item}"){
mut aa:= site.path_src.dir_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")!
}
}
}
fn (mut site DocSite) template_install() ! {
mut gs := gittools.new()!
mut r := gs.get_repo(url: 'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut template_path := r.patho()!
//always start from template first
for item in ["src","static","cfg"]{
mut aa:= template_path.dir_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")!
}
for item in ['package.json', 'sidebars.ts', 'tsconfig.json','docusaurus.config.ts'] {
mut aa:= template_path.file_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")! //TODO: use normal os.copy
}
for item in ['.gitignore'] {
mut aa:= template_path.file_get(item)!
aa.copy(dest:"${site.path_src.path}/${item}")! //TODO: use normal os.copy
}
cfg := site.config
develop := $tmpl('templates/develop.sh')
build := $tmpl('templates/build.sh')
build_dev := $tmpl('templates/build_dev.sh')
mut develop_ := site.path_build.file_get_new("develop.sh")!
develop_.template_write(develop,true)!
develop_.chmod(0o700)!
mut build_ := site.path_build.file_get_new("build.sh")!
build_.template_write(build,true)!
build_.chmod(0o700)!
mut build_dev_ := site.path_build.file_get_new("build_dev.sh")!
build_dev_.template_write(build_dev,true)!
build_dev_.chmod(0o700)!
mut develop2_ := site.path_src.file_get_new("develop.sh")!
develop2_.template_write(develop,true)!
develop2_.chmod(0o700)!
mut build2_ := site.path_src.file_get_new("build.sh")!
build2_.template_write(build,true)!
build2_.chmod(0o700)!
mut build_dev2_ := site.path_src.file_get_new("build_dev.sh")!
build_dev2_.template_write(build_dev,true)!
build_dev2_.chmod(0o700)!
}

View File

@@ -0,0 +1,46 @@
module docusaurus
import os
// import freeflowuniverse.herolib.data.doctree.collection
import freeflowuniverse.herolib.core.pathlib
// import freeflowuniverse.herolib.ui.console
// import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.develop.gittools
@[heap]
pub struct DocusaurusFactory {
pub mut:
sites []&DocSite @[skip; str: skip]
path_build pathlib.Path
// path_publish pathlib.Path
args DocusaurusArgs
}
@[params]
pub struct DocusaurusArgs {
pub mut:
// publish_path string
build_path string
production bool
}
pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {
mut args:=args_
if args.build_path == ""{
args.build_path = "${os.home_dir()}/hero/var/docusaurus"
}
// if args.publish_path == ""{
// args.publish_path = "${os.home_dir()}/hero/var/docusaurus/publish"
// }
mut ds := &DocusaurusFactory{
args: args_
path_build: pathlib.get_dir(path: args.build_path, create: true)!
// path_publish: pathlib.get_dir(path: args_.publish_path, create: true)!
}
ds.template_install()!
return ds
}

View File

@@ -0,0 +1,24 @@
module docusaurus
pub struct SiteError {
Error
pub mut:
path string
msg string
cat ErrorCat
}
pub enum ErrorCat {
unknown
image_double
file_double
file_not_found
image_not_found
page_double
page_not_found
sidebar
circular_import
def
summary
include
}

View File

@@ -0,0 +1,32 @@
module docusaurus
import freeflowuniverse.herolib.develop.gittools
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.installers.web.bun
fn (mut site DocusaurusFactory) template_install() ! {
mut gs := gittools.new()!
mut r:=gs.get_repo(url:'https://github.com/freeflowuniverse/docusaurus_template.git')!
mut template_path:=r.patho()!
for item in ['package.json', 'sidebars.ts', 'tsconfig.json'] {
mut aa:= template_path.file_get(item)!
aa.copy(dest:"${site.path_build.path}/${item}")!
}
//install bun
mut installer:= bun.get()!
installer.install()!
osal.exec(cmd: '
cd ${site.path_build.path}
bun install
')!
}

View File

@@ -0,0 +1,18 @@
#!/bin/bash
set -e
script_dir="??(cd "??(dirname "??{BASH_SOURCE[0]}")" && pwd)"
cd "??{script_dir}"
echo "Docs directory: ??script_dir"
cd ${site.path_build.path}
export PATH=/tmp/docusaurus_build/node_modules/.bin:??PATH
rm -rf ${site.path_build.path}/build/
bun docusaurus build
rsync -rv --delete ${site.path_build.path}/build/ ${cfg.main.build_dest.trim_right("/")}/${cfg.main.name.trim_right("/")}/

View File

@@ -0,0 +1,19 @@
#!/bin/bash
set -e
script_dir="??(cd "??(dirname "??{BASH_SOURCE[0]}")" && pwd)"
cd "??{script_dir}"
echo "Docs directory: ??script_dir"
cd ${site.path_build.path}
export PATH=/tmp/docusaurus_build/node_modules/.bin:??PATH
rm -rf ${site.path_build.path}/build/
bun docusaurus build
rsync -rv --delete ${site.path_build.path}/build/ ${cfg.main.build_dest_dev.trim_right("/")}/${cfg.main.name.trim_right("/")}/

View File

@@ -0,0 +1,14 @@
#!/bin/bash
set -e
script_dir="??(cd "??(dirname "??{BASH_SOURCE[0]}")" && pwd)"
cd "??{script_dir}"
echo "Docs directory: ??script_dir"
cd ${site.path_build.path}
export PATH=/tmp/docusaurus_build/node_modules/.bin:??PATH
bun run start -p 3100