Compare commits
16 commits: development ... development

| SHA1 |
|---|
| e34ba394b9 |
| 43308dfbe1 |
| e8904ea1ce |
| 3d25fe0f04 |
| d91957b945 |
| 923f8c24e7 |
| 40ad68e0ff |
| 1762387301 |
| ea9286687d |
| cc837a1427 |
| 154c08411c |
| 1870f2a7ce |
| ff92f6eff2 |
| eeb5e207f2 |
| 09b595948d |
| 63c0b81fc9 |
32  .github/workflows/README.md  (vendored)
@@ -1,32 +0,0 @@
# Building Hero for release

Generally speaking, our scripts and docs for building hero produce non-portable binaries for Linux. While that's fine for development purposes, statically linked binaries are much more convenient for releases and distribution.

The release workflow here creates a static binary for Linux using an Alpine container. A few notes follow about how that's done.

## Static builds in vlang

Since V compiles to C in our case, we are really concerned with how to produce static C builds. The V project provides [some guidance](https://github.com/vlang/v?tab=readme-ov-file#docker-with-alpinemusl) on using an Alpine container and passing `-cflags -static` to the V compiler.

That's fine for some projects. Hero has a dependency on the `libpq` C library for Postgres functionality, however, and this creates a complication.

## Static linking libpq

In order to create a static build of hero on Alpine, we need to install some additional packages:

* openssl-libs-static
* postgresql-dev

The full `apk` command to prepare the container for building looks like this:

```bash
apk add --no-cache bash git build-base openssl-dev libpq-dev postgresql-dev openssl-libs-static
```

Then we also need to instruct the C compiler to link against the static Postgres libraries. Here's the build command:

```bash
v -w -d use_openssl -enable-globals -cc gcc -cflags -static -ldflags "-lpgcommon_shlib -lpgport_shlib" cli/hero.v
```

Note that gcc is also the preferred compiler for static builds.
35  .github/workflows/hero_build.yml  (vendored)
@@ -35,6 +35,9 @@ jobs:
      - name: Checkout code
        uses: actions/checkout@v4

      # We do the workaround as described here https://github.com/Incubaid/herolib?tab=readme-ov-file#tcc-compiler-error-on-macos
      # gcc and clang also don't work on macOS due to https://github.com/vlang/v/issues/25467
      # We can change the compiler or remove this when one is fixed
      - name: Setup V & Herolib
        id: setup
        shell: bash
@@ -50,34 +53,52 @@ jobs:
          echo "Herolib symlink created to $(pwd)/lib"
        timeout-minutes: 10

      # For Linux, we build a static binary linked against musl on Alpine. For
      # static linking, gcc is preferred
      # We can't make static builds for Linux easily, since we link to libpq
      # (Postgres) and this has no static version available in the Alpine
      # repos. Therefore we build dynamic binaries for both glibc and musl.
      #
      # Again we work around a bug limiting our choice of C compiler: tcc won't
      # work on Alpine due to https://github.com/vlang/v/issues/24866
      # So always use gcc for Linux
      #
      # For macOS, we can only use tcc (see above), but then we hit issues using
      # the garbage collector, so disable that
      - name: Build Hero
        timeout-minutes: 15
        run: |
          set -ex
          set -e
          if [ "${{ runner.os }}" = "Linux" ]; then
            sudo apt-get install libpq-dev
            # Build for glibc
            v -w -d use_openssl -enable-globals -cc gcc cli/hero.v -o cli/hero-${{ matrix.target }}

            # Build for musl using Alpine in Docker
            docker run --rm \
              -v ${{ github.workspace }}/lib:/root/.vmodules/incubaid/herolib \
              -v ${{ github.workspace }}:/herolib \
              -w /herolib \
              alpine:3.22 \
              alpine \
              sh -c '
                set -ex
                apk add --no-cache bash git build-base openssl-dev libpq-dev postgresql-dev openssl-libs-static
                apk add --no-cache bash git build-base openssl-dev libpq-dev
                cd v
                make clean
                make
                ./v symlink
                cd ..
                v -w -d use_openssl -enable-globals -cc gcc -cflags -static -ldflags "-lpgcommon_shlib -lpgport_shlib" cli/hero.v -o cli/hero-${{ matrix.target }}-musl
                v -w -d use_openssl -enable-globals -cc gcc cli/hero.v -o cli/hero-${{ matrix.target }}-musl
              '

          else
            v -w -d use_openssl -enable-globals -cc clang cli/hero.v -o cli/hero-${{ matrix.target }}
          fi

      - name: Upload glibc binary
        if: runner.os == 'Linux'
        uses: actions/upload-artifact@v4
        with:
          name: hero-${{ matrix.target }}
          path: cli/hero-${{ matrix.target }}

      - name: Upload musl binary
        if: runner.os == 'Linux'
        uses: actions/upload-artifact@v4
@@ -1,77 +0,0 @@
# HeroLib AI Prompts (`aiprompts/`)

This directory contains AI-oriented instructions and manuals for working with the Hero tool and the `herolib` codebase.

It is the **entry point for AI agents** that generate or modify code/docs in this repository.

## Scope

- **Global rules for AI and V/Hero usage**
  See:
  - `herolib_start_here.md`
  - `vlang_herolib_core.md`
- **Herolib core modules**
  See:
  - `herolib_core/` (core HeroLib modules)
  - `herolib_advanced/` (advanced topics)
- **Docusaurus & Site module (Hero docs)**
  See:
  - `docusaurus/docusaurus_ebook_manual.md`
  - `lib/web/docusaurus/README.md` (authoritative module doc)
  - `lib/web/site/ai_instructions.md` and `lib/web/site/readme.md`
- **HeroModels / HeroDB**
  See:
  - `ai_instructions_hero_models.md`
  - `heromodel_instruct.md`
- **V language & web server docs** (upstream-style, mostly language-level)
  See:
  - `v_core/`, `v_advanced/`
  - `v_veb_webserver/`

## Sources of Truth

For any domain, **code and module-level docs are authoritative**:

- Core install & usage: `herolib/README.md`, scripts under `scripts/`
- Site module: `lib/web/site/ai_instructions.md`, `lib/web/site/readme.md`
- Docusaurus module: `lib/web/docusaurus/README.md`, `lib/web/docusaurus/*.v`
- DocTree client: `lib/data/doctree/client/README.md`
- HeroModels: `lib/hero/heromodels/*.v` + tests

`aiprompts/` files **must not contradict** these. When in doubt, follow the code / module docs first and treat prompts as guidance.

## Directory Overview

- `herolib_start_here.md` / `vlang_herolib_core.md`
  Global AI rules and V/Hero basics.
- `herolib_core/` & `herolib_advanced/`
  Per-module instructions for core/advanced HeroLib features.
- `docusaurus/`
  AI manual for building Hero docs/ebooks with the Docusaurus + Site + DocTree pipeline.
- `instructions/`
  Active, higher-level instructions (e.g. HeroDB base filesystem).
- `instructions_archive/`
  **Legacy / historical** prompt material. See `instructions_archive/README.md`.
- `todo/`
  Meta design/refactor notes (not up-to-date instructions for normal usage).
- `v_core/`, `v_advanced/`, `v_veb_webserver/`
  V language and web framework references used when generating V code.
- `bizmodel/`, `unpolly/`, `doctree/`, `documentor/`
  Domain-specific or feature-specific instructions.

## How to Treat Legacy Material

- Content under `instructions_archive/` is **kept for reference** and may describe older flows (e.g. older documentation or prompt pipelines).
  Do **not** use it as a primary source for new work unless explicitly requested.
- Some prompts mention **Doctree**; the current default docs pipeline uses **DocTree**. Doctree/`doctreeclient` is an alternative/legacy backend.

## Guidelines for AI Agents

- Always:
  - Respect global rules in `herolib_start_here.md` and `vlang_herolib_core.md`.
  - Prefer module docs under `lib/` when behavior or parameters differ.
  - Avoid modifying generated files (e.g. `*_.v` or other generated artifacts) as instructed.
- When instructions conflict, resolve as:
  1. **Code & module docs in `lib/`**
  2. **AI instructions in `aiprompts/`**
  3. **Archived docs (`instructions_archive/`) only when explicitly needed**.
@@ -2,9 +2,9 @@

## Overview

This document provides clear instructions for AI agents to create new HeroDB models similar to `message.v`.
These models are used to store structured data in Redis using the HeroDB system.
The `message.v` example can be found in `lib/hero/heromodels/message.v`.
The message.v can be found in `lib/hero/heromodels/message.v`.s

## Key Concepts

@@ -108,7 +108,7 @@ Add your model to the ModelsFactory struct in `factory.v`:

```v
pub struct ModelsFactory {
pub mut:
	calendar DBCalendar
	messages DBCalendar
	// ... other models
}
```
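
For orientation, a hypothetical skeleton of such a model (struct name and fields are illustrative only; the authoritative pattern is `lib/hero/heromodels/message.v`):

```v
// Hypothetical HeroDB model skeleton in the style of message.v.
// Field names are made up for illustration; copy the real message.v
// structure when creating an actual model.
module heromodels

pub struct Message {
pub mut:
	id      u32 // object id assigned by HeroDB
	subject string
	body    string
}
```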
51  aiprompts/doctree/doctree_specs.md  (new file)
@@ -0,0 +1,51 @@
# Doctree Export Specification

## Overview

The `doctree` module in `lib/data/doctree` is responsible for processing and exporting documentation trees. This involves taking a structured representation of documentation (collections, pages, images, files) and writing it to a specified file system destination. Additionally, it leverages Redis to store metadata about the exported documentation, facilitating quick lookups and integration with other systems.

## Key Components

### `lib/data/doctree/export.v`

This file defines the main `export` function for the `Tree` object. It orchestrates the overall export process:

- Takes `TreeExportArgs` which includes parameters like `destination`, `reset` (to clear destination), `keep_structure`, `exclude_errors`, `toreplace` (for regex replacements), `concurrent` (for parallel processing), and `redis` (to control Redis metadata storage).
- Processes definitions, includes, actions, and macros within the `Tree`.
- Generates file paths for pages, images, and other files.
- Iterates through `Collection` objects within the `Tree` and calls their respective `export` methods, passing down the `redis` flag.

### `lib/data/doctree/collection/export.v`

This file defines the `export` function for the `Collection` object. This is where the actual file system writing and Redis interaction for individual collections occur:

- Takes `CollectionExportArgs` which includes `destination`, `file_paths`, `reset`, `keep_structure`, `exclude_errors`, `replacer`, and the `redis` flag.
- Creates a `.collection` file in the destination directory with basic collection information.
- **Redis Integration**:
  - Obtains a Redis client using `base.context().redis()`.
  - Stores the collection's destination path in Redis using `redis.hset('doctree:path', 'collection_name', 'destination_path')`.
  - Calls `export_pages`, `export_files`, `export_images`, and `export_linked_pages`, which all interact with Redis if the `redis` flag is true.
- **`export_pages`**:
  - Processes page links and handles not-found errors.
  - Writes markdown content to the destination file system.
  - Stores page metadata in Redis: `redis.hset('doctree:collection_name', 'page_name', 'page_file_name.md')`.
- **`export_files` and `export_images`**:
  - Copies files and images to the destination directory (e.g., `img/`).
  - Stores file/image metadata in Redis: `redis.hset('doctree:collection_name', 'file_name', 'img/file_name.ext')`.
- **`export_linked_pages`**:
  - Gathers linked pages within the collection.
  - Writes a `.linkedpages` file.
  - Stores linked pages file metadata in Redis: `redis.hset('doctree:collection_name', 'linkedpages', 'linkedpages_file_name.md')`.

## Link between Redis and Export

The `doctree` export process uses Redis as a metadata store. When the `redis` flag is set to `true` (which is the default), the export functions populate Redis with key-value pairs that map collection names, page names, file names, and image names to their respective paths and file names within the exported documentation structure.

This Redis integration serves as a quick lookup mechanism for other applications or services that might need to access or reference the exported documentation. Instead of traversing the file system, these services can query Redis to get the location of specific documentation elements.

## Is Export Needed?

Yes, the export functionality is crucial for making the processed `doctree` content available outside the internal `doctree` representation.

- **File System Export**: The core purpose of the export is to write the documentation content (markdown files, images, other assets) to a specified directory. This is essential for serving the documentation via a web server, integrating with static site generators (like Docusaurus, as suggested by other files in the project), or simply providing a browsable version of the documentation.
- **Redis Metadata**: While the file system export is fundamental, the Redis metadata storage is an important complementary feature. It provides an efficient way for other systems to programmatically discover and locate documentation assets. If there are downstream applications that rely on this Redis metadata for navigation, search, or content delivery, then the Redis part of the export is indeed needed. If no such applications exist or are planned, the `redis` flag can be set to `false` to skip this step, but the file system export itself remains necessary for external consumption of the documentation.
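
To make the lookup side concrete, here is a minimal sketch of resolving an exported page through that Redis metadata. It assumes the `base.context().redis()` accessor and the key layout described above; the module path, the `hget` calls, and the helper itself are illustrative, not the real doctree client API:

```v
// Sketch only: resolve a page's exported path from the Redis metadata
// layout documented above. Signatures are assumed, not verified.
import incubaid.herolib.core.base

fn page_path(collection string, page string) !string {
	mut redis := base.context()!.redis()!
	dest := redis.hget('doctree:path', collection)! // collection export dir
	file := redis.hget('doctree:${collection}', page)! // exported .md file name
	return '${dest}/${file}'
}
```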
@@ -2,38 +2,13 @@

This manual provides a comprehensive guide on how to leverage HeroLib's Docusaurus integration, Doctree, and HeroScript to create and manage technical ebooks, optimized for AI-driven content generation and project management.

## Quick Start - Recommended Ebook Structure

The recommended directory structure for an ebook:

```
my_ebook/
├── scan.hero             # DocTree collection scanning
├── config.hero           # Site configuration
├── menus.hero            # Navbar and footer configuration
├── include.hero          # Docusaurus define and doctree export
├── 1_intro.heroscript    # Page definitions (numbered for ordering)
├── 2_concepts.heroscript # More page definitions
└── 3_advanced.heroscript # Additional pages
```

**Running an ebook:**

```bash
# Start development server
hero docs -d -p /path/to/my_ebook

# Build for production
hero docs -p /path/to/my_ebook
```

## 1. Core Concepts

To effectively create ebooks with HeroLib, it's crucial to understand the interplay of three core components:

* **HeroScript**: A concise scripting language used to define the structure, configuration, and content flow of your Docusaurus site. It acts as the declarative interface for the entire process. Files use `.hero` extension for configuration and `.heroscript` for page definitions.
* **HeroScript**: A concise scripting language used to define the structure, configuration, and content flow of your Docusaurus site. It acts as the declarative interface for the entire process.
* **Docusaurus**: A popular open-source static site generator. HeroLib uses Docusaurus as the underlying framework to render your ebook content into a navigable website.
* **DocTree**: HeroLib's document collection layer. DocTree scans and exports markdown "collections" and "pages" that Docusaurus consumes.
* **Doctree**: HeroLib's content management system. Doctree organizes your markdown files into "collections" and "pages," allowing for structured content retrieval and reuse across multiple projects.

## 2. Setting Up a Docusaurus Project with HeroLib

@@ -47,26 +22,18 @@ The `docusaurus.define` HeroScript directive configures the global settings for

```heroscript
!!docusaurus.define
    name:"my_ebook" // must match the site name from !!site.config
    path_build: "/tmp/my_ebook_build"
    path_publish: "/tmp/my_ebook_publish"
    reset: true // clean build dir before building (optional)
    install: true // run bun install if needed (optional)
    template_update: true // update the Docusaurus template (optional)
    doctree_dir: "/tmp/doctree_export" // where DocTree exports collections
    use_doctree: true // use DocTree as content backend
    production: true
    update: true
```

**Arguments:**

* `name` (string, required): The site/factory name. Must match the `name` used in `!!site.config` so Docusaurus can find the corresponding site definition.
* `path_build` (string, optional): The local path where the Docusaurus site will be built. Defaults to `~/hero/var/docusaurus/build`.
* `path_publish` (string, optional): The local path where the final Docusaurus site will be published (e.g., for deployment). Defaults to `~/hero/var/docusaurus/publish`.
* `reset` (boolean, optional): If `true`, clean the build directory before starting.
* `install` (boolean, optional): If `true`, run dependency installation (e.g., `bun install`).
* `template_update` (boolean, optional): If `true`, update the Docusaurus template.
* `doctree_dir` (string, optional): Directory where DocTree exports collections (used by the DocTree client in `lib/data/doctree/client`).
* `use_doctree` (boolean, optional): If `true`, use the DocTree client as the content backend (default behavior).
* `production` (boolean, optional): If `true`, the site will be built for production (optimized). Default is `false`.
* `update` (boolean, optional): If `true`, the Docusaurus template and dependencies will be updated. Default is `false`.

### 2.2. Adding a Docusaurus Site (`docusaurus.add`)

@@ -86,7 +53,7 @@ The `docusaurus.add` directive defines an individual Docusaurus site (your ebook

```heroscript
!!docusaurus.add
    name:"tfgrid_tech_ebook"
    git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech"
    git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech"
    git_reset:true // Reset Git repository before pulling
    git_pull:true // Pull latest changes
    git_root:"/tmp/git_clones" // Optional: specify a root directory for git clones
@@ -223,18 +190,18 @@ Configure the footer section of your Docusaurus site.

* `href` (string, optional): External URL for the link.
* `to` (string, optional): Internal Docusaurus path.

### 3.4. Publish Destinations (`site.publish`, `site.publish_dev`)
### 3.4. Build Destinations (`site.build_dest`, `site.build_dest_dev`)

Specify where the built Docusaurus site should be deployed. This typically involves an SSH connection defined elsewhere (e.g., `!!site.ssh_connection`).

**HeroScript Example:**

```heroscript
!!site.publish
!!site.build_dest
    ssh_name:"production_server" // Name of a pre-defined SSH connection
    path:"/var/www/my-ebook" // Remote path on the server

!!site.publish_dev
!!site.build_dest_dev
    ssh_name:"dev_server"
    path:"/tmp/dev-ebook"
```

@@ -252,7 +219,7 @@ This powerful feature allows you to pull markdown content and assets from other

```heroscript
!!site.import
    url:'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/cloud_reinvented'
    url:'https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections/cloud_reinvented'
    dest:'cloud_reinvented' // Destination subdirectory within your Docusaurus docs folder
    replace:'NAME:MyName, URGENCY:red' // Optional: comma-separated key:value pairs for text replacement
```

@@ -271,60 +238,49 @@ This is where you define the actual content pages and how they are organized int

```heroscript
// Define a category
!!site.page_category name:'introduction' label:"Introduction to Ebook"
!!site.page_category path:'introduction' label:"Introduction to Ebook" position:10

// Define pages - first page specifies collection, subsequent pages reuse it
!!site.page src:"my_collection:chapter_1_overview"
// Define a page within that category, linking to Doctree content
!!site.page path:'introduction' src:"my_doctree_collection:chapter_1_overview"
    title:"Chapter 1: Overview"
    description:"A brief introduction to the ebook's content."

!!site.page src:"chapter_2_basics"
    title:"Chapter 2: Basics"

// New category with new collection
!!site.page_category name:'advanced' label:"Advanced Topics"

!!site.page src:"advanced_collection:performance"
    title:"Performance Tuning"
    hide_title:true
    position:1 // Order within the category
    hide_title:true // Hide the title on the page itself
```

**Arguments:**

* **`site.page_category`**:
  * `name` (string, required): Category identifier (used internally).
  * `path` (string, required): The path to the category directory within your Docusaurus `docs` folder (e.g., `introduction` will create `docs/introduction/_category_.json`).
  * `label` (string, required): The display name for the category in the sidebar.
  * `position` (int, optional): The order of the category in the sidebar (auto-incremented if omitted).
  * `position` (int, optional): The order of the category in the sidebar.
  * `sitename` (string, optional): If you have multiple Docusaurus sites defined, specify which site this category belongs to. Defaults to the current site's name.
* **`site.page`**:
  * `src` (string, required): **Crucial for DocTree/collection integration.** Format: `collection_name:page_name` for the first page, or just `page_name` to reuse the previous collection.
  * `title` (string, optional): The title of the page. If not provided, HeroLib extracts it from the markdown `# Heading` or uses the page name.
  * `src` (string, required): **Crucial for Doctree integration.** This specifies the source of the page content in the format `collection_name:page_name`. HeroLib will fetch the markdown content from the specified Doctree collection and page.
  * `path` (string, required): The relative path and filename for the generated markdown file within your Docusaurus `docs` folder (e.g., `introduction/chapter_1.md`). If only a directory is provided (e.g., `introduction/`), the `page_name` from `src` will be used as the filename.
  * `title` (string, optional): The title of the page. If not provided, HeroLib will attempt to extract it from the markdown content or use the `page_name`.
  * `description` (string, optional): A short description for the page, used in frontmatter.
  * `position` (int, optional): The order of the page within its category.
  * `hide_title` (boolean, optional): If `true`, the title will not be displayed on the page itself.
  * `draft` (boolean, optional): If `true`, the page will be hidden from navigation.
  * `draft` (boolean, optional): If `true`, the page will be marked as a draft and not included in production builds.
  * `title_nr` (int, optional): If set, HeroLib will re-number the markdown headings (e.g., `title_nr:3` will make `# Heading` become `### Heading`). Useful for consistent heading levels across imported content.

### 3.7. Collections and DocTree/Doctree Integration
### 3.7. Doctree Integration Details

The `site.page` directive's `src` parameter (`collection_name:page_name`) is the bridge to your content collections.
The `site.page` directive's `src` parameter (`collection_name:page_name`) is the bridge to your Doctree content.

**Current default: DocTree export**

1. **Collections**: DocTree exports markdown files into collections under an `export_dir` (see `lib/data/doctree/client`).
2. **Export step**: A separate process (DocTree) writes the collections into `doctree_dir` (e.g., `/tmp/doctree_export`), following the `content/` + `meta/` structure.
3. **Docusaurus consumption**: The Docusaurus module uses the DocTree client (`doctree_client`) to resolve `collection_name:page_name` into markdown content and assets when generating docs.

**Alternative: Doctree/`doctreeclient`**

In older setups, or when explicitly configured, Doctree and `doctreeclient` can still be used to provide the same `collection:page` model:
**How Doctree Works:**

1. **Collections**: Doctree organizes markdown files into logical groups called "collections." A collection is typically a directory containing markdown files and an empty `.collection` file.
2. **Scanning**: You define which collections Doctree should scan using `!!doctree.scan` in a HeroScript file (e.g., `doctree.heroscript`):
2. **Scanning**: You define which collections Doctree should scan using `!!doctree.scan` in a HeroScript file (e.g., `doctree.heroscript`).
   **Example `doctree.heroscript`:**

   ```heroscript
   !!doctree.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections"
   !!doctree.scan git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections"
   ```

   This will pull the `collections` directory from the specified Git URL and make its contents available to Doctree.
3. **Page Retrieval**: When `site.page` references `src:"my_collection:my_page"`, the client (`doctree_client` or `doctreeclient`, depending on configuration) fetches the content of `my_page.md` from the `my_collection` collection.
3. **Page Retrieval**: When `site.page` references `src:"my_collection:my_page"`, HeroLib's `doctreeclient` fetches the content of `my_page.md` from the `my_collection` collection that Doctree has scanned.

## 4. Building and Developing Your Ebook
378  aiprompts/herolib_core/core_heroprompt.md  (new file)
@@ -0,0 +1,378 @@
# HeroPrompt Module

The `heroprompt` module provides a hierarchical workspace-based system for organizing code files and generating structured AI prompts. It enables developers to select files from multiple directories and generate formatted prompts for AI code analysis.

## Key Features

- **Hierarchical Organization**: HeroPrompt → Workspace → Directory → Files
- **Redis Persistence**: All data persists across sessions using Redis
- **Factory Pattern**: Clean API with `get()`, `delete()`, `exists()`, `list()` functions
- **File Selection**: Select specific files or entire directories for analysis
- **Active Workspace**: Manage multiple workspaces with one active at a time
- **Prompt Generation**: Generate structured prompts with file maps, contents, and instructions
- **Template-Based**: Uses V templates for consistent prompt formatting

## Basic Usage

### 1. Getting Started

```v
import incubaid.herolib.develop.heroprompt

// Create or get a HeroPrompt instance
mut hp := heroprompt.get(name: 'my_project', create: true)!

// Create a workspace (first workspace is automatically active)
mut workspace := hp.new_workspace(
	name:        'my_workspace'
	description: 'My project workspace'
)!
```

### 2. Adding Directories

```v
// Add directory and automatically scan all files
mut dir := workspace.add_directory(
	path: '/path/to/your/code'
	name: 'backend'
	scan: true // Scans all files and subdirectories
)!

// Add another directory
mut frontend_dir := workspace.add_directory(
	path: '/path/to/frontend'
	name: 'frontend'
	scan: true
)!
```

### 3. Selecting Files

```v
// Select specific files
dir.select_file(path: '/path/to/your/code/main.v')!
dir.select_file(path: '/path/to/your/code/utils.v')!

// Or select all files in a directory
frontend_dir.select_all()!

// Deselect files
dir.deselect_file(path: '/path/to/your/code/test.v')!

// Deselect all files
dir.deselect_all()!
```

### 4. Generating AI Prompts

```v
// Generate prompt with selected files
prompt := workspace.generate_prompt(
	instruction: 'Review these files and suggest improvements'
)!

println(prompt)

// Or generate with specific files (overrides selection)
prompt2 := workspace.generate_prompt(
	instruction:    'Analyze these specific files'
	selected_files: ['/path/to/file1.v', '/path/to/file2.v']
)!
```

## Factory Functions

### `heroprompt.get(name: string, create: bool) !HeroPrompt`

Gets or creates a HeroPrompt instance.

```v
// Get existing instance or create new one
mut hp := heroprompt.get(name: 'my_project', create: true)!

// Get existing instance only (error if it doesn't exist)
mut hp2 := heroprompt.get(name: 'my_project')!
```

### `heroprompt.delete(name: string) !`

Deletes a HeroPrompt instance from Redis.

```v
heroprompt.delete(name: 'my_project')!
```

### `heroprompt.exists(name: string) !bool`

Checks if a HeroPrompt instance exists.

```v
if heroprompt.exists(name: 'my_project')! {
	println('Instance exists')
}
```

### `heroprompt.list() ![]string`

Lists all HeroPrompt instance names.

```v
instances := heroprompt.list()!
for name in instances {
	println('Instance: ${name}')
}
```

## HeroPrompt Methods

### Workspace Management

#### `hp.new_workspace(name: string, description: string, is_active: bool) !&Workspace`

Creates a new workspace. The first workspace is automatically set as active.

```v
mut ws := hp.new_workspace(
	name:        'backend'
	description: 'Backend API workspace'
)!
```

#### `hp.get_workspace(name: string) !&Workspace`

Retrieves an existing workspace by name.

```v
mut ws := hp.get_workspace('backend')!
```

#### `hp.get_active_workspace() !&Workspace`

Returns the currently active workspace.

```v
mut active := hp.get_active_workspace()!
println('Active workspace: ${active.name}')
```

#### `hp.set_active_workspace(name: string) !`

Sets a workspace as active (deactivates all others).

```v
hp.set_active_workspace('frontend')!
```

#### `hp.list_workspaces() []&Workspace`

Lists all workspaces in the instance.

```v
workspaces := hp.list_workspaces()
for ws in workspaces {
	println('Workspace: ${ws.name}')
}
```

#### `hp.delete_workspace(name: string) !`

Deletes a workspace.

```v
hp.delete_workspace('old_workspace')!
```

## Workspace Methods

### Directory Management

#### `ws.add_directory(path: string, name: string, scan: bool) !&Directory`

Adds a directory to the workspace.

```v
mut dir := ws.add_directory(
	path: '/path/to/code'
	name: 'my_code'
	scan: true // Automatically scans all files
)!
```

#### `ws.list_directories() []&Directory`

Lists all directories in the workspace.

```v
dirs := ws.list_directories()
for dir in dirs {
	println('Directory: ${dir.name}')
}
```

#### `ws.remove_directory(id: string) !`

Removes a directory from the workspace.

```v
ws.remove_directory(id: dir.id)!
```

### Prompt Generation

#### `ws.generate_prompt(instruction: string, selected_files: []string, show_all_files: bool) !string`

Generates a complete AI prompt with file map, contents, and instructions.

```v
// Use selected files (from select_file() calls)
prompt := ws.generate_prompt(
	instruction: 'Review the code'
)!

// Or specify files explicitly
prompt2 := ws.generate_prompt(
	instruction:    'Analyze these files'
	selected_files: ['/path/to/file1.v', '/path/to/file2.v']
	show_all_files: false
)!
```

#### `ws.generate_file_map(selected_files: []string, show_all: bool) !string`

Generates a hierarchical tree structure of files.

```v
file_map := ws.generate_file_map(
	selected_files: ['/path/to/file1.v']
	show_all:       false
)!
println(file_map)
```

#### `ws.generate_file_contents(selected_files: []string, include_path: bool) !string`

Generates formatted file contents.

```v
contents := ws.generate_file_contents(
	selected_files: ['/path/to/file1.v']
	include_path:   true
)!
println(contents)
```

## Directory Methods

### File Selection

#### `dir.select_file(path: string) !`

Marks a file as selected.

```v
dir.select_file(path: '/path/to/file.v')!
```

#### `dir.select_all() !`

Selects all files in the directory and subdirectories.

```v
dir.select_all()!
```

#### `dir.deselect_file(path: string) !`

Deselects a file.

```v
dir.deselect_file(path: '/path/to/file.v')!
```

#### `dir.deselect_all() !`

Deselects all files in the directory.

```v
dir.deselect_all()!
```

### Directory Information

#### `dir.exists() bool`

Checks if the directory exists on the filesystem.

```v
if dir.exists() {
	println('Directory exists')
}
```

#### `dir.get_contents() !DirectoryContent`

Gets all files in the directory (scans if needed).

```v
content := dir.get_contents()!
println('Files: ${content.files.len}')
```

## Generated Prompt Format

The generated prompt uses a template with three sections:

```prompt
<user_instructions>
Review these files and suggest improvements
</user_instructions>

<file_map>
my_project/
├── src/
│   ├── main.v *
│   └── utils.v *
└── README.md *
</file_map>

<file_contents>
File: /path/to/src/main.v
\```v
module main

fn main() {
	println('Hello')
}
\```

</file_contents>
```

Files marked with `*` in the file_map are the selected files included in the prompt.

## Complete Example

```v
import incubaid.herolib.develop.heroprompt

mut hp := heroprompt.get(name: 'my_app', create: true)!
mut ws := hp.new_workspace(name: 'backend')!

mut src_dir := ws.add_directory(path: '/path/to/src', name: 'source', scan: true)!
src_dir.select_file(path: '/path/to/src/main.v')!

prompt := ws.generate_prompt(instruction: 'Review the code')!
println(prompt)

heroprompt.delete(name: 'my_app')!
```

## Tips

- Use `heroprompt.delete()` at start for fresh state
- First workspace is automatically active
- Changes auto-save to Redis
- Use `scan: true` to discover all files
- Create separate workspaces for different contexts
@@ -35,11 +35,11 @@ pub fn play(mut plbook PlayBook) ! {
	if plbook.exists_once(filter: 'docusaurus.define') {
		mut action := plbook.get(filter: 'docusaurus.define')!
		mut p := action.params
		// example how we get parameters from the action, see aiprompts/herolib_core/core_params.md for more details
		path_build := p.get_default('path_build', '')!
		path_publish := p.get_default('path_publish', '')!
		reset := p.get_default_false('reset')
		use_doctree := p.get_default_false('use_doctree')
		// example how we get parameters from the action, see core_params.md for more details
		ds = new(
			path:       p.get_default('path_publish', '')!
			production: p.get_default_false('production')
		)!
	}

	// Process 'docusaurus.add' actions to configure individual Docusaurus sites

@@ -51,4 +51,4 @@ pub fn play(mut plbook PlayBook) ! {
}
```

For detailed information on parameter retrieval methods (e.g., `p.get()`, `p.get_int()`, `p.get_default_true()`), refer to `aiprompts/herolib_core/core_params.md`.
For detailed information on parameter retrieval methods (e.g., `p.get()`, `p.get_int()`, `p.get_default_true()`), refer to `aiprompts/ai_core/core_params.md`.
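
As a hedged illustration of those getters, continuing the snippet above (the key names here are made up; `core_params.md` is the real reference):

```v
// Illustrative params usage; only getters named above are used.
name := p.get('name')! // required string, error if missing
port := p.get_int('port')! // required int
update := p.get_default_true('update') // optional bool, defaults to true
reset := p.get_default_false('reset') // optional bool, defaults to false
dest := p.get_default('dest', '/tmp')! // optional string with a default
```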
@@ -1,5 +1,3 @@
> NOTE: This document is an example snapshot of a developer's filesystem layout for HeroDB/HeroModels. Paths under `/Users/despiegk/...` are illustrative only. For the current, authoritative structure always use the live repository tree (this checkout) and the modules under `lib/hero/heromodels` and `lib/hero/db`.

<file_map>
/Users/despiegk/code/github/incubaid/herolib
├── .github
@@ -1,15 +0,0 @@
# Instructions Archive (Legacy Prompts)

This directory contains **archived / legacy AI prompt material** for `herolib`.

- Files here may describe **older workflows** (e.g. previous documentation generation or model pipelines).
- They are kept for **historical reference** and to help understand how things evolved.
- They are **not** guaranteed to match the current `herolib` implementation.

## Usage Guidelines

- Do **not** use these files as the primary source for new features or refactors.
- When generating code or documentation, prefer:
  1. Code and module docs under `lib/` (e.g. `lib/web/site/ai_instructions.md`, `lib/web/docusaurus/README.md`).
  2. Up-to-date AI instructions under `aiprompts/` (outside of `instructions_archive/`).
- Only consult this directory when you explicitly need to understand **historical behavior** or migrate old flows.
@@ -1,10 +1,51 @@
# module orm

## Contents
- [Constants](#Constants)
- [new_query](#new_query)
- [orm_select_gen](#orm_select_gen)
- [orm_stmt_gen](#orm_stmt_gen)
- [orm_table_gen](#orm_table_gen)
- [Connection](#Connection)
- [Primitive](#Primitive)
- [QueryBuilder[T]](#QueryBuilder[T])
- [reset](#reset)
- [where](#where)
- [or_where](#or_where)
- [order](#order)
- [limit](#limit)
- [offset](#offset)
- [select](#select)
- [set](#set)
- [query](#query)
- [count](#count)
- [insert](#insert)
- [insert_many](#insert_many)
- [update](#update)
- [delete](#delete)
- [create](#create)
- [drop](#drop)
- [last_id](#last_id)
- [MathOperationKind](#MathOperationKind)
- [OperationKind](#OperationKind)
- [OrderType](#OrderType)
- [SQLDialect](#SQLDialect)
- [StmtKind](#StmtKind)
- [InfixType](#InfixType)
- [Null](#Null)
- [QueryBuilder](#QueryBuilder)
- [QueryData](#QueryData)
- [SelectConfig](#SelectConfig)
- [Table](#Table)
- [TableField](#TableField)

## Constants
```v
const num64 = [typeof[i64]().idx, typeof[u64]().idx]
```

[[Return to contents]](#Contents)

```v
const nums = [
@@ -18,7 +59,7 @@ const nums = [
]
```

[[Return to contents]](#Contents)

```v
const float = [
@@ -27,31 +68,31 @@ const float = [
]
```

[[Return to contents]](#Contents)

```v
const type_string = typeof[string]().idx
```

[[Return to contents]](#Contents)

```v
const serial = -1
```

[[Return to contents]](#Contents)

```v
const time_ = -2
```

[[Return to contents]](#Contents)

```v
const enum_ = -3
```

[[Return to contents]](#Contents)

```v
const type_idx = {
@@ -70,19 +111,19 @@ const type_idx = {
}
```

[[Return to contents]](#Contents)

```v
const string_max_len = 2048
```

[[Return to contents]](#Contents)

```v
const null_primitive = Primitive(Null{})
```

[[Return to contents]](#Contents)

## new_query
```v
@@ -91,7 +132,7 @@ fn new_query[T](conn Connection) &QueryBuilder[T]

new_query create a new query object for struct `T`

[[Return to contents]](#Contents)

## orm_select_gen
```v
@@ -100,7 +141,7 @@ fn orm_select_gen(cfg SelectConfig, q string, num bool, qm string, start_pos int

Generates an sql select stmt, from universal parameter
orm - See SelectConfig
q, num, qm, start_pos - see orm_stmt_gen
where - See QueryData

[[Return to contents]](#Contents)

## orm_stmt_gen
```v
@@ -110,7 +151,7 @@ fn orm_stmt_gen(sql_dialect SQLDialect, table Table, q string, kind StmtKind, nu

Generates an sql stmt, from universal parameter
q - The quotes character, which can be different in every type, so it's variable
num - Stmt uses nums at prepared statements (? or ?1)
qm - Character for prepared statement (qm for question mark, as in sqlite)
start_pos - When num is true, it's the start position of the counter

[[Return to contents]](#Contents)

## orm_table_gen
```v
@@ -120,7 +161,7 @@ fn orm_table_gen(sql_dialect SQLDialect, table Table, q string, defaults bool, d

Generates an sql table stmt, from universal parameter
table - Table struct
q - see orm_stmt_gen
defaults - enables default values in stmt
def_unique_len - sets default unique length for texts
fields - See TableField
sql_from_v - Function which maps type indices to sql type names
alternative - Needed for msdb

[[Return to contents]](#Contents)

## Connection
```v
@@ -140,7 +181,7 @@ Interfaces gets called from the backend and can be implemented Since the orm sup

Every function without last_id() returns an optional, which returns an error if present
last_id returns the last inserted id of the db

[[Return to contents]](#Contents)

## Primitive
```v
@@ -162,7 +203,7 @@ type Primitive = InfixType
	| []Primitive
```

[[Return to contents]](#Contents)

## QueryBuilder[T]
## reset
```v
@@ -172,7 +213,7 @@ fn (qb_ &QueryBuilder[T]) reset() &QueryBuilder[T]

reset reset a query object, but keep the connection and table name

[[Return to contents]](#Contents)

## where
```v
@@ -181,7 +222,7 @@ fn (qb_ &QueryBuilder[T]) where(condition string, params ...Primitive) !&QueryBu

where create a `where` clause, it will `AND` with previous `where` clause.
valid tokens in the `condition` include: `field's names`, `operator`, `(`, `)`, `?`, `AND`, `OR`, `||`, `&&`
valid `operator` include: `=`, `!=`, `<>`, `>=`, `<=`, `>`, `<`, `LIKE`, `ILIKE`, `IS NULL`, `IS NOT NULL`, `IN`, `NOT IN`
example: `where('(a > ? AND b <= ?) OR (c <> ? AND (x = ? OR y = ?))', a, b, c, x, y)`

[[Return to contents]](#Contents)
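
A short usage sketch of `where` and friends, stitched together from the signatures on this page (the `User` struct and the open `db` connection are assumed; `.desc` is taken from the OrderType enum below):

```v
// Sketch: dynamic query via the QueryBuilder API documented here.
mut qb := orm.new_query[User](db)
users := qb
	.where('(age > ? AND name != ?) OR city = ?', 18, 'Tom', 'Ghent')!
	.order(.desc, 'id')!
	.limit(10)!
	.query()!
```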
## or_where
```v
@@ -190,7 +231,7 @@ fn (qb_ &QueryBuilder[T]) or_where(condition string, params ...Primitive) !&Quer

or_where create a `where` clause, it will `OR` with previous `where` clause.

[[Return to contents]](#Contents)

## order
```v
@@ -199,7 +240,7 @@ fn (qb_ &QueryBuilder[T]) order(order_type OrderType, field string) !&QueryBuild

order create a `order` clause

[[Return to contents]](#Contents)

## limit
```v
@@ -208,7 +249,7 @@ fn (qb_ &QueryBuilder[T]) limit(limit int) !&QueryBuilder[T]

limit create a `limit` clause

[[Return to contents]](#Contents)

## offset
```v
@@ -217,7 +258,7 @@ fn (qb_ &QueryBuilder[T]) offset(offset int) !&QueryBuilder[T]

offset create a `offset` clause

[[Return to contents]](#Contents)

## select
```v
@@ -226,7 +267,7 @@ fn (qb_ &QueryBuilder[T]) select(fields ...string) !&QueryBuilder[T]

select create a `select` clause

[[Return to contents]](#Contents)

## set
```v
@@ -235,7 +276,7 @@ fn (qb_ &QueryBuilder[T]) set(assign string, values ...Primitive) !&QueryBuilder

set create a `set` clause for `update`

[[Return to contents]](#Contents)

## query
```v
@@ -244,7 +285,7 @@ fn (qb_ &QueryBuilder[T]) query() ![]T

query start a query and return result in struct `T`

[[Return to contents]](#Contents)

## count
```v
@@ -253,7 +294,7 @@ fn (qb_ &QueryBuilder[T]) count() !int

count start a count query and return result

[[Return to contents]](#Contents)

## insert
```v
@@ -262,7 +303,7 @@ fn (qb_ &QueryBuilder[T]) insert[T](value T) !&QueryBuilder[T]

insert insert a record into the database

[[Return to contents]](#Contents)

## insert_many
```v
@@ -271,7 +312,7 @@ fn (qb_ &QueryBuilder[T]) insert_many[T](values []T) !&QueryBuilder[T]

insert_many insert records into the database

[[Return to contents]](#Contents)

## update
```v
@@ -280,7 +321,7 @@ fn (qb_ &QueryBuilder[T]) update() !&QueryBuilder[T]

update update record(s) in the database

[[Return to contents]](#Contents)

## delete
```v
@@ -289,7 +330,7 @@ fn (qb_ &QueryBuilder[T]) delete() !&QueryBuilder[T]

delete delete record(s) in the database

[[Return to contents]](#Contents)

## create
```v
@@ -298,7 +339,7 @@ fn (qb_ &QueryBuilder[T]) create() !&QueryBuilder[T]

create create a table

[[Return to contents]](#Contents)

## drop
```v
@@ -307,7 +348,7 @@ fn (qb_ &QueryBuilder[T]) drop() !&QueryBuilder[T]

drop drop a table

[[Return to contents]](#Contents)

## last_id
```v
@@ -316,7 +357,7 @@ fn (qb_ &QueryBuilder[T]) last_id() int

last_id returns the last inserted id of the db

[[Return to contents]](#Contents)

## MathOperationKind
```v
@@ -328,7 +369,7 @@ enum MathOperationKind {
}
```

[[Return to contents]](#Contents)

## OperationKind
```v
@@ -348,7 +389,7 @@ enum OperationKind {
}
```

[[Return to contents]](#Contents)

## OrderType
```v
@@ -358,7 +399,7 @@ enum OrderType {
}
```

[[Return to contents]](#Contents)

## SQLDialect
```v
@@ -370,7 +411,7 @@ enum SQLDialect {
}
```

[[Return to contents]](#Contents)

## StmtKind
```v
@@ -381,7 +422,7 @@ enum StmtKind {
}
```

[[Return to contents]](#Contents)

## InfixType
```v
@@ -393,14 +434,14 @@ pub:
}
```

[[Return to contents]](#Contents)

## Null
```v
struct Null {}
```

[[Return to contents]](#Contents)

## QueryBuilder
```v
@@ -415,7 +456,7 @@ pub mut:
}
```

[[Return to contents]](#Contents)

## QueryData
```v
@@ -433,7 +474,7 @@ pub mut:

Examples for QueryData in SQL: abc == 3 && b == 'test' => fields[abc, b]; data[3, 'test']; types[index of int, index of string]; kinds[.eq, .eq]; is_and[true];
Every field, data, type & kind of operation in the expr share the same index in the arrays
is_and defines how they're connected to each other (either AND or OR)
parentheses defines which fields will be inside ()
auto_fields are indexes of fields where db should generate a value when absent in an insert

[[Return to contents]](#Contents)

## SelectConfig
```v
@@ -455,7 +496,7 @@ pub mut:

table - Table struct
is_count - Either the data will be returned or an integer with the count
has_where - Select all or use a where expr
has_order - Order the results
order - Name of the column which will be ordered
order_type - Type of order (asc, desc)
has_limit - Limits the output data
primary - Name of the primary field
has_offset - Add an offset to the result
fields - Fields to select
types - Types to select

[[Return to contents]](#Contents)

## Table
```v
@@ -466,7 +507,7 @@ pub mut:
}
```

[[Return to contents]](#Contents)

## TableField
```v
@@ -480,3 +521,7 @@ pub mut:
	is_arr bool
}
```

[[Return to contents]](#Contents)

#### Powered by vdoc. Generated on: 2 Sep 2025 07:19:37
@@ -1,282 +0,0 @@
# V ORM — Developer Cheat Sheet

*Fast reference for Struct Mapping, CRUD, Attributes, Query Builder, and Usage Patterns*

---

## 1. What V ORM Is

* Built-in ORM for **SQLite**, **MySQL**, **PostgreSQL**
* Unified V-syntax; no SQL string building
* Automatic query sanitization
* Compile-time type & field checks
* Structs map directly to tables

---

## 2. Define Models (Struct ↔ Table)

### Basic Example

```v
struct User {
	id    int    @[primary; sql: serial]
	name  string
	email string @[unique]
}
```

### Nullable Fields

```v
age ?int // allows NULL
```
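
A hedged sketch of how such a nullable column is used from V (struct and values are illustrative):

```v
// An option field maps to a NULL-able column; `none` stores NULL.
struct Person {
	id  int @[primary; sql: serial]
	age ?int
}

p := Person{
	age: none // no age recorded yet
}
```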
---

## 3. Struct Attributes

### Table-level

| Attribute | Meaning |
| ---------------------------- | ------------------------- |
| `@[table: 'custom_name']` | Override table name |
| `@[comment: '...']` | Table comment |
| `@[index: 'field1, field2']` | Creates multi-field index |
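
For instance, the table-level attributes above can be combined (struct and field names are illustrative):

```v
// Sketch: table renamed and given a multi-field index via the
// attributes listed in the table above.
@[table: 'users_archive']
@[index: 'name, email']
struct ArchivedUser {
	id    int @[primary; sql: serial]
	name  string
	email string
}
```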
---

## 4. Field Attributes

| Attribute | Description |
| ------------------------------------------------ | ---------------------------- |
| `@[primary]` | Primary key |
| `@[unique]` | UNIQUE constraint |
| `@[unique: 'group']` | Composite unique group |
| `@[skip]` / `@[sql: '-']` | Ignore field |
| `@[sql: serial]` | Auto-increment key |
| `@[sql: 'col_name']` | Rename column |
| `@[sql_type: 'BIGINT']` | Force SQL type |
| `@[default: 'CURRENT_TIMESTAMP']` | Raw SQL default |
| `@[fkey: 'field']` | Foreign key on a child array |
| `@[references]`, `@[references: 'table(field)']` | FK relationship |
| `@[index]` | Index on field |
| `@[comment: '...']` | Column comment |

### Example

```v
struct Post {
	id        int @[primary; sql: serial]
	title     string
	body      string
	author_id int @[references: 'users(id)']
}
```

---

## 5. ORM SQL Block (Primary API)

### Create Table

```v
sql db {
	create table User
}!
```

### Drop Table

```v
sql db {
	drop table User
}!
```

### Insert

```v
id := sql db {
	insert new_user into User
}!
```

### Select

```v
users := sql db {
	select from User where age > 18 && name != 'Tom'
	order by id desc
	limit 10
}!
```

### Update

```v
sql db {
	update User set name = 'Alice' where id == 1
}!
```

### Delete

```v
sql db {
	delete from User where id > 100
}!
```

---

## 6. Relationships

### One-to-Many

```v
struct Parent {
	id       int     @[primary; sql: serial]
	children []Child @[fkey: 'parent_id']
}

struct Child {
	id        int @[primary; sql: serial]
	parent_id int
}
```
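
A short usage sketch of this relationship, based on V ORM's documented `fkey` behavior (values are illustrative):

```v
// Inserting a parent also inserts its children rows, with parent_id
// filled in via the fkey attribute above.
par := Parent{
	children: [Child{}, Child{}]
}
sql db {
	insert par into Parent
}!
```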
---

## 7. Notes on `time.Time`

* Stored as integer timestamps
* SQL defaults like `NOW()` / `CURRENT_TIMESTAMP` **don’t work** for `time.Time` with V ORM defaults
* Use `@[default: 'CURRENT_TIMESTAMP']` only with custom SQL types
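
A sketch of the workaround implied above: set timestamps from V code instead of relying on SQL defaults (struct and field names are illustrative):

```v
import time

struct Event {
	id         int @[primary; sql: serial]
	created_at time.Time // stored as an integer timestamp
}

// Set the timestamp in V rather than via a SQL default:
ev := Event{
	created_at: time.now()
}
```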
|
||||
|
||||
---
|
||||
|
||||
## 8. Query Builder API (Dynamic Queries)

### Create Builder

```v
mut qb := orm.new_query[User](db)
```

### Create Table

```v
qb.create()!
```

### Insert Many

```v
qb.insert_many(users)!
```

### Select

```v
results := qb
	.select('id, name')!
	.where('age > ?', 18)!
	.order('id DESC')!
	.limit(20)!
	.query()!
```

### Update

```v
qb
	.set('name = ?', 'NewName')!
	.where('id = ?', 1)!
	.update()!
```

### Delete

```v
qb.where('created_at IS NULL')!.delete()!
```

### Complex WHERE

```v
qb.where(
	'(salary > ? AND age < ?) OR (role LIKE ?)',
	3000, 40, '%engineer%'
)!
```

---

## 9. Connecting to Databases

### SQLite

```v
import db.sqlite

db := sqlite.connect('db.sqlite')!
```

### MySQL

```v
import db.mysql

db := mysql.connect(host: 'localhost', user: 'root', password: '', dbname: 'test')!
```

### PostgreSQL

```v
import db.pg

db := pg.connect(host: 'localhost', user: 'postgres', password: '', dbname: 'test')!
```

---

## 10. Full Example (Complete CRUD)

```v
import db.sqlite

struct Customer {
	id    int    @[primary; sql: serial]
	name  string
	email string @[unique]
}

fn main() {
	db := sqlite.connect('customers.db')!

	sql db { create table Customer }!

	new_c := Customer{name: 'Alice', email: 'alice@x.com'}

	new_id := sql db { insert new_c into Customer }!
	println(new_id)

	list := sql db { select from Customer where name == 'Alice' }!
	println(list)

	sql db { update Customer set name = 'Alicia' where id == new_id }!

	sql db { delete from Customer where id == new_id }!
}
```

---

## 11. Best Practices

* Always use `sql db { ... }` for static queries
* Use the QueryBuilder for dynamic conditions (see the sketch below)
* Prefer `sql: serial` for primary keys
* Explicitly define foreign keys
* Use `?T` for nullable fields
* Keep struct names identical to table names unless overridden
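
A minimal sketch contrasting the two APIs, reusing the `User` struct and `db` handle from above (`min_age` is a hypothetical runtime input):

```v
// Static: conditions are fixed at compile time.
adults := sql db {
	select from User where age >= 18
}!

// Dynamic: conditions are assembled at runtime.
mut qb := orm.new_query[User](db)
if min_age > 0 {
	qb.where('age > ?', min_age)!
}
results := qb.query()!
```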
@@ -122,12 +122,12 @@ pub fn play(mut plbook PlayBook) ! {
	if plbook.exists_once(filter: 'docusaurus.define') {
		mut action := plbook.get(filter: 'docusaurus.define')!
		mut p := action.params
		// example of how we get parameters from the action; see aiprompts/herolib_core/core_params.md for more details
		path_build := p.get_default('path_build', '')!
		path_publish := p.get_default('path_publish', '')!
		reset := p.get_default_false('reset')
		use_doctree := p.get_default_false('use_doctree')
	}
	// example of how we get parameters from the action; see core_params.md for more details
	ds = new(
		path:       p.get_default('path_publish', '')!
		production: p.get_default_false('production')
	)!
}

// Process 'docusaurus.add' actions to configure individual Docusaurus sites
actions := plbook.find(filter: 'docusaurus.add')!
@@ -138,7 +138,7 @@ pub fn play(mut plbook PlayBook) ! {
}
```

For detailed information on parameter retrieval methods (e.g., `p.get()`, `p.get_int()`, `p.get_default_true()`), refer to `aiprompts/herolib_core/core_params.md`.
For detailed information on parameter retrieval methods (e.g., `p.get()`, `p.get_int()`, `p.get_default_true()`), refer to `aiprompts/ai_core/core_params.md`.
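
As a quick illustration, a hedged sketch of those getters, composed only from the calls named above and in the `play()` excerpt (the parameter names are illustrative):

```v
// Sketch only: parameter names are placeholders, not from the real action schema.
name := p.get('name')!                       // required; returns an error if missing
port := p.get_int('port')!                   // parsed as an integer
enabled := p.get_default_true('enabled')     // bool defaulting to true
path := p.get_default('path', '/tmp/build')! // string with a fallback value
```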

# PlayBook, process heroscripts

@@ -10,7 +10,6 @@ fp.version('v0.1.0')
fp.description('Compile hero binary in debug or production mode')
fp.skip_executable()

prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
help_requested := fp.bool('help', `h`, false, 'Show help message')

@@ -62,8 +61,6 @@ compile_cmd := if os.user_os() == 'macos' {
		'v -enable-globals -g -w -n -prod hero.v'
	} else {
		'v -n -g -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
		// 'v -n -g -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
		// 'v -cg -enable-globals -parallel-cc -w -n -d use_openssl hero.v'
	}
} else {
	if prod_mode {
@@ -53,7 +53,7 @@ fn do() ! {
	mut cmd := Command{
		name:        'hero'
		description: 'Your HERO toolset.'
		version:     '1.0.38'
		version:     '1.0.36'
	}

	mut toinstall := false
@@ -90,7 +90,7 @@ fn do() ! {
	herocmds.cmd_docusaurus(mut cmd)
	herocmds.cmd_web(mut cmd)
	herocmds.cmd_sshagent(mut cmd)
	herocmds.cmd_doctree(mut cmd)
	herocmds.cmd_atlas(mut cmd)

	cmd.setup()
	cmd.parse(os.args)
@@ -103,4 +103,4 @@ fn main() {
		print_backtrace()
		exit(1)
	}
}
}
@@ -40,3 +40,4 @@ RUN /tmp/install_herolib.vsh && \

ENTRYPOINT ["/bin/bash"]
CMD ["/bin/bash"]
@@ -1,17 +0,0 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.ai.client

mut cl := client.new()!

// response := cl.llms.llm_local.chat_completion(
//	message: 'Explain quantum computing in simple terms'
//	temperature: 0.5
//	max_completion_tokens: 1024
// )!

response := cl.llms.llm_embed.chat_completion(
	message: 'Explain quantum computing in simple terms'
)!

println(response)
@@ -1,12 +1,12 @@
#!/usr/bin/env hero

!!doctree.scan
!!atlas.scan
    git_url: 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/mycelium_economics'

!!doctree.scan
!!atlas.scan
    git_url: 'https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/authentic_web'

// !!doctree.scan
// !!atlas.scan
//     git_url: 'https://git.ourworld.tf/geomind/docs_geomind/src/branch/main/collections/usecases'

!!doctree.export destination: '/tmp/doctree_export'
!!atlas.export destination: '/tmp/atlas_export'
@@ -1,15 +1,15 @@
#!/usr/bin/env hero

!!doctree.scan
    git_url: 'https://git.ourworld.tf/geomind/doctree_geomind/src/branch/main/content'
    meta_path: '/tmp/doctree_export_meta'
!!atlas.scan
    git_url: 'https://git.ourworld.tf/geomind/atlas_geomind/src/branch/main/content'
    meta_path: '/tmp/atlas_export_meta'

!!doctree.scan
    git_url: 'https://git.ourworld.tf/tfgrid/doctree_threefold/src/branch/main/content'
    meta_path: '/tmp/doctree_export_meta'
!!atlas.scan
    git_url: 'https://git.ourworld.tf/tfgrid/atlas_threefold/src/branch/main/content'
    meta_path: '/tmp/atlas_export_meta'
    ignore3: 'static,templates,groups'

!!doctree.export
    destination: '/tmp/doctree_export_test'
!!atlas.export
    destination: '/tmp/atlas_export_test'
    include: true
    redis: true
@@ -1,5 +1,5 @@
#!/usr/bin/env hero

!!doctree.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"
!!atlas.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"

!!doctree.export destination: '/tmp/doctree_export'
!!atlas.export destination: '/tmp/atlas_export'
@@ -1,308 +0,0 @@
#!/usr/bin/env -S vrun

import incubaid.herolib.data.doctree
import incubaid.herolib.ui.console
import os

fn main() {
	println('=== ATLAS DEBUG SCRIPT ===\n')

	// Create and scan doctree
	mut a := doctree.new(name: 'main')!

	// Scan the collections
	println('Scanning collections...\n')
	a.scan(
		path: '/Users/despiegk/code/git.ourworld.tf/geomind/docs_geomind/collections/mycelium_nodes_tiers'
	)!
	a.scan(
		path: '/Users/despiegk/code/git.ourworld.tf/geomind/docs_geomind/collections/geomind_compare'
	)!
	a.scan(path: '/Users/despiegk/code/git.ourworld.tf/geomind/docs_geomind/collections/geoaware')!
	a.scan(
		path: '/Users/despiegk/code/git.ourworld.tf/tfgrid/docs_tfgrid4/collections/mycelium_economics'
	)!
	a.scan(
		path: '/Users/despiegk/code/git.ourworld.tf/tfgrid/docs_tfgrid4/collections/mycelium_concepts'
	)!
	a.scan(
		path: '/Users/despiegk/code/git.ourworld.tf/tfgrid/docs_tfgrid4/collections/mycelium_cloud_tech'
	)!

	// Initialize doctree (post-scanning validation)
	a.init_post()!

	// Print all pages per collection
	println('\n=== COLLECTIONS & PAGES ===\n')
	for col_name, col in a.collections {
		println('Collection: ${col_name}')
		println('  Pages (${col.pages.len}):')
		if col.pages.len > 0 {
			for page_name, _ in col.pages {
				println('    - ${page_name}')
			}
		} else {
			println('    (empty)')
		}
		println('  Files/Images (${col.files.len}):')
		if col.files.len > 0 {
			for file_name, _ in col.files {
				println('    - ${file_name}')
			}
		} else {
			println('    (empty)')
		}
	}

	// Validate links (this will recursively find links across collections)
	println('\n=== VALIDATING LINKS (RECURSIVE) ===\n')
	a.validate_links()!
	println('✓ Link validation complete\n')

	// Check for broken links
	println('\n=== BROKEN LINKS ===\n')
	mut total_errors := 0
	for col_name, col in a.collections {
		if col.has_errors() {
			println('Collection: ${col_name} (${col.errors.len} errors)')
			for err in col.errors {
				println('  [${err.category_str()}] Page: ${err.page_key}')
				println('  Message: ${err.message}')
				println('')
				total_errors++
			}
		}
	}

	if total_errors == 0 {
		println('✓ No broken links found!')
	} else {
		println('\n❌ Total broken link errors: ${total_errors}')
	}

	// Show discovered links per page (validates recursive discovery)
	println('\n\n=== DISCOVERED LINKS (RECURSIVE RESOLUTION) ===\n')
	println('Checking for files referenced by cross-collection pages...\n')
	mut total_links := 0
	for col_name, col in a.collections {
		mut col_has_links := false
		for page_name, page in col.pages {
			if page.links.len > 0 {
				if !col_has_links {
					println('Collection: ${col_name}')
					col_has_links = true
				}
				println('  Page: ${page_name} (${page.links.len} links)')
				for link in page.links {
					target_col := if link.target_collection_name != '' {
						link.target_collection_name
					} else {
						col_name
					}
					println('    → ${target_col}:${link.target_item_name} [${link.file_type}]')
					total_links++
				}
			}
		}
	}
	println('\n✓ Total links discovered: ${total_links}')

	// List pages that need investigation
	println('\n=== CHECKING SPECIFIC MISSING PAGES ===\n')

	missing_pages := [
		'compare_electricity',
		'internet_basics',
		'centralization_risk',
		'gdp_negative',
	]

	// Check in geoaware collection
	if 'geoaware' in a.collections {
		mut geoaware := a.get_collection('geoaware')!

		println('Collection: geoaware')
		if geoaware.pages.len > 0 {
			println('  All pages in collection:')
			for page_name, _ in geoaware.pages {
				println('    - ${page_name}')
			}
		} else {
			println('  (No pages found)')
		}

		println('\n  Checking for specific missing pages:')
		for page_name in missing_pages {
			exists := page_name in geoaware.pages
			status := if exists { '✓' } else { '✗' }
			println('  ${status} ${page_name}')
		}
	}

	// Check for pages across all collections
	println('\n\n=== LOOKING FOR MISSING PAGES ACROSS ALL COLLECTIONS ===\n')

	for missing_page in missing_pages {
		println('Searching for "${missing_page}":')
		mut found := false
		for col_name, col in a.collections {
			if missing_page in col.pages {
				println('  ✓ Found in: ${col_name}')
				found = true
			}
		}
		if !found {
			println('  ✗ Not found in any collection')
		}
	}

	// Check for the solution page
	println('\n\n=== CHECKING FOR "solution" PAGE ===\n')
	for col_name in ['mycelium_nodes_tiers', 'geomind_compare', 'geoaware', 'mycelium_economics',
		'mycelium_concepts', 'mycelium_cloud_tech'] {
		if col_name in a.collections {
			mut col := a.get_collection(col_name)!
			exists := col.page_exists('solution')!
			status := if exists { '✓' } else { '✗' }
			println('${status} ${col_name}: "solution" page')
		}
	}

	// Print error summary
	println('\n\n=== ERROR SUMMARY BY CATEGORY ===\n')
	mut category_counts := map[string]int{}
	for _, col in a.collections {
		for err in col.errors {
			cat_str := err.category_str()
			category_counts[cat_str]++
		}
	}

	if category_counts.len == 0 {
		println('✓ No errors found!')
	} else {
		for cat, count in category_counts {
			println('${cat}: ${count}')
		}
	}

	// ===== EXPORT AND FILE VERIFICATION TEST =====
	println('\n\n=== EXPORT AND FILE VERIFICATION TEST ===\n')

	// Create export directory
	export_path := '/tmp/doctree_debug_export'
	if os.exists(export_path) {
		os.rmdir_all(export_path)!
	}
	os.mkdir_all(export_path)!

	println('Exporting to: ${export_path}\n')
	a.export(destination: export_path)!
	println('✓ Export completed\n')

	// Collect all files found during link validation
	mut expected_files := map[string]string{} // key: file_name, value: collection_name
	mut file_count := 0
	for col_name, col in a.collections {
		for page_name, page in col.pages {
			for link in page.links {
				if link.status == .found && (link.file_type == .file || link.file_type == .image) {
					file_key := link.target_item_name
					expected_files[file_key] = link.target_collection_name
					file_count++
				}
			}
		}
	}

	println('Expected to find ${file_count} file references in links\n')
	println('=== VERIFYING FILES IN EXPORT DIRECTORY ===\n')

	// Get the first collection name (the primary exported collection)
	mut primary_col_name := ''
	for col_name, _ in a.collections {
		primary_col_name = col_name
		break
	}

	if primary_col_name == '' {
		println('❌ No collections found')
	} else {
		mut verified_count := 0
		mut missing_count := 0
		mut found_files := map[string]bool{}

		// Check both img and files directories
		img_dir := '${export_path}/content/${primary_col_name}/img'
		files_dir := '${export_path}/content/${primary_col_name}/files'

		// Scan img directory
		if os.exists(img_dir) {
			img_files := os.ls(img_dir) or { []string{} }
			for img_file in img_files {
				found_files[img_file] = true
			}
		}

		// Scan files directory
		if os.exists(files_dir) {
			file_list := os.ls(files_dir) or { []string{} }
			for file in file_list {
				found_files[file] = true
			}
		}

		println('Files/Images found in export directory:')
		if found_files.len > 0 {
			for file_name, _ in found_files {
				println('  ✓ ${file_name}')
				if file_name in expected_files {
					verified_count++
				}
			}
		} else {
			println('  (none found)')
		}

		println('\n=== FILE VERIFICATION RESULTS ===\n')
		println('Expected files from links: ${file_count}')
		println('Files found in export: ${found_files.len}')
		println('Files verified (present in export): ${verified_count}')

		// Check for missing expected files
		for expected_file, source_col in expected_files {
			if expected_file !in found_files {
				missing_count++
				println('  ✗ Missing: ${expected_file} (from ${source_col})')
			}
		}

		if missing_count > 0 {
			println('\n❌ ${missing_count} expected files are MISSING from export!')
		} else if verified_count == file_count && file_count > 0 {
			println('\n✓ All expected files are present in export directory!')
		} else if file_count == 0 {
			println('\n⚠ No file links were found during validation (check if pages have file references)')
		}

		// Show directory structure
		println('\n=== EXPORT DIRECTORY STRUCTURE ===\n')
		if os.exists('${export_path}/content/${primary_col_name}') {
			println('${export_path}/content/${primary_col_name}/')

			content_files := os.ls('${export_path}/content/${primary_col_name}') or { []string{} }
			for item in content_files {
				full_path := '${export_path}/content/${primary_col_name}/${item}'
				if os.is_dir(full_path) {
					sub_items := os.ls(full_path) or { []string{} }
					println('  ${item}/ (${sub_items.len} items)')
					for sub_item in sub_items {
						println('    - ${sub_item}')
					}
				} else {
					println('  - ${item}')
				}
			}
		}
	}
}
@@ -1,18 +1,18 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.data.doctree
import incubaid.herolib.data.atlas
import incubaid.herolib.core.pathlib
import incubaid.herolib.web.doctree_client
import incubaid.herolib.web.atlas_client
import os

// Example: DocTree Export and AtlasClient Usage
// Example: Atlas Export and AtlasClient Usage

println('DocTree Export & Client Example')
println('Atlas Export & Client Example')
println('============================================================')

// Setup test directory
test_dir := '/tmp/doctree_example'
export_dir := '/tmp/doctree_export'
test_dir := '/tmp/atlas_example'
export_dir := '/tmp/atlas_export'
os.rmdir_all(test_dir) or {}
os.rmdir_all(export_dir) or {}
os.mkdir_all(test_dir)!
@@ -30,9 +30,9 @@ page1.write('# Introduction\n\nWelcome to the docs!')!
mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
page2.write('# Guide\n\n!!include docs:intro\n\nMore content here.')!

// Create and scan doctree
println('\n1. Creating DocTree and scanning...')
mut a := doctree.new(name: 'my_docs')!
// Create and scan atlas
println('\n1. Creating Atlas and scanning...')
mut a := atlas.new(name: 'my_docs')!
a.scan(path: test_dir)!

println(' Found ${a.collections.len} collection(s)')
@@ -60,7 +60,7 @@ println(' ✓ Export complete')

// Use AtlasClient to access exported content
println('\n4. Using AtlasClient to read exported content...')
mut client := doctree_client.new(export_dir: export_dir)!
mut client := atlas_client.new(export_dir: export_dir)!

// List collections
collections := client.list_collections()!
198
examples/develop/heroprompt/README.md
Normal file
@@ -0,0 +1,198 @@
# HeroPrompt Example

Generate structured AI prompts from your codebase with file selection and workspace management.

## Quick Start

Run the example:

```bash
./examples/develop/heroprompt/prompt_example.vsh
```

This example demonstrates the complete workflow from creating a workspace to generating AI prompts.

---

## What is HeroPrompt?

HeroPrompt helps you organize code files and generate structured prompts for AI analysis:

- **Workspace Management**: Organize files into logical workspaces
- **File Selection**: Select specific files or entire directories
- **Prompt Generation**: Generate formatted prompts with file trees and contents
- **Redis Persistence**: All data persists across sessions
- **Active Workspace**: Easily switch between different workspaces

---

## Basic Usage

### 1. Create Instance and Workspace

```v
import incubaid.herolib.develop.heroprompt

// Create or get instance
mut hp := heroprompt.get(name: 'my_project', create: true)!

// Create workspace (the first workspace is automatically active)
mut workspace := hp.new_workspace(
	name:        'my_workspace'
	description: 'My project workspace'
)!
```

### 2. Add Directories

```v
// Add a directory and scan all files
mut dir := workspace.add_directory(
	path: '/path/to/your/code'
	name: 'my_code'
	scan: true // automatically scans all files and subdirectories
)!
```

### 3. Select Files

```v
// Select specific files
dir.select_file(path: '/path/to/file1.v')!
dir.select_file(path: '/path/to/file2.v')!

// Or select all files in the directory
dir.select_all()!
```

### 4. Generate Prompt

```v
// Generate an AI prompt with the selected files
prompt := workspace.generate_prompt(
	instruction: 'Review these files and suggest improvements'
)!

println(prompt)
```

---

## Generated Prompt Format

The generated prompt includes three sections:

````
<user_instructions>
Review these files and suggest improvements
</user_instructions>

<file_map>
my_project/
├── src/
│   ├── main.v *
│   └── utils.v *
└── README.md *
</file_map>

<file_contents>
File: /path/to/src/main.v
```v
module main
...
```
</file_contents>
````

---

## API Reference

### Factory Functions

```v
heroprompt.get(name: 'my_project', create: true)! // get or create
heroprompt.delete(name: 'my_project')!            // delete instance
heroprompt.exists(name: 'my_project')!            // check if it exists
heroprompt.list()!                                // list all instances
```

### HeroPrompt Methods

```v
hp.new_workspace(name: 'ws', description: 'desc')! // create workspace
hp.get_workspace('ws')!                            // get workspace by name
hp.list_workspaces()                               // list all workspaces
hp.delete_workspace('ws')!                         // delete workspace
hp.get_active_workspace()!                         // get active workspace
hp.set_active_workspace('ws')!                     // set active workspace
```

### Workspace Methods

```v
ws.add_directory(path: '/path', name: 'dir', scan: true)! // add directory
ws.list_directories()                                     // list directories
ws.remove_directory(id: 'dir_id')!                        // remove directory
ws.generate_prompt(instruction: 'Review')!                // generate prompt
ws.generate_file_map()!                                   // generate file tree
ws.generate_file_contents()!                              // generate contents
```

### Directory Methods

```v
dir.select_file(path: '/path/to/file')!   // select file
dir.select_all()!                         // select all files
dir.deselect_file(path: '/path/to/file')! // deselect file
dir.deselect_all()!                       // deselect all files
```

---

## Features

### Active Workspace

```v
// Get the currently active workspace
mut active := hp.get_active_workspace()!

// Switch to a different workspace
hp.set_active_workspace('other_workspace')!
```

### Multiple Workspaces

```v
// Create multiple workspaces for different purposes
mut backend := hp.new_workspace(name: 'backend')!
mut frontend := hp.new_workspace(name: 'frontend')!
mut docs := hp.new_workspace(name: 'documentation')!
```

### File Selection

```v
// Select individual files
dir.select_file(path: '/path/to/file.v')!

// Select all files in the directory
dir.select_all()!

// Deselect files
dir.deselect_file(path: '/path/to/file.v')!
dir.deselect_all()!
```

---

## Tips

- Always start with cleanup (`heroprompt.delete()`) in examples to ensure a fresh state
- The first workspace created is automatically set as active
- File selection persists to Redis automatically
- Use `scan: true` when adding directories to automatically scan all files
- Selected files are tracked per directory for efficient management

@@ -1,50 +0,0 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.develop.heroprompt
import os

// mut workspace := heroprompt.new(
//	path: '${os.home_dir()}/code/github/incubaid/herolib'
//	name: 'workspace'
// )!

mut workspace := heroprompt.get(
	name:   'example_ws'
	path:   '${os.home_dir()}/code/github/incubaid/herolib'
	create: true
)!

println('workspace (initial): ${workspace}')
println('selected (initial): ${workspace.selected_children()}')

// Add a directory and a file
workspace.add_dir(path: '${os.home_dir()}/code/github/incubaid/herolib/docker')!
workspace.add_file(
	path: '${os.home_dir()}/code/github/incubaid/herolib/docker/docker_ubuntu_install.sh'
)!
println('selected (after add): ${workspace.selected_children()}')

// Build a prompt from current selection (should be empty now)
mut prompt := workspace.prompt(
	text: 'Using the selected files, i want you to get all print statments'
)

println('--- PROMPT START ---')
println(prompt)
println('--- PROMPT END ---')

// Remove the file by name, then the directory by name
workspace.remove_file(name: 'docker_ubuntu_install.sh') or { println('remove_file: ${err}') }
workspace.remove_dir(name: 'docker') or { println('remove_dir: ${err}') }
println('selected (after remove): ${workspace.selected_children()}')

// List workspaces (names only)
mut all := heroprompt.list_workspaces() or { []&heroprompt.Workspace{} }
mut names := []string{}
for w in all {
	names << w.name
}
println('workspaces: ${names}')

// Optionally delete the example workspace
workspace.delete_workspace() or { println('delete_workspace: ${err}') }
145
examples/develop/heroprompt/prompt_example.vsh
Executable file
@@ -0,0 +1,145 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.develop.heroprompt
import os

println('=== HeroPrompt: AI Prompt Generation Example ===\n')

// ============================================================================
// STEP 1: Cleanup and Setup
// ============================================================================
// Always start fresh - delete any existing instance
println('Step 1: Cleaning up any existing instance...')
heroprompt.delete(name: 'prompt_demo') or {}
println('✓ Cleanup complete\n')

// ============================================================================
// STEP 2: Create HeroPrompt Instance
// ============================================================================
// Get or create a new HeroPrompt instance
// The 'create: true' parameter will create it if it doesn't exist
println('Step 2: Creating HeroPrompt instance...')
mut hp := heroprompt.get(name: 'prompt_demo', create: true)!
println('✓ Created instance: ${hp.name}\n')

// ============================================================================
// STEP 3: Create Workspace
// ============================================================================
// A workspace is a collection of directories and files
// The first workspace is automatically set as active
println('Step 3: Creating workspace...')
mut workspace := hp.new_workspace(
	name:        'my_project'
	description: 'Example project workspace'
)!
println('✓ Created workspace: ${workspace.name}')
println('  Active: ${workspace.is_active}')
println('  Description: ${workspace.description}\n')

// ============================================================================
// STEP 4: Add Directories to Workspace
// ============================================================================
// Add directories containing code you want to analyze
// The 'scan: true' parameter automatically scans all files and subdirectories
println('Step 4: Adding directories to workspace...')

homepath := os.home_dir()

// Add the examples directory
mut examples_dir := workspace.add_directory(
	path: '${homepath}/code/github/incubaid/herolib/examples/develop/heroprompt'
	name: 'examples'
	scan: true
)!
println('✓ Added directory: examples')

// Add the library directory
mut lib_dir := workspace.add_directory(
	path: '${homepath}/code/github/incubaid/herolib/lib/develop/heroprompt'
	name: 'library'
	scan: true
)!
println('✓ Added directory: library\n')

// ============================================================================
// STEP 5: Select Specific Files
// ============================================================================
// You can select specific files from directories for prompt generation
// This is useful when you only want to analyze certain files
println('Step 5: Selecting specific files...')

// Select individual files from the examples directory
examples_dir.select_file(
	path: '${homepath}/code/github/incubaid/herolib/examples/develop/heroprompt/README.md'
)!
println('✓ Selected: README.md')

examples_dir.select_file(
	path: '${homepath}/code/github/incubaid/herolib/examples/develop/heroprompt/prompt_example.vsh'
)!
println('✓ Selected: prompt_example.vsh')

// Select all files from the library directory
lib_dir.select_all()!
println('✓ Selected all files in library directory\n')

// ============================================================================
// STEP 6: Generate AI Prompt
// ============================================================================
// Generate a complete prompt with file map, file contents, and instructions
// The prompt automatically includes only the selected files
println('Step 6: Generating AI prompt...')

prompt := workspace.generate_prompt(
	instruction: 'Review the selected files and provide suggestions for improvements.'
)!

println('✓ Generated prompt')
println('  Total length: ${prompt.len} characters\n')

// ============================================================================
// STEP 7: Display Prompt Preview
// ============================================================================
println('Step 7: Prompt preview (first 800 characters)...')
preview_len := if prompt.len > 800 { 800 } else { prompt.len }
println(prompt[..preview_len])

// ============================================================================
// STEP 8: Alternative - Get Active Workspace
// ============================================================================
// You can retrieve the active workspace without knowing its name
println('Step 8: Working with active workspace...')

mut active_ws := hp.get_active_workspace()!
println('✓ Retrieved active workspace: ${active_ws.name}')
println('  Directories: ${active_ws.directories.len}')
println('  Files: ${active_ws.files.len}\n')

// ============================================================================
// STEP 9: Set Different Active Workspace
// ============================================================================
// You can create multiple workspaces and switch between them
println('Step 9: Creating and switching workspaces...')

// Create a second workspace
mut workspace2 := hp.new_workspace(
	name:        'documentation'
	description: 'Documentation workspace'
	is_active:   false
)!
println('✓ Created workspace: ${workspace2.name}')

// Switch active workspace
hp.set_active_workspace('documentation')!
println('✓ Set active workspace to: documentation')

// Verify the switch
active_ws = hp.get_active_workspace()!
println('✓ Current active workspace: ${active_ws.name}\n')

// ============================================================================
// STEP 10: Cleanup
// ============================================================================
println('Step 10: Cleanup...')
heroprompt.delete(name: 'prompt_demo')!
println('✓ Deleted instance\n')
5
examples/virt/hetzner/.gitignore
vendored
@@ -1,4 +1 @@
hetzner_kristof1
hetzner_kristof2
hetzner_kristof3
hetzner_test1
hetzner_example
@@ -1,3 +0,0 @@
export HETZNER_USER="#ws+JdQtGCdL"
export HETZNER_PASSWORD="Kds007kds!"
export HETZNER_SSHKEY_NAME="mahmoud"
@@ -1,34 +1,37 @@
#!/usr/bin/env hero
#!/usr/bin/env hero

// # Configure HetznerManager; replace with your own credentials, server IDs, SSH key name, and all other parameters

!!hetznermanager.configure
    user:"user_name"
    whitelist:"server_id"
    password:"password"
    sshkey:"ssh_key_name"
// !!hetznermanager.configure
//     name:"main"
//     user:"krist"
//     whitelist:"2111181, 2392178, 2545053, 2542166, 2550508, 2550378,2550253"
//     password:"wontsethere"
//     sshkey:"kristof"

!!hetznermanager.server_rescue
    server_name: 'server_name' // The name of the server to manage (or use `id`)
    wait: true                 // Wait for the operation to complete
    hero_install: true         // Automatically install Herolib in the rescue system

// !!hetznermanager.server_rescue
//     server_name: 'kristof21' // The name of the server to manage (or use `id`)
//     wait: true               // Wait for the operation to complete
//     hero_install: true       // Automatically install Herolib in the rescue system


// # Reset a server
!!hetznermanager.server_reset
    instance: 'main'
    server_name: 'server_name'
    wait: true
// !!hetznermanager.server_reset
//     instance: 'main'
//     server_name: 'your-server-name'
//     wait: true

// # Add a new SSH key to your Hetzner account
!!hetznermanager.key_create
    instance: 'main'
    key_name: 'ssh_key_name'
    data: 'ssh-rsa AAAA...'
// !!hetznermanager.key_create
//     instance: 'main'
//     key_name: 'my-laptop-key'
//     data: 'ssh-rsa AAAA...'


// Install Ubuntu 24.04 on a server
!!hetznermanager.ubuntu_install
    server_name: 'server_name'
    server_name: 'kristof2'
    wait: true
    hero_install: true // Install Herolib on the new OS
68
examples/virt/hetzner/hetzner_example.vsh
Executable file
@@ -0,0 +1,68 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.virt.hetznermanager
import incubaid.herolib.ui.console
import incubaid.herolib.core.base
import incubaid.herolib.builder
import time
import os
import incubaid.herolib.core.playcmds

user := os.environ()['HETZNER_USER'] or {
	println('HETZNER_USER not set')
	exit(1)
}
passwd := os.environ()['HETZNER_PASSWORD'] or {
	println('HETZNER_PASSWORD not set')
	exit(1)
}

hs := '
!!hetznermanager.configure
    user:"${user}"
    whitelist:"2111181, 2392178, 2545053, 2542166, 2550508, 2550378,2550253"
    password:"${passwd}"
    sshkey:"kristof"
'

println(hs)

playcmds.run(heroscript: hs)!

console.print_header('Hetzner Test.')

mut cl := hetznermanager.get()!
// println(cl)

// for i in 0 .. 5 {
//	println('test cache, first time slow then fast')
// }

println(cl.servers_list()!)

// mut serverinfo := cl.server_info_get(name: 'kristof2')!

// println(serverinfo)

// cl.server_reset(name:"kristof2",wait:true)!

// don't forget to specify the keyname needed
// cl.server_rescue(name:"kristof2",wait:true, hero_install:true,sshkey_name:"kristof")!

// mut ks:=cl.keys_get()!
// println(ks)

// console.print_header('SSH login')
// mut b := builder.new()!
// mut n := b.node_new(ipaddr: serverinfo.server_ip)!

// this will put hero in debug mode on the system
// n.hero_install(compile:true)!

// n.shell("")!

// cl.ubuntu_install(name: 'kristof2', wait: true, hero_install: true)!
// cl.ubuntu_install(name: 'kristof20', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550378, name: 'kristof21', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550508, name: 'kristof22', wait: true, hero_install: true)!
cl.ubuntu_install(id: 2550253, name: 'kristof23', wait: true, hero_install: true)!
@@ -1,79 +0,0 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.virt.hetznermanager
import incubaid.herolib.ui.console
import incubaid.herolib.core.base
import incubaid.herolib.builder
import time
import os
import incubaid.herolib.core.playcmds

// Server-specific configuration
const server_name = 'kristof1'
const server_whitelist = '2521602'

// Load credentials from environment variables
// Source hetzner_env.sh before running: source examples/virt/hetzner/hetzner_env.sh
hetzner_user := os.environ()['HETZNER_USER'] or {
	println('HETZNER_USER not set')
	exit(1)
}

hetzner_passwd := os.environ()['HETZNER_PASSWORD'] or {
	println('HETZNER_PASSWORD not set')
	exit(1)
}

hetzner_sshkey_name := os.environ()['HETZNER_SSHKEY_NAME'] or {
	println('HETZNER_SSHKEY_NAME not set')
	exit(1)
}

hs := '
!!hetznermanager.configure
    user:"${hetzner_user}"
    whitelist:"${server_whitelist}"
    password:"${hetzner_passwd}"
    sshkey:"${hetzner_sshkey_name}"
'

println(hs)

playcmds.run(heroscript: hs)!

console.print_header('Hetzner Test.')

mut cl := hetznermanager.get()!
// println(cl)

// for i in 0 .. 5 {
//	println('test cache, first time slow then fast')
// }

println(cl.servers_list()!)

mut serverinfo := cl.server_info_get(name: server_name)!

println(serverinfo)

// cl.server_reset(name: 'kristof2', wait: true)!

// cl.server_rescue(name: name, wait: true, hero_install: true)!

// mut ks := cl.keys_get()!
// println(ks)

// console.print_header('SSH login')

cl.ubuntu_install(name: server_name, wait: true, hero_install: true)!
// cl.ubuntu_install(name: 'kristof20', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550378, name: 'kristof21', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550508, name: 'kristof22', wait: true, hero_install: true)!
// cl.ubuntu_install(id: 2550253, name: 'kristof23', wait: true, hero_install: true)!

// this will put hero in debug mode on the system
mut b := builder.new()!
mut n := b.node_new(ipaddr: serverinfo.server_ip)!
n.hero_install(compile: true)!

n.shell('')!
@@ -1,54 +0,0 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.virt.hetznermanager
import incubaid.herolib.ui.console
import incubaid.herolib.core.base
import incubaid.herolib.builder
import time
import os
import incubaid.herolib.core.playcmds

// Server-specific configuration
const server_name = 'kristof2'
const server_whitelist = '2555487'

// Load credentials from environment variables
// Source hetzner_env.sh before running: source examples/virt/hetzner/hetzner_env.sh
hetzner_user := os.environ()['HETZNER_USER'] or {
	println('HETZNER_USER not set')
	exit(1)
}

hetzner_passwd := os.environ()['HETZNER_PASSWORD'] or {
	println('HETZNER_PASSWORD not set')
	exit(1)
}

hetzner_sshkey_name := os.environ()['HETZNER_SSHKEY_NAME'] or {
	println('HETZNER_SSHKEY_NAME not set')
	exit(1)
}

hero_script := '
!!hetznermanager.configure
    user:"${hetzner_user}"
    whitelist:"${server_whitelist}"
    password:"${hetzner_passwd}"
    sshkey:"${hetzner_sshkey_name}"
'

playcmds.run(heroscript: hero_script)!
mut hetznermanager_ := hetznermanager.get()!

mut serverinfo := hetznermanager_.server_info_get(name: server_name)!

println('${server_name} ${serverinfo.server_ip}')

hetznermanager_.server_rescue(name: server_name, wait: true, hero_install: true)!
mut keys := hetznermanager_.keys_get()!

mut b := builder.new()!
mut n := b.node_new(ipaddr: serverinfo.server_ip)!

hetznermanager_.ubuntu_install(name: server_name, wait: true, hero_install: true)!
n.shell('')!
@@ -1,79 +0,0 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.virt.hetznermanager
import incubaid.herolib.ui.console
import incubaid.herolib.core.base
import incubaid.herolib.builder
import time
import os
import incubaid.herolib.core.playcmds

// Server-specific configuration
const server_name = 'kristof3'
const server_whitelist = '2573047'

// Load credentials from environment variables
// Source hetzner_env.sh before running: source examples/virt/hetzner/hetzner_env.sh
hetzner_user := os.environ()['HETZNER_USER'] or {
	println('HETZNER_USER not set')
	exit(1)
}

hetzner_passwd := os.environ()['HETZNER_PASSWORD'] or {
	println('HETZNER_PASSWORD not set')
	exit(1)
}

hetzner_sshkey_name := os.environ()['HETZNER_SSHKEY_NAME'] or {
	println('HETZNER_SSHKEY_NAME not set')
	exit(1)
}

hs := '
!!hetznermanager.configure
    user:"${hetzner_user}"
    whitelist:"${server_whitelist}"
    password:"${hetzner_passwd}"
    sshkey:"${hetzner_sshkey_name}"
'

println(hs)

playcmds.run(heroscript: hs)!

console.print_header('Hetzner Test.')

mut cl := hetznermanager.get()!
// println(cl)

// for i in 0 .. 5 {
//	println('test cache, first time slow then fast')
// }

println(cl.servers_list()!)

mut serverinfo := cl.server_info_get(name: server_name)!

println(serverinfo)

// cl.server_reset(name: 'kristof2', wait: true)!

// cl.server_rescue(name: name, wait: true, hero_install: true)!

// mut ks := cl.keys_get()!
// println(ks)

// console.print_header('SSH login')

cl.ubuntu_install(name: server_name, wait: true, hero_install: true)!
// cl.ubuntu_install(name: 'kristof20', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550378, name: 'kristof21', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550508, name: 'kristof22', wait: true, hero_install: true)!
// cl.ubuntu_install(id: 2550253, name: 'kristof23', wait: true, hero_install: true)!

// this will put hero in debug mode on the system
mut b := builder.new()!
mut n := b.node_new(ipaddr: serverinfo.server_ip)!
n.hero_install(compile: true)!

n.shell('')!
@@ -1,79 +0,0 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.virt.hetznermanager
import incubaid.herolib.ui.console
import incubaid.herolib.core.base
import incubaid.herolib.builder
import time
import os
import incubaid.herolib.core.playcmds

// Server-specific configuration
const server_name = 'test1'
const server_whitelist = '2575034'

// Load credentials from environment variables
// Source hetzner_env.sh before running: source examples/virt/hetzner/hetzner_env.sh
hetzner_user := os.environ()['HETZNER_USER'] or {
	println('HETZNER_USER not set')
	exit(1)
}

hetzner_passwd := os.environ()['HETZNER_PASSWORD'] or {
	println('HETZNER_PASSWORD not set')
	exit(1)
}

hetzner_sshkey_name := os.environ()['HETZNER_SSHKEY_NAME'] or {
	println('HETZNER_SSHKEY_NAME not set')
	exit(1)
}

hs := '
!!hetznermanager.configure
    user:"${hetzner_user}"
    whitelist:"${server_whitelist}"
    password:"${hetzner_passwd}"
    sshkey:"${hetzner_sshkey_name}"
'

println(hs)

playcmds.run(heroscript: hs)!

console.print_header('Hetzner Test.')

mut cl := hetznermanager.get()!
// println(cl)

// for i in 0 .. 5 {
//	println('test cache, first time slow then fast')
// }

println(cl.servers_list()!)

mut serverinfo := cl.server_info_get(name: server_name)!

println(serverinfo)

// cl.server_reset(name: 'kristof2', wait: true)!

// cl.server_rescue(name: name, wait: true, hero_install: true)!

// mut ks := cl.keys_get()!
// println(ks)

// console.print_header('SSH login')

cl.ubuntu_install(name: server_name, wait: true, hero_install: true)!
// cl.ubuntu_install(name: 'kristof20', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550378, name: 'kristof21', wait: true, hero_install: true)!
// cl.ubuntu_install(id:2550508, name: 'kristof22', wait: true, hero_install: true)!
// cl.ubuntu_install(id: 2550253, name: 'kristof23', wait: true, hero_install: true)!

// this will put hero in debug mode on the system
mut b := builder.new()!
mut n := b.node_new(ipaddr: serverinfo.server_ip)!
n.hero_install(compile: true)!

n.shell('')!
@@ -1,57 +1,9 @@
# Hetzner Examples

## Quick Start

### 1. Configure Environment Variables

Get the login password from: https://robot.hetzner.com/preferences/index

Copy `hetzner_env.sh` and fill in your credentials:

```bash
export HETZNER_USER="your-robot-username"  # Hetzner Robot API username
export HETZNER_PASSWORD="your-password"    # Hetzner Robot API password
export HETZNER_SSHKEY_NAME="my-key"        # Name of SSH key registered in Hetzner
```

Each script has its own server name and whitelist ID defined at the top.

### 2. Run a Script

```bash
source hetzner_env.sh
./hetzner_kristof2.vsh
```

## SSH Keys

The `HETZNER_SSHKEY_NAME` must be the **name** of an SSH key already registered in your Hetzner Robot account.

Available keys in our Hetzner account:

- hossnys (RSA 2048)
- Jan De Landtsheer (ED25519 256)
- mahmoud (ED25519 256)
- kristof (ED25519 256)
- maxime (ED25519 256)

To add a new key, use `key_create` in your script (see the sketch below) or the Hetzner Robot web interface.
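
A minimal `key_create` sketch, mirroring the action shown in the example heroscript earlier in this repo (the instance, key name, and key data are placeholders):

```heroscript
!!hetznermanager.key_create
    instance: 'main'
    key_name: 'my-laptop-key'   // placeholder name
    data: 'ssh-rsa AAAA...'     // your public key material
```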

## Alternative: Using hero_secrets

You can also use the shared secrets repository:

```bash
hero git pull https://git.threefold.info/despiegk/hero_secrets
source ~/code/git.ourworld.tf/despiegk/hero_secrets/mysecrets.sh
```

## Troubleshooting

### Get Robot API credentials

Get your login credentials from: https://robot.hetzner.com/preferences/index

### Test API access

```bash
curl -u "your-username:your-password" https://robot-ws.your-server.de/server
curl -u "#ws+JdQtGCdL:..." https://robot-ws.your-server.de/server
```
@@ -1,208 +0,0 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.web.doctree.meta

import incubaid.herolib.core.playbook
import incubaid.herolib.ui.console

// Comprehensive HeroScript for testing multi-level navigation depths
const test_heroscript_nav_depth = '
!!site.config
    name: "nav_depth_test"
    title: "Navigation Depth Test Site"
    description: "Testing multi-level nested navigation"
    tagline: "Deep navigation structures"

!!site.navbar
    title: "Nav Depth Test"

!!site.navbar_item
    label: "Home"
    to: "/"
    position: "left"

// ============================================================
// LEVEL 1: Simple top-level category
// ============================================================
!!site.page_category
    path: "Why"
    collapsible: true
    collapsed: false

// COLLECTION WILL BE REPEATED, HAS NO INFLUENCE ON NAVIGATION LEVELS
!!site.page src: "mycollection:intro"
    label: "Why Choose Us"
    title: "Why Choose Us"
    description: "Reasons to use this platform"

!!site.page src: "benefits"
    label: "Key Benefits"
    title: "Key Benefits"
    description: "Main benefits overview"

// ============================================================
// LEVEL 1: Simple top-level category
// ============================================================
!!site.page_category
    path: "Tutorials"
    collapsible: true
    collapsed: false

!!site.page src: "getting_started"
    label: "Getting Started"
    title: "Getting Started"
    description: "Basic tutorial to get started"

!!site.page src: "first_steps"
    label: "First Steps"
    title: "First Steps"
    description: "Your first steps with the platform"

// ============================================================
// LEVEL 3: Three-level nested category (Tutorials > Operations > Urgent)
// ============================================================
!!site.page_category
    path: "Tutorials/Operations/Urgent"
    collapsible: true
    collapsed: false

!!site.page src: "emergency_restart"
    label: "Emergency Restart"
    title: "Emergency Restart"
    description: "How to emergency restart the system"

!!site.page src: "critical_fixes"
    label: "Critical Fixes"
    title: "Critical Fixes"
    description: "Apply critical fixes immediately"

!!site.page src: "incident_response"
    label: "Incident Response"
    title: "Incident Response"
    description: "Handle incidents in real-time"

// ============================================================
// LEVEL 2: Two-level nested category (Tutorials > Operations)
// ============================================================
!!site.page_category
    path: "Tutorials/Operations"
    collapsible: true
    collapsed: false

!!site.page src: "daily_checks"
    label: "Daily Checks"
    title: "Daily Checks"
    description: "Daily maintenance checklist"

!!site.page src: "monitoring"
    label: "Monitoring"
    title: "Monitoring"
    description: "System monitoring procedures"

!!site.page src: "backups"
    label: "Backups"
    title: "Backups"
    description: "Backup and restore procedures"

// ============================================================
// LEVEL 1: One-to-two level (Tutorials)
// ============================================================
// Note: This creates a sibling at the Tutorials level (not nested deeper)
!!site.page src: "advanced_concepts"
    label: "Advanced Concepts"
    title: "Advanced Concepts"
    description: "Deep dive into advanced concepts"

!!site.page src: "troubleshooting"
    label: "Troubleshooting"
    title: "Troubleshooting"
    description: "Troubleshooting guide"

// ============================================================
// LEVEL 2: Two-level nested category (Why > FAQ)
// ============================================================
!!site.page_category
    path: "Why/FAQ"
    collapsible: true
    collapsed: false

!!site.page src: "general"
    label: "General Questions"
    title: "General Questions"
    description: "Frequently asked questions"

!!site.page src: "pricing_questions"
    label: "Pricing"
    title: "Pricing Questions"
    description: "Questions about pricing"

!!site.page src: "technical_faq"
    label: "Technical FAQ"
    title: "Technical FAQ"
    description: "Technical frequently asked questions"

!!site.page src: "support_faq"
    label: "Support"
    title: "Support FAQ"
    description: "Support-related FAQ"

// ============================================================
// LEVEL 4: Four-level nested category (Tutorials > Operations > Database > Optimization)
// ============================================================
!!site.page_category
    path: "Tutorials/Operations/Database/Optimization"
    collapsible: true
    collapsed: false

!!site.page src: "query_optimization"
    label: "Query Optimization"
    title: "Query Optimization"
    description: "Optimize your database queries"

!!site.page src: "indexing_strategy"
    label: "Indexing Strategy"
    title: "Indexing Strategy"
    description: "Effective indexing strategies"

!!site.page_category
    path: "Tutorials/Operations/Database"
    collapsible: true
    collapsed: false

!!site.page src: "configuration"
    label: "Configuration"
    title: "Database Configuration"
    description: "Configure your database"

!!site.page src: "replication"
    label: "Replication"
    title: "Database Replication"
    description: "Set up database replication"
'

fn check(s2 meta.Site) {
	// assert s == s2
}

// ========================================================
// SETUP: Create and process playbook
// ========================================================
console.print_item('Creating playbook from HeroScript')
mut plbook := playbook.new(text: test_heroscript_nav_depth)!
console.print_green('✓ Playbook created')
console.lf()

console.print_item('Processing site configuration')
meta.play(mut plbook)!
console.print_green('✓ Site processed')
console.lf()

console.print_item('Retrieving configured site')
mut nav_site := meta.get(name: 'nav_depth_test')!
console.print_green('✓ Site retrieved')
console.lf()

// check(nav_site)
@@ -1,201 +0,0 @@
# Site Module Usage Guide

## Quick Examples

### 1. Run Basic Example

```bash
cd examples/web/site
vrun process_site.vsh ./
```

Expected output:
```
=== Site Configuration Processor ===
Processing HeroScript files from: ./
Found 1 HeroScript file(s):
 - basic.heroscript

Processing: basic.heroscript

=== Configuration Complete ===
Site: simple_docs
Title: Simple Documentation
Pages: 4
Description: A basic documentation site
Navigation structure:
 - [Page] Getting Started
 - [Page] Installation
 - [Page] Usage Guide
 - [Page] FAQ

✓ Site configuration ready for deployment
```

### 2. Run Multi-Section Example

```bash
vrun process_site.vsh ./
# Edit process_site.vsh to use multi_section.heroscript instead
```

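Alternatively, since `process_site.vsh` processes every `.heroscript` file in the directory you point it at, you can skip editing the script: place `multi_section.heroscript` in a directory of its own and pass that directory instead (the `multi/` directory below is only an illustration, not part of the repository layout):

```bash
mkdir multi && cp multi_section.heroscript multi/
vrun process_site.vsh ./multi
```
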
### 3. Process Custom Directory

```bash
vrun process_site.vsh /path/to/your/site/config
```

## File Structure

```
docs/
├── 0_config.heroscript    # Basic config
├── 1_menu.heroscript      # Navigation
├── 2_pages.heroscript     # Pages and categories
└── process.vsh            # Your processing script
```

## Creating Your Own Site

1. **Create a config directory:**
```bash
mkdir my_site
cd my_site
```

2. **Create config file (0_config.heroscript):**
```heroscript
!!site.config
    name: "my_site"
    title: "My Site"
```

3. **Create pages file (1_pages.heroscript):**
```heroscript
!!site.page src: "docs:intro"
    title: "Getting Started"
```

4. **Process with script:**
```bash
vrun ../process_site.vsh ./
```

## Common Workflows

### Workflow 1: Documentation Site

```
docs/
├── 0_config.heroscript
│   └── Basic config + metadata
├── 1_menu.heroscript
│   └── Navbar + footer
├── 2_getting_started.heroscript
│   └── Getting started pages
├── 3_api.heroscript
│   └── API reference pages
└── 4_advanced.heroscript
    └── Advanced topic pages
```

### Workflow 2: Internal Knowledge Base

```
kb/
├── 0_config.heroscript
├── 1_navigation.heroscript
└── 2_articles.heroscript
```

### Workflow 3: Product Documentation with Imports

```
product_docs/
├── 0_config.heroscript
├── 1_imports.heroscript
│   └── Import shared templates
├── 2_menu.heroscript
└── 3_pages.heroscript
```

## Tips & Tricks

### Tip 1: Reuse Collections

```heroscript
# Specify once, reuse multiple times
!!site.page src: "guides:intro"
!!site.page src: "setup"        # Reuses "guides"
!!site.page src: "deployment"   # Still "guides"

# Switch to new collection
!!site.page src: "api:reference"
!!site.page src: "examples"     # Now "api"
```

### Tip 2: Auto-Increment Categories

```heroscript
# Automatically positioned at 100, 200, 300...
!!site.page_category name: "basics"
!!site.page_category name: "advanced"
!!site.page_category name: "expert"

# Or specify explicit positions
!!site.page_category name: "basics" position: 10
!!site.page_category name: "advanced" position: 20
```

### Tip 3: Title Extraction

Let titles come from markdown files:

```heroscript
# Don't specify title
!!site.page src: "docs:introduction"
# Title will be extracted from # Heading in introduction.md
```

### Tip 4: Draft Pages

Hide pages while working on them:

```heroscript
!!site.page src: "docs:work_in_progress"
    draft: true
    title: "Work in Progress"
```

## Debugging

### Debug: Check What Got Configured

```v
mut s := site.get(name: 'my_site')!
println(s.pages)      // All pages
println(s.nav)        // Navigation structure
println(s.siteconfig) // Configuration
```

### Debug: List All Sites

```v
sites := site.list()
for site_name in sites {
    println('Site: ${site_name}')
}
```

### Debug: Enable Verbose Output

Add `console.print_debug()` calls in your HeroScript processing, as in the sketch below.

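A minimal sketch of such a call, assuming the `site` and `console` modules used elsewhere in this guide (the site name is only an example):

```v
import incubaid.herolib.web.site
import incubaid.herolib.ui.console

// Inspect intermediate state after the HeroScript actions have been played
mut s := site.get(name: 'my_site')!
console.print_debug('site: ${s.siteconfig.title} with ${s.pages.len} page(s)')
```
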
## Next Steps

- Customize `process_site.vsh` for your needs
- Add your existing pages (in markdown)
- Export to Docusaurus
- Deploy to production

For more info, see the main [Site Module README](./readme.md).

@@ -1,53 +0,0 @@
#!/usr/bin/env hero
# Basic single-section documentation site

!!site.config
    name: "simple_docs"
    title: "Simple Documentation"
    description: "A basic documentation site"
    copyright: "© 2024 Example"
    url: "https://docs.example.com"
    base_url: "/"

!!site.navbar
    title: "Simple Docs"
    logo_src: "img/logo.png"

!!site.navbar_item
    label: "Docs"
    to: "/"
    position: "left"

!!site.navbar_item
    label: "GitHub"
    href: "https://github.com/example/repo"
    position: "right"

!!site.footer
    style: "dark"

!!site.footer_item
    title: "Documentation"
    label: "Getting Started"
    to: "getting-started"

!!site.footer_item
    title: "Community"
    label: "Discord"
    href: "https://discord.gg/example"

!!site.page src: "docs:introduction"
    title: "Getting Started"
    description: "Learn the basics"

!!site.page src: "installation"
    title: "Installation"
    description: "How to install"

!!site.page src: "usage"
    title: "Usage Guide"
    description: "How to use the system"

!!site.page src: "faq"
    title: "FAQ"
    description: "Frequently asked questions"
@@ -1,155 +0,0 @@
#!/usr/bin/env hero
# Multi-section documentation with categories

!!site.config
    name: "multi_docs"
    title: "Complete Documentation"
    description: "Comprehensive documentation with multiple sections"
    tagline: "Everything you need to know"
    copyright: "© 2024 Tech Company"
    url: "https://docs.techcompany.com"
    base_url: "/docs"

!!site.navbar
    title: "Tech Documentation"
    logo_src: "img/logo.svg"

!!site.navbar_item
    label: "Documentation"
    to: "/"
    position: "left"

!!site.navbar_item
    label: "API"
    to: "api"
    position: "left"

!!site.navbar_item
    label: "GitHub"
    href: "https://github.com/techcompany"
    position: "right"

!!site.footer
    style: "dark"

!!site.footer_item
    title: "Guides"
    label: "Getting Started"
    to: "getting-started"

!!site.footer_item
    title: "Guides"
    label: "Installation"
    to: "installation"

!!site.footer_item
    title: "Company"
    label: "Website"
    href: "https://techcompany.com"

!!site.footer_item
    title: "Legal"
    label: "Privacy"
    href: "https://techcompany.com/privacy"

# ==================================================
# Getting Started Section
# ==================================================

!!site.page_category
    name: "getting_started"
    label: "Getting Started"
    position: 100

!!site.page src: "docs:introduction"
    title: "Introduction"
    description: "What is this project?"

!!site.page src: "installation"
    title: "Installation"
    description: "Get up and running"

!!site.page src: "quickstart"
    title: "Quick Start"
    description: "Your first steps"

# ==================================================
# Core Concepts Section
# ==================================================

!!site.page_category
    name: "concepts"
    label: "Core Concepts"
    position: 200

!!site.page src: "concepts:architecture"
    title: "Architecture"
    description: "System design and architecture"

!!site.page src: "components"
    title: "Components"
    description: "Main system components"

!!site.page src: "data_flow"
    title: "Data Flow"
    description: "How data flows through the system"

!!site.page src: "security"
    title: "Security"
    description: "Security considerations"

# ==================================================
# Advanced Topics Section
# ==================================================

!!site.page_category
    name: "advanced"
    label: "Advanced Topics"
    position: 300

!!site.page src: "advanced:performance"
    title: "Performance Tuning"
    description: "Optimize your system"

!!site.page src: "scaling"
    title: "Scaling"
    description: "Scale to millions of users"

!!site.page src: "deployment"
    title: "Deployment"
    description: "Deploy to production"

# ==================================================
# API Reference Section
# ==================================================

!!site.page_category
    name: "api"
    label: "API Reference"
    position: 400

!!site.page src: "api:overview"
    title: "API Overview"
    description: "API capabilities and base URLs"

!!site.page src: "rest_api"
    title: "REST API"
    description: "Complete REST API documentation"

!!site.page src: "graphql_api"
    title: "GraphQL"
    description: "GraphQL API documentation"

!!site.page src: "webhooks"
    title: "Webhooks"
    description: "Implement webhooks in your app"

# ==================================================
# Publishing
# ==================================================

!!site.publish
    path: "/var/www/html/docs"

!!site.publish_dev
    path: "/tmp/docs-preview"
@@ -1,116 +0,0 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.core.playbook
import incubaid.herolib.web.site
import incubaid.herolib.ui.console
import os

// Process a site configuration from HeroScript files

println(console.color_fg(.green) + '=== Site Configuration Processor ===' + console.reset())

// Get directory from command line or use default
mut config_dir := './docs'
if os.args.len > 1 {
    config_dir = os.args[1]
}

if !os.exists(config_dir) {
    console.print_stderr('Error: Directory not found: ${config_dir}')
    exit(1)
}

console.print_item('Processing HeroScript files from: ${config_dir}')

// Find all heroscript files
mut heroscript_files := []string{}
entries := os.ls(config_dir) or {
    console.print_stderr('Error reading directory: ${err}')
    exit(1)
}

for entry in entries {
    if entry.ends_with('.heroscript') {
        heroscript_files << entry
    }
}

// Sort files (to ensure numeric prefix order)
heroscript_files.sort()

if heroscript_files.len == 0 {
    console.print_stderr('No .heroscript files found in ${config_dir}')
    exit(1)
}

console.print_item('Found ${heroscript_files.len} HeroScript file(s):')
for file in heroscript_files {
    console.print_item(' - ${file}')
}

// Process each file
mut site_names := []string{}
for file in heroscript_files {
    full_path := os.join_path(config_dir, file)
    console.print_lf(1)
    console.print_header('Processing: ${file}')

    mut plbook := playbook.new(path: full_path) or {
        console.print_stderr('Error loading ${file}: ${err}')
        continue
    }

    site.play(mut plbook) or {
        console.print_stderr('Error processing ${file}: ${err}')
        continue
    }
}

// Get all configured sites
site_names = site.list()

if site_names.len == 0 {
    console.print_stderr('No sites were configured')
    exit(1)
}

console.print_lf(2)
console.print_green('=== Configuration Complete ===')

// Display configured sites
for site_name in site_names {
    mut configured_site := site.get(name: site_name) or { continue }

    console.print_header('Site: ${site_name}')
    console.print_item('Title: ${configured_site.siteconfig.title}')
    console.print_item('Pages: ${configured_site.pages.len}')
    console.print_item('Description: ${configured_site.siteconfig.description}')

    // Show pages organized by category
    if configured_site.nav.my_sidebar.len > 0 {
        console.print_item('Navigation structure:')
        for nav_item in configured_site.nav.my_sidebar {
            match nav_item {
                site.NavDoc {
                    console.print_item(' - [Page] ${nav_item.label}')
                }
                site.NavCat {
                    console.print_item(' - [Category] ${nav_item.label}')
                    for sub_item in nav_item.items {
                        match sub_item {
                            site.NavDoc {
                                console.print_item('   - ${sub_item.label}')
                            }
                            else {}
                        }
                    }
                }
                else {}
            }
        }
    }

    console.print_lf(1)
}

println(console.color_fg(.green) + '✓ Site configuration ready for deployment' + console.reset())
@@ -45,7 +45,7 @@ fn addtoscript(tofind string, toadd string) ! {
// Reset symlinks (cleanup)
println('Resetting all symlinks...')
os.rm('${os.home_dir()}/.vmodules/incubaid/herolib') or {}
os.rm('${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {}
os.rm('${os.home_dir()}/.vmodules/incubaid/herolib') or {}

// Create necessary directories
os.mkdir_all('${os.home_dir()}/.vmodules/incubaid') or {

@@ -13,7 +13,7 @@ import incubaid.herolib.installers.lang.python
import os

fn startupcmd() ![]startupmanager.ZProcessNewArgs {
    _ := get()!
    mut installer := get()!
    mut res := []startupmanager.ZProcessNewArgs{}
    // THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
    // res << startupmanager.ZProcessNewArgs{

@@ -28,7 +28,7 @@ fn startupcmd() ![]startupmanager.ZProcessNewArgs {
}

fn running() !bool {
    _ := get()!
    mut installer := get()!
    // THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
    // this checks health of erpnext
    // curl http://localhost:3333/api/v1/s --oauth2-bearer 1234 works

@@ -16,7 +16,7 @@ pub mut:

pub fn (b BizModel) export(args ExportArgs) ! {
    name := if args.name != '' { args.name } else { texttools.snake_case(args.title) }
    pathlib.get_dir(
    path := pathlib.get_dir(
        path: os.join_path(os.home_dir(), 'hero/var/bizmodel/exports/${name}')
        create: true
        empty: true

@@ -52,7 +52,7 @@ pub fn (model BizModel) write_operational_plan(args ExportArgs) ! {
    mut hr_page := pathlib.get_file(path: '${hr_dir.path}/human_resources.md')!
    hr_page.template_write($tmpl('./templates/human_resources.md'), true)!

    for _, employee in model.employees {
    for key, employee in model.employees {
        mut employee_page := pathlib.get_file(
            path: '${hr_dir.path}/${texttools.snake_case(employee.name)}.md'
        )!

@@ -73,7 +73,7 @@ pub fn (model BizModel) write_operational_plan(args ExportArgs) ! {
        }
    }

    for _, department in model.departments {
    for key, department in model.departments {
        dept := department
        mut dept_page := pathlib.get_file(
            path: '${depts_dir.path}/${texttools.snake_case(department.name)}.md'

@@ -94,7 +94,7 @@ pub fn (model BizModel) write_revenue_model(args ExportArgs) ! {
    products_page.template_write('# Products', true)!

    name1 := 'example'
    for _, product in model.products {
    for key, product in model.products {
        mut product_page := pathlib.get_file(
            path: '${products_dir.path}/${texttools.snake_case(product.name)}.md'
        )!

@@ -7,7 +7,7 @@ import incubaid.herolib.core.pathlib
pub struct ExportCSVArgs {
pub mut:
    path string
    include_empty bool // whether to include empty cells or not
    include_empty bool = false // whether to include empty cells or not
    separator string = '|' // separator character for CSV
}

@@ -22,7 +22,7 @@ pub fn play(mut plbook PlayBook) ! {
    })

    // play actions for each biz in plbook
    for biz, _ in actions_by_biz {
    for biz, actions in actions_by_biz {
        mut model := getset(biz)!
        model.play(mut plbook)!
    }

@@ -8,7 +8,7 @@ import incubaid.herolib.core.playbook { Action }
// title:'Engineering Division'
// avg_monthly_cost:'6000USD' avg_indexation:'5%'
fn (mut m BizModel) department_define_action(action Action) !Action {
    _ := action.params.get_default('bizname', '')!
    bizname := action.params.get_default('bizname', '')!
    mut name := action.params.get('name') or { return error('department name is required') }
    mut descr := action.params.get_default('descr', '')!
    if descr.len == 0 {

@@ -74,7 +74,7 @@ fn (mut m BizModel) employee_define_action(action Action) !Action {
    mut curcost := -costpeople_row.cells[x].val
    mut curpeople := nrpeople_row.cells[x].val
    mut currev := revtotal.cells[x].val
    println("currev: ${currev}, curcost: ${curcost}, curpeople: ${curpeople}, costpercent_revenue: ${cost_percent_revenue}")
    // println("currev: ${currev}, curcost: ${curcost}, curpeople: ${curpeople}, costpercent_revenue: ${cost_percent_revenue}")
    if currev * cost_percent_revenue > curcost {
        costpeople_row.cells[x].val = -currev * cost_percent_revenue
        nrpeople_row.cells[x].val = f64(currev * cost_percent_revenue / costperson_default.usd())

@@ -10,7 +10,7 @@ fn (mut sim BizModel) pl_total() ! {

    // sheet.pprint(nr_columns: 10)!

    _ := sheet.group2row(
    mut pl_total := sheet.group2row(
        name: 'pl_summary'
        include: ['pl']
        tags: 'summary'

@@ -77,7 +77,7 @@ fn (mut m BizModel) revenue_action(action Action) !Action {
        product.has_revenue = true
    }

    _ := revenue.action(
    mut margin := revenue.action(
        name: '${r.name}_margin'
        descr: 'Margin for ${r.name}'
        action: .substract

@@ -6,7 +6,7 @@ import incubaid.herolib.core.texttools
// see lib/biz/bizmodel/docs/revenue.md
fn (mut m BizModel) revenue_item_action(action Action) !Action {
    mut r := get_action_descr(action)!
    mut product := m.products[r.name] or { return error('Product "${r.name}" not found for revenue item action') }
    mut product := m.products[r.name]

    mut nr_sold := m.sheet.row_new(
        name: '${r.name}_nr_sold'

@@ -193,7 +193,7 @@ fn (mut m BizModel) revenue_item_action(action Action) !Action {
        tags: 'name:${r.name}'
    )!

    _ := margin_setup.action(
    mut margin := margin_setup.action(
        name: '${r.name}_margin'
        descr: 'Margin for ${r.name}'
        action: .add

@@ -6,19 +6,19 @@ import incubaid.herolib.core.playbook
fn (mut sim BizModel) revenue_total() ! {
    mut sheet := sim.sheet

    _ := sheet.group2row(
    mut revenue_total := sheet.group2row(
        name: 'revenue_total'
        include: ['rev']
        tags: 'total revtotal pl'
        descr: 'Revenue Total'
    )!
    _ := sheet.group2row(
    mut cogs_total := sheet.group2row(
        name: 'cogs_total'
        include: ['cogs']
        tags: 'total cogstotal pl'
        descr: 'Cost of Goods Total.'
    )!
    _ := sheet.group2row(
    mut margin_total := sheet.group2row(
        name: 'margin_total'
        include: ['margin']
        tags: 'total margintotal'

@@ -7,7 +7,7 @@ import incubaid.herolib.core.pathlib
pub struct ExportCSVArgs {
pub mut:
    path string
    include_empty bool // whether to include empty cells or not
    include_empty bool = false // whether to include empty cells or not
    separator string = '|' // separator character for CSV
}

@@ -118,23 +118,23 @@ pub fn (s Sheet) data_get_as_string(args RowGetArgs) !string {
    }
    nryears := 5
    err_pre := "Can't get data for sheet:${s.name} row:${args.rowname}.\n"
    mut s2 := s
    mut s2 := s

    if args.period_type == .year {
        s2 = *s.toyear(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
        s2 = s.toyear(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
    }
    if args.period_type == .quarter {
        s2 = *s.toquarter(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
        s2 = s.toquarter(
            name: args.rowname
            namefilter: args.namefilter
            includefilter: args.includefilter
            excludefilter: args.excludefilter
        )!
    }
    mut out := ''

@@ -20,7 +20,7 @@ fn pad_right(s string, length int) string {
pub struct PPrintArgs {
pub mut:
    group_months int = 1 // e.g. if 2 then will group by 2 months
    nr_columns int // number of columns to show in the table, 0 is all
    nr_columns int = 0 // number of columns to show in the table, 0 is all
    description bool // show description in the table
    aggrtype bool = true // show aggregate type in the table
    tags bool = true // show tags in the table

@@ -151,7 +151,7 @@ pub fn (mut s Sheet) pprint(args PPrintArgs) ! {
    }
    max_cols := data_start_index + args.nr_columns
    mut new_all_rows := [][]string{}
    for _, row in all_rows {
    for i, row in all_rows {
        if row.len > max_cols {
            new_all_rows << row[0..max_cols]
        } else {

@@ -67,9 +67,7 @@ pub fn (mut node Node) hero_install(args HeroInstallArgs) ! {
            todo << 'bash /tmp/install_v.sh --herolib '
        }
    }
    // Use exec instead of exec_interactive since user interaction is not needed
    // exec_interactive uses shell mode which replaces the process and never returns
    node.exec(cmd: todo.join('\n'), stdout: true)!
    node.exec_interactive(todo.join('\n'))!
}

@[params]

@@ -99,11 +99,8 @@ pub fn (mut executor ExecutorLocal) download(args SyncArgs) ! {
}

pub fn (mut executor ExecutorLocal) shell(cmd string) ! {
    // Note: os.execvp replaces the current process and never returns.
    // This is intentional - shell() is designed to hand over control to the shell.
    // Do not put shell() before any other code that needs to execute.
    if cmd.len > 0 {
        os.execvp('/bin/bash', ['-c', cmd])!
        os.execvp('/bin/bash', ["-c '${cmd}'"])!
    } else {
        os.execvp('/bin/bash', [])!
    }

@@ -235,12 +235,11 @@ pub fn (mut executor ExecutorSSH) info() map[string]string {
// forwarding ssh traffic to certain container

pub fn (mut executor ExecutorSSH) shell(cmd string) ! {
    mut args := ['-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null',
        '${executor.user}@${executor.ipaddr.addr}', '-p', '${executor.ipaddr.port}']
    if cmd.len > 0 {
        args << cmd
        panic('TODO IMPLEMENT SHELL EXEC OVER SSH')
    }
    os.execvp('ssh', args)!
    os.execvp('ssh', ['-o StrictHostKeyChecking=no', '${executor.user}@${executor.ipaddr.addr}',
        '-p ${executor.ipaddr.port}'])!
}

pub fn (mut executor ExecutorSSH) list(path string) ![]string {

@@ -1,4 +1,4 @@
module builder

// pub fn (mut node Node) ubuntu_sources_fix() {
// }
pub fn (mut node Node) ubuntu_sources_fix() {
}

@@ -228,7 +228,7 @@ pub fn (mut client MeilisearchClient) similar_documents(uid string, args Similar
        method: .post
        data: json.encode(args)
    }
    client.enable_eperimental_feature(vector_store: true)! // Enable the feature first.
    res := client.enable_eperimental_feature(vector_store: true)! // Enable the feature first.
    mut http := client.httpclient()!
    rsponse := http.post_json_str(req)!
    println('rsponse: ${rsponse}')

@@ -19,7 +19,7 @@ pub mut:
    user string = 'root'
    port int = 5432
    host string = 'localhost'
    password string
    password string = ''
    dbname string = 'postgres'
}

@@ -52,7 +52,8 @@ pub fn heroscript_dumps(obj PostgresqlClient) !string {
}

pub fn heroscript_loads(heroscript string) !PostgresqlClient {
    mut client := encoderhero.decode[PostgresqlClient](heroscript)!
    client.db_ = pg.DB{}
    return client
    mut obj := encoderhero.decode[PostgresqlClient](heroscript)!
    return PostgresqlClient{
        db_: pg.DB{}
    }
}

@@ -114,5 +114,5 @@ fn (q QueryBuilder) build_query(args BuildQueryArgs) string {

fn type_to_map[T](t T) !map[string]json2.Any {
    encoded_input := json2.encode(t)
    return json2.decode[json2.Any](encoded_input)!.as_map()
    return json2.raw_decode(encoded_input)!.as_map()
}

@@ -11,13 +11,8 @@ pub fn parse_const(code_ string) !Const {
    if !code.contains('=') {
        return error('code <${code_}> is not of const')
    }
    mut name := code.split('=')[0].trim_space()
    // Strip 'const ' prefix if present
    if name.starts_with('const ') {
        name = name.trim_string_left('const ').trim_space()
    }
    return Const{
        name: name
        name: code.split('=')[0].trim_space()
        value: code.split('=')[1].trim_space()
    }
}

@@ -44,11 +44,6 @@ pub fn (p Param) typescript() string {
pub fn parse_param(code_ string) !Param {
    mut code := code_.trim_space()

    // Handle empty string (void return type)
    if code == '' {
        return Param{}
    }

    if code == '!' {
        return Param{
            is_result: true

@@ -65,13 +60,6 @@ pub fn parse_param(code_ string) !Param {
    }
    split := code.split(' ').filter(it != '')

    // Handle empty split (void return type after mut check)
    if split.len == 0 {
        return Param{
            mutable: is_mut
        }
    }

    if split.len == 1 {
        // means anonymous param
        return Param{

@@ -15,14 +15,14 @@ fn test_comprehensive_code_parsing() {
    console.print_lf(1)

    // Run all tests
    check_module_parsing()!
    check_struct_parsing()
    check_function_parsing()!
    check_imports_and_modules()
    check_type_system()
    check_visibility_modifiers()
    check_method_parsing()!
    check_constants_parsing()
    test_module_parsing()
    test_struct_parsing()
    test_function_parsing()
    test_imports_and_modules()
    test_type_system()
    test_visibility_modifiers()
    test_method_parsing()
    test_constants_parsing()

    console.print_green('✓ All comprehensive tests passed!')
    console.print_lf(1)

@@ -74,7 +74,7 @@ fn copy_directory(src string, dst string) ! {
    }
}

fn check_module_parsing() ! {
fn test_module_parsing() {
    console.print_header('Test 1: Module and File Parsing')

    mut myparser := new(path: '/tmp/codeparsertest', recursive: true)!

@@ -98,7 +98,7 @@ fn check_module_parsing() ! {
    console.print_lf(1)
}

fn check_struct_parsing() {
fn test_struct_parsing() {
    console.print_header('Test 2: Struct Parsing')

    models_file := os.join_path('/tmp/codeparsertest', 'models.v')

@@ -145,7 +145,7 @@ fn check_struct_parsing() {
    console.print_lf(1)
}

fn check_function_parsing() ! {
fn test_function_parsing() {
    console.print_header('Test 3: Function Parsing')

    mut myparser := new(path: '/tmp/codeparsertest', recursive: true)!

@@ -191,7 +191,7 @@ fn check_function_parsing() ! {
    console.print_lf(1)
}

fn check_imports_and_modules() {
fn test_imports_and_modules() {
    console.print_header('Test 4: Imports and Module Names')

    models_file := os.join_path('/tmp/codeparsertest', 'models.v')

@@ -222,7 +222,7 @@ fn check_imports_and_modules() {
    console.print_lf(1)
}

fn check_type_system() {
fn test_type_system() {
    console.print_header('Test 5: Type System')

    models_file := os.join_path('/tmp/codeparsertest', 'models.v')

@@ -257,7 +257,7 @@ fn check_type_system() {
    console.print_lf(1)
}

fn check_visibility_modifiers() {
fn test_visibility_modifiers() {
    console.print_header('Test 6: Visibility Modifiers')

    models_file := os.join_path('/tmp/codeparsertest', 'models.v')

@@ -293,7 +293,7 @@ fn check_visibility_modifiers() {
    console.print_lf(1)
}

fn check_method_parsing() ! {
fn test_method_parsing() {
    console.print_header('Test 7: Method Parsing')

    mut myparser := new(path: '/tmp/codeparsertest', recursive: true)!

@@ -327,7 +327,7 @@ fn check_method_parsing() ! {
    console.print_lf(1)
}

fn check_constants_parsing() {
fn test_constants_parsing() {
    console.print_header('Test 8: Constants Parsing')

    models_file := os.join_path('/tmp/codeparsertest', 'models.v')

@@ -1,19 +1,19 @@
module herocmds

import incubaid.herolib.ui.console
import incubaid.herolib.data.doctree
import incubaid.herolib.data.atlas
import incubaid.herolib.core.playcmds
import incubaid.herolib.develop.gittools
import incubaid.herolib.web.docusaurus
import os
import cli { Command, Flag }

pub fn cmd_doctree(mut cmdroot Command) Command {
pub fn cmd_atlas(mut cmdroot Command) Command {
    mut cmd_run := Command{
        name: 'doctree'
        description: 'Scan and export doctree collections.'
        name: 'atlas'
        description: 'Scan and export atlas collections.'
        required_args: 0
        execute: cmd_doctree_execute
        execute: cmd_atlas_execute
    }

    cmd_run.add_flag(Flag{

@@ -29,7 +29,7 @@ pub fn cmd_doctree(mut cmdroot Command) Command {
        required: false
        name: 'url'
        abbrev: 'u'
        description: 'Git URL where doctree source is.'
        description: 'Git URL where atlas source is.'
    })

    cmd_run.add_flag(Flag{

@@ -37,7 +37,7 @@ pub fn cmd_doctree(mut cmdroot Command) Command {
        required: false
        name: 'path'
        abbrev: 'p'
        description: 'Path where doctree collections are located.'
        description: 'Path where atlas collections are located.'
    })

    cmd_run.add_flag(Flag{

@@ -45,7 +45,7 @@ pub fn cmd_doctree(mut cmdroot Command) Command {
        required: false
        name: 'name'
        abbrev: 'n'
        description: 'DocTree instance name (default: "default").'
        description: 'Atlas instance name (default: "default").'
    })

    cmd_run.add_flag(Flag{

@@ -112,7 +112,7 @@ pub fn cmd_doctree(mut cmdroot Command) Command {
    return cmdroot
}

fn cmd_doctree_execute(cmd Command) ! {
fn cmd_atlas_execute(cmd Command) ! {
    // ---------- FLAGS ----------
    mut reset := cmd.flags.get_bool('reset') or { false }
    mut update := cmd.flags.get_bool('update') or { false }

@@ -138,27 +138,27 @@ fn cmd_doctree_execute(cmd Command) ! {
        path = os.getwd()
    }

    doctree_path := gittools.path(
    atlas_path := gittools.path(
        git_url: url
        path: path
        git_reset: reset
        git_pull: update
    )!

    console.print_header('Running DocTree for: ${doctree_path.path}')
    console.print_header('Running Atlas for: ${atlas_path.path}')

    // Run HeroScript if exists
    playcmds.run(
        heroscript_path: doctree_path.path
        heroscript_path: atlas_path.path
        reset: reset
        emptycheck: false
    )!

    // Create or get doctree instance
    mut a := if doctree.exists(name) {
        doctree.get(name)!
    // Create or get atlas instance
    mut a := if atlas.exists(name) {
        atlas.get(name)!
    } else {
        doctree.new(name: name)!
        atlas.new(name: name)!
    }

    // Default behavior: scan and export if no flags specified

@@ -170,13 +170,13 @@ fn cmd_doctree_execute(cmd Command) ! {
    // Execute operations
    if scan {
        console.print_header('Scanning collections...')
        a.scan(path: doctree_path.path)!
        a.scan(path: atlas_path.path)!
        console.print_green('✓ Scan complete: ${a.collections.len} collection(s) found')
    }

    if export {
        if destination == '' {
            destination = '${doctree_path.path}/output'
            destination = '${atlas_path.path}/output'
        }

        console.print_header('Exporting collections to: ${destination}')

@@ -203,14 +203,14 @@ fn cmd_doctree_execute(cmd Command) ! {
    // Run dev server if -dev flag is set
    if dev {
        console.print_header('Starting development server...')
        console.print_item('DocTree export directory: ${destination}')
        console.print_item('Looking for docusaurus configuration in: ${doctree_path.path}')
        console.print_item('Atlas export directory: ${destination}')
        console.print_item('Looking for docusaurus configuration in: ${atlas_path.path}')

        // Run the docusaurus dev server using the exported doctree content
        // This will look for a .heroscript file in the doctree_path that configures docusaurus
        // with use_doctree:true and doctree_export_dir pointing to the destination
        // Run the docusaurus dev server using the exported atlas content
        // This will look for a .heroscript file in the atlas_path that configures docusaurus
        // with use_atlas:true and atlas_export_dir pointing to the destination
        playcmds.run(
            heroscript_path: doctree_path.path
            heroscript_path: atlas_path.path
            reset: reset
        )!

@@ -114,7 +114,7 @@ fn cmd_docusaurus_execute(cmd Command) ! {
    // ---------- FLAGS ----------
    mut open_ := cmd.flags.get_bool('open') or { false }
    mut buildpublish := cmd.flags.get_bool('buildpublish') or { false }
    _ := cmd.flags.get_bool('builddevpublish') or { false }
    mut builddevpublish := cmd.flags.get_bool('builddevpublish') or { false }
    mut dev := cmd.flags.get_bool('dev') or { false }
    mut reset := cmd.flags.get_bool('reset') or { false }
    mut update := cmd.flags.get_bool('update') or { false }

@@ -217,7 +217,7 @@ fn cmd_git_execute(cmd Command) ! {
    mut gs := gittools.new(coderoot: coderoot)!

    // create the filter for doing group actions, or action on 1 repo
    _ := ''
    mut filter := ''
    mut url := ''
    mut path := ''

@@ -164,7 +164,7 @@ pub fn plbook_run(cmd Command) !(&playbook.PlayBook, string) {
        playbook.new(path: path)!
    }

    _ := cmd.flags.get_bool('dagu') or { false }
    dagu := cmd.flags.get_bool('dagu') or { false }

    playcmds.run(plbook: plbook)!

@@ -1,7 +1,7 @@
module playcmds

import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.data.doctree
import incubaid.herolib.data.atlas
import incubaid.herolib.biz.bizmodel
import incubaid.herolib.threefold.incatokens
import incubaid.herolib.web.site

@@ -20,7 +20,6 @@ import incubaid.herolib.installers.horus.herorunner
import incubaid.herolib.installers.horus.osirisrunner
import incubaid.herolib.installers.horus.salrunner
import incubaid.herolib.installers.virt.podman
import incubaid.herolib.installers.virt.kubernetes_installer
import incubaid.herolib.installers.infra.gitea
import incubaid.herolib.builder

@@ -71,7 +70,7 @@ pub fn run(args_ PlayArgs) ! {
    site.play(mut plbook)!

    incatokens.play(mut plbook)!
    doctree.play(mut plbook)!
    atlas.play(mut plbook)!
    docusaurus.play(mut plbook)!
    hetznermanager.play(mut plbook)!
    hetznermanager.play2(mut plbook)!

@@ -81,7 +80,6 @@ pub fn run(args_ PlayArgs) ! {
    herolib.play(mut plbook)!
    vlang.play(mut plbook)!
    podman.play(mut plbook)!
    kubernetes_installer.play(mut plbook)!
    gitea.play(mut plbook)!

    giteaclient.play(mut plbook)!

@@ -11,7 +11,7 @@ pub fn play_ssh(mut plbook PlayBook) ! {
}

// Get or create a single SSH agent instance
_ := sshagent.new_single(sshagent.SSHAgentNewArgs{})!
mut agent := sshagent.new_single(sshagent.SSHAgentNewArgs{})!

// TO IMPLEMENT:

@@ -23,7 +23,7 @@ pub fn escape_regex_chars(s string) string {
// This function does not add implicit ^ and $ anchors, allowing for substring matches.
fn wildcard_to_regex(wildcard_pattern string) string {
    mut regex_pattern := ''
    for _, r in wildcard_pattern.runes() {
    for i, r in wildcard_pattern.runes() {
        match r {
            `*` {
                regex_pattern += '.*'

@@ -1,12 +1,12 @@
module core
module atlas

import incubaid.herolib.web.doctree
import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import incubaid.herolib.ui.console
import incubaid.herolib.data.paramsparser

@[heap]
pub struct DocTree {
pub struct Atlas {
pub mut:
    name string
    collections map[string]&Collection

@@ -14,7 +14,7 @@ pub mut:
}

// Create a new collection
fn (mut self DocTree) add_collection(mut path pathlib.Path) !Collection {
fn (mut self Atlas) add_collection(mut path pathlib.Path) !Collection {
    mut name := path.name_fix_no_ext()
    mut filepath := path.file_get('.collection')!
    content := filepath.read()!

@@ -24,17 +24,18 @@ fn (mut self DocTree) add_collection(mut path pathlib.Path) !Collection {
            name = params.get('name')!
        }
    }
    name = doctree.name_fix(name)
    console.print_item("Adding collection '${name}' to DocTree '${self.name}' at path '${path.path}'")

    name = texttools.name_fix(name)
    console.print_item("Adding collection '${name}' to Atlas '${self.name}' at path '${path.path}'")

    if name in self.collections {
        return error('Collection ${name} already exists in DocTree ${self.name}')
        return error('Collection ${name} already exists in Atlas ${self.name}')
    }

    mut c := Collection{
        name: name
        path: path.path // absolute path
        doctree: &self // Set doctree reference
        atlas: &self // Set atlas reference
        error_cache: map[string]bool{}
    }

@@ -46,24 +47,38 @@ fn (mut self DocTree) add_collection(mut path pathlib.Path) !Collection {
}

// Get a collection by name
pub fn (a DocTree) get_collection(name string) !&Collection {
pub fn (a Atlas) get_collection(name string) !&Collection {
    return a.collections[name] or {
        return CollectionNotFound{
            name: name
            msg: 'Collection not found in DocTree ${a.name}'
            msg: 'Collection not found in Atlas ${a.name}'
        }
    }
}

// Validate all links in all collections
pub fn (mut a DocTree) init_post() ! {
pub fn (mut a Atlas) init_post() ! {
    for _, mut col in a.collections {
        col.init_post()!
    }
}

// Add a group to the doctree
pub fn (mut a DocTree) group_add(mut group Group) ! {
// Validate all links in all collections
pub fn (mut a Atlas) validate_links() ! {
    for _, mut col in a.collections {
        col.validate_links()!
    }
}

// Fix all links in all collections (rewrite source files)
pub fn (mut a Atlas) fix_links() ! {
    for _, mut col in a.collections {
        col.fix_links()!
    }
}

// Add a group to the atlas
pub fn (mut a Atlas) group_add(mut group Group) ! {
    if group.name in a.groups {
        return error('Group ${group.name} already exists')
    }

@@ -71,13 +86,13 @@ pub fn (mut a DocTree) group_add(mut group Group) ! {
}

// Get a group by name
pub fn (a DocTree) group_get(name string) !&Group {
    name_lower := doctree.name_fix(name)
pub fn (a Atlas) group_get(name string) !&Group {
    name_lower := texttools.name_fix(name)
    return a.groups[name_lower] or { return error('Group ${name} not found') }
}

// Get all groups matching a session's email
pub fn (a DocTree) groups_get(session Session) []&Group {
pub fn (a Atlas) groups_get(session Session) []&Group {
    mut matching := []&Group{}

    email_lower := session.email.to_lower()

@@ -102,7 +117,7 @@ pub mut:
    ignore []string // list of directory names to ignore
}

pub fn (mut a DocTree) scan(args ScanArgs) ! {
pub fn (mut a Atlas) scan(args ScanArgs) ! {
    mut path := pathlib.get_dir(path: args.path)!
    mut ignore := args.ignore.clone()
    ignore = ignore.map(it.to_lower())

@@ -110,7 +125,7 @@ pub fn (mut a DocTree) scan(args ScanArgs) ! {
}

// Scan a directory for collections
fn (mut a DocTree) scan_(mut dir pathlib.Path, ignore_ []string) ! {
fn (mut a Atlas) scan_(mut dir pathlib.Path, ignore_ []string) ! {
    console.print_item('Scanning directory: ${dir.path}')
    if !dir.is_dir() {
        return error('Path is not a directory: ${dir.path}')
lib/data/atlas/atlas_save_test.v (new file, 207 lines)
@@ -0,0 +1,207 @@
module atlas

import incubaid.herolib.core.pathlib
import os

const test_dir = '/tmp/atlas_save_test'

fn testsuite_begin() {
    os.rmdir_all(test_dir) or {}
    os.mkdir_all(test_dir)!
}

fn testsuite_end() {
    os.rmdir_all(test_dir) or {}
}

fn test_save_and_load_basic() {
    // Create a collection with some content
    col_path := '${test_dir}/docs'
    os.mkdir_all(col_path)!

    mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
    cfile.write('name:docs')!

    mut page1 := pathlib.get_file(path: '${col_path}/intro.md', create: true)!
    page1.write('# Introduction\n\nWelcome to the docs!')!

    mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
    page2.write('# Guide\n\nMore content here.')!

    // Create and scan atlas
    mut a := new(name: 'my_docs')!
    a.scan(path: test_dir)!

    assert a.collections.len == 1

    // Save all collections
    // a.save(destination_meta: '/tmp/atlas_meta')!
    // assert os.exists('${col_path}/.collection.json')

    // // Load in a new atlas
    // mut a2 := new(name: 'loaded_docs')!
    // a2.load_from_directory(test_dir)!

    // assert a2.collections.len == 1

    // // Access loaded data
    // loaded_col := a2.get_collection('docs')!
    // assert loaded_col.name == 'docs'
    // assert loaded_col.pages.len == 2

    // // Verify pages exist
    // assert loaded_col.page_exists('intro')
    // assert loaded_col.page_exists('guide')

    // // Read page content
    // mut intro_page := loaded_col.page_get('intro')!
    // content := intro_page.read_content()!
    // assert content.contains('# Introduction')
    // assert content.contains('Welcome to the docs!')
}

fn test_save_and_load_with_includes() {
    col_path := '${test_dir}/docs_include'
    os.mkdir_all(col_path)!

    mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
    cfile.write('name:docs')!

    mut page1 := pathlib.get_file(path: '${col_path}/intro.md', create: true)!
    page1.write('# Introduction\n\nWelcome to the docs!')!

    mut page2 := pathlib.get_file(path: '${col_path}/guide.md', create: true)!
    page2.write('# Guide\n\n!!include docs:intro\n\nMore content here.')!

    // Create and scan atlas
    mut a := new(name: 'my_docs')!
    a.scan(path: '${test_dir}/docs_include')!

    // Validate links (should find the include)
    a.validate_links()!

    col := a.get_collection('docs')!
    assert !col.has_errors()

    // // Save
    // a.save(destination_meta: '/tmp/atlas_meta')!

    // // Load
    // mut a2 := new(name: 'loaded')!
    // a2.load_from_directory('${test_dir}/docs_include')!

    // loaded_col := a2.get_collection('docs')!
    // assert loaded_col.pages.len == 2
    // assert !loaded_col.has_errors()
}

fn test_save_and_load_with_errors() {
    col_path := '${test_dir}/docs_errors'
    os.mkdir_all(col_path)!

    mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
    cfile.write('name:docs')!

    // Create page with broken link
    mut page1 := pathlib.get_file(path: '${col_path}/broken.md', create: true)!
    page1.write('[Broken link](nonexistent)')!

    // Create and scan atlas
    mut a := new(name: 'my_docs')!
    a.scan(path: '${test_dir}/docs_errors')!

    // Validate - will generate errors
    a.validate_links()!

    col := a.get_collection('docs')!
    assert col.has_errors()
    initial_error_count := col.errors.len

    // // Save with errors
    // a.save(destination_meta: '/tmp/atlas_meta')!

    // // Load
    // mut a2 := new(name: 'loaded')!
    // a2.load_from_directory('${test_dir}/docs_errors')!

    // loaded_col := a2.get_collection('docs')!
    // assert loaded_col.has_errors()
    // assert loaded_col.errors.len == initial_error_count
    // assert loaded_col.error_cache.len == initial_error_count
}

fn test_save_and_load_multiple_collections() {
    // Create multiple collections
    col1_path := '${test_dir}/multi/col1'
    col2_path := '${test_dir}/multi/col2'

    os.mkdir_all(col1_path)!
    os.mkdir_all(col2_path)!

    mut cfile1 := pathlib.get_file(path: '${col1_path}/.collection', create: true)!
    cfile1.write('name:col1')!

    mut cfile2 := pathlib.get_file(path: '${col2_path}/.collection', create: true)!
    cfile2.write('name:col2')!

    mut page1 := pathlib.get_file(path: '${col1_path}/page1.md', create: true)!
    page1.write('# Page 1')!

    mut page2 := pathlib.get_file(path: '${col2_path}/page2.md', create: true)!
    page2.write('# Page 2')!

    // Create and save
    mut a := new(name: 'multi')!
    a.scan(path: '${test_dir}/multi')!

    assert a.collections.len == 2

    // a.save(destination_meta: '/tmp/atlas_meta')!

    // // Load from directory
    // mut a2 := new(name: 'loaded')!
    // a2.load_from_directory('${test_dir}/multi')!

    // assert a2.collections.len == 2
    // assert a2.get_collection('col1')!.page_exists('page1')
    // assert a2.get_collection('col2')!.page_exists('page2')
}

fn test_save_and_load_with_images() {
    col_path := '${test_dir}/docs_images'
    os.mkdir_all(col_path)!
    os.mkdir_all('${col_path}/img')!

    mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
    cfile.write('name:docs')!

    mut page := pathlib.get_file(path: '${col_path}/page.md', create: true)!
    page.write('# Page with image')!

    // Create a dummy image file
    mut img := pathlib.get_file(path: '${col_path}/img/test.png', create: true)!
    img.write('fake png data')!

    // Create and scan
    mut a := new(name: 'my_docs')!
    a.scan(path: '${test_dir}/docs_images')!

    col := a.get_collection('docs')!
    // assert col.images.len == 1
    assert col.image_exists('test.png')!

    // // Save
    // a.save(destination_meta: '/tmp/atlas_meta')!

    // // Load
    // mut a2 := new(name: 'loaded')!
    // a2.load_from_directory('${test_dir}/docs_images')!

    // loaded_col := a2.get_collection('docs')!
    // assert loaded_col.images.len == 1
    // assert loaded_col.image_exists('test.png')!

    img_file := col.image_get('test.png')!
    assert img_file.name == 'test.png'
    assert img_file.is_image()
}
@@ -1,34 +1,26 @@
module core
module atlas

import incubaid.herolib.core.pathlib
import os
import json

const test_base = '/tmp/doctree_test'
const test_base = '/tmp/atlas_test'

// Clean up before and after each test
fn setup_test() {
fn testsuite_begin() {
    os.rmdir_all(test_base) or {}
    os.mkdir_all(test_base) or {}
    os.mkdir_all(test_base)!
}

fn cleanup_test() {
fn testsuite_end() {
    os.rmdir_all(test_base) or {}
}

fn test_create_doctree() {
    setup_test()
    defer { cleanup_test() }

    mut a := new(name: 'test_doctree')!
    assert a.name == 'test_doctree'
fn test_create_atlas() {
    mut a := new(name: 'test_atlas')!
    assert a.name == 'test_atlas'
    assert a.collections.len == 0
}

fn test_add_collection() {
    setup_test()
    defer { cleanup_test() }

    // Create test collection
    col_path := '${test_base}/col1'
    os.mkdir_all(col_path)!

@@ -46,9 +38,6 @@ fn test_add_collection() {
}

fn test_scan() {
    setup_test()
    defer { cleanup_test() }

    // Create test structure
    os.mkdir_all('${test_base}/docs/guides')!
    mut cfile := pathlib.get_file(path: '${test_base}/docs/guides/.collection', create: true)!

@@ -66,9 +55,6 @@ fn test_scan() {
}

fn test_export() {
    setup_test()
    defer { cleanup_test() }

    // Setup
    col_path := '${test_base}/source/col1'
    export_path := '${test_base}/export'

@@ -90,9 +76,6 @@ fn test_export() {
}

fn test_export_with_includes() {
    setup_test()
    defer { cleanup_test() }

    // Setup: Create pages with includes
    col_path := '${test_base}/include_test'
    os.mkdir_all(col_path)!

@@ -112,7 +95,7 @@ fn test_export_with_includes() {
    a.add_collection(mut pathlib.get_dir(path: col_path)!)!

    export_path := '${test_base}/export_include'
    a.export(destination: export_path, include: true, redis: false)!
    a.export(destination: export_path, include: true)!

    // Verify exported page1 has page2 content included
    exported := os.read_file('${export_path}/content/test_col/page1.md')!

@@ -122,9 +105,6 @@ fn test_export_with_includes() {
}

fn test_export_without_includes() {
    setup_test()
    defer { cleanup_test() }

    col_path := '${test_base}/no_include_test'
    os.mkdir_all(col_path)!

@@ -138,7 +118,7 @@ fn test_export_without_includes() {
    a.add_collection(mut pathlib.get_dir(path: col_path)!)!

    export_path := '${test_base}/export_no_include'
    a.export(destination: export_path, include: false, redis: false)!
    a.export(destination: export_path, include: false)!

    // Verify exported page1 still has include action
    exported := os.read_file('${export_path}/content/test_col2/page1.md')!

@@ -146,28 +126,18 @@ fn test_export_without_includes() {
}

fn test_error_deduplication() {
    setup_test()
    defer { cleanup_test() }

    mut a := new(name: 'test')!
    col_path := '${test_base}/err_dedup_col'
    os.mkdir_all(col_path)!
    mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
    cfile.write('name:err_dedup_col')!
    mut col := a.add_collection(mut pathlib.get_dir(path: col_path)!)!
    assert col.name == 'err_dedup_col' // Ensure collection is added correctly
}

fn test_error_hash() {
    setup_test()
    defer { cleanup_test() }
    // This test had no content, leaving it as a placeholder.
}

fn test_find_links() {
    setup_test()
    defer { cleanup_test() }

    col_path := '${test_base}/find_links_test'
    os.mkdir_all(col_path)!

@@ -187,11 +157,7 @@ fn test_find_links() {
    assert links.len >= 2
}

// Test with a valid link to ensure no errors are reported
fn test_find_links_valid_link() {
    setup_test()
    defer { cleanup_test() }

fn test_validate_links() {
    // Setup
    col_path := '${test_base}/link_test'
    os.mkdir_all(col_path)!

@@ -210,17 +176,15 @@ fn test_find_links_valid_link() {
    mut a := new()!
    a.add_collection(mut pathlib.get_dir(path: col_path)!)!

    // Validate
    a.validate_links()!

    // Should have no errors
    col := a.get_collection('test_col')!
    assert col.errors.len == 0

    a.export(destination: '${test_base}/export_links', redis: false)!
}

fn test_validate_broken_links() {
    setup_test()
    defer { cleanup_test() }

    // Setup
    col_path := '${test_base}/broken_link_test'
    os.mkdir_all(col_path)!

@@ -236,17 +200,13 @@ fn test_validate_broken_links() {
    a.add_collection(mut pathlib.get_dir(path: col_path)!)!

    // Validate
    a.export(destination: '${test_base}/validate_broken_links', redis: false)!
    a.validate_links()!

    // Should have error
    col := a.get_collection('test_col')!
    assert col.errors.len > 0
}

fn test_fix_links() {
    setup_test()
    defer { cleanup_test() }

    // Setup - all pages in same directory for simpler test
    col_path := '${test_base}/fix_link_test'
    os.mkdir_all(col_path)!

@@ -269,22 +229,20 @@ fn test_fix_links() {
    mut p := col.page_get('page1')!

    original := p.content()!
    assert original.contains('[Link](page2)')
    println('Original: ${original}')

    fixed := p.content_with_fixed_links(FixLinksArgs{
        include: true
        cross_collection: true
        export_mode: false
    })!
    println('Fixed: ${fixed}')

    // The fix_links should work on content
    assert fixed.contains('[Link](page2.md)')
}

fn test_link_formats() {
    setup_test()
    defer { cleanup_test() }

    col_path := '${test_base}/link_format_test'
    os.mkdir_all(col_path)!

@@ -310,9 +268,6 @@ fn test_link_formats() {
}

fn test_cross_collection_links() {
    setup_test()
    defer { cleanup_test() }

    // Setup two collections
    col1_path := '${test_base}/col1_cross'
    col2_path := '${test_base}/col2_cross'

@@ -338,19 +293,20 @@ fn test_cross_collection_links() {
    a.add_collection(mut pathlib.get_dir(path: col1_path)!)!
    a.add_collection(mut pathlib.get_dir(path: col2_path)!)!

    // Validate - should pass
    a.validate_links()!

    col1 := a.get_collection('col1')!
    assert col1.errors.len == 0

    a.export(destination: '${test_base}/export_cross', redis: false)!
    // Fix links - cross-collection links should NOT be rewritten
    a.fix_links()!

    fixed := page1.read()!
    assert fixed.contains('[Link to col2](col2:page2)') // Unchanged
}

fn test_save_and_load() {
    setup_test()
    defer { cleanup_test() }

    // Setup
    col_path := '${test_base}/save_test'
    os.mkdir_all(col_path)!

@@ -365,13 +321,9 @@ fn test_save_and_load() {
    mut a := new(name: 'test')!
    a.add_collection(mut pathlib.get_dir(path: col_path)!)!
    col := a.get_collection('test_col')!
    assert col.name == 'test_col'
}

fn test_save_with_errors() {
    setup_test()
    defer { cleanup_test() }
|
||||
|
||||
col_path := '${test_base}/error_save_test'
|
||||
os.mkdir_all(col_path)!
|
||||
|
||||
@@ -380,13 +332,9 @@ fn test_save_with_errors() {
|
||||
|
||||
mut a := new(name: 'test')!
|
||||
mut col := a.add_collection(mut pathlib.get_dir(path: col_path)!)!
|
||||
assert col.name == 'err_col' // Ensure collection is added correctly
|
||||
}
|
||||
|
||||
fn test_load_from_directory() {
|
||||
setup_test()
|
||||
defer { cleanup_test() }
|
||||
|
||||
// Setup multiple collections
|
||||
col1_path := '${test_base}/load_dir/col1'
|
||||
col2_path := '${test_base}/load_dir/col2'
|
||||
@@ -410,21 +358,16 @@ fn test_load_from_directory() {
|
||||
mut a := new(name: 'test')!
|
||||
a.add_collection(mut pathlib.get_dir(path: col1_path)!)!
|
||||
a.add_collection(mut pathlib.get_dir(path: col2_path)!)!
|
||||
|
||||
assert a.collections.len == 2
|
||||
}
|
||||
|
||||
fn test_get_edit_url() {
|
||||
setup_test()
|
||||
defer { cleanup_test() }
|
||||
|
||||
// Create a mock collection
|
||||
mut doctree := new(name: 'test_doctree')!
|
||||
mut atlas := new(name: 'test_atlas')!
|
||||
col_path := '${test_base}/git_test'
|
||||
os.mkdir_all(col_path)!
|
||||
mut cfile := pathlib.get_file(path: '${col_path}/.collection', create: true)!
|
||||
cfile.write('name:git_test_col')!
|
||||
mut col := doctree.add_collection(mut pathlib.get_dir(path: col_path)!)!
|
||||
mut col := atlas.add_collection(mut pathlib.get_dir(path: col_path)!)!
|
||||
col.git_url = 'https://github.com/test/repo.git' // Assuming git_url is a field on Collection
|
||||
// Create a mock page
|
||||
mut page_path := pathlib.get_file(path: '${col_path}/test_page.md', create: true)!
|
||||
@@ -433,112 +376,8 @@ fn test_get_edit_url() {
|
||||
|
||||
// Get the page and collection edit URLs
|
||||
page := col.page_get('test_page')!
|
||||
// No asserts in original, adding one for completeness
|
||||
assert page.name == 'test_page'
|
||||
}
|
||||
|
||||
fn test_export_recursive_links() {
|
||||
setup_test()
|
||||
defer { cleanup_test() }
|
||||
|
||||
// Create 3 collections with chained links
|
||||
col_a_path := '${test_base}/recursive_export/col_a'
|
||||
col_b_path := '${test_base}/recursive_export/col_b'
|
||||
col_c_path := '${test_base}/recursive_export/col_c'
|
||||
|
||||
os.mkdir_all(col_a_path)!
|
||||
os.mkdir_all(col_b_path)!
|
||||
os.mkdir_all(col_c_path)!
|
||||
|
||||
// Collection A: links to B
|
||||
mut cfile_a := pathlib.get_file(path: '${col_a_path}/.collection', create: true)!
|
||||
cfile_a.write('name:col_a')!
|
||||
mut page_a := pathlib.get_file(path: '${col_a_path}/page_a.md', create: true)!
|
||||
page_a.write('# Page A\n\nThis is page A.\n\n[Link to Page B](col_b:page_b)')!
|
||||
|
||||
// Collection B: links to C
|
||||
mut cfile_b := pathlib.get_file(path: '${col_b_path}/.collection', create: true)!
|
||||
cfile_b.write('name:col_b')!
|
||||
mut page_b := pathlib.get_file(path: '${col_b_path}/page_b.md', create: true)!
|
||||
page_b.write('# Page B\n\nThis is page B with link to C.\n\n[Link to Page C](col_c:page_c)')!
|
||||
|
||||
// Collection C: final page
|
||||
mut cfile_c := pathlib.get_file(path: '${col_c_path}/.collection', create: true)!
|
||||
cfile_c.write('name:col_c')!
|
||||
mut page_c := pathlib.get_file(path: '${col_c_path}/page_c.md', create: true)!
|
||||
page_c.write('# Page C\n\nThis is the final page in the chain.')!
|
||||
|
||||
// Create DocTree and add all collections
|
||||
mut a := new()!
|
||||
a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
|
||||
a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
|
||||
a.add_collection(mut pathlib.get_dir(path: col_c_path)!)!
|
||||
|
||||
// Export
|
||||
export_path := '${test_base}/export_recursive'
|
||||
a.export(destination: export_path, redis: false)!
|
||||
|
||||
// Verify directory structure exists
|
||||
assert os.exists('${export_path}/content'), 'Export content directory should exist'
|
||||
assert os.exists('${export_path}/content/col_a'), 'Collection col_a directory should exist'
|
||||
assert os.exists('${export_path}/meta'), 'Export meta directory should exist'
|
||||
|
||||
// Verify all pages exist in col_a export directory
|
||||
assert os.exists('${export_path}/content/col_a/page_a.md'), 'page_a.md should be exported'
|
||||
assert os.exists('${export_path}/content/col_a/page_b.md'), 'page_b.md from col_b should be included'
|
||||
assert os.exists('${export_path}/content/col_a/page_c.md'), 'page_c.md from col_c should be included'
|
||||
|
||||
// Verify metadata files exist
|
||||
assert os.exists('${export_path}/meta/col_a.json'), 'col_a metadata should exist'
|
||||
assert os.exists('${export_path}/meta/col_b.json'), 'col_b metadata should exist'
|
||||
assert os.exists('${export_path}/meta/col_c.json'), 'col_c metadata should exist'
|
||||
}
|
||||
|
||||
fn test_export_recursive_with_images() {
|
||||
setup_test()
|
||||
defer { cleanup_test() }
|
||||
|
||||
col_a_path := '${test_base}/recursive_img/col_a'
|
||||
col_b_path := '${test_base}/recursive_img/col_b'
|
||||
|
||||
os.mkdir_all(col_a_path)!
|
||||
os.mkdir_all(col_b_path)!
|
||||
os.mkdir_all('${col_a_path}/img')!
|
||||
os.mkdir_all('${col_b_path}/img')!
|
||||
|
||||
// Collection A with local image
|
||||
mut cfile_a := pathlib.get_file(path: '${col_a_path}/.collection', create: true)!
|
||||
cfile_a.write('name:col_a')!
|
||||
|
||||
mut page_a := pathlib.get_file(path: '${col_a_path}/page_a.md', create: true)!
|
||||
page_a.write('# Page A\n\n\n\n[Link to B](col_b:page_b)')!
|
||||
|
||||
// Create local image
|
||||
os.write_file('${col_a_path}/img/local.png', 'fake png data')!
|
||||
|
||||
// Collection B with image and linked page
|
||||
mut cfile_b := pathlib.get_file(path: '${col_b_path}/.collection', create: true)!
|
||||
cfile_b.write('name:col_b')!
|
||||
|
||||
mut page_b := pathlib.get_file(path: '${col_b_path}/page_b.md', create: true)!
|
||||
page_b.write('# Page B\n\n')!
|
||||
|
||||
// Create image in collection B
|
||||
os.write_file('${col_b_path}/img/b_image.jpg', 'fake jpg data')!
|
||||
|
||||
// Create DocTree
|
||||
mut a := new()!
|
||||
a.add_collection(mut pathlib.get_dir(path: col_a_path)!)!
|
||||
a.add_collection(mut pathlib.get_dir(path: col_b_path)!)!
|
||||
|
||||
export_path := '${test_base}/export_recursive_img'
|
||||
a.export(destination: export_path, redis: false)!
|
||||
|
||||
// Verify pages exported
|
||||
assert os.exists('${export_path}/content/col_a/page_a.md'), 'page_a should exist'
|
||||
assert os.exists('${export_path}/content/col_a/page_b.md'), 'page_b from col_b should be included'
|
||||
|
||||
// Verify images exported to col_a image directory
|
||||
assert os.exists('${export_path}/content/col_a/img/local.png'), 'Local image should exist'
|
||||
assert os.exists('${export_path}/content/col_a/img/b_image.jpg'), 'Image from cross-collection reference should be copied'
|
||||
// edit_url := page.get_edit_url()! // This method does not exist
|
||||
|
||||
// Assert the URLs are correct
|
||||
// assert edit_url == 'https://github.com/test/repo/edit/main/test_page.md'
|
||||
}
|
||||
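The recursive-export tests above lean on the `collection:page` link syntax to chain pages across collections. As a minimal sketch (the paths and names here are invented for illustration, not taken from the repo), a page that links into another collection is just markdown with a prefixed target:

```v
import incubaid.herolib.core.pathlib

// Hypothetical helper; 'col_x', 'col_y', and 'page_y' do not exist in the repo.
fn write_cross_link_page() ! {
    mut page := pathlib.get_file(path: '/tmp/demo/col_x/intro.md', create: true)!
    // 'col_y:page_y' targets another collection; a bare 'page_y' stays collection-local.
    page.write('# Intro\n\n[Next chapter](col_y:page_y)')!
}
```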
@@ -1,10 +1,10 @@
# DocTreeClient
# AtlasClient

A simple API for accessing document collections exported by the `doctree` module.
A simple API for accessing document collections exported by the `atlas` module.

## What It Does

DocTreeClient provides methods to:
AtlasClient provides methods to:

- List collections, pages, files, and images
- Check if resources exist
@@ -15,10 +15,10 @@ DocTreeClient provides methods to:
## Quick Start

```v
import incubaid.herolib.web.doctree_client
import incubaid.herolib.web.atlas_client

// Create client, exports will be in $/hero/var/doctree_export by default
mut client := doctree_client.new()!
// Create client
mut client := atlas_client.new(export_dir: '/tmp/atlas_export')!

// List collections
collections := client.list_collections()!
@@ -34,7 +34,7 @@ if client.has_errors('my_collection')! {

## Export Structure

DocTree exports to this structure:
Atlas exports to this structure:

```txt
export_dir/
@@ -87,9 +87,9 @@ Names are normalized using `name_fix()`:

## Example

See `examples/data/doctree_client/basic_usage.vsh` for a complete working example.
See `examples/data/atlas_client/basic_usage.vsh` for a complete working example.

## See Also

- `lib/data/doctree/` - DocTree module for exporting collections
- `lib/data/atlas/` - Atlas module for exporting collections
- `lib/web/doctreeclient/` - Alternative client for doctree collections
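As a minimal end-to-end read sketch against the renamed client (the export directory is an assumption; every method shown appears in the client code below):

```v
import incubaid.herolib.web.atlas_client

fn dump_export() ! {
    // Assumes an Atlas export already exists at this path.
    mut client := atlas_client.new(export_dir: '/tmp/atlas_export')!
    for col in client.list_collections()! {
        for page in client.list_pages(col)! {
            content := client.get_page_content(col, page)!
            println('${col}:${page} (${content.len} bytes)')
        }
    }
}
```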
@@ -7,17 +7,17 @@ import os
import json
import incubaid.herolib.core.redisclient

// DocTreeClient provides access to exported documentation collections
// AtlasClient provides access to Atlas-exported documentation collections
// It reads from both the exported directory structure and Redis metadata
pub struct DocTreeClient {
pub struct AtlasClient {
pub mut:
    redis &redisclient.Redis
    export_dir string // Path to the doctree export directory (contains content/ and meta/)
    export_dir string // Path to the atlas export directory (contains content/ and meta/)
}

// get_page_path returns the path for a page in a collection
// Pages are stored in {export_dir}/content/{collection}/{page}.md
pub fn (mut c DocTreeClient) get_page_path(collection_name string, page_name string) !string {
pub fn (mut c AtlasClient) get_page_path(collection_name string, page_name string) !string {
    // Apply name normalization
    fixed_collection_name := texttools.name_fix(collection_name)
    fixed_page_name := texttools.name_fix(page_name)
@@ -40,9 +40,9 @@ pub fn (mut c DocTreeClient) get_page_path(collection_name string, page_name str

// get_file_path returns the path for a file in a collection
// Files are stored in {export_dir}/content/{collection}/{filename}
pub fn (mut c DocTreeClient) get_file_path(collection_name_ string, file_name_ string) !string {
    collection_name := texttools.name_fix(collection_name_)
    file_name := texttools.name_fix(file_name_)
pub fn (mut c AtlasClient) get_file_path(collection_name_ string, file_name_ string) !string {
    collection_name := texttools.name_fix_no_ext(collection_name_)
    file_name := texttools.name_fix_keepext(file_name_)

    // Check if export directory exists
    if !os.exists(c.export_dir) {
@@ -62,11 +62,11 @@ pub fn (mut c DocTreeClient) get_file_path(collection_name_ string, file_name_ s

// get_image_path returns the path for an image in a collection
// Images are stored in {export_dir}/content/{collection}/{imagename}
pub fn (mut c DocTreeClient) get_image_path(collection_name_ string, image_name_ string) !string {
pub fn (mut c AtlasClient) get_image_path(collection_name_ string, image_name_ string) !string {
    // Apply name normalization
    collection_name := texttools.name_fix(collection_name_)
    collection_name := texttools.name_fix_no_ext(collection_name_)
    // Images keep their original names with extensions
    image_name := texttools.name_fix(image_name_)
    image_name := texttools.name_fix_keepext(image_name_)

    // Check if export directory exists
    if !os.exists(c.export_dir) {
@@ -85,28 +85,28 @@ pub fn (mut c DocTreeClient) get_image_path(collection_name_ string, image_name_
}

// page_exists checks if a page exists in a collection
pub fn (mut c DocTreeClient) page_exists(collection_name string, page_name string) bool {
pub fn (mut c AtlasClient) page_exists(collection_name string, page_name string) bool {
    // Try to get the page path - if it succeeds, the page exists
    _ := c.get_page_path(collection_name, page_name) or { return false }
    return true
}

// file_exists checks if a file exists in a collection
pub fn (mut c DocTreeClient) file_exists(collection_name string, file_name string) bool {
pub fn (mut c AtlasClient) file_exists(collection_name string, file_name string) bool {
    // Try to get the file path - if it succeeds, the file exists
    _ := c.get_file_path(collection_name, file_name) or { return false }
    return true
}

// image_exists checks if an image exists in a collection
pub fn (mut c DocTreeClient) image_exists(collection_name string, image_name string) bool {
pub fn (mut c AtlasClient) image_exists(collection_name string, image_name string) bool {
    // Try to get the image path - if it succeeds, the image exists
    _ := c.get_image_path(collection_name, image_name) or { return false }
    return true
}

// get_page_content returns the content of a page in a collection
pub fn (mut c DocTreeClient) get_page_content(collection_name string, page_name string) !string {
pub fn (mut c AtlasClient) get_page_content(collection_name string, page_name string) !string {
    // Get the path for the page
    page_path := c.get_page_path(collection_name, page_name)!

@@ -124,7 +124,7 @@ pub fn (mut c DocTreeClient) get_page_content(collection_name string, page_name

// list_collections returns a list of all collection names
// Collections are directories in {export_dir}/content/
pub fn (mut c DocTreeClient) list_collections() ![]string {
pub fn (mut c AtlasClient) list_collections() ![]string {
    content_dir := os.join_path(c.export_dir, 'content')

    // Check if content directory exists
@@ -148,7 +148,7 @@ pub fn (mut c DocTreeClient) list_collections() ![]string {

// list_pages returns a list of all page names in a collection
// Uses metadata to get the authoritative list of pages that belong to this collection
pub fn (mut c DocTreeClient) list_pages(collection_name string) ![]string {
pub fn (mut c AtlasClient) list_pages(collection_name string) ![]string {
    // Get metadata which contains the authoritative list of pages
    metadata := c.get_collection_metadata(collection_name)!

@@ -162,7 +162,7 @@ pub fn (mut c DocTreeClient) list_pages(collection_name string) ![]string {
}

// list_files returns a list of all file names in a collection (excluding pages and images)
pub fn (mut c DocTreeClient) list_files(collection_name string) ![]string {
pub fn (mut c AtlasClient) list_files(collection_name string) ![]string {
    metadata := c.get_collection_metadata(collection_name)!
    mut file_names := []string{}
    for file_name, file_meta in metadata.files {
@@ -174,7 +174,7 @@ pub fn (mut c DocTreeClient) list_files(collection_name string) ![]string {
}

// list_images returns a list of all image names in a collection
pub fn (mut c DocTreeClient) list_images(collection_name string) ![]string {
pub fn (mut c AtlasClient) list_images(collection_name string) ![]string {
    metadata := c.get_collection_metadata(collection_name)!
    mut images := []string{}
    for file_name, file_meta in metadata.files {
@@ -187,7 +187,7 @@ pub fn (mut c DocTreeClient) list_images(collection_name string) ![]string {

// list_pages_map returns a map of collection names to a list of page names within that collection.
// The structure is map[collectionname][]pagename.
pub fn (mut c DocTreeClient) list_pages_map() !map[string][]string {
pub fn (mut c AtlasClient) list_pages_map() !map[string][]string {
    mut result := map[string][]string{}
    collections := c.list_collections()!

@@ -199,11 +199,38 @@ pub fn (mut c DocTreeClient) list_pages_map() !map[string][]string {
    return result
}

// list_markdown returns the collections and their pages in markdown format.
pub fn (mut c AtlasClient) list_markdown() !string {
    mut markdown_output := ''
    pages_map := c.list_pages_map()!

    if pages_map.len == 0 {
        return 'No collections or pages found in this atlas export.'
    }

    mut sorted_collections := pages_map.keys()
    sorted_collections.sort()

    for col_name in sorted_collections {
        page_names := pages_map[col_name]
        markdown_output += '## ${col_name}\n'
        if page_names.len == 0 {
            markdown_output += ' * No pages in this collection.\n'
        } else {
            for page_name in page_names {
                markdown_output += ' * ${page_name}\n'
            }
        }
        markdown_output += '\n' // Add a newline for spacing between collections
    }
    return markdown_output
}

// get_collection_metadata reads and parses the metadata JSON file for a collection
// Metadata is stored in {export_dir}/meta/{collection}.json
pub fn (mut c DocTreeClient) get_collection_metadata(collection_name string) !CollectionMetadata {
pub fn (mut c AtlasClient) get_collection_metadata(collection_name string) !CollectionMetadata {
    // Apply name normalization
    fixed_collection_name := texttools.name_fix(collection_name)
    fixed_collection_name := texttools.name_fix_no_ext(collection_name)

    meta_path := os.join_path(c.export_dir, 'meta', '${fixed_collection_name}.json')

@@ -220,95 +247,78 @@ pub fn (mut c DocTreeClient) get_collection_metadata(collection_name string) !Co
    return metadata
}

// get_page_links returns the links found in a page by reading the metadata
pub fn (mut c AtlasClient) get_page_links(collection_name string, page_name string) ![]LinkMetadata {
    // Get collection metadata
    metadata := c.get_collection_metadata(collection_name)!
    // Apply name normalization to page name
    fixed_page_name := texttools.name_fix_no_ext(page_name)

    // Find the page in metadata
    if fixed_page_name in metadata.pages {
        return metadata.pages[fixed_page_name].links
    }
    return error('page_not_found: Page "${page_name}" not found in collection metadata, for collection: "${collection_name}"')
}

// get_collection_errors returns the errors for a collection from metadata
pub fn (mut c DocTreeClient) get_collection_errors(collection_name string) ![]ErrorMetadata {
pub fn (mut c AtlasClient) get_collection_errors(collection_name string) ![]ErrorMetadata {
    metadata := c.get_collection_metadata(collection_name)!
    return metadata.errors
}

// has_errors checks if a collection has any errors
pub fn (mut c DocTreeClient) has_errors(collection_name string) bool {
pub fn (mut c AtlasClient) has_errors(collection_name string) bool {
    errors := c.get_collection_errors(collection_name) or { return false }
    return errors.len > 0
}

pub fn (mut c DocTreeClient) copy_collection(collection_name string, destination_path string) ! {
    // TODO: list over all pages, links & files and copy them to destination

pub fn (mut c AtlasClient) copy_images(collection_name string, page_name string, destination_path string) ! {
    // Get page links from metadata
    links := c.get_page_links(collection_name, page_name)!

    // Create img subdirectory
    mut img_dest := pathlib.get_dir(path: '${destination_path}/img', create: true)!

    // Copy only image links
    for link in links {
        if link.file_type != .image {
            continue
        }
        if link.status == .external {
            continue
        }
        // Get image path and copy
        img_path := c.get_image_path(link.target_collection_name, link.target_item_name)!
        mut src := pathlib.get_file(path: img_path)!
        src.copy(dest: '${img_dest.path}/${src.name_fix_keepext()}')!
        // console.print_debug('Copied image: ${src.path} to ${img_dest.path}/${src.name_fix_keepext()}')
    }
}

// // will copy all pages linked from a page to a destination directory as well as the page itself
// pub fn (mut c DocTreeClient) copy_pages(collection_name string, page_name string, destination_path string) ! {
// // TODO: copy page itself
// copy_files copies all non-image files from a page to a destination directory
// Files are placed in {destination}/files/ subdirectory
// Only copies files referenced in the page (via links)
pub fn (mut c AtlasClient) copy_files(collection_name string, page_name string, destination_path string) ! {
    // Get page links from metadata
    links := c.get_page_links(collection_name, page_name)!

// // Get page links from metadata
// links := c.get_page_links(collection_name, page_name)!
    // Create files subdirectory
    mut files_dest := pathlib.get_dir(path: '${destination_path}/files', create: true)!

// // Create img subdirectory
// mut img_dest := pathlib.get_dir(path: '${destination_path}', create: true)!

// // Copy only image links
// for link in links {
// if link.file_type != .page {
// continue
// }
// if link.status == .external {
// continue
// }
// // Get image path and copy
// img_path := c.get_page_path(link.target_collection_name, link.target_item_name)!
// mut src := pathlib.get_file(path: img_path)!
// src.copy(dest: '${img_dest.path}/${src.name_fix_no_ext()}')!
// console.print_debug(' ********. Copied page: ${src.path} to ${img_dest.path}/${src.name_fix_no_ext()}')
// }
// }

// pub fn (mut c DocTreeClient) copy_images(collection_name string, page_name string, destination_path string) ! {
// // Get page links from metadata
// links := c.get_page_links(collection_name, page_name)!

// // Create img subdirectory
// mut img_dest := pathlib.get_dir(path: '${destination_path}/img', create: true)!

// // Copy only image links
// for link in links {
// if link.file_type != .image {
// continue
// }
// if link.status == .external {
// continue
// }
// // Get image path and copy
// img_path := c.get_image_path(link.target_collection_name, link.target_item_name)!
// mut src := pathlib.get_file(path: img_path)!
// src.copy(dest: '${img_dest.path}/${src.name_fix_no_ext()}')!
// // console.print_debug('Copied image: ${src.path} to ${img_dest.path}/${src.name_fix()}')
// }
// }

// // copy_files copies all non-image files from a page to a destination directory
// // Files are placed in {destination}/files/ subdirectory
// // Only copies files referenced in the page (via links)
// pub fn (mut c DocTreeClient) copy_files(collection_name string, page_name string, destination_path string) ! {
// // Get page links from metadata
// links := c.get_page_links(collection_name, page_name)!

// // Create files subdirectory
// mut files_dest := pathlib.get_dir(path: '${destination_path}/files', create: true)!

// // Copy only file links (non-image files)
// for link in links {
// if link.file_type != .file {
// continue
// }
// if link.status == .external {
// continue
// }
// // println(link)
// // Get file path and copy
// file_path := c.get_file_path(link.target_collection_name, link.target_item_name)!
// mut src := pathlib.get_file(path: file_path)!
// // src.copy(dest: '${files_dest.path}/${src.name_fix_no_ext()}')!
// console.print_debug('Copied file: ${src.path} to ${files_dest.path}/${src.name_fix_no_ext()}')
// }
// }
    // Copy only file links (non-image files)
    for link in links {
        if link.file_type != .file {
            continue
        }
        if link.status == .external {
            continue
        }
        // println(link)
        // Get file path and copy
        file_path := c.get_file_path(link.target_collection_name, link.target_item_name)!
        mut src := pathlib.get_file(path: file_path)!
        // src.copy(dest: '${files_dest.path}/${src.name_fix_keepext()}')!
        console.print_debug('Copied file: ${src.path} to ${files_dest.path}/${src.name_fix_keepext()}')
    }
}
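The new `copy_images`/`copy_files` helpers can be combined to bundle a single page together with its referenced assets. A hedged sketch (the collection, page, and destination names are assumptions mirroring the test fixtures below):

```v
import incubaid.herolib.web.atlas_client

fn bundle_page() ! {
    mut client := atlas_client.new(export_dir: '/tmp/atlas_export')!
    // Images land in <dest>/img, non-image files in <dest>/files.
    client.copy_images('testcollection', 'page2', '/tmp/page_bundle')!
    client.copy_files('testcollection', 'page2', '/tmp/page_bundle')!
}
```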
@@ -5,7 +5,7 @@ import incubaid.herolib.core.texttools

// Helper function to create a test export directory structure
fn setup_test_export() string {
    test_dir := os.join_path(os.temp_dir(), 'doctree_client_test_${os.getpid()}')
    test_dir := os.join_path(os.temp_dir(), 'atlas_client_test_${os.getpid()}')

    // Clean up if exists
    if os.exists(test_dir) {
@@ -54,7 +54,28 @@ fn setup_test_export() string {
            "name": "page2",
            "path": "",
            "collection_name": "testcollection",
            "links": []
            "links": [
                {
                    "src": "logo.png",
                    "text": "logo",
                    "target": "logo.png",
                    "line": 3,
                    "target_collection_name": "testcollection",
                    "target_item_name": "logo.png",
                    "status": "ok",
                    "file_type": "image"
                },
                {
                    "src": "data.csv",
                    "text": "data",
                    "target": "data.csv",
                    "line": 4,
                    "target_collection_name": "testcollection",
                    "target_item_name": "data.csv",
                    "status": "ok",
                    "file_type": "file"
                }
            ]
        }
    },
    "files": {
@@ -89,7 +110,14 @@ fn setup_test_export() string {
        }
    },
    "files": {},
    "errors": []
    "errors": [
        {
            "category": "test",
            "page_key": "intro",
            "message": "Test error",
            "line": 10
        }
    ]
}'
    os.write_file(os.join_path(test_dir, 'meta', 'anothercollection.json'), metadata2) or {
        panic(err)
@@ -427,6 +455,23 @@ fn test_list_pages_map() {
    assert pages_map['anothercollection'].len == 1
}

// Test list_markdown
fn test_list_markdown() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    markdown := client.list_markdown() or { panic(err) }

    assert markdown.contains('testcollection')
    assert markdown.contains('anothercollection')
    assert markdown.contains('page1')
    assert markdown.contains('page2')
    assert markdown.contains('intro')
    assert markdown.contains('##')
    assert markdown.contains('*')
}

// Test get_collection_metadata - success
fn test_get_collection_metadata_success() {
    test_dir := setup_test_export()
@@ -440,6 +485,21 @@ fn test_get_collection_metadata_success() {
    assert metadata.errors.len == 0
}

// Test get_collection_metadata - with errors
fn test_get_collection_metadata_with_errors() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    metadata := client.get_collection_metadata('anothercollection') or { panic(err) }

    assert metadata.name == 'anothercollection'
    assert metadata.pages.len == 1
    assert metadata.errors.len == 1
    assert metadata.errors[0].message == 'Test error'
    assert metadata.errors[0].line == 10
}

// Test get_collection_metadata - not found
fn test_get_collection_metadata_not_found() {
    test_dir := setup_test_export()
@@ -453,17 +513,78 @@ fn test_get_collection_metadata_not_found() {
    assert false, 'Should have returned an error'
}

// Test get_page_links - success
fn test_get_page_links_success() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    links := client.get_page_links('testcollection', 'page2') or { panic(err) }

    assert links.len == 2
    assert links[0].target_item_name == 'logo.png'
    assert links[0].target_collection_name == 'testcollection'
    assert links[0].file_type == .image
}

// Test get_page_links - no links
fn test_get_page_links_empty() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    links := client.get_page_links('testcollection', 'page1') or { panic(err) }

    assert links.len == 0
}

// Test get_page_links - page not found
fn test_get_page_links_page_not_found() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    client.get_page_links('testcollection', 'nonexistent') or {
        assert err.msg().contains('page_not_found')
        return
    }
    assert false, 'Should have returned an error'
}

// Test get_collection_errors - success
fn test_get_collection_errors_success() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    errors := client.get_collection_errors('anothercollection') or { panic(err) }

    assert errors.len == 1
    assert errors[0].message == 'Test error'
}

// Test get_collection_errors - no errors
fn test_get_collection_errors_empty() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    errors := client.get_collection_errors('testcollection') or { panic(err) }

    assert errors.len == 0
}

// Test has_errors - true
fn test_has_errors_true() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    has_errors := client.has_errors('anothercollection')

    assert has_errors == true
}

// Test has_errors - false
fn test_has_errors_false() {
    test_dir := setup_test_export()
@@ -475,7 +596,7 @@ fn test_has_errors_false() {
    assert has_errors == false
}

// Test has_errors - collection not found returns false
// Test has_errors - collection not found
fn test_has_errors_collection_not_found() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }
@@ -492,16 +613,64 @@ fn test_copy_images_success() {
    defer { cleanup_test_export(test_dir) }

    dest_dir := os.join_path(os.temp_dir(), 'copy_dest_${os.getpid()}')
    defer { os.rmdir_all(dest_dir) or {} }

    os.mkdir_all(dest_dir) or { panic(err) }
    defer { cleanup_test_export(dest_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    client.copy_images('testcollection', 'page2', dest_dir) or { panic(err) }

    // Check that logo.png was copied to img subdirectory
    assert os.exists(os.join_path(dest_dir, 'img', 'logo.png'))
}

// Test copy_images - no images
fn test_copy_images_no_images() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    dest_dir := os.join_path(os.temp_dir(), 'copy_dest_empty_${os.getpid()}')
    os.mkdir_all(dest_dir) or { panic(err) }
    defer { cleanup_test_export(dest_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    client.copy_images('testcollection', 'page1', dest_dir) or { panic(err) }

    // Check that images were copied to img subdirectory
    assert os.exists(os.join_path(dest_dir, 'img', 'logo.png'))
    assert os.exists(os.join_path(dest_dir, 'img', 'banner.jpg'))
    // Should succeed even with no images
    assert true
}

// Test copy_files - success
fn test_copy_files_success() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    dest_dir := os.join_path(os.temp_dir(), 'copy_files_dest_${os.getpid()}')
    os.mkdir_all(dest_dir) or { panic(err) }
    defer { cleanup_test_export(dest_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    // Note: test data would need to be updated to have file links in page2
    // For now, this test demonstrates the pattern
    client.copy_files('testcollection', 'page2', dest_dir) or { panic(err) }

    // Check that files were copied to files subdirectory
    // assert os.exists(os.join_path(dest_dir, 'files', 'somefile.csv'))
}

// Test copy_files - no files
fn test_copy_files_no_files() {
    test_dir := setup_test_export()
    defer { cleanup_test_export(test_dir) }

    dest_dir := os.join_path(os.temp_dir(), 'copy_files_empty_${os.getpid()}')
    os.mkdir_all(dest_dir) or { panic(err) }
    defer { cleanup_test_export(dest_dir) }

    mut client := new(export_dir: test_dir) or { panic(err) }
    client.copy_files('testcollection', 'page1', dest_dir) or { panic(err) }

    // Should succeed even with no file links
    assert true
}

// Test naming normalization edge cases
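Several of these tests hinge on `texttools` name normalization; the variants differ only in how they treat file extensions. An illustrative sketch (the exact outputs depend on `texttools`' rules and are assumptions here):

```v
import incubaid.herolib.core.texttools

fn show_name_fix() {
    println(texttools.name_fix('My Collection'))          // e.g. 'my_collection'
    println(texttools.name_fix_keepext('Logo Image.PNG')) // normalizes but keeps the extension
    println(texttools.name_fix_no_ext('Page One.md'))     // normalizes and drops the extension
}
```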
@@ -3,18 +3,18 @@ module client
import incubaid.herolib.core.base

@[params]
pub struct DocTreeClientArgs {
pub struct AtlasClientArgs {
pub:
    export_dir string @[required] // Path to doctree export directory
    export_dir string @[required] // Path to atlas export directory
}

// Create a new DocTreeClient instance
// Create a new AtlasClient instance
// The export_dir should point to the directory containing content/ and meta/ subdirectories
pub fn new(args DocTreeClientArgs) !&DocTreeClient {
pub fn new(args AtlasClientArgs) !&AtlasClient {
    mut context := base.context()!
    mut redis := context.redis()!

    return &DocTreeClient{
    return &AtlasClient{
        redis: redis
        export_dir: args.export_dir
    }
@@ -1,6 +1,6 @@
module client

// DocTreeClient provides access to DocTree-exported documentation collections
// AtlasClient provides access to Atlas-exported documentation collections
// It reads from both the exported directory structure and Redis metadata

// List of recognized image file extensions
@@ -22,16 +22,6 @@ pub mut:
    path string
    collection_name string
    links []LinkMetadata
    title string
    description string
    questions []Question

}

pub struct Question {
pub mut:
    question string
    answer string
}

pub struct FileMetadata {
@@ -1,11 +1,11 @@
module core
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.web.doctree as doctreetools

import incubaid.herolib.core.texttools
import incubaid.herolib.develop.gittools
import incubaid.herolib.data.paramsparser { Params }
import incubaid.herolib.ui.console

import os

pub struct Session {
pub mut:
@@ -21,7 +21,7 @@ pub mut:
    path string // absolute path
    pages map[string]&Page
    files map[string]&File
    doctree &DocTree @[skip; str: skip]
    atlas &Atlas @[skip; str: skip]
    errors []CollectionError
    error_cache map[string]bool
    git_url string
@@ -41,7 +41,7 @@ fn (mut c Collection) init_pre() ! {
}

fn (mut c Collection) init_post() ! {
    c.find_links()!
    c.validate_links()!
    c.init_git_info()!
}

@@ -95,7 +95,7 @@ fn (mut c Collection) add_file(mut p pathlib.Path) ! {

// Get a page by name
pub fn (c Collection) page_get(name_ string) !&Page {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_no_ext(name_)
    return c.pages[name] or { return PageNotFound{
        collection: c.name
        page: name
@@ -104,7 +104,7 @@ pub fn (c Collection) page_get(name_ string) !&Page {

// Get an image by name
pub fn (c Collection) image_get(name_ string) !&File {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_keepext(name_)
    mut img := c.files[name] or { return FileNotFound{
        collection: c.name
        file: name
@@ -117,7 +117,7 @@ pub fn (c Collection) image_get(name_ string) !&File {

// Get a file by name
pub fn (c Collection) file_get(name_ string) !&File {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_keepext(name_)
    mut f := c.files[name] or { return FileNotFound{
        collection: c.name
        file: name
@@ -129,7 +129,7 @@ pub fn (c Collection) file_get(name_ string) !&File {
}

pub fn (c Collection) file_or_image_get(name_ string) !&File {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_keepext(name_)
    mut f := c.files[name] or { return FileNotFound{
        collection: c.name
        file: name
@@ -139,26 +139,26 @@ pub fn (c Collection) file_or_image_get(name_ string) !&File {

// Check if page exists
pub fn (c Collection) page_exists(name_ string) !bool {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_no_ext(name_)
    return name in c.pages
}

// Check if image exists
pub fn (c Collection) image_exists(name_ string) !bool {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_keepext(name_)
    f := c.files[name] or { return false }
    return f.ftype == .image
}

// Check if file exists
pub fn (c Collection) file_exists(name_ string) !bool {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_keepext(name_)
    f := c.files[name] or { return false }
    return f.ftype == .file
}

pub fn (c Collection) file_or_image_exists(name_ string) !bool {
    name := doctreetools.name_fix(name_)
    name := texttools.name_fix_keepext(name_)
    _ := c.files[name] or { return false }
    return true
}
@@ -247,6 +247,31 @@ pub fn (c Collection) print_errors() {
    }
}

// Validate all links in collection
pub fn (mut c Collection) validate_links() ! {
    for _, mut page in c.pages {
        content := page.content(include: true)!
        page.links = page.find_links(content)! // will walk over links, record errors for broken ones
    }
}

// Fix all links in collection (rewrite files)
pub fn (mut c Collection) fix_links() ! {
    for _, mut page in c.pages {
        // Read original content
        content := page.content()!

        // Fix links
        fixed_content := page.content_with_fixed_links()!

        // Write back if changed
        if fixed_content != content {
            mut p := page.path()!
            p.write(fixed_content)!
        }
    }
}

// Check if session can read this collection
pub fn (c Collection) can_read(session Session) bool {
    // If no ACL set, everyone can read
@@ -255,8 +280,8 @@ pub fn (c Collection) can_read(session Session) bool {
    }

    // Get user's groups
    mut doctree := c.doctree
    groups := doctree.groups_get(session)
    mut atlas := c.atlas
    groups := atlas.groups_get(session)
    group_names := groups.map(it.name)

    // Check if any of user's groups are in read ACL
@@ -277,8 +302,8 @@ pub fn (c Collection) can_write(session Session) bool {
    }

    // Get user's groups
    mut doctree := c.doctree
    groups := doctree.groups_get(session)
    mut atlas := c.atlas
    groups := atlas.groups_get(session)
    group_names := groups.map(it.name)

    // Check if any of user's groups are in write ACL
@@ -290,3 +315,104 @@ pub fn (c Collection) can_write(session Session) bool {

    return false
}

// Detect git repository URL for a collection
fn (mut c Collection) init_git_info() ! {
    mut current_path := c.path()!

    // Walk up directory tree to find .git
    mut git_repo := current_path.parent_find('.git') or {
        // No git repo found
        return
    }

    if git_repo.path == '' {
        panic('Unexpected empty git repo path')
    }

    mut gs := gittools.new()!
    mut p := c.path()!
    mut location := gs.gitlocation_from_path(p.path)!

    r := os.execute_opt('cd ${p.path} && git branch --show-current')!

    location.branch_or_tag = r.output.trim_space()

    c.git_url = location.web_url()!
}

//////////////// SCANNING FUNCTIONS ////////////////

fn (mut c Collection) scan(mut dir pathlib.Path) ! {
    mut entries := dir.list(recursive: false)!

    for mut entry in entries.paths {
        // Skip hidden files/dirs
        if entry.name().starts_with('.') || entry.name().starts_with('_') {
            continue
        }

        if entry.is_dir() {
            // Recursively scan subdirectories
            mut mutable_entry := entry
            c.scan(mut mutable_entry)!
            continue
        }

        // Process files based on extension
        match entry.extension_lower() {
            'md' {
                mut mutable_entry := entry
                c.add_page(mut mutable_entry)!
            }
            else {
                mut mutable_entry := entry
                c.add_file(mut mutable_entry)!
            }
        }
    }
}

// Scan for ACL files
fn (mut c Collection) scan_acl() ! {
    // Look for read.acl in collection directory
    read_acl_path := '${c.path()!.path}/read.acl'
    if os.exists(read_acl_path) {
        content := os.read_file(read_acl_path)!
        // Split by newlines and normalize
        c.acl_read = content.split('\n')
            .map(it.trim_space())
            .filter(it.len > 0)
            .map(it.to_lower())
    }

    // Look for write.acl in collection directory
    write_acl_path := '${c.path()!.path}/write.acl'
    if os.exists(write_acl_path) {
        content := os.read_file(write_acl_path)!
        // Split by newlines and normalize
        c.acl_write = content.split('\n')
            .map(it.trim_space())
            .filter(it.len > 0)
            .map(it.to_lower())
    }
}

// scan_groups scans the collection's directory for .group files and loads them into memory.
pub fn (mut c Collection) scan_groups() ! {
    if c.name != 'groups' {
        return error('scan_groups only works on "groups" collection')
    }
    mut p := c.path()!
    mut entries := p.list(recursive: false)!

    for mut entry in entries.paths {
        if entry.extension_lower() == 'group' {
            filename := entry.name_fix_no_ext()
            mut visited := map[string]bool{}
            mut group := parse_group_file(filename, c.path()!.path, mut visited)!

            c.atlas.group_add(mut group)!
        }
    }
}
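`scan_acl` above expects plain-text `read.acl`/`write.acl` files in the collection directory, one group name per line; entries are trimmed, lowercased, and empty lines dropped. A hedged setup sketch (the directory and group names are invented for illustration):

```v
import os

fn write_demo_acls() ! {
    // One group per line; 'editors' and 'reviewers' are hypothetical group names.
    os.write_file('/tmp/demo_col/read.acl', 'editors\nreviewers\n')!
    os.write_file('/tmp/demo_col/write.acl', 'editors\n')!
}
```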
@@ -1,7 +1,7 @@
module core
module atlas

import crypto.md5

import incubaid.herolib.ui.console

pub enum CollectionErrorCategory {
    circular_include
@@ -1,4 +1,4 @@
module core
module atlas

pub struct CollectionNotFound {
    Error
170
lib/data/atlas/export.v
Normal file
@@ -0,0 +1,170 @@
module atlas

import incubaid.herolib.core.pathlib
import incubaid.herolib.core.base
import json

@[params]
pub struct ExportArgs {
pub mut:
    destination string @[required]
    reset bool = true
    include bool = true
    redis bool = true
}

// Export all collections
pub fn (mut a Atlas) export(args ExportArgs) ! {
    mut dest := pathlib.get_dir(path: args.destination, create: true)!

    if args.reset {
        dest.empty()!
    }

    // Validate links before export to populate page.links
    a.validate_links()!

    for _, mut col in a.collections {
        col.export(
            destination: dest
            reset: args.reset
            include: args.include
            redis: args.redis
        )!
    }
}

@[params]
pub struct CollectionExportArgs {
pub mut:
    destination pathlib.Path @[required]
    reset bool = true
    include bool = true // process includes during export
    redis bool = true
}

// Export a single collection
pub fn (mut c Collection) export(args CollectionExportArgs) ! {
    // Create collection directory
    mut col_dir := pathlib.get_dir(
        path: '${args.destination.path}/content/${c.name}'
        create: true
    )!
    mut dir_meta := pathlib.get_dir(
        path: '${args.destination.path}/meta/'
        create: true
    )!

    if c.has_errors() {
        c.print_errors()
    }

    meta := json.encode_pretty(c)
    mut json_file := pathlib.get_file(
        path: '${dir_meta.path}/${c.name}.json'
        create: true
    )!
    json_file.write(meta)!

    // Track cross-collection pages and files that need to be copied for self-contained export
    mut cross_collection_pages := map[string]&Page{} // key: page.name, value: &Page
    mut cross_collection_files := map[string]&File{} // key: file.name, value: &File

    // First pass: export all pages in this collection and collect cross-collection references
    for _, mut page in c.pages {
        // Get content with includes processed and links transformed for export
        content := page.content_with_fixed_links(
            include: args.include
            cross_collection: true
            export_mode: true
        )!

        mut dest_file := pathlib.get_file(path: '${col_dir.path}/${page.name}.md', create: true)!
        dest_file.write(content)!

        // Collect cross-collection references for copying (pages and files/images)
        // IMPORTANT: Use cached links from validation (before transformation) to preserve collection info
        for mut link in page.links {
            if link.status != .found {
                continue
            }

            // Collect cross-collection page references
            is_local := link.target_collection_name == c.name
            if link.file_type == .page && !is_local {
                mut target_page := link.target_page() or { continue }
                // Use page name as key to avoid duplicates
                if target_page.name !in cross_collection_pages {
                    cross_collection_pages[target_page.name] = target_page
                }
            }

            // Collect cross-collection file/image references
            if (link.file_type == .file || link.file_type == .image) && !is_local {
                mut target_file := link.target_file() or { continue }
                // Use file name as key to avoid duplicates
                file_key := target_file.name
                if file_key !in cross_collection_files {
                    cross_collection_files[file_key] = target_file
                }
            }
        }

        // Redis operations...
        if args.redis {
            mut context := base.context()!
            mut redis := context.redis()!
            redis.hset('atlas:${c.name}', page.name, page.path)!
        }
    }

    // Copy all files/images from this collection to the export directory
    for _, mut file in c.files {
        mut src_file := file.path()!

        // Determine subdirectory based on file type
        mut subdir := if file.is_image() { 'img' } else { 'files' }

        // Ensure subdirectory exists
        mut subdir_path := pathlib.get_dir(
            path: '${col_dir.path}/${subdir}'
            create: true
        )!

        mut dest_path := '${subdir_path.path}/${file.name}'
        mut dest_file := pathlib.get_file(path: dest_path, create: true)!
        src_file.copy(dest: dest_file.path)!
    }

    // Second pass: copy cross-collection referenced pages to make collection self-contained
    for _, mut ref_page in cross_collection_pages {
        // Get the referenced page content with includes processed
        ref_content := ref_page.content_with_fixed_links(
            include: args.include
            cross_collection: true
            export_mode: true
        )!

        // Write the referenced page to this collection's directory
        mut dest_file := pathlib.get_file(path: '${col_dir.path}/${ref_page.name}.md', create: true)!
        dest_file.write(ref_content)!
    }

    // Third pass: copy cross-collection referenced files/images to make collection self-contained
    for _, mut ref_file in cross_collection_files {
        mut src_file := ref_file.path()!

        // Determine subdirectory based on file type
        mut subdir := if ref_file.is_image() { 'img' } else { 'files' }

        // Ensure subdirectory exists
        mut subdir_path := pathlib.get_dir(
            path: '${col_dir.path}/${subdir}'
            create: true
        )!

        mut dest_path := '${subdir_path.path}/${ref_file.name}'
        mut dest_file := pathlib.get_file(path: dest_path, create: true)!
        src_file.copy(dest: dest_file.path)!
    }
}
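A minimal sketch of driving the exporter above from inside the `atlas` module; the atlas name, collection directory, and destination are assumptions for illustration:

```v
import incubaid.herolib.core.pathlib

fn export_demo() ! {
    mut a := new(name: 'docs')!
    // Collections must be added before exporting; this path is hypothetical.
    a.add_collection(mut pathlib.get_dir(path: '/tmp/demo_col')!)!
    // Produces <dest>/content/<collection>/... and <dest>/meta/<collection>.json
    a.export(destination: '/tmp/atlas_export', include: true, redis: false)!
}
```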
61
lib/data/atlas/factory.v
Normal file
@@ -0,0 +1,61 @@
module atlas

import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import incubaid.herolib.ui.console
import incubaid.herolib.data.paramsparser

__global (
    atlases shared map[string]&Atlas
)

@[params]
pub struct AtlasNewArgs {
pub mut:
    name string = 'default'
}

// Create a new Atlas
pub fn new(args AtlasNewArgs) !&Atlas {
    mut name := texttools.name_fix(args.name)

    mut a := &Atlas{
        name: name
    }

    set(a)
    return a
}

// Get Atlas from global map
pub fn get(name string) !&Atlas {
    mut fixed_name := texttools.name_fix(name)
    rlock atlases {
        if fixed_name in atlases {
            return atlases[fixed_name] or { return error('Atlas ${name} not found') }
        }
    }
    return error("Atlas '${name}' not found")
}

// Check if Atlas exists
pub fn exists(name string) bool {
    mut fixed_name := texttools.name_fix(name)
    rlock atlases {
        return fixed_name in atlases
    }
}

// List all Atlas names
pub fn list() []string {
    rlock atlases {
        return atlases.keys()
    }
}

// Store Atlas in global map
fn set(atlas &Atlas) {
    lock atlases {
        atlases[atlas.name] = atlas
    }
}
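The factory keeps every Atlas in a shared global map, so an instance created once is retrievable by its normalized name anywhere in the process. A short sketch from inside the module (the atlas name is an assumption):

```v
fn registry_demo() ! {
    mut a := new(name: 'Docs')! // stored under the normalized name 'docs'
    assert exists('docs')
    mut same := get('docs')!
    println(list()) // e.g. ['docs']
}
```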
@@ -1,4 +1,4 @@
module core
module atlas

import incubaid.herolib.core.pathlib
import os
102
lib/data/atlas/getters.v
Normal file
@@ -0,0 +1,102 @@
|
||||
module atlas

// Get a page from any collection using format "collection:page"
pub fn (a Atlas) page_get(key string) !&Page {
	parts := key.split(':')
	if parts.len != 2 {
		return error('Invalid page key format. Use "collection:page" in page_get')
	}

	col := a.get_collection(parts[0])!
	return col.page_get(parts[1])!
}

// Get an image from any collection using format "collection:image"
pub fn (a Atlas) image_get(key string) !&File {
	parts := key.split(':')
	if parts.len != 2 {
		return error('Invalid image key format. Use "collection:image" in image_get')
	}

	col := a.get_collection(parts[0])!
	return col.image_get(parts[1])!
}

// Get a file from any collection using format "collection:file"
pub fn (a Atlas) file_get(key string) !&File {
	parts := key.split(':')
	if parts.len != 2 {
		return error('Invalid file key format. Use "collection:file" in file_get')
	}

	col := a.get_collection(parts[0])!
	return col.file_get(parts[1])!
}

// Get a file (can be an image) from any collection using format "collection:file"
pub fn (a Atlas) file_or_image_get(key string) !&File {
	parts := key.split(':')
	if parts.len != 2 {
		return error('Invalid file key format. Use "collection:file"')
	}
	col := a.get_collection(parts[0])!
	return col.file_or_image_get(parts[1])!
}

// Check if page exists
pub fn (a Atlas) page_exists(key string) !bool {
	parts := key.split(':')
	if parts.len != 2 {
		return error("Invalid page key format. Use 'collection:page' in page_exists")
	}

	col := a.get_collection(parts[0]) or { return false }
	return col.page_exists(parts[1])
}

// Check if image exists
pub fn (a Atlas) image_exists(key string) !bool {
	parts := key.split(':')
	if parts.len != 2 {
		return error("Invalid image key format. Use 'collection:image' in image_exists")
	}

	col := a.get_collection(parts[0]) or { return false }
	return col.image_exists(parts[1])
}

// Check if file exists
pub fn (a Atlas) file_exists(key string) !bool {
	parts := key.split(':')
	if parts.len != 2 {
		return error("Invalid file key format. Use 'collection:file' in file_exists")
	}

	col := a.get_collection(parts[0]) or { return false }
	return col.file_exists(parts[1])
}

pub fn (a Atlas) file_or_image_exists(key string) !bool {
	parts := key.split(':')
	if parts.len != 2 {
		return error("Invalid file key format. Use 'collection:file' in file_or_image_exists")
	}
	col := a.get_collection(parts[0]) or { return false }
	return col.file_or_image_exists(parts[1])
}

// List all pages in Atlas
pub fn (a Atlas) list_pages() map[string][]string {
	mut result := map[string][]string{}

	for col_name, col in a.collections {
		mut page_names := []string{}
		for page_name, _ in col.pages {
			page_names << page_name
		}
		page_names.sort()
		result[col_name] = page_names
	}

	return result
}
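For illustration (not part of the diff): every getter above takes a `collection:item` key; a hedged sketch, assuming an atlas that has already scanned a `guides` collection.

```v
mut a := atlas.get('docs')!

// keys always use the "collection:item" format
mut page := a.page_get('guides:intro')!

// the *_exists variants return false instead of failing when the collection is missing
if a.image_exists('guides:logo.png')! {
	img := a.image_get('guides:logo.png')!
	println(img.name)
}

// list_pages returns sorted page names grouped per collection
for col, pages in a.list_pages() {
	println('${col}: ${pages}')
}
```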
@@ -1,6 +1,6 @@
-module core
+module atlas

-import incubaid.herolib.web.doctree
+import incubaid.herolib.core.texttools
import incubaid.herolib.core.pathlib
import os

@@ -20,7 +20,7 @@ pub mut:

// Create a new Group
pub fn new_group(args GroupNewArgs) !Group {
-	mut name := doctree.name_fix(args.name)
+	mut name := texttools.name_fix(args.name)
	mut patterns := args.patterns.map(it.to_lower())

	return Group{
@@ -72,7 +72,7 @@ fn parse_group_file(filename string, base_path string, mut visited map[string]bo
	visited[filename] = true

	mut group := Group{
-		name: doctree.name_fix(filename)
+		name: texttools.name_fix(filename)
		patterns: []string{}
	}

@@ -1,4 +1,4 @@
-in doctree/
+in atlas/

check format of groups
see content/groups
@@ -7,9 +7,9 @@ now the groups end with .group

check how the include works, so we can include another group in the group as defined, only works in same folder

-in the scan function in doctree, now make scan_groups function, find groups, only do this for collection as named groups
-do not add collection groups to doctree, this is a system collection
+in the scan function in atlas, now make scan_groups function, find groups, only do this for collection as named groups
+do not add collection groups to atlas, this is a system collection

-make the groups and add them to doctree
+make the groups and add them to atlas

give clear instructions for coding agent how to write the code
@@ -1,6 +1,7 @@
-module core
+module atlas

-import incubaid.herolib.web.doctree as doctreetools
+import incubaid.herolib.core.texttools
import incubaid.herolib.ui.console

pub enum LinkFileType {
	page // Default: link to another page
@@ -42,7 +43,7 @@ pub fn (mut self Link) target_page() !&Page {
	if self.status == .external {
		return error('External links do not have a target page')
	}
-	return self.page.collection.doctree.page_get(self.key())
+	return self.page.collection.atlas.page_get(self.key())
}

// Get the target file this link points to
@@ -50,7 +51,7 @@ pub fn (mut self Link) target_file() !&File {
	if self.status == .external {
		return error('External links do not have a target file')
	}
-	return self.page.collection.doctree.file_or_image_get(self.key())
+	return self.page.collection.atlas.file_or_image_get(self.key())
}

// Find all markdown links in content
@@ -160,10 +161,23 @@ fn (mut p Page) parse_link_target(mut link Link) ! {

	// Format: $collection:$pagename or $collection:$pagename.md
	if target.contains(':') {
-		link.target_collection_name, link.target_item_name = doctreetools.key_parse(target)!
+		parts := target.split(':')
+		if parts.len >= 2 {
+			link.target_collection_name = texttools.name_fix(parts[0])
+			// For file links, use name without extension; for page links, normalize normally
+			if link.file_type == .file {
+				link.target_item_name = texttools.name_fix_no_ext(parts[1])
+			} else {
+				link.target_item_name = normalize_page_name(parts[1])
+			}
+		}
	} else {
-		link.target_item_name = doctreetools.name_fix(target)
+		// For file links, use name without extension; for page links, normalize normally
+		if link.file_type == .file {
+			link.target_item_name = texttools.name_fix_no_ext(target).trim_space()
+		} else {
+			link.target_item_name = normalize_page_name(target).trim_space()
+		}
		link.target_collection_name = p.collection.name
	}

@@ -175,11 +189,11 @@ fn (mut p Page) parse_link_target(mut link Link) ! {
	mut error_prefix := 'Broken link'

	if link.file_type == .file || link.file_type == .image {
-		target_exists = p.collection.doctree.file_or_image_exists(link.key())!
+		target_exists = p.collection.atlas.file_or_image_exists(link.key())!
		error_category = .invalid_file_reference
		error_prefix = if link.file_type == .file { 'Broken file link' } else { 'Broken image link' }
	} else {
-		target_exists = p.collection.doctree.page_exists(link.key())!
+		target_exists = p.collection.atlas.page_exists(link.key())!
	}

	// console.print_debug('Link target exists: ${target_exists} for key=${link.key()}')
@@ -284,3 +298,14 @@ fn (mut p Page) filesystem_link_path(mut link Link) !string {

	return target_path.path_relative(source_path.path)!
}

/////////////TOOLS//////////////////////////////////

// Normalize page name (remove .md, apply name_fix)
fn normalize_page_name(name string) string {
	mut clean := name
	if clean.ends_with('.md') {
		clean = clean[0..clean.len - 3]
	}
	return texttools.name_fix(clean)
}
@@ -1,7 +1,7 @@
-module core
+module atlas

import incubaid.herolib.core.pathlib
-import incubaid.herolib.web.doctree as doctreetools
+import incubaid.herolib.core.texttools

@[heap]
pub struct Page {
@@ -11,18 +11,9 @@ pub mut:
	collection_name string
	links []Link
	// macros []Macro
-	title string
-	description string
-	questions []Question
	collection &Collection @[skip; str: skip] // Reference to parent collection
}

-pub struct Question {
-pub mut:
-	question string
-	answer string
-}
-
@[params]
pub struct NewPageArgs {
pub:
@@ -45,7 +36,7 @@ pub mut:
	include bool
}

-// Read content can be with or without processing includes
+// Read content without processing includes
pub fn (mut p Page) content(args ReadContentArgs) !string {
	mut mypath := p.path()!
	mut content := mypath.read()!
@@ -58,7 +49,7 @@ pub fn (mut p Page) content(args ReadContentArgs) !string {

// Recursively process includes
fn (mut p Page) process_includes(content string, mut visited map[string]bool) !string {
-	mut doctree := p.collection.doctree
+	mut atlas := p.collection.atlas
	// Prevent circular includes
	page_key := p.key()
	if page_key in visited {
@@ -89,16 +80,34 @@ fn (mut p Page) process_includes(content string, mut visited map[string]bool) !s
	mut target_page := ''

	if include_ref.contains(':') {
-		target_collection, target_page = doctreetools.key_parse(include_ref)!
+		parts := include_ref.split(':')
+		if parts.len == 2 {
+			target_collection = texttools.name_fix(parts[0])
+			target_page = texttools.name_fix(parts[1])
+		} else {
+			p.collection.error(
+				category: .include_syntax_error
+				page_key: page_key
+				message: 'Invalid include format: `${include_ref}`'
+				show_console: false
+			)
+			processed_lines << '<!-- Invalid include format: ${include_ref} -->'
+			continue
+		}
	} else {
-		target_page = doctreetools.name_fix(include_ref)
+		target_page = texttools.name_fix(include_ref)
	}

	// Remove .md extension if present
	if target_page.ends_with('.md') {
		target_page = target_page[0..target_page.len - 3]
	}

	// Build page key
	page_ref := '${target_collection}:${target_page}'

-	// Get the referenced page from doctree
-	mut include_page := doctree.page_get(page_ref) or {
+	// Get the referenced page from atlas
+	mut include_page := atlas.page_get(page_ref) or {
		p.collection.error(
			category: .missing_include
			page_key: page_key
@@ -1,36 +1,35 @@
-module core
+module atlas

import incubaid.herolib.core.playbook { PlayBook }
import incubaid.herolib.develop.gittools
import incubaid.herolib.ui.console
import os

-// Play function to process HeroScript actions for DocTree
+// Play function to process HeroScript actions for Atlas
pub fn play(mut plbook PlayBook) ! {
-	if !plbook.exists(filter: 'doctree.') {
+	if !plbook.exists(filter: 'atlas.') {
		return
	}

-	// Track which doctrees we've processed in this playbook
-	mut processed_doctreees := map[string]bool{}
+	// Track which atlases we've processed in this playbook
+	mut processed_atlases := map[string]bool{}

	mut name := ''

	// Process scan actions - scan directories for collections
-	mut scan_actions := plbook.find(filter: 'doctree.scan')!
+	mut scan_actions := plbook.find(filter: 'atlas.scan')!
	for mut action in scan_actions {
		mut p := action.params
		name = p.get_default('name', 'main')!
		ignore := p.get_list_default('ignore', [])!
-		console.print_item("Scanning DocTree '${name}' with ignore patterns: ${ignore}")
-		// Get or create doctree from global map
-		mut doctree_instance := if exists(name) {
+		console.print_item("Scanning Atlas '${name}' with ignore patterns: ${ignore}")
+		// Get or create atlas from global map
+		mut atlas_instance := if exists(name) {
			get(name)!
		} else {
-			console.print_debug('DocTree not found, creating a new one')
+			console.print_debug('Atlas not found, creating a new one')
			new(name: name)!
		}
-		processed_doctreees[name] = true
+		processed_atlases[name] = true

		mut path := p.get_default('path', '')!

@@ -45,38 +44,38 @@ pub fn play(mut plbook PlayBook) ! {
			)!.path
		}
		if path == '' {
-			return error('Either "path" or "git_url" must be provided for doctree.scan action.')
+			return error('Either "path" or "git_url" must be provided for atlas.scan action.')
		}
-		doctree_instance.scan(path: path, ignore: ignore)!
+		atlas_instance.scan(path: path, ignore: ignore)!
		action.done = true

-		// No need to call set() again - doctree is already in global map from new()
+		// No need to call set() again - atlas is already in global map from new()
		// and we're modifying it by reference
	}

-	// Run init_post on all processed doctrees
-	for doctree_name, _ in processed_doctreees {
-		mut doctree_instance_post := get(doctree_name)!
-		doctree_instance_post.init_post()!
+	// Run init_post on all processed atlases
+	for atlas_name, _ in processed_atlases {
+		mut atlas_instance_post := get(atlas_name)!
+		atlas_instance_post.init_post()!
	}

	// Process export actions - export collections to destination
-	mut export_actions := plbook.find(filter: 'doctree.export')!
+	mut export_actions := plbook.find(filter: 'atlas.export')!

	// Process explicit export actions
	for mut action in export_actions {
		mut p := action.params
		name = p.get_default('name', 'main')!
-		destination := p.get_default('destination', '${os.home_dir()}/hero/var/doctree_export')!
+		destination := p.get_default('destination', '/tmp/atlas_export')!
		reset := p.get_default_true('reset')
		include := p.get_default_true('include')
		redis := p.get_default_true('redis')

-		mut doctree_instance := get(name) or {
-			return error("DocTree '${name}' not found. Use !!doctree.scan first.")
+		mut atlas_instance := get(name) or {
+			return error("Atlas '${name}' not found. Use !!atlas.scan first.")
		}

-		doctree_instance.export(
+		atlas_instance.export(
			destination: destination
			reset: reset
			include: include
4
lib/data/atlas/process.md
Normal file
@@ -0,0 +1,4 @@


- first find all pages
- then for each page find all links
@@ -1,4 +1,4 @@
-# DocTree Module
+# Atlas Module

A lightweight document collection manager for V, inspired by doctree but simplified.

@@ -18,7 +18,7 @@ put in .hero file and execute with hero or but shebang line on top of .hero scri

**Scan Parameters:**

-- `name` (optional, default: 'main') - DocTree instance name
+- `name` (optional, default: 'main') - Atlas instance name
- `path` (required when git_url not provided) - Directory path to scan
- `git_url` (alternative to path) - Git repository URL to clone/checkout
- `git_root` (optional when using git_url, default: ~/code) - Base directory for cloning
@@ -31,9 +31,9 @@ put in .hero file and execute with hero or but shebang line on top of .hero scri
```heroscript
#!/usr/bin/env hero

-!!doctree.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"
+!!atlas.scan git_url:"https://git.ourworld.tf/tfgrid/docs_tfgrid4/src/branch/main/collections/tests"

-!!doctree.export
+!!atlas.export destination: '/tmp/atlas_export'

```
@@ -42,10 +42,10 @@ put this in .hero file
## usage in herolib

```v
-import incubaid.herolib.web.doctree
+import incubaid.herolib.data.atlas

-// Create a new DocTree
-mut a := doctree.new(name: 'my_docs')!
+// Create a new Atlas
+mut a := atlas.new(name: 'my_docs')!

// Scan a directory for collections
a.scan(path: '/path/to/docs')!
@@ -94,7 +94,7 @@ file := a.file_get('guides:diagram')!
### Scanning for Collections

```v
-mut a := doctree.new()!
+mut a := atlas.new()!
a.scan(path: './docs')!
```

@@ -191,7 +191,7 @@ for _, col in a.collections {

### Include Processing

-DocTree supports simple include processing using `!!include` actions:
+Atlas supports simple include processing using `!!include` actions:

```v
// Export with includes processed (default)
@@ -241,11 +241,11 @@ content := page.content()!

## Git Integration

-DocTree automatically detects the git repository URL for each collection and stores it for reference. This allows users to easily navigate to the source for editing.
+Atlas automatically detects the git repository URL for each collection and stores it for reference. This allows users to easily navigate to the source for editing.

### Automatic Detection

-When scanning collections, DocTree walks up the directory tree to find the `.git` directory and captures:
+When scanning collections, Atlas walks up the directory tree to find the `.git` directory and captures:
- **git_url**: The remote origin URL
- **git_branch**: The current branch

@@ -254,7 +254,7 @@ When scanning collections, DocTree walks up the directory tree to find the `.git
You can scan collections directly from a git repository:

```heroscript
-!!doctree.scan
+!!atlas.scan
    name: 'my_docs'
    git_url: 'https://github.com/myorg/docs.git'
    git_root: '~/code' // optional, defaults to ~/code
@@ -265,7 +265,7 @@ The repository will be automatically cloned if it doesn't exist locally.
### Accessing Edit URLs

```v
-mut page := doctree.page_get('guides:intro')!
+mut page := atlas.page_get('guides:intro')!
edit_url := page.get_edit_url()!
println('Edit at: ${edit_url}')
// Output: Edit at: https://github.com/myorg/docs/edit/main/guides.md
@@ -282,7 +282,7 @@ Collection guides source: https://github.com/myorg/docs.git (branch: main)
This allows published documentation to link back to the source repository for contributions.
## Links

-DocTree supports standard Markdown links with several formats for referencing pages within collections.
+Atlas supports standard Markdown links with several formats for referencing pages within collections.

### Link Formats

@@ -313,14 +313,14 @@ Link using a path - **only the filename is used** for matching:

#### Validation

-Check all links in your DocTree:
+Check all links in your Atlas:

```v
-mut a := doctree.new()!
+mut a := atlas.new()!
a.scan(path: './docs')!

// Validate all links
-a.find_links()!
+a.validate_links()!

// Check for errors
for _, col in a.collections {
@@ -335,7 +335,7 @@ for _, col in a.collections {
Automatically rewrite links with correct relative paths:

```v
-mut a := doctree.new()!
+mut a := atlas.new()!
a.scan(path: './docs')!

// Fix all links in place
@@ -384,7 +384,7 @@ After fix (assuming pages are in subdirectories):

### Export Directory Structure

-When you export a DocTree, the directory structure is organized as:
+When you export an Atlas, the directory structure is organized as:

export_dir/
@@ -409,17 +409,17 @@

## Redis Integration

-DocTree uses Redis to store metadata about collections, pages, images, and files for fast lookups and caching.
+Atlas uses Redis to store metadata about collections, pages, images, and files for fast lookups and caching.

### Redis Data Structure

-When `redis: true` is set during export, DocTree stores:
+When `redis: true` is set during export, Atlas stores:

-1. **Collection Paths** - Hash: `doctree:path`
+1. **Collection Paths** - Hash: `atlas:path`
   - Key: collection name
   - Value: exported collection directory path

-2. **Collection Contents** - Hash: `doctree:<collection_name>`
+2. **Collection Contents** - Hash: `atlas:<collection_name>`
   - Pages: `page_name` → `page_name.md`
   - Images: `image_name.ext` → `img/image_name.ext`
   - Files: `file_name.ext` → `files/file_name.ext`
@@ -427,11 +427,11 @@ When `redis: true` is set during export, DocTree stores:
### Redis Usage Examples

```v
-import incubaid.herolib.web.doctree
+import incubaid.herolib.data.atlas
import incubaid.herolib.core.base

// Export with Redis metadata (default)
-mut a := doctree.new(name: 'docs')!
+mut a := atlas.new(name: 'docs')!
a.scan(path: './docs')!
a.export(
    destination: './output'
@@ -443,15 +443,15 @@ mut context := base.context()!
mut redis := context.redis()!

// Get collection path
-col_path := redis.hget('doctree:path', 'guides')!
+col_path := redis.hget('atlas:path', 'guides')!
println('Guides collection exported to: ${col_path}')

// Get page location
-page_path := redis.hget('doctree:guides', 'introduction')!
+page_path := redis.hget('atlas:guides', 'introduction')!
println('Introduction page: ${page_path}') // Output: introduction.md

// Get image location
-img_path := redis.hget('doctree:guides', 'logo.png')!
+img_path := redis.hget('atlas:guides', 'logo.png')!
println('Logo image: ${img_path}') // Output: img/logo.png
```

@@ -468,9 +468,9 @@ println('Logo image: ${img_path}') // Output: img/logo.png
Save collection metadata to JSON files for archival or cross-tool compatibility:

```v
-import incubaid.herolib.web.doctree
+import incubaid.herolib.data.atlas

-mut a := doctree.new(name: 'my_docs')!
+mut a := atlas.new(name: 'my_docs')!
a.scan(path: './docs')!

// Save all collections to a specified directory
@@ -497,32 +497,32 @@ save_path/

## HeroScript Integration

-DocTree integrates with HeroScript, allowing you to define DocTree operations in `.vsh` or playbook files.
+Atlas integrates with HeroScript, allowing you to define Atlas operations in `.vsh` or playbook files.

### Using in V Scripts

-Create a `.vsh` script to process DocTree operations:
+Create a `.vsh` script to process Atlas operations:

```v
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import incubaid.herolib.core.playbook
-import incubaid.herolib.web.doctree
+import incubaid.herolib.data.atlas

// Define your HeroScript content
heroscript := "
-!!doctree.scan path: './docs'
+!!atlas.scan path: './docs'

-!!doctree.export destination: './output' include: true
+!!atlas.export destination: './output' include: true
"

// Create playbook from text
mut plbook := playbook.new(text: heroscript)!

-// Execute doctree actions
-doctree.play(mut plbook)!
+// Execute atlas actions
+atlas.play(mut plbook)!

-println('DocTree processing complete!')
+println('Atlas processing complete!')
```

### Using in Playbook Files
@@ -530,11 +530,11 @@ println('DocTree processing complete!')
Create a `docs.play` file:

```heroscript
-!!doctree.scan
+!!atlas.scan
    name: 'main'
    path: '~/code/docs'

-!!doctree.export
+!!atlas.export
    destination: '~/code/output'
    reset: true
    include: true
@@ -565,11 +565,11 @@ playcmds.run(mut plbook)!
Errors are automatically collected and reported:

```heroscript
-!!doctree.scan
+!!atlas.scan
    path: './docs'

# Errors will be printed during export
-!!doctree.export
+!!atlas.export
    destination: './output'
```

@@ -583,13 +583,13 @@ Collection guides - Errors (2)

### Auto-Export Behavior

-If you use `!!doctree.scan` **without** an explicit `!!doctree.export`, DocTree will automatically export to the default location (current directory).
+If you use `!!atlas.scan` **without** an explicit `!!atlas.export`, Atlas will automatically export to the default location (current directory).

To disable auto-export, include an explicit (empty) export action or simply don't include any scan actions, as sketched below.
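A minimal sketch of the auto-export case (illustrative, reusing the playbook pattern shown earlier): the script contains only a scan action, so the export to the default location happens implicitly.

```v
import incubaid.herolib.core.playbook
import incubaid.herolib.data.atlas

// scan only - no explicit !!atlas.export action
mut plbook := playbook.new(text: "!!atlas.scan path: './docs'")!

// play() scans, then auto-exports to the default location (current directory)
atlas.play(mut plbook)!
```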
### Best Practices

-1. **Always validate before export**: Use `!!doctree.validate` to catch broken links early
+1. **Always validate before export**: Use `!!atlas.validate` to catch broken links early
2. **Use named instances**: When working with multiple documentation sets, use the `name` parameter
3. **Enable Redis for production**: Use `redis: true` for web deployments to enable fast lookups
4. **Process includes during export**: Keep `include: true` to embed referenced content in exported files
@@ -599,7 +599,7 @@ The following features are planned but not yet available:

- [ ] Load collections from `.collection.json` files
- [ ] Python API for reading collections
-- [ ] `doctree.validate` playbook action
-- [ ] `doctree.fix_links` playbook action
+- [ ] `atlas.validate` playbook action
+- [ ] `atlas.fix_links` playbook action
- [ ] Auto-save on collection modifications
- [ ] Collection version control
@@ -38,7 +38,7 @@ pub fn set_titles(page string, maxnr int) string {
	for line in lines {
		mut hash_count := 0
		mut first_char_idx := 0
-		for _, r in line.runes() {
+		for char_idx, r in line.runes() {
			if r == ` ` {
				first_char_idx++
				continue
@@ -89,7 +89,7 @@ pub fn set_titles(page string, maxnr int) string {
	// Remove existing numbering (e.g., "1. ", "1.1. ")
	mut skip_chars := 0
	mut in_numbering := true
-	for _, r in original_title_text.runes() {
+	for r_idx, r in original_title_text.runes() {
		if in_numbering {
			if (r >= `0` && r <= `9`) || r == `.` || r == ` ` {
				skip_chars++

@@ -22,8 +22,8 @@
	recursive bool
	pull bool
	reload bool // means reload the info into the cache
-	script bool // run non interactive
-	reset bool // means we will lose changes (only relevant for clone, pull)
+	script bool = true // run non interactive
+	reset bool = true // means we will lose changes (only relevant for clone, pull)
}

// do group actions on repo
@@ -38,12 +38,14 @@ pub mut:
// url string
// pull bool
// reload bool //means reload the info into the cache
-// script bool // run non interactive
-// reset bool// means we will lose changes (only relevant for clone, pull)
+// script bool = true // run non interactive
+// reset bool = true // means we will lose changes (only relevant for clone, pull)
//```
pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
	mut args := args_
+	console.print_debug('git do ${args.cmd}')
	// println(args)
	// $dbg;

	if args.path.len > 0 && args.url.len > 0 {
		panic('bug')
@@ -97,9 +99,7 @@ pub fn (mut gs GitStructure) do(args_ ReposActionsArgs) !string {
		provider: args.provider
	)!

-	// println(repos.map(it.name))
-
-	if repos.len < 4 || args.cmd in 'pull,push,commit'.split(',') {
+	if repos.len < 4 || args.cmd in 'pull,push,commit,delete'.split(',') {
		args.reload = true
	}

@@ -19,7 +19,7 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
	}

	if args.reset || repo.last_load == 0 {
-		// console.print_debug('${repo.name} : Cache Get')
+		// console.print_debug('${repo.name} : Cache get')
		repo.cache_get()!
	}

@@ -30,8 +30,6 @@ pub fn (mut repo GitRepo) status_update(args StatusUpdateArgs) ! {
	// Decide if a full load is needed.
	if args.reset || repo.last_load == 0
		|| current_time - repo.last_load >= repo.config.remote_check_period {
-		// console.print_debug("reload ${repo.name}:\n args reset:${args.reset}\n lastload:${repo.last_load}\n currtime-lastload:${current_time- repo.last_load}\n period:${repo.config.remote_check_period}")
-		// $dbg;
		repo.load_internal() or {
			// Persist the error state to the cache
			console.print_stderr('Failed to load repository ${repo.name} at ${repo.path()}: ${err}')
@@ -53,8 +51,7 @@ fn (mut repo GitRepo) load_internal() ! {

	repo.exec('fetch --all') or {
-		repo.status.error = 'Failed to fetch updates: ${err}'
-		console.print_stderr('Failed to fetch updates for ${repo.name} at ${repo.path()}: ${err}. \nPlease check git repo source, network connection and repository access.')
-		return
+		return error('Failed to fetch updates for ${repo.name} at ${repo.path()}: ${err}. Please check network connection and repository access.')
	}
	repo.load_branches()!
	repo.load_tags()!

@@ -1,8 +1,9 @@

!!hero_code.generate_client
    name:'heroprompt'
-    classname:'Workspace'
-    singleton:0
+    classname:'HeroPrompt'
+    singleton:1
+    default:1
    hasconfig:1
    templates:
    reset:0
126
lib/develop/heroprompt/heroprompt.v
Normal file
@@ -0,0 +1,126 @@
module heroprompt

import rand
import incubaid.herolib.data.ourtime

// HeroPrompt Methods - Workspace Management

@[params]
pub struct NewWorkspaceParams {
pub mut:
	name string @[required] // Workspace name
	description string // Optional description
	is_active bool = false // Whether this should be the active workspace
}

// new_workspace creates a new workspace in this HeroPrompt instance
pub fn (mut hp HeroPrompt) new_workspace(args NewWorkspaceParams) !&Workspace {
	hp.log(.info, 'Creating workspace: ${args.name}')

	// Check if workspace already exists
	if args.name in hp.workspaces {
		hp.log(.error, 'Workspace already exists: ${args.name}')
		return error('workspace already exists: ${args.name}')
	}

	// Determine if this should be the active workspace
	// If it's the first workspace, make it active by default
	// Or if explicitly requested via args.is_active
	is_first_workspace := hp.workspaces.len == 0
	should_be_active := args.is_active || is_first_workspace

	// Create new workspace
	mut ws := &Workspace{
		id: rand.uuid_v4()
		name: args.name
		description: args.description
		is_active: should_be_active
		directories: map[string]&Directory{}
		files: []HeropromptFile{}
		created: ourtime.now()
		updated: ourtime.now()
		parent: &hp // Set parent reference for auto-save
	}

	// Add to heroprompt instance
	hp.workspaces[args.name] = ws
	hp.updated = ourtime.now()

	// Save to Redis
	hp.save()!

	hp.log(.info, 'Workspace created: ${args.name}')
	return ws
}

// get_workspace retrieves an existing workspace by name
pub fn (hp &HeroPrompt) get_workspace(name string) !&Workspace {
	if name !in hp.workspaces {
		return error('workspace not found: ${name}')
	}
	return hp.workspaces[name]
}

// list_workspaces returns all workspaces in this HeroPrompt instance
pub fn (hp &HeroPrompt) list_workspaces() []&Workspace {
	mut workspaces := []&Workspace{}
	for _, ws in hp.workspaces {
		workspaces << ws
	}
	return workspaces
}

// delete_workspace removes a workspace from this HeroPrompt instance
pub fn (mut hp HeroPrompt) delete_workspace(name string) ! {
	if name !in hp.workspaces {
		hp.log(.error, 'Workspace not found: ${name}')
		return error('workspace not found: ${name}')
	}

	hp.workspaces.delete(name)
	hp.updated = ourtime.now()
	hp.save()!

	hp.log(.info, 'Workspace deleted: ${name}')
}

// save persists the HeroPrompt instance to Redis
pub fn (mut hp HeroPrompt) save() ! {
	hp.updated = ourtime.now()
	set(hp)!
}

// get_active_workspace returns the currently active workspace
pub fn (mut hp HeroPrompt) get_active_workspace() !&Workspace {
	for name, ws in hp.workspaces {
		if ws.is_active {
			// Return the actual reference from the map, not a copy
			return hp.workspaces[name] or { return error('workspace not found: ${name}') }
		}
	}
	return error('no active workspace found')
}

// set_active_workspace sets the specified workspace as active and deactivates all others
pub fn (mut hp HeroPrompt) set_active_workspace(name string) ! {
	// Check if workspace exists
	if name !in hp.workspaces {
		hp.log(.error, 'Workspace not found: ${name}')
		return error('workspace not found: ${name}')
	}

	// Deactivate all workspaces
	for _, mut ws in hp.workspaces {
		ws.is_active = false
	}

	// Activate the specified workspace
	mut ws := hp.workspaces[name] or { return error('workspace not found: ${name}') }
	ws.is_active = true
	hp.updated = ourtime.now()

	// Save to Redis
	hp.save()!

	hp.log(.info, 'Active workspace set to: ${name}')
}
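For illustration (not part of the diff): a hedged sketch of the workspace lifecycle this file implements. The `heroprompt.get('default')` factory call is hypothetical here; only the methods shown above are taken from the file.

```v
// obtain a HeroPrompt instance (hypothetical factory getter)
mut hp := heroprompt.get('default')!

// the first workspace becomes active automatically
mut ws := hp.new_workspace(name: 'api_work', description: 'API refactor')!

// add another workspace and switch to it explicitly
hp.new_workspace(name: 'docs_work')!
hp.set_active_workspace('docs_work')!

active := hp.get_active_workspace()!
println(active.name) // docs_work

// clean up; state is persisted via save() on each mutation
hp.delete_workspace('api_work')!
```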
@@ -1,66 +0,0 @@
module heroprompt

import incubaid.herolib.core.pathlib
import os

pub struct HeropromptChild {
pub mut:
	content string
	path pathlib.Path
	name string
	include_tree bool // when true and this child is a dir, include full subtree in maps/contents
}

// Utility function to get file extension with special handling for common files
pub fn get_file_extension(filename string) string {
	// Handle special cases for common files without extensions
	special_files := {
		'dockerfile':  'dockerfile'
		'makefile':    'makefile'
		'license':     'license'
		'readme':      'readme'
		'changelog':   'changelog'
		'authors':     'authors'
		'contributors': 'contributors'
		'copying':     'copying'
		'install':     'install'
		'news':        'news'
		'todo':        'todo'
		'version':     'version'
		'manifest':    'manifest'
		'gemfile':     'gemfile'
		'rakefile':    'rakefile'
		'procfile':    'procfile'
		'vagrantfile': 'vagrantfile'
	}
	lower_filename := filename.to_lower()
	if lower_filename in special_files {
		return special_files[lower_filename]
	}
	if filename.starts_with('.') && !filename.starts_with('..') {
		if filename.contains('.') && filename.len > 1 {
			parts := filename[1..].split('.')
			if parts.len >= 2 {
				return parts[parts.len - 1]
			} else {
				return filename[1..]
			}
		} else {
			return filename[1..]
		}
	}
	parts := filename.split('.')
	if parts.len < 2 {
		return ''
	}
	return parts[parts.len - 1]
}

// Read the file content
pub fn (chl HeropromptChild) read() !string {
	if chl.path.cat != .file {
		return error('cannot read content of a directory')
	}
	content := os.read_file(chl.path.path)!
	return content
}
467
lib/develop/heroprompt/heroprompt_directory.v
Normal file
@@ -0,0 +1,467 @@
module heroprompt

import os
import rand
import incubaid.herolib.core.pathlib
import incubaid.herolib.develop.codewalker
import incubaid.herolib.data.ourtime

// Directory represents a directory added to a workspace
// It contains metadata about the directory and its location
@[heap]
pub struct Directory {
pub mut:
	id string = rand.uuid_v4() // Unique identifier for this directory
	name string // Display name (can be customized by user)
	path string // Absolute path to the directory
	description string // Optional description
	git_info GitInfo // Git directory information (if applicable)
	created ourtime.OurTime // When this directory was added
	updated ourtime.OurTime // Last update time
	include_tree bool = true // Whether to include full tree in file maps
	is_expanded bool // UI state: whether directory is expanded in tree view
	is_selected bool // UI state: whether directory checkbox is checked
	selected_files map[string]bool // Map of file paths to selection state (normalized paths)
}

// GitInfo contains git-specific metadata for a directory
pub struct GitInfo {
pub mut:
	is_git_dir bool // Whether this is a git directory
	current_branch string // Current git branch
	remote_url string // Remote URL (if any)
	last_commit string // Last commit hash
	has_changes bool // Whether there are uncommitted changes
}

// Create a new directory from a directory path
@[params]
pub struct NewDirectoryParams {
pub mut:
	path string @[required] // Absolute path to directory
	name string // Optional custom name (defaults to directory name)
	description string // Optional description
}

// Create a new directory instance
pub fn new_directory(args NewDirectoryParams) !Directory {
	if args.path.len == 0 {
		return error('directory path is required')
	}

	mut dir_path := pathlib.get(args.path)
	if !dir_path.exists() || !dir_path.is_dir() {
		return error('path is not an existing directory: ${args.path}')
	}

	abs_path := dir_path.realpath()
	dir_name := dir_path.name()

	// Detect git information
	git_info := detect_git_info(abs_path)

	return Directory{
		id: rand.uuid_v4()
		name: if args.name.len > 0 { args.name } else { dir_name }
		path: abs_path
		description: args.description
		git_info: git_info
		created: ourtime.now()
		updated: ourtime.now()
		include_tree: true
	}
}

// Detect git information for a directory
fn detect_git_info(path string) GitInfo {
	// TODO: Use the gittools library to get this information
	// Keep it for now, maybe next version
	mut info := GitInfo{
		is_git_dir: false
	}

	// Check if .git directory exists
	git_dir := os.join_path(path, '.git')
	if !os.exists(git_dir) {
		return info
	}

	info.is_git_dir = true

	// Try to detect current branch
	head_file := os.join_path(git_dir, 'HEAD')
	if os.exists(head_file) {
		head_content := os.read_file(head_file) or { '' }
		if head_content.contains('ref: refs/heads/') {
			info.current_branch = head_content.replace('ref: refs/heads/', '').trim_space()
		}
	}

	// Try to detect remote URL
	config_file := os.join_path(git_dir, 'config')
	if os.exists(config_file) {
		config_content := os.read_file(config_file) or { '' }
		// Simple parsing - look for "url = " line
		for line in config_content.split_into_lines() {
			trimmed := line.trim_space()
			if trimmed.starts_with('url = ') {
				info.remote_url = trimmed.replace('url = ', '')
				break
			}
		}
	}

	// Check for uncommitted changes (simplified - just check if there are any files in git status)
	// In a real implementation, would run `git status --porcelain`
	info.has_changes = false // Placeholder - would need to execute git command

	return info
}

// Update directory metadata
@[params]
pub struct UpdateDirectoryParams {
pub mut:
	name string
	description string
}

pub fn (mut dir Directory) update(args UpdateDirectoryParams) {
	if args.name.len > 0 {
		dir.name = args.name
	}
	if args.description.len > 0 {
		dir.description = args.description
	}
	dir.updated = ourtime.now()
}

// Refresh git information for this directory
pub fn (mut dir Directory) refresh_git_info() {
	dir.git_info = detect_git_info(dir.path)
	dir.updated = ourtime.now()
}

// Check if directory path still exists
pub fn (dir &Directory) exists() bool {
	return os.exists(dir.path) && os.is_dir(dir.path)
}

// Get directory size (number of files)
pub fn (dir &Directory) file_count() !int {
	if !dir.exists() {
		return error('directory path no longer exists')
	}

	// Use codewalker to count files
	mut cw := codewalker.new(codewalker.CodeWalkerArgs{})!
	mut fm := cw.filemap_get(path: dir.path, content_read: false)!
	return fm.content.len
}

// Get display name with git branch if available
pub fn (dir &Directory) display_name() string {
	if dir.git_info.is_git_dir && dir.git_info.current_branch.len > 0 {
		return '${dir.name} (${dir.git_info.current_branch})'
	}
	return dir.name
}

// Directory Management Methods

// DirectoryContent holds the scanned files and directories from a directory
pub struct DirectoryContent {
pub mut:
	files []HeropromptFile // All files found in the directory
	directories []string // All directories found in the directory
	file_count int // Total number of files
	dir_count int // Total number of directories
}

// get_contents scans the directory and returns all files and directories
// This method respects .gitignore and .heroignore files
// This is a public method that can be used to retrieve directory contents for prompt generation
pub fn (dir &Directory) get_contents() !DirectoryContent {
	return dir.scan()
}

// scan scans the entire directory and returns all files and directories
// This method respects .gitignore and .heroignore files
// Note: This is a private method. Use add_dir() with scan parameter or get_contents() instead.
fn (dir &Directory) scan() !DirectoryContent {
	if !dir.exists() {
		return error('directory path does not exist: ${dir.path}')
	}

	// Use codewalker to scan the directory with gitignore support
	mut cw := codewalker.new(codewalker.CodeWalkerArgs{})!
	mut fm := cw.filemap_get(path: dir.path, content_read: true)!

	mut files := []HeropromptFile{}
	mut directories := map[string]bool{} // Use map to avoid duplicates

	// Process each file from the filemap
	for file_path, content in fm.content {
		// Create HeropromptFile for each file
		abs_path := os.join_path(dir.path, file_path)
		file := HeropromptFile{
			id: rand.uuid_v4()
			name: os.base(file_path)
			path: abs_path
			content: content
			created: ourtime.now()
			updated: ourtime.now()
		}
		files << file

		// Extract directory path
		dir_path := os.dir(file_path)
		if dir_path != '.' && dir_path.len > 0 {
			// Add all parent directories
			mut current_dir := dir_path
			for current_dir != '.' && current_dir.len > 0 {
				directories[current_dir] = true
				current_dir = os.dir(current_dir)
			}
		}
	}

	// Convert directories map to array
	mut dir_list := []string{}
	for directory_path, _ in directories {
		dir_list << directory_path
	}

	return DirectoryContent{
		files: files
		directories: dir_list
		file_count: files.len
		dir_count: dir_list.len
	}
}

@[params]
pub struct AddFileParams {
pub mut:
	path string @[required] // Path to file (relative to directory or absolute)
}

// add_file adds a specific file to the directory
// Returns the created HeropromptFile
pub fn (dir &Directory) add_file(args AddFileParams) !HeropromptFile {
	mut file_path := args.path

	// If path is relative, make it relative to directory path
	if !os.is_abs_path(file_path) {
		file_path = os.join_path(dir.path, file_path)
	}

	// Validate file exists
	if !os.exists(file_path) {
		return error('file does not exist: ${file_path}')
	}
	if os.is_dir(file_path) {
		return error('path is a directory, not a file: ${file_path}')
	}

	// Read file content
	content := os.read_file(file_path) or { return error('failed to read file: ${file_path}') }

	// Create HeropromptFile
	file := HeropromptFile{
		id: rand.uuid_v4()
		name: os.base(file_path)
		path: file_path
		content: content
		created: ourtime.now()
		updated: ourtime.now()
	}

	return file
}

@[params]
pub struct SelectFileParams {
pub mut:
	path string @[required] // Path to file (relative to directory or absolute)
}

// select_file marks a file as selected within this directory
// The file path can be relative to the directory or absolute
pub fn (mut dir Directory) select_file(args SelectFileParams) ! {
	// Normalize the path
	mut file_path := args.path
	if !os.is_abs_path(file_path) {
		file_path = os.join_path(dir.path, file_path)
	}
	file_path = os.real_path(file_path)

	// Verify file exists
	if !os.exists(file_path) {
		return error('file does not exist: ${args.path}')
	}

	// Verify file is within this directory
	if !file_path.starts_with(os.real_path(dir.path)) {
		return error('file is not within this directory: ${args.path}')
	}

	// Mark file as selected
	dir.selected_files[file_path] = true
	dir.updated = ourtime.now()
}

// select_all marks all files in this directory and subdirectories as selected
pub fn (mut dir Directory) select_all() ! {
	// Verify directory exists
	if !dir.exists() {
		return error('directory does not exist: ${dir.path}')
	}

	// Get all files in directory
	content := dir.get_contents()!

	// Mark all files as selected
	for file in content.files {
		normalized_path := os.real_path(file.path)
		dir.selected_files[normalized_path] = true
	}

	dir.updated = ourtime.now()
}

@[params]
pub struct DeselectFileParams {
pub mut:
	path string @[required] // Path to file (relative to directory or absolute)
}

// deselect_file marks a file as not selected within this directory
pub fn (mut dir Directory) deselect_file(args DeselectFileParams) ! {
	// Normalize the path
	mut file_path := args.path
	if !os.is_abs_path(file_path) {
		file_path = os.join_path(dir.path, file_path)
	}
	file_path = os.real_path(file_path)

	// Verify file exists
	if !os.exists(file_path) {
		return error('file does not exist: ${args.path}')
	}

	// Verify file is within this directory
	if !file_path.starts_with(os.real_path(dir.path)) {
		return error('file is not within this directory: ${args.path}')
	}

	// Mark file as not selected (remove from map or set to false)
	dir.selected_files.delete(file_path)
	dir.updated = ourtime.now()
}

// deselect_all marks all files in this directory as not selected
pub fn (mut dir Directory) deselect_all() ! {
	// Verify directory exists
	if !dir.exists() {
		return error('directory does not exist: ${dir.path}')
	}

	// Clear all selections
	dir.selected_files.clear()
	dir.updated = ourtime.now()
}

// expand sets the directory as expanded in the UI
pub fn (mut dir Directory) expand() {
	dir.is_expanded = true
	dir.updated = ourtime.now()
}

// collapse sets the directory as collapsed in the UI
pub fn (mut dir Directory) collapse() {
	dir.is_expanded = false
	dir.updated = ourtime.now()
}

@[params]
pub struct AddDirParams {
pub mut:
	path string @[required] // Path to directory (relative to directory or absolute)
	scan bool = true // Whether to automatically scan the directory (default: true)
}

// add_dir adds all files from a specific directory
// Returns DirectoryContent with files from that directory
// If scan=true (default), automatically scans the directory respecting .gitignore
// If scan=false, returns empty DirectoryContent (manual mode)
pub fn (dir &Directory) add_dir(args AddDirParams) !DirectoryContent {
	mut dir_path := args.path

	// If path is relative, make it relative to directory path
	if !os.is_abs_path(dir_path) {
		dir_path = os.join_path(dir.path, dir_path)
	}

	// Validate directory exists
	if !os.exists(dir_path) {
		return error('directory does not exist: ${dir_path}')
	}
	if !os.is_dir(dir_path) {
		return error('path is not a directory: ${dir_path}')
	}

	// If scan is false, return empty content (manual mode)
	if !args.scan {
		return DirectoryContent{
			files: []HeropromptFile{}
			file_count: 0
			dir_count: 0
		}
	}

	// Use codewalker to scan the directory
	mut cw := codewalker.new(codewalker.CodeWalkerArgs{})!
	mut fm := cw.filemap_get(path: dir_path, content_read: true)!

	mut files := []HeropromptFile{}
	mut directories := map[string]bool{} // Track directories

	// Process each file
	for file_path, content in fm.content {
		abs_path := os.join_path(dir_path, file_path)
		file := HeropromptFile{
			id: rand.uuid_v4()
			name: os.base(file_path)
			path: abs_path
			content: content
			created: ourtime.now()
			updated: ourtime.now()
		}
		files << file

		// Extract directory path
		dir_part := os.dir(file_path)
		if dir_part != '.' && dir_part.len > 0 {
			// Add all parent directories
			mut current_dir := dir_part
			for current_dir != '.' && current_dir.len > 0 {
				directories[current_dir] = true
				current_dir = os.dir(current_dir)
			}
		}
	}

	// Convert directories map to array
	mut dir_list := []string{}
	for directory_path, _ in directories {
		dir_list << directory_path
	}

	return DirectoryContent{
		files: files
		directories: dir_list
		file_count: files.len
		dir_count: dir_list.len
	}
}
Some files were not shown because too many files have changed in this diff.