Compare commits
421 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ba07f85fd8 | ||
|
|
7b621243d0 | ||
| 598b312140 | |||
| 0df10f5cb3 | |||
| 2c748a9fc8 | |||
| a2e1b4fb27 | |||
| 9b0da9f245 | |||
| 5b9426ba11 | |||
| 8b6ad4d076 | |||
|
|
7d4565e56f | ||
|
|
92cceeb64b | ||
|
|
3d90e39781 | ||
|
|
efbf00830a | ||
|
|
b9969c69fd | ||
|
|
15d998bf76 | ||
|
|
8c966ae853 | ||
|
|
3e10db326f | ||
|
|
dc6f1bdf52 | ||
|
|
429f3b1fea | ||
|
|
70009f1846 | ||
|
|
5f9024c7bf | ||
|
|
f2138f104f | ||
|
|
04ee73e8dd | ||
|
|
bd83ad37bf | ||
|
|
7b175c8804 | ||
|
|
22cbc806dc | ||
|
|
02e0a073aa | ||
|
|
b2e5a84ff9 | ||
|
|
abd694015b | ||
|
|
57d30eab2d | ||
| b27f0a5017 | |||
|
|
c47002430e | ||
|
|
bd3abade55 | ||
|
|
ea0637423e | ||
|
|
a6756bfe5a | ||
|
|
4cdbc51343 | ||
|
|
2e5210a0b7 | ||
|
|
e8574383ee | ||
|
|
21da15da0a | ||
|
|
dca1d877ac | ||
|
|
007f65c27e | ||
|
|
ef572f402d | ||
|
|
133e7c9809 | ||
| 0d38c1b471 | |||
| cf7bfb7650 | |||
|
|
228abe36a3 | ||
| b48abc474c | |||
|
|
c3fe788a5b | ||
|
|
025e8fba69 | ||
| 672904d914 | |||
| fac783c58d | |||
| 6e7843f368 | |||
| 6a555a5fe3 | |||
| 4796e4fe82 | |||
| 3b1068a3a8 | |||
| 392f764acc | |||
| 122cba9f6b | |||
|
|
dc178f68c7 | ||
|
|
59a0519b4e | ||
|
|
dfcaeec85f | ||
|
|
ef922d162e | ||
|
|
02e4ea180d | ||
|
|
8b9b0678b8 | ||
|
|
475e812ba3 | ||
|
|
0a953f2c09 | ||
|
|
1e26162e00 | ||
|
|
dd68bf950c | ||
|
|
e374520654 | ||
|
|
47c95f76e9 | ||
|
|
918cfd83ec | ||
|
|
83387c47ec | ||
|
|
f6c22c733b | ||
|
|
269d0474c5 | ||
|
|
28359984ff | ||
|
|
c09e424890 | ||
| a55af220bf | |||
| 9c09af83a2 | |||
| e69d67d238 | |||
| 01c6ea66ac | |||
| f9ea731a6e | |||
|
|
a974091442 | ||
|
|
413b805823 | ||
|
|
46069ba924 | ||
|
|
f3e7b979a5 | ||
|
|
f6733a67af | ||
|
|
4988b241ef | ||
|
|
7807a8e736 | ||
| 5ba11aab46 | |||
|
|
4abb46b4bf | ||
|
|
a7976c45f9 | ||
|
|
5194fabe62 | ||
|
|
ff430c2e4d | ||
| 1581628ea3 | |||
| 237f9bd742 | |||
|
|
cf27e7880e | ||
|
|
ad300c068f | ||
|
|
1a02dcaf0f | ||
|
|
9ecc2444aa | ||
|
|
0e1836c5d0 | ||
|
|
7965883744 | ||
|
|
b006bb1e41 | ||
|
|
27c9018c48 | ||
|
|
f1991d89b3 | ||
| 9448ae85cf | |||
|
|
a64e964d83 | ||
|
|
5e321b6b0f | ||
| b1453e3580 | |||
| 3da895083b | |||
| ac583741a4 | |||
| 4358ba6471 | |||
| 46afb63f31 | |||
| f773ce168e | |||
| aa79df1fcd | |||
| 420c9cb9e5 | |||
| 83d935930f | |||
| 2e2c94e897 | |||
| a96903da0e | |||
| 3dbcf00e9f | |||
| 708147435e | |||
| 26289bb00f | |||
| 1489b9f44f | |||
| e4045ef179 | |||
| 13f482fa12 | |||
| 3e2013576f | |||
| e5aa8bca09 | |||
| 4dd3908ff7 | |||
| 09f388e2ff | |||
| baecb9bbe4 | |||
| 2c5986295e | |||
| 126f23dfa2 | |||
|
|
407f3f85bc | ||
|
|
94da4fff5e | ||
|
|
0fa54f1354 | ||
|
|
80677f7177 | ||
|
|
01cac0f741 | ||
|
|
30546a34f9 | ||
|
|
0ccf317564 | ||
|
|
0c49e83a68 | ||
| 51db8257f5 | |||
| ecc2977581 | |||
| ffafef0c88 | |||
| ca3bac1d76 | |||
| 43f7bc7943 | |||
| b3555aa54e | |||
|
|
afad769066 | ||
|
|
2da37ee4a5 | ||
|
|
90c81a1234 | ||
|
|
34ea12ca23 | ||
|
|
d2c1be5396 | ||
|
|
fe934bba36 | ||
|
|
b01e40da40 | ||
|
|
ae7e7ecb84 | ||
|
|
fdf540cbd0 | ||
|
|
7cb26e5569 | ||
|
|
913b0cb790 | ||
|
|
485b47d145 | ||
| fb9c9b8070 | |||
|
|
5b69f935a5 | ||
|
|
9dbc36d634 | ||
|
|
42b0c4d48f | ||
|
|
5343d9bff6 | ||
|
|
eb47c8490d | ||
|
|
1de9be2a8a | ||
|
|
d852ecc5b1 | ||
|
|
368edcd93a | ||
|
|
71906fd891 | ||
|
|
dbd187a017 | ||
|
|
52a3546325 | ||
|
|
a23eb0ff0b | ||
|
|
b40c366335 | ||
|
|
d2ad18e8ec | ||
|
|
9b737c9280 | ||
| 6bbaa0d1f7 | |||
| 834f612bfe | |||
|
|
171e54a68c | ||
|
|
a690f98cc1 | ||
|
|
400ea6e80e | ||
|
|
84c19ca9a4 | ||
|
|
3b7ec028f9 | ||
| 83f7bf41e3 | |||
| 634b8c5bad | |||
| 4aabc8d1b0 | |||
| 75255d8cd0 | |||
|
|
a0b53126ca | ||
|
|
38cd933d41 | ||
|
|
2da014c407 | ||
|
|
9d1752f4ed | ||
|
|
d06a806184 | ||
|
|
fe1becabaf | ||
| 2d0d196cd3 | |||
| 37573b0b59 | |||
| 35dace9155 | |||
| 69e7c1cce9 | |||
| d21e71e615 | |||
|
|
972bb9f755 | ||
|
|
a798b2347f | ||
|
|
17979b4fde | ||
|
|
68d25d3622 | ||
|
|
f38d4249ef | ||
| ded4a0b102 | |||
| 6184441706 | |||
| 293dc3f1ac | |||
| 6492e42358 | |||
| 4aaf1bd6db | |||
| a56a251d7f | |||
| f306ff728f | |||
| a10a6d6507 | |||
|
|
59efa18bce | ||
|
|
4ed80481aa | ||
| fff14183a4 | |||
|
|
1f58676278 | ||
|
|
b67db23e07 | ||
|
|
4fe1e70881 | ||
|
|
988602f90f | ||
| 6a2e143b98 | |||
|
|
306de32de8 | ||
|
|
c0b57e2a01 | ||
|
|
aeeacc877b | ||
| 6820a7e9c8 | |||
| 1c7621f20a | |||
| 5263798b11 | |||
|
|
0d96c5fc65 | ||
|
|
6b0cf48292 | ||
|
|
9160e95e4a | ||
|
|
296cb9adf5 | ||
|
|
88fe8f503f | ||
|
|
a26bb56b15 | ||
|
|
2f54f05cfd | ||
|
|
f61d6808e8 | ||
|
|
4719876feb | ||
|
|
975cca77b1 | ||
|
|
eb80294b0a | ||
|
|
0704c9421d | ||
|
|
f08af0e2c5 | ||
| 29c2fccbe5 | |||
| 975c07fc2e | |||
|
|
814b61f25e | ||
|
|
1c264815c3 | ||
|
|
33150846cc | ||
| ff45beac09 | |||
| 24eb709293 | |||
| 49e2146152 | |||
| cdaf64b3cf | |||
| 49af31776e | |||
| 0880823576 | |||
| 1fa1ecb8ec | |||
| d6108c9836 | |||
|
|
2e7efdf229 | ||
|
|
f47703f599 | ||
|
|
383fc9fade | ||
|
|
cc344fa60e | ||
|
|
4691046d5f | ||
|
|
7b453962ca | ||
|
|
528d594056 | ||
|
|
6305cf159e | ||
|
|
5e468359a1 | ||
|
|
2317dd2d4c | ||
|
|
b92647c52e | ||
|
|
906f13b562 | ||
|
|
84142b60a7 | ||
|
|
54024ee222 | ||
|
|
66f29fcb02 | ||
|
|
73c3c3bdb5 | ||
|
|
fa677c01b2 | ||
| 60d6474a42 | |||
| 8c52326550 | |||
| 52aba347a8 | |||
|
|
acd1a4a61d | ||
|
|
ee1ac54dde | ||
|
|
2599fa6859 | ||
|
|
61a0fd2aa6 | ||
|
|
d604d739e3 | ||
|
|
a57f53fdb4 | ||
|
|
f276cdf697 | ||
|
|
7fb46a4c0b | ||
|
|
fc993a95d7 | ||
| c6ff7e7ba5 | |||
| 3f9a3fb1cd | |||
| 7ae4f7dbd0 | |||
| dbb44ec30a | |||
| 7f4fc42a7a | |||
| 01db4540b1 | |||
| 5c0c096a79 | |||
| c1719a057a | |||
| e969eacd06 | |||
| 8ed3439cdc | |||
|
|
5241dfddd4 | ||
|
|
6e0572b48a | ||
|
|
9a4a39b19a | ||
|
|
8abf113715 | ||
|
|
0fb8901ab5 | ||
|
|
6753b87873 | ||
|
|
715f481364 | ||
|
|
776942cd8b | ||
|
|
f0c23eb4ae | ||
| 25c997fec5 | |||
|
|
1546bf7f87 | ||
|
|
02d4adcff0 | ||
|
|
147c889b53 | ||
|
|
f6e7644284 | ||
|
|
582da6d7f0 | ||
|
|
3a337b7b0a | ||
|
|
2953dd0172 | ||
|
|
08f0620305 | ||
|
|
ec22a8e0ec | ||
|
|
6644d3b11c | ||
|
|
13471d4ca5 | ||
|
|
80254739b0 | ||
|
|
be4d2547e4 | ||
|
|
a7f8893399 | ||
|
|
d0b52f40b7 | ||
| 30cb80efcd | |||
| 3117d288b1 | |||
|
|
6ecd190de8 | ||
|
|
dacd6d5afb | ||
| 299f6dea06 | |||
| ed025f9acb | |||
|
|
de0e66a94a | ||
|
|
e86504ecd5 | ||
|
|
1b192328b2 | ||
|
|
77a77ff87e | ||
|
|
439dff4a64 | ||
|
|
f8cb6f25f7 | ||
| 5a8daa3feb | |||
|
|
c408934efd | ||
| abe81190e6 | |||
| c4ea066927 | |||
| e997946c56 | |||
| 5f9c6ff2bb | |||
|
|
dd4bb73a78 | ||
| 8965f7ae89 | |||
|
|
c157c86600 | ||
| 9a931b65e2 | |||
| 2c149507f6 | |||
|
|
34dea39c52 | ||
|
|
f1a4547961 | ||
|
|
8ae56a8df6 | ||
| cb0110ed20 | |||
| 2bbf814003 | |||
|
86e3fdb910
|
|||
| b731c4c388 | |||
| e929ce029d | |||
| 5160096a1a | |||
| f219a4041a | |||
| 674eae1c11 | |||
| f62369bd01 | |||
| 7a6660ebd8 | |||
| e20d1bdcc5 | |||
| 3e309b6379 | |||
| ae4e92e090 | |||
| 7b69719f0e | |||
| 1d631fec21 | |||
|
|
1005576814 | ||
| 690b1b68c3 | |||
| 6e619622d2 | |||
| eb38bc5e60 | |||
| b0da6d1bd2 | |||
| 1377953dcf | |||
|
|
963b31b087 | ||
|
|
2aafab50ad | ||
| aa85172700 | |||
| eff269e911 | |||
| 65ec6ee1a3 | |||
| a86b23b2e9 | |||
| bcccd5f247 | |||
| cb8c550ed1 | |||
| 5fc7019dcc | |||
| 8c9248fd94 | |||
|
4a2753d32c
|
|||
|
1c0535a8b4
|
|||
|
|
10c15f6f8e | ||
|
|
af2f33d4f6 | ||
|
a0c253fa05
|
|||
|
17a67870ef
|
|||
|
54cfd4c353
|
|||
| 717eb1e7d8 | |||
|
|
1db2c3ee54 | ||
|
|
7e8a4c5c45 | ||
|
|
f6fe3d4fda | ||
|
|
eeb2602bcf | ||
|
|
565eec0292 | ||
|
|
a98fad32d3 | ||
|
|
10f0c0bd31 | ||
|
|
01552145a8 | ||
|
|
09ed341b97 | ||
|
|
5cb30e6783 | ||
| 03f5885980 | |||
|
|
ee8fbbca09 | ||
|
|
b9b21ac44b | ||
|
|
d403f84b6c | ||
|
|
dfeeb8cd4c | ||
|
|
135866e5b0 | ||
|
|
eef88b5375 | ||
|
|
8b9717bb74 | ||
|
|
9a7a66192b | ||
|
|
df950143b4 | ||
|
|
038c563843 | ||
|
|
4733e05c58 | ||
|
|
c9496f0973 | ||
|
|
31dffa14ce | ||
|
|
7459501c8f | ||
|
|
bc9fd08f7e | ||
|
|
be1cee5d6a | ||
|
|
c91f9ba43c | ||
|
|
357000ef13 | ||
|
|
6ba89a8b9c | ||
|
|
d5af1d19b8 | ||
|
|
ce1ce722d5 | ||
|
|
fa192e10b8 | ||
|
|
7ae3296ef5 | ||
|
|
a6ba22b0b1 | ||
|
|
d49e94e412 | ||
|
|
ff8ee3693c | ||
|
|
d43d4d8a9f | ||
|
|
6f814b5d09 | ||
|
|
5d3df608e1 | ||
|
|
86af42bf4a | ||
|
|
5869998f6e | ||
|
|
cbdc0fd313 | ||
|
|
bada9508ef | ||
|
|
7bc4da97ab |
2
.github/workflows/hero_build.yml
vendored
2
.github/workflows/hero_build.yml
vendored
@@ -5,9 +5,7 @@ permissions:
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main","development"]
|
||||
workflow_dispatch:
|
||||
branches: ["main","development"]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
21
.gitignore
vendored
21
.gitignore
vendored
@@ -1,4 +1,13 @@
|
||||
|
||||
# Additional ignore files and directories
|
||||
Thumbs.db
|
||||
# Logs
|
||||
logs/
|
||||
*.log
|
||||
*.out
|
||||
# Compiled Python files
|
||||
*.pyc
|
||||
*.pyo
|
||||
__pycache__/
|
||||
*dSYM/
|
||||
.vmodules/
|
||||
.vscode
|
||||
@@ -7,6 +16,7 @@ vls.*
|
||||
vls.log
|
||||
node_modules/
|
||||
docs/
|
||||
vdocs/
|
||||
photonwrapper.so
|
||||
x
|
||||
.env
|
||||
@@ -27,4 +37,11 @@ output/
|
||||
.stellar
|
||||
data.ms/
|
||||
test_basic
|
||||
cli/hero
|
||||
cli/hero
|
||||
.aider*
|
||||
.compile_cache
|
||||
compile_results.log
|
||||
tmp
|
||||
compile_summary.log
|
||||
.summary_lock
|
||||
.aider*
|
||||
|
||||
183
CONTRIBUTING.md
Normal file
183
CONTRIBUTING.md
Normal file
@@ -0,0 +1,183 @@
|
||||
# Contributing to Herolib
|
||||
|
||||
Thank you for your interest in contributing to Herolib! This document provides guidelines and instructions for contributing to the project.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Getting Started](#getting-started)
|
||||
- [Setting Up Development Environment](#setting-up-development-environment)
|
||||
- [Repository Structure](#repository-structure)
|
||||
- [Development Workflow](#development-workflow)
|
||||
- [Branching Strategy](#branching-strategy)
|
||||
- [Making Changes](#making-changes)
|
||||
- [Testing](#testing)
|
||||
- [Pull Requests](#pull-requests)
|
||||
- [Code Guidelines](#code-guidelines)
|
||||
- [CI/CD Process](#cicd-process)
|
||||
- [Documentation](#documentation)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Setting Up Development Environment
|
||||
|
||||
For developers, you can use the automated installation script:
|
||||
|
||||
```bash
|
||||
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
|
||||
bash /tmp/install_v.sh --analyzer --herolib
|
||||
# IMPORTANT: Start a new shell after installation for paths to be set correctly
|
||||
```
|
||||
|
||||
Alternatively, you can manually set up the environment:
|
||||
|
||||
```bash
|
||||
mkdir -p ~/code/github/freeflowuniverse
|
||||
cd ~/code/github/freeflowuniverse
|
||||
git clone git@github.com:freeflowuniverse/herolib.git
|
||||
cd herolib
|
||||
# checkout development branch for most recent changes
|
||||
git checkout development
|
||||
bash install.sh
|
||||
```
|
||||
|
||||
### Repository Structure
|
||||
|
||||
Herolib is an opinionated library primarily used by ThreeFold to automate cloud environments. The repository is organized into several key directories:
|
||||
|
||||
- `/lib`: Core library code
|
||||
- `/cli`: Command-line interface tools, including the Hero tool
|
||||
- `/cookbook`: Examples and guides for using Herolib
|
||||
- `/scripts`: Installation and utility scripts
|
||||
- `/docs`: Generated documentation
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Branching Strategy
|
||||
|
||||
- `development`: Main development branch where all features and fixes are merged
|
||||
- `main`: Stable release branch
|
||||
|
||||
For new features or bug fixes, create a branch from `development` with a descriptive name.
|
||||
|
||||
### Making Changes
|
||||
|
||||
1. Create a new branch from `development`:
|
||||
```bash
|
||||
git checkout development
|
||||
git pull
|
||||
git checkout -b feature/your-feature-name
|
||||
```
|
||||
|
||||
2. Make your changes, following the code guidelines.
|
||||
|
||||
3. Run tests to ensure your changes don't break existing functionality:
|
||||
```bash
|
||||
./test_basic.vsh
|
||||
```
|
||||
|
||||
4. Commit your changes with clear, descriptive commit messages.
|
||||
|
||||
### Testing
|
||||
|
||||
Before submitting a pull request, ensure all tests pass:
|
||||
|
||||
```bash
|
||||
# Run all basic tests
|
||||
./test_basic.vsh
|
||||
|
||||
# Run tests for a specific module
|
||||
vtest ~/code/github/freeflowuniverse/herolib/lib/osal/package_test.v
|
||||
|
||||
# Run tests for an entire directory
|
||||
vtest ~/code/github/freeflowuniverse/herolib/lib/osal
|
||||
```
|
||||
|
||||
The test script (`test_basic.vsh`) manages test execution and caching to optimize performance. It automatically skips tests listed in the ignore or error sections of the script.
|
||||
|
||||
### Pull Requests
|
||||
|
||||
1. Push your branch to the repository:
|
||||
```bash
|
||||
git push origin feature/your-feature-name
|
||||
```
|
||||
|
||||
2. Create a pull request against the `development` branch.
|
||||
|
||||
3. Ensure your PR includes:
|
||||
- A clear description of the changes
|
||||
- Any related issue numbers
|
||||
- Documentation updates if applicable
|
||||
|
||||
4. Wait for CI checks to pass and address any feedback from reviewers.
|
||||
|
||||
## Code Guidelines
|
||||
|
||||
- Follow the existing code style and patterns in the repository
|
||||
- Write clear, concise code with appropriate comments
|
||||
- Keep modules separate and focused on specific functionality
|
||||
- Maintain separation between the jsonschema and jsonrpc modules rather than merging them
|
||||
|
||||
## CI/CD Process
|
||||
|
||||
The repository uses GitHub Actions for continuous integration and deployment:
|
||||
|
||||
### 1. Testing Workflow (`test.yml`)
|
||||
|
||||
This workflow runs on every push and pull request to ensure code quality:
|
||||
- Sets up V and Herolib
|
||||
- Runs all basic tests using `test_basic.vsh`
|
||||
|
||||
All tests must pass before a PR can be merged to the `development` branch.
|
||||
|
||||
### 2. Hero Build Workflow (`hero_build.yml`)
|
||||
|
||||
This workflow builds the Hero tool for multiple platforms when a new tag is created:
|
||||
- Builds for Linux (x86_64, aarch64) and macOS (x86_64, aarch64)
|
||||
- Runs all basic tests
|
||||
- Creates GitHub releases with the built binaries
|
||||
|
||||
### 3. Documentation Workflow (`documentation.yml`)
|
||||
|
||||
This workflow automatically updates the documentation on GitHub Pages when changes are pushed to the `development` branch:
|
||||
- Generates documentation using `doc.vsh`
|
||||
- Deploys the documentation to GitHub Pages
|
||||
|
||||
## Documentation
|
||||
|
||||
To generate documentation locally:
|
||||
|
||||
```bash
|
||||
cd ~/code/github/freeflowuniverse/herolib
|
||||
bash doc.sh
|
||||
```
|
||||
|
||||
The documentation is automatically published to [https://freeflowuniverse.github.io/herolib/](https://freeflowuniverse.github.io/herolib/) when changes are pushed to the `development` branch.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### TCC Compiler Error on macOS
|
||||
|
||||
If you encounter the following error when using TCC compiler on macOS:
|
||||
|
||||
```
|
||||
In file included from /Users/timurgordon/code/github/vlang/v/thirdparty/cJSON/cJSON.c:42:
|
||||
/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h:614: error: ';' expected (got "__fabsf16")
|
||||
```
|
||||
|
||||
This is caused by incompatibility between TCC and the half precision math functions in the macOS SDK. To fix this issue:
|
||||
|
||||
1. Open the math.h file:
|
||||
```bash
|
||||
sudo nano /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h
|
||||
```
|
||||
|
||||
2. Comment out the half precision math functions (around line 612-626).
|
||||
|
||||
For more details, see the [README.md](README.md) troubleshooting section.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Herolib Documentation](https://freeflowuniverse.github.io/herolib/)
|
||||
- [Cookbook Examples](https://github.com/freeflowuniverse/herolib/tree/development/cookbook)
|
||||
- [AI Prompts](aiprompts/starter/0_start_here.md)
|
||||
144
README.md
144
README.md
@@ -1,32 +1,60 @@
|
||||
# herolib
|
||||
# Herolib
|
||||
|
||||
Herolib is an opinionated library primarily used by ThreeFold to automate cloud environments. It provides a comprehensive set of tools and utilities for cloud automation, git operations, documentation building, and more.
|
||||
|
||||
> [documentation of the library](https://freeflowuniverse.github.io/herolib/)
|
||||
[](https://github.com/freeflowuniverse/herolib/actions/workflows/test.yml)
|
||||
[](https://github.com/freeflowuniverse/herolib/actions/workflows/documentation.yml)
|
||||
|
||||
## hero install for users
|
||||
> [Complete Documentation](https://freeflowuniverse.github.io/herolib/)
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Installation](#installation)
|
||||
- [For Users](#for-users)
|
||||
- [For Developers](#for-developers)
|
||||
- [Features](#features)
|
||||
- [Testing](#testing)
|
||||
- [Contributing](#contributing)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
- [Additional Resources](#additional-resources)
|
||||
|
||||
## Installation
|
||||
|
||||
### For Users
|
||||
|
||||
The Hero tool can be installed with a single command:
|
||||
|
||||
```bash
|
||||
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh > /tmp/install_hero.sh
|
||||
bash /tmp/install_hero.sh
|
||||
|
||||
```
|
||||
|
||||
this tool can be used to work with git, build books, play with hero AI, ...
|
||||
Hero will be installed in:
|
||||
- `/usr/local/bin` for Linux
|
||||
- `~/hero/bin` for macOS
|
||||
|
||||
## automated install for developers
|
||||
After installation on macOS, you may need to:
|
||||
```bash
|
||||
source ~/.zprofile
|
||||
# Or copy to system bin directory
|
||||
cp ~/hero/bin/hero /usr/local/bin
|
||||
```
|
||||
|
||||
The Hero tool can be used to work with git, build documentation, interact with Hero AI, and more.
|
||||
|
||||
### For Developers
|
||||
|
||||
For development purposes, use the automated installation script:
|
||||
|
||||
```bash
|
||||
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
|
||||
bash /tmp/install_v.sh --analyzer --herolib
|
||||
#DONT FORGET TO START A NEW SHELL (otherwise the paths will not be set)
|
||||
# IMPORTANT: Start a new shell after installation for paths to be set correctly
|
||||
```
|
||||
|
||||
### details
|
||||
|
||||
```bash
|
||||
|
||||
~/code/github/freeflowuniverse/herolib/install_v.sh --help
|
||||
#### Installation Options
|
||||
|
||||
```
|
||||
V & HeroLib Installer Script
|
||||
|
||||
Usage: ~/code/github/freeflowuniverse/herolib/install_v.sh [options]
|
||||
@@ -45,29 +73,97 @@ Examples:
|
||||
~/code/github/freeflowuniverse/herolib/install_v.sh --analyzer
|
||||
~/code/github/freeflowuniverse/herolib/install_v.sh --herolib
|
||||
~/code/github/freeflowuniverse/herolib/install_v.sh --reset --analyzer # Fresh install of both
|
||||
|
||||
```
|
||||
|
||||
### to test
|
||||
## Features
|
||||
|
||||
to run the basic tests, important !!!
|
||||
Herolib provides a wide range of functionality:
|
||||
|
||||
- Cloud automation tools
|
||||
- Git operations and management
|
||||
- Documentation building
|
||||
- Hero AI integration
|
||||
- System management utilities
|
||||
- And much more
|
||||
|
||||
Check the [cookbook](https://github.com/freeflowuniverse/herolib/tree/development/cookbook) for examples and use cases.
|
||||
|
||||
## Testing
|
||||
|
||||
Running tests is an essential part of development. To run the basic tests:
|
||||
|
||||
```bash
|
||||
# Run all basic tests
|
||||
~/code/github/freeflowuniverse/herolib/test_basic.vsh
|
||||
```
|
||||
|
||||
```bash
|
||||
# Run tests for a specific module
|
||||
vtest ~/code/github/freeflowuniverse/herolib/lib/osal/package_test.v
|
||||
#for a full dir
|
||||
vtest ~/code/github/freeflowuniverse/herolib/lib/osal
|
||||
|
||||
#to do al basic tests
|
||||
~/code/github/freeflowuniverse/herolib/test_basic.vsh
|
||||
# Run tests for an entire directory
|
||||
vtest ~/code/github/freeflowuniverse/herolib/lib/osal
|
||||
```
|
||||
|
||||
The `vtest` command is an alias for testing functionality.
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome contributions to Herolib! Please see our [CONTRIBUTING.md](CONTRIBUTING.md) file for detailed information on:
|
||||
|
||||
- Setting up your development environment
|
||||
- Understanding the repository structure
|
||||
- Following our development workflow
|
||||
- Making pull requests
|
||||
- CI/CD processes
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### TCC Compiler Error on macOS
|
||||
|
||||
If you encounter the following error when using TCC compiler on macOS:
|
||||
|
||||
```
|
||||
vtest is an alias to test functionality
|
||||
In file included from /Users/timurgordon/code/github/vlang/v/thirdparty/cJSON/cJSON.c:42:
|
||||
/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h:614: error: ';' expected (got "__fabsf16")
|
||||
```
|
||||
|
||||
This is caused by incompatibility between TCC and the half precision math functions in the macOS SDK. To fix this issue:
|
||||
|
||||
## important to read
|
||||
1. Open the math.h file:
|
||||
```bash
|
||||
sudo nano /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h
|
||||
```
|
||||
|
||||
- [aiprompts/starter/0_start_here.md](aiprompts/starter/0_start_here.md)
|
||||
2. Comment out the following lines (around line 612-626):
|
||||
```c
|
||||
/* half precision math functions */
|
||||
// extern _Float16 __fabsf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __hypotf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __sqrtf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __ceilf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __floorf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __rintf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __roundf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __truncf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __copysignf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __nextafterf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __fmaxf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __fminf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
// extern _Float16 __fmaf16(_Float16, _Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
|
||||
```
|
||||
|
||||
3. Save the file and try compiling again.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Complete Documentation](https://freeflowuniverse.github.io/herolib/)
|
||||
- [Cookbook Examples](https://github.com/freeflowuniverse/herolib/tree/development/cookbook)
|
||||
- [AI Prompts](aiprompts/starter/0_start_here.md)
|
||||
|
||||
## Generating Documentation
|
||||
|
||||
To generate documentation locally:
|
||||
|
||||
```bash
|
||||
cd ~/code/github/freeflowuniverse/herolib
|
||||
bash doc.sh
|
||||
```
|
||||
|
||||
92
TOSORT/developer/README.md
Normal file
92
TOSORT/developer/README.md
Normal file
@@ -0,0 +1,92 @@
|
||||
# V-Do MCP Server
|
||||
|
||||
An implementation of the [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) server for V language operations. This server uses the Standard Input/Output (stdio) transport as described in the [MCP documentation](https://modelcontextprotocol.io/docs/concepts/transports).
|
||||
|
||||
## Features
|
||||
|
||||
The server supports the following operations:
|
||||
|
||||
1. **test** - Run V tests on a file or directory
|
||||
2. **run** - Execute V code from a file or directory
|
||||
3. **compile** - Compile V code from a file or directory
|
||||
4. **vet** - Run V vet on a file or directory
|
||||
|
||||
## Usage
|
||||
|
||||
### Building the Server
|
||||
|
||||
```bash
|
||||
v -gc none -stats -enable-globals -n -w -cg -g -cc tcc /Users/despiegk/code/github/freeflowuniverse/herolib/lib/mcp/v_do
|
||||
```
|
||||
|
||||
### Using the Server
|
||||
|
||||
The server communicates using the MCP protocol over stdio. To send a request, use the following format:
|
||||
|
||||
```
|
||||
Content-Length: <length>
|
||||
|
||||
{"jsonrpc":"2.0","id":"<request-id>","method":"<method-name>","params":{"fullpath":"<path-to-file-or-directory>"}}
|
||||
```
|
||||
|
||||
Where:
|
||||
- `<length>` is the length of the JSON message in bytes
|
||||
- `<request-id>` is a unique identifier for the request
|
||||
- `<method-name>` is one of: `test`, `run`, `compile`, or `vet`
|
||||
- `<path-to-file-or-directory>` is the absolute path to the V file or directory to process
|
||||
|
||||
### Example
|
||||
|
||||
Request:
|
||||
```
|
||||
Content-Length: 85
|
||||
|
||||
{"jsonrpc":"2.0","id":"1","method":"test","params":{"fullpath":"/path/to/file.v"}}
|
||||
```
|
||||
|
||||
Response:
|
||||
```
|
||||
Content-Length: 245
|
||||
|
||||
{"jsonrpc":"2.0","id":"1","result":{"output":"Command: v -gc none -stats -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc test /path/to/file.v\nExit code: 0\nOutput:\nAll tests passed!"}}
|
||||
```
|
||||
|
||||
## Methods
|
||||
|
||||
### test
|
||||
|
||||
Runs V tests on the specified file or directory.
|
||||
|
||||
Command used:
|
||||
```
|
||||
v -gc none -stats -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc test ${fullpath}
|
||||
```
|
||||
|
||||
If a directory is specified, it will run tests on all `.v` files in the directory (non-recursive).
|
||||
|
||||
### run
|
||||
|
||||
Executes the specified V file or all V files in a directory.
|
||||
|
||||
Command used:
|
||||
```
|
||||
v -gc none -stats -enable-globals -n -w -cg -g -cc tcc run ${fullpath}
|
||||
```
|
||||
|
||||
### compile
|
||||
|
||||
Compiles the specified V file or all V files in a directory.
|
||||
|
||||
Command used:
|
||||
```
|
||||
cd /tmp && v -gc none -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc ${fullpath}
|
||||
```
|
||||
|
||||
### vet
|
||||
|
||||
Runs V vet on the specified file or directory.
|
||||
|
||||
Command used:
|
||||
```
|
||||
v vet -v -w ${fullpath}
|
||||
```
|
||||
61
TOSORT/developer/developer.v
Normal file
61
TOSORT/developer/developer.v
Normal file
@@ -0,0 +1,61 @@
|
||||
module developer
|
||||
import freeflowuniverse.herolib.mcp
|
||||
|
||||
@[heap]
|
||||
pub struct Developer {}
|
||||
|
||||
pub fn result_to_mcp_tool_contents[T](result T) []mcp.ToolContent {
|
||||
return [result_to_mcp_tool_content(result)]
|
||||
}
|
||||
|
||||
pub fn result_to_mcp_tool_content[T](result T) mcp.ToolContent {
|
||||
return $if T is string {
|
||||
mcp.ToolContent
|
||||
{
|
||||
typ: 'text'
|
||||
text: result.str()
|
||||
}
|
||||
} $else $if T is int {
|
||||
mcp.ToolContent
|
||||
{
|
||||
typ: 'number'
|
||||
number: result.int()
|
||||
}
|
||||
} $else $if T is bool {
|
||||
mcp.ToolContent
|
||||
{
|
||||
typ: 'boolean'
|
||||
boolean: result.bool()
|
||||
}
|
||||
} $else $if result is $array {
|
||||
mut items := []mcp.ToolContent{}
|
||||
for item in result {
|
||||
items << result_to_mcp_tool_content(item)
|
||||
}
|
||||
return mcp.ToolContent
|
||||
{
|
||||
typ: 'array'
|
||||
items: items
|
||||
}
|
||||
} $else $if T is $struct {
|
||||
mut properties := map[string]mcp.ToolContent{}
|
||||
$for field in T.fields {
|
||||
properties[field.name] = result_to_mcp_tool_content(result.$(field.name))
|
||||
}
|
||||
return mcp.ToolContent
|
||||
{
|
||||
typ: 'object'
|
||||
properties: properties
|
||||
}
|
||||
} $else {
|
||||
panic('Unsupported type: ${typeof(result)}')
|
||||
}
|
||||
}
|
||||
|
||||
pub fn array_to_mcp_tool_contents[U](array []U) []mcp.ToolContent {
|
||||
mut contents := []mcp.ToolContent{}
|
||||
for item in array {
|
||||
contents << result_to_mcp_tool_content(item)
|
||||
}
|
||||
return contents
|
||||
}
|
||||
391
TOSORT/developer/generate_mcp.v
Normal file
391
TOSORT/developer/generate_mcp.v
Normal file
@@ -0,0 +1,391 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.core.code
|
||||
import freeflowuniverse.herolib.mcp
|
||||
import os
|
||||
|
||||
// create_mcp_tool_code receives the name of a V language function string, and the path to the module in which it exists.
|
||||
// returns an MCP Tool code in v for attaching the function to the mcp server
|
||||
pub fn (d &Developer) create_mcp_tool_code(function_name string, module_path string) !string {
|
||||
println('DEBUG: Looking for function ${function_name} in module path: ${module_path}')
|
||||
if !os.exists(module_path) {
|
||||
println('DEBUG: Module path does not exist: ${module_path}')
|
||||
return error('Module path does not exist: ${module_path}')
|
||||
}
|
||||
|
||||
function_ := get_function_from_module(module_path, function_name)!
|
||||
println('Function string found:\n${function_}')
|
||||
|
||||
// Try to parse the function
|
||||
function := code.parse_function(function_) or {
|
||||
println('Error parsing function: ${err}')
|
||||
return error('Failed to parse function: ${err}')
|
||||
}
|
||||
|
||||
mut types := map[string]string{}
|
||||
for param in function.params {
|
||||
// Check if the type is an Object (struct)
|
||||
if param.typ is code.Object {
|
||||
types[param.typ.symbol()] = get_type_from_module(module_path, param.typ.symbol())!
|
||||
}
|
||||
}
|
||||
|
||||
// Get the result type if it's a struct
|
||||
mut result_ := ""
|
||||
if function.result.typ is code.Result {
|
||||
result_type := (function.result.typ as code.Result).typ
|
||||
if result_type is code.Object {
|
||||
result_ = get_type_from_module(module_path, result_type.symbol())!
|
||||
}
|
||||
} else if function.result.typ is code.Object {
|
||||
result_ = get_type_from_module(module_path, function.result.typ.symbol())!
|
||||
}
|
||||
|
||||
tool_name := function.name
|
||||
tool := d.create_mcp_tool(function_, types)!
|
||||
handler := d.create_mcp_tool_handler(function_, types, result_)!
|
||||
str := $tmpl('./templates/tool_code.v.template')
|
||||
return str
|
||||
}
|
||||
|
||||
// create_mcp_tool_handler generates the V source code of an MCP tool handler
// function for the given V function string.
// function_: The V function string including preceding comments
// types: A map of struct names to their definitions for complex parameter types
// result_: The type of result of the handled function. Could be simply string, or struct {...}
// Returns the handler source rendered from ./templates/tool_handler.v.template.
pub fn (d &Developer) create_mcp_tool_handler(function_ string, types map[string]string, result_ string) !string {
	function := code.parse_function(function_)!
	// One decode statement per parameter, extracting values from `arguments`
	decode_stmts := function.params.map(argument_decode_stmt(it)).join_lines()

	// `function`, `decode_stmts` and `result` are all referenced by name
	// inside the template, so these locals must keep these exact names.
	result := code.parse_type(result_)
	str := $tmpl('./templates/tool_handler.v.template')
	return str
}
|
||||
|
||||
// argument_decode_stmt returns the V statement that decodes the MCP
// `arguments` entry for the given parameter into a local variable of the
// matching type. Panics when the parameter type is not supported.
pub fn argument_decode_stmt(param code.Param) string {
	accessor := 'arguments["${param.name}"]'
	if param.typ is code.Integer {
		return '${param.name} := ${accessor}.int()'
	}
	if param.typ is code.Boolean {
		return '${param.name} := ${accessor}.bool()'
	}
	if param.typ is code.String {
		return '${param.name} := ${accessor}.str()'
	}
	// Objects, arrays and maps all round-trip through JSON decoding.
	if param.typ is code.Object || param.typ is code.Array || param.typ is code.Map {
		return '${param.name} := json.decode[${param.typ.symbol()}](${accessor}.str())!'
	}
	panic('Unsupported type: ${param.typ}')
}
|
||||
/*
In generate_mcp.v, implement a create_mcp_tool_handler function that, given a
V function string and a map of type names to their definitions (for instance
'SomeType' -> 'struct SomeType {...}'), generates a V handler function such as:

pub fn (d &Developer) create_mcp_tool_tool_handler(arguments map[string]Any) !mcp.Tool {
	function := arguments['function'].str()
	types := json.decode[map[string]string](arguments['types'].str())!
	return d.create_mcp_tool(function, types)
}
*/
|
||||
|
||||
|
||||
// create_mcp_tool parses a V language function string and returns an MCP Tool struct
// function: The V function string including preceding comments
// types: A map of struct names to their definitions for complex parameter types
// Returns an error when no `fn`/`pub fn` signature line is found or the
// signature lacks its parameter parentheses.
pub fn (d Developer) create_mcp_tool(function string, types map[string]string) !mcp.Tool {
	// Extract description from preceding comments
	mut description := ''
	lines := function.split('\n')

	// Find function signature line
	mut fn_line_idx := -1
	for i, line in lines {
		if line.trim_space().starts_with('fn ') || line.trim_space().starts_with('pub fn ') {
			fn_line_idx = i
			break
		}
	}

	if fn_line_idx == -1 {
		return error('Invalid function: no function signature found')
	}

	// Extract comments before the function: each `//` line contributes one
	// line to the tool description.
	for i := 0; i < fn_line_idx; i++ {
		line := lines[i].trim_space()
		if line.starts_with('//') {
			// Remove the comment marker and any leading space
			comment := line[2..].trim_space()
			if description != '' {
				description += '\n'
			}
			description += comment
		}
	}

	// Parse function signature
	fn_signature := lines[fn_line_idx].trim_space()

	// Extract function name
	mut fn_name := ''

	// Check if this is a method with a receiver
	if fn_signature.contains('fn (') {
		// This is a method with a receiver
		// Format: [pub] fn (receiver Type) name(...)

		// Find the closing parenthesis of the receiver
		mut receiver_end := fn_signature.index(')') or { return error('Invalid method signature: missing closing parenthesis for receiver') }

		// Extract the text after the receiver
		mut after_receiver := fn_signature[receiver_end + 1..].trim_space()

		// Extract the function name (everything before the opening parenthesis)
		mut params_start := after_receiver.index('(') or { return error('Invalid method signature: missing parameters') }
		fn_name = after_receiver[0..params_start].trim_space()
	} else if fn_signature.starts_with('pub fn ') {
		// Regular public function
		mut prefix_len := 'pub fn '.len
		mut params_start := fn_signature.index('(') or { return error('Invalid function signature: missing parameters') }
		fn_name = fn_signature[prefix_len..params_start].trim_space()
	} else if fn_signature.starts_with('fn ') {
		// Regular function
		mut prefix_len := 'fn '.len
		mut params_start := fn_signature.index('(') or { return error('Invalid function signature: missing parameters') }
		fn_name = fn_signature[prefix_len..params_start].trim_space()
	} else {
		return error('Invalid function signature: must start with "fn" or "pub fn"')
	}

	if fn_name == '' {
		return error('Could not extract function name')
	}

	// Extract parameters
	mut params_str := ''

	// Check if this is a method with a receiver
	if fn_signature.contains('fn (') {
		// This is a method with a receiver
		// Find the closing parenthesis of the receiver
		mut receiver_end := fn_signature.index(')') or { return error('Invalid method signature: missing closing parenthesis for receiver') }

		// Find the opening parenthesis of the parameters
		mut params_start := -1
		for i := receiver_end + 1; i < fn_signature.len; i++ {
			if fn_signature[i] == `(` {
				params_start = i
				break
			}
		}
		if params_start == -1 {
			return error('Invalid method signature: missing parameter list')
		}

		// Find the closing parenthesis of the parameters
		mut params_end := fn_signature.last_index(')') or { return error('Invalid method signature: missing closing parenthesis for parameters') }

		// Extract the parameters
		params_str = fn_signature[params_start + 1..params_end].trim_space()
	} else {
		// Regular function
		mut params_start := fn_signature.index('(') or { return error('Invalid function signature: missing parameters') }
		mut params_end := fn_signature.last_index(')') or { return error('Invalid function signature: missing closing parenthesis') }

		// Extract the parameters
		params_str = fn_signature[params_start + 1..params_end].trim_space()
	}

	// Create input schema for parameters
	mut properties := map[string]mcp.ToolProperty{}
	mut required := []string{}

	if params_str != '' {
		// NOTE(review): splitting on ',' breaks for parameter types that
		// themselves contain commas (e.g. fn types, generic instantiations) —
		// confirm callers only pass simple parameter lists.
		param_list := params_str.split(',')

		for param in param_list {
			trimmed_param := param.trim_space()
			if trimmed_param == '' {
				continue
			}

			// Split parameter into name and type
			param_parts := trimmed_param.split_any(' \t')
			if param_parts.len < 2 {
				continue
			}

			param_name := param_parts[0]
			param_type := param_parts[1]

			// Add to required parameters (every parsed parameter is required)
			required << param_name

			// Create property for this parameter
			mut property := mcp.ToolProperty{}

			// Check if this is a complex type defined in the types map
			if param_type in types {
				// Parse the struct definition to create a nested schema
				struct_def := types[param_type]
				struct_schema := d.create_mcp_tool_input_schema(struct_def)!
				// NOTE(review): only the schema's `typ` is copied — any nested
				// `properties` of the struct schema are dropped here; confirm
				// whether ToolProperty should carry them.
				property = mcp.ToolProperty{
					typ: struct_schema.typ
				}
			} else {
				// Handle primitive types
				schema := d.create_mcp_tool_input_schema(param_type)!
				property = mcp.ToolProperty{
					typ: schema.typ
				}
			}

			properties[param_name] = property
		}
	}

	// Create the input schema
	input_schema := mcp.ToolInputSchema{
		typ: 'object',
		properties: properties,
		required: required
	}

	// Create and return the Tool
	return mcp.Tool{
		name: fn_name,
		description: description,
		input_schema: input_schema
	}
}
|
||||
|
||||
// create_mcp_tool_input_schema creates a ToolInputSchema for a given input type
// input: The input type string — a primitive name ('string', 'int', 'float',
// 'bool'), a struct definition ('struct Name {...}' or 'pub struct Name {...}'),
// or an array type ('[]T'). Anything else falls back to a 'string' schema.
// returns: A ToolInputSchema for the given input type
// errors: Propagates errors from recursive schema generation for struct fields
pub fn (d Developer) create_mcp_tool_input_schema(input string) !mcp.ToolInputSchema {
	// Primitive types map directly onto JSON-schema type names.
	match input {
		'string' {
			return mcp.ToolInputSchema{
				typ: 'string'
			}
		}
		'int' {
			return mcp.ToolInputSchema{
				typ: 'integer'
			}
		}
		'float' {
			return mcp.ToolInputSchema{
				typ: 'number'
			}
		}
		'bool' {
			return mcp.ToolInputSchema{
				typ: 'boolean'
			}
		}
		else {}
	}

	// Struct definitions become an 'object' schema with one property per field.
	// Fix: the original only recognised 'pub struct ' definitions and silently
	// defaulted plain 'struct ' definitions to the string schema. It also bound
	// an unused `struct_name` local.
	if input.starts_with('pub struct ') || input.starts_with('struct ') {
		fields := parse_struct_fields(input)
		mut properties := map[string]mcp.ToolProperty{}

		for field_name, field_type in fields {
			properties[field_name] = mcp.ToolProperty{
				typ: d.create_mcp_tool_input_schema(field_type)!.typ
			}
		}

		return mcp.ToolInputSchema{
			typ: 'object',
			properties: properties
		}
	}

	// Array types become an 'array' schema with an 'items' pseudo-property.
	if input.starts_with('[]') {
		// NOTE(review): as in the original, the element type is not exposed —
		// the 'items' property always carries typ 'array' (the original
		// computed an element type string but never used it). Kept for
		// backward compatibility.
		mut properties := map[string]mcp.ToolProperty{}
		properties['items'] = mcp.ToolProperty{
			typ: 'array'
		}

		return mcp.ToolInputSchema{
			typ: 'array',
			properties: properties
		}
	}

	// Default to string type for unknown types
	return mcp.ToolInputSchema{
		typ: 'string'
	}
}
|
||||
|
||||
|
||||
// parse_struct_fields parses a V language struct definition string and returns
// a map of field names to their types.
// struct_def: full struct source, e.g. "pub struct User {\n\tname string\n}"
// Returns an empty map when no brace-delimited body is found.
fn parse_struct_fields(struct_def string) map[string]string {
	mut fields := map[string]string{}

	// Find the opening and closing braces of the struct definition
	start_idx := struct_def.index('{') or { return fields }
	end_idx := struct_def.last_index('}') or { return fields }

	// Extract the content between the braces
	struct_content := struct_def[start_idx + 1..end_idx].trim_space()

	// Each line holds at most one field definition
	for line in struct_content.split('\n') {
		mut field_def := line.trim_space()

		// Skip empty lines and comments
		if field_def == '' || field_def.starts_with('//') {
			continue
		}

		// Handle pub: / mut: section markers (possibly with a field after them)
		if field_def.starts_with('pub:') || field_def.starts_with('mut:') {
			field_def = field_def.all_after(':').trim_space()
			if field_def == '' {
				continue
			}
		}

		// Fix: strip trailing attributes such as @[json: 'name'] so they do
		// not end up glued onto the field type. The original checked the field
		// *name* for '@[' (which never matches, since attributes follow the
		// type) and therefore kept the attribute text in the type.
		if field_def.contains('@[') {
			field_def = field_def.all_before('@[').trim_space()
		}

		// Split into name and type on any run of whitespace. Fix: the original
		// split_any(' ') produced empty parts for runs of spaces, corrupting
		// the joined type.
		parts := field_def.fields()
		if parts.len < 2 {
			continue
		}

		fields[parts[0]] = parts[1..].join(' ')
	}

	return fields
}
|
||||
55
TOSORT/developer/generate_mcp_helpers.v
Normal file
55
TOSORT/developer/generate_mcp_helpers.v
Normal file
@@ -0,0 +1,55 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.mcp
|
||||
import x.json2
|
||||
|
||||
// result_to_mcp_tool_contents wraps the converted result in a one-element
// content list, as used for mcp.ToolCallResult.content.
pub fn result_to_mcp_tool_contents[T](result T) []mcp.ToolContent {
	mut contents := []mcp.ToolContent{cap: 1}
	contents << result_to_mcp_tool_content[T](result)
	return contents
}
|
||||
|
||||
// result_to_mcp_tool_content converts an arbitrary result value into an
// mcp.ToolContent, dispatching on the concrete type at compile time.
// Panics for unsupported types.
pub fn result_to_mcp_tool_content[T](result T) mcp.ToolContent {
	return $if T is string {
		mcp.ToolContent{
			typ: 'text'
			text: result.str()
		}
	} $else $if T is int {
		mcp.ToolContent{
			typ: 'number'
			number: result.int()
		}
	} $else $if T is bool {
		mcp.ToolContent{
			typ: 'boolean'
			boolean: result.bool()
		}
	} $else $if result is $array {
		// NOTE(review): this branch tests `result` while the others test `T`,
		// and it uses a statement `return` inside the surrounding
		// `return $if` expression — confirm this compiles and behaves as
		// intended for array results.
		mut items := []mcp.ToolContent{}
		for item in result {
			items << result_to_mcp_tool_content(item)
		}
		return mcp.ToolContent{
			typ: 'array'
			items: items
		}
	} $else $if T is $struct {
		// Build an 'object' content with one entry per struct field, converted
		// recursively via compile-time field reflection.
		mut properties := map[string]mcp.ToolContent{}
		$for field in T.fields {
			properties[field.name] = result_to_mcp_tool_content(result.$(field.name))
		}
		return mcp.ToolContent{
			typ: 'object'
			properties: properties
		}
	} $else {
		panic('Unsupported type: ${typeof(result)}')
	}
}
|
||||
|
||||
// array_to_mcp_tool_contents converts each element of the array into an
// mcp.ToolContent and returns the collected list.
pub fn array_to_mcp_tool_contents[U](array []U) []mcp.ToolContent {
	mut contents := []mcp.ToolContent{cap: array.len}
	for item in array {
		contents << result_to_mcp_tool_content[U](item)
	}
	return contents
}
|
||||
205
TOSORT/developer/generate_mcp_test.v
Normal file
205
TOSORT/developer/generate_mcp_test.v
Normal file
@@ -0,0 +1,205 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.mcp
|
||||
import json
|
||||
import os
|
||||
|
||||
// fn test_parse_struct_fields() {
|
||||
// // Test case 1: Simple struct with primitive types
|
||||
// simple_struct := 'pub struct User {
|
||||
// name string
|
||||
// age int
|
||||
// active bool
|
||||
// }'
|
||||
|
||||
// fields := parse_struct_fields(simple_struct)
|
||||
// assert fields.len == 3
|
||||
// assert fields['name'] == 'string'
|
||||
// assert fields['age'] == 'int'
|
||||
// assert fields['active'] == 'bool'
|
||||
|
||||
// // Test case 2: Struct with pub: and mut: sections
|
||||
// complex_struct := 'pub struct Config {
|
||||
// pub:
|
||||
// host string
|
||||
// port int
|
||||
// mut:
|
||||
// connected bool
|
||||
// retries int
|
||||
// }'
|
||||
|
||||
// fields2 := parse_struct_fields(complex_struct)
|
||||
// assert fields2.len == 4
|
||||
// assert fields2['host'] == 'string'
|
||||
// assert fields2['port'] == 'int'
|
||||
// assert fields2['connected'] == 'bool'
|
||||
// assert fields2['retries'] == 'int'
|
||||
|
||||
// // Test case 3: Struct with attributes and comments
|
||||
// struct_with_attrs := 'pub struct ApiResponse {
|
||||
// // User ID
|
||||
// id int
|
||||
// // User full name
|
||||
// name string @[json: "full_name"]
|
||||
// // Whether account is active
|
||||
// active bool
|
||||
// }'
|
||||
|
||||
// fields3 := parse_struct_fields(struct_with_attrs)
|
||||
// assert fields3.len == 3 // All fields are included
|
||||
// assert fields3['id'] == 'int'
|
||||
// assert fields3['active'] == 'bool'
|
||||
|
||||
// // Test case 4: Empty struct
|
||||
// empty_struct := 'pub struct Empty {}'
|
||||
// fields4 := parse_struct_fields(empty_struct)
|
||||
// assert fields4.len == 0
|
||||
|
||||
// println('test_parse_struct_fields passed')
|
||||
// }
|
||||
|
||||
// fn test_create_mcp_tool_input_schema() {
|
||||
// d := Developer{}
|
||||
|
||||
// // Test case 1: Primitive types
|
||||
// string_schema := d.create_mcp_tool_input_schema('string') or { panic(err) }
|
||||
// assert string_schema.typ == 'string'
|
||||
|
||||
// int_schema := d.create_mcp_tool_input_schema('int') or { panic(err) }
|
||||
// assert int_schema.typ == 'integer'
|
||||
|
||||
// float_schema := d.create_mcp_tool_input_schema('float') or { panic(err) }
|
||||
// assert float_schema.typ == 'number'
|
||||
|
||||
// bool_schema := d.create_mcp_tool_input_schema('bool') or { panic(err) }
|
||||
// assert bool_schema.typ == 'boolean'
|
||||
|
||||
// // Test case 2: Array type
|
||||
// array_schema := d.create_mcp_tool_input_schema('[]string') or { panic(err) }
|
||||
// assert array_schema.typ == 'array'
|
||||
// // In our implementation, arrays don't have items directly in the schema
|
||||
|
||||
// // Test case 3: Struct type
|
||||
// struct_def := 'pub struct Person {
|
||||
// name string
|
||||
// age int
|
||||
// }'
|
||||
|
||||
// struct_schema := d.create_mcp_tool_input_schema(struct_def) or { panic(err) }
|
||||
// assert struct_schema.typ == 'object'
|
||||
// assert struct_schema.properties.len == 2
|
||||
// assert struct_schema.properties['name'].typ == 'string'
|
||||
// assert struct_schema.properties['age'].typ == 'integer'
|
||||
|
||||
// println('test_create_mcp_tool_input_schema passed')
|
||||
// }
|
||||
|
||||
// fn test_create_mcp_tool() {
|
||||
// d := Developer{}
|
||||
|
||||
// // Test case 1: Simple function with primitive types
|
||||
// simple_fn := '// Get user by ID
|
||||
// // Returns user information
|
||||
// pub fn get_user(id int, include_details bool) {
|
||||
// // Implementation
|
||||
// }'
|
||||
|
||||
// tool1 := d.create_mcp_tool(simple_fn, {}) or { panic(err) }
|
||||
// assert tool1.name == 'get_user'
|
||||
// expected_desc1 := 'Get user by ID\nReturns user information'
|
||||
// assert tool1.description == expected_desc1
|
||||
// assert tool1.input_schema.typ == 'object'
|
||||
// assert tool1.input_schema.properties.len == 2
|
||||
// assert tool1.input_schema.properties['id'].typ == 'integer'
|
||||
// assert tool1.input_schema.properties['include_details'].typ == 'boolean'
|
||||
// assert tool1.input_schema.required.len == 2
|
||||
// assert 'id' in tool1.input_schema.required
|
||||
// assert 'include_details' in tool1.input_schema.required
|
||||
|
||||
// // Test case 2: Method with receiver
|
||||
// method_fn := '// Update user profile
|
||||
// pub fn (u User) update_profile(name string, age int) bool {
|
||||
// // Implementation
|
||||
// return true
|
||||
// }'
|
||||
|
||||
// tool2 := d.create_mcp_tool(method_fn, {}) or { panic(err) }
|
||||
// assert tool2.name == 'update_profile'
|
||||
// assert tool2.description == 'Update user profile'
|
||||
// assert tool2.input_schema.properties.len == 2
|
||||
// assert tool2.input_schema.properties['name'].typ == 'string'
|
||||
// assert tool2.input_schema.properties['age'].typ == 'integer'
|
||||
|
||||
// // Test case 3: Function with complex types
|
||||
// complex_fn := '// Create new configuration
|
||||
// // Sets up system configuration
|
||||
// fn create_config(name string, settings Config) !Config {
|
||||
// // Implementation
|
||||
// }'
|
||||
|
||||
// config_struct := 'pub struct Config {
|
||||
// server_url string
|
||||
// max_retries int
|
||||
// timeout float
|
||||
// }'
|
||||
|
||||
// tool3 := d.create_mcp_tool(complex_fn, {
|
||||
// 'Config': config_struct
|
||||
// }) or { panic(err) }
|
||||
// assert tool3.name == 'create_config'
|
||||
// expected_desc3 := 'Create new configuration\nSets up system configuration'
|
||||
// assert tool3.description == expected_desc3
|
||||
// assert tool3.input_schema.properties.len == 2
|
||||
// assert tool3.input_schema.properties['name'].typ == 'string'
|
||||
// assert tool3.input_schema.properties['settings'].typ == 'object'
|
||||
|
||||
// // Test case 4: Function with no parameters
|
||||
// no_params_fn := '// Initialize system
|
||||
// pub fn initialize() {
|
||||
// // Implementation
|
||||
// }'
|
||||
|
||||
// tool4 := d.create_mcp_tool(no_params_fn, {}) or { panic(err) }
|
||||
// assert tool4.name == 'initialize'
|
||||
// assert tool4.description == 'Initialize system'
|
||||
// assert tool4.input_schema.properties.len == 0
|
||||
// assert tool4.input_schema.required.len == 0
|
||||
|
||||
// println('test_create_mcp_tool passed')
|
||||
// }
|
||||
|
||||
// fn test_create_mcp_tool_code() {
|
||||
// d := Developer{}
|
||||
|
||||
// // Test with the complex function that has struct parameters and return type
|
||||
// module_path := "${os.dir(@FILE)}/testdata/mock_module"
|
||||
// function_name := 'test_function'
|
||||
|
||||
// code := d.create_mcp_tool_code(function_name, module_path) or {
|
||||
// panic('Failed to create MCP tool code: ${err}')
|
||||
// }
|
||||
|
||||
// // Print the code instead of panic for debugging
|
||||
// println('Generated code:')
|
||||
// println('----------------------------------------')
|
||||
// println(code)
|
||||
// println('----------------------------------------')
|
||||
|
||||
// // Verify the generated code contains the expected elements
|
||||
// assert code.contains('test_function_tool')
|
||||
// assert code.contains('TestConfig')
|
||||
// assert code.contains('TestResult')
|
||||
|
||||
// // Test with a simple function that has primitive types
|
||||
// simple_function_name := 'simple_function'
|
||||
// simple_code := d.create_mcp_tool_code(simple_function_name, module_path) or {
|
||||
// panic('Failed to create MCP tool code for simple function: ${err}')
|
||||
// }
|
||||
|
||||
// // Verify the simple function code
|
||||
// assert simple_code.contains('simple_function_tool')
|
||||
// assert simple_code.contains('name string')
|
||||
// assert simple_code.contains('count int')
|
||||
|
||||
// // println('test_create_mcp_tool_code passed')
|
||||
// }
|
||||
108
TOSORT/developer/generate_mcp_tools.v
Normal file
108
TOSORT/developer/generate_mcp_tools.v
Normal file
@@ -0,0 +1,108 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.mcp
|
||||
import x.json2 as json { Any }
|
||||
// import json
|
||||
|
||||
// Tool definition for the create_mcp_tool_code function
const create_mcp_tool_code_tool = mcp.Tool{
	name: 'create_mcp_tool_code'
	description: 'create_mcp_tool_code receives the name of a V language function string, and the path to the module in which it exists.
returns an MCP Tool code in v for attaching the function to the mcp server'
	input_schema: mcp.ToolInputSchema{
		typ: 'object'
		properties: {
			'function_name': mcp.ToolProperty{
				typ: 'string'
				items: mcp.ToolItems{
					typ: ''
					enum: []
				}
				enum: []
			}
			'module_path': mcp.ToolProperty{
				typ: 'string'
				items: mcp.ToolItems{
					typ: ''
					enum: []
				}
				enum: []
			}
		}
		required: ['function_name', 'module_path']
	}
}
|
||||
|
||||
// Tool handler for create_mcp_tool_code: generates MCP tool code for a named
// function in a module and wraps the generated source in a ToolCallResult.
pub fn (d &Developer) create_mcp_tool_code_tool_handler(arguments map[string]Any) !mcp.ToolCallResult {
	function_name := arguments['function_name'].str()
	module_path := arguments['module_path'].str()
	generated := d.create_mcp_tool_code(function_name, module_path) or { return mcp.error_tool_call_result(err) }
	return mcp.ToolCallResult{
		is_error: false
		content: result_to_mcp_tool_contents[string](generated)
	}
}
|
||||
|
||||
// Tool definition for the create_mcp_tool function
// Note: only 'function' is required; 'types' may be omitted for functions
// without struct parameters.
const create_mcp_tool_tool = mcp.Tool{
	name: 'create_mcp_tool'
	description: 'Parses a V language function string and returns an MCP Tool struct. This tool analyzes function signatures, extracts parameters, and generates the appropriate MCP Tool representation.'
	input_schema: mcp.ToolInputSchema{
		typ: 'object'
		properties: {
			'function': mcp.ToolProperty{
				typ: 'string'
			}
			'types': mcp.ToolProperty{
				typ: 'object'
			}
		}
		required: ['function']
	}
}
|
||||
|
||||
// Tool handler for create_mcp_tool: decodes the function string and optional
// types map from the MCP arguments and returns the Tool as text content.
pub fn (d &Developer) create_mcp_tool_tool_handler(arguments map[string]Any) !mcp.ToolCallResult {
	function := arguments['function'].str()
	// Fix: 'types' is optional in the tool schema (required: ['function']),
	// but json.decode errors on the empty string an absent key appears to
	// yield (NOTE(review): confirm json2.Any zero value) — fall back to an
	// empty map instead of failing the whole call.
	types_str := arguments['types'].str()
	types := if types_str == '' {
		map[string]string{}
	} else {
		json.decode[map[string]string](types_str)!
	}
	result := d.create_mcp_tool(function, types) or { return mcp.error_tool_call_result(err) }
	return mcp.ToolCallResult{
		is_error: false
		content: result_to_mcp_tool_contents[string](result.str())
	}
}
|
||||
|
||||
// Tool definition for the create_mcp_tool_handler function
// Note: 'function' and 'result' are required; 'types' may be omitted for
// functions without struct parameters.
const create_mcp_tool_handler_tool = mcp.Tool{
	name: 'create_mcp_tool_handler'
	description: 'Generates a tool handler for the create_mcp_tool function. This tool handler accepts function string and types map and returns an MCP ToolCallResult.'
	input_schema: mcp.ToolInputSchema{
		typ: 'object'
		properties: {
			'function': mcp.ToolProperty{
				typ: 'string'
			}
			'types': mcp.ToolProperty{
				typ: 'object'
			}
			'result': mcp.ToolProperty{
				typ: 'string'
			}
		}
		required: ['function', 'result']
	}
}
|
||||
|
||||
// Tool handler for the create_mcp_tool_handler function
// Decodes the function string, optional types map and result type from the MCP
// arguments and returns the generated handler source as text content.
pub fn (d &Developer) create_mcp_tool_handler_tool_handler(arguments map[string]Any) !mcp.ToolCallResult {
	function := arguments['function'].str()
	// Fix: 'types' is optional in the tool schema, but json.decode errors on
	// the empty string an absent key appears to yield (NOTE(review): confirm
	// json2.Any zero value) — treat an empty value as an empty map instead.
	types_str := arguments['types'].str()
	types := if types_str == '' {
		map[string]string{}
	} else {
		json.decode[map[string]string](types_str)!
	}
	result_ := arguments['result'].str()
	result := d.create_mcp_tool_handler(function, types, result_) or {
		return mcp.error_tool_call_result(err)
	}
	return mcp.ToolCallResult{
		is_error: false
		content: result_to_mcp_tool_contents[string](result)
	}
}
|
||||
31
TOSORT/developer/mcp.v
Normal file
31
TOSORT/developer/mcp.v
Normal file
@@ -0,0 +1,31 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.mcp.logger
|
||||
import freeflowuniverse.herolib.mcp
|
||||
import freeflowuniverse.herolib.schemas.jsonrpc
|
||||
|
||||
// pub fn new_mcp_server(d &Developer) !&mcp.Server {
|
||||
// logger.info('Creating new Developer MCP server')
|
||||
|
||||
// // Initialize the server with the empty handlers map
|
||||
// mut server := mcp.new_server(mcp.MemoryBackend{
|
||||
// tools: {
|
||||
// 'create_mcp_tool': create_mcp_tool_tool
|
||||
// 'create_mcp_tool_handler': create_mcp_tool_handler_tool
|
||||
// 'create_mcp_tool_code': create_mcp_tool_code_tool
|
||||
// }
|
||||
// tool_handlers: {
|
||||
// 'create_mcp_tool': d.create_mcp_tool_tool_handler
|
||||
// 'create_mcp_tool_handler': d.create_mcp_tool_handler_tool_handler
|
||||
// 'create_mcp_tool_code': d.create_mcp_tool_code_tool_handler
|
||||
// }
|
||||
// }, mcp.ServerParams{
|
||||
// config: mcp.ServerConfiguration{
|
||||
// server_info: mcp.ServerInfo{
|
||||
// name: 'developer'
|
||||
// version: '1.0.0'
|
||||
// }
|
||||
// }
|
||||
// })!
|
||||
// return server
|
||||
// }
|
||||
10
TOSORT/developer/scripts/run.vsh
Executable file
10
TOSORT/developer/scripts/run.vsh
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.mcp.developer
import freeflowuniverse.herolib.mcp.logger

// Entry point: build the Developer MCP server and run it until it stops.
mut server := developer.new_mcp_server(&developer.Developer{})!
server.start() or {
	// NOTE(review): logger.fatal presumably terminates the process already;
	// exit(1) is kept as a guarantee of a non-zero exit status — confirm.
	logger.fatal('Error starting server: ${err}')
	exit(1)
}
|
||||
5
TOSORT/developer/templates/tool_code.v.template
Normal file
5
TOSORT/developer/templates/tool_code.v.template
Normal file
@@ -0,0 +1,5 @@
|
||||
// @{tool_name} MCP Tool
// NOTE: generated from templates/tool_code.v.template — edit the template,
// not the generated output.

const @{tool_name}_tool = @{tool.str()}

@{handler}
|
||||
11
TOSORT/developer/templates/tool_handler.v.template
Normal file
11
TOSORT/developer/templates/tool_handler.v.template
Normal file
@@ -0,0 +1,11 @@
|
||||
// Generated MCP tool handler for @{function.name}: decodes each argument and
// wraps the call result in a ToolCallResult.
pub fn (d &Developer) @{function.name}_tool_handler(arguments map[string]Any) !mcp.ToolCallResult {
	@{decode_stmts}
	// Fix: the `or` block must follow the call on the same line (a bare
	// `or {` on its own line is not valid V), and the error helper is
	// qualified with `mcp.` to match the hand-written handlers.
	result := d.@{function.name}(@{function.params.map(it.name).join(',')}) or {
		return mcp.error_tool_call_result(err)
	}
	return mcp.ToolCallResult{
		is_error: false
		// Fix: use the plural result_to_mcp_tool_contents — `content` holds a
		// list of ToolContent, as in the hand-written handlers.
		content: result_to_mcp_tool_contents[@{result.symbol()}](result)
	}
}
|
||||
38
TOSORT/developer/testdata/mock_module/mock.v
vendored
Normal file
38
TOSORT/developer/testdata/mock_module/mock.v
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
module mock_module
|
||||
|
||||
// TestConfig represents a configuration for testing
pub struct TestConfig {
pub:
	name string // must be non-empty when passed to test_function
	enabled bool // copied into TestResult.success; selects code 0 (enabled) or 1
	count int
	value float64
}
|
||||
|
||||
// TestResult represents the result of a test operation
pub struct TestResult {
pub:
	success bool // mirrors TestConfig.enabled in test_function
	message string
	code int // 0 on success, 1 otherwise (as produced by test_function)
}
|
||||
|
||||
// test_function is a simple function for testing the MCP tool code generation
// It takes a config and returns a result. Errors when the config name is empty.
pub fn test_function(config TestConfig) !TestResult {
	// Mock implementation: reject unnamed configs, otherwise echo the config
	// back as a result.
	if config.name == '' {
		return error('Name cannot be empty')
	}

	mut status_code := 1
	if config.enabled {
		status_code = 0
	}
	return TestResult{
		success: config.enabled
		message: 'Test completed for ${config.name}'
		code: status_code
	}
}
|
||||
|
||||
// simple_function is a function with primitive types for testing
pub fn simple_function(name string, count int) string {
	return name + ' count: ' + count.str()
}
|
||||
159
TOSORT/developer/testdata/vlang_test_standalone.v
vendored
Normal file
159
TOSORT/developer/testdata/vlang_test_standalone.v
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
module main
|
||||
|
||||
import os
|
||||
|
||||
// Standalone test for the get_type_from_module function
|
||||
// This file can be run directly with: v run vlang_test_standalone.v
|
||||
|
||||
// Implementation of get_type_from_module function
// Scans every V file in module_path for a declaration of the form
// 'struct <type_name> {' and returns the struct body: the text from just
// after the opening brace through the closing brace, inclusive.
// Errors when the directory cannot be listed, a file cannot be read, the
// closing brace is missing, or no file declares the type.
fn get_type_from_module(module_path string, type_name string) !string {
	v_files := list_v_files(module_path) or {
		return error('Failed to list V files in ${module_path}: ${err}')
	}

	needle := 'struct ${type_name} {'
	for v_file in v_files {
		content := os.read_file(v_file) or { return error('Failed to read file ${v_file}: ${err}') }

		match_i := content.index(needle) or { continue }
		body_start := match_i + needle.len
		closing_i := find_closing_brace(content, body_start) or {
			return error('could not find where declaration for type ${type_name} ends')
		}
		return content.substr(body_start, closing_i + 1)
	}

	return error('type ${type_name} not found in module ${module_path}')
}
|
||||
|
||||
// Helper function to find the closing brace
// Returns the index of the '}' that closes the brace opened just before
// start_i, honouring nested braces; none when the text is unbalanced.
fn find_closing_brace(content string, start_i int) ?int {
	mut depth := 1
	for i in start_i .. content.len {
		ch := content[i]
		if ch == `{` {
			depth++
		} else if ch == `}` {
			depth--
			if depth == 0 {
				return i
			}
		}
	}
	return none
}
|
||||
|
||||
// Helper function to list V files
// Returns the full paths of all '.v' files directly inside dir, skipping
// files whose names end in '_.v'.
fn list_v_files(dir string) ![]string {
	files := os.ls(dir) or { return error('Error listing directory: ${err}') }
	return files
		.filter(it.ends_with('.v') && !it.ends_with('_.v'))
		.map(os.join_path(dir, it))
}
|
||||
|
||||
// Helper function to create test files with struct definitions
// Writes two fixture .v files into the system temp directory and returns
// (test_dir, simple_struct_file, nested_struct_file). Errors when a file
// cannot be written.
fn create_test_files() !(string, string, string) {
	// Create a temporary directory for our test files
	// NOTE(review): os.temp_dir() is shared, not a fresh directory — fixture
	// files persist between runs and may collide; confirm this is acceptable.
	test_dir := os.temp_dir()
	test_file_path := os.join_path(test_dir, 'test_type.v')

	// Create a test file with a simple struct
	test_content := 'module test_module

struct TestType {
	name string
	age int
	active bool
}

// Another struct to make sure we get the right one
struct OtherType {
	id string
}
'
	os.write_file(test_file_path, test_content) or {
		eprintln('Failed to create test file: ${err}')
		return error('Failed to create test file: ${err}')
	}

	// Create a test file with a nested struct
	nested_test_content := 'module test_module

struct NestedType {
	config map[string]string {
		required: true
	}
	data []struct {
		key string
		value string
	}
}
'
	nested_test_file := os.join_path(test_dir, 'nested_test.v')
	os.write_file(nested_test_file, nested_test_content) or {
		eprintln('Failed to create nested test file: ${err}')
		return error('Failed to create nested test file: ${err}')
	}

	return test_dir, test_file_path, nested_test_file
}
|
||||
|
||||
// Test function for get_type_from_module
|
||||
// Test function for get_type_from_module: exercises the happy path, the
// not-found error path, and a struct whose body contains nested braces.
fn test_get_type_from_module() {
	// Arrange: write the fixture files.
	test_dir, test_file_path, nested_test_file := create_test_files() or {
		eprintln('Failed to create test files: ${err}')
		assert false
		return
	}

	// Case 1: simple struct — the returned text is the body only (from just
	// after the opening brace through the closing brace).
	simple_body := get_type_from_module(test_dir, 'TestType') or {
		eprintln('Failed to get type: ${err}')
		assert false
		return
	}
	want_simple := '\n\tname string\n\tage int\n\tactive bool\n}'
	assert simple_body == want_simple, 'Expected: "${want_simple}", got: "${simple_body}"'

	// Case 2: a type that does not exist anywhere — must error out.
	missing := get_type_from_module(test_dir, 'NonExistentType') or {
		assert err.str().contains('not found in module'), 'Expected error message about type not found'
		''
	}
	assert missing == '', 'Expected empty string for non-existent type'

	// Case 3: nested braces inside the struct body must not confuse the
	// closing-brace search.
	nested_body := get_type_from_module(test_dir, 'NestedType') or {
		eprintln('Failed to get nested type: ${err}')
		assert false
		return
	}
	want_nested := '\n\tconfig map[string]string {\n\t\trequired: true\n\t}\n\tdata []struct {\n\t\tkey string\n\t\tvalue string\n\t}\n}'
	assert nested_body == want_nested, 'Expected: "${want_nested}", got: "${nested_body}"'

	// Cleanup: best-effort removal of the fixtures.
	os.rm(test_file_path) or {}
	os.rm(nested_test_file) or {}

	println('All tests for get_type_from_module passed successfully!')
}
|
||||
|
||||
// Entry point for the standalone test runner: run with `v run <file>`.
fn main() {
	test_get_type_from_module()
}
|
||||
286
TOSORT/developer/vlang.v
Normal file
286
TOSORT/developer/vlang.v
Normal file
@@ -0,0 +1,286 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.mcp
|
||||
import freeflowuniverse.herolib.mcp.logger
|
||||
import os
|
||||
import log
|
||||
|
||||
// get_module_dir maps a herolib module import path (e.g.
// 'freeflowuniverse.herolib.mcp.developer') to its on-disk source directory
// under ~/code/github/freeflowuniverse/herolib/lib/.
//
// Bug fix: trimming only 'freeflowuniverse.herolib' left a leading '.' in
// the remainder, so split('.') produced a leading empty element and the
// resulting path contained a double slash ('lib//mcp/developer'). The extra
// trim of the leading '.' removes it.
fn get_module_dir(mod string) string {
	rel := mod.trim_string_left('freeflowuniverse.herolib').trim_string_left('.')
	module_parts := rel.split('.')
	return '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/${module_parts.join('/')}'
}
|
||||
|
||||
// given a module path and a type name, returns the type definition of that type within that module
|
||||
// for instance: get_type_from_module('lib/mcp/developer/vlang.v', 'Developer') might return struct Developer {...}
|
||||
// given a module path and a type name, returns the type definition of that type within that module
// for instance: get_type_from_module('lib/mcp/developer/vlang.v', 'Developer') might return struct Developer {...}
//
// The returned text starts at the beginning of the declaration line (so a
// leading 'pub ' is included) and ends at the matching closing brace.
// When the type is not declared in the module but one of its files imports
// a module whose import line mentions the type, the search recurses into
// that module's directory.
//
// Cleanup: removed the leftover `log.debug('debugzoooo')` statement, the
// never-read `is_pub` flag, the redundant 'pub struct' fallback search
// ('struct X {' already matches inside 'pub struct X {'), and the
// `comment_start` back-scan whose result was computed but never used.
fn get_type_from_module(module_path string, type_name string) !string {
	println('Looking for type ${type_name} in module ${module_path}')
	v_files := list_v_files(module_path) or {
		return error('Failed to list V files in ${module_path}: ${err}')
	}

	for v_file in v_files {
		println('Checking file: ${v_file}')
		content := os.read_file(v_file) or { return error('Failed to read file ${v_file}: ${err}') }

		// 'struct X {' is a substring of 'pub struct X {', so one search
		// covers both private and public declarations.
		type_str := 'struct ${type_name} {'
		i := content.index(type_str) or { -1 }

		if i == -1 {
			// Not declared here; if this file imports the type from another
			// module, follow that import and search there instead.
			type_import := content.split_into_lines().filter(it.contains('import')
				&& it.contains(type_name))
			if type_import.len > 0 {
				mod := type_import[0].trim_space().trim_string_left('import ').all_before(' ')
				return get_type_from_module(get_module_dir(mod), type_name)
			}
			continue
		}
		println('Found type ${type_name} in ${v_file} at position ${i}')

		// Walk back to the start of the declaration line so a leading
		// 'pub ' modifier is part of the returned text.
		mut line_start := i
		for j := i; j >= 0; j-- {
			if j == 0 || content[j - 1] == `\n` {
				line_start = j
				break
			}
		}

		// Find the end of the struct definition (matching closing brace).
		closing_i := find_closing_brace(content, i + type_str.len) or {
			return error('could not find where declaration for type ${type_name} ends')
		}

		// Get the full struct definition including the struct declaration line
		full_struct := content.substr(line_start, closing_i + 1)
		println('Found struct definition:\n${full_struct}')

		return full_struct
	}

	return error('type ${type_name} not found in module ${module_path}')
}
|
||||
|
||||
// given a module path and a function name, returns the function definition of that function within that module
|
||||
// for instance: get_function_from_module('lib/mcp/developer/vlang.v', 'develop') might return fn develop(...) {...}
|
||||
// given a module path and a function name, returns the function definition of that function within that module
// for instance: get_function_from_module('lib/mcp/developer/vlang.v', 'develop') might return fn develop(...) {...}
fn get_function_from_module(module_path string, function_name string) !string {
	println('Searching for function ${function_name} in module ${module_path}')
	v_files := list_v_files(module_path) or {
		println('Error listing files: ${err}')
		return error('Failed to list V files in ${module_path}: ${err}')
	}

	println('Found ${v_files.len} V files in ${module_path}')
	// Return the first file that yields the function; a per-file miss is
	// expected and just moves the scan on to the next file.
	for candidate in v_files {
		println('Checking file: ${candidate}')
		extracted := get_function_from_file(candidate, function_name) or {
			println('Function not found in ${candidate}: ${err}')
			continue
		}
		println('Found function ${function_name} in ${candidate}')
		return extracted
	}

	return error('function ${function_name} not found in module ${module_path}')
}
|
||||
|
||||
// find_closing_brace locates the `}` matching an opening brace that was
// consumed just before start_i; the scan therefore starts at nesting depth 1.
// Returns none if the content runs out before the braces balance.
fn find_closing_brace(content string, start_i int) ?int {
	mut depth := 1
	for idx in start_i .. content.len {
		if content[idx] == `{` {
			depth++
		} else if content[idx] == `}` {
			depth--
			if depth == 0 {
				return idx
			}
		}
	}
	return none
}
|
||||
|
||||
// get_function_from_file parses a V file and extracts a specific function block including its comments
|
||||
// ARGS:
|
||||
// file_path string - path to the V file
|
||||
// function_name string - name of the function to extract
|
||||
// RETURNS: string - the function block including comments, or empty string if not found
|
||||
// get_function_from_file parses a V file and extracts a specific function block including its comments
// ARGS:
//   file_path string - path to the V file
//   function_name string - name of the function to extract
// RETURNS: string - the function block (preceding '//' comment lines plus the
//   full body through the matching closing brace); error if the file cannot
//   be read or the function is not declared in it.
//
// Bug fix: the original only incremented brace_count once on the declaration
// line (`if line.contains('{')`), so a single-line function such as
// `fn f() int { return 1 }` never reached brace_count 0 and the scan
// returned the entire rest of the file. All braces on the declaration line
// are now counted, and the function returns immediately when they balance.
fn get_function_from_file(file_path string, function_name string) !string {
	content := os.read_file(file_path) or {
		return error('Failed to read file: ${file_path}: ${err}')
	}

	lines := content.split_into_lines()
	mut result := []string{}
	mut in_function := false
	mut brace_count := 0
	mut comment_block := []string{}

	for line in lines {
		trimmed := line.trim_space()

		// Collect comments that might be above the function
		if trimmed.starts_with('//') {
			if !in_function {
				comment_block << line
			} else if brace_count > 0 {
				result << line
			}
			continue
		}

		// Check if we found the function
		if !in_function && (trimmed.starts_with('fn ${function_name}(')
			|| trimmed.starts_with('pub fn ${function_name}(')) {
			in_function = true
			// Add collected comments
			result << comment_block
			comment_block = []
			result << line
			// Count every brace on the declaration line so single-line
			// functions terminate correctly.
			for c in line {
				if c == `{` {
					brace_count++
				} else if c == `}` {
					brace_count--
				}
			}
			if brace_count == 0 && line.contains('{') {
				// Body opened and closed on the declaration line itself.
				return result.join('\n')
			}
			continue
		}

		// If we're inside the function, keep track of braces
		if in_function {
			result << line

			for c in line {
				if c == `{` {
					brace_count++
				} else if c == `}` {
					brace_count--
				}
			}

			// If brace_count is 0, we've reached the end of the function
			if brace_count == 0 && trimmed.contains('}') {
				return result.join('\n')
			}
		} else {
			// Reset comment block if we pass a blank line
			if trimmed == '' {
				comment_block = []
			}
		}
	}

	if !in_function {
		return error('Function "${function_name}" not found in ${file_path}')
	}

	return result.join('\n')
}
|
||||
|
||||
// list_v_files returns all .v files in a directory (non-recursive), excluding generated files ending with _.v
|
||||
// list_v_files returns all .v files in a directory (non-recursive), excluding generated files ending with _.v
fn list_v_files(dir string) ![]string {
	entries := os.ls(dir) or { return error('Error listing directory: ${err}') }

	mut collected := []string{}
	for entry in entries {
		// Skip anything that is not a hand-written V source file.
		if !entry.ends_with('.v') || entry.ends_with('_.v') {
			continue
		}
		collected << os.join_path(dir, entry)
	}

	return collected
}
|
||||
|
||||
// test runs v test on the specified file or directory
|
||||
// test runs v test on the specified file or directory
// For a directory, each .v file is tested in turn and the reports are
// concatenated with a separator line; for a file, the v test command is
// executed directly and its report returned.
pub fn vtest(fullpath string) !string {
	logger.info('test ${fullpath}')
	if !os.exists(fullpath) {
		return error('File or directory does not exist: ${fullpath}')
	}
	if !os.is_dir(fullpath) {
		cmd := 'v -gc none -stats -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc test ${fullpath}'
		logger.debug('Executing command: ${cmd}')
		result := os.execute(cmd)
		if result.exit_code != 0 {
			return error('Test failed for ${fullpath} with exit code ${result.exit_code}\n${result.output}')
		} else {
			logger.info('Test completed for ${fullpath}')
		}
		return 'Command: ${cmd}\nExit code: ${result.exit_code}\nOutput:\n${result.output}'
	}
	// Directory: recurse into every .v file and join the reports.
	mut results := ''
	for item in list_v_files(fullpath)! {
		results += vtest(item)!
		results += '\n-----------------------\n'
	}
	return results
}
|
||||
|
||||
// vvet runs v vet on the specified file or directory
|
||||
// vvet runs v vet on the specified file or directory
// A single file is vetted directly; a directory has every .v file vetted,
// with the per-file reports concatenated using a separator line.
pub fn vvet(fullpath string) !string {
	logger.info('vet ${fullpath}')
	if !os.exists(fullpath) {
		return error('File or directory does not exist: ${fullpath}')
	}

	if !os.is_dir(fullpath) {
		return vet_file(fullpath)
	}

	mut results := ''
	files := list_v_files(fullpath) or { return error('Error listing V files: ${err}') }
	for file in files {
		results += vet_file(file) or {
			logger.error('Failed to vet ${file}: ${err}')
			return error('Failed to vet ${file}: ${err}')
		}
		results += '\n-----------------------\n'
	}
	return results
}
|
||||
|
||||
// vet_file runs v vet on a single file
|
||||
// vet_file runs v vet on a single file and returns the command report,
// or an error when the vet command exits non-zero.
fn vet_file(file string) !string {
	cmd := 'v vet -v -w ${file}'
	logger.debug('Executing command: ${cmd}')
	result := os.execute(cmd)
	if result.exit_code == 0 {
		logger.info('Vet completed for ${file}')
	} else {
		return error('Vet failed for ${file} with exit code ${result.exit_code}\n${result.output}')
	}
	return 'Command: ${cmd}\nExit code: ${result.exit_code}\nOutput:\n${result.output}'
}
|
||||
|
||||
// cmd := 'v -gc none -stats -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc ${fullpath}'
|
||||
100
TOSORT/developer/vlang_test.v
Normal file
100
TOSORT/developer/vlang_test.v
Normal file
@@ -0,0 +1,100 @@
|
||||
module developer
|
||||
|
||||
import os
|
||||
|
||||
// Test file for the get_type_from_module function in vlang.v
|
||||
|
||||
// This test verifies that the get_type_from_module function correctly extracts
|
||||
// struct definitions from V source files
|
||||
|
||||
// Helper function to create test files with struct definitions
|
||||
// Helper function to create test files with struct definitions.
// Writes the fixtures into a dedicated sub-directory of the system temp dir:
// writing straight into os.temp_dir() (as before) let any stray .v file
// already present in the global temp dir leak into the module scan done by
// get_type_from_module, making the test flaky.
// Returns (test_dir, test_file_path, nested_test_file).
fn create_test_files() !(string, string, string) {
	// Dedicated directory keeps the module scan deterministic.
	test_dir := os.join_path(os.temp_dir(), 'get_type_from_module_test')
	os.mkdir_all(test_dir) or { return error('Failed to create test dir ${test_dir}: ${err}') }
	test_file_path := os.join_path(test_dir, 'test_type.v')

	// Create a test file with a simple struct
	test_content := 'module test_module

struct TestType {
	name string
	age int
	active bool
}

// Another struct to make sure we get the right one
struct OtherType {
	id string
}
'
	os.write_file(test_file_path, test_content) or {
		eprintln('Failed to create test file: ${err}')
		return error('Failed to create test file: ${err}')
	}

	// Create a test file with a nested struct
	nested_test_content := 'module test_module

struct NestedType {
	config map[string]string {
		required: true
	}
	data []struct {
		key string
		value string
	}
}
'
	nested_test_file := os.join_path(test_dir, 'nested_test.v')
	os.write_file(nested_test_file, nested_test_content) or {
		eprintln('Failed to create nested test file: ${err}')
		return error('Failed to create nested test file: ${err}')
	}

	return test_dir, test_file_path, nested_test_file
}
|
||||
|
||||
// Test function for get_type_from_module
|
||||
// Test function for get_type_from_module (the implementation in vlang.v).
//
// Bug fix: the expected strings were copied from the standalone extractor,
// which returns only the struct BODY ('\n\tname string...'). The module's
// get_type_from_module returns the FULL definition starting at the
// declaration line (content.substr(line_start, closing_i + 1)), so the old
// assertions could never pass. The expectations now include the
// 'struct X {' declaration line.
fn test_get_type_from_module() {
	// Create test files
	test_dir, test_file_path, nested_test_file := create_test_files() or {
		eprintln('Failed to create test files: ${err}')
		assert false
		return
	}

	// Test case 1: Get a simple struct (full definition, declaration included)
	type_content := get_type_from_module(test_dir, 'TestType') or {
		eprintln('Failed to get type: ${err}')
		assert false
		return
	}

	// Verify the content matches what we expect
	expected := 'struct TestType {\n\tname string\n\tage int\n\tactive bool\n}'
	assert type_content == expected, 'Expected: "${expected}", got: "${type_content}"'

	// Test case 2: Try to get a non-existent type
	non_existent := get_type_from_module(test_dir, 'NonExistentType') or {
		// This should fail, so we expect an error
		assert err.str().contains('not found in module'), 'Expected error message about type not found'
		''
	}
	assert non_existent == '', 'Expected empty string for non-existent type'

	// Test case 3: Test with nested braces in the struct
	nested_type_content := get_type_from_module(test_dir, 'NestedType') or {
		eprintln('Failed to get nested type: ${err}')
		assert false
		return
	}

	expected_nested := 'struct NestedType {\n\tconfig map[string]string {\n\t\trequired: true\n\t}\n\tdata []struct {\n\t\tkey string\n\t\tvalue string\n\t}\n}'
	assert nested_type_content == expected_nested, 'Expected: "${expected_nested}", got: "${nested_type_content}"'

	// Clean up test files
	os.rm(test_file_path) or {}
	os.rm(nested_test_file) or {}

	println('All tests for get_type_from_module passed successfully!')
}
|
||||
34
TOSORT/developer/vlang_tools.v
Normal file
34
TOSORT/developer/vlang_tools.v
Normal file
@@ -0,0 +1,34 @@
|
||||
module developer
|
||||
|
||||
import freeflowuniverse.herolib.mcp
|
||||
|
||||
// MCP tool descriptor for the get_function_from_file helper.
// NOTE(review): the description string is surfaced verbatim to MCP clients;
// keep it in sync with the doc comment on get_function_from_file in vlang.v.
const get_function_from_file_tool = mcp.Tool{
	name: 'get_function_from_file'
	description: 'get_function_from_file parses a V file and extracts a specific function block including its comments
ARGS:
file_path string - path to the V file
function_name string - name of the function to extract
RETURNS: string - the function block including comments, or empty string if not found'
	input_schema: mcp.ToolInputSchema{
		typ: 'object'
		properties: {
			// Path of the V source file to parse.
			'file_path': mcp.ToolProperty{
				typ: 'string'
				items: mcp.ToolItems{
					typ: ''
					enum: []
				}
				enum: []
			}
			// Name of the function to extract from that file.
			'function_name': mcp.ToolProperty{
				typ: 'string'
				items: mcp.ToolItems{
					typ: ''
					enum: []
				}
				enum: []
			}
		}
		// Both arguments are mandatory.
		required: ['file_path', 'function_name']
	}
}
||||
@@ -3,7 +3,7 @@
|
||||
import as
|
||||
|
||||
```vlang
|
||||
import freeflowuniverse.herolib.osal
|
||||
import freeflowuniverse.osal
|
||||
|
||||
osal.ping...
|
||||
|
||||
@@ -70,14 +70,14 @@ mut pm:=process.processmap_get()?
|
||||
info returns like:
|
||||
|
||||
```json
|
||||
}, freeflowuniverse.herolib.process.ProcessInfo{
|
||||
}, freeflowuniverse.process.ProcessInfo{
|
||||
cpu_perc: 0
|
||||
mem_perc: 0
|
||||
cmd: 'mc'
|
||||
pid: 84455
|
||||
ppid: 84467
|
||||
rss: 3168
|
||||
}, freeflowuniverse.herolib.process.ProcessInfo{
|
||||
}, freeflowuniverse.process.ProcessInfo{
|
||||
cpu_perc: 0
|
||||
mem_perc: 0
|
||||
cmd: 'zsh -Z -g'
|
||||
@@ -195,13 +195,13 @@ fn initname() !string
|
||||
e.g. systemd, bash, zinit
|
||||
fn ipaddr_pub_get() !string
|
||||
Returns the ipaddress as known on the public side is using resolver4.opendns.com
|
||||
fn is_linux() bool
|
||||
fn is_linux()! bool
|
||||
fn is_linux_arm()! bool
|
||||
fn is_linux_intel() bool
|
||||
fn is_osx() bool
|
||||
fn is_osx_arm() bool
|
||||
fn is_osx_intel() bool
|
||||
fn is_ubuntu() bool
|
||||
fn is_linux_intel()! bool
|
||||
fn is_osx()! bool
|
||||
fn is_osx_arm()! bool
|
||||
fn is_osx_intel()! bool
|
||||
fn is_ubuntu()! bool
|
||||
fn load_env_file(file_path string) !
|
||||
fn memdb_exists(key string) bool
|
||||
fn memdb_get(key string) string
|
||||
|
||||
39
aiprompts/ai_instruct/generate_player_for_models.md
Normal file
39
aiprompts/ai_instruct/generate_player_for_models.md
Normal file
@@ -0,0 +1,39 @@
|
||||
generate specs for /Users/despiegk/code/github/freeflowuniverse/herolib/lib/circles/actions
|
||||
|
||||
use mcp
|
||||
|
||||
put the output of it in actions/specs.v
|
||||
|
||||
then use these specs.v
|
||||
|
||||
to generate play command instructions see @3_heroscript_vlang.md
|
||||
|
||||
this play command gets heroscript in and will then call the methods for actions as are ONLY in @lib/circles/actions/db
|
||||
|
||||
so the play only calls the methods in @lib/circles/actions/db
|
||||
|
||||
|
||||
# put the play commands in
|
||||
|
||||
/Users/despiegk/code/github/freeflowuniverse/herolib/lib/circles/actions/play
|
||||
|
||||
do one file in the module per action
|
||||
|
||||
each method is an action
|
||||
|
||||
put them all on one Struct called Player
|
||||
in this Player we have a method per action
|
||||
|
||||
Player has a property called actor: which is the name of the actor as is used in the heroscript
|
||||
Player has also a output called return format which is enum for heroscript or json
|
||||
|
||||
input of the method - action is a params object
|
||||
|
||||
on player there is a method play which takes the text as input or playbook
|
||||
|
||||
if text then playbook is created
|
||||
|
||||
then we walk over all actions
|
||||
|
||||
all the ones starting with actions in this case are given to the right method
|
||||
|
||||
15
aiprompts/code/opeapi.md
Normal file
15
aiprompts/code/opeapi.md
Normal file
@@ -0,0 +1,15 @@
|
||||
for @lib/circles/mcc
|
||||
|
||||
generate openapi 3.1 spec
|
||||
do it as one file called openapi.yaml and put in the dir as mentioned above
|
||||
|
||||
based on the models and db implementation
|
||||
|
||||
implement well chosen examples in the openapi spec
|
||||
|
||||
note: in OpenAPI 3.1.0, the example property is deprecated in favor of examples
|
||||
|
||||
do this for the models & methods as defined below
|
||||
|
||||
do it also for the custom and generic methods, don't forget any
|
||||
|
||||
197
aiprompts/code/opeapi_full.md
Normal file
197
aiprompts/code/opeapi_full.md
Normal file
@@ -0,0 +1,197 @@
|
||||
in @lib/circles/mcc
|
||||
generate openapi 3.1 spec
|
||||
based on the models and db implementation
|
||||
|
||||
implement well chosen examples in the openapi spec
|
||||
|
||||
note: in OpenAPI 3.1.0, the example property is deprecated in favor of examples.
|
||||
|
||||
do this for the models & methods as defined below
|
||||
|
||||
do it for custom and generic methods, don't forget any
|
||||
|
||||
```v
|
||||
|
||||
// CalendarEvent represents a calendar event with all its properties
|
||||
pub struct CalendarEvent {
|
||||
pub mut:
|
||||
id u32 // Unique identifier
|
||||
title string // Event title
|
||||
description string // Event details
|
||||
location string // Event location
|
||||
start_time ourtime.OurTime
|
||||
end_time ourtime.OurTime // End time
|
||||
all_day bool // True if it's an all-day event
|
||||
recurrence string // RFC 5545 Recurrence Rule (e.g., "FREQ=DAILY;COUNT=10")
|
||||
attendees []string // List of emails or user IDs
|
||||
organizer string // Organizer email
|
||||
status string // "CONFIRMED", "CANCELLED", "TENTATIVE"
|
||||
caldav_uid string // CalDAV UID for syncing
|
||||
sync_token string // Sync token for tracking changes
|
||||
etag string // ETag for caching
|
||||
color string // User-friendly color categorization
|
||||
}
|
||||
|
||||
|
||||
// Email represents an email message with all its metadata and content
|
||||
pub struct Email {
|
||||
pub mut:
|
||||
// Database ID
|
||||
id u32 // Database ID (assigned by DBHandler)
|
||||
// Content fields
|
||||
uid u32 // Unique identifier of the message (in the circle)
|
||||
seq_num u32 // IMAP sequence number (in the mailbox)
|
||||
mailbox string // The mailbox this email belongs to
|
||||
message string // The email body content
|
||||
attachments []Attachment // Any file attachments
|
||||
|
||||
// IMAP specific fields
|
||||
flags []string // IMAP flags like \Seen, \Deleted, etc.
|
||||
internal_date i64 // Unix timestamp when the email was received
|
||||
size u32 // Size of the message in bytes
|
||||
envelope ?Envelope // IMAP envelope information (contains From, To, Subject, etc.)
|
||||
}
|
||||
|
||||
// Attachment represents an email attachment
|
||||
pub struct Attachment {
|
||||
pub mut:
|
||||
filename string
|
||||
content_type string
|
||||
data string // Base64 encoded binary data
|
||||
}
|
||||
|
||||
// Envelope represents an IMAP envelope structure
|
||||
pub struct Envelope {
|
||||
pub mut:
|
||||
date i64
|
||||
subject string
|
||||
from []string
|
||||
sender []string
|
||||
reply_to []string
|
||||
to []string
|
||||
cc []string
|
||||
bcc []string
|
||||
in_reply_to string
|
||||
message_id string
|
||||
}
|
||||
```
|
||||
|
||||
methods
|
||||
|
||||
```v
|
||||
pub fn (mut m MailDB) new() Email {
|
||||
}
|
||||
|
||||
// set adds or updates an email
|
||||
pub fn (mut m MailDB) set(email Email) !Email {
|
||||
}
|
||||
|
||||
// get retrieves an email by its ID
|
||||
pub fn (mut m MailDB) get(id u32) !Email {
|
||||
}
|
||||
|
||||
// list returns all email IDs
|
||||
pub fn (mut m MailDB) list() ![]u32 {
|
||||
}
|
||||
|
||||
pub fn (mut m MailDB) getall() ![]Email {
|
||||
}
|
||||
|
||||
// delete removes an email by its ID
|
||||
pub fn (mut m MailDB) delete(id u32) ! {
|
||||
}
|
||||
|
||||
//////////////////CUSTOM METHODS//////////////////////////////////
|
||||
|
||||
// get_by_uid retrieves an email by its UID
|
||||
pub fn (mut m MailDB) get_by_uid(uid u32) !Email {
|
||||
}
|
||||
|
||||
// get_by_mailbox retrieves all emails in a specific mailbox
|
||||
pub fn (mut m MailDB) get_by_mailbox(mailbox string) ![]Email {
|
||||
}
|
||||
|
||||
// delete_by_uid removes an email by its UID
|
||||
pub fn (mut m MailDB) delete_by_uid(uid u32) ! {
|
||||
}
|
||||
|
||||
// delete_by_mailbox removes all emails in a specific mailbox
|
||||
pub fn (mut m MailDB) delete_by_mailbox(mailbox string) ! {
|
||||
}
|
||||
|
||||
// update_flags updates the flags of an email
|
||||
pub fn (mut m MailDB) update_flags(uid u32, flags []string) !Email {
|
||||
}
|
||||
|
||||
// search_by_subject searches for emails with a specific subject substring
|
||||
pub fn (mut m MailDB) search_by_subject(subject string) ![]Email {
|
||||
}
|
||||
|
||||
// search_by_address searches for emails with a specific email address in from, to, cc, or bcc fields
|
||||
pub fn (mut m MailDB) search_by_address(address string) ![]Email {
|
||||
}
|
||||
|
||||
pub fn (mut c CalendarDB) new() CalendarEvent {
|
||||
CalendarEvent {}
|
||||
}
|
||||
|
||||
// set adds or updates a calendar event
|
||||
pub fn (mut c CalendarDB) set(event CalendarEvent) CalendarEvent {
|
||||
CalendarEvent {}
|
||||
}
|
||||
|
||||
// get retrieves a calendar event by its ID
|
||||
pub fn (mut c CalendarDB) get(id u32) CalendarEvent {
|
||||
CalendarEvent {}
|
||||
}
|
||||
|
||||
// list returns all calendar event IDs
|
||||
pub fn (mut c CalendarDB) list() []u32 {
|
||||
[]
|
||||
}
|
||||
|
||||
pub fn (mut c CalendarDB) getall() []CalendarEvent {
|
||||
[]
|
||||
}
|
||||
|
||||
// delete removes a calendar event by its ID
|
||||
pub fn (mut c CalendarDB) delete(id u32) {
|
||||
}
|
||||
|
||||
//////////////////CUSTOM METHODS//////////////////////////////////
|
||||
|
||||
// get_by_caldav_uid retrieves a calendar event by its CalDAV UID
|
||||
pub fn (mut c CalendarDB) get_by_caldav_uid(caldav_uid String) CalendarEvent {
|
||||
CalendarEvent {}
|
||||
}
|
||||
|
||||
// get_events_by_date retrieves all events that occur on a specific date
|
||||
pub fn (mut c CalendarDB) get_events_by_date(date String) []CalendarEvent {
|
||||
[]
|
||||
}
|
||||
|
||||
// get_events_by_organizer retrieves all events organized by a specific person
|
||||
pub fn (mut c CalendarDB) get_events_by_organizer(organizer String) []CalendarEvent {
|
||||
[]
|
||||
}
|
||||
|
||||
// get_events_by_attendee retrieves all events that a specific person is attending
|
||||
pub fn (mut c CalendarDB) get_events_by_attendee(attendee String) []CalendarEvent {
|
||||
[]
|
||||
}
|
||||
|
||||
// search_events_by_title searches for events with a specific title substring
|
||||
pub fn (mut c CalendarDB) search_events_by_title(title String) []CalendarEvent {
|
||||
[]
|
||||
}
|
||||
|
||||
// update_status updates the status of an event
|
||||
pub fn (mut c CalendarDB) update_status(id u32, status String) CalendarEvent {
|
||||
CalendarEvent {}
|
||||
}
|
||||
|
||||
// delete_by_caldav_uid removes an event by its CalDAV UID
|
||||
pub fn (mut c CalendarDB) delete_by_caldav_uid(caldav_uid String) {
|
||||
}
|
||||
|
||||
```
|
||||
26
aiprompts/code/vfs.md
Normal file
26
aiprompts/code/vfs.md
Normal file
@@ -0,0 +1,26 @@
|
||||
|
||||
create a module vfs_mail in @lib/vfs
|
||||
check the interface as defined in @lib/vfs/interface.v and @metadata.v
|
||||
|
||||
see example how a vfs is made in @lib/vfs/vfs_local
|
||||
|
||||
create the vfs to represent mail objects in @lib/circles/dbs/core/mail_db.v
|
||||
|
||||
the mailbox property on the Email object defines the path in the vfs
|
||||
this mailbox property can be e.g. Draft/something/somethingelse
|
||||
|
||||
in that dir show a subdir /id:
|
||||
- which shows the Email as JSON underneath ${email.id}.json
|
||||
|
||||
in that dir show subdir /subject:
|
||||
- which shows the Email as JSON underneath name_fix(${email.envelope.subject}).json
|
||||
|
||||
so basically we have 2 representations of the same mail in the vfs; both have the JSON as the content of the file
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
14
aiprompts/env.md
Normal file
14
aiprompts/env.md
Normal file
@@ -0,0 +1,14 @@
|
||||
## Environment Variables
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.osal
|
||||
|
||||
// Get environment variable
|
||||
value := osal.env_get('PATH')!
|
||||
|
||||
// Set environment variable
|
||||
osal.env_set('MY_VAR', 'value')!
|
||||
|
||||
// Check if environment variable exists
|
||||
exists := osal.env_exists('MY_VAR')
|
||||
```
|
||||
187
aiprompts/reflection.md
Normal file
187
aiprompts/reflection.md
Normal file
@@ -0,0 +1,187 @@
|
||||
## Compile time reflection
|
||||
|
||||
$ is used as a prefix for compile time (also referred to as 'comptime') operations.
|
||||
|
||||
Having built-in JSON support is nice, but V also allows you to create efficient serializers for any data format. V has compile time if and for constructs:
|
||||
|
||||
.fields
|
||||
You can iterate over struct fields using .fields, it also works with generic types (e.g. T.fields) and generic arguments (e.g. param.fields where fn gen[T](param T) {).
|
||||
|
||||
struct User {
|
||||
name string
|
||||
age int
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for field in User.fields {
|
||||
$if field.typ is string {
|
||||
println('${field.name} is of type string')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// name is of type string
|
||||
.values
|
||||
You can read Enum values and their attributes.
|
||||
|
||||
enum Color {
|
||||
red @[RED] // first attribute
|
||||
blue @[BLUE] // second attribute
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for e in Color.values {
|
||||
println(e.name)
|
||||
println(e.attrs)
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// red
|
||||
// ['RED']
|
||||
// blue
|
||||
// ['BLUE']
|
||||
.attributes
|
||||
You can read Struct attributes.
|
||||
|
||||
@[COLOR]
|
||||
struct Foo {
|
||||
a int
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for e in Foo.attributes {
|
||||
println(e)
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// StructAttribute{
|
||||
// name: 'COLOR'
|
||||
// has_arg: false
|
||||
// arg: ''
|
||||
// kind: plain
|
||||
// }
|
||||
.variants
|
||||
You can read variant types from Sum type.
|
||||
|
||||
type MySum = int | string
|
||||
|
||||
fn main() {
|
||||
$for v in MySum.variants {
|
||||
$if v.typ is int {
|
||||
println('has int type')
|
||||
} $else $if v.typ is string {
|
||||
println('has string type')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// has int type
|
||||
// has string type
|
||||
.methods
|
||||
You can retrieve information about struct methods.
|
||||
|
||||
struct Foo {
|
||||
}
|
||||
|
||||
fn (f Foo) test() int {
|
||||
return 123
|
||||
}
|
||||
|
||||
fn (f Foo) test2() string {
|
||||
return 'foo'
|
||||
}
|
||||
|
||||
fn main() {
|
||||
foo := Foo{}
|
||||
$for m in Foo.methods {
|
||||
$if m.return_type is int {
|
||||
print('${m.name} returns int: ')
|
||||
println(foo.$method())
|
||||
} $else $if m.return_type is string {
|
||||
print('${m.name} returns string: ')
|
||||
println(foo.$method())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// test returns int: 123
|
||||
// test2 returns string: foo
|
||||
.params
|
||||
You can retrieve information about struct method params.
|
||||
|
||||
struct Test {
|
||||
}
|
||||
|
||||
fn (t Test) foo(arg1 int, arg2 string) {
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for m in Test.methods {
|
||||
$for param in m.params {
|
||||
println('${typeof(param.typ).name}: ${param.name}')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// int: arg1
|
||||
// string: arg2
|
||||
|
||||
## Example
|
||||
|
||||
```v
|
||||
// An example deserializer implementation
|
||||
|
||||
struct User {
|
||||
name string
|
||||
age int
|
||||
}
|
||||
|
||||
fn main() {
|
||||
data := 'name=Alice\nage=18'
|
||||
user := decode[User](data)
|
||||
println(user)
|
||||
}
|
||||
|
||||
fn decode[T](data string) T {
|
||||
mut result := T{}
|
||||
// compile-time `for` loop
|
||||
// T.fields gives an array of a field metadata type
|
||||
$for field in T.fields {
|
||||
$if field.typ is string {
|
||||
// $(string_expr) produces an identifier
|
||||
result.$(field.name) = get_string(data, field.name)
|
||||
} $else $if field.typ is int {
|
||||
result.$(field.name) = get_int(data, field.name)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
fn get_string(data string, field_name string) string {
|
||||
for line in data.split_into_lines() {
|
||||
key_val := line.split('=')
|
||||
if key_val[0] == field_name {
|
||||
return key_val[1]
|
||||
}
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
fn get_int(data string, field string) int {
|
||||
return get_string(data, field).int()
|
||||
}
|
||||
|
||||
// `decode<User>` generates:
|
||||
// fn decode_User(data string) User {
|
||||
// mut result := User{}
|
||||
// result.name = get_string(data, 'name')
|
||||
// result.age = get_int(data, 'age')
|
||||
// return result
|
||||
// }
|
||||
```
|
||||
78
aiprompts/starter/1_heroscript.md
Normal file
78
aiprompts/starter/1_heroscript.md
Normal file
@@ -0,0 +1,78 @@
|
||||
# HeroScript
|
||||
|
||||
## Overview
|
||||
|
||||
HeroScript is a simple, declarative scripting language designed to define workflows and execute commands in a structured manner. It follows a straightforward syntax where each action is prefixed with `!!`, indicating the actor and action name.
|
||||
|
||||
## Example
|
||||
|
||||
A basic HeroScript script for virtual machine management looks like this:
|
||||
|
||||
```heroscript
|
||||
!!vm.define name:'test_vm' cpu:4
|
||||
memory: '8GB'
|
||||
storage: '100GB'
|
||||
description: '
|
||||
A virtual machine configuration
|
||||
with specific resources.
|
||||
'
|
||||
|
||||
!!vm.start name:'test_vm'
|
||||
|
||||
!!vm.disk_add
|
||||
name: 'test_vm'
|
||||
size: '50GB'
|
||||
type: 'SSD'
|
||||
|
||||
!!vm.delete
|
||||
name: 'test_vm'
|
||||
force: true
|
||||
```
|
||||
|
||||
### Key Features
|
||||
|
||||
- Every action starts with `!!`.
|
||||
- The first part after `!!` is the actor (e.g., `vm`).
|
||||
- The second part is the action name (e.g., `define`, `start`, `delete`).
|
||||
- Multi-line values are supported (e.g., the `description` field).
|
||||
- Lists are comma-separated where applicable and inside ''.
|
||||
- If items one 1 line, then no space between name & argument e.g. name:'test_vm'
|
||||
|
||||
## Parsing HeroScript
|
||||
|
||||
Internally, HeroScript gets parsed into an action object with parameters. Each parameter follows a `key: value` format.
|
||||
|
||||
### Parsing Example
|
||||
|
||||
```heroscript
|
||||
!!actor.action
|
||||
id:a1 name6:aaaaa
|
||||
name:'need to do something 1'
|
||||
description:
|
||||
'
|
||||
## markdown works in it
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
|
||||
name2: test
|
||||
name3: hi
|
||||
name10:'this is with space' name11:aaa11
|
||||
|
||||
name4: 'aaa'
|
||||
|
||||
//somecomment
|
||||
name5: 'aab'
|
||||
```
|
||||
|
||||
### Parsing Details
|
||||
- Each parameter follows a `key: value` format.
|
||||
- Multi-line values (such as descriptions) support Markdown formatting.
|
||||
- Comments can be added using `//`.
|
||||
- Keys and values can have spaces, and values can be enclosed in single quotes.
|
||||
|
||||
@@ -1,45 +1,3 @@
|
||||
# how to work with heroscript in vlang
|
||||
|
||||
## heroscript
|
||||
|
||||
Heroscript is our small scripting language which has following structure
|
||||
|
||||
an example of a heroscript is
|
||||
|
||||
```heroscript
|
||||
|
||||
!!dagu.script_define
|
||||
name: 'test_dag'
|
||||
homedir:''
|
||||
title:'a title'
|
||||
reset:1
|
||||
start:true //trie or 1 is same
|
||||
colors: 'green,red,purple' //lists are comma separated
|
||||
description: '
|
||||
a description can be multiline
|
||||
|
||||
like this
|
||||
'
|
||||
|
||||
|
||||
!!dagu.add_step
|
||||
dag: 'test_dag'
|
||||
name: 'hello_world'
|
||||
command: 'echo hello world'
|
||||
|
||||
!!dagu.add_step
|
||||
dag: 'test_dag'
|
||||
name: 'last_step'
|
||||
command: 'echo last step'
|
||||
|
||||
|
||||
```
|
||||
|
||||
Notice how:
|
||||
- every action starts with !!
|
||||
- the first part is the actor e.g. dagu in this case
|
||||
- the 2e part is the action name
|
||||
- multilines are supported see the description field
|
||||
|
||||
## how to process heroscript in Vlang
|
||||
|
||||
3
cli/.gitignore
vendored
3
cli/.gitignore
vendored
@@ -1 +1,4 @@
|
||||
hero
|
||||
compile
|
||||
compile_upload
|
||||
vdo
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env -S v -n -w -parallel-cc -enable-globals run
|
||||
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run
|
||||
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import os
|
||||
import flag
|
||||
|
||||
77
cli/compile_vdo.vsh
Executable file
77
cli/compile_vdo.vsh
Executable file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run
|
||||
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import os
|
||||
import flag
|
||||
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application('compile_vdo.vsh')
|
||||
fp.version('v0.1.0')
|
||||
fp.description('Compile vdo binary in debug or production mode')
|
||||
fp.skip_executable()
|
||||
|
||||
prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||
|
||||
if help_requested {
|
||||
println(fp.usage())
|
||||
exit(0)
|
||||
}
|
||||
|
||||
additional_args := fp.finalize() or {
|
||||
eprintln(err)
|
||||
println(fp.usage())
|
||||
exit(1)
|
||||
}
|
||||
|
||||
if additional_args.len > 0 {
|
||||
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
|
||||
println(fp.usage())
|
||||
exit(1)
|
||||
}
|
||||
|
||||
// Change to the vdo directory
|
||||
hero_dir := os.join_path(os.home_dir(), 'code/github/freeflowuniverse/herolib/cli')
|
||||
os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}') }
|
||||
|
||||
// Set HEROPATH based on OS
|
||||
mut heropath := '/usr/local/bin/vdo'
|
||||
if os.user_os() == 'macos' {
|
||||
heropath = os.join_path(os.home_dir(), 'hero/bin/vdo')
|
||||
}
|
||||
|
||||
// Set compilation command based on OS and mode
|
||||
compile_cmd := if os.user_os() == 'macos' {
|
||||
if prod_mode {
|
||||
'v -enable-globals -w -n -prod vdo.v'
|
||||
} else {
|
||||
'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals vdo.v'
|
||||
}
|
||||
} else {
|
||||
if prod_mode {
|
||||
'v -cg -enable-globals -parallel-cc -w -n vdo.v'
|
||||
} else {
|
||||
'v -cg -enable-globals -w -n vdo.v'
|
||||
}
|
||||
}
|
||||
|
||||
println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
|
||||
|
||||
if os.system(compile_cmd) != 0 {
|
||||
panic('Failed to compile vdo.v with command: ${compile_cmd}')
|
||||
}
|
||||
|
||||
// Make executable
|
||||
os.chmod('vdo', 0o755) or { panic('Failed to make vdo binary executable: ${err}') }
|
||||
|
||||
// Ensure destination directory exists
|
||||
os.mkdir_all(os.dir(heropath)) or { panic('Failed to create directory ${os.dir(heropath)}: ${err}') }
|
||||
println(heropath)
|
||||
// Copy to destination paths
|
||||
os.cp('vdo', heropath) or { panic('Failed to copy vdo binary to ${heropath}: ${err}') }
|
||||
os.cp('vdo', '/tmp/vdo') or { panic('Failed to copy vdo binary to /tmp/vdo: ${err}') }
|
||||
|
||||
// Clean up
|
||||
os.rm('vdo') or { panic('Failed to remove temporary vdo binary: ${err}') }
|
||||
|
||||
println('**COMPILE OK**')
|
||||
25
cli/hero.v
25
cli/hero.v
@@ -19,6 +19,26 @@ fn playcmds_do(path string) ! {
|
||||
}
|
||||
|
||||
fn do() ! {
|
||||
|
||||
if ! core.is_osx()! {
|
||||
if os.getenv('SUDO_COMMAND') != '' || os.getenv('SUDO_USER') != '' {
|
||||
println('Error: Please do not run this program with sudo!')
|
||||
exit(1) // Exit with error code
|
||||
}
|
||||
}
|
||||
|
||||
if os.getuid() == 0 {
|
||||
if core.is_osx()! {
|
||||
eprintln("please do not run hero as root in osx.")
|
||||
exit(1)
|
||||
}
|
||||
} else {
|
||||
if ! core.is_osx()! {
|
||||
eprintln("please do run hero as root, don't use sudo.")
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
if os.args.len == 2 {
|
||||
mypath := os.args[1]
|
||||
if mypath.to_lower().ends_with('.hero') {
|
||||
@@ -31,7 +51,7 @@ fn do() ! {
|
||||
mut cmd := Command{
|
||||
name: 'hero'
|
||||
description: 'Your HERO toolset.'
|
||||
version: '1.0.3'
|
||||
version: '1.0.22'
|
||||
}
|
||||
|
||||
// herocmds.cmd_run_add_flags(mut cmd)
|
||||
@@ -82,6 +102,7 @@ fn do() ! {
|
||||
// herocmds.cmd_juggler(mut cmd)
|
||||
herocmds.cmd_generator(mut cmd)
|
||||
herocmds.cmd_docusaurus(mut cmd)
|
||||
herocmds.cmd_starlight(mut cmd)
|
||||
// herocmds.cmd_docsorter(mut cmd)
|
||||
// cmd.add_command(publishing.cmd_publisher(pre_func))
|
||||
cmd.setup()
|
||||
@@ -94,4 +115,4 @@ fn main() {
|
||||
|
||||
fn pre_func(cmd Command) ! {
|
||||
herocmds.plbook_run(cmd)!
|
||||
}
|
||||
}
|
||||
12
cli/vdo.v
Normal file
12
cli/vdo.v
Normal file
@@ -0,0 +1,12 @@
|
||||
module main
|
||||
|
||||
import freeflowuniverse.herolib.mcp.v_do
|
||||
|
||||
fn main() {
|
||||
// Create and start the MCP server
|
||||
mut server := v_do.new_server()
|
||||
server.start() or {
|
||||
eprintln('Error starting server: $err')
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
480
compile.sh
Executable file
480
compile.sh
Executable file
@@ -0,0 +1,480 @@
|
||||
#!/bin/bash
|
||||
# compile.sh - Script to compile each module in the herolib/lib directory
|
||||
# This script compiles each module in the lib directory to ensure they build correctly
|
||||
|
||||
set -e # Exit on error
|
||||
|
||||
# Default settings
|
||||
CONCURRENT=false
|
||||
MAX_JOBS=4 # Default number of concurrent jobs
|
||||
|
||||
# Parse command line arguments
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
-c|--concurrent)
|
||||
CONCURRENT=true
|
||||
shift
|
||||
;;
|
||||
-j|--jobs)
|
||||
MAX_JOBS="$2"
|
||||
shift 2
|
||||
;;
|
||||
-h|--help)
|
||||
echo "Usage: $0 [options]"
|
||||
echo "Options:"
|
||||
echo " -c, --concurrent Enable concurrent compilation"
|
||||
echo " -j, --jobs N Set maximum number of concurrent jobs (default: 4)"
|
||||
echo " -h, --help Show this help message"
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo "Unknown option: $1"
|
||||
echo "Use -h or --help for usage information"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Color codes for output
|
||||
GREEN='\033[0;32m'
|
||||
RED='\033[0;31m'
|
||||
YELLOW='\033[0;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Get the directory of this script
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
LIB_DIR="$SCRIPT_DIR/lib"
|
||||
|
||||
# V compiler flags based on the project's test script
|
||||
V_FLAGS="-stats -enable-globals -n -w -gc none -d use_openssl -shared"
|
||||
|
||||
# Log file for compilation results
|
||||
LOG_FILE="$SCRIPT_DIR/compile_results.log"
|
||||
> "$LOG_FILE" # Clear log file
|
||||
|
||||
# Summary log file
|
||||
SUMMARY_FILE="$SCRIPT_DIR/compile_summary.log"
|
||||
> "$SUMMARY_FILE" # Clear summary file
|
||||
|
||||
# Cache directory for storing timestamps of last successful compilation
|
||||
CACHE_DIR="$SCRIPT_DIR/.compile_cache"
|
||||
mkdir -p "$CACHE_DIR"
|
||||
|
||||
# Create temporary directory for compiled binaries
|
||||
mkdir -p "$SCRIPT_DIR/tmp"
|
||||
|
||||
# Create a directory for temporary output files
|
||||
TEMP_DIR="$SCRIPT_DIR/.temp_compile"
|
||||
mkdir -p "$TEMP_DIR"
|
||||
|
||||
# Trap for cleaning up on exit
|
||||
cleanup() {
|
||||
echo "Cleaning up..."
|
||||
# Kill any remaining child processes
|
||||
jobs -p | xargs kill -9 2>/dev/null || true
|
||||
# Remove temporary directories
|
||||
rm -rf "$TEMP_DIR" "$SCRIPT_DIR/tmp" 2>/dev/null || true
|
||||
exit 0
|
||||
}
|
||||
|
||||
# Set up traps for various signals
|
||||
trap cleanup EXIT INT TERM
|
||||
|
||||
# Define modules to skip entirely due to known compilation issues
|
||||
SKIP_MODULES=("flist" "openai" "mycelium" "vastai" "rclone" "sendgrid" "mailclient" "ipapi" "runpod" "postgresql_client" "meilisearch" "livekit" "wireguard" "_archive" "clients")
|
||||
|
||||
# Function to check if a module should be skipped
|
||||
should_skip_module() {
|
||||
local module_name="$1"
|
||||
|
||||
for skip_module in "${SKIP_MODULES[@]}"; do
|
||||
if [[ "$module_name" == "$skip_module" ]]; then
|
||||
return 0 # true, should skip
|
||||
fi
|
||||
done
|
||||
|
||||
return 1 # false, should not skip
|
||||
}
|
||||
|
||||
# Function to check if a module needs recompilation
|
||||
needs_module_recompilation() {
|
||||
local module_path="$1"
|
||||
local module_name="$(basename "$module_path")"
|
||||
local cache_file="$CACHE_DIR/$module_name.timestamp"
|
||||
|
||||
# If cache file doesn't exist, module needs recompilation
|
||||
if [ ! -f "$cache_file" ]; then
|
||||
return 0 # true, needs recompilation
|
||||
fi
|
||||
|
||||
# Check if any .v file in the module is newer than the last compilation
|
||||
if find "$module_path" -name "*.v" -type f -newer "$cache_file" | grep -q .; then
|
||||
return 0 # true, needs recompilation
|
||||
fi
|
||||
|
||||
return 1 # false, doesn't need recompilation
|
||||
}
|
||||
|
||||
# Function to update the cache timestamp for a module
|
||||
update_module_cache() {
|
||||
local module_path="$1"
|
||||
local module_name="$(basename "$module_path")"
|
||||
local cache_file="$CACHE_DIR/$module_name.timestamp"
|
||||
|
||||
# Update the timestamp
|
||||
touch "$cache_file"
|
||||
}
|
||||
|
||||
# Function to check if a directory is a module (contains .v files directly, not just in subdirectories)
|
||||
is_module() {
|
||||
local dir_path="$1"
|
||||
|
||||
# Check if there are any .v files directly in this directory (not in subdirectories)
|
||||
if [ -n "$(find "$dir_path" -maxdepth 1 -name "*.v" -type f -print -quit)" ]; then
|
||||
return 0 # true, is a module
|
||||
fi
|
||||
|
||||
return 1 # false, not a module
|
||||
}
|
||||
|
||||
# Function to compile a module
|
||||
compile_module() {
|
||||
local module_path="$1"
|
||||
local module_name="$(basename "$module_path")"
|
||||
local output_file="$TEMP_DIR/${module_name}.log"
|
||||
local result_file="$TEMP_DIR/${module_name}.result"
|
||||
|
||||
# Initialize the result file
|
||||
echo "pending" > "$result_file"
|
||||
|
||||
# Check if this module should be skipped
|
||||
if should_skip_module "$module_name"; then
|
||||
echo "Skipping problematic module: $module_name" > "$output_file"
|
||||
echo "skipped|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
|
||||
echo "skipped" > "$result_file"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Check if this is actually a module (has .v files directly)
|
||||
if ! is_module "$module_path"; then
|
||||
echo "$module_name is not a module (no direct .v files), skipping" > "$output_file"
|
||||
echo "not_module|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
|
||||
echo "skipped" > "$result_file"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "Compiling module: $module_name" > "$output_file"
|
||||
|
||||
# Check if the module needs recompilation
|
||||
if ! needs_module_recompilation "$module_path"; then
|
||||
echo " No changes detected in $module_name, skipping compilation" >> "$output_file"
|
||||
echo "cached|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
|
||||
echo "cached" > "$result_file"
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Record start time
|
||||
local start_time=$(date +%s.%N)
|
||||
|
||||
# Try to compile the module - redirect both stdout and stderr to the output file
|
||||
if v $V_FLAGS -o "$SCRIPT_DIR/tmp/$module_name" "$module_path" >> "$output_file" 2>&1; then
|
||||
# Calculate compilation time
|
||||
local end_time=$(date +%s.%N)
|
||||
local compile_time=$(echo "$end_time - $start_time" | bc)
|
||||
|
||||
echo " Successfully compiled $module_name" >> "$output_file"
|
||||
# Update the cache timestamp
|
||||
update_module_cache "$module_path"
|
||||
|
||||
# Log result
|
||||
echo "success|${module_path#$LIB_DIR/}|$compile_time" >> "$SUMMARY_FILE"
|
||||
echo "success" > "$result_file"
|
||||
return 0
|
||||
else
|
||||
echo " Failed to compile $module_name" >> "$output_file"
|
||||
|
||||
# Log result
|
||||
echo "failed|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
|
||||
echo "failed" > "$result_file"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to run modules in parallel with a maximum number of concurrent jobs
|
||||
run_parallel() {
|
||||
local modules=("$@")
|
||||
local total=${#modules[@]}
|
||||
local completed=0
|
||||
local running=0
|
||||
local pids=()
|
||||
local module_indices=()
|
||||
|
||||
echo "Running $total modules in parallel (max $MAX_JOBS jobs at once)"
|
||||
|
||||
# Initialize arrays to track jobs
|
||||
for ((i=0; i<$total; i++)); do
|
||||
pids[$i]=-1
|
||||
done
|
||||
|
||||
# Start initial batch of jobs
|
||||
local next_job=0
|
||||
while [[ $next_job -lt $total && $running -lt $MAX_JOBS ]]; do
|
||||
compile_module "${modules[$next_job]}" > /dev/null 2>&1 &
|
||||
pids[$next_job]=$!
|
||||
((running++))
|
||||
((next_job++))
|
||||
done
|
||||
|
||||
# Display progress indicator
|
||||
display_progress() {
|
||||
local current=$1
|
||||
local total=$2
|
||||
local percent=$((current * 100 / total))
|
||||
local bar_length=50
|
||||
local filled_length=$((percent * bar_length / 100))
|
||||
|
||||
printf "\r[" >&2
|
||||
for ((i=0; i<bar_length; i++)); do
|
||||
if [ $i -lt $filled_length ]; then
|
||||
printf "#" >&2
|
||||
else
|
||||
printf " " >&2
|
||||
fi
|
||||
done
|
||||
printf "] %d%% (%d/%d modules)" $percent $current $total >&2
|
||||
}
|
||||
|
||||
# Monitor running jobs and start new ones as needed
|
||||
while [[ $completed -lt $total ]]; do
|
||||
display_progress $completed $total
|
||||
|
||||
# Check for completed jobs
|
||||
for ((i=0; i<$total; i++)); do
|
||||
if [[ ${pids[$i]} -gt 0 ]]; then
|
||||
if ! kill -0 ${pids[$i]} 2>/dev/null; then
|
||||
# Job completed
|
||||
local module_path="${modules[$i]}"
|
||||
local module_name="$(basename "$module_path")"
|
||||
local output_file="$TEMP_DIR/${module_name}.log"
|
||||
|
||||
# Add output to log file
|
||||
if [[ -f "$output_file" ]]; then
|
||||
cat "$output_file" >> "$LOG_FILE"
|
||||
fi
|
||||
|
||||
# Mark job as completed
|
||||
pids[$i]=-2
|
||||
((completed++))
|
||||
((running--))
|
||||
|
||||
# Start a new job if available
|
||||
if [[ $next_job -lt $total ]]; then
|
||||
compile_module "${modules[$next_job]}" > /dev/null 2>&1 &
|
||||
pids[$next_job]=$!
|
||||
((running++))
|
||||
((next_job++))
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Brief pause to avoid excessive CPU usage
|
||||
sleep 0.1
|
||||
done
|
||||
|
||||
# Clear the progress line
|
||||
printf "\r%$(tput cols)s\r" ""
|
||||
|
||||
# Wait for any remaining background jobs
|
||||
wait
|
||||
}
|
||||
|
||||
# Function to find all modules in a directory (recursively)
|
||||
find_modules() {
|
||||
local dir_path="$1"
|
||||
local modules=()
|
||||
|
||||
# Check if this directory is a module itself
|
||||
if is_module "$dir_path"; then
|
||||
modules+=("$dir_path")
|
||||
fi
|
||||
|
||||
# Look for modules in subdirectories (only one level deep)
|
||||
for subdir in "$dir_path"/*; do
|
||||
if [ -d "$subdir" ]; then
|
||||
local subdir_name="$(basename "$subdir")"
|
||||
|
||||
# Skip if this is in the skip list
|
||||
if should_skip_module "$subdir_name"; then
|
||||
echo -e "${YELLOW}Skipping problematic module: $subdir_name${NC}"
|
||||
echo "Skipping problematic module: $subdir_name" >> "$LOG_FILE"
|
||||
echo "skipped|${subdir#$LIB_DIR/}|" >> "$SUMMARY_FILE"
|
||||
continue
|
||||
fi
|
||||
|
||||
# Check if this subdirectory is a module
|
||||
if is_module "$subdir"; then
|
||||
modules+=("$subdir")
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo "${modules[@]}"
|
||||
}
|
||||
|
||||
echo "===== Starting compilation of all modules in lib ====="
|
||||
echo "===== Starting compilation of all modules in lib =====" >> "$LOG_FILE"
|
||||
|
||||
# Define priority modules to compile first
|
||||
PRIORITY_MODULES=("biz" "builder" "core" "crystallib" "jsonrpc" "jsonschema")
|
||||
|
||||
echo -e "${YELLOW}Attempting to compile each module as a whole...${NC}"
|
||||
echo "Attempting to compile each module as a whole..." >> "$LOG_FILE"
|
||||
|
||||
# Collect all modules to compile
|
||||
all_modules=()
|
||||
|
||||
# First add priority modules
|
||||
for module_name in "${PRIORITY_MODULES[@]}"; do
|
||||
module_dir="$LIB_DIR/$module_name"
|
||||
if [ -d "$module_dir" ]; then
|
||||
# Find all modules in this directory
|
||||
modules=($(find_modules "$module_dir"))
|
||||
all_modules+=("${modules[@]}")
|
||||
fi
|
||||
done
|
||||
|
||||
# Then add remaining modules
|
||||
for module_dir in "$LIB_DIR"/*; do
|
||||
if [ -d "$module_dir" ]; then
|
||||
module_name="$(basename "$module_dir")"
|
||||
# Skip modules already compiled in priority list
|
||||
if [[ " ${PRIORITY_MODULES[*]} " =~ " $module_name " ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# Find all modules in this directory
|
||||
modules=($(find_modules "$module_dir"))
|
||||
all_modules+=("${modules[@]}")
|
||||
fi
|
||||
done
|
||||
|
||||
# Debug: print all modules found
|
||||
echo "Found ${#all_modules[@]} modules to compile" >> "$LOG_FILE"
|
||||
for module in "${all_modules[@]}"; do
|
||||
echo " - $module" >> "$LOG_FILE"
|
||||
done
|
||||
|
||||
# Compile modules (either in parallel or sequentially)
|
||||
if $CONCURRENT; then
|
||||
run_parallel "${all_modules[@]}"
|
||||
else
|
||||
# Sequential compilation
|
||||
for module_path in "${all_modules[@]}"; do
|
||||
# Display module being compiled
|
||||
module_name="$(basename "$module_path")"
|
||||
echo -e "${YELLOW}Compiling module: $module_name${NC}"
|
||||
|
||||
# Compile the module
|
||||
compile_module "$module_path" > /dev/null 2>&1
|
||||
|
||||
# Display result
|
||||
output_file="$TEMP_DIR/${module_name}.log"
|
||||
result_file="$TEMP_DIR/${module_name}.result"
|
||||
|
||||
if [[ -f "$output_file" ]]; then
|
||||
cat "$output_file" >> "$LOG_FILE"
|
||||
|
||||
# Display with color based on result
|
||||
result=$(cat "$result_file")
|
||||
if [[ "$result" == "success" ]]; then
|
||||
echo -e "${GREEN} Successfully compiled $module_name${NC}"
|
||||
elif [[ "$result" == "failed" ]]; then
|
||||
echo -e "${RED} Failed to compile $module_name${NC}"
|
||||
elif [[ "$result" == "cached" ]]; then
|
||||
echo -e "${GREEN} No changes detected in $module_name, skipping compilation${NC}"
|
||||
else
|
||||
echo -e "${YELLOW} Skipped $module_name${NC}"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Count successes and failures
|
||||
success_count=$(grep -c "^success|" "$SUMMARY_FILE" || echo 0)
|
||||
failure_count=$(grep -c "^failed|" "$SUMMARY_FILE" || echo 0)
|
||||
cached_count=$(grep -c "^cached|" "$SUMMARY_FILE" || echo 0)
|
||||
skipped_count=$(grep -c "^skipped|" "$SUMMARY_FILE" || echo 0)
|
||||
not_module_count=$(grep -c "^not_module|" "$SUMMARY_FILE" || echo 0)
|
||||
|
||||
echo "===== Compilation complete ====="
|
||||
echo -e "${GREEN}Successfully compiled: $success_count modules${NC}"
|
||||
echo -e "${GREEN}Cached (no changes): $cached_count modules${NC}"
|
||||
echo -e "${YELLOW}Skipped: $skipped_count modules${NC}"
|
||||
echo -e "${YELLOW}Not modules: $not_module_count directories${NC}"
|
||||
echo -e "${RED}Failed to compile: $failure_count modules${NC}"
|
||||
echo "See $LOG_FILE for detailed compilation results"
|
||||
|
||||
echo "===== Compilation complete =====" >> "$LOG_FILE"
|
||||
echo "Successfully compiled: $success_count modules" >> "$LOG_FILE"
|
||||
echo "Cached (no changes): $cached_count modules" >> "$LOG_FILE"
|
||||
echo "Skipped: $skipped_count modules" >> "$LOG_FILE"
|
||||
echo "Not modules: $not_module_count directories" >> "$LOG_FILE"
|
||||
echo "Failed to compile: $failure_count modules" >> "$LOG_FILE"
|
||||
|
||||
# Print detailed summary
|
||||
echo ""
|
||||
echo "===== Module Compilation Summary ====="
|
||||
echo ""
|
||||
|
||||
# Print successful modules first, sorted by compilation time
|
||||
echo "Successful compilations:"
|
||||
grep "^success|" "$SUMMARY_FILE" | sort -t'|' -k3,3n | while IFS='|' read -r status path time; do
|
||||
# Color code based on compilation time
|
||||
time_color="$GREEN"
|
||||
if (( $(echo "$time > 10.0" | bc -l) )); then
|
||||
time_color="$RED"
|
||||
elif (( $(echo "$time > 1.0" | bc -l) )); then
|
||||
time_color="$YELLOW"
|
||||
fi
|
||||
|
||||
echo -e "✅ $path\t${time_color}${time}s${NC}"
|
||||
done
|
||||
|
||||
# Print cached modules
|
||||
echo ""
|
||||
echo "Cached modules (no changes detected):"
|
||||
grep "^cached|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
|
||||
echo -e "🔄 $path\t${GREEN}CACHED${NC}"
|
||||
done
|
||||
|
||||
# Print skipped modules
|
||||
echo ""
|
||||
echo "Skipped modules:"
|
||||
grep "^skipped|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
|
||||
echo -e "⏭️ $path\t${YELLOW}SKIPPED${NC}"
|
||||
done
|
||||
|
||||
# Print not modules
|
||||
echo ""
|
||||
echo "Not modules (directories without direct .v files):"
|
||||
grep "^not_module|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
|
||||
echo -e "📁 $path\t${YELLOW}NOT MODULE${NC}"
|
||||
done
|
||||
|
||||
# Print failed modules
|
||||
echo ""
|
||||
echo "Failed modules:"
|
||||
grep "^failed|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
|
||||
echo -e "❌ $path\t${RED}FAILED${NC}"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "===== End of Summary ====="
|
||||
|
||||
# Exit with error code if any module failed to compile
|
||||
if [ $failure_count -gt 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
exit 0
|
||||
69
examples/aiexamples/openai_chat_completion.vsh
Executable file
69
examples/aiexamples/openai_chat_completion.vsh
Executable file
@@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.clients.jina
|
||||
import freeflowuniverse.herolib.osal
|
||||
import os
|
||||
|
||||
// Example of using the Jina client
|
||||
|
||||
fn main() {
|
||||
// Set environment variable for testing
|
||||
// In production, you would set this in your environment
|
||||
// osal.env_set(key: 'JINAKEY', value: 'your-api-key')
|
||||
|
||||
// Check if JINAKEY environment variable exists
|
||||
if !osal.env_exists('JINAKEY') {
|
||||
println('JINAKEY environment variable not set. Please set it before running this example.')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
// Create a Jina client instance
|
||||
mut client := jina.get(name: 'default')!
|
||||
|
||||
println('Jina client initialized successfully.')
|
||||
|
||||
// Example: Create embeddings
|
||||
model := 'jina-embeddings-v3'
|
||||
texts := ['Hello, world!', 'How are you doing?']
|
||||
|
||||
println('Creating embeddings for texts: ${texts}')
|
||||
result := client.create_embeddings(texts, model, 'retrieval.query')!
|
||||
|
||||
println('Embeddings created successfully.')
|
||||
println('Model: ${result['model']}')
|
||||
println('Data count: ${result['data'].arr().len}')
|
||||
|
||||
// Example: List classifiers
|
||||
println('\nListing classifiers:')
|
||||
classifiers := client.list_classifiers() or {
|
||||
println('Failed to list classifiers: ${err}')
|
||||
return
|
||||
}
|
||||
|
||||
println('Classifiers retrieved successfully.')
|
||||
|
||||
// Example: Create a classifier
|
||||
println('\nTraining a classifier:')
|
||||
examples := [
|
||||
jina.TrainingExample{
|
||||
text: 'This movie was great!'
|
||||
label: 'positive'
|
||||
},
|
||||
jina.TrainingExample{
|
||||
text: 'I did not like this movie.'
|
||||
label: 'negative'
|
||||
},
|
||||
jina.TrainingExample{
|
||||
text: 'The movie was okay.'
|
||||
label: 'neutral'
|
||||
},
|
||||
]
|
||||
|
||||
training_result := client.train(examples, model, 'private') or {
|
||||
println('Failed to train classifier: ${err}')
|
||||
return
|
||||
}
|
||||
|
||||
println('Classifier trained successfully.')
|
||||
println('Classifier ID: ${training_result['classifier_id']}')
|
||||
}
|
||||
3
examples/baobab/generator/basic/.gitignore
vendored
Normal file
3
examples/baobab/generator/basic/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
methods.v
|
||||
pet_store_actor
|
||||
docs
|
||||
9
examples/baobab/generator/basic/README.md
Normal file
9
examples/baobab/generator/basic/README.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Actor Generation Examples
|
||||
|
||||
## `generate_methods.vsh`
|
||||
|
||||
This example generates actor method prototypes from an actor specification.
|
||||
|
||||
## `generate_actor_module.vsh`
|
||||
|
||||
This example generates an entire actor module from an actor specification with the support for the specified interfaces.
|
||||
22
examples/baobab/generator/basic/generate_actor_module.vsh
Executable file
22
examples/baobab/generator/basic/generate_actor_module.vsh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.generator
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openrpc
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const openrpc_spec_path = os.join_path(example_dir, 'openrpc.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openrpc_spec := openrpc.new(path: openrpc_spec_path)!
|
||||
actor_spec := specification.from_openrpc(openrpc_spec)!
|
||||
|
||||
actor_module := generator.generate_actor_module(actor_spec,
|
||||
interfaces: [.openrpc]
|
||||
)!
|
||||
|
||||
actor_module.write(example_dir,
|
||||
format: true
|
||||
overwrite: true
|
||||
)!
|
||||
19
examples/baobab/generator/basic/generate_methods.vsh
Executable file
19
examples/baobab/generator/basic/generate_methods.vsh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.generator
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openrpc
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const openrpc_spec_path = os.join_path(example_dir, 'openrpc.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openrpc_spec := openrpc.new(path: openrpc_spec_path)!
|
||||
actor_spec := specification.from_openrpc(openrpc_spec)!
|
||||
|
||||
methods_file := generator.generate_methods_file(actor_spec)!
|
||||
methods_file.write(example_dir,
|
||||
format: true
|
||||
overwrite: true
|
||||
)!
|
||||
19
examples/baobab/generator/basic/generate_openrpc_file.vsh
Executable file
19
examples/baobab/generator/basic/generate_openrpc_file.vsh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.generator
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openrpc
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const openrpc_spec_path = os.join_path(example_dir, 'openrpc.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openrpc_spec_ := openrpc.new(path: openrpc_spec_path)!
|
||||
actor_spec := specification.from_openrpc(openrpc_spec_)!
|
||||
openrpc_spec := actor_spec.to_openrpc()
|
||||
|
||||
openrpc_file := generator.generate_openrpc_file(openrpc_spec)!
|
||||
openrpc_file.write(os.join_path(example_dir, 'docs'),
|
||||
overwrite: true
|
||||
)!
|
||||
132
examples/baobab/generator/basic/openrpc.json
Normal file
132
examples/baobab/generator/basic/openrpc.json
Normal file
@@ -0,0 +1,132 @@
|
||||
{
|
||||
"openrpc": "1.0.0",
|
||||
"info": {
|
||||
"title": "PetStore",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"methods": [
|
||||
{
|
||||
"name": "GetPets",
|
||||
"description": "finds pets in the system that the user has access to by tags and within a limit",
|
||||
"params": [
|
||||
{
|
||||
"name": "tags",
|
||||
"description": "tags to filter by",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "limit",
|
||||
"description": "maximum number of results to return",
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet_list",
|
||||
"description": "all pets from the system, that mathes the tags",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/Pet"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "CreatePet",
|
||||
"description": "creates a new pet in the store. Duplicates are allowed.",
|
||||
"params": [
|
||||
{
|
||||
"name": "new_pet",
|
||||
"description": "Pet to add to the store.",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/NewPet"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet",
|
||||
"description": "the newly created pet",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/Pet"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GetPetById",
|
||||
"description": "gets a pet based on a single ID, if the user has access to the pet",
|
||||
"params": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "ID of pet to fetch",
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet",
|
||||
"description": "pet response",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/Pet"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "DeletePetById",
|
||||
"description": "deletes a single pet based on the ID supplied",
|
||||
"params": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "ID of pet to delete",
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet",
|
||||
"description": "pet deleted",
|
||||
"schema": {
|
||||
"type": "null"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"components": {
|
||||
"schemas": {
|
||||
"NewPet": {
|
||||
"title": "NewPet",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"tag": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Pet": {
|
||||
"title": "Pet",
|
||||
"description": "a pet struct that represents a pet",
|
||||
"properties": {
|
||||
"name": {
|
||||
"description": "name of the pet",
|
||||
"type": "string"
|
||||
},
|
||||
"tag": {
|
||||
"description": "a tag of the pet, helps finding pet",
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"description": "unique indentifier",
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
3
examples/baobab/generator/geomind_poc/.gitignore
vendored
Normal file
3
examples/baobab/generator/geomind_poc/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
merchant
|
||||
profiler
|
||||
farmer
|
||||
344
examples/baobab/generator/geomind_poc/farmer.json
Normal file
344
examples/baobab/generator/geomind_poc/farmer.json
Normal file
@@ -0,0 +1,344 @@
|
||||
{
|
||||
"openapi": "3.0.1",
|
||||
"info": {
|
||||
"title": "Farmer",
|
||||
"description": "API for managing farms and nodes, tracking rewards, capacity, and location.",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "http://localhost:8080",
|
||||
"description": "Local development server"
|
||||
}
|
||||
],
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Farm": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "f47ac10b-58cc-4372-a567-0e02b2c3d479"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Amsterdam Data Center"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "Enterprise-grade data center with renewable energy focus"
|
||||
},
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"example": "0x742d35Cc6634C0532925a3b844Bc454e4438f44e"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"owner"
|
||||
]
|
||||
},
|
||||
"Node": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "n47ac10b-58cc-4372-a567-0e02b2c3d479"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "High-performance GPU compute node with 4x NVIDIA A100"
|
||||
},
|
||||
"farm_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "f47ac10b-58cc-4372-a567-0e02b2c3d479"
|
||||
},
|
||||
"location": {
|
||||
"$ref": "#/components/schemas/Location"
|
||||
},
|
||||
"capacity": {
|
||||
"$ref": "#/components/schemas/Capacity"
|
||||
},
|
||||
"grid_version": {
|
||||
"type": "string",
|
||||
"example": "3.16.2"
|
||||
},
|
||||
"reward": {
|
||||
"$ref": "#/components/schemas/Reward"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"description",
|
||||
"farm_id",
|
||||
"location",
|
||||
"capacity",
|
||||
"reward"
|
||||
]
|
||||
},
|
||||
"Location": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"coordinates": {
|
||||
"type": "string",
|
||||
"example": "52.3740, 4.8897"
|
||||
},
|
||||
"continent": {
|
||||
"type": "string",
|
||||
"example": "Europe"
|
||||
},
|
||||
"country": {
|
||||
"type": "string",
|
||||
"example": "Netherlands"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"coordinates",
|
||||
"continent",
|
||||
"country"
|
||||
]
|
||||
},
|
||||
"Capacity": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cpu": {
|
||||
"type": "integer",
|
||||
"example": 128
|
||||
},
|
||||
"memory_gb": {
|
||||
"type": "integer",
|
||||
"example": 1024
|
||||
},
|
||||
"storage_tb": {
|
||||
"type": "integer",
|
||||
"example": 100
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"cpu",
|
||||
"memory_gb",
|
||||
"storage_tb"
|
||||
]
|
||||
},
|
||||
"Reward": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"reward_promised": {
|
||||
"type": "number",
|
||||
"format": "double",
|
||||
"example": 25000.50
|
||||
},
|
||||
"reward_given": {
|
||||
"type": "number",
|
||||
"format": "double",
|
||||
"example": 12500.25
|
||||
},
|
||||
"duration_months": {
|
||||
"type": "integer",
|
||||
"example": 36
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"reward_promised",
|
||||
"reward_given",
|
||||
"duration_months"
|
||||
]
|
||||
},
|
||||
"NodeStats": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"node_id": {
|
||||
"type": "integer",
|
||||
"format": "uint32",
|
||||
"example": "42"
|
||||
},
|
||||
"uptime_hours": {
|
||||
"type": "integer",
|
||||
"example": 8760
|
||||
},
|
||||
"bandwidth_gb": {
|
||||
"type": "integer",
|
||||
"example": 25000
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"node_id",
|
||||
"uptime_hours",
|
||||
"bandwidth_gb"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"paths": {
|
||||
"/farms": {
|
||||
"get": {
|
||||
"summary": "List all farms",
|
||||
"operationId": "getFarms",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of farms",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Farm"
|
||||
}
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"id": "f47ac10b-58cc-4372-a567-0e02b2c3d479",
|
||||
"name": "Amsterdam Data Center",
|
||||
"description": "Enterprise-grade data center with renewable energy focus",
|
||||
"owner": "0x742d35Cc6634C0532925a3b844Bc454e4438f44e"
|
||||
},
|
||||
{
|
||||
"id": "d47ac10b-58cc-4372-a567-0e02b2c3d480",
|
||||
"name": "Dubai Compute Hub",
|
||||
"description": "High-density compute farm with advanced cooling",
|
||||
"owner": "0x842d35Cc6634C0532925a3b844Bc454e4438f55f"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/farms/{farmId}/nodes": {
|
||||
"get": {
|
||||
"summary": "List nodes in a farm",
|
||||
"operationId": "getNodesByFarm",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "farmId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"example": "f47ac10b-58cc-4372-a567-0e02b2c3d479"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of nodes in the farm",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Node"
|
||||
}
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"id": "n47ac10b-58cc-4372-a567-0e02b2c3d479",
|
||||
"description": "High-performance GPU compute node with 4x NVIDIA A100",
|
||||
"farm_id": "f47ac10b-58cc-4372-a567-0e02b2c3d479",
|
||||
"location": {
|
||||
"coordinates": "52.3740, 4.8897",
|
||||
"continent": "Europe",
|
||||
"country": "Netherlands"
|
||||
},
|
||||
"capacity": {
|
||||
"cpu": 128,
|
||||
"memory_gb": 1024,
|
||||
"storage_tb": 100
|
||||
},
|
||||
"grid_version": "3.16.2",
|
||||
"reward": {
|
||||
"reward_promised": 25000.50,
|
||||
"reward_given": 12500.25,
|
||||
"duration_months": 36
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Farm not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "integer",
|
||||
"example": 404
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"example": "Farm with ID f47ac10b-58cc-4372-a567-0e02b2c3d479 not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/nodes/{nodeId}/stats": {
|
||||
"get": {
|
||||
"summary": "Get node statistics",
|
||||
"operationId": "getNodeStats",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "nodeId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint32"
|
||||
},
|
||||
"example": "42"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Node statistics",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/NodeStats"
|
||||
},
|
||||
"example": {
|
||||
"node_id": "42",
|
||||
"uptime_hours": 8760,
|
||||
"bandwidth_gb": 25000
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Node not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "integer",
|
||||
"example": 404
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"example": "Node with ID n47ac10b-58cc-4372-a567-0e02b2c3d479 not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
BIN
examples/baobab/generator/geomind_poc/generate
Executable file
BIN
examples/baobab/generator/geomind_poc/generate
Executable file
Binary file not shown.
23
examples/baobab/generator/geomind_poc/generate.vsh
Executable file
23
examples/baobab/generator/geomind_poc/generate.vsh
Executable file
@@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.generator
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openapi
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const specs = ['merchant', 'profiler', 'farmer']
|
||||
|
||||
for spec in specs {
|
||||
openapi_spec_path := os.join_path(example_dir, '${spec}.json')
|
||||
openapi_spec := openapi.new(path: openapi_spec_path, process: true)!
|
||||
actor_spec := specification.from_openapi(openapi_spec)!
|
||||
actor_module := generator.generate_actor_folder(actor_spec,
|
||||
interfaces: [.openapi, .http]
|
||||
)!
|
||||
actor_module.write(example_dir,
|
||||
format: true
|
||||
overwrite: true
|
||||
compile: false
|
||||
)!
|
||||
}
|
||||
997
examples/baobab/generator/geomind_poc/merchant.json
Normal file
997
examples/baobab/generator/geomind_poc/merchant.json
Normal file
@@ -0,0 +1,997 @@
|
||||
{
|
||||
"openapi": "3.0.1",
|
||||
"info": {
|
||||
"title": "Merchant",
|
||||
"description": "API for e-commerce operations including stores, products, and orders",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"servers": [{
|
||||
"url": "http://localhost:8080",
|
||||
"description": "Local development server"
|
||||
},{
|
||||
"url": "http://localhost:8080/openapi/example",
|
||||
"description": "Local example server"
|
||||
}],
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Store": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174000"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Tech Gadgets Store"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "Premium electronics and gadgets retailer"
|
||||
},
|
||||
"contact": {
|
||||
"type": "string",
|
||||
"example": "contact@techgadgets.com"
|
||||
},
|
||||
"active": {
|
||||
"type": "boolean",
|
||||
"example": true
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"contact",
|
||||
"active"
|
||||
]
|
||||
},
|
||||
"ProductComponentTemplate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174001"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "4K Display Panel"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "55-inch 4K UHD Display Panel"
|
||||
},
|
||||
"specs": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
},
|
||||
"example": {
|
||||
"resolution": "3840x2160",
|
||||
"refreshRate": "120Hz",
|
||||
"panel_type": "OLED"
|
||||
}
|
||||
},
|
||||
"price": {
|
||||
"type": "number",
|
||||
"format": "double",
|
||||
"example": 599.99
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Z]{3}$",
|
||||
"example": "USD"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"price",
|
||||
"currency"
|
||||
]
|
||||
},
|
||||
"ProductTemplate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174002"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Smart TV 55-inch"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "55-inch Smart TV with 4K Display"
|
||||
},
|
||||
"components": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ProductComponentTemplate"
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "4K Display Panel",
|
||||
"description": "55-inch 4K UHD Display Panel",
|
||||
"specs": {
|
||||
"resolution": "3840x2160",
|
||||
"refreshRate": "120Hz"
|
||||
},
|
||||
"price": 599.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
]
|
||||
},
|
||||
"store_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174000"
|
||||
},
|
||||
"category": {
|
||||
"type": "string",
|
||||
"example": "Electronics"
|
||||
},
|
||||
"active": {
|
||||
"type": "boolean",
|
||||
"example": true
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"components",
|
||||
"store_id",
|
||||
"active"
|
||||
]
|
||||
},
|
||||
"Product": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174003"
|
||||
},
|
||||
"template_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174002"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Smart TV 55-inch"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "55-inch Smart TV with 4K Display"
|
||||
},
|
||||
"price": {
|
||||
"type": "number",
|
||||
"format": "double",
|
||||
"example": 899.99
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Z]{3}$",
|
||||
"example": "USD"
|
||||
},
|
||||
"store_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174000"
|
||||
},
|
||||
"stock_quantity": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"example": 50
|
||||
},
|
||||
"available": {
|
||||
"type": "boolean",
|
||||
"example": true
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"template_id",
|
||||
"name",
|
||||
"price",
|
||||
"currency",
|
||||
"store_id",
|
||||
"stock_quantity",
|
||||
"available"
|
||||
]
|
||||
},
|
||||
"OrderItem": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"product_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174003"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"example": 2
|
||||
},
|
||||
"price": {
|
||||
"type": "number",
|
||||
"format": "double",
|
||||
"example": 899.99
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Z]{3}$",
|
||||
"example": "USD"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"product_id",
|
||||
"quantity",
|
||||
"price",
|
||||
"currency"
|
||||
]
|
||||
},
|
||||
"Order": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174004"
|
||||
},
|
||||
"customer_id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174005"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/OrderItem"
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"product_id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"quantity": 2,
|
||||
"price": 899.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
]
|
||||
},
|
||||
"total_amount": {
|
||||
"type": "number",
|
||||
"format": "double",
|
||||
"example": 1799.98
|
||||
},
|
||||
"currency": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Z]{3}$",
|
||||
"example": "USD"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"pending",
|
||||
"confirmed",
|
||||
"shipped",
|
||||
"delivered"
|
||||
],
|
||||
"example": "pending"
|
||||
},
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"example": "2024-02-10T10:30:00Z"
|
||||
},
|
||||
"updated_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"example": "2024-02-10T10:30:00Z"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"customer_id",
|
||||
"items",
|
||||
"total_amount",
|
||||
"currency",
|
||||
"status",
|
||||
"created_at",
|
||||
"updated_at"
|
||||
]
|
||||
},
|
||||
"Error": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "integer",
|
||||
"example": 404
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"example": "Resource not found"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"code",
|
||||
"message"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"paths": {
|
||||
"/stores": {
|
||||
"post": {
|
||||
"summary": "Create a new store",
|
||||
"operationId": "createStore",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Tech Gadgets Store"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"example": "Premium electronics and gadgets retailer"
|
||||
},
|
||||
"contact": {
|
||||
"type": "string",
|
||||
"example": "contact@techgadgets.com"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"contact"
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"newStore": {
|
||||
"summary": "Create a new electronics store",
|
||||
"value": {
|
||||
"name": "Tech Gadgets Store",
|
||||
"description": "Premium electronics and gadgets retailer",
|
||||
"contact": "contact@techgadgets.com"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Store created successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Store"
|
||||
},
|
||||
"example": {
|
||||
"id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"name": "Tech Gadgets Store",
|
||||
"description": "Premium electronics and gadgets retailer",
|
||||
"contact": "contact@techgadgets.com",
|
||||
"active": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid input",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 400,
|
||||
"message": "Invalid store data provided"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/products/templates/components": {
|
||||
"post": {
|
||||
"summary": "Create a new product component template",
|
||||
"operationId": "createProductComponentTemplate",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"specs": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"price": {
|
||||
"type": "number"
|
||||
},
|
||||
"currency": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"price",
|
||||
"currency"
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"displayPanel": {
|
||||
"summary": "Create a display panel component",
|
||||
"value": {
|
||||
"name": "4K Display Panel",
|
||||
"description": "55-inch 4K UHD Display Panel",
|
||||
"specs": {
|
||||
"resolution": "3840x2160",
|
||||
"refreshRate": "120Hz",
|
||||
"panel_type": "OLED"
|
||||
},
|
||||
"price": 599.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Component template created successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ProductComponentTemplate"
|
||||
},
|
||||
"example": {
|
||||
"id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "4K Display Panel",
|
||||
"description": "55-inch 4K UHD Display Panel",
|
||||
"specs": {
|
||||
"resolution": "3840x2160",
|
||||
"refreshRate": "120Hz",
|
||||
"panel_type": "OLED"
|
||||
},
|
||||
"price": 599.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid input",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 400,
|
||||
"message": "Invalid component template data"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/products/templates": {
|
||||
"post": {
|
||||
"summary": "Create a new product template",
|
||||
"operationId": "createProductTemplate",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"components": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
},
|
||||
"store_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"category": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"components",
|
||||
"store_id"
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"smartTV": {
|
||||
"summary": "Create a Smart TV template",
|
||||
"value": {
|
||||
"name": "Smart TV 55-inch",
|
||||
"description": "55-inch Smart TV with 4K Display",
|
||||
"components": [
|
||||
"123e4567-e89b-12d3-a456-426614174001"
|
||||
],
|
||||
"store_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"category": "Electronics"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Product template created successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/ProductTemplate"
|
||||
},
|
||||
"example": {
|
||||
"id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"name": "Smart TV 55-inch",
|
||||
"description": "55-inch Smart TV with 4K Display",
|
||||
"components": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174001",
|
||||
"name": "4K Display Panel",
|
||||
"description": "55-inch 4K UHD Display Panel",
|
||||
"specs": {
|
||||
"resolution": "3840x2160",
|
||||
"refreshRate": "120Hz"
|
||||
},
|
||||
"price": 599.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
],
|
||||
"store_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"category": "Electronics",
|
||||
"active": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Store not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 404,
|
||||
"message": "Store not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/products": {
|
||||
"post": {
|
||||
"summary": "Create a new product from template",
|
||||
"operationId": "createProduct",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"template_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"store_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"stock_quantity": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"template_id",
|
||||
"store_id",
|
||||
"stock_quantity"
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"newProduct": {
|
||||
"summary": "Create a new Smart TV product",
|
||||
"value": {
|
||||
"template_id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"store_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"stock_quantity": 50
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Product created successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Product"
|
||||
},
|
||||
"example": {
|
||||
"id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"template_id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"name": "Smart TV 55-inch",
|
||||
"description": "55-inch Smart TV with 4K Display",
|
||||
"price": 899.99,
|
||||
"currency": "USD",
|
||||
"store_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"stock_quantity": 50,
|
||||
"available": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Template or store not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 404,
|
||||
"message": "Product template not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/orders": {
|
||||
"post": {
|
||||
"summary": "Create a new order",
|
||||
"operationId": "createOrder",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"customer_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/OrderItem"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"customer_id",
|
||||
"items"
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"newOrder": {
|
||||
"summary": "Create an order for two Smart TVs",
|
||||
"value": {
|
||||
"customer_id": "123e4567-e89b-12d3-a456-426614174005",
|
||||
"items": [
|
||||
{
|
||||
"product_id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"quantity": 2,
|
||||
"price": 899.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Order created successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Order"
|
||||
},
|
||||
"example": {
|
||||
"id": "123e4567-e89b-12d3-a456-426614174004",
|
||||
"customer_id": "123e4567-e89b-12d3-a456-426614174005",
|
||||
"items": [
|
||||
{
|
||||
"product_id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"quantity": 2,
|
||||
"price": 899.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
],
|
||||
"total_amount": 1799.98,
|
||||
"currency": "USD",
|
||||
"status": "pending",
|
||||
"created_at": "2024-02-10T10:30:00Z",
|
||||
"updated_at": "2024-02-10T10:30:00Z"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid input or insufficient stock",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 400,
|
||||
"message": "Insufficient stock for product"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/orders/{orderId}/status": {
|
||||
"put": {
|
||||
"summary": "Update order status",
|
||||
"operationId": "updateOrderStatus",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "orderId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"example": "123e4567-e89b-12d3-a456-426614174004"
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"pending",
|
||||
"confirmed",
|
||||
"shipped",
|
||||
"delivered"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"status"
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"updateStatus": {
|
||||
"summary": "Update order to shipped status",
|
||||
"value": {
|
||||
"status": "shipped"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Order status updated successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Order"
|
||||
},
|
||||
"example": {
|
||||
"id": "123e4567-e89b-12d3-a456-426614174004",
|
||||
"customer_id": "123e4567-e89b-12d3-a456-426614174005",
|
||||
"items": [
|
||||
{
|
||||
"product_id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"quantity": 2,
|
||||
"price": 899.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
],
|
||||
"total_amount": 1799.98,
|
||||
"currency": "USD",
|
||||
"status": "shipped",
|
||||
"created_at": "2024-02-10T10:30:00Z",
|
||||
"updated_at": "2024-02-10T10:35:00Z"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Order not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 404,
|
||||
"message": "Order not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/stores/{storeId}/products": {
|
||||
"get": {
|
||||
"summary": "Get all products for a store",
|
||||
"operationId": "getStoreProducts",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "storeId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"example": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of store's products",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Product"
|
||||
}
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"template_id": "123e4567-e89b-12d3-a456-426614174002",
|
||||
"name": "Smart TV 55-inch",
|
||||
"description": "55-inch Smart TV with 4K Display",
|
||||
"price": 899.99,
|
||||
"currency": "USD",
|
||||
"store_id": "123e4567-e89b-12d3-a456-426614174000",
|
||||
"stock_quantity": 48,
|
||||
"available": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Store not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 404,
|
||||
"message": "Store not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/stores/{storeId}/orders": {
|
||||
"get": {
|
||||
"summary": "Get all orders for a store's products",
|
||||
"operationId": "getStoreOrders",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "storeId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"example": "123e4567-e89b-12d3-a456-426614174000"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of orders containing store's products",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Order"
|
||||
}
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"id": "123e4567-e89b-12d3-a456-426614174004",
|
||||
"customer_id": "123e4567-e89b-12d3-a456-426614174005",
|
||||
"items": [
|
||||
{
|
||||
"product_id": "123e4567-e89b-12d3-a456-426614174003",
|
||||
"quantity": 2,
|
||||
"price": 899.99,
|
||||
"currency": "USD"
|
||||
}
|
||||
],
|
||||
"total_amount": 1799.98,
|
||||
"currency": "USD",
|
||||
"status": "shipped",
|
||||
"created_at": "2024-02-10T10:30:00Z",
|
||||
"updated_at": "2024-02-10T10:35:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Store not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"example": {
|
||||
"code": 404,
|
||||
"message": "Store not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
81
examples/baobab/generator/geomind_poc/model.v
Normal file
81
examples/baobab/generator/geomind_poc/model.v
Normal file
@@ -0,0 +1,81 @@
|
||||
module geomind_poc
|
||||
|
||||
pub struct Merchant {
|
||||
pub:
|
||||
id string
|
||||
name string
|
||||
description string
|
||||
contact string
|
||||
active bool
|
||||
}
|
||||
|
||||
pub struct ProductComponentTemplate {
|
||||
pub:
|
||||
id string
|
||||
name string
|
||||
description string
|
||||
// technical specifications
|
||||
specs map[string]string
|
||||
// price per unit
|
||||
price f64
|
||||
// currency code (e.g., 'USD', 'EUR')
|
||||
currency string
|
||||
}
|
||||
|
||||
pub struct ProductTemplate {
|
||||
pub:
|
||||
id string
|
||||
name string
|
||||
description string
|
||||
// components that make up this product template
|
||||
components []ProductComponentTemplate
|
||||
// merchant who created this template
|
||||
merchant_id string
|
||||
// category of the product (e.g., 'electronics', 'clothing')
|
||||
category string
|
||||
// whether this template is available for use
|
||||
active bool
|
||||
}
|
||||
|
||||
pub struct Product {
|
||||
pub:
|
||||
id string
|
||||
template_id string
|
||||
// specific instance details that may differ from template
|
||||
name string
|
||||
description string
|
||||
// actual price of this product instance
|
||||
price f64
|
||||
currency string
|
||||
// merchant selling this product
|
||||
merchant_id string
|
||||
// current stock level
|
||||
stock_quantity int
|
||||
// whether this product is available for purchase
|
||||
available bool
|
||||
}
|
||||
|
||||
pub struct OrderItem {
|
||||
pub:
|
||||
product_id string
|
||||
quantity int
|
||||
price f64
|
||||
currency string
|
||||
}
|
||||
|
||||
pub struct Order {
|
||||
pub:
|
||||
id string
|
||||
// customer identifier
|
||||
customer_id string
|
||||
// items in the order
|
||||
items []OrderItem
|
||||
// total order amount
|
||||
total_amount f64
|
||||
currency string
|
||||
// order status (e.g., 'pending', 'confirmed', 'shipped', 'delivered')
|
||||
status string
|
||||
// timestamps
|
||||
created_at string
|
||||
updated_at string
|
||||
}
|
||||
148
examples/baobab/generator/geomind_poc/play.v
Normal file
148
examples/baobab/generator/geomind_poc/play.v
Normal file
@@ -0,0 +1,148 @@
|
||||
module geomind_poc
|
||||
|
||||
import freeflowuniverse.crystallib.core.playbook { PlayBook }
|
||||
|
||||
// play_commerce processes heroscript actions for the commerce system
|
||||
pub fn play_commerce(mut plbook PlayBook) ! {
|
||||
commerce_actions := plbook.find(filter: 'commerce.')!
|
||||
mut c := Commerce{}
|
||||
|
||||
for action in commerce_actions {
|
||||
match action.name {
|
||||
'merchant' {
|
||||
mut p := action.params
|
||||
merchant := c.create_merchant(
|
||||
name: p.get('name')!
|
||||
description: p.get_default('description', '')!
|
||||
contact: p.get('contact')!
|
||||
)!
|
||||
println('Created merchant: ${merchant.name}')
|
||||
}
|
||||
'component' {
|
||||
mut p := action.params
|
||||
component := c.create_product_component_template(
|
||||
name: p.get('name')!
|
||||
description: p.get_default('description', '')!
|
||||
specs: p.get_map()
|
||||
price: p.get_float('price')!
|
||||
currency: p.get('currency')!
|
||||
)!
|
||||
println('Created component: ${component.name}')
|
||||
}
|
||||
'template' {
|
||||
mut p := action.params
|
||||
// Get component IDs as a list
|
||||
component_ids := p.get_list('components')!
|
||||
// Convert component IDs to actual components
|
||||
mut components := []ProductComponentTemplate{}
|
||||
for id in component_ids {
|
||||
// In a real implementation, you would fetch the component from storage
|
||||
// For this example, we create a dummy component
|
||||
component := ProductComponentTemplate{
|
||||
id: id
|
||||
name: 'Component'
|
||||
description: ''
|
||||
specs: map[string]string{}
|
||||
price: 0
|
||||
currency: 'USD'
|
||||
}
|
||||
components << component
|
||||
}
|
||||
|
||||
template := c.create_product_template(
|
||||
name: p.get('name')!
|
||||
description: p.get_default('description', '')!
|
||||
components: components
|
||||
merchant_id: p.get('merchant_id')!
|
||||
category: p.get_default('category', 'General')!
|
||||
)!
|
||||
println('Created template: ${template.name}')
|
||||
}
|
||||
'product' {
|
||||
mut p := action.params
|
||||
product := c.create_product(
|
||||
template_id: p.get('template_id')!
|
||||
merchant_id: p.get('merchant_id')!
|
||||
stock_quantity: p.get_int('stock_quantity')!
|
||||
)!
|
||||
println('Created product: ${product.name} with stock: ${product.stock_quantity}')
|
||||
}
|
||||
'order' {
|
||||
mut p := action.params
|
||||
// Get order items as a list of maps
|
||||
items_data := p.get_list('items')!
|
||||
mut items := []OrderItem{}
|
||||
for item_data in items_data {
|
||||
// Parse item data (format: "product_id:quantity:price:currency")
|
||||
parts := item_data.split(':')
|
||||
if parts.len != 4 {
|
||||
return error('Invalid order item format: ${item_data}')
|
||||
}
|
||||
item := OrderItem{
|
||||
product_id: parts[0]
|
||||
quantity: parts[1].int()
|
||||
price: parts[2].f64()
|
||||
currency: parts[3]
|
||||
}
|
||||
items << item
|
||||
}
|
||||
|
||||
order := c.create_order(
|
||||
customer_id: p.get('customer_id')!
|
||||
items: items
|
||||
)!
|
||||
println('Created order: ${order.id} with ${order.items.len} items')
|
||||
}
|
||||
'update_order' {
|
||||
mut p := action.params
|
||||
order := c.update_order_status(
|
||||
order_id: p.get('order_id')!
|
||||
new_status: p.get('status')!
|
||||
)!
|
||||
println('Updated order ${order.id} status to: ${order.status}')
|
||||
}
|
||||
else {
|
||||
return error('Unknown commerce action: ${action.name}')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Example heroscript usage:
|
||||
/*
|
||||
!!commerce.merchant
|
||||
name: "Tech Gadgets Store"
|
||||
description: "Premium electronics and gadgets retailer"
|
||||
contact: "contact@techgadgets.com"
|
||||
|
||||
!!commerce.component
|
||||
name: "4K Display Panel"
|
||||
description: "55-inch 4K UHD Display Panel"
|
||||
specs:
|
||||
resolution: "3840x2160"
|
||||
refreshRate: "120Hz"
|
||||
panel_type: "OLED"
|
||||
price: 599.99
|
||||
currency: "USD"
|
||||
|
||||
!!commerce.template
|
||||
name: "Smart TV 55-inch"
|
||||
description: "55-inch Smart TV with 4K Display"
|
||||
components: "123e4567-e89b-12d3-a456-426614174001"
|
||||
merchant_id: "123e4567-e89b-12d3-a456-426614174000"
|
||||
category: "Electronics"
|
||||
|
||||
!!commerce.product
|
||||
template_id: "123e4567-e89b-12d3-a456-426614174002"
|
||||
merchant_id: "123e4567-e89b-12d3-a456-426614174000"
|
||||
stock_quantity: 50
|
||||
|
||||
!!commerce.order
|
||||
customer_id: "123e4567-e89b-12d3-a456-426614174005"
|
||||
items:
|
||||
- "123e4567-e89b-12d3-a456-426614174003:2:899.99:USD"
|
||||
|
||||
!!commerce.update_order
|
||||
order_id: "123e4567-e89b-12d3-a456-426614174004"
|
||||
status: "shipped"
|
||||
*/
|
||||
286
examples/baobab/generator/geomind_poc/profiler.json
Normal file
286
examples/baobab/generator/geomind_poc/profiler.json
Normal file
@@ -0,0 +1,286 @@
|
||||
{
|
||||
"openapi": "3.0.1",
|
||||
"info": {
|
||||
"title": "Profiler",
|
||||
"description": "API for managing user profiles with name, public key, and KYC verification",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "http://localhost:8080",
|
||||
"description": "Local development server"
|
||||
}
|
||||
],
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Profile": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"example": "123e4567-e89b-12d3-a456-426614174000"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Alice Doe"
|
||||
},
|
||||
"public_key": {
|
||||
"type": "string",
|
||||
"example": "028a8f8b59f7283a47f9f6d4bc8176e847ad2b6c6d8bdfd041e5e7f3b4ac28c9fc"
|
||||
},
|
||||
"kyc_verified": {
|
||||
"type": "boolean",
|
||||
"example": false
|
||||
}
|
||||
},
|
||||
"required": ["id", "name", "public_key", "kyc_verified"]
|
||||
},
|
||||
"Error": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "integer",
|
||||
"example": 400
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"example": "Invalid request"
|
||||
}
|
||||
},
|
||||
"required": ["code", "message"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"paths": {
|
||||
"/profiles": {
|
||||
"post": {
|
||||
"summary": "Create a new profile",
|
||||
"operationId": "createProfile",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Bob Smith"
|
||||
},
|
||||
"public_key": {
|
||||
"type": "string",
|
||||
"example": "03a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2"
|
||||
}
|
||||
},
|
||||
"required": ["name", "public_key"]
|
||||
},
|
||||
"examples": {
|
||||
"newProfile": {
|
||||
"summary": "Example of creating a new profile",
|
||||
"value": {
|
||||
"name": "Bob Smith",
|
||||
"public_key": "03a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Profile created successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Profile"
|
||||
},
|
||||
"examples": {
|
||||
"successResponse": {
|
||||
"summary": "Example of successful profile creation",
|
||||
"value": {
|
||||
"id": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"name": "Bob Smith",
|
||||
"public_key": "03a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2",
|
||||
"kyc_verified": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid input",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"examples": {
|
||||
"invalidInput": {
|
||||
"summary": "Example of invalid input error",
|
||||
"value": {
|
||||
"code": 400,
|
||||
"message": "Invalid public key format"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/profiles/{profileId}": {
|
||||
"get": {
|
||||
"summary": "Get profile details",
|
||||
"operationId": "getProfile",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "profileId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint32"
|
||||
},
|
||||
"example": "42"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Profile retrieved successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Profile"
|
||||
},
|
||||
"examples": {
|
||||
"existingProfile": {
|
||||
"summary": "Example of retrieved profile",
|
||||
"value": {
|
||||
"id": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"name": "Bob Smith",
|
||||
"public_key": "03a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2",
|
||||
"kyc_verified": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Profile not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"examples": {
|
||||
"notFound": {
|
||||
"summary": "Example of profile not found error",
|
||||
"value": {
|
||||
"code": 404,
|
||||
"message": "Profile with ID '550e8400-e29b-41d4-a716-446655440000' not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/profiles/{profileId}/kyc": {
|
||||
"put": {
|
||||
"summary": "Update KYC verification status",
|
||||
"operationId": "updateKYCStatus",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "profileId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint32"
|
||||
},
|
||||
"example": "42"
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kyc_verified": {
|
||||
"type": "boolean",
|
||||
"example": true
|
||||
}
|
||||
},
|
||||
"required": ["kyc_verified"]
|
||||
},
|
||||
"examples": {
|
||||
"verifyKYC": {
|
||||
"summary": "Example of verifying KYC",
|
||||
"value": {
|
||||
"kyc_verified": true
|
||||
}
|
||||
},
|
||||
"unverifyKYC": {
|
||||
"summary": "Example of unverifying KYC",
|
||||
"value": {
|
||||
"kyc_verified": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "KYC status updated successfully",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Profile"
|
||||
},
|
||||
"examples": {
|
||||
"updatedProfile": {
|
||||
"summary": "Example of profile with updated KYC status",
|
||||
"value": {
|
||||
"id": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"name": "Bob Smith",
|
||||
"public_key": "03a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1b2",
|
||||
"kyc_verified": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Profile not found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Error"
|
||||
},
|
||||
"examples": {
|
||||
"notFound": {
|
||||
"summary": "Example of profile not found error",
|
||||
"value": {
|
||||
"code": 404,
|
||||
"message": "Profile with ID '550e8400-e29b-41d4-a716-446655440000' not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
191
examples/baobab/generator/geomind_poc/server.v
Normal file
191
examples/baobab/generator/geomind_poc/server.v
Normal file
@@ -0,0 +1,191 @@
|
||||
module geomind_poc
|
||||
|
||||
import crypto.rand
|
||||
import time
|
||||
|
||||
// Commerce represents the main e-commerce server handling all operations
|
||||
pub struct Commerce {
|
||||
mut:
|
||||
merchants map[string]Merchant
|
||||
templates map[string]ProductTemplate
|
||||
products map[string]Product
|
||||
orders map[string]Order
|
||||
}
|
||||
|
||||
// generate_id creates a unique identifier
|
||||
fn generate_id() string {
|
||||
return rand.uuid_v4()
|
||||
}
|
||||
|
||||
// create_merchant adds a new merchant to the system
|
||||
pub fn (mut c Commerce) create_merchant(name string, description string, contact string) !Merchant {
|
||||
merchant_id := generate_id()
|
||||
merchant := Merchant{
|
||||
id: merchant_id
|
||||
name: name
|
||||
description: description
|
||||
contact: contact
|
||||
active: true
|
||||
}
|
||||
c.merchants[merchant_id] = merchant
|
||||
return merchant
|
||||
}
|
||||
|
||||
// create_product_component_template creates a new component template
|
||||
pub fn (mut c Commerce) create_product_component_template(name string, description string, specs map[string]string, price f64, currency string) !ProductComponentTemplate {
|
||||
component := ProductComponentTemplate{
|
||||
id: generate_id()
|
||||
name: name
|
||||
description: description
|
||||
specs: specs
|
||||
price: price
|
||||
currency: currency
|
||||
}
|
||||
return component
|
||||
}
|
||||
|
||||
// create_product_template creates a new product template
|
||||
pub fn (mut c Commerce) create_product_template(name string, description string, components []ProductComponentTemplate, merchant_id string, category string) !ProductTemplate {
|
||||
if merchant_id !in c.merchants {
|
||||
return error('Merchant not found')
|
||||
}
|
||||
|
||||
template := ProductTemplate{
|
||||
id: generate_id()
|
||||
name: name
|
||||
description: description
|
||||
components: components
|
||||
merchant_id: merchant_id
|
||||
category: category
|
||||
active: true
|
||||
}
|
||||
c.templates[template.id] = template
|
||||
return template
|
||||
}
|
||||
|
||||
// create_product creates a new product instance from a template
|
||||
pub fn (mut c Commerce) create_product(template_id string, merchant_id string, stock_quantity int) !Product {
|
||||
if template_id !in c.templates {
|
||||
return error('Template not found')
|
||||
}
|
||||
if merchant_id !in c.merchants {
|
||||
return error('Merchant not found')
|
||||
}
|
||||
|
||||
template := c.templates[template_id]
|
||||
mut total_price := 0.0
|
||||
for component in template.components {
|
||||
total_price += component.price
|
||||
}
|
||||
|
||||
product := Product{
|
||||
id: generate_id()
|
||||
template_id: template_id
|
||||
name: template.name
|
||||
description: template.description
|
||||
price: total_price
|
||||
currency: template.components[0].currency // assuming all components use same currency
|
||||
merchant_id: merchant_id
|
||||
stock_quantity: stock_quantity
|
||||
available: true
|
||||
}
|
||||
c.products[product.id] = product
|
||||
return product
|
||||
}
|
||||
|
||||
// create_order creates a new order
|
||||
pub fn (mut c Commerce) create_order(customer_id string, items []OrderItem) !Order {
|
||||
mut total_amount := 0.0
|
||||
mut currency := ''
|
||||
|
||||
for item in items {
|
||||
if item.product_id !in c.products {
|
||||
return error('Product not found: ${item.product_id}')
|
||||
}
|
||||
product := c.products[item.product_id]
|
||||
if !product.available || product.stock_quantity < item.quantity {
|
||||
return error('Product ${product.name} is not available in requested quantity')
|
||||
}
|
||||
total_amount += item.price * item.quantity
|
||||
if currency == '' {
|
||||
currency = item.currency
|
||||
} else if currency != item.currency {
|
||||
return error('Mixed currencies are not supported')
|
||||
}
|
||||
}
|
||||
|
||||
order := Order{
|
||||
id: generate_id()
|
||||
customer_id: customer_id
|
||||
items: items
|
||||
total_amount: total_amount
|
||||
currency: currency
|
||||
status: 'pending'
|
||||
created_at: time.now().str()
|
||||
updated_at: time.now().str()
|
||||
}
|
||||
c.orders[order.id] = order
|
||||
|
||||
// Update stock quantities
|
||||
for item in items {
|
||||
mut product := c.products[item.product_id]
|
||||
product.stock_quantity -= item.quantity
|
||||
if product.stock_quantity == 0 {
|
||||
product.available = false
|
||||
}
|
||||
c.products[item.product_id] = product
|
||||
}
|
||||
|
||||
return order
|
||||
}
|
||||
|
||||
// update_order_status updates the status of an order
|
||||
pub fn (mut c Commerce) update_order_status(order_id string, new_status string) !Order {
|
||||
if order_id !in c.orders {
|
||||
return error('Order not found')
|
||||
}
|
||||
|
||||
mut order := c.orders[order_id]
|
||||
order.status = new_status
|
||||
order.updated_at = time.now().str()
|
||||
c.orders[order_id] = order
|
||||
return order
|
||||
}
|
||||
|
||||
// get_merchant_products returns all products for a given merchant
|
||||
pub fn (c Commerce) get_merchant_products(merchant_id string) ![]Product {
|
||||
if merchant_id !in c.merchants {
|
||||
return error('Merchant not found')
|
||||
}
|
||||
|
||||
mut products := []Product{}
|
||||
for product in c.products.values() {
|
||||
if product.merchant_id == merchant_id {
|
||||
products << product
|
||||
}
|
||||
}
|
||||
return products
|
||||
}
|
||||
|
||||
// get_merchant_orders returns all orders for products sold by a merchant
|
||||
pub fn (c Commerce) get_merchant_orders(merchant_id string) ![]Order {
|
||||
if merchant_id !in c.merchants {
|
||||
return error('Merchant not found')
|
||||
}
|
||||
|
||||
mut orders := []Order{}
|
||||
for order in c.orders.values() {
|
||||
mut includes_merchant := false
|
||||
for item in order.items {
|
||||
product := c.products[item.product_id]
|
||||
if product.merchant_id == merchant_id {
|
||||
includes_merchant = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if includes_merchant {
|
||||
orders << order
|
||||
}
|
||||
}
|
||||
return orders
|
||||
}
|
||||
57
examples/baobab/generator/geomind_poc/specs.md
Normal file
57
examples/baobab/generator/geomind_poc/specs.md
Normal file
@@ -0,0 +1,57 @@
|
||||
|
||||
|
||||
- profile management
|
||||
- my name
|
||||
- my pub key
|
||||
- kyc
|
||||
- ...
|
||||
- product has components
|
||||
- admin items
|
||||
- supported_currencies
|
||||
- countries
|
||||
- continents
|
||||
- farming
|
||||
- farms
|
||||
- default farm exists, users don't have to chose
|
||||
- name
|
||||
- description
|
||||
- owner (pubkey)
|
||||
- nodes
|
||||
- reward (nr of INCA per month and time e.g. 24 months)
|
||||
- reward_promised
|
||||
- reward_given
|
||||
- location
|
||||
- coordinates
|
||||
- continent
|
||||
- country
|
||||
- description
|
||||
- farmid
|
||||
- capacity (disks, mem, ...)
|
||||
- gridversion (eg. 3.16)
|
||||
- nodestats
|
||||
- ...
|
||||
- uptime
|
||||
- bandwidth
|
||||
- referral system
|
||||
- coupons for discounts (one product can have multiple coupons and discounts)
|
||||
- data gets imported with heroscript for what we sell
|
||||
- minimal wallet function (BTC, CHF, MGLD, TFT, INCA)
|
||||
- transactions, so they can see what they spend money on
|
||||
- transfer/exchange
|
||||
- basic communication (messages in/out)
|
||||
- to allow us to communicate with user
|
||||
- news
|
||||
- basic news feed with topics, which we can set
|
||||
- vdc
|
||||
- name
|
||||
- description (optional)
|
||||
- spendinglimit
|
||||
- currency per month, week or day e.g. 0.1 BTC/month
|
||||
- each spending limit has name
|
||||
- admins, list of pubkeys who have access to this and can add capacity to it, or delete, ...
|
||||
- deployment
|
||||
- deploymentid
|
||||
- vdcid
|
||||
- heroscript
|
||||
- status
|
||||
- links (name, link, description, category)
|
||||
47
examples/baobab/generator/geomind_poc/test_commerce.vsh
Normal file
47
examples/baobab/generator/geomind_poc/test_commerce.vsh
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env -S v
|
||||
|
||||
import freeflowuniverse.crystallib.core.playbook
|
||||
import geomind_poc
|
||||
|
||||
fn main() {
|
||||
test_script := "
|
||||
!!commerce.merchant
|
||||
name: 'Tech Gadgets Store'
|
||||
description: 'Premium electronics and gadgets retailer'
|
||||
contact: 'contact@techgadgets.com'
|
||||
|
||||
!!commerce.component
|
||||
name: '4K Display Panel'
|
||||
description: '55-inch 4K UHD Display Panel'
|
||||
specs:
|
||||
resolution: '3840x2160'
|
||||
refreshRate: '120Hz'
|
||||
panel_type: 'OLED'
|
||||
price: 599.99
|
||||
currency: 'USD'
|
||||
|
||||
!!commerce.template
|
||||
name: 'Smart TV 55-inch'
|
||||
description: '55-inch Smart TV with 4K Display'
|
||||
components: '123e4567-e89b-12d3-a456-426614174001'
|
||||
merchant_id: '123e4567-e89b-12d3-a456-426614174000'
|
||||
category: 'Electronics'
|
||||
|
||||
!!commerce.product
|
||||
template_id: '123e4567-e89b-12d3-a456-426614174002'
|
||||
merchant_id: '123e4567-e89b-12d3-a456-426614174000'
|
||||
stock_quantity: 50
|
||||
|
||||
!!commerce.order
|
||||
customer_id: '123e4567-e89b-12d3-a456-426614174005'
|
||||
items:
|
||||
- '123e4567-e89b-12d3-a456-426614174003:2:899.99:USD'
|
||||
|
||||
!!commerce.update_order
|
||||
order_id: '123e4567-e89b-12d3-a456-426614174004'
|
||||
status: 'shipped'
|
||||
"
|
||||
|
||||
mut plbook := playbook.new(text: test_script)!
|
||||
geomind_poc.play_commerce(mut plbook)!
|
||||
}
|
||||
25
examples/baobab/generator/mcc_example.vsh
Executable file
25
examples/baobab/generator/mcc_example.vsh
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.generator
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openapi
|
||||
import os
|
||||
|
||||
const example_dir = os.join_path('${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/circles/mcc',
|
||||
'baobab')
|
||||
const openapi_spec_path = os.join_path('${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/circles/mcc',
|
||||
'openapi.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openapi_spec := openapi.new(path: openapi_spec_path)!
|
||||
actor_spec := specification.from_openapi(openapi_spec)!
|
||||
|
||||
actor_module := generator.generate_actor_module(actor_spec,
|
||||
interfaces: [.openapi, .http]
|
||||
)!
|
||||
|
||||
actor_module.write(example_dir,
|
||||
format: true
|
||||
overwrite: true
|
||||
compile: false
|
||||
)!
|
||||
4
examples/baobab/generator/openapi_e2e/.gitignore
vendored
Normal file
4
examples/baobab/generator/openapi_e2e/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
methods.v
|
||||
meeting_scheduler_actor
|
||||
generate_actor_module
|
||||
src
|
||||
27
examples/baobab/generator/openapi_e2e/generate_actor_module.vsh
Executable file
27
examples/baobab/generator/openapi_e2e/generate_actor_module.vsh
Executable file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.generator
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openapi
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const openapi_spec_path = os.join_path(example_dir, 'openapi.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openapi_spec := openapi.new(path: openapi_spec_path)!
|
||||
actor_spec := specification.from_openapi(openapi_spec)!
|
||||
|
||||
println(actor_spec)
|
||||
|
||||
actor_module := generator.generate_actor_module(actor_spec,
|
||||
interfaces: [.openapi, .http]
|
||||
)!
|
||||
|
||||
actor_module.write(example_dir,
|
||||
format: false
|
||||
overwrite: true
|
||||
compile: false
|
||||
)!
|
||||
|
||||
// os.execvp('bash', ['${example_dir}/meeting_scheduler_actor/scripts/run.sh'])!
|
||||
311
examples/baobab/generator/openapi_e2e/openapi.json
Normal file
311
examples/baobab/generator/openapi_e2e/openapi.json
Normal file
@@ -0,0 +1,311 @@
|
||||
{
|
||||
"openapi": "3.0.0",
|
||||
"info": {
|
||||
"title": "Meeting Scheduler",
|
||||
"version": "1.0.0",
|
||||
"description": "An API for managing meetings, availability, and scheduling."
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "http://localhost:8080/openapi/v1",
|
||||
"description": "Production server"
|
||||
},
|
||||
{
|
||||
"url": "http://localhost:8081/openapi/v1",
|
||||
"description": "Example server"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"/users": {
|
||||
"get": {
|
||||
"summary": "List all users",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "A list of users",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/User"
|
||||
}
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"id": "1",
|
||||
"name": "Alice",
|
||||
"email": "alice@example.com"
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"name": "Bob",
|
||||
"email": "bob@example.com"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/users/{userId}": {
|
||||
"get": {
|
||||
"operationId": "get_user",
|
||||
"summary": "Get user by ID",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "userId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "uint32"
|
||||
},
|
||||
"description": "The ID of the user",
|
||||
"example": 1
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "User details",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/User"
|
||||
},
|
||||
"example": {
|
||||
"id": "1",
|
||||
"name": "Alice",
|
||||
"email": "alice@example.com"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/events": {
|
||||
"post": {
|
||||
"summary": "Create an event",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Event"
|
||||
},
|
||||
"example": {
|
||||
"title": "Team Meeting",
|
||||
"description": "Weekly sync",
|
||||
"startTime": "2023-10-10T10:00:00Z",
|
||||
"endTime": "2023-10-10T11:00:00Z",
|
||||
"userId": "1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Event created",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Event"
|
||||
},
|
||||
"example": {
|
||||
"id": "101",
|
||||
"title": "Team Meeting",
|
||||
"description": "Weekly sync",
|
||||
"startTime": "2023-10-10T10:00:00Z",
|
||||
"endTime": "2023-10-10T11:00:00Z",
|
||||
"userId": "1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/availability": {
|
||||
"get": {
|
||||
"summary": "Get availability for a user",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "userId",
|
||||
"in": "query",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "The ID of the user",
|
||||
"example": "1"
|
||||
},
|
||||
{
|
||||
"name": "date",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "date"
|
||||
},
|
||||
"description": "The date to check availability (YYYY-MM-DD)",
|
||||
"example": "2023-10-10"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Availability details",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/TimeSlot"
|
||||
}
|
||||
},
|
||||
"example": [
|
||||
{
|
||||
"startTime": "10:00:00",
|
||||
"endTime": "11:00:00",
|
||||
"available": true
|
||||
},
|
||||
{
|
||||
"startTime": "11:00:00",
|
||||
"endTime": "12:00:00",
|
||||
"available": false
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/bookings": {
|
||||
"post": {
|
||||
"summary": "Book a meeting",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Booking"
|
||||
},
|
||||
"example": {
|
||||
"userId": "1",
|
||||
"eventId": "101",
|
||||
"timeSlot": {
|
||||
"startTime": "10:00:00",
|
||||
"endTime": "11:00:00",
|
||||
"available": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Booking created",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Booking"
|
||||
},
|
||||
"example": {
|
||||
"id": "5001",
|
||||
"userId": "1",
|
||||
"eventId": "101",
|
||||
"timeSlot": {
|
||||
"startTime": "10:00:00",
|
||||
"endTime": "11:00:00",
|
||||
"available": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"User": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"format": "email"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Event": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"startTime": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"endTime": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"userId": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"TimeSlot": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"startTime": {
|
||||
"type": "string",
|
||||
"format": "time"
|
||||
},
|
||||
"endTime": {
|
||||
"type": "string",
|
||||
"format": "time"
|
||||
},
|
||||
"available": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Booking": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"userId": {
|
||||
"type": "string"
|
||||
},
|
||||
"eventId": {
|
||||
"type": "string"
|
||||
},
|
||||
"timeSlot": {
|
||||
"$ref": "#/components/schemas/TimeSlot"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
3
examples/baobab/specification/README.md
Normal file
3
examples/baobab/specification/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# Actor Specification Examples
|
||||
|
||||
These examples show how `OpenRPC` and `OpenAPI` specifications can be translated back and forth into an `ActorSpecification`. This is an important step of actor generation as actor code is generated from actor specification.
|
||||
346
examples/baobab/specification/openapi.json
Normal file
346
examples/baobab/specification/openapi.json
Normal file
@@ -0,0 +1,346 @@
|
||||
{
|
||||
"openapi": "3.0.3",
|
||||
"info": {
|
||||
"title": "Pet Store API",
|
||||
"description": "A sample API for a pet store",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "https://api.petstore.example.com/v1",
|
||||
"description": "Production server"
|
||||
},
|
||||
{
|
||||
"url": "https://staging.petstore.example.com/v1",
|
||||
"description": "Staging server"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"/pets": {
|
||||
"get": {
|
||||
"summary": "List all pets",
|
||||
"operationId": "listPets",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "limit",
|
||||
"in": "query",
|
||||
"description": "Maximum number of pets to return",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "A paginated list of pets",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Pets"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid request"
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"summary": "Create a new pet",
|
||||
"operationId": "createPet",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/NewPet"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Pet created",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Pet"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid input"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/pets/{petId}": {
|
||||
"get": {
|
||||
"summary": "Get a pet by ID",
|
||||
"operationId": "getPet",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "petId",
|
||||
"in": "path",
|
||||
"description": "ID of the pet to retrieve",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "A pet",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Pet"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Pet not found"
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"summary": "Delete a pet by ID",
|
||||
"operationId": "deletePet",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "petId",
|
||||
"in": "path",
|
||||
"description": "ID of the pet to delete",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "Pet deleted"
|
||||
},
|
||||
"404": {
|
||||
"description": "Pet not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/orders": {
|
||||
"get": {
|
||||
"summary": "List all orders",
|
||||
"operationId": "listOrders",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "A list of orders",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Order"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/orders/{orderId}": {
|
||||
"get": {
|
||||
"summary": "Get an order by ID",
|
||||
"operationId": "getOrder",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "orderId",
|
||||
"in": "path",
|
||||
"description": "ID of the order to retrieve",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "An order",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Order"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Order not found"
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"summary": "Delete an order by ID",
|
||||
"operationId": "deleteOrder",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "orderId",
|
||||
"in": "path",
|
||||
"description": "ID of the order to delete",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "Order deleted"
|
||||
},
|
||||
"404": {
|
||||
"description": "Order not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/users": {
|
||||
"post": {
|
||||
"summary": "Create a user",
|
||||
"operationId": "createUser",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/NewUser"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "User created",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/User"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Pet": {
|
||||
"type": "object",
|
||||
"required": ["id", "name"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"tag": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"NewPet": {
|
||||
"type": "object",
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"tag": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Pets": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Pet"
|
||||
}
|
||||
},
|
||||
"Order": {
|
||||
"type": "object",
|
||||
"required": ["id", "petId", "quantity", "shipDate"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"petId": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"quantity": {
|
||||
"type": "integer",
|
||||
"format": "int32"
|
||||
},
|
||||
"shipDate": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["placed", "approved", "delivered"]
|
||||
},
|
||||
"complete": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
"User": {
|
||||
"type": "object",
|
||||
"required": ["id", "username"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"username": {
|
||||
"type": "string"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"phone": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"NewUser": {
|
||||
"type": "object",
|
||||
"required": ["username"],
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"phone": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
13
examples/baobab/specification/openapi_to_specification.vsh
Executable file
13
examples/baobab/specification/openapi_to_specification.vsh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env -S v -w -n -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openapi
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const openapi_spec_path = os.join_path(example_dir, 'openapi.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openapi_spec := openapi.new(path: openapi_spec_path)!
|
||||
actor_specification := specification.from_openapi(openapi_spec)!
|
||||
println(actor_specification)
|
||||
132
examples/baobab/specification/openrpc.json
Normal file
132
examples/baobab/specification/openrpc.json
Normal file
@@ -0,0 +1,132 @@
|
||||
{
|
||||
"openrpc": "1.0.0",
|
||||
"info": {
|
||||
"title": "PetStore",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"methods": [
|
||||
{
|
||||
"name": "GetPets",
|
||||
"description": "finds pets in the system that the user has access to by tags and within a limit",
|
||||
"params": [
|
||||
{
|
||||
"name": "tags",
|
||||
"description": "tags to filter by",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "limit",
|
||||
"description": "maximum number of results to return",
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet_list",
|
||||
"description": "all pets from the system, that mathes the tags",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/Pet"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "CreatePet",
|
||||
"description": "creates a new pet in the store. Duplicates are allowed.",
|
||||
"params": [
|
||||
{
|
||||
"name": "new_pet",
|
||||
"description": "Pet to add to the store.",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/NewPet"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet",
|
||||
"description": "the newly created pet",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/Pet"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "GetPetById",
|
||||
"description": "gets a pet based on a single ID, if the user has access to the pet",
|
||||
"params": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "ID of pet to fetch",
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet",
|
||||
"description": "pet response",
|
||||
"schema": {
|
||||
"$ref": "#\/components\/schemas\/Pet"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "DeletePetById",
|
||||
"description": "deletes a single pet based on the ID supplied",
|
||||
"params": [
|
||||
{
|
||||
"name": "id",
|
||||
"description": "ID of pet to delete",
|
||||
"schema": {
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
],
|
||||
"result": {
|
||||
"name": "pet",
|
||||
"description": "pet deleted",
|
||||
"schema": {
|
||||
"type": "null"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"components": {
|
||||
"schemas": {
|
||||
"NewPet": {
|
||||
"title": "NewPet",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"tag": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Pet": {
|
||||
"title": "Pet",
|
||||
"description": "a pet struct that represents a pet",
|
||||
"properties": {
|
||||
"name": {
|
||||
"description": "name of the pet",
|
||||
"type": "string"
|
||||
},
|
||||
"tag": {
|
||||
"description": "a tag of the pet, helps finding pet",
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"description": "unique indentifier",
|
||||
"type": "integer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
13
examples/baobab/specification/openrpc_to_specification.vsh
Executable file
13
examples/baobab/specification/openrpc_to_specification.vsh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env -S v -w -n -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.openrpc
|
||||
import os
|
||||
|
||||
const example_dir = os.dir(@FILE)
|
||||
const openrpc_spec_path = os.join_path(example_dir, 'openrpc.json')
|
||||
|
||||
// the actor specification obtained from the OpenRPC Specification
|
||||
openrpc_spec := openrpc.new(path: openrpc_spec_path)!
|
||||
actor_specification := specification.from_openrpc(openrpc_spec)!
|
||||
println(actor_specification)
|
||||
107
examples/baobab/specification/specification_to_openapi.vsh
Executable file
107
examples/baobab/specification/specification_to_openapi.vsh
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env -S v -w -n -enable-globals run
|
||||
|
||||
import json
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.schemas.jsonschema
|
||||
import freeflowuniverse.herolib.schemas.openrpc
|
||||
import os
|
||||
|
||||
const actor_specification = specification.ActorSpecification{
|
||||
name: 'PetStore'
|
||||
interfaces: [.openrpc]
|
||||
methods: [
|
||||
specification.ActorMethod{
|
||||
name: 'GetPets'
|
||||
description: 'finds pets in the system that the user has access to by tags and within a limit'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'tags'
|
||||
description: 'tags to filter by'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'array'
|
||||
items: jsonschema.Items(jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'string'
|
||||
}))
|
||||
})
|
||||
},
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'limit'
|
||||
description: 'maximum number of results to return'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'integer'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet_list'
|
||||
description: 'all pets from the system, that matches the tags'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/Pet'
|
||||
})
|
||||
}
|
||||
},
|
||||
specification.ActorMethod{
|
||||
name: 'CreatePet'
|
||||
description: 'creates a new pet in the store. Duplicates are allowed.'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'new_pet'
|
||||
description: 'Pet to add to the store.'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/NewPet'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet'
|
||||
description: 'the newly created pet'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/Pet'
|
||||
})
|
||||
}
|
||||
},
|
||||
specification.ActorMethod{
|
||||
name: 'GetPetById'
|
||||
description: 'gets a pet based on a single ID, if the user has access to the pet'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'id'
|
||||
description: 'ID of pet to fetch'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'integer'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet'
|
||||
description: 'pet response'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/Pet'
|
||||
})
|
||||
}
|
||||
},
|
||||
specification.ActorMethod{
|
||||
name: 'DeletePetById'
|
||||
description: 'deletes a single pet based on the ID supplied'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'id'
|
||||
description: 'ID of pet to delete'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'integer'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet'
|
||||
description: 'pet deleted'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'null'
|
||||
})
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
openapi_specification := actor_specification.to_openapi()
|
||||
println(json.encode_pretty(openapi_specification))
|
||||
109
examples/baobab/specification/specification_to_openrpc.vsh
Executable file
109
examples/baobab/specification/specification_to_openrpc.vsh
Executable file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env -S v -w -n -enable-globals run
|
||||
|
||||
import json
|
||||
import freeflowuniverse.herolib.baobab.specification
|
||||
import freeflowuniverse.herolib.core.code
|
||||
import freeflowuniverse.herolib.schemas.jsonschema
|
||||
import freeflowuniverse.herolib.schemas.openrpc
|
||||
import os
|
||||
|
||||
const actor_specification = specification.ActorSpecification{
|
||||
name: 'PetStore'
|
||||
structure: code.Struct{}
|
||||
interfaces: [.openrpc]
|
||||
methods: [
|
||||
specification.ActorMethod{
|
||||
name: 'GetPets'
|
||||
description: 'finds pets in the system that the user has access to by tags and within a limit'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'tags'
|
||||
description: 'tags to filter by'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'array'
|
||||
items: jsonschema.Items(jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'string'
|
||||
}))
|
||||
})
|
||||
},
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'limit'
|
||||
description: 'maximum number of results to return'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'integer'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet_list'
|
||||
description: 'all pets from the system, that matches the tags'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/Pet'
|
||||
})
|
||||
}
|
||||
},
|
||||
specification.ActorMethod{
|
||||
name: 'CreatePet'
|
||||
description: 'creates a new pet in the store. Duplicates are allowed.'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'new_pet'
|
||||
description: 'Pet to add to the store.'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/NewPet'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet'
|
||||
description: 'the newly created pet'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/Pet'
|
||||
})
|
||||
}
|
||||
},
|
||||
specification.ActorMethod{
|
||||
name: 'GetPetById'
|
||||
description: 'gets a pet based on a single ID, if the user has access to the pet'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'id'
|
||||
description: 'ID of pet to fetch'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'integer'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet'
|
||||
description: 'pet response'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Reference{
|
||||
ref: '#/components/schemas/Pet'
|
||||
})
|
||||
}
|
||||
},
|
||||
specification.ActorMethod{
|
||||
name: 'DeletePetById'
|
||||
description: 'deletes a single pet based on the ID supplied'
|
||||
parameters: [
|
||||
openrpc.ContentDescriptor{
|
||||
name: 'id'
|
||||
description: 'ID of pet to delete'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'integer'
|
||||
})
|
||||
},
|
||||
]
|
||||
result: openrpc.ContentDescriptor{
|
||||
name: 'pet'
|
||||
description: 'pet deleted'
|
||||
schema: jsonschema.SchemaRef(jsonschema.Schema{
|
||||
typ: 'null'
|
||||
})
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
openrpc_specification := actor_specification.to_openrpc()
|
||||
println(json.encode_pretty(openrpc_specification))
|
||||
11
examples/biztools/_archive/investor_tool.vsh
Executable file
11
examples/biztools/_archive/investor_tool.vsh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.biz.investortool
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import os
|
||||
|
||||
mut plbook := playbook.new(
|
||||
path: '${os.home_dir()}/code/git.ourworld.tf/ourworld_holding/investorstool/output'
|
||||
)!
|
||||
mut it := investortool.play(mut plbook)!
|
||||
it.check()!
|
||||
48
examples/biztools/_archive/tf9_biz.vsh
Executable file
48
examples/biztools/_archive/tf9_biz.vsh
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env -S v -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
// #!/usr/bin/env -S v -cg -enable-globals run
|
||||
import freeflowuniverse.herolib.data.doctree
|
||||
import freeflowuniverse.herolib.ui.console
|
||||
import freeflowuniverse.herolib.biz.bizmodel
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.core.playcmds
|
||||
import freeflowuniverse.herolib.web.mdbook
|
||||
import freeflowuniverse.herolib.biz.spreadsheet
|
||||
import os
|
||||
|
||||
const name = 'tf9_budget'
|
||||
|
||||
const wikipath = '${os.home_dir()}/code/git.ourworld.tf/ourworld_holding/info_ourworld/collections/${name}'
|
||||
const summarypath = '${wikipath}/summary.md'
|
||||
|
||||
// mut sh := spreadsheet.sheet_new(name: 'test2') or { panic(err) }
|
||||
// println(sh)
|
||||
// sh.row_new(descr: 'this is a description', name: 'something', growth: '0:100aed,55:1000eur')!
|
||||
// println(sh)
|
||||
// println(sh.wiki()!)
|
||||
|
||||
// exit(0)
|
||||
|
||||
// execute the actions so we have the info populated
|
||||
// mut plb:=playbook.new(path: wikipath)!
|
||||
// playcmds.run(mut plb,false)!
|
||||
|
||||
buildpath := '${os.home_dir()}/hero/var/mdbuild/bizmodel'
|
||||
|
||||
// just run the doctree & mdbook and it should
|
||||
// load the doctree, these are all collections
|
||||
mut tree := doctree.new(name: name)!
|
||||
tree.scan(path: wikipath)!
|
||||
tree.export(dest: buildpath, reset: true)!
|
||||
|
||||
// mut bm:=bizmodel.get("test")!
|
||||
// println(bm)
|
||||
|
||||
mut mdbooks := mdbook.get()!
|
||||
mdbooks.generate(
|
||||
name: 'bizmodel'
|
||||
summary_path: summarypath
|
||||
doctree_path: buildpath
|
||||
title: 'bizmodel ${name}'
|
||||
)!
|
||||
mdbook.book_open('bizmodel')!
|
||||
12
examples/biztools/_archive/todo.md
Normal file
12
examples/biztools/_archive/todo.md
Normal file
@@ -0,0 +1,12 @@
|
||||
need to find where the manual is
|
||||
|
||||
- [manual](bizmodel_example/configuration.md)
|
||||
- [widgets](bizmodel_example/widgets.md)
|
||||
- [graph_bar_row](bizmodel_example/graph_bar_row.md)
|
||||
- [sheet_tables](bizmodel_example/sheet_tables.md)
|
||||
- [widget_args](bizmodel_example/widget_args.md)
|
||||
- [params](bizmodel_example/configuration.md)
|
||||
- [revenue params](bizmodel_example/revenue_params.md)
|
||||
- [funding params](bizmodel_example/funding_params.md)
|
||||
- [hr params](bizmodel_example/hr_params.md)
|
||||
- [costs params](bizmodel_example/costs_params.md)
|
||||
17
examples/biztools/bizmodel.vsh
Executable file
17
examples/biztools/bizmodel.vsh
Executable file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.biz.bizmodel
|
||||
import os
|
||||
|
||||
const playbook_path = os.dir(@FILE) + '/playbook'
|
||||
const build_path = os.join_path(os.dir(@FILE), '/docusaurus')
|
||||
|
||||
buildpath := '${os.home_dir()}/hero/var/mdbuild/bizmodel'
|
||||
|
||||
mut model := bizmodel.generate('test', playbook_path)!
|
||||
|
||||
println(model.sheet)
|
||||
println(model.sheet.export()!)
|
||||
|
||||
model.sheet.export(path: '~/Downloads/test.csv')!
|
||||
model.sheet.export(path: '~/code/github/freeflowuniverse/starlight_template/src/content/test.csv')!
|
||||
4
examples/biztools/bizmodel_docusaurus/.gitignore
vendored
Normal file
4
examples/biztools/bizmodel_docusaurus/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
bizmodel
|
||||
dest
|
||||
wiki
|
||||
build
|
||||
1
examples/biztools/bizmodel_docusaurus/archive/img/.done
Normal file
1
examples/biztools/bizmodel_docusaurus/archive/img/.done
Normal file
@@ -0,0 +1 @@
|
||||
ms1bmodel.png
|
||||
BIN
examples/biztools/bizmodel_docusaurus/archive/img/ms1bmodel.png
Normal file
BIN
examples/biztools/bizmodel_docusaurus/archive/img/ms1bmodel.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 1.8 MiB |
13
examples/biztools/bizmodel_docusaurus/archive/revenue.md
Normal file
13
examples/biztools/bizmodel_docusaurus/archive/revenue.md
Normal file
@@ -0,0 +1,13 @@
|
||||
## Revenue
|
||||
|
||||
Overview of achieved revenue.
|
||||
|
||||
Unit is in Million USD.
|
||||
|
||||
!!bizmodel.sheet_wiki title:'REVENUE' includefilter:rev sheetname:'bizmodel_test'
|
||||
|
||||
!!bizmodel.graph_bar_row rowname:revenue_total unit:million sheetname:'bizmodel_test'
|
||||
|
||||
!!bizmodel.graph_line_row rowname:revenue_total unit:million sheetname:'bizmodel_test'
|
||||
|
||||
!!bizmodel.graph_pie_row rowname:revenue_total unit:million size:'80%' sheetname:'bizmodel_test'
|
||||
13
examples/biztools/bizmodel_docusaurus/archive/summary.md
Normal file
13
examples/biztools/bizmodel_docusaurus/archive/summary.md
Normal file
@@ -0,0 +1,13 @@
|
||||
- [bizmodel](bizmodel_example/bizmodel.md)
|
||||
- [Revenue](bizmodel_example/revenue.md)
|
||||
- [Result](bizmodel_example/overview.md)
|
||||
- [parameters](bizmodel_example/params.md)
|
||||
- [revenue_params](bizmodel_example/params/revenue_params.md)
|
||||
- [funding_params](bizmodel_example/params/funding_params.md)
|
||||
- [hr_params](bizmodel_example/params/hr_params.md)
|
||||
- [costs_params](bizmodel_example/params/costs_params.md)
|
||||
- [rows overview](bizmodel_example/rows_overview.md)
|
||||
- [employees](bizmodel_example/employees.md)
|
||||
- [debug](bizmodel_example/debug.md)
|
||||
- [worksheet](bizmodel_example/worksheet.md)
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
# Overview of the rows in the biz model sheet
|
||||
|
||||
|
||||
!!bizmodel.sheet_wiki sheetname:'bizmodel_test'
|
||||
34
examples/biztools/bizmodel_docusaurus/bizmodel_docusaurus.vsh
Executable file
34
examples/biztools/bizmodel_docusaurus/bizmodel_docusaurus.vsh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
//#!/usr/bin/env -S v -cg -enable-globals run
|
||||
import freeflowuniverse.herolib.biz.bizmodel
|
||||
import freeflowuniverse.herolib.core.playbook
|
||||
import freeflowuniverse.herolib.core.playcmds
|
||||
import os
|
||||
|
||||
// TODO: need to fix wrong location
|
||||
const playbook_path = os.dir(@FILE) + '/playbook'
|
||||
const build_path = os.join_path(os.dir(@FILE), '/docusaurus')
|
||||
|
||||
buildpath := '${os.home_dir()}/hero/var/mdbuild/bizmodel'
|
||||
|
||||
mut model := bizmodel.getset('example')!
|
||||
model.workdir = build_path
|
||||
model.play(mut playbook.new(path: playbook_path)!)!
|
||||
|
||||
println(model.sheet)
|
||||
println(model.sheet.export()!)
|
||||
|
||||
// model.sheet.export(path:"~/Downloads/test.csv")!
|
||||
// model.sheet.export(path:"~/code/github/freeflowuniverse/starlight_template/src/content/test.csv")!
|
||||
|
||||
report := model.new_report(
|
||||
name: 'example_report'
|
||||
title: 'Example Business Model'
|
||||
)!
|
||||
|
||||
report.export(
|
||||
path: build_path
|
||||
overwrite: true
|
||||
format: .docusaurus
|
||||
)!
|
||||
@@ -0,0 +1 @@
|
||||
output dir of example
|
||||
22
examples/biztools/bizmodel_docusaurus/docusaurus/build.sh
Executable file
22
examples/biztools/bizmodel_docusaurus/docusaurus/build.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "${script_dir}"
|
||||
|
||||
echo "Docs directory: $script_dir"
|
||||
|
||||
cd "${HOME}/hero/var/docusaurus"
|
||||
|
||||
export PATH=/tmp/docusaurus_build/node_modules/.bin:${HOME}/.bun/bin/:$PATH
|
||||
|
||||
rm -rf /Users/despiegk/hero/var/docusaurus/build/
|
||||
|
||||
. ${HOME}/.zprofile
|
||||
|
||||
bun docusaurus build
|
||||
|
||||
mkdir -p /Users/despiegk/code/github/freeflowuniverse/herolib/examples/biztools/bizmodel/example/docusaurus
|
||||
echo SYNC TO /Users/despiegk/code/github/freeflowuniverse/herolib/examples/biztools/bizmodel/example/docusaurus
|
||||
rsync -rv --delete /Users/despiegk/hero/var/docusaurus/build/ /Users/despiegk/code/github/freeflowuniverse/herolib/examples/biztools/bizmodel/example/docusaurus/
|
||||
@@ -0,0 +1 @@
|
||||
{"style":"dark","links":[]}
|
||||
@@ -0,0 +1 @@
|
||||
{"name":"","title":"Docusaurus","tagline":"","favicon":"img/favicon.png","url":"http://localhost","url_home":"docs/introduction","baseUrl":"/","image":"img/tf_graph.png","metadata":{"description":"Docusaurus","image":"Docusaurus","title":"Docusaurus"},"buildDest":[],"buildDestDev":[]}
|
||||
@@ -0,0 +1 @@
|
||||
{"title":"Business Model","items":[{"href":"https://threefold.info/kristof/","label":"ThreeFold Technology","position":"right"},{"href":"https://threefold.io","label":"Operational Plan","position":"left"}]}
|
||||
16
examples/biztools/bizmodel_docusaurus/docusaurus/develop.sh
Executable file
16
examples/biztools/bizmodel_docusaurus/docusaurus/develop.sh
Executable file
@@ -0,0 +1,16 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "${script_dir}"
|
||||
|
||||
echo "Docs directory: $script_dir"
|
||||
|
||||
cd "${HOME}/hero/var/docusaurus"
|
||||
|
||||
export PATH=/tmp/docusaurus_build/node_modules/.bin:${HOME}/.bun/bin/:$PATH
|
||||
|
||||
. ${HOME}/.zprofile
|
||||
|
||||
bun run start -p 3100
|
||||
10
examples/biztools/bizmodel_docusaurus/load.md
Normal file
10
examples/biztools/bizmodel_docusaurus/load.md
Normal file
@@ -0,0 +1,10 @@
|
||||
|
||||
## Loader instructions
|
||||
|
||||
this will make sure we load the appropriate biz model
|
||||
|
||||
|
||||
```js
|
||||
!!bizmodel.load name:'default' url:'https://github.com/freeflowuniverse/herolib/tree/development/bizmodel/example/data'
|
||||
```
|
||||
|
||||
1
examples/biztools/playbook/.collection
Normal file
1
examples/biztools/playbook/.collection
Normal file
@@ -0,0 +1 @@
|
||||
name:bizmodel_example
|
||||
10
examples/biztools/playbook/bizmodel.md
Normal file
10
examples/biztools/playbook/bizmodel.md
Normal file
@@ -0,0 +1,10 @@
|
||||

|
||||
|
||||
# bizmodel
|
||||
|
||||
OurWorld has developed a tool to generate and keep business models up to date.
|
||||
|
||||
Our aim is to make it easy for ourworld to track changes in planning over the multiple projects and even be able to aggregated them. Because the input for such a plan is text (as you can see in this ebook) its easy to see how the modelling and parameters change over time.
|
||||
|
||||
This is a very flexible tool which will be extended for budgetting, cashflow management, shareholder tables, ...
|
||||
|
||||
31
examples/biztools/playbook/cost_centers.md
Normal file
31
examples/biztools/playbook/cost_centers.md
Normal file
@@ -0,0 +1,31 @@
|
||||
# HR Params
|
||||
|
||||
## Engineering
|
||||
|
||||
Costs can be grouped in cost centers which can then be used to futher process e.g. transcactions between companies.
|
||||
|
||||
```js
|
||||
|
||||
!!bizmodel.costcenter_define bizname:'test'
|
||||
name:'tfdmcc'
|
||||
descr:'TFDMCC executes on near source agreement for TFTech'
|
||||
min_month:'10000USD'
|
||||
max_month:'100000USD'
|
||||
end_date:'1/1/2026' //when does agreement stop
|
||||
|
||||
!!bizmodel.costcenter_define bizname:'test'
|
||||
name:'cs_tftech'
|
||||
descr:'Nearsource agreement for TFTech towards Codescalers'
|
||||
min_month:'10000USD'
|
||||
max_month:'100000USD'
|
||||
end_date:'1/1/2026'
|
||||
|
||||
!!bizmodel.costcenter_define bizname:'test'
|
||||
name:'cs_tfcloud'
|
||||
descr:'Nearsource agreement for TFCloud towards Codescalers'
|
||||
min_month:'10000USD'
|
||||
max_month:'100000USD'
|
||||
end_date:'1/1/2026'
|
||||
|
||||
|
||||
```
|
||||
39
examples/biztools/playbook/costs_params.md
Normal file
39
examples/biztools/playbook/costs_params.md
Normal file
@@ -0,0 +1,39 @@
|
||||
# Generic Overhead Costs
|
||||
|
||||
possible parameters
|
||||
|
||||
- name
|
||||
- descr: description of the cost
|
||||
- cost: is 'month:amount,month:amount, ...', no extrapolation
|
||||
- cost_growth: is 'month:amount,month:amount, ..., or just a nr', will extrapolate
|
||||
- type: travel, admin, legal, varia, office
|
||||
- cost_percent_revenue e.g. 4%, will make sure the cost will be at least 4% of revenue
|
||||
- indexation, e.g. 2%
|
||||
|
||||
Other financial flows can be mentioned here as well.
|
||||
|
||||
|
||||
```js
|
||||
!!bizmodel.cost_define bizname:'test'
|
||||
name:'rental'
|
||||
descr:'Office Rental in BE.'
|
||||
cost:'5000'
|
||||
indexation:'2%'
|
||||
type:'office'
|
||||
|
||||
!!bizmodel.cost_define bizname:'test'
|
||||
name:'oneoff'
|
||||
descr:'Event in Z.'
|
||||
cost_one:'3:50000'
|
||||
type:'event'
|
||||
|
||||
!!bizmodel.cost_define bizname:'test'
|
||||
name:'cloud'
|
||||
descr:'Datacenter and Cloud Costs'
|
||||
cost:'2000eur'
|
||||
cost_percent_revenue:'2%'
|
||||
type:'cloud'
|
||||
|
||||
|
||||
```
|
||||
|
||||
4
examples/biztools/playbook/debug.md
Normal file
4
examples/biztools/playbook/debug.md
Normal file
@@ -0,0 +1,4 @@
|
||||
# Debug
|
||||
|
||||
Some tools and info to help debug the bizmodel simulator.
|
||||
|
||||
20
examples/biztools/playbook/department_params.md
Normal file
20
examples/biztools/playbook/department_params.md
Normal file
@@ -0,0 +1,20 @@
|
||||
# Department Params
|
||||
|
||||
```js
|
||||
|
||||
!!bizmodel.department_define bizname:'test'
|
||||
name:'ops'
|
||||
title:'Operations'
|
||||
order:5
|
||||
|
||||
!!bizmodel.department_define bizname:'test'
|
||||
name:'coordination'
|
||||
title:'Coordination'
|
||||
order:1
|
||||
|
||||
!!bizmodel.department_define bizname:'test'
|
||||
name:'engineering'
|
||||
title:'Engineering'
|
||||
order:4
|
||||
|
||||
```
|
||||
29
examples/biztools/playbook/funding_params.md
Normal file
29
examples/biztools/playbook/funding_params.md
Normal file
@@ -0,0 +1,29 @@
|
||||
# Funding Params
|
||||
|
||||
possible parameters
|
||||
|
||||
- name, e.g. for a specific person
|
||||
- descr: description of the funding
|
||||
- investment is month:amount,month:amount, ...
|
||||
- type: loan or capital
|
||||
|
||||
Other financial flows can be mentioned here as well.
|
||||
|
||||
|
||||
```js
|
||||
!!bizmodel.funding_define bizname:'test'
|
||||
name:'our_investor'
|
||||
descr:'A fantastic super investor.'
|
||||
investment:'3:1000000EUR'
|
||||
type:'capital'
|
||||
|
||||
!!bizmodel.funding_define bizname:'test'
|
||||
name:'a_founder'
|
||||
descr:'Together Are Strong'
|
||||
investment:'2000000'
|
||||
type:'loan'
|
||||
|
||||
|
||||
|
||||
```
|
||||
|
||||
73
examples/biztools/playbook/hr_params.md
Normal file
73
examples/biztools/playbook/hr_params.md
Normal file
@@ -0,0 +1,73 @@
|
||||
# HR Params
|
||||
|
||||
## Engineering
|
||||
|
||||
possible parameters
|
||||
|
||||
- descr, description of the function (e.g. master architect)
|
||||
- cost, any currency eg. 1000usd
|
||||
- in case cost changes over time e.g. 1:10000USD,20:20000USD,60:30000USD
|
||||
- indexation, e.g. 2%
|
||||
- department
|
||||
- name, e.g. for a specific person
|
||||
- nrpeople: how many people per month, growth over time notation e.g. 1:10,60:20 means 10 in month 1 growing to 20 month 60
|
||||
- cost_percent_revenue e.g. 4%, will make sure the cost will be at least 4% of revenue
|
||||
|
||||
```js
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
sid:2
|
||||
descr:'Senior Engineer'
|
||||
cost:'1:12000,12:14000' //cost is always per person
|
||||
department:'engineering'
|
||||
nrpeople:'0:5,20:5'
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
name:'despiegk'
|
||||
title: 'CTO and crazy inventor.'
|
||||
sid:3
|
||||
descr:'CTO'
|
||||
cost:'12000EUR' //the salary is the cost independent of the fulltime status
|
||||
indexation:'10%'
|
||||
department:'coordination'
|
||||
page:'cto.md'
|
||||
fulltime: "50%" //100% means yes
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
descr:'Senior Architect'
|
||||
cost:'10000USD' indexation:'5%'
|
||||
department:'engineering'
|
||||
nrpeople:'0:5,20:10'
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
descr:'Junior Engineer'
|
||||
cost:'4000USD' indexation:'5%'
|
||||
department:'engineering'
|
||||
nrpeople:'0:5,20:10'
|
||||
|
||||
```
|
||||
|
||||
|
||||
## Operations
|
||||
|
||||
```js
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
descr:'Ops Manager'
|
||||
cost:'1:8000,12:14000'
|
||||
department:'ops'
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
descr:'Support Junior'
|
||||
cost:'2000EUR' indexation:'5%'
|
||||
department:'ops'
|
||||
nrpeople:'7:5,18:10'
|
||||
cost_percent_revenue:'1%'
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
descr:'Support Senior'
|
||||
cost:'5000EUR' indexation:'5%'
|
||||
department:'ops'
|
||||
nrpeople:'3:5,20:10'
|
||||
cost_percent_revenue:'1%'
|
||||
costcenter:'tfdmcc:25,cs_tfcloud:75'
|
||||
generate_page:'../employees/support_senior.md'
|
||||
```
|
||||
14
examples/biztools/playbook/params.md
Normal file
14
examples/biztools/playbook/params.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Bizmodel Params
|
||||
|
||||
In this section we can find all the parameters for the bizmodel.
|
||||
|
||||
## how to use and read
|
||||
|
||||
The params are defined in the different instruction files e.g. revenue_params.md
|
||||
|
||||
Often you will see something like `revenue_growth:'10:1000,20:1100'` this can be read as month 10 it 1000, month 20 its 1100.
|
||||
|
||||
The software will extrapolate.
|
||||
|
||||
|
||||
|
||||
85
examples/biztools/playbook/revenue_params.md
Normal file
85
examples/biztools/playbook/revenue_params.md
Normal file
@@ -0,0 +1,85 @@
|
||||
# HR Params
|
||||
|
||||
## Revenue Items (non recurring)
|
||||
|
||||
This company is a cloud company ...
|
||||
|
||||
- name, e.g. for a specific project
|
||||
- descr, description of the revenue line item
|
||||
- revenue_items: does one of revenue, is not exterpolated
|
||||
- revenue_growth: is a revenue stream which is being extrapolated
|
||||
- revenue_setup, revenue for 1 item '1000usd'
|
||||
- revenue_setup_delay
|
||||
- revenue_monthly, revenue per month for 1 item
|
||||
- revenue_monthly_delay, how many months before monthly revenue starts
|
||||
- maintenance_month_perc, how much percent of revenue_setup will come back over months
|
||||
- cogs_setup, cost of good for 1 item at setup
|
||||
- cogs_setup_delay, how many months before setup cogs starts, after sales
|
||||
- cogs_setup_perc: what is percentage of the cogs (can change over time) for setup e.g. 0:50%
|
||||
|
||||
- cogs_monthly, cost of goods for the monthly per 1 item
|
||||
- cogs_monthly_delay, how many months before monthly cogs starts, after sales
|
||||
- cogs_monthly_perc: what is percentage of the cogs (can change over time) for monthly e.g. 0:5%,12:10%
|
||||
|
||||
- nr_sold: how many do we sell per month (is in growth format e.g. 10:100,20:200, default is 1)
|
||||
- nr_months_recurring: how many months is recurring, if 0 then no recurring
|
||||
|
||||
```js
|
||||
!!bizmodel.revenue_define bizname:'test'
|
||||
descr:'OEM Deals'
|
||||
revenue_items:'10:1000000EUR,15:3333,20:1200000'
|
||||
cogs_setup_perc: '1:5%,20:10%'
|
||||
|
||||
!!bizmodel.revenue_define bizname:'test'
|
||||
descr:'License Deals'
|
||||
revenue_growth:'10:1000,20:1100'
|
||||
cogs_perc: '10%'
|
||||
rev_delay_month: 1
|
||||
|
||||
!!bizmodel.revenue_define bizname:'test'
|
||||
descr:'3NODE License Sales 1 Time'
|
||||
//means revenue is 100 month 1, 200 month 60
|
||||
revenue_item:'1:100,60:200'
|
||||
revenue_nr:'10:1000,24:2000,60:40000'
|
||||
cogs_perc: '10%'
|
||||
rev_delay_month: 1
|
||||
|
||||
```
|
||||
|
||||
## Revenue Items Recurring
|
||||
|
||||
possible parameters
|
||||
|
||||
- name, e.g. for a specific project
|
||||
- descr, description of the revenue line item
|
||||
- revenue_setup, revenue for 1 item '1000usd'
|
||||
- revenue_monthly, revenue per month for 1 item
|
||||
- revenue_setup_delay, how many months before revenue comes in after sales
|
||||
- revenue_monthly_delay, how many months before monthly revenue starts
|
||||
- cogs_setup, cost of good for 1 item at setup
|
||||
- cogs_setup_perc: what is percentage of the cogs (can change over time) for setup e.g. 0:50%
|
||||
- cogs_monthly, cost of goods for the monthly per 1 item
|
||||
- cogs_monthly_perc: what is percentage of the cogs (can change over time) for monthly e.g. 0:5%,12:10%
|
||||
- nr_sold: how many do we sell per month (is in growth format e.g. 10:100,20:200)
|
||||
- nr_months: how many months is recurring
|
||||
|
||||
if currency not specified then is always in USD
|
||||
|
||||
```js
|
||||
|
||||
!!bizmodel.revenue_recurring_define bizname:'test'
|
||||
name: '3node_lic'
|
||||
descr:'3NODE License Sales Recurring Basic'
|
||||
revenue_setup:'1:100,60:50'
|
||||
// revenue_setup:'5'
|
||||
revenue_monthly_delay:3
|
||||
revenue_monthly:'1:1,60:1'
|
||||
// cogs_setup:'1:0'
|
||||
cogs_setup_perc:'50%'
|
||||
revenue_setup_delay:1
|
||||
cogs_monthly_perc:'50%'
|
||||
nr_sold:'10:1000,24:2000,60:40000'
|
||||
60 is the default
|
||||
nr_months:60
|
||||
```
|
||||
|
||||
1
examples/clients/groq/.env.example
Normal file
1
examples/clients/groq/.env.example
Normal file
@@ -0,0 +1 @@
|
||||
export GROQ_API_KEY="your-groq-api-key-here"
|
||||
64
examples/clients/groq/README.md
Normal file
64
examples/clients/groq/README.md
Normal file
@@ -0,0 +1,64 @@
|
||||
# Groq AI Client Example
|
||||
|
||||
This example demonstrates how to use Groq's AI API with the herolib OpenAI client. Groq provides API compatibility with OpenAI's client libraries, allowing you to leverage Groq's fast inference speeds with minimal changes to your existing code.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- V programming language installed
|
||||
- A Groq API key (get one from [Groq's website](https://console.groq.com/keys))
|
||||
|
||||
## Setup
|
||||
|
||||
1. Copy the `.env.example` file to `.env`:
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
2. Edit the `.env` file and replace `your-groq-api-key-here` with your actual Groq API key.
|
||||
|
||||
3. Load the environment variables:
|
||||
|
||||
```bash
|
||||
source .env
|
||||
```
|
||||
|
||||
## Running the Example
|
||||
|
||||
Execute the script with:
|
||||
|
||||
```bash
|
||||
v run groq_client.vsh
|
||||
```
|
||||
|
||||
Or make it executable first:
|
||||
|
||||
```bash
|
||||
chmod +x groq_client.vsh
|
||||
./groq_client.vsh
|
||||
```
|
||||
|
||||
## How It Works
|
||||
|
||||
The example uses the existing OpenAI client from herolib but configures it to use Groq's API endpoint:
|
||||
|
||||
1. It retrieves the Groq API key from the environment variables
|
||||
2. Configures the OpenAI client with the Groq API key
|
||||
3. Overrides the default OpenAI URL with Groq's API URL (`https://api.groq.com/openai/v1`)
|
||||
4. Sends a chat completion request to Groq's API
|
||||
5. Displays the response
|
||||
|
||||
## Supported Models
|
||||
|
||||
Groq supports various models including:
|
||||
|
||||
- llama2-70b-4096
|
||||
- mixtral-8x7b-32768
|
||||
- gemma-7b-it
|
||||
|
||||
For a complete and up-to-date list of supported models, refer to the [Groq API documentation](https://console.groq.com/docs/models).
|
||||
|
||||
## Notes
|
||||
|
||||
- The example uses the `gpt_3_5_turbo` enum from the OpenAI client, but Groq will automatically map this to an appropriate model on their end.
|
||||
- For production use, you may want to explicitly specify one of Groq's supported models.
|
||||
46
examples/clients/groq/groq_client.vsh
Executable file
46
examples/clients/groq/groq_client.vsh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
module main
|
||||
|
||||
import freeflowuniverse.herolib.clients.openai
|
||||
import os
|
||||
|
||||
fn main() {
|
||||
// Get API key from environment variable
|
||||
key := os.getenv('GROQ_API_KEY')
|
||||
if key == '' {
|
||||
println('Error: GROQ_API_KEY environment variable not set')
|
||||
println('Please set it by running: source .env')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
// Get the configured client
|
||||
mut client := openai.OpenAI{
|
||||
name: 'groq'
|
||||
api_key: key
|
||||
server_url: 'https://api.groq.com/openai/v1'
|
||||
}
|
||||
|
||||
// Define the model and message for chat completion
|
||||
// Note: Use a model that Groq supports, like llama2-70b-4096 or mixtral-8x7b-32768
|
||||
model := 'qwen-2.5-coder-32b'
|
||||
|
||||
// Create a chat completion request
|
||||
res := client.chat_completion(model, openai.Messages{
|
||||
messages: [
|
||||
openai.Message{
|
||||
role: .user
|
||||
content: 'What are the key differences between Groq and other AI inference providers?'
|
||||
},
|
||||
]
|
||||
})!
|
||||
|
||||
// Print the response
|
||||
println('\nGroq AI Response:')
|
||||
println('==================')
|
||||
println(res.choices[0].message.content)
|
||||
println('\nUsage Statistics:')
|
||||
println('Prompt tokens: ${res.usage.prompt_tokens}')
|
||||
println('Completion tokens: ${res.usage.completion_tokens}')
|
||||
println('Total tokens: ${res.usage.total_tokens}')
|
||||
}
|
||||
86
examples/clients/jina.vsh
Executable file
86
examples/clients/jina.vsh
Executable file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.clients.jina
|
||||
|
||||
mut jina_client := jina.get()!
|
||||
health := jina_client.health()!
|
||||
println('Server health: ${health}')
|
||||
|
||||
// Create embeddings
|
||||
embeddings := jina_client.create_embeddings(
|
||||
input: ['Hello', 'World']
|
||||
model: .jina_embeddings_v3
|
||||
task: 'separation'
|
||||
) or { panic('Error while creating embeddings: ${err}') }
|
||||
|
||||
println('Created embeddings: ${embeddings}')
|
||||
|
||||
// Rerank
|
||||
rerank_result := jina_client.rerank(
|
||||
model: .reranker_v2_base_multilingual
|
||||
query: 'skincare products'
|
||||
documents: ['Product A', 'Product B', 'Product C']
|
||||
top_n: 2
|
||||
) or { panic('Error while reranking: ${err}') }
|
||||
|
||||
println('Rerank result: ${rerank_result}')
|
||||
|
||||
// Train
|
||||
train_result := jina_client.train(
|
||||
model: .jina_clip_v1
|
||||
input: [
|
||||
jina.TrainingExample{
|
||||
text: 'Sample text'
|
||||
label: 'positive'
|
||||
},
|
||||
jina.TrainingExample{
|
||||
image: 'https://letsenhance.io/static/73136da51c245e80edc6ccfe44888a99/1015f/MainBefore.jpg'
|
||||
label: 'negative'
|
||||
},
|
||||
]
|
||||
) or { panic('Error while training: ${err}') }
|
||||
|
||||
println('Train result: ${train_result}')
|
||||
|
||||
// Classify
|
||||
classify_result := jina_client.classify(
|
||||
model: .jina_clip_v1
|
||||
input: [
|
||||
jina.ClassificationInput{
|
||||
text: 'A photo of a cat'
|
||||
},
|
||||
jina.ClassificationInput{
|
||||
image: 'https://letsenhance.io/static/73136da51c245e80edc6ccfe44888a99/1015f/MainBefore.jpg'
|
||||
},
|
||||
]
|
||||
labels: ['cat', 'dog']
|
||||
) or { panic('Error while classifying: ${err}') }
|
||||
|
||||
println('Classification result: ${classify_result}')
|
||||
|
||||
// List classifiers
|
||||
classifiers := jina_client.list_classifiers() or { panic('Error fetching classifiers: ${err}') }
|
||||
println('Classifiers: ${classifiers}')
|
||||
|
||||
// Delete classifier
|
||||
delete_result := jina_client.delete_classifier(classifier_id: classifiers[0].classifier_id) or {
|
||||
panic('Error deleting classifier: ${err}')
|
||||
}
|
||||
println('Delete result: ${delete_result}')
|
||||
|
||||
// Create multi vector
|
||||
multi_vector := jina_client.create_multi_vector(
|
||||
input: [
|
||||
jina.MultiVectorTextDoc{
|
||||
text: 'Hello world'
|
||||
input_type: .document
|
||||
},
|
||||
jina.MultiVectorTextDoc{
|
||||
text: "What's up?"
|
||||
input_type: .query
|
||||
},
|
||||
]
|
||||
embedding_type: ['float']
|
||||
// dimensions: 96
|
||||
)!
|
||||
println('Multi vector: ${multi_vector}')
|
||||
108
examples/clients/mycelium.vsh
Executable file
108
examples/clients/mycelium.vsh
Executable file
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.clients.mycelium
|
||||
import freeflowuniverse.herolib.installers.net.mycelium_installer
|
||||
import freeflowuniverse.herolib.osal
|
||||
import time
|
||||
import os
|
||||
import encoding.base64
|
||||
|
||||
const server1_port = 9001
|
||||
const server2_port = 9002
|
||||
|
||||
fn terminate(port int) ! {
|
||||
// Step 1: Run lsof to get process details
|
||||
res := os.execute('lsof -i:${port}')
|
||||
if res.exit_code != 0 {
|
||||
return error('no service running at port ${port} due to: ${res.output}')
|
||||
}
|
||||
|
||||
// Step 2: Parse the output to extract the PID
|
||||
lines := res.output.split('\n')
|
||||
if lines.len < 2 {
|
||||
return error('no process found running on port ${port}')
|
||||
}
|
||||
|
||||
// The PID is the second column in the output
|
||||
fields := lines[1].split(' ')
|
||||
if fields.len < 2 {
|
||||
return error('failed to parse lsof output')
|
||||
}
|
||||
pid := fields[1]
|
||||
|
||||
// Step 3: Kill the process using the PID
|
||||
kill_res := os.execute('kill ${pid}')
|
||||
if kill_res.exit_code != 0 {
|
||||
return error('failed to kill process ${pid}: ${kill_res.output}')
|
||||
}
|
||||
|
||||
println('Successfully terminated process ${pid} running on port ${port}')
|
||||
}
|
||||
|
||||
// Check if not installed install it.
|
||||
mut installer := mycelium_installer.get()!
|
||||
installer.install()!
|
||||
|
||||
mycelium.delete()!
|
||||
|
||||
spawn fn () {
|
||||
os.execute('mkdir -p /tmp/mycelium_server1 && cd /tmp/mycelium_server1 && mycelium --peers tcp://188.40.132.242:9651 quic://[2a01:4f8:212:fa6::2]:9651 tcp://185.69.166.7:9651 quic://[2a02:1802:5e:0:ec4:7aff:fe51:e36b]:9651 tcp://65.21.231.58:9651 quic://[2a01:4f9:5a:1042::2]:9651 tcp://[2604:a00:50:17b:9e6b:ff:fe1f:e054]:9651 quic://5.78.122.16:9651 tcp://[2a01:4ff:2f0:3621::1]:9651 quic://142.93.217.194:9651 --tun-name tun2 --tcp-listen-port 9652 --quic-listen-port 9653 --api-addr 127.0.0.1:${server1_port}')
|
||||
}()
|
||||
|
||||
spawn fn () {
|
||||
os.execute('mkdir -p /tmp/mycelium_server2 && cd /tmp/mycelium_server2 && mycelium --peers tcp://188.40.132.242:9651 quic://[2a01:4f8:212:fa6::2]:9651 tcp://185.69.166.7:9651 quic://[2a02:1802:5e:0:ec4:7aff:fe51:e36b]:9651 tcp://65.21.231.58:9651 quic://[2a01:4f9:5a:1042::2]:9651 tcp://[2604:a00:50:17b:9e6b:ff:fe1f:e054]:9651 quic://5.78.122.16:9651 tcp://[2a01:4ff:2f0:3621::1]:9651 quic://142.93.217.194:9651 --tun-name tun3 --tcp-listen-port 9654 --quic-listen-port 9655 --api-addr 127.0.0.1:${server2_port}')
|
||||
}()
|
||||
|
||||
defer {
|
||||
terminate(server1_port) or {}
|
||||
terminate(server2_port) or {}
|
||||
}
|
||||
|
||||
time.sleep(2 * time.second)
|
||||
|
||||
mut client1 := mycelium.get()!
|
||||
client1.server_url = 'http://localhost:${server1_port}'
|
||||
client1.name = 'client1'
|
||||
println(client1)
|
||||
|
||||
mut client2 := mycelium.get()!
|
||||
client2.server_url = 'http://localhost:${server2_port}'
|
||||
client2.name = 'client2'
|
||||
println(client2)
|
||||
|
||||
inspect1 := mycelium.inspect(key_file_path: '/tmp/mycelium_server1/priv_key.bin')!
|
||||
inspect2 := mycelium.inspect(key_file_path: '/tmp/mycelium_server2/priv_key.bin')!
|
||||
|
||||
println('Server 1 public key: ${inspect1.public_key}')
|
||||
println('Server 2 public key: ${inspect2.public_key}')
|
||||
|
||||
// Send a message to a node by public key
|
||||
// Parameters: public_key, payload, topic, wait_for_reply
|
||||
msg := client1.send_msg(
|
||||
public_key: inspect2.public_key // destination public key
|
||||
payload: 'Sending a message from the client 1 to the client 2' // message payload
|
||||
topic: 'testing' // optional topic
|
||||
)!
|
||||
|
||||
println('Sent message ID: ${msg.id}')
|
||||
println('send succeeded')
|
||||
|
||||
// Receive messages
|
||||
// Parameters: wait_for_message, peek_only, topic_filter
|
||||
received := client2.receive_msg(wait: true, peek: false, topic: 'testing')!
|
||||
println('Received message from: ${received.src_pk}')
|
||||
println('Message payload: ${base64.decode_str(received.payload)}')
|
||||
|
||||
// Reply to a message
|
||||
// client1.reply_msg(
|
||||
// id: received.id
|
||||
// public_key: received.src_pk
|
||||
// payload: 'Got your message!'
|
||||
// topic: 'greetings'
|
||||
// )!
|
||||
|
||||
// // // Check message status
|
||||
// // status := client.get_msg_status(msg.id)!
|
||||
// // println('Message status: ${status.state}')
|
||||
// // println('Created at: ${status.created}')
|
||||
// // println('Expires at: ${status.deadline}')
|
||||
85
examples/clients/qdrant_example.vsh
Executable file
85
examples/clients/qdrant_example.vsh
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.clients.qdrant
|
||||
import freeflowuniverse.herolib.core.httpconnection
|
||||
import rand
|
||||
|
||||
// 1. Get the qdrant client
|
||||
mut qdrant_client := qdrant.get()!
|
||||
|
||||
// 2. Generate collection name
|
||||
|
||||
collection_name := 'collection_' + rand.string(4)
|
||||
|
||||
// 2. Create a new collection
|
||||
|
||||
created_collection := qdrant_client.create_collection(
|
||||
collection_name: collection_name
|
||||
size: 15
|
||||
distance: 'Cosine'
|
||||
)!
|
||||
|
||||
println('Created Collection: ${created_collection}')
|
||||
|
||||
// 3. Get the created collection
|
||||
get_collection := qdrant_client.get_collection(
|
||||
collection_name: collection_name
|
||||
)!
|
||||
|
||||
println('Get Collection: ${get_collection}')
|
||||
|
||||
// 4. Delete the created collection
|
||||
// deleted_collection := qdrant_client.delete_collection(
|
||||
// collection_name: collection_name
|
||||
// )!
|
||||
|
||||
// println('Deleted Collection: ${deleted_collection}')
|
||||
|
||||
// 5. List all collections
|
||||
list_collection := qdrant_client.list_collections()!
|
||||
println('List Collection: ${list_collection}')
|
||||
|
||||
// 6. Check collection existence
|
||||
collection_existence := qdrant_client.is_collection_exists(
|
||||
collection_name: collection_name
|
||||
)!
|
||||
println('Collection Existence: ${collection_existence}')
|
||||
|
||||
// 7. Retrieve points
|
||||
collection_points := qdrant_client.retrieve_points(
|
||||
collection_name: collection_name
|
||||
ids: [
|
||||
0,
|
||||
3,
|
||||
100,
|
||||
]
|
||||
)!
|
||||
|
||||
println('Collection Points: ${collection_points}')
|
||||
|
||||
// 8. Upsert points
|
||||
upsert_points := qdrant_client.upsert_points(
|
||||
collection_name: collection_name
|
||||
points: [
|
||||
qdrant.Point{
|
||||
payload: {
|
||||
'key': 'value'
|
||||
}
|
||||
vector: [1.0, 2.0, 3.0]
|
||||
},
|
||||
qdrant.Point{
|
||||
payload: {
|
||||
'key': 'value'
|
||||
}
|
||||
vector: [4.0, 5.0, 6.0]
|
||||
},
|
||||
qdrant.Point{
|
||||
payload: {
|
||||
'key': 'value'
|
||||
}
|
||||
vector: [7.0, 8.0, 9.0]
|
||||
},
|
||||
]
|
||||
)!
|
||||
|
||||
println('Upsert Points: ${upsert_points}')
|
||||
115
examples/core/agent_encoding.vsh
Executable file
115
examples/core/agent_encoding.vsh
Executable file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.data.ourtime
|
||||
import freeflowuniverse.herolib.core.jobs.model
|
||||
|
||||
// Create a test agent with some sample data
|
||||
mut agent := model.Agent{
|
||||
pubkey: 'ed25519:1234567890abcdef'
|
||||
address: '192.168.1.100'
|
||||
port: 9999
|
||||
description: 'Test agent for binary encoding'
|
||||
status: model.AgentStatus{
|
||||
guid: 'agent-123'
|
||||
timestamp_first: ourtime.now()
|
||||
timestamp_last: ourtime.now()
|
||||
status: model.AgentState.ok
|
||||
}
|
||||
services: []
|
||||
signature: 'signature-data-here'
|
||||
}
|
||||
|
||||
// Add a service
|
||||
mut service := model.AgentService{
|
||||
actor: 'vm'
|
||||
description: 'Virtual machine management'
|
||||
status: model.AgentServiceState.ok
|
||||
public: true
|
||||
actions: []
|
||||
}
|
||||
|
||||
// Add an action to the service
|
||||
mut action := model.AgentServiceAction{
|
||||
action: 'create'
|
||||
description: 'Create a new virtual machine'
|
||||
status: model.AgentServiceState.ok
|
||||
public: true
|
||||
params: {
|
||||
'name': 'Name of the VM'
|
||||
'memory': 'Memory in MB'
|
||||
'cpu': 'Number of CPU cores'
|
||||
}
|
||||
params_example: {
|
||||
'name': 'my-test-vm'
|
||||
'memory': '2048'
|
||||
'cpu': '2'
|
||||
}
|
||||
}
|
||||
|
||||
service.actions << action
|
||||
agent.services << service
|
||||
|
||||
// Test binary encoding
|
||||
binary_data := agent.dumps() or {
|
||||
println('Failed to encode agent: ${err}')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
println('Successfully encoded agent to binary, size: ${binary_data.len} bytes')
|
||||
|
||||
// Test binary decoding
|
||||
decoded_agent := model.loads(binary_data) or {
|
||||
println('Failed to decode agent: ${err}')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
// Verify the decoded data matches the original
|
||||
assert decoded_agent.pubkey == agent.pubkey
|
||||
assert decoded_agent.address == agent.address
|
||||
assert decoded_agent.port == agent.port
|
||||
assert decoded_agent.description == agent.description
|
||||
assert decoded_agent.signature == agent.signature
|
||||
|
||||
// Verify status
|
||||
assert decoded_agent.status.guid == agent.status.guid
|
||||
assert decoded_agent.status.status == agent.status.status
|
||||
|
||||
// Verify services
|
||||
assert decoded_agent.services.len == agent.services.len
|
||||
if decoded_agent.services.len > 0 {
|
||||
service1 := decoded_agent.services[0]
|
||||
original_service := agent.services[0]
|
||||
|
||||
assert service1.actor == original_service.actor
|
||||
assert service1.description == original_service.description
|
||||
assert service1.status == original_service.status
|
||||
assert service1.public == original_service.public
|
||||
|
||||
// Verify actions
|
||||
assert service1.actions.len == original_service.actions.len
|
||||
if service1.actions.len > 0 {
|
||||
action1 := service1.actions[0]
|
||||
original_action := original_service.actions[0]
|
||||
|
||||
assert action1.action == original_action.action
|
||||
assert action1.description == original_action.description
|
||||
assert action1.status == original_action.status
|
||||
assert action1.public == original_action.public
|
||||
|
||||
// Verify params
|
||||
assert action1.params.len == original_action.params.len
|
||||
for key, value in original_action.params {
|
||||
assert key in action1.params
|
||||
assert action1.params[key] == value
|
||||
}
|
||||
|
||||
// Verify params_example
|
||||
assert action1.params_example.len == original_action.params_example.len
|
||||
for key, value in original_action.params_example {
|
||||
assert key in action1.params_example
|
||||
assert action1.params_example[key] == value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println('Agent binary encoding/decoding test passed successfully')
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user