Compare commits
129 Commits
| SHA1 |
|---|
| 40b3911781 |
| febe87c55e |
| e8d6193d06 |
| 1d98724c02 |
| 26731c38b5 |
| a484da769c |
| cbccce3413 |
| 27c8c06cdb |
| 35eee2dcad |
| cba4a6d7c1 |
| 1217d7b10d |
| cfd5711c86 |
| 9642922445 |
| 9b2b7283c0 |
| 9658f1af8d |
| 13bed3d48a |
| e620dfc6e3 |
| 1171b7b6d6 |
| 3029bf661a |
| 26c945ed08 |
| 1b90ce41e6 |
| e0a8bc32e7 |
| f5d9c6019b |
| 27f48de1f1 |
| add4fb5c48 |
| 17cdfd8a0d |
| 854eb9972b |
| c0339a0922 |
| 601e5db76a |
| b109299210 |
| 082dbed910 |
| 6e32a01faa |
| d73fe6eb25 |
| 4a0fd8edde |
| 34202de296 |
| 3988548fe9 |
| 3bcb6bee80 |
| 7e33acb425 |
| 75d20fd8e1 |
| c38fdd86ac |
| 9069816db1 |
| 1903ebe045 |
| 3dee0d7eef |
| ba85e91c58 |
| 1ed08b3ca4 |
| bcee46fa15 |
| f3449d6812 |
| 3c5e0a053e |
| f6c077c6b5 |
| df5d91e7b6 |
| 25e01e308c |
| 7204aff27e |
| cd91734a84 |
| 6306883463 |
| 42bfecffb6 |
| f7d5415484 |
| 5825640c2c |
| de60c5f78e |
| be19609855 |
| 27bc172257 |
| 6732928156 |
| 3bd1117210 |
| 1cd8e8c299 |
| 97d506ecbf |
| 43ffedfad8 |
| 71298a9704 |
| e76f558f97 |
| e030309b7f |
| e77f923cd2 |
| bd86f2c4f7 |
| 2d00d6cf9f |
| a58d72615d |
| 14771ed944 |
| a6d4a23172 |
| 5c77c6bd8d |
| b0ff9e3fbf |
| 3f82240564 |
| 06a89aead9 |
| 23a723e17f |
| 1501a09e62 |
| 1f9b8c1e76 |
| f7ed2ea31e |
| 15aeb136b2 |
| 50f33e9303 |
| f026565f77 |
| 0845feffac |
| 55f0621983 |
| 75363d7aeb |
| 1f9bc11a2e |
| aab018925d |
| 5fa361256a |
| 42fe7b0a0d |
| 011e5b039e |
| e9bcf6ef69 |
| f885563982 |
| ffff44f347 |
| 0e1450b5db |
| f8734a7e9f |
| c05ec6be7f |
| 8677d177cb |
| dd37eeaa29 |
| d5753ee794 |
| 6b46b3dbaa |
| 4cd5b51085 |
| 0a7851b920 |
| a0fdaf395e |
| 2c5a2ace17 |
| 965a2bebb7 |
| 2c08ee8687 |
| e105dd73b5 |
| f5dfe8c0af |
| ac97e9e7bc |
| beae2cef82 |
| 2b23771056 |
| 19632b4b4b |
| 31c033300a |
| ca4127319d |
| d3d8f0d0f1 |
| 2968e4dddc |
| 30c7951058 |
| af63e266d8 |
| 5d1e3d416e |
| dd9dc59485 |
| b473630ceb |
| a34b8b70ba |
| fd195f0824 |
| a727d19281 |
| 52c88bccb5 |
| 85cb868bff |
README.md (16 changed lines)

````diff
@@ -14,21 +14,17 @@ Herolib is an opinionated library primarily used by ThreeFold to automate cloud
 The Hero tool can be installed with a single command:
 
 ```bash
-curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh > /tmp/install_hero.sh
-bash /tmp/install_hero.sh
-#do not forget to do the following this makes sure vtest and vrun exists
-bash install_herolib.vsh
+curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh | bash
 ```
 
 Hero will be installed in:
 - `/usr/local/bin` for Linux
 - `~/hero/bin` for macOS
 
-After installation on macOS, you may need to:
+After installation on macOS, you may need to do source see below or restart your terminal to ensure the `hero` command is available:
 
 ```bash
 source ~/.zprofile
-# Or copy to system bin directory
-cp ~/hero/bin/hero /usr/local/bin
 ```
 
 The Hero tool can be used to work with git, build documentation, interact with Hero AI, and more.
@@ -40,7 +36,13 @@ For development purposes, use the automated installation script:
 
 ```bash
 curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
 bash /tmp/install_v.sh --analyzer --herolib
 
+#do not forget to do the following this makes sure vtest and vrun exists
+cd ~/code/github/freeflowuniverse/herolib
+bash install_herolib.vsh
+
 # IMPORTANT: Start a new shell after installation for paths to be set correctly
 
 ```
 
 #### Installation Options
````
aiprompts/.openhands/setup.sh (new file, 19 lines)

```bash
#!/bin/bash

# Herolib Web Server Installation Script
# This script sets up the necessary environment for the Flask web server.

set -e  # Exit on any error

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Script directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

/workspace/herolib/install_v.sh
```
aiprompts/ai_instruct/processing/heroscript.md (new file, 109 lines)

## INTENT

We use HeroScript to communicate actions and events in a structured format.
We want you to parse user intents and generate the corresponding HeroScript.

ONLY RETURN THE HEROSCRIPT STATEMENTS; there can be more than one.

## HEROSCRIPT FORMAT

HeroScript is a concise scripting language with the following structure:

```heroscript
!!actor.action_name
    param1: 'value1'
    param2: 'value with spaces'
    multiline_description: '
        This is a multiline description.
        It can span multiple lines.
        '
    arg1 arg2 // Arguments without keys

!!actor.action_name2 param1:something param2:'something with spaces' nr:3
```

Key characteristics:

- **Actions**: Start with `!!`, followed by `actor.action_name` (e.g., `!!mailclient.configure`).
- **Parameters**: Defined as `key:value`. Values can be quoted for spaces.
- **Multiline support**: Parameters like `description` can span multiple lines.
- **Arguments**: Values without keys (e.g., `arg1`).
- Params can be given on one line, with spaces in between.
- Time can be given as +1h, +1d, +1w (hour, day, week); of course 1 can be any number, and +1 means 1 hour from now.
- Time format is: dd/mm/yyyy hh:mm (ONLY USE THIS).
- Comma separation is used a lot in arguments, e.g. 'jan,kristof' or 'jan , kristof'; remove the spaces, it is a list of 2.
- Note that only `!!` is at the start of a line; the rest is indented with spaces per instruction.
- Leave one empty line between heroscript statements.
- Everything after // is a comment.

## HEROSCRIPT SCHEMA

The language we understand:

### calendar management

```heroscript
!!calendar.create when:'+1h' descr:'this is event to discuss eng' attendees:'jan,kristof' name:'meet1' tags:'eng,urgent'
!!calendar.delete name:'meet1'
!!calendar.list tags:'urgent'
```

### contact management

```heroscript
!!contact.add name:'jan' email:'jan@example.com' phone:'123-456-7890'
!!contact.remove name:'jan'
!!contact.list
```

### task management

```heroscript
!!task.create title:'Prepare presentation' due:'+1d' assignee:'jan' name:'task1' tags:'eng,urgent'
    deadline:'+10d' duration:'1h'
!!task.update name:'task1' status:'in progress'
!!task.delete name:'task1'
!!task.list
```

### project management

```heroscript
!!project.create title:'Cloud Product Development' description:'Track progress of cloud product development' name:'cloud_prod'
!!project.update name:'cloud_prod' status:'in progress'
!!project.delete name:'cloud_prod'
!!project.list
!!project.tasks_list name:'cloud_prod' //required properties are name, description, and assignee; if not given, ask
!!project.tasks_add names:'task1, task2'
!!project.tasks_remove names:'task1, task2'
```

### SUPPORTED TAGS

The only supported tags are:

- for intent: eng, prod, support, mgmt, marketing
- for urgency: urgent, high, medium, low

### generic remarks

- Names are lowercase and snake_case; they can be distilled out of the title if only a title is given. Often a user will say "name" but mean the title.
- Time: the format of returned dates or times is always dd/mm/yyyy hh:mm.

## IMPORTANT STARTING INFO

- The current time is 10/08/2025 05:10; use this to define any time-related parameters.

## USER INTENT

I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high.

Also let me know which other meetings I have which are urgent.

Can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'.

Please add tasks to the project related to creating specifications, design documents, and implementation plans.
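Because both prompt files above assume downstream code that reads the generated statements back, a minimal parsing sketch may help. It is illustrative only: the function name and behaviour are assumptions, not part of herolib, and it only handles single-line `key:value` params (no multiline blocks or key-less arguments), following the format rules stated above.

```python
import re

def parse_heroscript(text: str) -> list[dict]:
    """Parse HeroScript text into a list of {actor, action, params} dicts.

    Illustrative sketch only: handles `!!actor.action`, quoted and bare
    key:value params, and strips `//` comments.
    """
    statements = []
    current = None
    for raw in text.splitlines():
        line = raw.split("//", 1)[0].rstrip()  # drop comments
        if not line.strip():
            continue
        if line.startswith("!!"):
            actor, _, action = line[2:].split()[0].partition(".")
            current = {"actor": actor, "action": action, "params": {}}
            statements.append(current)
            rest = line[2:].split(maxsplit=1)
            line = rest[1] if len(rest) > 1 else ""
        if current is None or not line.strip():
            continue
        # key:'quoted value' or key:value pairs, possibly several per line
        for key, quoted, bare in re.findall(r"(\w+):(?:'([^']*)'|(\S+))", line):
            current["params"][key] = quoted if quoted else bare
    return statements

print(parse_heroscript("!!calendar.delete name:'meet1'"))
# [{'actor': 'calendar', 'action': 'delete', 'params': {'name': 'meet1'}}]
```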
aiprompts/ai_instruct/processing/heroscript2.md (new file, 64 lines)

SYSTEM
You are a HeroScript compiler. Convert user intents into valid HeroScript statements.

OUTPUT RULES

1) Return ONLY HeroScript statements. No prose, no backticks.
2) Separate each statement with exactly ONE blank line.
3) Keys use snake_case. Names are lowercase snake_case derived from titles (non-alnum → "_", collapse repeats, trim).
4) Lists are comma-separated with NO spaces (e.g., "jan,alex").
5) Times: OUTPUT MUST BE ABSOLUTE in "dd/mm/yyyy hh:mm" (Europe/Zurich). Convert relative times (e.g., "tomorrow 10am") using CURRENT_TIME.
6) Tags: include at most one intent tag and at most one urgency tag when present.
   - intent: eng,prod,support,mgmt,marketing
   - urgency: urgent,high,medium,low
7) Quotes: quote values containing spaces; otherwise omit quotes (allowed either way).
8) Comments only with // if the user explicitly asks for explanations; otherwise omit.

SCHEMA (exact actions & parameters)

!!calendar.create when:'dd/mm/yyyy hh:mm' name:'<name>' descr:'<text>' attendees:'a,b,c' tags:'intent,urgency'
!!calendar.delete name:'<name>'
!!calendar.list [tags:'tag1,tag2']

!!contact.add name:'<name>' email:'<email>' phone:'<phone>'
!!contact.remove name:'<name>'
!!contact.list

!!task.create title:'<title>' name:'<name>' [due:'dd/mm/yyyy hh:mm'] [assignee:'<name>'] [tags:'intent,urgency'] [deadline:'dd/mm/yyyy hh:mm'] [duration:'<Nd Nh Nm> or <Nh>']
!!task.update name:'<name>' [status:'in progress|done|blocked|todo']
!!task.delete name:'<name>'
!!task.list

!!project.create title:'<title>' description:'<text>' name:'<name>'
!!project.update name:'<name>' [status:'in progress|done|blocked|todo']
!!project.delete name:'<name>'
!!project.list
!!project.tasks_list name:'<project_name>'
!!project.tasks_add name:'<project_name>' names:'task_a,task_b'
!!project.tasks_remove name:'<project_name>' names:'task_a,task_b'

NORMALIZATION & INFERENCE (silent)
- Derive names from titles when missing (see rule 3). Ensure consistency across statements.
- Map phrases to tags when obvious (e.g., "new product" ⇒ intent: prod; "high priority" ⇒ urgency: high).
- Attendees: split on commas, trim, lowercase given names.
- If the user asks for "urgent meetings", use tags:'urgent' specifically.
- Prefer concise descriptions pulled from the user's phrasing.
- Names are required; if missing, ask for clarification.
- For calendar management, make sure to include all relevant details such as time, attendees, and description.

CURRENT_TIME

10/08/2025 05:10

USER_MESSAGE

I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high

also let me know which other meetings I have which are urgent

can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'

Please add tasks to the project related to creating specifications, design documents, and implementation plans.

END
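Rule 3 (deriving a snake_case name from a title) is the kind of normalization that is easy to get subtly wrong, so a small sketch may help. The function name and exact behaviour are assumptions for illustration, not something the prompt itself defines.

```python
import re

def derive_name(title: str) -> str:
    """Lowercase snake_case name from a title: runs of non-alphanumerics become
    a single "_", and leading/trailing underscores are trimmed."""
    name = re.sub(r"[^a-z0-9]+", "_", title.lower())  # non-alnum -> "_", collapsing repeats
    return name.strip("_")

assert derive_name("Cloud Product Development") == "cloud_product_development"
assert derive_name("  Prepare presentation! ") == "prepare_presentation"
```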
aiprompts/ai_instruct/processing/intent.md (new file, 82 lines)

## INSTRUCTIONS

The user will send multiple instructions describing what they want to do; put them into separate categories.

The categories we have defined are:

- calendar management
  - schedule meetings, events, reminders
  - list these events
  - delete them
- contact management
  - add/remove contact information, e.g. phone numbers, email addresses, address information
  - list contacts, search
- task or project management
  - anything we need to do, anything we need to track and plan
  - create/update tasks, set deadlines
  - mark tasks as complete
  - delete tasks
  - project management
- communication (chat, email)
  - see what needs to be communicated, e.g. send a chat to ...
- search statements
  - find on the internet, find specific information from my friends

Detect the intent and split the input into multiple blocks, each block corresponding to one of the identified intents. Identify the intent with the name of the category (e.g. calendar); only use the names above.

Stay as close as possible to the original instructions: copy the exact instructions as given by the user. We only need to sort the instructions into these blocks.

For each instruction make a separate block, e.g. if 2 tasks are given, create 2 blocks.

The format to return is (note the newline after each block title):

```template
===CALENDAR===\n

$the copied text from what user wants

===CONTACT===\n
...

===QUESTION===\n

put here what our system needs to ask the user about anything which is not clear

===END===\n

```

Execute the above on the instructions given by the user below, and return text ONLY following the template.

Note: the format is only ===$NAME=== and then, on the next lines, the original instructions from the user; don't change them.

## special processing of info

- if a date or time is specified (e.g. tomorrow, a time, ...), calculate it back from the current date

## IMPORTANT STARTING INFO

- the current time is 10/08/2025 05:10 (the format of returned dates is always dd/mm/yyyy hh:mm)
- use the current time to produce formatted times out of the instructions
- only return the formatted time

## UNCLEAR INFO

Check the instructions for references such as "you", "me", ...; if they are not clear, ask specifically who is meant.

For tasks, specify per task who needs to do it and when; make sure each instruction (block) is complete and clear for further processing.

Be very specific with the questions, e.g. who is "you", ...

## EXECUTE ABOVE ON THE FOLLOWING

I am planning a birthday for my daughters tomorrow, there will be 10 people.

I would like to know if you can help me with the preparations.

I need a place for my daughter's birthday party.

I need to send a message to my wife isabelle that she needs to pick up the cake.
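A downstream consumer of this prompt has to split the `===NAME===` blocks back out of the model's reply. Below is a minimal sketch of that split; the function name and the dict shape are assumptions for illustration, not part of the prompt.

```python
import re

def split_intent_blocks(text: str) -> dict[str, str]:
    """Split '===NAME===' delimited output into {name: body}, ignoring ===END===."""
    blocks: dict[str, str] = {}
    # Capture each header and everything up to the next header (or end of text).
    for name, body in re.findall(r"===([A-Z]+)===\n(.*?)(?====[A-Z]+===|\Z)", text, re.S):
        if name != "END":
            blocks[name] = body.strip()
    return blocks

sample = "===CALENDAR===\nmeeting tomorrow 10am\n\n===QUESTION===\nwho is attending?\n\n===END===\n"
print(split_intent_blocks(sample))
# {'CALENDAR': 'meeting tomorrow 10am', 'QUESTION': 'who is attending?'}
```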
aiprompts/ai_instruct/uppy/fastapi.md (new file, 16344 lines): file diff suppressed because it is too large.

aiprompts/ai_instruct/uppy/fastapi_mcp.md (new file, 1544 lines): file diff suppressed because it is too large.
aiprompts/ai_instruct/uppy/tus.md (new file, 225 lines)

# tus Resumable Upload Protocol (Condensed for Coding Agents)

## Core Protocol

All Clients and Servers MUST implement the core protocol for resumable uploads.

### Resuming an Upload

1. **Determine Offset (HEAD Request):**
   * **Request:**
     ```
     HEAD /files/{upload_id} HTTP/1.1
     Host: tus.example.org
     Tus-Resumable: 1.0.0
     ```
   * **Response:**
     ```
     HTTP/1.1 200 OK
     Upload-Offset: {current_offset}
     Tus-Resumable: 1.0.0
     ```
   * Server MUST include `Upload-Offset`.
   * Server MUST include `Upload-Length` if known.
   * Server SHOULD return `200 OK` or `204 No Content`.
   * Server MUST prevent caching: `Cache-Control: no-store`.

2. **Resume Upload (PATCH Request):**
   * **Request:**
     ```
     PATCH /files/{upload_id} HTTP/1.1
     Host: tus.example.org
     Content-Type: application/offset+octet-stream
     Content-Length: {chunk_size}
     Upload-Offset: {current_offset}
     Tus-Resumable: 1.0.0

     [binary data chunk]
     ```
   * **Response:**
     ```
     HTTP/1.1 204 No Content
     Tus-Resumable: 1.0.0
     Upload-Offset: {new_offset}
     ```
   * `Content-Type` MUST be `application/offset+octet-stream`.
   * `Upload-Offset` in request MUST match server's current offset (else `409 Conflict`).
   * Server MUST acknowledge with `204 No Content` and `Upload-Offset` (new offset).
   * Server SHOULD return `404 Not Found` for non-existent resources.

### Common Headers

* **`Upload-Offset`**: Non-negative integer. Byte offset within resource.
* **`Upload-Length`**: Non-negative integer. Total size of upload in bytes.
* **`Tus-Version`**: Comma-separated list of supported protocol versions (Server response).
* **`Tus-Resumable`**: Protocol version used (e.g., `1.0.0`). MUST be in every request/response (except `OPTIONS`). If the client version is unsupported, server responds `412 Precondition Failed` with `Tus-Version`.
* **`Tus-Extension`**: Comma-separated list of supported extensions (Server response). Omitted if none.
* **`Tus-Max-Size`**: Non-negative integer. Max allowed upload size in bytes (Server response).
* **`X-HTTP-Method-Override`**: String. Client MAY use to override the HTTP method (e.g., for `PATCH`/`DELETE` limitations).

### Server Configuration (OPTIONS Request)

* **Request:**
  ```
  OPTIONS /files HTTP/1.1
  Host: tus.example.org
  ```
* **Response:**
  ```
  HTTP/1.1 204 No Content
  Tus-Resumable: 1.0.0
  Tus-Version: 1.0.0,0.2.2,0.2.1
  Tus-Max-Size: 1073741824
  Tus-Extension: creation,expiration
  ```
* Response MUST contain `Tus-Version`. MAY include `Tus-Extension` and `Tus-Max-Size`.
* Client SHOULD NOT include `Tus-Resumable` in the request.

## Protocol Extensions

Clients SHOULD use an `OPTIONS` request and the `Tus-Extension` header for feature detection.

### Creation (`creation` extension)

Create a new upload resource. Server MUST add `creation` to `Tus-Extension`.

* **Request (POST):**
  ```
  POST /files HTTP/1.1
  Host: tus.example.org
  Content-Length: 0
  Upload-Length: {total_size} OR Upload-Defer-Length: 1
  Tus-Resumable: 1.0.0
  Upload-Metadata: filename {base64_filename},is_confidential
  ```
  * MUST include `Upload-Length` or `Upload-Defer-Length: 1`.
  * If `Upload-Defer-Length: 1`, client MUST set `Upload-Length` in a subsequent `PATCH`.
  * `Upload-Length: 0` creates an immediately complete empty file.
  * Client MAY supply `Upload-Metadata` (key-value pairs, value Base64 encoded).
  * If `Upload-Length` exceeds `Tus-Max-Size`, server responds `413 Request Entity Too Large`.
* **Response:**
  ```
  HTTP/1.1 201 Created
  Location: {upload_url}
  Tus-Resumable: 1.0.0
  ```
  * Server MUST respond `201 Created` and set the `Location` header to the new resource URL.
  * New resource has implicit offset `0`.

#### Headers

* **`Upload-Defer-Length`**: `1`. Indicates upload size is unknown. Server adds `creation-defer-length` to `Tus-Extension` if supported.
* **`Upload-Metadata`**: Comma-separated `key value` pairs. Key: no spaces/commas, ASCII. Value: Base64 encoded.

### Creation With Upload (`creation-with-upload` extension)

Include initial upload data in the `POST` request. Server MUST add `creation-with-upload` to `Tus-Extension`. Depends on the `creation` extension.

* **Request (POST):**
  ```
  POST /files HTTP/1.1
  Host: tus.example.org
  Content-Length: {initial_chunk_size}
  Upload-Length: {total_size}
  Tus-Resumable: 1.0.0
  Content-Type: application/offset+octet-stream
  Expect: 100-continue

  [initial binary data chunk]
  ```
  * Similar rules as `PATCH` apply for the content.
  * Client SHOULD include `Expect: 100-continue`.
* **Response:**
  ```
  HTTP/1.1 201 Created
  Location: {upload_url}
  Tus-Resumable: 1.0.0
  Upload-Offset: {accepted_offset}
  ```
  * Server MUST include `Upload-Offset` with the accepted bytes.

### Expiration (`expiration` extension)

Server MAY remove unfinished uploads. Server MUST add `expiration` to `Tus-Extension`.

* **Response (PATCH/POST):**
  ```
  HTTP/1.1 204 No Content
  Upload-Expires: Wed, 25 Jun 2014 16:00:00 GMT
  Tus-Resumable: 1.0.0
  Upload-Offset: {new_offset}
  ```
* **`Upload-Expires`**: Datetime in RFC 9110 format. Indicates when the upload expires. Client SHOULD use it to check validity. Server SHOULD respond `404 Not Found` or `410 Gone` for expired uploads.

### Checksum (`checksum` extension)

Verify data integrity of `PATCH` requests. Server MUST add `checksum` to `Tus-Extension`. Server MUST support `sha1`.

* **Request (PATCH):**
  ```
  PATCH /files/{upload_id} HTTP/1.1
  Content-Length: {chunk_size}
  Upload-Offset: {current_offset}
  Tus-Resumable: 1.0.0
  Upload-Checksum: {algorithm} {base64_checksum}

  [binary data chunk]
  ```
* **Response:**
  * `204 No Content`: Checksums match.
  * `400 Bad Request`: Algorithm not supported.
  * `460 Checksum Mismatch`: Checksums mismatch.
  * In the `400`/`460` cases, the chunk MUST be discarded and the upload/offset NOT updated.
* **`Tus-Checksum-Algorithm`**: Comma-separated list of supported algorithms (Server response to `OPTIONS`).
* **`Upload-Checksum`**: `{algorithm} {Base64_encoded_checksum}`.

### Termination (`termination` extension)

Client can terminate uploads. Server MUST add `termination` to `Tus-Extension`.

* **Request (DELETE):**
  ```
  DELETE /files/{upload_id} HTTP/1.1
  Host: tus.example.org
  Content-Length: 0
  Tus-Resumable: 1.0.0
  ```
* **Response:**
  ```
  HTTP/1.1 204 No Content
  Tus-Resumable: 1.0.0
  ```
* Server SHOULD free resources and MUST respond `204 No Content`.
* Future requests to the URL SHOULD return `404 Not Found` or `410 Gone`.

### Concatenation (`concatenation` extension)

Concatenate multiple partial uploads into a single final upload. Server MUST add `concatenation` to `Tus-Extension`.

* **Partial Upload Creation (POST):**
  ```
  POST /files HTTP/1.1
  Upload-Concat: partial
  Upload-Length: {partial_size}
  Tus-Resumable: 1.0.0
  ```
  * `Upload-Concat: partial` header.
  * Server SHOULD NOT process partial uploads until concatenated.
* **Final Upload Creation (POST):**
  ```
  POST /files HTTP/1.1
  Upload-Concat: final;{url_partial1} {url_partial2} ...
  Tus-Resumable: 1.0.0
  ```
  * `Upload-Concat: final;{space-separated_partial_urls}`.
  * Client MUST NOT include `Upload-Length`.
  * Final upload length is the sum of the partials.
  * Server MAY delete partials after concatenation.
  * Server MUST respond `403 Forbidden` to `PATCH` requests against the final upload.
* **`concatenation-unfinished`**: Server adds this to `Tus-Extension` if it supports concatenation while partial uploads are in progress.
* **HEAD Request for Final Upload:**
  * Response SHOULD NOT contain `Upload-Offset` unless concatenation finished.
  * After success, `Upload-Offset` and `Upload-Length` MUST be equal.
  * Response MUST include the `Upload-Concat` header.
* **HEAD Request for Partial Upload:**
  * Response MUST contain `Upload-Offset`.
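To make the core HEAD + PATCH flow concrete, here is a minimal client sketch that resumes an upload and attaches an `Upload-Checksum` header from the checksum extension. It is not part of the added files: the helper name and the use of the third-party `requests` library are assumptions for illustration.

```python
import base64, hashlib
import requests  # assumption: the 'requests' package is installed

TUS = {"Tus-Resumable": "1.0.0"}

def resume_upload(url: str, data: bytes) -> int:
    """Resume an existing tus upload: ask the server for its offset, then PATCH the rest."""
    offset = int(requests.head(url, headers=TUS).headers["Upload-Offset"])
    chunk = data[offset:]
    checksum = base64.b64encode(hashlib.sha1(chunk).digest()).decode()  # checksum extension
    resp = requests.patch(
        url,
        data=chunk,
        headers={
            **TUS,
            "Content-Type": "application/offset+octet-stream",
            "Upload-Offset": str(offset),
            "Upload-Checksum": f"sha1 {checksum}",
        },
    )
    resp.raise_for_status()  # 204 on success; 409 (offset mismatch) or 460 (checksum mismatch) raise here
    return int(resp.headers["Upload-Offset"])
```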
667
aiprompts/ai_instruct/uppy/tus_implementation.md
Normal file
667
aiprompts/ai_instruct/uppy/tus_implementation.md
Normal file
@@ -0,0 +1,667 @@
|
|||||||
|
|
||||||
|
# TUS (1.0.0) — Server-Side Specs (Concise)
|
||||||
|
|
||||||
|
## Always
|
||||||
|
|
||||||
|
* All requests/responses **except** `OPTIONS` MUST include: `Tus-Resumable: 1.0.0`.
|
||||||
|
If unsupported → `412 Precondition Failed` + `Tus-Version`.
|
||||||
|
* Canonical server features via `OPTIONS /files`:
|
||||||
|
|
||||||
|
* `Tus-Version: 1.0.0`
|
||||||
|
* `Tus-Extension: creation,creation-with-upload,termination,checksum,concatenation,concatenation-unfinished` (as supported)
|
||||||
|
* `Tus-Max-Size: <int>` (if hard limit)
|
||||||
|
* `Tus-Checksum-Algorithm: sha1[,md5,crc32...]` (if checksum ext.)
|
||||||
|
|
||||||
|
## Core
|
||||||
|
|
||||||
|
* **Create:** `POST /files` with `Upload-Length: <int>` OR `Upload-Defer-Length: 1`. Optional `Upload-Metadata`.
|
||||||
|
|
||||||
|
* `201 Created` + `Location: /files/{id}`, echo `Tus-Resumable`.
|
||||||
|
* *Creation-With-Upload:* If body present → `Content-Type: application/offset+octet-stream`, accept bytes, respond with `Upload-Offset`.
|
||||||
|
* **Status:** `HEAD /files/{id}`
|
||||||
|
|
||||||
|
* Always return `Upload-Offset` for partial uploads, include `Upload-Length` if known; if deferred, return `Upload-Defer-Length: 1`. `Cache-Control: no-store`.
|
||||||
|
* **Upload:** `PATCH /files/{id}`
|
||||||
|
|
||||||
|
* `Content-Type: application/offset+octet-stream` and `Upload-Offset` (must match server).
|
||||||
|
* On success → `204 No Content` + new `Upload-Offset`.
|
||||||
|
* Mismatch → `409 Conflict`. Bad type → `415 Unsupported Media Type`.
|
||||||
|
* **Terminate:** `DELETE /files/{id}` (if supported) → `204 No Content`. Subsequent requests → `404/410`.
|
||||||
|
|
||||||
|
## Checksum (optional but implemented here)
|
||||||
|
|
||||||
|
* Client MAY send: `Upload-Checksum: <algo> <base64digest>` per `PATCH`.
|
||||||
|
|
||||||
|
* Server MUST verify request body’s checksum of the exact received bytes.
|
||||||
|
* If algo unsupported → `400 Bad Request`.
|
||||||
|
* If mismatch → **discard the chunk** (no offset change) and respond `460 Checksum Mismatch`.
|
||||||
|
* If OK → `204 No Content` + new `Upload-Offset`.
|
||||||
|
* `OPTIONS` MUST include `Tus-Checksum-Algorithm` (comma-separated algos).
|
||||||
|
|
||||||
|
## Concatenation (optional but implemented here)
|
||||||
|
|
||||||
|
* **Partial uploads:** `POST /files` with `Upload-Concat: partial` and `Upload-Length`. (MUST have length; may use creation-with-upload/patch thereafter.)
|
||||||
|
* **Final upload:** `POST /files` with
|
||||||
|
`Upload-Concat: final; /files/{a} /files/{b} ...`
|
||||||
|
|
||||||
|
* MUST NOT include `Upload-Length`.
|
||||||
|
* Final uploads **cannot** be `PATCH`ed (`403`).
|
||||||
|
* Server SHOULD assemble final (in order).
|
||||||
|
* If `concatenation-unfinished` supported, final may be created before partials completed; server completes once all partials are done.
|
||||||
|
* **HEAD semantics:**
|
||||||
|
|
||||||
|
* For *partial*: MUST include `Upload-Offset`.
|
||||||
|
* For *final* before concatenation: SHOULD NOT include `Upload-Offset`. `Upload-Length` MAY be present if computable (= sum of partials’ lengths when known).
|
||||||
|
* After finalization: `Upload-Offset == Upload-Length`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# TUS FastAPI Server (disk-only, crash-safe, checksum + concatenation)
|
||||||
|
|
||||||
|
**Features**
|
||||||
|
|
||||||
|
* All persistent state on disk:
|
||||||
|
|
||||||
|
```
|
||||||
|
TUS_ROOT/
|
||||||
|
{upload_id}/
|
||||||
|
info.json # canonical metadata & status
|
||||||
|
data.part # exists while uploading or while building final
|
||||||
|
data # final file after atomic rename
|
||||||
|
```
|
||||||
|
* Crash recovery: `HEAD` offset = size of `data.part` or `data`.
|
||||||
|
* `.part` during upload; `os.replace()` (atomic) to `data` on completion.
|
||||||
|
* Streaming I/O; `fsync` on file + parent directory.
|
||||||
|
* Checksum: supports `sha1` (can easily add md5/crc32).
|
||||||
|
* Concatenation: server builds final when partials complete; supports `concatenation-unfinished`.
|
||||||
|
|
||||||
|
> Run with: `uv pip install fastapi uvicorn` then `uvicorn tus_server:app --host 0.0.0.0 --port 8080` (or `python tus_server.py`).
|
||||||
|
> Set `TUS_ROOT` env to choose storage root.
|
||||||
|
|
||||||
|
```python
|
||||||
|
# tus_server.py
|
||||||
|
from fastapi import FastAPI, Request, Response, HTTPException
|
||||||
|
from typing import Optional, Dict, Any, List
|
||||||
|
import os, json, uuid, base64, asyncio, errno, hashlib
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# Config
|
||||||
|
# -----------------------------
|
||||||
|
TUS_VERSION = "1.0.0"
|
||||||
|
# Advertise extensions implemented below:
|
||||||
|
TUS_EXTENSIONS = ",".join([
|
||||||
|
"creation",
|
||||||
|
"creation-with-upload",
|
||||||
|
"termination",
|
||||||
|
"checksum",
|
||||||
|
"concatenation",
|
||||||
|
"concatenation-unfinished",
|
||||||
|
])
|
||||||
|
# Supported checksum algorithms (keys = header token)
|
||||||
|
CHECKSUM_ALGOS = ["sha1"] # add "md5" if desired
|
||||||
|
|
||||||
|
TUS_ROOT = os.environ.get("TUS_ROOT", "/tmp/tus")
|
||||||
|
MAX_SIZE = 1 << 40 # 1 TiB default
|
||||||
|
|
||||||
|
os.makedirs(TUS_ROOT, exist_ok=True)
|
||||||
|
app = FastAPI()
|
||||||
|
|
||||||
|
# Per-process locks to prevent concurrent mutations on same upload_id
|
||||||
|
_locks: Dict[str, asyncio.Lock] = {}
|
||||||
|
def _lock_for(upload_id: str) -> asyncio.Lock:
|
||||||
|
if upload_id not in _locks:
|
||||||
|
_locks[upload_id] = asyncio.Lock()
|
||||||
|
return _locks[upload_id]
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# Path helpers
|
||||||
|
# -----------------------------
|
||||||
|
def upload_dir(upload_id: str) -> str:
|
||||||
|
return os.path.join(TUS_ROOT, upload_id)
|
||||||
|
|
||||||
|
def info_path(upload_id: str) -> str:
|
||||||
|
return os.path.join(upload_dir(upload_id), "info.json")
|
||||||
|
|
||||||
|
def part_path(upload_id: str) -> str:
|
||||||
|
return os.path.join(upload_dir(upload_id), "data.part")
|
||||||
|
|
||||||
|
def final_path(upload_id: str) -> str:
|
||||||
|
return os.path.join(upload_dir(upload_id), "data")
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# FS utils (crash-safe)
|
||||||
|
# -----------------------------
|
||||||
|
def _fsync_dir(path: str) -> None:
|
||||||
|
fd = os.open(path, os.O_DIRECTORY)
|
||||||
|
try:
|
||||||
|
os.fsync(fd)
|
||||||
|
finally:
|
||||||
|
os.close(fd)
|
||||||
|
|
||||||
|
def _write_json_atomic(path: str, obj: Dict[str, Any]) -> None:
|
||||||
|
tmp = f"{path}.tmp"
|
||||||
|
data = json.dumps(obj, separators=(",", ":"), ensure_ascii=False)
|
||||||
|
with open(tmp, "w", encoding="utf-8") as f:
|
||||||
|
f.write(data)
|
||||||
|
f.flush()
|
||||||
|
os.fsync(f.fileno())
|
||||||
|
os.replace(tmp, path)
|
||||||
|
_fsync_dir(os.path.dirname(path))
|
||||||
|
|
||||||
|
def _read_json(path: str) -> Dict[str, Any]:
|
||||||
|
with open(path, "r", encoding="utf-8") as f:
|
||||||
|
return json.load(f)
|
||||||
|
|
||||||
|
def _size(path: str) -> int:
|
||||||
|
try:
|
||||||
|
return os.path.getsize(path)
|
||||||
|
except FileNotFoundError:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def _exists(path: str) -> bool:
|
||||||
|
return os.path.exists(path)
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# TUS helpers
|
||||||
|
# -----------------------------
|
||||||
|
def _ensure_tus_version(req: Request):
|
||||||
|
if req.method == "OPTIONS":
|
||||||
|
return
|
||||||
|
v = req.headers.get("Tus-Resumable")
|
||||||
|
if v is None:
|
||||||
|
raise HTTPException(status_code=412, detail="Missing Tus-Resumable")
|
||||||
|
if v != TUS_VERSION:
|
||||||
|
raise HTTPException(status_code=412, detail="Unsupported Tus-Resumable",
|
||||||
|
headers={"Tus-Version": TUS_VERSION})
|
||||||
|
|
||||||
|
def _parse_metadata(raw: Optional[str]) -> str:
|
||||||
|
# Raw passthrough; validate/consume in your app if needed.
|
||||||
|
return raw or ""
|
||||||
|
|
||||||
|
def _new_upload_info(upload_id: str,
|
||||||
|
kind: str, # "single" | "partial" | "final"
|
||||||
|
length: Optional[int],
|
||||||
|
defer_length: bool,
|
||||||
|
metadata: str,
|
||||||
|
parts: Optional[List[str]] = None) -> Dict[str, Any]:
|
||||||
|
return {
|
||||||
|
"upload_id": upload_id,
|
||||||
|
"kind": kind, # "single" (default), "partial", or "final"
|
||||||
|
"length": length, # int or None if deferred/unknown
|
||||||
|
"defer_length": bool(defer_length),
|
||||||
|
"metadata": metadata, # raw Upload-Metadata header
|
||||||
|
"completed": False,
|
||||||
|
"parts": parts or [], # for final: list of upload_ids (not URLs)
|
||||||
|
}
|
||||||
|
|
||||||
|
def _load_info_or_404(upload_id: str) -> Dict[str, Any]:
|
||||||
|
p = info_path(upload_id)
|
||||||
|
if not _exists(p):
|
||||||
|
raise HTTPException(404, "Upload not found")
|
||||||
|
try:
|
||||||
|
return _read_json(p)
|
||||||
|
except Exception as e:
|
||||||
|
raise HTTPException(500, f"Corrupt metadata: {e}")
|
||||||
|
|
||||||
|
def _set_info(upload_id: str, info: Dict[str, Any]) -> None:
|
||||||
|
_write_json_atomic(info_path(upload_id), info)
|
||||||
|
|
||||||
|
def _ensure_dir(path: str):
|
||||||
|
os.makedirs(path, exist_ok=False)
|
||||||
|
|
||||||
|
def _atomic_finalize_file(upload_id: str):
|
||||||
|
"""Rename data.part → data and mark completed."""
|
||||||
|
upath = upload_dir(upload_id)
|
||||||
|
p = part_path(upload_id)
|
||||||
|
f = final_path(upload_id)
|
||||||
|
if _exists(p):
|
||||||
|
with open(p, "rb+") as fp:
|
||||||
|
fp.flush()
|
||||||
|
os.fsync(fp.fileno())
|
||||||
|
os.replace(p, f)
|
||||||
|
_fsync_dir(upath)
|
||||||
|
info = _load_info_or_404(upload_id)
|
||||||
|
info["completed"] = True
|
||||||
|
_set_info(upload_id, info)
|
||||||
|
|
||||||
|
def _current_offsets(upload_id: str):
|
||||||
|
f, p = final_path(upload_id), part_path(upload_id)
|
||||||
|
if _exists(f):
|
||||||
|
return True, False, _size(f)
|
||||||
|
if _exists(p):
|
||||||
|
return False, True, _size(p)
|
||||||
|
return False, False, 0
|
||||||
|
|
||||||
|
def _parse_concat_header(h: Optional[str]) -> Optional[Dict[str, Any]]:
|
||||||
|
if not h:
|
||||||
|
return None
|
||||||
|
h = h.strip()
|
||||||
|
if h == "partial":
|
||||||
|
return {"type": "partial", "parts": []}
|
||||||
|
if h.startswith("final;"):
|
||||||
|
# format: final;/files/a /files/b
|
||||||
|
rest = h[len("final;"):].strip()
|
||||||
|
urls = [s for s in rest.split(" ") if s]
|
||||||
|
return {"type": "final", "parts": urls}
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _extract_upload_id_from_url(url: str) -> str:
|
||||||
|
# Accept relative /files/{id} (common) — robust split:
|
||||||
|
segs = [s for s in url.split("/") if s]
|
||||||
|
return segs[-1] if segs else url
|
||||||
|
|
||||||
|
def _sum_lengths_or_none(ids: List[str]) -> Optional[int]:
|
||||||
|
total = 0
|
||||||
|
for pid in ids:
|
||||||
|
info = _load_info_or_404(pid)
|
||||||
|
if info.get("length") is None:
|
||||||
|
return None
|
||||||
|
total += int(info["length"])
|
||||||
|
return total
|
||||||
|
|
||||||
|
async def _stream_with_checksum_and_append(file_obj, request: Request, algo: Optional[str]) -> int:
|
||||||
|
"""Stream request body to file, verifying checksum if header present.
|
||||||
|
Returns bytes written. On checksum mismatch, truncate to original size and raise HTTPException(460)."""
|
||||||
|
start_pos = file_obj.tell()
|
||||||
|
# Choose hash
|
||||||
|
hasher = None
|
||||||
|
provided_digest = None
|
||||||
|
if algo:
|
||||||
|
if algo not in CHECKSUM_ALGOS:
|
||||||
|
raise HTTPException(400, "Unsupported checksum algorithm")
|
||||||
|
if algo == "sha1":
|
||||||
|
hasher = hashlib.sha1()
|
||||||
|
# elif algo == "md5": hasher = hashlib.md5()
|
||||||
|
# elif algo == "crc32": ... (custom)
|
||||||
|
# Read expected checksum
|
||||||
|
if hasher:
|
||||||
|
uh = request.headers.get("Upload-Checksum")
|
||||||
|
if not uh:
|
||||||
|
# spec: checksum header optional; if algo passed to this fn we must have parsed it already
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
name, b64 = uh.split(" ", 1)
|
||||||
|
if name != algo:
|
||||||
|
raise ValueError()
|
||||||
|
provided_digest = base64.b64decode(b64.encode("ascii"))
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||||
|
written = 0
|
||||||
|
async for chunk in request.stream():
|
||||||
|
if not chunk:
|
||||||
|
continue
|
||||||
|
file_obj.write(chunk)
|
||||||
|
if hasher:
|
||||||
|
hasher.update(chunk)
|
||||||
|
written += len(chunk)
|
||||||
|
# Verify checksum if present
|
||||||
|
if hasher and provided_digest is not None:
|
||||||
|
digest = hasher.digest()
|
||||||
|
if digest != provided_digest:
|
||||||
|
# rollback appended bytes
|
||||||
|
file_obj.truncate(start_pos)
|
||||||
|
file_obj.flush()
|
||||||
|
os.fsync(file_obj.fileno())
|
||||||
|
raise HTTPException(status_code=460, detail="Checksum Mismatch")
|
||||||
|
file_obj.flush()
|
||||||
|
os.fsync(file_obj.fileno())
|
||||||
|
return written
|
||||||
|
|
||||||
|
def _try_finalize_final(upload_id: str):
|
||||||
|
"""If this is a final upload and all partials are completed, build final data and finalize atomically."""
|
||||||
|
info = _load_info_or_404(upload_id)
|
||||||
|
if info.get("kind") != "final" or info.get("completed"):
|
||||||
|
return
|
||||||
|
part_ids = info.get("parts", [])
|
||||||
|
# Check all partials completed and have data
|
||||||
|
for pid in part_ids:
|
||||||
|
pinf = _load_info_or_404(pid)
|
||||||
|
if not pinf.get("completed"):
|
||||||
|
return # still not ready
|
||||||
|
if not _exists(final_path(pid)):
|
||||||
|
# tolerate leftover .part (e.g., if completed used .part->data). If data missing, can't finalize.
|
||||||
|
return
|
||||||
|
# Build final .part by concatenating parts' data in order, then atomically rename
|
||||||
|
up = upload_dir(upload_id)
|
||||||
|
os.makedirs(up, exist_ok=True)
|
||||||
|
ppath = part_path(upload_id)
|
||||||
|
# Reset/overwrite .part
|
||||||
|
with open(ppath, "wb") as out:
|
||||||
|
for pid in part_ids:
|
||||||
|
with open(final_path(pid), "rb") as src:
|
||||||
|
for chunk in iter(lambda: src.read(1024 * 1024), b""):
|
||||||
|
out.write(chunk)
|
||||||
|
out.flush()
|
||||||
|
os.fsync(out.fileno())
|
||||||
|
# If server can compute length now, set it
|
||||||
|
length = _sum_lengths_or_none(part_ids)
|
||||||
|
info["length"] = length if length is not None else info.get("length")
|
||||||
|
_set_info(upload_id, info)
|
||||||
|
_atomic_finalize_file(upload_id)
|
||||||
|
|
||||||
|
# -----------------------------
|
||||||
|
# Routes
|
||||||
|
# -----------------------------
|
||||||
|
@app.options("/files")
|
||||||
|
async def tus_options():
|
||||||
|
headers = {
|
||||||
|
"Tus-Version": TUS_VERSION,
|
||||||
|
"Tus-Extension": TUS_EXTENSIONS,
|
||||||
|
"Tus-Max-Size": str(MAX_SIZE),
|
||||||
|
"Tus-Checksum-Algorithm": ",".join(CHECKSUM_ALGOS),
|
||||||
|
}
|
||||||
|
return Response(status_code=204, headers=headers)
|
||||||
|
|
||||||
|
@app.post("/files")
|
||||||
|
async def tus_create(request: Request):
|
||||||
|
_ensure_tus_version(request)
|
||||||
|
|
||||||
|
metadata = _parse_metadata(request.headers.get("Upload-Metadata"))
|
||||||
|
concat = _parse_concat_header(request.headers.get("Upload-Concat"))
|
||||||
|
|
||||||
|
# Validate creation modes
|
||||||
|
hdr_len = request.headers.get("Upload-Length")
|
||||||
|
hdr_defer = request.headers.get("Upload-Defer-Length")
|
||||||
|
|
||||||
|
if concat and concat["type"] == "partial":
|
||||||
|
# Partial MUST have Upload-Length (spec)
|
||||||
|
if hdr_len is None:
|
||||||
|
raise HTTPException(400, "Partial uploads require Upload-Length")
|
||||||
|
if hdr_defer is not None:
|
||||||
|
raise HTTPException(400, "Partial uploads cannot defer length")
|
||||||
|
elif concat and concat["type"] == "final":
|
||||||
|
# Final MUST NOT include Upload-Length
|
||||||
|
if hdr_len is not None or hdr_defer is not None:
|
||||||
|
raise HTTPException(400, "Final uploads must not include Upload-Length or Upload-Defer-Length")
|
||||||
|
else:
|
||||||
|
# Normal single upload: require length or defer
|
||||||
|
if hdr_len is None and hdr_defer != "1":
|
||||||
|
raise HTTPException(400, "Must provide Upload-Length or Upload-Defer-Length: 1")
|
||||||
|
|
||||||
|
# Parse length
|
||||||
|
length: Optional[int] = None
|
||||||
|
defer = False
|
||||||
|
if hdr_len is not None:
|
||||||
|
try:
|
||||||
|
length = int(hdr_len)
|
||||||
|
if length < 0: raise ValueError()
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(400, "Invalid Upload-Length")
|
||||||
|
if length > MAX_SIZE:
|
||||||
|
raise HTTPException(413, "Upload too large")
|
||||||
|
elif not concat or concat["type"] != "final":
|
||||||
|
# final has no length at creation
|
||||||
|
defer = (hdr_defer == "1")
|
||||||
|
|
||||||
|
upload_id = str(uuid.uuid4())
|
||||||
|
udir = upload_dir(upload_id)
|
||||||
|
_ensure_dir(udir)
|
||||||
|
|
||||||
|
if concat and concat["type"] == "final":
|
||||||
|
# Resolve part ids from URLs
|
||||||
|
part_ids = [_extract_upload_id_from_url(u) for u in concat["parts"]]
|
||||||
|
# Compute length if possible
|
||||||
|
sum_len = _sum_lengths_or_none(part_ids)
|
||||||
|
info = _new_upload_info(upload_id, "final", sum_len, False, metadata, part_ids)
|
||||||
|
_set_info(upload_id, info)
|
||||||
|
|
||||||
|
# Prepare empty .part (will be filled when partials complete)
|
||||||
|
with open(part_path(upload_id), "wb") as f:
|
||||||
|
f.flush(); os.fsync(f.fileno())
|
||||||
|
_fsync_dir(udir)
|
||||||
|
|
||||||
|
# If all partials already complete, finalize immediately
|
||||||
|
_try_finalize_final(upload_id)
|
||||||
|
|
||||||
|
return Response(status_code=201,
|
||||||
|
headers={"Location": f"/files/{upload_id}",
|
||||||
|
"Tus-Resumable": TUS_VERSION})
|
||||||
|
|
||||||
|
# Create partial or single
|
||||||
|
kind = "partial" if (concat and concat["type"] == "partial") else "single"
|
||||||
|
info = _new_upload_info(upload_id, kind, length, defer, metadata)
|
||||||
|
_set_info(upload_id, info)
|
||||||
|
|
||||||
|
# Create empty .part
|
||||||
|
with open(part_path(upload_id), "wb") as f:
|
||||||
|
f.flush(); os.fsync(f.fileno())
|
||||||
|
_fsync_dir(udir)
|
||||||
|
|
||||||
|
# Creation-With-Upload (optional body)
|
||||||
|
upload_offset = 0
|
||||||
|
has_body = request.headers.get("Content-Length") or request.headers.get("Transfer-Encoding")
|
||||||
|
if has_body:
|
||||||
|
ctype = request.headers.get("Content-Type", "")
|
||||||
|
if ctype != "application/offset+octet-stream":
|
||||||
|
raise HTTPException(415, "Content-Type must be application/offset+octet-stream for creation-with-upload")
|
||||||
|
# Checksum header optional; if present, parse algo token
|
||||||
|
uh = request.headers.get("Upload-Checksum")
|
||||||
|
algo = None
|
||||||
|
if uh:
|
||||||
|
try:
|
||||||
|
algo = uh.split(" ", 1)[0]
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||||
|
|
||||||
|
async with _lock_for(upload_id):
|
||||||
|
with open(part_path(upload_id), "ab+") as f:
|
||||||
|
f.seek(0, os.SEEK_END)
|
||||||
|
upload_offset = await _stream_with_checksum_and_append(f, request, algo)
|
||||||
|
|
||||||
|
# If length known and we hit it, finalize
|
||||||
|
inf = _load_info_or_404(upload_id)
|
||||||
|
if inf["length"] is not None and upload_offset == int(inf["length"]):
|
||||||
|
_atomic_finalize_file(upload_id)
|
||||||
|
# If this is a partial that belongs to some final, a watcher could finalize final; here we rely on
|
||||||
|
# client to create final explicitly (spec). Finalization of final is handled by _try_finalize_final
|
||||||
|
# when final resource is created (or rechecked on subsequent HEAD/PATCH).
|
||||||
|
headers = {"Location": f"/files/{upload_id}", "Tus-Resumable": TUS_VERSION}
|
||||||
|
if upload_offset:
|
||||||
|
headers["Upload-Offset"] = str(upload_offset)
|
||||||
|
return Response(status_code=201, headers=headers)
|
||||||
|
|
||||||
|
@app.head("/files/{upload_id}")
|
||||||
|
async def tus_head(upload_id: str, request: Request):
|
||||||
|
_ensure_tus_version(request)
|
||||||
|
info = _load_info_or_404(upload_id)
|
||||||
|
is_final = info.get("kind") == "final"
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
"Tus-Resumable": TUS_VERSION,
|
||||||
|
"Cache-Control": "no-store",
|
||||||
|
}
|
||||||
|
if info.get("metadata"):
|
||||||
|
headers["Upload-Metadata"] = info["metadata"]
|
||||||
|
|
||||||
|
if info.get("length") is not None:
|
||||||
|
headers["Upload-Length"] = str(int(info["length"]))
|
||||||
|
elif info.get("defer_length"):
|
||||||
|
headers["Upload-Defer-Length"] = "1"
|
||||||
|
|
||||||
|
exists_final, exists_part, offset = False, False, 0
|
||||||
|
if is_final and not info.get("completed"):
|
||||||
|
# BEFORE concatenation completes: SHOULD NOT include Upload-Offset
|
||||||
|
# Try to see if we can finalize now (e.g., partials completed after crash)
|
||||||
|
_try_finalize_final(upload_id)
|
||||||
|
info = _load_info_or_404(upload_id)
|
||||||
|
if info.get("completed"):
|
||||||
|
# fallthrough to completed case
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
# For in-progress final, no Upload-Offset; include Upload-Length if computable (already handled above)
|
||||||
|
return Response(status_code=200, headers=headers)
|
||||||
|
|
||||||
|
# For partials or completed finals
|
||||||
|
f = final_path(upload_id)
|
||||||
|
p = part_path(upload_id)
|
||||||
|
if _exists(f):
|
||||||
|
exists_final, offset = True, _size(f)
|
||||||
|
elif _exists(p):
|
||||||
|
exists_part, offset = True, _size(p)
|
||||||
|
else:
|
||||||
|
# if info exists but no data, consider gone
|
||||||
|
raise HTTPException(410, "Upload gone")
|
||||||
|
|
||||||
|
headers["Upload-Offset"] = str(offset)
|
||||||
|
return Response(status_code=200, headers=headers)
|
||||||
|
|
||||||
|
@app.patch("/files/{upload_id}")
|
||||||
|
async def tus_patch(upload_id: str, request: Request):
|
||||||
|
_ensure_tus_version(request)
|
||||||
|
info = _load_info_or_404(upload_id)
|
||||||
|
|
||||||
|
if info.get("kind") == "final":
|
||||||
|
raise HTTPException(403, "Final uploads cannot be patched")
|
||||||
|
|
||||||
|
ctype = request.headers.get("Content-Type", "")
|
||||||
|
if ctype != "application/offset+octet-stream":
|
||||||
|
raise HTTPException(415, "Content-Type must be application/offset+octet-stream")
|
||||||
|
|
||||||
|
# Client offset must match server
|
||||||
|
try:
|
||||||
|
client_offset = int(request.headers.get("Upload-Offset", "-1"))
|
||||||
|
if client_offset < 0: raise ValueError()
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(400, "Invalid or missing Upload-Offset")
|
||||||
|
|
||||||
|
# If length deferred, client may now set Upload-Length (once)
|
||||||
|
if info.get("length") is None and info.get("defer_length"):
|
||||||
|
if "Upload-Length" in request.headers:
|
||||||
|
try:
|
||||||
|
new_len = int(request.headers["Upload-Length"])
|
||||||
|
if new_len < 0:
|
||||||
|
raise ValueError()
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(400, "Invalid Upload-Length")
|
||||||
|
if new_len > MAX_SIZE:
|
||||||
|
raise HTTPException(413, "Upload too large")
|
||||||
|
info["length"] = new_len
|
||||||
|
info["defer_length"] = False
|
||||||
|
_set_info(upload_id, info)
|
||||||
|
|
||||||
|
# Determine current server offset
|
||||||
|
f = final_path(upload_id)
|
||||||
|
p = part_path(upload_id)
|
||||||
|
if _exists(f):
|
||||||
|
raise HTTPException(403, "Upload already finalized")
|
||||||
|
if not _exists(p):
|
||||||
|
raise HTTPException(404, "Upload not found")
|
||||||
|
|
||||||
|
server_offset = _size(p)
|
||||||
|
if client_offset != server_offset:
|
||||||
|
return Response(status_code=409)
|
||||||
|
|
||||||
|
# Optional checksum
|
||||||
|
uh = request.headers.get("Upload-Checksum")
|
||||||
|
algo = None
|
||||||
|
if uh:
|
||||||
|
try:
|
||||||
|
algo = uh.split(" ", 1)[0]
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||||
|
|
||||||
|
# Append data (with rollback on checksum mismatch)
|
||||||
|
async with _lock_for(upload_id):
|
||||||
|
with open(p, "ab+") as fobj:
|
||||||
|
fobj.seek(0, os.SEEK_END)
|
||||||
|
written = await _stream_with_checksum_and_append(fobj, request, algo)
|
||||||
|
|
||||||
|
new_offset = server_offset + written
|
||||||
|
|
||||||
|
# If length known and reached exactly, finalize
|
||||||
|
info = _load_info_or_404(upload_id) # reload
|
||||||
|
if info.get("length") is not None and new_offset == int(info["length"]):
|
||||||
|
_atomic_finalize_file(upload_id)
|
||||||
|
|
||||||
|
# If this is a partial, a corresponding final may exist and be now completable
|
||||||
|
# We don't maintain reverse index; finalization is triggered when HEAD on final is called.
|
||||||
|
# (Optional: scan for finals to proactively finalize.)
|
||||||
|
|
||||||
|
return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION, "Upload-Offset": str(new_offset)})
|
||||||
|
|
||||||
|
@app.delete("/files/{upload_id}")
|
||||||
|
async def tus_delete(upload_id: str, request: Request):
|
||||||
|
_ensure_tus_version(request)
|
||||||
|
async with _lock_for(upload_id):
|
||||||
|
udir = upload_dir(upload_id)
|
||||||
|
for p in (part_path(upload_id), final_path(upload_id), info_path(upload_id)):
|
||||||
|
try:
|
||||||
|
os.remove(p)
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
os.rmdir(udir)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION})
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quick Client Examples (manual)

```bash
# OPTIONS
curl -i -X OPTIONS http://localhost:8080/files

# 1) Single upload (known length)
curl -i -X POST http://localhost:8080/files \
  -H "Tus-Resumable: 1.0.0" \
  -H "Upload-Length: 11" \
  -H "Upload-Metadata: filename Zm9vLnR4dA=="
# → Location: /files/<ID>

# Upload with checksum (sha1 of "hello ")
printf "hello " | curl -i -X PATCH http://localhost:8080/files/<ID> \
  -H "Tus-Resumable: 1.0.0" \
  -H "Content-Type: application/offset+octet-stream" \
  -H "Upload-Offset: 0" \
  -H "Upload-Checksum: sha1 L6v8xR3Lw4N2n9kQox3wL7G0m/I=" \
  --data-binary @-
# (Replace digest with correct base64 for your chunk)

# 2) Concatenation
# Create partial A (5 bytes)
curl -i -X POST http://localhost:8080/files \
  -H "Tus-Resumable: 1.0.0" \
  -H "Upload-Length: 5" \
  -H "Upload-Concat: partial"
# → Location: /files/<A>
printf "hello" | curl -i -X PATCH http://localhost:8080/files/<A> \
  -H "Tus-Resumable: 1.0.0" \
  -H "Content-Type: application/offset+octet-stream" \
  -H "Upload-Offset: 0" \
  --data-binary @-

# Create partial B (6 bytes)
curl -i -X POST http://localhost:8080/files \
  -H "Tus-Resumable: 1.0.0" \
  -H "Upload-Length: 6" \
  -H "Upload-Concat: partial"
# → Location: /files/<B>
printf " world" | curl -i -X PATCH http://localhost:8080/files/<B> \
  -H "Tus-Resumable: 1.0.0" \
  -H "Content-Type: application/offset+octet-stream" \
  -H "Upload-Offset: 0" \
  --data-binary @-

# Create final (may be before or after partials complete)
curl -i -X POST http://localhost:8080/files \
  -H "Tus-Resumable: 1.0.0" \
  -H "Upload-Concat: final; /files/<A> /files/<B>"
# HEAD on final will eventually show Upload-Offset once finalized
curl -i -X HEAD http://localhost:8080/files/<FINAL> -H "Tus-Resumable: 1.0.0"
```
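
The checksum and metadata headers above carry plain base64 values, and the digest shown is only a placeholder. A minimal sketch of how to compute them with the Python standard library (the variable names here are illustrative only):

```python
# Sketch: build the Upload-Checksum value and decode an Upload-Metadata value.
import base64
import hashlib

chunk = b"hello "  # the exact bytes sent in this PATCH request

# Header value: Upload-Checksum: sha1 <base64 digest of the chunk>
digest = base64.b64encode(hashlib.sha1(chunk).digest()).decode()
print(f"Upload-Checksum: sha1 {digest}")

# Upload-Metadata values are base64 too: "filename Zm9vLnR4dA==" decodes to foo.txt
print(base64.b64decode("Zm9vLnR4dA==").decode())
```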

---
## Implementation Notes (agent hints)

* **Durability:** `fsync` the file after every data write; after the `os.replace` of `*.part → data` or `info.json.tmp → info.json`, also `fsync` the parent directory.
* **Checksum:** verify against **this request's** body only; on mismatch, **truncate back** to the previous size and return `460`.
* **Concatenation:** the final upload is never `PATCH`ed. The server builds `final.data.part` by concatenating each partial's **final file** in order, then atomically renames it and marks the upload completed. Finalization is triggered lazily on `HEAD` of the final (and right after creation).
* **Crash Recovery:** offset = `size(data.part)` or `size(data)`; `info.json` is canonical for `kind`, `length`, `defer_length`, `completed`, `parts`.
* **Multi-process deployments:** replace `asyncio.Lock` with per-`upload_id` file locks (`fcntl.flock`) to synchronize across workers; a minimal sketch follows below.
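
For the durability and multi-process hints above, a minimal sketch of the two helpers might look like the following. The names (`LOCK_DIR`, `fsync_dir`, `upload_lock`) are assumptions rather than part of the reference implementation, and in an async handler the blocking `flock` call would normally be pushed onto a thread executor:

```python
# Sketch only: directory fsync (durability) + per-upload_id advisory file lock
# (multi-process synchronization). Assumed helpers, not part of the spec above.
import fcntl
import os
from contextlib import contextmanager

LOCK_DIR = "/tmp/tus-locks"  # assumed location for lock files


def fsync_dir(path: str) -> None:
    """fsync a directory so a preceding os.replace() inside it survives a crash."""
    fd = os.open(path, os.O_DIRECTORY)
    try:
        os.fsync(fd)
    finally:
        os.close(fd)


@contextmanager
def upload_lock(upload_id: str):
    """Exclusive advisory lock per upload_id, shared across worker processes."""
    os.makedirs(LOCK_DIR, exist_ok=True)
    fd = os.open(os.path.join(LOCK_DIR, f"{upload_id}.lock"), os.O_CREAT | os.O_RDWR)
    try:
        fcntl.flock(fd, fcntl.LOCK_EX)  # blocks until this process owns the lock
        yield
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)
```

Usage would mirror the in-process lock, e.g. `with upload_lock(upload_id): ...` around each read-modify-write of `data.part` and `info.json`, with `fsync_dir(...)` on the upload directory after every `os.replace`.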
229
aiprompts/ai_instruct/uppy/uppy.md
Normal file
@@ -0,0 +1,229 @@
```bash
npm install @uppy/react
```

## Components

Pre-composed, plug-and-play components:

<Dashboard /> renders @uppy/dashboard
<DashboardModal /> renders @uppy/dashboard as a modal
<DragDrop /> renders @uppy/drag-drop
<ProgressBar /> renders @uppy/progress-bar
<StatusBar /> renders @uppy/status-bar

For more info, see https://uppy.io/docs/react

We use the tus server for upload support:

npm install @uppy/tus

e.g.

import Uppy from '@uppy/core';
import Dashboard from '@uppy/dashboard';
import Tus from '@uppy/tus';

import '@uppy/core/dist/style.min.css';
import '@uppy/dashboard/dist/style.min.css';

new Uppy()
  .use(Dashboard, { inline: true, target: 'body' })
  .use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })


========================
CODE SNIPPETS
========================

TITLE: React Dashboard Modal Example with TUS
DESCRIPTION: Demonstrates how to use the DashboardModal component from @uppy/react with the Tus plugin for resumable uploads.
LANGUAGE: jsx
CODE:
```
/** @jsx React */
import React from 'react'
import Uppy from '@uppy/core'
import { DashboardModal } from '@uppy/react'
import Tus from '@uppy/tus'

const uppy = new Uppy({ debug: true, autoProceed: false })
  .use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })

class Example extends React.Component {
  state = { open: false }

  render() {
    const { open } = this.state
    return (
      <DashboardModal
        uppy={uppy}
        open={open}
        onRequestClose={this.handleClose}
      />
    )
  }
  // ..snip..
}
```

----------------------------------------

TITLE: Installation using npm for @uppy/react
DESCRIPTION: Provides the command to install the @uppy/react package using npm.
LANGUAGE: bash
CODE:
```
$ npm install @uppy/react @uppy/core @uppy/dashboard @uppy/tus
```

----------------------------------------

TITLE: Uppy Dashboard and Tus Integration Example (HTML & JavaScript)
DESCRIPTION: This snippet demonstrates how to initialize Uppy with the Dashboard and Tus plugins, configure them, and handle upload success events.
LANGUAGE: html
CODE:
```
<html>
<head>
  <link rel="stylesheet" href="https://releases.transloadit.com/uppy/v4.18.0/uppy.min.css" />
</head>

<body>
  <div class="DashboardContainer"></div>
  <button class="UppyModalOpenerBtn">Upload</button>
  <div class="uploaded-files">
    <h5>Uploaded files:</h5>
    <ol></ol>
  </div>
</body>

<script type="module">
  import { Uppy, Dashboard, Tus } from 'https://releases.transloadit.com/uppy/v4.18.0/uppy.min.mjs'
  var uppy = new Uppy({
    debug: true,
    autoProceed: false,
  })
    .use(Dashboard, {
      browserBackButtonClose: false,
      height: 470,
      inline: false,
      replaceTargetContent: true,
      showProgressDetails: true,
      target: '.DashboardContainer',
      trigger: '.UppyModalOpenerBtn',
    })
    .use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
    .on('upload-success', function (file, response) {
      var url = response.uploadURL
      var fileName = file.name

      document.querySelector('.uploaded-files ol').innerHTML +=
        '<li><a href="' + url + '" target="_blank">' + fileName + '</a></li>'
    })
</script>
</html>
```

----------------------------------------

TITLE: Initialize Uppy with Tus Plugin (JavaScript)
DESCRIPTION: Demonstrates how to initialize Uppy and configure the Tus plugin for resumable uploads.
LANGUAGE: js
CODE:
```
import Uppy from '@uppy/core'
import Tus from '@uppy/tus'

const uppy = new Uppy()
uppy.use(Tus, {
  endpoint: 'https://tusd.tusdemo.net/files/', // use your tus endpoint here
  resume: true,
  retryDelays: [0, 1000, 3000, 5000],
})
```

----------------------------------------

TITLE: Uppy Core Initialization and Plugin Usage (JavaScript)
DESCRIPTION: This example demonstrates how to initialize Uppy with core functionality and integrate the Tus plugin. It also shows how to listen for upload completion events.
LANGUAGE: javascript
CODE:
```
import Uppy from '@uppy/core'
import Dashboard from '@uppy/dashboard'
import Tus from '@uppy/tus'

const uppy = new Uppy()
  .use(Dashboard, { trigger: '#select-files' })
  .use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
  .on('complete', (result) => {
    console.log('Upload result:', result)
  })
```

----------------------------------------

TITLE: Uppy XHRUpload Configuration (JavaScript)
DESCRIPTION: This snippet shows the basic JavaScript configuration for Uppy, initializing it with the XHRUpload plugin to send files to a specified endpoint.
LANGUAGE: javascript
CODE:
```
import Uppy from '@uppy/core';
import XHRUpload from '@uppy/xhr-upload';

const uppy = new Uppy({
  debug: true,
  autoProceed: false,
  restrictions: {
    maxFileSize: 100000000,
    maxNumberOfFiles: 10,
    allowedFileTypes: ['image/*', 'video/*']
  }
});

uppy.use(XHRUpload, {
  endpoint: 'YOUR_UPLOAD_ENDPOINT_URL',
  fieldName: 'files[]',
  method: 'post'
});

uppy.on('complete', (result) => {
  console.log('Upload complete:', result);
});

uppy.on('error', (error) => {
  console.error('Upload error:', error);
});
```

----------------------------------------

TITLE: Install Uppy Core Packages for TUS
DESCRIPTION: Installs the core Uppy package along with the Dashboard and Tus plugins using npm.
LANGUAGE: bash
CODE:
```
npm install @uppy/core @uppy/dashboard @uppy/tus @uppy/xhr-upload
```

========================
QUESTIONS AND ANSWERS
========================

TOPIC: Uppy React Components
Q: What is the purpose of the @uppy/react package?
A: The @uppy/react package provides React component wrappers for Uppy's officially maintained UI plugins. It allows developers to easily integrate Uppy's file uploading capabilities into their React applications.

----------------------------------------

TOPIC: Uppy React Components
Q: How can @uppy/react be installed in a project?
A: The @uppy/react package can be installed using npm with the command '$ npm install @uppy/react'.

----------------------------------------

TOPIC: Uppy React Components
Q: Where can I find more detailed documentation for the @uppy/react plugin?
A: More detailed documentation for the @uppy/react plugin is available on the Uppy website at https://uppy.io/docs/react.
@@ -13,12 +13,12 @@ prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
|||||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||||
|
|
||||||
if help_requested {
|
if help_requested {
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(0)
|
exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
additional_args := fp.finalize() or {
|
additional_args := fp.finalize() or {
|
||||||
eprintln(err)
|
eprintln(err)
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run
|
#!/usr/bin/env -S v -n -g -cg -w -parallel-cc -showcc -enable-globals run
|
||||||
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
|
||||||
|
|
||||||
|
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
import os
|
import os
|
||||||
import flag
|
import flag
|
||||||
|
|
||||||
@@ -14,20 +14,20 @@ prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
|||||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||||
|
|
||||||
if help_requested {
|
if help_requested {
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(0)
|
exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
additional_args := fp.finalize() or {
|
additional_args := fp.finalize() or {
|
||||||
eprintln(err)
|
eprintln(err)
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
if additional_args.len > 0 {
|
if additional_args.len > 0 {
|
||||||
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
|
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Change to the hero directory
|
// Change to the hero directory
|
||||||
@@ -37,35 +37,38 @@ os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}
|
|||||||
// Set HEROPATH based on OS
|
// Set HEROPATH based on OS
|
||||||
mut heropath := '/usr/local/bin/hero'
|
mut heropath := '/usr/local/bin/hero'
|
||||||
if os.user_os() == 'macos' {
|
if os.user_os() == 'macos' {
|
||||||
heropath = os.join_path(os.home_dir(), 'hero/bin/hero')
|
heropath = os.join_path(os.home_dir(), 'hero/bin/hero')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set compilation command based on OS and mode
|
// Set compilation command based on OS and mode
|
||||||
compile_cmd := if os.user_os() == 'macos' {
|
compile_cmd := if os.user_os() == 'macos' {
|
||||||
if prod_mode {
|
if prod_mode {
|
||||||
'v -enable-globals -w -n -prod hero.v'
|
'v -enable-globals -g -w -n -prod hero.v'
|
||||||
} else {
|
} else {
|
||||||
'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
|
'v -n -g -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if prod_mode {
|
if prod_mode {
|
||||||
'v -cg -enable-globals -parallel-cc -w -n hero.v'
|
'v -cg -enable-globals -parallel-cc -w -n hero.v'
|
||||||
} else {
|
} else {
|
||||||
'v -cg -enable-globals -w -n hero.v'
|
'v -cg -enable-globals -w -n hero.v'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
|
println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
|
||||||
|
eprintln(compile_cmd)
|
||||||
|
|
||||||
if os.system(compile_cmd) != 0 {
|
if os.system(compile_cmd) != 0 {
|
||||||
panic('Failed to compile hero.v with command: ${compile_cmd}')
|
panic('Failed to compile hero.v with command: ${compile_cmd}')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Make executable
|
// Make executable
|
||||||
os.chmod('hero', 0o755) or { panic('Failed to make hero binary executable: ${err}') }
|
os.chmod('hero', 0o755) or { panic('Failed to make hero binary executable: ${err}') }
|
||||||
|
|
||||||
// Ensure destination directory exists
|
// Ensure destination directory exists
|
||||||
os.mkdir_all(os.dir(heropath)) or { panic('Failed to create directory ${os.dir(heropath)}: ${err}') }
|
os.mkdir_all(os.dir(heropath)) or {
|
||||||
|
panic('Failed to create directory ${os.dir(heropath)}: ${err}')
|
||||||
|
}
|
||||||
println(heropath)
|
println(heropath)
|
||||||
// Copy to destination paths
|
// Copy to destination paths
|
||||||
os.cp('hero', heropath) or { panic('Failed to copy hero binary to ${heropath}: ${err}') }
|
os.cp('hero', heropath) or { panic('Failed to copy hero binary to ${heropath}: ${err}') }
|
||||||
|
|||||||
@@ -64,7 +64,9 @@ account = ${s3keyid}
|
|||||||
key = ${s3appid}
|
key = ${s3appid}
|
||||||
hard_delete = true'
|
hard_delete = true'
|
||||||
|
|
||||||
os.write_file(rclone_conf, config_content) or { return error('Failed to write rclone config: ${err}') }
|
os.write_file(rclone_conf, config_content) or {
|
||||||
|
return error('Failed to write rclone config: ${err}')
|
||||||
|
}
|
||||||
|
|
||||||
println('made S3 config on: ${rclone_conf}')
|
println('made S3 config on: ${rclone_conf}')
|
||||||
content := os.read_file(rclone_conf) or { return error('Failed to read rclone config: ${err}') }
|
content := os.read_file(rclone_conf) or { return error('Failed to read rclone config: ${err}') }
|
||||||
@@ -72,8 +74,10 @@ hard_delete = true'
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn hero_upload() ! {
|
fn hero_upload() ! {
|
||||||
hero_path := os.find_abs_path_of_executable('hero') or { return error("Error: 'hero' command not found in PATH") }
|
hero_path := os.find_abs_path_of_executable('hero') or {
|
||||||
|
return error("Error: 'hero' command not found in PATH")
|
||||||
|
}
|
||||||
|
|
||||||
s3_configure()!
|
s3_configure()!
|
||||||
|
|
||||||
platform_id := get_platform_id()
|
platform_id := get_platform_id()
|
||||||
@@ -83,15 +87,18 @@ fn hero_upload() ! {
|
|||||||
|
|
||||||
// List contents
|
// List contents
|
||||||
os.execute_or_panic('rclone --config="${rclone_conf}" lsl b2:threefold/${platform_id}/')
|
os.execute_or_panic('rclone --config="${rclone_conf}" lsl b2:threefold/${platform_id}/')
|
||||||
|
|
||||||
// Copy hero binary
|
// Copy hero binary
|
||||||
os.execute_or_panic('rclone --config="${rclone_conf}" copy "${hero_path}" b2:threefold/${platform_id}/')
|
os.execute_or_panic('rclone --config="${rclone_conf}" copy "${hero_path}" b2:threefold/${platform_id}/')
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
//os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
|
// os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
|
||||||
println("compile hero can take 60 sec+ on osx.")
|
println('compile hero can take 60 sec+ on osx.')
|
||||||
os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
|
os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
|
||||||
println( "upload:")
|
println('upload:')
|
||||||
hero_upload() or { eprintln(err) exit(1) }
|
hero_upload() or {
|
||||||
|
eprintln(err)
|
||||||
|
exit(1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run
|
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run
|
||||||
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
|
||||||
|
|
||||||
|
// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
import os
|
import os
|
||||||
import flag
|
import flag
|
||||||
|
|
||||||
@@ -14,20 +14,20 @@ prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
|||||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||||
|
|
||||||
if help_requested {
|
if help_requested {
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(0)
|
exit(0)
|
||||||
}
|
}
|
||||||
|
|
||||||
additional_args := fp.finalize() or {
|
additional_args := fp.finalize() or {
|
||||||
eprintln(err)
|
eprintln(err)
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
if additional_args.len > 0 {
|
if additional_args.len > 0 {
|
||||||
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
|
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
|
||||||
println(fp.usage())
|
println(fp.usage())
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Change to the vdo directory
|
// Change to the vdo directory
|
||||||
@@ -37,35 +37,37 @@ os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}
|
|||||||
// Set HEROPATH based on OS
|
// Set HEROPATH based on OS
|
||||||
mut heropath := '/usr/local/bin/vdo'
|
mut heropath := '/usr/local/bin/vdo'
|
||||||
if os.user_os() == 'macos' {
|
if os.user_os() == 'macos' {
|
||||||
heropath = os.join_path(os.home_dir(), 'hero/bin/vdo')
|
heropath = os.join_path(os.home_dir(), 'hero/bin/vdo')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set compilation command based on OS and mode
|
// Set compilation command based on OS and mode
|
||||||
compile_cmd := if os.user_os() == 'macos' {
|
compile_cmd := if os.user_os() == 'macos' {
|
||||||
if prod_mode {
|
if prod_mode {
|
||||||
'v -enable-globals -w -n -prod vdo.v'
|
'v -enable-globals -w -n -prod vdo.v'
|
||||||
} else {
|
} else {
|
||||||
'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals vdo.v'
|
'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals vdo.v'
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if prod_mode {
|
if prod_mode {
|
||||||
'v -cg -enable-globals -parallel-cc -w -n vdo.v'
|
'v -cg -enable-globals -parallel-cc -w -n vdo.v'
|
||||||
} else {
|
} else {
|
||||||
'v -cg -enable-globals -w -n vdo.v'
|
'v -cg -enable-globals -w -n vdo.v'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
|
println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
|
||||||
|
|
||||||
if os.system(compile_cmd) != 0 {
|
if os.system(compile_cmd) != 0 {
|
||||||
panic('Failed to compile vdo.v with command: ${compile_cmd}')
|
panic('Failed to compile vdo.v with command: ${compile_cmd}')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Make executable
|
// Make executable
|
||||||
os.chmod('vdo', 0o755) or { panic('Failed to make vdo binary executable: ${err}') }
|
os.chmod('vdo', 0o755) or { panic('Failed to make vdo binary executable: ${err}') }
|
||||||
|
|
||||||
// Ensure destination directory exists
|
// Ensure destination directory exists
|
||||||
os.mkdir_all(os.dir(heropath)) or { panic('Failed to create directory ${os.dir(heropath)}: ${err}') }
|
os.mkdir_all(os.dir(heropath)) or {
|
||||||
|
panic('Failed to create directory ${os.dir(heropath)}: ${err}')
|
||||||
|
}
|
||||||
println(heropath)
|
println(heropath)
|
||||||
// Copy to destination paths
|
// Copy to destination paths
|
||||||
os.cp('vdo', heropath) or { panic('Failed to copy vdo binary to ${heropath}: ${err}') }
|
os.cp('vdo', heropath) or { panic('Failed to copy vdo binary to ${heropath}: ${err}') }
|
||||||
|
|||||||
36
cli/hero.v
36
cli/hero.v
@@ -3,8 +3,6 @@ module main
|
|||||||
import os
|
import os
|
||||||
import cli { Command }
|
import cli { Command }
|
||||||
import freeflowuniverse.herolib.core.herocmds
|
import freeflowuniverse.herolib.core.herocmds
|
||||||
// import freeflowuniverse.herolib.hero.cmds
|
|
||||||
// import freeflowuniverse.herolib.hero.publishing
|
|
||||||
import freeflowuniverse.herolib.installers.base
|
import freeflowuniverse.herolib.installers.base
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
import freeflowuniverse.herolib.ui
|
import freeflowuniverse.herolib.ui
|
||||||
@@ -15,7 +13,7 @@ import freeflowuniverse.herolib.core.playcmds
|
|||||||
|
|
||||||
fn playcmds_do(path string) ! {
|
fn playcmds_do(path string) ! {
|
||||||
mut plbook := playbook.new(path: path)!
|
mut plbook := playbook.new(path: path)!
|
||||||
playcmds.run(plbook:plbook)!
|
playcmds.run(plbook: plbook)!
|
||||||
}
|
}
|
||||||
|
|
||||||
fn do() ! {
|
fn do() ! {
|
||||||
@@ -83,35 +81,25 @@ fn do() ! {
|
|||||||
|
|
||||||
base.redis_install()!
|
base.redis_install()!
|
||||||
|
|
||||||
// herocmds.cmd_bootstrap(mut cmd)
|
|
||||||
herocmds.cmd_run(mut cmd)
|
herocmds.cmd_run(mut cmd)
|
||||||
herocmds.cmd_git(mut cmd)
|
herocmds.cmd_git(mut cmd)
|
||||||
// herocmds.cmd_init(mut cmd)
|
|
||||||
// herocmds.cmd_imagedownsize(mut cmd)
|
|
||||||
// herocmds.cmd_biztools(mut cmd)
|
|
||||||
// herocmds.cmd_gen(mut cmd)
|
|
||||||
// herocmds.cmd_sshagent(mut cmd)
|
|
||||||
// herocmds.cmd_installers(mut cmd)
|
|
||||||
// herocmds.cmd_configure(mut cmd)
|
|
||||||
// herocmds.cmd_postgres(mut cmd)
|
|
||||||
herocmds.cmd_mdbook(mut cmd)
|
|
||||||
// herocmds.cmd_luadns(mut cmd)
|
|
||||||
// herocmds.cmd_caddy(mut cmd)
|
|
||||||
// herocmds.cmd_zola(mut cmd)
|
|
||||||
// herocmds.cmd_juggler(mut cmd)
|
|
||||||
herocmds.cmd_generator(mut cmd)
|
herocmds.cmd_generator(mut cmd)
|
||||||
herocmds.cmd_docusaurus(mut cmd)
|
herocmds.cmd_docusaurus(mut cmd)
|
||||||
// herocmds.cmd_starlight(mut cmd)
|
// herocmds.cmd_web(mut cmd)
|
||||||
// herocmds.cmd_docsorter(mut cmd)
|
|
||||||
// cmd.add_command(publishing.cmd_publisher(pre_func))
|
|
||||||
cmd.setup()
|
cmd.setup()
|
||||||
cmd.parse(os.args)
|
cmd.parse(os.args)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
do() or { panic(err) }
|
do() or {
|
||||||
|
$dbg;
|
||||||
|
eprintln('Error: ${err}')
|
||||||
|
print_backtrace()
|
||||||
|
exit(1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn pre_func(cmd Command) ! {
|
// fn pre_func(cmd Command) ! {
|
||||||
herocmds.plbook_run(cmd)!
|
// herocmds.plbook_run(cmd)!
|
||||||
}
|
// }
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ fn main() {
|
|||||||
// Create and start the MCP server
|
// Create and start the MCP server
|
||||||
mut server := v_do.new_server()
|
mut server := v_do.new_server()
|
||||||
server.start() or {
|
server.start() or {
|
||||||
eprintln('Error starting server: $err')
|
eprintln('Error starting server: ${err}')
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
1519
debug.logs
Normal file
1519
debug.logs
Normal file
File diff suppressed because it is too large
Load Diff
35
doc.vsh
35
doc.vsh
@@ -7,13 +7,13 @@ abs_dir_of_script := dir(@FILE)
|
|||||||
// Format code
|
// Format code
|
||||||
println('Formatting code...')
|
println('Formatting code...')
|
||||||
if os.system('v fmt -w ${abs_dir_of_script}/examples') != 0 {
|
if os.system('v fmt -w ${abs_dir_of_script}/examples') != 0 {
|
||||||
eprintln('Warning: Failed to format examples')
|
eprintln('Warning: Failed to format examples')
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
if os.system('v fmt -w ${abs_dir_of_script}/lib') != 0 {
|
if os.system('v fmt -w ${abs_dir_of_script}/lib') != 0 {
|
||||||
eprintln('Warning: Failed to format herolib')
|
eprintln('Warning: Failed to format herolib')
|
||||||
exit(1)
|
exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clean existing docs
|
// Clean existing docs
|
||||||
@@ -24,9 +24,7 @@ os.rmdir_all('docs') or {}
|
|||||||
os.rmdir_all('vdocs') or {}
|
os.rmdir_all('vdocs') or {}
|
||||||
|
|
||||||
herolib_path := os.join_path(abs_dir_of_script, 'lib')
|
herolib_path := os.join_path(abs_dir_of_script, 'lib')
|
||||||
os.chdir(herolib_path) or {
|
os.chdir(herolib_path) or { panic('Failed to change directory to herolib: ${err}') }
|
||||||
panic('Failed to change directory to herolib: ${err}')
|
|
||||||
}
|
|
||||||
|
|
||||||
os.mkdir_all('_docs') or {}
|
os.mkdir_all('_docs') or {}
|
||||||
os.mkdir_all('docs') or {}
|
os.mkdir_all('docs') or {}
|
||||||
@@ -35,17 +33,14 @@ os.mkdir_all('vdocs') or {}
|
|||||||
// Generate HTML documentation
|
// Generate HTML documentation
|
||||||
println('Generating HTML documentation...')
|
println('Generating HTML documentation...')
|
||||||
if os.system('v doc -m -f html . -readme -comments -no-timestamp -o ../docs') != 0 {
|
if os.system('v doc -m -f html . -readme -comments -no-timestamp -o ../docs') != 0 {
|
||||||
panic('Failed to generate HTML documentation')
|
panic('Failed to generate HTML documentation')
|
||||||
}
|
}
|
||||||
|
|
||||||
if os.system('v doc -m -f md . -no-color -o ../vdocs/') != 0 {
|
if os.system('v doc -m -f md . -no-color -o ../vdocs/') != 0 {
|
||||||
panic('Failed to generate Hero markdown documentation')
|
panic('Failed to generate Hero markdown documentation')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
os.chdir(abs_dir_of_script) or { panic('Failed to change directory to abs_dir_of_script: ${err}') }
|
||||||
os.chdir(abs_dir_of_script) or {
|
|
||||||
panic('Failed to change directory to abs_dir_of_script: ${err}')
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate Markdown documentation
|
// Generate Markdown documentation
|
||||||
println('Generating Markdown documentation...')
|
println('Generating Markdown documentation...')
|
||||||
@@ -60,12 +55,10 @@ println('Generating Markdown documentation...')
|
|||||||
|
|
||||||
// Open documentation in browser on non-Linux systems
|
// Open documentation in browser on non-Linux systems
|
||||||
$if !linux {
|
$if !linux {
|
||||||
os.chdir(abs_dir_of_script) or {
|
os.chdir(abs_dir_of_script) or { panic('Failed to change directory: ${err}') }
|
||||||
panic('Failed to change directory: ${err}')
|
if os.system('open docs/index.html') != 0 {
|
||||||
}
|
eprintln('Warning: Failed to open documentation in browser')
|
||||||
if os.system('open docs/index.html') != 0 {
|
}
|
||||||
eprintln('Warning: Failed to open documentation in browser')
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create Jekyll required files
|
// Create Jekyll required files
|
||||||
@@ -75,7 +68,7 @@ os.mkdir_all('docs/assets/css') or {}
|
|||||||
// Create style.scss
|
// Create style.scss
|
||||||
style_content := '---\n---\n\n@import "{{ site.theme }}";'
|
style_content := '---\n---\n\n@import "{{ site.theme }}";'
|
||||||
os.write_file('docs/assets/css/style.scss', style_content) or {
|
os.write_file('docs/assets/css/style.scss', style_content) or {
|
||||||
panic('Failed to create style.scss: ${err}')
|
panic('Failed to create style.scss: ${err}')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create _config.yml
|
// Create _config.yml
|
||||||
@@ -94,7 +87,7 @@ exclude:
|
|||||||
- vendor/ruby/'
|
- vendor/ruby/'
|
||||||
|
|
||||||
os.write_file('docs/_config.yml', config_content) or {
|
os.write_file('docs/_config.yml', config_content) or {
|
||||||
panic('Failed to create _config.yml: ${err}')
|
panic('Failed to create _config.yml: ${err}')
|
||||||
}
|
}
|
||||||
|
|
||||||
println('Documentation generation completed successfully!')
|
println('Documentation generation completed successfully!')
|
||||||
|
|||||||
@@ -4,46 +4,45 @@ import os
|
|||||||
import flag
|
import flag
|
||||||
|
|
||||||
fn addtoscript(tofind string, toadd string) ! {
|
fn addtoscript(tofind string, toadd string) ! {
|
||||||
home_dir := os.home_dir()
|
home_dir := os.home_dir()
|
||||||
mut rc_file := '${home_dir}/.zshrc'
|
mut rc_file := '${home_dir}/.zshrc'
|
||||||
if !os.exists(rc_file) {
|
if !os.exists(rc_file) {
|
||||||
rc_file = '${home_dir}/.bashrc'
|
rc_file = '${home_dir}/.bashrc'
|
||||||
if !os.exists(rc_file) {
|
if !os.exists(rc_file) {
|
||||||
return error('No .zshrc or .bashrc found in home directory')
|
return error('No .zshrc or .bashrc found in home directory')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read current content
|
// Read current content
|
||||||
mut content := os.read_file(rc_file)!
|
mut content := os.read_file(rc_file)!
|
||||||
|
|
||||||
// Remove existing alias if present
|
|
||||||
lines := content.split('\n')
|
|
||||||
mut new_lines := []string{}
|
|
||||||
mut prev_is_emtpy := false
|
|
||||||
for line in lines {
|
|
||||||
if prev_is_emtpy {
|
|
||||||
if line.trim_space() == ""{
|
|
||||||
continue
|
|
||||||
}else{
|
|
||||||
prev_is_emtpy = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if line.trim_space() == ""{
|
|
||||||
prev_is_emtpy = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if !line.contains(tofind) {
|
// Remove existing alias if present
|
||||||
new_lines << line
|
lines := content.split('\n')
|
||||||
}
|
mut new_lines := []string{}
|
||||||
}
|
mut prev_is_emtpy := false
|
||||||
new_lines << toadd
|
for line in lines {
|
||||||
new_lines << ""
|
if prev_is_emtpy {
|
||||||
// Write back to file
|
if line.trim_space() == '' {
|
||||||
new_content := new_lines.join('\n')
|
continue
|
||||||
os.write_file(rc_file, new_content)!
|
} else {
|
||||||
|
prev_is_emtpy = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if line.trim_space() == '' {
|
||||||
|
prev_is_emtpy = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if !line.contains(tofind) {
|
||||||
|
new_lines << line
|
||||||
|
}
|
||||||
|
}
|
||||||
|
new_lines << toadd
|
||||||
|
new_lines << ''
|
||||||
|
// Write back to file
|
||||||
|
new_content := new_lines.join('\n')
|
||||||
|
os.write_file(rc_file, new_content)!
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
vroot := @VROOT
|
vroot := @VROOT
|
||||||
abs_dir_of_script := dir(@FILE)
|
abs_dir_of_script := dir(@FILE)
|
||||||
|
|
||||||
@@ -52,20 +51,20 @@ println('Resetting all symlinks...')
|
|||||||
os.rm('${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {}
|
os.rm('${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {}
|
||||||
|
|
||||||
// Create necessary directories
|
// Create necessary directories
|
||||||
os.mkdir_all('${os.home_dir()}/.vmodules/freeflowuniverse') or {
|
os.mkdir_all('${os.home_dir()}/.vmodules/freeflowuniverse') or {
|
||||||
panic('Failed to create directory ~/.vmodules/freeflowuniverse: ${err}')
|
panic('Failed to create directory ~/.vmodules/freeflowuniverse: ${err}')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create new symlinks
|
// Create new symlinks
|
||||||
os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {
|
os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {
|
||||||
panic('Failed to create herolib symlink: ${err}')
|
panic('Failed to create herolib symlink: ${err}')
|
||||||
}
|
}
|
||||||
|
|
||||||
println('Herolib installation completed successfully!')
|
println('Herolib installation completed successfully!')
|
||||||
|
|
||||||
// Add vtest alias
|
// Add vtest alias
|
||||||
addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -n -w -cg -gc none -cc tcc test\' ') or {
|
addtoscript('alias vtest=', "alias vtest='v -stats -enable-globals -n -w -cg -gc none -cc tcc test' ") or {
|
||||||
eprintln('Failed to add vtest alias: ${err}')
|
eprintln('Failed to add vtest alias: ${err}')
|
||||||
}
|
}
|
||||||
|
|
||||||
println('Added vtest alias to shell configuration')
|
println('Added vtest alias to shell configuration')
|
||||||
|
|||||||
@@ -229,35 +229,35 @@ function hero_lib_get {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
function install_secp256k1 {
|
# function install_secp256k1 {
|
||||||
echo "Installing secp256k1..."
|
# echo "Installing secp256k1..."
|
||||||
if [[ "${OSNAME}" == "darwin"* ]]; then
|
# if [[ "${OSNAME}" == "darwin"* ]]; then
|
||||||
brew install secp256k1
|
# brew install secp256k1
|
||||||
elif [[ "${OSNAME}" == "ubuntu" ]]; then
|
# elif [[ "${OSNAME}" == "ubuntu" ]]; then
|
||||||
# Install build dependencies
|
# # Install build dependencies
|
||||||
apt-get install -y build-essential wget autoconf libtool
|
# apt-get install -y build-essential wget autoconf libtool
|
||||||
|
|
||||||
# Download and extract secp256k1
|
# # Download and extract secp256k1
|
||||||
cd "${DIR_BUILD}"
|
# cd "${DIR_BUILD}"
|
||||||
wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
|
# wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
|
||||||
tar -xvf v0.3.2.tar.gz
|
# tar -xvf v0.3.2.tar.gz
|
||||||
|
|
||||||
# Build and install
|
# # Build and install
|
||||||
cd secp256k1-0.3.2/
|
# cd secp256k1-0.3.2/
|
||||||
./autogen.sh
|
# ./autogen.sh
|
||||||
./configure
|
# ./configure
|
||||||
make -j 5
|
# make -j 5
|
||||||
make install
|
# make install
|
||||||
|
|
||||||
# Cleanup
|
# # Cleanup
|
||||||
cd ..
|
# cd ..
|
||||||
rm -rf secp256k1-0.3.2 v0.3.2.tar.gz
|
# rm -rf secp256k1-0.3.2 v0.3.2.tar.gz
|
||||||
else
|
# else
|
||||||
echo "secp256k1 installation not implemented for ${OSNAME}"
|
# echo "secp256k1 installation not implemented for ${OSNAME}"
|
||||||
exit 1
|
# exit 1
|
||||||
fi
|
# fi
|
||||||
echo "secp256k1 installation complete!"
|
# echo "secp256k1 installation complete!"
|
||||||
}
|
# }
|
||||||
|
|
||||||
|
|
||||||
remove_all() {
|
remove_all() {
|
||||||
|
|||||||
Binary file not shown.
@@ -14,12 +14,16 @@ actor_spec := specification.from_openapi(openapi_spec)!
|
|||||||
|
|
||||||
println(actor_spec)
|
println(actor_spec)
|
||||||
|
|
||||||
|
// actor_module := generator.generate_actor_module(actor_spec,
|
||||||
|
// interfaces: [.openapi, .http]
|
||||||
|
// )!
|
||||||
|
|
||||||
actor_module := generator.generate_actor_module(actor_spec,
|
actor_module := generator.generate_actor_module(actor_spec,
|
||||||
interfaces: [.openapi, .http]
|
interfaces: [.http]
|
||||||
)!
|
)!
|
||||||
|
|
||||||
actor_module.write(example_dir,
|
actor_module.write(example_dir,
|
||||||
format: false
|
format: true
|
||||||
overwrite: true
|
overwrite: true
|
||||||
compile: false
|
compile: false
|
||||||
)!
|
)!
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/usr/bin/env -S v -w -n -enable-globals run
|
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.baobab.specification
|
import freeflowuniverse.herolib.baobab.specification
|
||||||
import freeflowuniverse.herolib.schemas.openapi
|
import freeflowuniverse.herolib.schemas.openapi
|
||||||
|
|||||||
@@ -1,132 +1,857 @@
|
|||||||
{
|
{
|
||||||
"openrpc": "1.0.0",
|
"openrpc": "1.2.6",
|
||||||
"info": {
|
"info": {
|
||||||
"title": "PetStore",
|
"version": "1.0.0",
|
||||||
"version": "1.0.0"
|
"title": "Zinit JSON-RPC API",
|
||||||
|
"description": "JSON-RPC 2.0 API for controlling and querying Zinit services",
|
||||||
|
"license": {
|
||||||
|
"name": "MIT"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"servers": [
|
||||||
|
{
|
||||||
|
"name": "Unix Socket",
|
||||||
|
"url": "unix:///tmp/zinit.sock"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"methods": [
|
||||||
|
{
|
||||||
|
"name": "rpc.discover",
|
||||||
|
"description": "Returns the OpenRPC specification for the API",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "OpenRPCSpec",
|
||||||
|
"description": "The OpenRPC specification",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"methods": [
|
{
|
||||||
{
|
"name": "service_list",
|
||||||
"name": "GetPets",
|
"description": "Lists all services managed by Zinit",
|
||||||
"description": "finds pets in the system that the user has access to by tags and within a limit",
|
"params": [],
|
||||||
"params": [
|
"result": {
|
||||||
{
|
"name": "ServiceList",
|
||||||
"name": "tags",
|
"description": "A map of service names to their current states",
|
||||||
"description": "tags to filter by",
|
"schema": {
|
||||||
"schema": {
|
"type": "object",
|
||||||
"type": "array",
|
"additionalProperties": {
|
||||||
"items": {
|
"type": "string",
|
||||||
"type": "string"
|
"description": "Service state (Running, Success, Error, etc.)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "List all services",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceListResult",
|
||||||
|
"value": {
|
||||||
|
"service1": "Running",
|
||||||
|
"service2": "Success",
|
||||||
|
"service3": "Error"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_status",
|
||||||
|
"description": "Shows detailed status information for a specific service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceStatus",
|
||||||
|
"description": "Detailed status information for the service",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Service name"
|
||||||
|
},
|
||||||
|
"pid": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Process ID of the running service (if running)"
|
||||||
|
},
|
||||||
|
"state": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Current state of the service (Running, Success, Error, etc.)"
|
||||||
|
},
|
||||||
|
"target": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Target state of the service (Up, Down)"
|
||||||
|
},
|
||||||
|
"after": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "Dependencies of the service and their states",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "State of the dependency"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "limit",
|
|
||||||
"description": "maximum number of results to return",
|
|
||||||
"schema": {
|
|
||||||
"type": "integer"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"result": {
|
|
||||||
"name": "pet_list",
|
|
||||||
"description": "all pets from the system, that mathes the tags",
|
|
||||||
"schema": {
|
|
||||||
"$ref": "#\/components\/schemas\/Pet"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
"examples": [
|
||||||
"name": "CreatePet",
|
{
|
||||||
"description": "creates a new pet in the store. Duplicates are allowed.",
|
"name": "Get status of redis service",
|
||||||
"params": [
|
"params": [
|
||||||
{
|
{
|
||||||
"name": "new_pet",
|
"name": "name",
|
||||||
"description": "Pet to add to the store.",
|
"value": "redis"
|
||||||
"schema": {
|
}
|
||||||
"$ref": "#\/components\/schemas\/NewPet"
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceStatusResult",
|
||||||
|
"value": {
|
||||||
|
"name": "redis",
|
||||||
|
"pid": 1234,
|
||||||
|
"state": "Running",
|
||||||
|
"target": "Up",
|
||||||
|
"after": {
|
||||||
|
"dependency1": "Success",
|
||||||
|
"dependency2": "Running"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
],
|
}
|
||||||
"result": {
|
],
|
||||||
"name": "pet",
|
"errors": [
|
||||||
"description": "the newly created pet",
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_start",
|
||||||
|
"description": "Starts a service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to start",
|
||||||
|
"required": true,
|
||||||
"schema": {
|
"schema": {
|
||||||
"$ref": "#\/components\/schemas\/Pet"
|
"type": "string"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StartResult",
|
||||||
|
"description": "Result of the start operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
{
|
"examples": [
|
||||||
"name": "GetPetById",
|
{
|
||||||
"description": "gets a pet based on a single ID, if the user has access to the pet",
|
"name": "Start redis service",
|
||||||
"params": [
|
"params": [
|
||||||
{
|
{
|
||||||
"name": "id",
|
"name": "name",
|
||||||
"description": "ID of pet to fetch",
|
"value": "redis"
|
||||||
"schema": {
|
|
||||||
"type": "integer"
|
|
||||||
}
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StartResult",
|
||||||
|
"value": null
|
||||||
}
|
}
|
||||||
],
|
}
|
||||||
"result": {
|
],
|
||||||
"name": "pet",
|
"errors": [
|
||||||
"description": "pet response",
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_stop",
|
||||||
|
"description": "Stops a service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to stop",
|
||||||
|
"required": true,
|
||||||
"schema": {
|
"schema": {
|
||||||
"$ref": "#\/components\/schemas\/Pet"
|
"type": "string"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StopResult",
|
||||||
|
"description": "Result of the stop operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
{
|
"examples": [
|
||||||
"name": "DeletePetById",
|
{
|
||||||
"description": "deletes a single pet based on the ID supplied",
|
"name": "Stop redis service",
|
||||||
"params": [
|
"params": [
|
||||||
{
|
{
|
||||||
"name": "id",
|
"name": "name",
|
||||||
"description": "ID of pet to delete",
|
"value": "redis"
|
||||||
"schema": {
|
|
||||||
"type": "integer"
|
|
||||||
}
|
}
|
||||||
}
|
],
|
||||||
],
|
"result": {
|
||||||
"result": {
|
"name": "StopResult",
|
||||||
"name": "pet",
|
"value": null
|
||||||
"description": "pet deleted",
|
|
||||||
"schema": {
|
|
||||||
"type": "null"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
],
|
||||||
],
|
"errors": [
|
||||||
"components": {
|
{
|
||||||
"schemas": {
|
"code": -32000,
|
||||||
"NewPet": {
|
"message": "Service not found",
|
||||||
"title": "NewPet",
|
"data": "service name \"unknown\" unknown"
|
||||||
"properties": {
|
},
|
||||||
"name": {
|
{
|
||||||
"type": "string"
|
"code": -32003,
|
||||||
},
|
"message": "Service is down",
|
||||||
"tag": {
|
"data": "service \"redis\" is down"
|
||||||
"type": "string"
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_monitor",
|
||||||
|
"description": "Starts monitoring a service. The service configuration is loaded from the config directory.",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to monitor",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "MonitorResult",
|
||||||
|
"description": "Result of the monitor operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Monitor redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
}
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "MonitorResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32001,
|
||||||
|
"message": "Service already monitored",
|
||||||
|
"data": "service \"redis\" already monitored"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32005,
|
||||||
|
"message": "Config error",
|
||||||
|
"data": "failed to load service configuration"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_forget",
|
||||||
|
"description": "Stops monitoring a service. You can only forget a stopped service.",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to forget",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ForgetResult",
|
||||||
|
"description": "Result of the forget operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Forget redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ForgetResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32002,
|
||||||
|
"message": "Service is up",
|
||||||
|
"data": "service \"redis\" is up"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_kill",
|
||||||
|
"description": "Sends a signal to a running service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to send the signal to",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Pet": {
|
{
|
||||||
"title": "Pet",
|
"name": "signal",
|
||||||
"description": "a pet struct that represents a pet",
|
"description": "The signal to send (e.g., SIGTERM, SIGKILL)",
|
||||||
"properties": {
|
"required": true,
|
||||||
"name": {
|
"schema": {
|
||||||
"description": "name of the pet",
|
"type": "string"
|
||||||
"type": "string"
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "KillResult",
|
||||||
|
"description": "Result of the kill operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Send SIGTERM to redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
},
|
},
|
||||||
"tag": {
|
{
|
||||||
"description": "a tag of the pet, helps finding pet",
|
"name": "signal",
|
||||||
"type": "string"
|
"value": "SIGTERM"
|
||||||
},
|
}
|
||||||
"id": {
|
],
|
||||||
"description": "unique indentifier",
|
"result": {
|
||||||
"type": "integer"
|
"name": "KillResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32003,
|
||||||
|
"message": "Service is down",
|
||||||
|
"data": "service \"redis\" is down"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32004,
|
||||||
|
"message": "Invalid signal",
|
||||||
|
"data": "invalid signal: INVALID"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_shutdown",
|
||||||
|
"description": "Stops all services and powers off the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "ShutdownResult",
|
||||||
|
"description": "Result of the shutdown operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Shutdown the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "ShutdownResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32006,
|
||||||
|
"message": "Shutting down",
|
||||||
|
"data": "system is already shutting down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_reboot",
|
||||||
|
"description": "Stops all services and reboots the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "RebootResult",
|
||||||
|
"description": "Result of the reboot operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Reboot the system",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "RebootResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32006,
|
||||||
|
"message": "Shutting down",
|
||||||
|
"data": "system is already shutting down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_create",
|
||||||
|
"description": "Creates a new service configuration file",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to create",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "content",
|
||||||
|
"description": "The service configuration content",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"exec": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Command to run"
|
||||||
|
},
|
||||||
|
"oneshot": {
|
||||||
|
"type": "boolean",
|
||||||
|
"description": "Whether the service should be restarted"
|
||||||
|
},
|
||||||
|
"after": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": "Services that must be running before this one starts"
|
||||||
|
},
|
||||||
|
"log": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["null", "ring", "stdout"],
|
||||||
|
"description": "How to handle service output"
|
||||||
|
},
|
||||||
|
"env": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"description": "Environment variables for the service"
|
||||||
|
},
|
||||||
|
"shutdown_timeout": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Maximum time to wait for service to stop during shutdown"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "CreateServiceResult",
|
||||||
|
"description": "Result of the create operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32007,
|
||||||
|
"message": "Service already exists",
|
||||||
|
"data": "Service 'name' already exists"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32008,
|
||||||
|
"message": "Service file error",
|
||||||
|
"data": "Failed to create service file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_delete",
|
||||||
|
"description": "Deletes a service configuration file",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to delete",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "DeleteServiceResult",
|
||||||
|
"description": "Result of the delete operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "Service 'name' not found"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32008,
|
||||||
|
"message": "Service file error",
|
||||||
|
"data": "Failed to delete service file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_get",
|
||||||
|
"description": "Gets a service configuration file",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to get",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "GetServiceResult",
|
||||||
|
"description": "The service configuration",
|
||||||
|
"schema": {
|
||||||
|
"type": "object"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "Service 'name' not found"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32008,
|
||||||
|
"message": "Service file error",
|
||||||
|
"data": "Failed to read service file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "service_stats",
|
||||||
|
"description": "Get memory and CPU usage statistics for a service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "The name of the service to get stats for",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceStats",
|
||||||
|
"description": "Memory and CPU usage statistics for the service",
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Service name"
|
||||||
|
},
|
||||||
|
"pid": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Process ID of the service"
|
||||||
|
},
|
||||||
|
"memory_usage": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Memory usage in bytes"
|
||||||
|
},
|
||||||
|
"cpu_usage": {
|
||||||
|
"type": "number",
|
||||||
|
"description": "CPU usage as a percentage (0-100)"
|
||||||
|
},
|
||||||
|
"children": {
|
||||||
|
"type": "array",
|
||||||
|
"description": "Stats for child processes",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"pid": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Process ID of the child process"
|
||||||
|
},
|
||||||
|
"memory_usage": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Memory usage in bytes"
|
||||||
|
},
|
||||||
|
"cpu_usage": {
|
||||||
|
"type": "number",
|
||||||
|
"description": "CPU usage as a percentage (0-100)"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Get stats for redis service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "ServiceStatsResult",
|
||||||
|
"value": {
|
||||||
|
"name": "redis",
|
||||||
|
"pid": 1234,
|
||||||
|
"memory_usage": 10485760,
|
||||||
|
"cpu_usage": 2.5,
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"pid": 1235,
|
||||||
|
"memory_usage": 5242880,
|
||||||
|
"cpu_usage": 1.2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32000,
|
||||||
|
"message": "Service not found",
|
||||||
|
"data": "service name \"unknown\" unknown"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code": -32003,
|
||||||
|
"message": "Service is down",
|
||||||
|
"data": "service \"redis\" is down"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_start_http_server",
|
||||||
|
"description": "Start an HTTP/RPC server at the specified address",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "address",
|
||||||
|
"description": "The network address to bind the server to (e.g., '127.0.0.1:8080')",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StartHttpServerResult",
|
||||||
|
"description": "Result of the start HTTP server operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Start HTTP server on localhost:8080",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "address",
|
||||||
|
"value": "127.0.0.1:8080"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "StartHttpServerResult",
|
||||||
|
"value": "HTTP server started at 127.0.0.1:8080"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32602,
|
||||||
|
"message": "Invalid address",
|
||||||
|
"data": "Invalid network address format"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "system_stop_http_server",
|
||||||
|
"description": "Stop the HTTP/RPC server if running",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "StopHttpServerResult",
|
||||||
|
"description": "Result of the stop HTTP server operation",
|
||||||
|
"schema": {
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Stop the HTTP server",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "StopHttpServerResult",
|
||||||
|
"value": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": -32602,
|
||||||
|
"message": "Server not running",
|
||||||
|
"data": "No HTTP server is currently running"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "stream_currentLogs",
|
||||||
|
"description": "Get current logs from zinit and monitored services",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "Optional service name filter. If provided, only logs from this service will be returned",
|
||||||
|
"required": false,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogsResult",
|
||||||
|
"description": "Array of log strings",
|
||||||
|
"schema": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Get all logs",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "LogsResult",
|
||||||
|
"value": [
|
||||||
|
"2023-01-01T12:00:00 redis: Starting service",
|
||||||
|
"2023-01-01T12:00:01 nginx: Starting service"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Get logs for a specific service",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogsResult",
|
||||||
|
"value": [
|
||||||
|
"2023-01-01T12:00:00 redis: Starting service",
|
||||||
|
"2023-01-01T12:00:02 redis: Service started"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "stream_subscribeLogs",
|
||||||
|
"description": "Subscribe to log messages generated by zinit and monitored services",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"description": "Optional service name filter. If provided, only logs from this service will be returned",
|
||||||
|
"required": false,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogSubscription",
|
||||||
|
"description": "A subscription to log messages",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "Subscribe to all logs",
|
||||||
|
"params": [],
|
||||||
|
"result": {
|
||||||
|
"name": "LogSubscription",
|
||||||
|
"value": "2023-01-01T12:00:00 redis: Service started"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Subscribe to filtered logs",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "name",
|
||||||
|
"value": "redis"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "LogSubscription",
|
||||||
|
"value": "2023-01-01T12:00:00 redis: Service started"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
}
|
]
|
||||||
|
}
|
||||||
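For reference, a minimal sketch of calling these methods from V. It only reuses calls that appear in the zinit client example later in this changeset (`zinit.new()`, `service_stats`, `system_start_http_server`, `system_stop_http_server`); the `redis` service name and the bind address are purely illustrative and this block is not itself part of the diff.

```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.clients.zinit

// Assumes a running zinit instance and a monitored service called 'redis' (illustrative).
mut client := zinit.new()!

// service_stats: memory and CPU usage for the service and its child processes
stats := client.service_stats('redis') or {
	println('Failed to get service stats: ${err}')
	zinit.ServiceStats{}
}
if stats.name != '' {
	println('${stats.name}: pid=${stats.pid} memory=${stats.memory_usage} cpu=${stats.cpu_usage}%')
}

// system_start_http_server / system_stop_http_server: expose the same RPC over HTTP
server_result := client.system_start_http_server('127.0.0.1:8080') or { '' }
if server_result != '' {
	println('✓ HTTP server started: ${server_result}')
	client.system_stop_http_server() or { println('Failed to stop HTTP server: ${err}') }
}
```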
132  examples/baobab/specification/openrpc0.json  Normal file
@@ -0,0 +1,132 @@
{
  "openrpc": "1.0.0",
  "info": {
    "title": "PetStore",
    "version": "1.0.0"
  },
  "methods": [
    {
      "name": "GetPets",
      "description": "finds pets in the system that the user has access to by tags and within a limit",
      "params": [
        { "name": "tags", "description": "tags to filter by", "schema": { "type": "array", "items": { "type": "string" } } },
        { "name": "limit", "description": "maximum number of results to return", "schema": { "type": "integer" } }
      ],
      "result": {
        "name": "pet_list",
        "description": "all pets from the system, that mathes the tags",
        "schema": { "$ref": "#\/components\/schemas\/Pet" }
      }
    },
    {
      "name": "CreatePet",
      "description": "creates a new pet in the store. Duplicates are allowed.",
      "params": [
        { "name": "new_pet", "description": "Pet to add to the store.", "schema": { "$ref": "#\/components\/schemas\/NewPet" } }
      ],
      "result": {
        "name": "pet",
        "description": "the newly created pet",
        "schema": { "$ref": "#\/components\/schemas\/Pet" }
      }
    },
    {
      "name": "GetPetById",
      "description": "gets a pet based on a single ID, if the user has access to the pet",
      "params": [
        { "name": "id", "description": "ID of pet to fetch", "schema": { "type": "integer" } }
      ],
      "result": {
        "name": "pet",
        "description": "pet response",
        "schema": { "$ref": "#\/components\/schemas\/Pet" }
      }
    },
    {
      "name": "DeletePetById",
      "description": "deletes a single pet based on the ID supplied",
      "params": [
        { "name": "id", "description": "ID of pet to delete", "schema": { "type": "integer" } }
      ],
      "result": {
        "name": "pet",
        "description": "pet deleted",
        "schema": { "type": "null" }
      }
    }
  ],
  "components": {
    "schemas": {
      "NewPet": {
        "title": "NewPet",
        "properties": {
          "name": { "type": "string" },
          "tag": { "type": "string" }
        }
      },
      "Pet": {
        "title": "Pet",
        "description": "a pet struct that represents a pet",
        "properties": {
          "name": { "description": "name of the pet", "type": "string" },
          "tag": { "description": "a tag of the pet, helps finding pet", "type": "string" },
          "id": { "description": "unique indentifier", "type": "integer" }
        }
      }
    }
  }
}
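A hedged sketch of loading this fixture with the OpenRPC helpers the surrounding examples import. The fixture path below is an assumption (it points at the file added above, relative to an example script), and the decode call follows the pattern used in `examples/schemas/example/generate_model.vsh` further down in this changeset; this block is illustrative only, not part of the diff.

```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run

import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.schemas.openrpc

// Assumed location of the fixture added above, relative to the example script.
spec_path := '${os.dir(@FILE)}/specification/openrpc0.json'

// Read the file and decode it into an OpenRPC document object.
mut spec_file := pathlib.get_file(path: spec_path)!
content := spec_file.read()!
object := openrpc.decode(content)!

// The decoded document exposes the PetStore methods (GetPets, CreatePet, ...).
println(object)
```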
@@ -1,4 +1,4 @@
-#!/usr/bin/env -S v -w -n -enable-globals run
+#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
 
 import freeflowuniverse.herolib.baobab.specification
 import freeflowuniverse.herolib.schemas.openrpc
@@ -1,4 +1,4 @@
-#!/usr/bin/env -S v -w -n -enable-globals run
+#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
 
 import json
 import freeflowuniverse.herolib.baobab.specification
@@ -1,4 +1,4 @@
-#!/usr/bin/env -S v -w -n -enable-globals run
+#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
 
 import json
 import freeflowuniverse.herolib.baobab.specification
@@ -1,6 +1,7 @@
 #!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
 
 import freeflowuniverse.herolib.biz.bizmodel
+import freeflowuniverse.herolib.core.playbook
 import os
 
 heroscript := "
@@ -21,8 +22,13 @@ This time we have the cogs defined in fixed manner, the default currency is USD
 	cogs: '10:100000,15:1000,20:120000'
 "
 
-bizmodel.play(heroscript: heroscript)!
+// Create a new playbook with the heroscript text
+mut pb := playbook.new(text: heroscript)!
+
+// Play the bizmodel actions
+bizmodel.play(mut pb)!
 
+// Get the bizmodel and print it
 mut bm := bizmodel.get('test')!
 
 bm.sheet.pprint(nr_columns: 30)!
@@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import os
|
import os
|
||||||
|
|
||||||
heroscript := "
|
heroscript := "
|
||||||
@@ -16,8 +17,13 @@ heroscript := "
|
|||||||
//revenue_item_monthly_perc:'3%'
|
//revenue_item_monthly_perc:'3%'
|
||||||
"
|
"
|
||||||
|
|
||||||
bizmodel.play(heroscript: heroscript)!
|
// Create a new playbook with the heroscript text
|
||||||
|
mut pb := playbook.new(text: heroscript)!
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('test')!
|
mut bm := bizmodel.get('test')!
|
||||||
|
|
||||||
bm.sheet.pprint(nr_columns: 30)!
|
bm.sheet.pprint(nr_columns: 30)!
|
||||||
|
|||||||
@@ -1,11 +1,17 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import os
|
import os
|
||||||
|
|
||||||
heroscript := os.join_path(os.dir(@FILE), 'examples/complete.heroscript')
|
heroscript_path := os.join_path(os.dir(@FILE), 'examples/complete.heroscript')
|
||||||
|
|
||||||
// Execute the script and print results
|
// Create a new playbook with the heroscript path
|
||||||
bizmodel.play(heroscript_path: heroscript)!
|
mut pb := playbook.new(path: heroscript_path)!
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('threefold')!
|
mut bm := bizmodel.get('threefold')!
|
||||||
bm.sheet.pprint(nr_columns: 10)!
|
bm.sheet.pprint(nr_columns: 10)!
|
||||||
|
|||||||
@@ -1,33 +1,23 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
//#!/usr/bin/env -S v -cg -enable-globals run
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
import freeflowuniverse.herolib.core.playbook
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import freeflowuniverse.herolib.core.playcmds
|
import freeflowuniverse.herolib.core.playcmds
|
||||||
import os
|
import os
|
||||||
|
|
||||||
// heroscript := os.join_path(os.dir(@FILE), 'examples/full')
|
heroscript_path := os.join_path(os.dir(@FILE), 'examples/complete.heroscript')
|
||||||
// // Execute the script and print results
|
|
||||||
// bizmodel.play(heroscript_path:heroscript)!
|
|
||||||
|
|
||||||
heroscript := os.join_path(os.dir(@FILE), 'examples/complete.heroscript')
|
// Create a new playbook with the heroscript path
|
||||||
// Execute the script and print results
|
mut pb := playbook.new(path: heroscript_path)!
|
||||||
bizmodel.play(heroscript_path: heroscript)!
|
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('threefold')!
|
mut bm := bizmodel.get('threefold')!
|
||||||
bm.sheet.pprint(nr_columns: 10)!
|
bm.sheet.pprint(nr_columns: 10)!
|
||||||
|
|
||||||
// buildpath := '${os.home_dir()}/hero/var/mdbuild/bizmodel'
|
// Export the business model to a report
|
||||||
// println("buildpath: ${buildpath}")
|
|
||||||
|
|
||||||
// model.play(mut playbook.new(path: playbook_path)!)!
|
|
||||||
|
|
||||||
// println(model.sheet)
|
|
||||||
// println(model.sheet.export()!)
|
|
||||||
|
|
||||||
// model.sheet.export(path:"~/Downloads/test.csv")!
|
|
||||||
// model.sheet.export(path:"~/code/github/freeflowuniverse/starlight_template/src/content/test.csv")!
|
|
||||||
|
|
||||||
bm.export(
|
bm.export(
|
||||||
name: 'example_report'
|
name: 'example_report'
|
||||||
title: 'Example Business Model'
|
title: 'Example Business Model'
|
||||||
|
|||||||
@@ -1,11 +1,17 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import os
|
import os
|
||||||
|
|
||||||
heroscript := os.join_path(os.dir(@FILE), 'examples/full')
|
heroscript_path := os.join_path(os.dir(@FILE), 'examples/full')
|
||||||
|
|
||||||
// Execute the script and print results
|
// Create a new playbook with the heroscript path
|
||||||
bizmodel.play(heroscript_path: heroscript)!
|
mut pb := playbook.new(path: heroscript_path)!
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('threefold')!
|
mut bm := bizmodel.get('threefold')!
|
||||||
bm.sheet.pprint(nr_columns: 25)!
|
bm.sheet.pprint(nr_columns: 25)!
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import os
|
import os
|
||||||
|
|
||||||
heroscript := "
|
heroscript := "
|
||||||
@@ -45,8 +46,13 @@ heroscript := "
|
|||||||
|
|
||||||
"
|
"
|
||||||
|
|
||||||
bizmodel.play(heroscript: heroscript)!
|
// Create a new playbook with the heroscript text
|
||||||
|
mut pb := playbook.new(text: heroscript)!
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('test')!
|
mut bm := bizmodel.get('test')!
|
||||||
|
|
||||||
bm.sheet.pprint(nr_columns: 20)!
|
bm.sheet.pprint(nr_columns: 20)!
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import os
|
import os
|
||||||
|
|
||||||
heroscript := "
|
heroscript := "
|
||||||
@@ -17,8 +18,13 @@ heroscript := "
|
|||||||
|
|
||||||
"
|
"
|
||||||
|
|
||||||
bizmodel.play(heroscript: heroscript)!
|
// Create a new playbook with the heroscript text
|
||||||
|
mut pb := playbook.new(text: heroscript)!
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('test')!
|
mut bm := bizmodel.get('test')!
|
||||||
|
|
||||||
bm.sheet.pprint(nr_columns: 20)!
|
bm.sheet.pprint(nr_columns: 20)!
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.biz.bizmodel
|
import freeflowuniverse.herolib.biz.bizmodel
|
||||||
|
import freeflowuniverse.herolib.core.playbook
|
||||||
import os
|
import os
|
||||||
|
|
||||||
heroscript := "
|
heroscript := "
|
||||||
@@ -36,8 +37,13 @@ heroscript := "
|
|||||||
|
|
||||||
"
|
"
|
||||||
|
|
||||||
bizmodel.play(heroscript: heroscript)!
|
// Create a new playbook with the heroscript text
|
||||||
|
mut pb := playbook.new(text: heroscript)!
|
||||||
|
|
||||||
|
// Play the bizmodel actions
|
||||||
|
bizmodel.play(mut pb)!
|
||||||
|
|
||||||
|
// Get the bizmodel and print it
|
||||||
mut bm := bizmodel.get('test')!
|
mut bm := bizmodel.get('test')!
|
||||||
|
|
||||||
bm.sheet.pprint(nr_columns: 20)!
|
bm.sheet.pprint(nr_columns: 20)!
|
||||||
|
|||||||
50  examples/biztools/notworking.md  Normal file
@@ -0,0 +1,50 @@
# BizTools Examples Test Results

## Working Examples

All examples have been fixed and now work correctly:

- `bizmodel.vsh` - This example was already working correctly.
- `bizmodel1.vsh` - Fixed to use `playbook.new()` with text parameter.
- `bizmodel2.vsh` - Fixed to use `playbook.new()` with text parameter.
- `bizmodel_complete.vsh` - Fixed to use `playbook.new()` with path parameter.
- `bizmodel_export.vsh` - Fixed to use `playbook.new()` with path parameter.
- `bizmodel_full.vsh` - Fixed to use `playbook.new()` with path parameter.
- `costs.vsh` - Fixed to use `playbook.new()` with text parameter.
- `funding.vsh` - Fixed to use `playbook.new()` with text parameter.
- `hr.vsh` - Fixed to use `playbook.new()` with text parameter.

## Previous Issues

All examples had issues with the `bizmodel.play()` function:

1. Unknown field (`heroscript` or `heroscript_path`) in struct literal of type `PlayBook`.
2. Reference field `PlayBook.session` must be initialized.
3. Function `bizmodel.play` parameter `plbook` is `mut`, so it requires `mut PlayBook{...}` instead.

## Solution Applied

All examples have been fixed by using the `playbook.new()` function to create a properly initialized PlayBook:

For examples with heroscript text:

```v
// Create a new playbook with the heroscript text
mut pb := playbook.new(text: heroscript)!

// Play the bizmodel actions
bizmodel.play(mut pb)!

// Get the bizmodel and print it
mut bm := bizmodel.get('test')!
```

For examples with heroscript path:

```v
// Create a new playbook with the heroscript path
mut pb := playbook.new(path: heroscript_path)!

// Play the bizmodel actions
bizmodel.play(mut pb)!
```

## Environment Setup

- Tests were performed with V language version 0.4.11 a11de72
- Redis server was running during tests
- All tests were executed from the `/workspace/project/herolib/examples/biztools` directory
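Putting the two fragments together, a complete sketch of the pattern the fixed examples now follow. The heroscript body below is a placeholder, since each example defines its own bizmodel actions; everything else mirrors lines that appear in the diffs above.

```v
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.biz.bizmodel
import freeflowuniverse.herolib.core.playbook

// Placeholder: the !!bizmodel... actions for a model named 'test' go here.
heroscript := "
	// bizmodel heroscript actions
"

// Create a new playbook with the heroscript text
mut pb := playbook.new(text: heroscript)!

// Play the bizmodel actions
bizmodel.play(mut pb)!

// Get the bizmodel and print it
mut bm := bizmodel.get('test')!
bm.sheet.pprint(nr_columns: 20)!
```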
49  examples/clients/gitea.vsh  Executable file
@@ -0,0 +1,49 @@
#!/usr/bin/env -S v -n -w -g -cg -gc none -cc tcc -d use_openssl -enable-globals run

import freeflowuniverse.herolib.core.playcmds
import freeflowuniverse.herolib.clients.giteaclient

heroscript := "
!!giteaclient.configure
    name: 'default'
    url: 'git.ourworld.tf'
    user: 'despiegk'
    secret: '1'

!!giteaclient.configure
    name: 'two'
    url: 'git.ourworld.tf'
    user: 'despiegk2'
    secret: '2'

"
// Process the heroscript configuration
// playcmds.play(heroscript: heroscript, emptycheck: false)!

println(giteaclient.list(fromdb: true)!)

//$dbg;

// Get the configured client
mut client := giteaclient.get()!

// Get the authenticated user
// user := client.get_current_user()!
// println('Authenticated as: ${user.login}')

// List repositories for the authenticated user
repos := client.user_list_repos()!
println('Found ${repos.len} repositories:')
for repo in repos {
	println('- ${repo.full_name}')
}

// Get a specific repository's issues
owner := 'gitea'
repo_name := 'gitea'
println('\nFetching issues for ${owner}/${repo_name}...')
issues := client.list_repo_issues(owner, repo_name)!
println('Found ${issues.len} issues.')
for issue in issues {
	println('  #${issue.number}: ${issue.title}')
}
@@ -1,6 +1,7 @@
|
|||||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
import freeflowuniverse.herolib.clients.zinit_rpc
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
import freeflowuniverse.herolib.installers.infra.zinit_installer
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
|
|
||||||
@@ -9,34 +10,33 @@ import time
|
|||||||
|
|
||||||
println('=== Zinit RPC Client Example ===\n')
|
println('=== Zinit RPC Client Example ===\n')
|
||||||
|
|
||||||
// Start Zinit in the background
|
// // Start Zinit in the background
|
||||||
println('Starting Zinit in background...')
|
// println('Starting Zinit in background...')
|
||||||
mut zinit_process := os.new_process('/usr/local/bin/zinit')
|
// mut zinit_process := os.new_process('/usr/local/bin/zinit')
|
||||||
zinit_process.set_args(['init'])
|
// zinit_process.set_args(['init'])
|
||||||
zinit_process.set_redirect_stdio()
|
// zinit_process.set_redirect_stdio()
|
||||||
zinit_process.run()
|
// zinit_process.run()
|
||||||
|
|
||||||
// Wait a moment for Zinit to start up
|
// Wait a moment for Zinit to start up
|
||||||
time.sleep(2000 * time.millisecond)
|
// time.sleep(2000 * time.millisecond)
|
||||||
println('✓ Zinit started')
|
// println('✓ Zinit started')
|
||||||
|
|
||||||
// Ensure we clean up Zinit when done
|
// Ensure we clean up Zinit when done
|
||||||
defer {
|
// defer {
|
||||||
println('\nCleaning up...')
|
// println('\nCleaning up...')
|
||||||
zinit_process.signal_kill()
|
// zinit_process.signal_kill()
|
||||||
zinit_process.wait()
|
// zinit_process.wait()
|
||||||
println('✓ Zinit stopped')
|
// println('✓ Zinit stopped')
|
||||||
}
|
// }
|
||||||
|
|
||||||
|
// mut installer := zinit_installer.get()!
|
||||||
|
// installer.install()!
|
||||||
|
// installer.start()!
|
||||||
|
|
||||||
// Create a new client
|
// Create a new client
|
||||||
mut client := zinit_rpc.new_client(
|
mut client := zinit.new()!
|
||||||
name: 'example_client'
|
|
||||||
socket_path: '/tmp/zinit.sock'
|
println(client)
|
||||||
) or {
|
|
||||||
println('Failed to create client: ${err}')
|
|
||||||
println('Make sure Zinit is running and the socket exists at /tmp/zinit.sock')
|
|
||||||
exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
println('✓ Created Zinit RPC client')
|
println('✓ Created Zinit RPC client')
|
||||||
|
|
||||||
@@ -66,7 +66,7 @@ for service_name, state in services {
|
|||||||
// 3. Create a test service configuration
|
// 3. Create a test service configuration
|
||||||
println('\n3. Creating a test service...')
|
println('\n3. Creating a test service...')
|
||||||
test_service_name := 'test_echo_service'
|
test_service_name := 'test_echo_service'
|
||||||
config := zinit_rpc.ServiceConfig{
|
config := zinit.ServiceConfig{
|
||||||
exec: '/bin/echo "Hello from test service"'
|
exec: '/bin/echo "Hello from test service"'
|
||||||
oneshot: true
|
oneshot: true
|
||||||
log: 'stdout'
|
log: 'stdout'
|
||||||
@@ -147,7 +147,7 @@ println('\n8. Getting service statistics...')
|
|||||||
stats := client.service_stats(test_service_name) or {
|
stats := client.service_stats(test_service_name) or {
|
||||||
println('Failed to get service stats (service might not be running): ${err}')
|
println('Failed to get service stats (service might not be running): ${err}')
|
||||||
// Continue anyway
|
// Continue anyway
|
||||||
zinit_rpc.ServiceStats{}
|
zinit.ServiceStats{}
|
||||||
}
|
}
|
||||||
if stats.name != '' {
|
if stats.name != '' {
|
||||||
println('✓ Service statistics:')
|
println('✓ Service statistics:')
|
||||||
@@ -208,7 +208,7 @@ if subscription_id != 0 {
|
|||||||
// Get fresh status to make sure service is still running
|
// Get fresh status to make sure service is still running
|
||||||
fresh_status := client.service_status(test_service_name) or {
|
fresh_status := client.service_status(test_service_name) or {
|
||||||
println('\n12. Skipping signal test (cannot get service status)')
|
println('\n12. Skipping signal test (cannot get service status)')
|
||||||
zinit_rpc.ServiceStatus{}
|
zinit.ServiceStatus{}
|
||||||
}
|
}
|
||||||
if fresh_status.state == 'Running' && fresh_status.pid > 0 {
|
if fresh_status.state == 'Running' && fresh_status.pid > 0 {
|
||||||
println('\n12. Sending SIGTERM signal to service...')
|
println('\n12. Sending SIGTERM signal to service...')
|
||||||
@@ -258,7 +258,6 @@ server_result := client.system_start_http_server('127.0.0.1:9999') or {
|
|||||||
}
|
}
|
||||||
if server_result != '' {
|
if server_result != '' {
|
||||||
println('✓ HTTP server started: ${server_result}')
|
println('✓ HTTP server started: ${server_result}')
|
||||||
|
|
||||||
// Stop the HTTP server
|
// Stop the HTTP server
|
||||||
client.system_stop_http_server() or { println('Failed to stop HTTP server: ${err}') }
|
client.system_stop_http_server() or { println('Failed to stop HTTP server: ${err}') }
|
||||||
println('✓ HTTP server stopped')
|
println('✓ HTTP server stopped')
|
||||||
@@ -3,14 +3,20 @@
|
|||||||
import freeflowuniverse.herolib.core.generator.generic as generator
|
import freeflowuniverse.herolib.core.generator.generic as generator
|
||||||
import freeflowuniverse.herolib.core.pathlib
|
import freeflowuniverse.herolib.core.pathlib
|
||||||
|
|
||||||
mut args := generator.GeneratorArgs{
|
|
||||||
path: '~/code/github/freeflowuniverse/herolib/lib/clients/postgresql_client'
|
|
||||||
force: true
|
|
||||||
}
|
|
||||||
|
|
||||||
// mut args := generator.GeneratorArgs{
|
// mut args := generator.GeneratorArgs{
|
||||||
// path: '~/code/github/freeflowuniverse/herolib/lib'
|
// path: '~/code/github/freeflowuniverse/herolib/lib/clients'
|
||||||
// force: true
|
// force: true
|
||||||
// }
|
// }
|
||||||
|
|
||||||
generator.scan(args)!
|
mut args2 := generator.GeneratorArgs{
|
||||||
|
path: '~/code/github/freeflowuniverse/herolib/lib/develop/heroprompt'
|
||||||
|
force: true
|
||||||
|
}
|
||||||
|
generator.scan(args2)!
|
||||||
|
|
||||||
|
// mut args := generator.GeneratorArgs{
|
||||||
|
// path: '~/code/github/freeflowuniverse/herolib/lib/installers'
|
||||||
|
// force: true
|
||||||
|
// }
|
||||||
|
|
||||||
|
// generator.scan(args)!
|
||||||
|
|||||||
@@ -1,4 +0,0 @@
|
|||||||
module gitea_client
|
|
||||||
|
|
||||||
struct GiteaClient {
|
|
||||||
}
|
|
||||||
@@ -1,83 +0,0 @@
|
|||||||
module dagu
|
|
||||||
|
|
||||||
// import os
|
|
||||||
import freeflowuniverse.herolib.core.httpconnection
|
|
||||||
import os
|
|
||||||
|
|
||||||
struct GiteaClient[T] {
|
|
||||||
base.Base[T]
|
|
||||||
mut:
|
|
||||||
connection &httpconnection.HTTPConnection
|
|
||||||
}
|
|
||||||
|
|
||||||
struct Config {
|
|
||||||
play.ConfigBase[T]
|
|
||||||
url string
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
pub fn get(args PlayArgs) GiteaClient[Config] {
|
|
||||||
mut client := GiteaClient[Config]{}
|
|
||||||
client.init(args)!
|
|
||||||
return client
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
pub fn heroplay(args PlayBookAddArgs) ! {
|
|
||||||
// make session for configuring from heroscript
|
|
||||||
mut session := play.session_new(session_name: 'config')!
|
|
||||||
session.playbook_add(path: args.path, text: args.text, git_url: args.git_url)!
|
|
||||||
for mut action in session.plbook.find(filter: 'gitea_client.define')! {
|
|
||||||
mut p := action.params
|
|
||||||
instance := p.get_default('instance', 'default')!
|
|
||||||
mut cl := get(instance: instance)!
|
|
||||||
mut cfg := cl.config()!
|
|
||||||
mut config := p.decode[T]()!
|
|
||||||
cl.config_save()!
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
pub fn (self GiteaClient[T]) config_interactive() ! {
|
|
||||||
mut myui := ui.new()!
|
|
||||||
console.clear()
|
|
||||||
println('
|
|
||||||
## Configure B2 Client')
|
|
||||||
println('========================
|
|
||||||
|
|
||||||
')
|
|
||||||
|
|
||||||
mut cfg := self.config()!
|
|
||||||
|
|
||||||
self.instance = myui.ask_question(
|
|
||||||
question: 'name for B2 (backblaze) client'
|
|
||||||
default: self.instance
|
|
||||||
)!
|
|
||||||
|
|
||||||
cfg.description = myui.ask_question(
|
|
||||||
question: 'description'
|
|
||||||
minlen: 0
|
|
||||||
default: cfg.description
|
|
||||||
)!
|
|
||||||
cfg.keyid = myui.ask_question(
|
|
||||||
question: 'keyid e.g. 003e2a7be6357fb0000000001'
|
|
||||||
minlen: 5
|
|
||||||
default: cfg.keyid
|
|
||||||
)!
|
|
||||||
|
|
||||||
cfg.appkey = myui.ask_question(
|
|
||||||
question: 'appkey e.g. K008UsdrYOAou2ulBHA8p4KBe/dL2n4'
|
|
||||||
minlen: 5
|
|
||||||
default: cfg.appkey
|
|
||||||
)!
|
|
||||||
|
|
||||||
buckets := self.list_buckets()!
|
|
||||||
bucket_names := buckets.map(it.name)
|
|
||||||
|
|
||||||
cfg.bucketname = myui.ask_dropdown(
|
|
||||||
question: 'choose default bucket name'
|
|
||||||
items: bucket_names
|
|
||||||
)!
|
|
||||||
|
|
||||||
self.config_save()!
|
|
||||||
}
|
|
||||||
@@ -1,69 +0,0 @@
|
|||||||
module gitea_client
|
|
||||||
|
|
||||||
import json
|
|
||||||
import net.http
|
|
||||||
|
|
||||||
// Repository operations
|
|
||||||
pub fn (mut client GiteaClient) create_repo(name string, description string, private bool) !string {
|
|
||||||
data := {
|
|
||||||
'name': name
|
|
||||||
'description': description
|
|
||||||
'private': private.str()
|
|
||||||
}
|
|
||||||
|
|
||||||
resp := client.connection.post('/api/v1/user/repos', json.encode(data))!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn (mut client GiteaClient) get_repo(owner string, repo string) !string {
|
|
||||||
resp := client.connection.get('/api/v1/repos/${owner}/${repo}')!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn (mut client GiteaClient) list_repos() !string {
|
|
||||||
resp := client.connection.get('/api/v1/user/repos')!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
// User operations
|
|
||||||
pub fn (mut client GiteaClient) get_user() !string {
|
|
||||||
resp := client.connection.get('/api/v1/user')!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn (mut client GiteaClient) list_users() !string {
|
|
||||||
resp := client.connection.get('/api/v1/admin/users')!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
// Organization operations
|
|
||||||
pub fn (mut client GiteaClient) create_org(name string, description string) !string {
|
|
||||||
data := {
|
|
||||||
'username': name
|
|
||||||
'description': description
|
|
||||||
}
|
|
||||||
|
|
||||||
resp := client.connection.post('/api/v1/orgs', json.encode(data))!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn (mut client GiteaClient) list_orgs() !string {
|
|
||||||
resp := client.connection.get('/api/v1/orgs')!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
// Issue operations
|
|
||||||
pub fn (mut client GiteaClient) create_issue(owner string, repo string, title string, body string) !string {
|
|
||||||
data := {
|
|
||||||
'title': title
|
|
||||||
'body': body
|
|
||||||
}
|
|
||||||
|
|
||||||
resp := client.connection.post('/api/v1/repos/${owner}/${repo}/issues', json.encode(data))!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn (mut client GiteaClient) list_issues(owner string, repo string) !string {
|
|
||||||
resp := client.connection.get('/api/v1/repos/${owner}/${repo}/issues')!
|
|
||||||
return resp
|
|
||||||
}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
module gitea_client
|
|
||||||
@@ -9,6 +9,6 @@ const spec_path = '${os.dir(@FILE)}/openapi.json'
|
|||||||
mod := gen.generate_client_module(
|
mod := gen.generate_client_module(
|
||||||
api_name: 'Gitea'
|
api_name: 'Gitea'
|
||||||
)!
|
)!
|
||||||
mod.write_v('${os.dir(@FILE)}/gitea_client',
|
mod.write_v('${os.dir(@FILE)}/giteaclient',
|
||||||
overwrite: true
|
overwrite: true
|
||||||
)!
|
)!
|
||||||
|
|||||||
52
examples/data/countries.vsh
Executable file
52
examples/data/countries.vsh
Executable file
@@ -0,0 +1,52 @@
|
|||||||
|
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
|
import freeflowuniverse.herolib.data.countries
|
||||||
|
|
||||||
|
mut all_countries := countries.get_all_countries() or {
|
||||||
|
eprintln('Error loading countries: ${err}')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
println('Total countries loaded: ${all_countries.len}')
|
||||||
|
|
||||||
|
// --- Example: Print the first few countries ---
|
||||||
|
println('\n--- First 5 Countries ---')
|
||||||
|
for i, country in all_countries {
|
||||||
|
if i >= 5 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
println(country.str())
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Example: Find a specific country (e.g., Belgium) ---
|
||||||
|
println('\n--- Searching for Belgium ---')
|
||||||
|
mut found := false
|
||||||
|
for country in all_countries {
|
||||||
|
if country.iso == 'BE' {
|
||||||
|
println('Found Belgium: ${country.str()}')
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found {
|
||||||
|
println('Belgium not found.')
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Example: Find countries in Europe (Continent = EU) ---
|
||||||
|
println('\n--- Countries in Europe (EU) ---')
|
||||||
|
mut eu_countries := []countries.Country{}
|
||||||
|
for country in all_countries {
|
||||||
|
if country.continent == 'EU' {
|
||||||
|
eu_countries << country
|
||||||
|
}
|
||||||
|
}
|
||||||
|
println('Found ${eu_countries.len} European countries.')
|
||||||
|
// Optionally print them or process further
|
||||||
|
|
||||||
|
// --- Example: Using the helper function ---
|
||||||
|
println('\n--- Using helper function to find Japan ---')
|
||||||
|
japan := countries.find_country_by_iso('JP') or {
|
||||||
|
println('Error finding Japan: ${err}')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
println('Found Japan: ${japan.str()}')
|
||||||
87
examples/develop/codewalker/codewalker_example.vsh
Executable file
87
examples/develop/codewalker/codewalker_example.vsh
Executable file
@@ -0,0 +1,87 @@
|
|||||||
|
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
|
import freeflowuniverse.herolib.develop.codewalker
|
||||||
|
import freeflowuniverse.herolib.core.pathlib
|
||||||
|
import os
|
||||||
|
|
||||||
|
// Simple example demonstrating CodeWalker:
|
||||||
|
// - Build a FileMap from a directory (respecting .gitignore)
|
||||||
|
// - Serialize to filemap text
|
||||||
|
// - Export to a different destination
|
||||||
|
// - Parse filemap text directly
|
||||||
|
|
||||||
|
// 1) Prepare a small temp source directory
|
||||||
|
mut srcdir := pathlib.get_dir(
|
||||||
|
path: os.join_path(os.temp_dir(), 'codewalker_example_src')
|
||||||
|
create: true
|
||||||
|
empty: true
|
||||||
|
)!
|
||||||
|
|
||||||
|
// Create some files
|
||||||
|
mut f1 := pathlib.get_file(path: os.join_path(srcdir.path, 'a/b.txt'), create: true)!
|
||||||
|
f1.write('hello from a/b.txt')!
|
||||||
|
mut f2 := pathlib.get_file(path: os.join_path(srcdir.path, 'c.txt'), create: true)!
|
||||||
|
f2.write('world from c.txt')!
|
||||||
|
|
||||||
|
// Create ignored files and a .gitignore
|
||||||
|
mut ig := pathlib.get_file(path: os.join_path(srcdir.path, '.gitignore'), create: true)!
|
||||||
|
ig.write('__pycache__/\n*.pyc\nbuild/\n')!
|
||||||
|
|
||||||
|
mut ignored_dir := pathlib.get_dir(path: os.join_path(srcdir.path, '__pycache__'), create: true)!
|
||||||
|
_ = ignored_dir // not used
|
||||||
|
|
||||||
|
mut ignored_file := pathlib.get_file(path: os.join_path(srcdir.path, 'script.pyc'), create: true)!
|
||||||
|
ignored_file.write('ignored bytecode')!
|
||||||
|
|
||||||
|
mut ignored_build := pathlib.get_dir(path: os.join_path(srcdir.path, 'build'), create: true)!
|
||||||
|
mut ignored_in_build := pathlib.get_file(
|
||||||
|
path: os.join_path(ignored_build.path, 'temp.bin')
|
||||||
|
create: true
|
||||||
|
)!
|
||||||
|
ignored_in_build.write('ignored build artifact')!
|
||||||
|
|
||||||
|
// Demonstrate level-scoped .heroignore
|
||||||
|
mut lvl := pathlib.get_dir(path: os.join_path(srcdir.path, 'test_gitignore_levels'), create: true)!
|
||||||
|
mut hero := pathlib.get_file(path: os.join_path(lvl.path, '.heroignore'), create: true)!
|
||||||
|
hero.write('dist/\n')!
|
||||||
|
// files under test_gitignore_levels/dist should be ignored (level-scoped)
|
||||||
|
mut dist := pathlib.get_dir(path: os.join_path(lvl.path, 'dist'), create: true)!
|
||||||
|
mut cachef := pathlib.get_file(path: os.join_path(dist.path, 'cache.test'), create: true)!
|
||||||
|
cachef.write('cache here any text')!
|
||||||
|
mut buildf := pathlib.get_file(path: os.join_path(dist.path, 'build.test'), create: true)!
|
||||||
|
buildf.write('just build text')!
|
||||||
|
// sibling tests folder should be included
|
||||||
|
mut tests := pathlib.get_dir(path: os.join_path(lvl.path, 'tests'), create: true)!
|
||||||
|
mut testf := pathlib.get_file(path: os.join_path(tests.path, 'file.test'), create: true)!
|
||||||
|
testf.write('print test is ok for now')!
|
||||||
|
|
||||||
|
// 2) Walk the directory into a FileMap (ignored files should be skipped)
|
||||||
|
mut cw := codewalker.new()!
|
||||||
|
mut fm := cw.filemap_get(path: srcdir.path)!
|
||||||
|
|
||||||
|
println('Collected files: ${fm.content.len}')
|
||||||
|
for k, _ in fm.content {
|
||||||
|
println(' - ${k}')
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3) Serialize to filemap text (for LLMs or storage)
|
||||||
|
serialized := fm.content()
|
||||||
|
println('\nSerialized filemap:')
|
||||||
|
println(serialized)
|
||||||
|
|
||||||
|
// 4) Export to a new destination directory
|
||||||
|
mut destdir := pathlib.get_dir(
|
||||||
|
path: os.join_path(os.temp_dir(), 'codewalker_example_out')
|
||||||
|
create: true
|
||||||
|
empty: true
|
||||||
|
)!
|
||||||
|
fm.export(destdir.path)!
|
||||||
|
println('\nExported to: ${destdir.path}')
|
||||||
|
|
||||||
|
// 5) Demonstrate direct parsing from filemap text
|
||||||
|
mut cw2 := codewalker.new(codewalker.CodeWalkerArgs{})!
|
||||||
|
parsed := cw2.parse(serialized)!
|
||||||
|
println('\nParsed back from text, files: ${parsed.content.len}')
|
||||||
|
for k, _ in parsed.content {
|
||||||
|
println(' * ${k}')
|
||||||
|
}
|
||||||
50
examples/develop/heroprompt/heroprompt_example.vsh
Executable file
50
examples/develop/heroprompt/heroprompt_example.vsh
Executable file
@@ -0,0 +1,50 @@
|
|||||||
|
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
|
import freeflowuniverse.herolib.develop.heroprompt
|
||||||
|
import os
|
||||||
|
|
||||||
|
// mut workspace := heroprompt.new(
|
||||||
|
// path: '${os.home_dir()}/code/github/freeflowuniverse/herolib'
|
||||||
|
// name: 'workspace'
|
||||||
|
// )!
|
||||||
|
|
||||||
|
mut workspace := heroprompt.get(
|
||||||
|
name: 'example_ws'
|
||||||
|
path: '${os.home_dir()}/code/github/freeflowuniverse/herolib'
|
||||||
|
create: true
|
||||||
|
)!
|
||||||
|
|
||||||
|
println('workspace (initial): ${workspace}')
|
||||||
|
println('selected (initial): ${workspace.selected_children()}')
|
||||||
|
|
||||||
|
// Add a directory and a file
|
||||||
|
workspace.add_dir(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/docker')!
|
||||||
|
workspace.add_file(
|
||||||
|
path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/docker/docker_ubuntu_install.sh'
|
||||||
|
)!
|
||||||
|
println('selected (after add): ${workspace.selected_children()}')
|
||||||
|
|
||||||
|
// Build a prompt from current selection (should be empty now)
|
||||||
|
mut prompt := workspace.prompt(
|
||||||
|
text: 'Using the selected files, i want you to get all print statments'
|
||||||
|
)
|
||||||
|
|
||||||
|
println('--- PROMPT START ---')
|
||||||
|
println(prompt)
|
||||||
|
println('--- PROMPT END ---')
|
||||||
|
|
||||||
|
// Remove the file by name, then the directory by name
|
||||||
|
workspace.remove_file(name: 'docker_ubuntu_install.sh') or { println('remove_file: ${err}') }
|
||||||
|
workspace.remove_dir(name: 'docker') or { println('remove_dir: ${err}') }
|
||||||
|
println('selected (after remove): ${workspace.selected_children()}')
|
||||||
|
|
||||||
|
// List workspaces (names only)
|
||||||
|
mut all := heroprompt.list_workspaces() or { []&heroprompt.Workspace{} }
|
||||||
|
mut names := []string{}
|
||||||
|
for w in all {
|
||||||
|
names << w.name
|
||||||
|
}
|
||||||
|
println('workspaces: ${names}')
|
||||||
|
|
||||||
|
// Optionally delete the example workspace
|
||||||
|
workspace.delete_workspace() or { println('delete_workspace: ${err}') }
|
||||||
@@ -1,4 +1,4 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
 
 import freeflowuniverse.herolib.installers.infra.zinit_installer
 
@@ -1,14 +1,20 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
 
 import freeflowuniverse.herolib.installers.net.mycelium_installer
 import freeflowuniverse.herolib.clients.mycelium
 
-mut installer := mycelium_installer.get()!
+mut installer := mycelium_installer.get(create: true)!
+println(installer)
+
 installer.start()!
 
+// $dbg;
+
 mut r := mycelium.inspect()!
 println(r)
 
+// $dbg;
+
 mut client := mycelium.get()!
 
 // Send a message to a node by public key
60
examples/lang/python/codewalker.vsh
Executable file
60
examples/lang/python/codewalker.vsh
Executable file
@@ -0,0 +1,60 @@
|
|||||||
|
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
|
import freeflowuniverse.herolib.lib.lang.codewalker
|
||||||
|
import freeflowuniverse.herolib.core.pathlib
|
||||||
|
import freeflowuniverse.herolib.osal.core as osal
|
||||||
|
|
||||||
|
// Create test directory structure in /tmp/filemap
|
||||||
|
test_source := '/tmp/filemap'
|
||||||
|
test_destination := '/tmp/filemap2'
|
||||||
|
|
||||||
|
// Clean up any existing test directories
|
||||||
|
osal.rm(todelete: test_source)!
|
||||||
|
osal.rm(todelete: test_destination)!
|
||||||
|
|
||||||
|
// Create source directory
|
||||||
|
mut source_dir := pathlib.get(test_source)!
|
||||||
|
source_dir.dir_ensure()!
|
||||||
|
|
||||||
|
// Create test files with content
|
||||||
|
mut file1 := source_dir.join('file1.txt')!
|
||||||
|
file1.write('Content of file 1')!
|
||||||
|
|
||||||
|
mut subdir := source_dir.join('subdir')!
|
||||||
|
subdir.dir_ensure()!
|
||||||
|
|
||||||
|
mut file2 := subdir.join('file2.txt')!
|
||||||
|
file2.write('Content of file 2')!
|
||||||
|
|
||||||
|
mut file3 := subdir.join('file3.md')!
|
||||||
|
file3.write('# Markdown file content')!
|
||||||
|
|
||||||
|
println('Test files created in ${test_source}')
|
||||||
|
|
||||||
|
// Create CodeWalker instance
|
||||||
|
mut cw := codewalker.new(name: 'test', source: test_source)!
|
||||||
|
|
||||||
|
// Verify files are in the map
|
||||||
|
println('\nFiles in filemap:')
|
||||||
|
cw.filemap.write()
|
||||||
|
|
||||||
|
// Export files to destination
|
||||||
|
cw.filemap.export(test_destination)!
|
||||||
|
|
||||||
|
println('\nFiles exported to ${test_destination}')
|
||||||
|
|
||||||
|
// Verify export by listing files in destination
|
||||||
|
mut dest_dir := pathlib.get(test_destination)!
|
||||||
|
if dest_dir.exists() {
|
||||||
|
mut files := dest_dir.list(recursive: true)!
|
||||||
|
println('\nFiles in destination directory:')
|
||||||
|
for file in files {
|
||||||
|
if file.is_file() {
|
||||||
|
println(' ${file.path}')
|
||||||
|
println(' Content: ${file.read()!}')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
println('\nExport test completed successfully!')
|
||||||
|
} else {
|
||||||
|
println('\nError: Destination directory was not created')
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
-#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
+#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
 
 import freeflowuniverse.herolib.lang.python
 import json
51
examples/schemas/example/generate_model.vsh
Executable file
51
examples/schemas/example/generate_model.vsh
Executable file
@@ -0,0 +1,51 @@
|
|||||||
|
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||||
|
|
||||||
|
import os
|
||||||
|
import freeflowuniverse.herolib.core.code { Alias }
|
||||||
|
import freeflowuniverse.herolib.core.pathlib
|
||||||
|
import freeflowuniverse.herolib.schemas.openrpc
|
||||||
|
import freeflowuniverse.herolib.schemas.openrpc.codegen { generate_model }
|
||||||
|
|
||||||
|
const doc_path = '${os.dir(@FILE)}/testdata/openrpc.json'
|
||||||
|
|
||||||
|
mut doc_file := pathlib.get_file(path: doc_path)!
|
||||||
|
content := doc_file.read()!
|
||||||
|
object := openrpc.decode(content)!
|
||||||
|
model := generate_model(object)!
|
||||||
|
|
||||||
|
assert model.len == 3
|
||||||
|
assert model[0] is Alias
|
||||||
|
pet_id := model[0] as Alias
|
||||||
|
assert pet_id.name == 'PetId'
|
||||||
|
println(pet_id)
|
||||||
|
// $dbg;
|
||||||
|
|
||||||
|
// assert pet_id.typ.symbol == 'int'
|
||||||
|
|
||||||
|
// assert model[1] is Struct
|
||||||
|
// pet_struct := model[1] as Struct
|
||||||
|
// assert pet_struct.name == 'Pet'
|
||||||
|
// assert pet_struct.fields.len == 3
|
||||||
|
|
||||||
|
// // test field is `id PetId @[required]`
|
||||||
|
// assert pet_struct.fields[0].name == 'id'
|
||||||
|
// assert pet_struct.fields[0].typ.symbol == 'PetId'
|
||||||
|
// assert pet_struct.fields[0].attrs.len == 1
|
||||||
|
// assert pet_struct.fields[0].attrs[0].name == 'required'
|
||||||
|
|
||||||
|
// // test field is `name string @[required]`
|
||||||
|
// assert pet_struct.fields[1].name == 'name'
|
||||||
|
// assert pet_struct.fields[1].typ.symbol == 'string'
|
||||||
|
// assert pet_struct.fields[1].attrs.len == 1
|
||||||
|
// assert pet_struct.fields[1].attrs[0].name == 'required'
|
||||||
|
|
||||||
|
// // test field is `tag string`
|
||||||
|
// assert pet_struct.fields[2].name == 'tag'
|
||||||
|
// assert pet_struct.fields[2].typ.symbol == 'string'
|
||||||
|
// assert pet_struct.fields[2].attrs.len == 0
|
||||||
|
|
||||||
|
// assert model[2] is Alias
|
||||||
|
// pets_alias := model[2] as Alias
|
||||||
|
// assert pets_alias.name == 'Pets'
|
||||||
|
// assert pets_alias.typ.symbol == '[]Pet'
|
||||||
|
// }
|
||||||
205
examples/schemas/example/testdata/openrpc.json
vendored
Normal file
205
examples/schemas/example/testdata/openrpc.json
vendored
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
{
|
||||||
|
"openrpc": "1.0.0-rc1",
|
||||||
|
"info": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"title": "Petstore",
|
||||||
|
"license": {
|
||||||
|
"name": "MIT"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"servers": [
|
||||||
|
{
|
||||||
|
"name": "localhost",
|
||||||
|
"url": "http://localhost:8080"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"methods": [
|
||||||
|
{
|
||||||
|
"name": "list_pets",
|
||||||
|
"summary": "List all pets",
|
||||||
|
"tags": [
|
||||||
|
{
|
||||||
|
"name": "pets"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "limit",
|
||||||
|
"description": "How many items to return at one time (max 100)",
|
||||||
|
"required": false,
|
||||||
|
"schema": {
|
||||||
|
"type": "integer",
|
||||||
|
"minimum": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "pets",
|
||||||
|
"description": "A paged array of pets",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/Pets"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"code": 100,
|
||||||
|
"message": "pets busy"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "listPetExample",
|
||||||
|
"description": "List pet example",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "limit",
|
||||||
|
"value": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "listPetResultExample",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"id": 7,
|
||||||
|
"name": "fluffy",
|
||||||
|
"tag": "poodle"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "create_pet",
|
||||||
|
"summary": "Create a pet",
|
||||||
|
"tags": [
|
||||||
|
{
|
||||||
|
"name": "pets"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "newPetName",
|
||||||
|
"description": "Name of pet to create",
|
||||||
|
"required": true,
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "newPetTag",
|
||||||
|
"description": "Pet tag to create",
|
||||||
|
"schema": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "createPetExample",
|
||||||
|
"description": "Create pet example",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "newPetName",
|
||||||
|
"value": "fluffy"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "tag",
|
||||||
|
"value": "poodle"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "listPetResultExample",
|
||||||
|
"value": 7
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"$ref": "#/components/contentDescriptors/PetId"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "get_pet",
|
||||||
|
"summary": "Info for a specific pet",
|
||||||
|
"tags": [
|
||||||
|
{
|
||||||
|
"name": "pets"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"$ref": "#/components/contentDescriptors/PetId"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "pet",
|
||||||
|
"description": "Expected response to a valid request",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/Pet"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "getPetExample",
|
||||||
|
"description": "get pet example",
|
||||||
|
"params": [
|
||||||
|
{
|
||||||
|
"name": "petId",
|
||||||
|
"value": 7
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"result": {
|
||||||
|
"name": "getPetExampleResult",
|
||||||
|
"value": {
|
||||||
|
"name": "fluffy",
|
||||||
|
"tag": "poodle",
|
||||||
|
"id": 7
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"components": {
|
||||||
|
"contentDescriptors": {
|
||||||
|
"PetId": {
|
||||||
|
"name": "petId",
|
||||||
|
"required": true,
|
||||||
|
"description": "The id of the pet to retrieve",
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/PetId"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"schemas": {
|
||||||
|
"PetId": {
|
||||||
|
"type": "integer",
|
||||||
|
"minimum": 0
|
||||||
|
},
|
||||||
|
"Pet": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"id",
|
||||||
|
"name"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"id": {
|
||||||
|
"$ref": "#/components/schemas/PetId"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"tag": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"Pets": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/components/schemas/Pet"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
1  examples/web/.gitignore  vendored
@@ -6,3 +6,4 @@ markdown_example0
 doctree_example
 tree_scan
 *.dSYM
+ui_demo
|
|||||||
// install: 1
|
// install: 1
|
||||||
// template_update: 1
|
// template_update: 1
|
||||||
|
|
||||||
!!docusaurus.add sitename:"tfgrid_tech"
|
!!docusaurus.add sitename:"owh_intro"
|
||||||
git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech"
|
git_url:"https://git.ourworld.tf/ourworld_holding/docs_owh/src/branch/main/ebooks/owh_intro"
|
||||||
git_root:"/tmp/code"
|
git_root:"/tmp/code"
|
||||||
git_reset:1
|
git_reset:1
|
||||||
git_pull:1
|
git_pull:1
|
||||||
play:true
|
play:true
|
||||||
|
|
||||||
!!docusaurus.build
|
// !!docusaurus.build
|
||||||
|
|
||||||
// !!docusaurus.dev site:"tfgrid_tech" open:true
|
!!docusaurus.dev site:"owh_intro" open:true
|
||||||
'
|
'
|
||||||
)!
|
)!
|
||||||
|
|||||||

13 examples/web/ui_demo.vsh (new executable file)
@@ -0,0 +1,13 @@
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run

// import freeflowuniverse.herolib.web.ui

// fn main() {
// 	println('Starting UI test server on port 8080...')
// 	println('Visit http://localhost:8080 to see the admin interface')

// 	ui.start(
// 		title: 'Test Admin Panel'
// 		port: 8080
// 	)!
// }

30 generate.vsh
@@ -10,41 +10,39 @@ fp.version('v0.1.0')
 fp.description('Generate code')
 fp.skip_executable()
 
-mut path := fp.string('path', `p`, "", 'Path where to generate a module, if not mentioned will scan over all installers & clients.\nif . then will be path we are on.')
+mut path := fp.string('path', `p`, '', 'Path where to generate a module, if not mentioned will scan over all installers & clients.\nif . then will be path we are on.')
 reset := fp.bool('reset', `r`, false, 'If we want to reset')
 interactive := fp.bool('interactive', `i`, false, 'If we want to work interactive')
 scan := fp.bool('scan', `s`, false, 'If we want to scan')
 help_requested := fp.bool('help', `h`, false, 'Show help message')
 
 if help_requested {
 	println(fp.usage())
 	exit(0)
 }
 
 additional_args := fp.finalize() or {
 	eprintln(err)
 	println(fp.usage())
 	exit(1)
 }
 
 if additional_args.len > 0 {
 	eprintln('Unexpected arguments: ${additional_args.join(' ')}')
 	println(fp.usage())
 	exit(1)
 }
 
 // reset bool // regenerate all, dangerous !!!
 // interactive bool //if we want to ask
 // path string
 
-if path.trim_space() == "." {
+if path.trim_space() == '.' {
 	path = os.getwd()
 }
 
-if ! scan {
-	generator.do(path:path, reset:reset, interactive:interactive)!
-}else{
-	generator.scan(path:path, reset:reset, interactive:interactive)!
+if !scan {
+	generator.do(path: path, reset: reset, interactive: interactive)!
+} else {
+	generator.scan(path: path, reset: reset, interactive: interactive)!
 }
@@ -4,46 +4,45 @@ import os
 import flag
 
 fn addtoscript(tofind string, toadd string) ! {
 	home_dir := os.home_dir()
 	mut rc_file := '${home_dir}/.zshrc'
 	if !os.exists(rc_file) {
 		rc_file = '${home_dir}/.bashrc'
 		if !os.exists(rc_file) {
 			return error('No .zshrc or .bashrc found in home directory')
 		}
 	}
 
 	// Read current content
 	mut content := os.read_file(rc_file)!
 
 	// Remove existing alias if present
 	lines := content.split('\n')
 	mut new_lines := []string{}
 	mut prev_is_emtpy := false
 	for line in lines {
 		if prev_is_emtpy {
-			if line.trim_space() == ""{
+			if line.trim_space() == '' {
 				continue
-			}else{
+			} else {
 				prev_is_emtpy = false
 			}
 		}
-		if line.trim_space() == ""{
+		if line.trim_space() == '' {
 			prev_is_emtpy = true
 		}
 
 		if !line.contains(tofind) {
 			new_lines << line
 		}
 	}
 	new_lines << toadd
-	new_lines << ""
+	new_lines << ''
 	// Write back to file
 	new_content := new_lines.join('\n')
 	os.write_file(rc_file, new_content)!
 }
 
 
 vroot := @VROOT
 abs_dir_of_script := dir(@FILE)
 
@@ -52,29 +51,29 @@ println('Resetting all symlinks...')
 os.rm('${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {}
 
 // Create necessary directories
 os.mkdir_all('${os.home_dir()}/.vmodules/freeflowuniverse') or {
 	panic('Failed to create directory ~/.vmodules/freeflowuniverse: ${err}')
 }
 
 // Create new symlinks
 os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {
 	panic('Failed to create herolib symlink: ${err}')
 }
 
 println('Herolib installation completed successfully!')
 
 // Add vtest alias
-addtoscript('alias vtest=', 'alias vtest=\'v -stats -enable-globals -show-c-output -n -w -cg -gc none -cc tcc test\' ') or {
+addtoscript('alias vtest=', "alias vtest='v -stats -enable-globals -show-c-output -n -w -cg -gc none -cc tcc test' ") or {
 	eprintln('Failed to add vtest alias: ${err}')
 }
 
 // Add vrun alias
-addtoscript('alias vrun=', 'alias vrun=\'v -stats -enable-globals -show-c-output -n -w -cg -gc none -cc tcc run\' ') or {
+addtoscript('alias vrun=', "alias vrun='v -stats -enable-globals -show-c-output -n -w -cg -gc none -cc tcc run' ") or {
 	eprintln('Failed to add vrun alias: ${err}')
 }
 
 addtoscript('HOME/hero/bin', 'export PATH="\$PATH:\$HOME/hero/bin"') or {
 	eprintln('Failed to add path to hero, ${err}')
 }
 
 // ulimit -n 32000

65 install_v.sh
@@ -257,42 +257,42 @@ function hero_lib_get {
     fi
 }
 
-function install_secp256k1 {
-
-    echo "Installing secp256k1..."
-    if [[ "${OSNAME}" == "darwin"* ]]; then
-        # Attempt installation only if not already found
-        echo "Attempting secp256k1 installation via Homebrew..."
-        brew install secp256k1
-    elif [[ "${OSNAME}" == "ubuntu" ]]; then
-        # Install build dependencies
-        package_install "build-essential wget autoconf libtool"
-
-        # Download and extract secp256k1
-        cd "${DIR_BUILD}"
-        wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
-        tar -xvf v0.3.2.tar.gz
-
-        # Build and install
-        cd secp256k1-0.3.2/
-        ./autogen.sh
-        ./configure
-        make -j 5
-        if is_github_actions; then
-            run_sudo make install
-        else
-            make install
-        fi
-
-        # Cleanup
-        cd ..
-        rm -rf secp256k1-0.3.2 v0.3.2.tar.gz
-    else
-        echo "secp256k1 installation not implemented for ${OSNAME}"
-        exit 1
-    fi
-    echo "secp256k1 installation complete!"
-}
+# function install_secp256k1 {
+
+#     echo "Installing secp256k1..."
+#     if [[ "${OSNAME}" == "darwin"* ]]; then
+#         # Attempt installation only if not already found
+#         echo "Attempting secp256k1 installation via Homebrew..."
+#         brew install secp256k1
+#     elif [[ "${OSNAME}" == "ubuntu" ]]; then
+#         # Install build dependencies
+#         package_install "build-essential wget autoconf libtool"
+#
+#         # Download and extract secp256k1
+#         cd "${DIR_BUILD}"
+#         wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
+#         tar -xvf v0.3.2.tar.gz
+#
+#         # Build and install
+#         cd secp256k1-0.3.2/
+#         ./autogen.sh
+#         ./configure
+#         make -j 5
+#         if is_github_actions; then
+#             run_sudo make install
+#         else
+#             make install
+#         fi
+#
+#         # Cleanup
+#         cd ..
+#         rm -rf secp256k1-0.3.2 v0.3.2.tar.gz
+#     else
+#         echo "secp256k1 installation not implemented for ${OSNAME}"
+#         exit 1
+#     fi
+#     echo "secp256k1 installation complete!"
+# }
 
 
 remove_all() {
@@ -564,7 +564,6 @@ if [ "$RESET" = true ] || ! command_exists v; then
     sshknownkeysadd
 
     # Install secp256k1
-    install_secp256k1
 
     v-install
@@ -22,7 +22,7 @@ if repos.len() > 0 {
 
     if repo_array.len() > 0 {
         let repo = repo_array[0];
-        print("\nRepository path: " + get_repo_path(repo));
+        print("\nRepository path: " + path(repo));
 
         // Check if the repository has changes
         let has_changes = has_changes(repo);
@@ -12,6 +12,9 @@ const action_priorities = {
 }
 
 pub fn play(mut plbook PlayBook) ! {
+	if plbook.exists(filter: 'bizmodel.') == false {
+		return
+	}
 	// group actions by which bizmodel they belong to
 	actions_by_biz := arrays.group_by[string, &Action](plbook.find(filter: 'bizmodel.*')!,
 		fn (a &Action) string {
@@ -9,7 +9,7 @@
 
 Product ${name1} has revenue events (one offs)
 
 @{model.sheet.wiki() or {''}}
 namefilter:'${name1}_revenue,${name1}_cogs,${name1}_cogs_perc,${name1}_maintenance_month_perc' sheetname:'bizmodel_tf9
 
 - COGS = Cost of Goods Sold (is our cost to deliver the product/service)
@@ -21,14 +21,14 @@ Product ${name1} has revenue events (one offs)
 
 Product sold and its revenue/cost of goods
 
 @{model.sheet.wiki() or {''}}
 namefilter:'${name1}_nr_sold,${name1}_revenue_setup,${name1}_revenue_monthly,${name1}_cogs_setup,${name1}_cogs_setup_perc,${name1}_cogs_monthly,${name1}_cogs_monthly_perc'
 sheetname:'bizmodel_tf9
 
 - nr sold, is the nr sold per month of ${name1}
 - revenue setup is setup per item for ${name1}, this is the money we receive. Similar there is a revenue monthly.
 - cogs = Cost of Goods Sold (is our cost to deliver the product)
 - can we as a setup per item, or per month per item
 
 @if product.nr_months_recurring>1
 
@@ -40,23 +40,22 @@ This product ${name1} is recurring, means customer pays per month ongoing, the p
 
 #### the revenue/cogs calculated
 
 @{model.sheet.wiki() or {''}}
 namefilter:'${name1}_nr_sold_recurring'
 sheetname:'bizmodel_tf9
 
 This results in following revenues and cogs:
 
 @{model.sheet.wiki() or {''}}
 namefilter:'${name1}_revenue_setup_total,${name1}_revenue_monthly_total,${name1}_cogs_setup_total,${name1}_cogs_monthly_total,${name1}_cogs_setup_from_perc,${name1}_cogs_monthly_from_perc,${name1}_maintenance_month,
 ${name1}_revenue_monthly_recurring,${name1}_cogs_monthly_recurring'
 sheetname:'bizmodel_tf9
 
 resulting revenues:
 @{model.sheet.wiki() or {''}}
 namefilter:'${name1}_revenue_total,${name1}_cogs_total'
 sheetname:'bizmodel_tf9
 
 
 !!!spreadsheet.graph_line_row rowname:'${name1}_cogs_total' unit:million sheetname:'bizmodel_tf9'
 
 !!!spreadsheet.graph_line_row rowname:'${name1}_revenue_total' unit:million sheetname:'bizmodel_tf9'
lib/clients/giteaclient/.heroscript
Normal file
8
lib/clients/giteaclient/.heroscript
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
|
||||||
|
!!hero_code.generate_client
|
||||||
|
name:'giteaclient'
|
||||||
|
classname:'GiteaClient'
|
||||||
|
singleton:0
|
||||||
|
default:1
|
||||||
|
hasconfig:1
|
||||||
|
reset:0
|
||||||

136 lib/clients/giteaclient/giteaclient_factory_.v (new file)
@@ -0,0 +1,136 @@
module giteaclient

import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json

__global (
	giteaclient_global  map[string]&GiteaClient
	giteaclient_default string
)

/////////FACTORY

@[params]
pub struct ArgsGet {
pub mut:
	name   string = 'default'
	fromdb bool // will load from filesystem
	create bool // default will not create if not exist
}

pub fn new(args ArgsGet) !&GiteaClient {
	mut obj := GiteaClient{
		name: args.name
	}
	set(obj)!
	return get(name: args.name)!
}

pub fn get(args ArgsGet) !&GiteaClient {
	mut context := base.context()!
	giteaclient_default = args.name
	if args.fromdb || args.name !in giteaclient_global {
		mut r := context.redis()!
		if r.hexists('context:giteaclient', args.name)! {
			data := r.hget('context:giteaclient', args.name)!
			if data.len == 0 {
				return error('GiteaClient with name: giteaclient does not exist, prob bug.')
			}
			mut obj := json.decode(GiteaClient, data)!
			set_in_mem(obj)!
		} else {
			if args.create {
				new(args)!
			} else {
				return error("GiteaClient with name 'giteaclient' does not exist")
			}
		}
		return get(name: args.name)! // no longer from db nor create
	}
	return giteaclient_global[args.name] or {
		return error('could not get config for giteaclient with name:giteaclient')
	}
}

// register the config for the future
pub fn set(o GiteaClient) ! {
	mut o2 := set_in_mem(o)!
	giteaclient_default = o2.name
	mut context := base.context()!
	mut r := context.redis()!
	r.hset('context:giteaclient', o2.name, json.encode(o2))!
}

// does the config exists?
pub fn exists(args ArgsGet) !bool {
	mut context := base.context()!
	mut r := context.redis()!
	return r.hexists('context:giteaclient', args.name)!
}

pub fn delete(args ArgsGet) ! {
	mut context := base.context()!
	mut r := context.redis()!
	r.hdel('context:giteaclient', args.name)!
}

@[params]
pub struct ArgsList {
pub mut:
	fromdb bool // will load from filesystem
}

// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&GiteaClient {
	mut res := []&GiteaClient{}
	mut context := base.context()!
	if args.fromdb {
		// reset what is in mem
		giteaclient_global = map[string]&GiteaClient{}
		giteaclient_default = ''
	}
	if args.fromdb {
		mut r := context.redis()!
		mut l := r.hkeys('context:giteaclient')!

		for name in l {
			res << get(name: name, fromdb: true)!
		}
		return res
	} else {
		// load from memory
		for _, client in giteaclient_global {
			res << client
		}
	}
	return res
}

// only sets in mem, does not set as config
fn set_in_mem(o GiteaClient) !GiteaClient {
	mut o2 := obj_init(o)!
	giteaclient_global[o2.name] = &o2
	giteaclient_default = o2.name
	return o2
}

pub fn play(mut plbook PlayBook) ! {
	if !plbook.exists(filter: 'giteaclient.') {
		return
	}
	mut install_actions := plbook.find(filter: 'giteaclient.configure')!
	if install_actions.len > 0 {
		for install_action in install_actions {
			heroscript := install_action.heroscript()
			mut obj2 := heroscript_loads(heroscript)!
			set(obj2)!
		}
	}
}

// switch instance to be used for giteaclient
pub fn switch(name string) {
	giteaclient_default = name
}
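
The factory above is herolib's generated client pattern: instances live in a module-global map and are persisted to redis under the `context:giteaclient` hash. A minimal sketch of that lifecycle, assuming a working hero context with a reachable redis and a placeholder token (this sketch is not part of the committed code):

```vlang
import freeflowuniverse.herolib.clients.giteaclient

fn factory_example() ! {
	// set() registers the instance in memory and stores it in redis (hash context:giteaclient)
	giteaclient.set(giteaclient.GiteaClient{
		name:   'mygit'
		url:    'https://git.ourworld.tf'
		secret: 'placeholder-token'
	})!

	// get() serves from memory; fromdb forces a reload from redis
	cl := giteaclient.get(name: 'mygit', fromdb: true)!
	println('client ${cl.name} talks to ${cl.url}')
	assert giteaclient.exists(name: 'mygit')!

	// remove the stored config again
	giteaclient.delete(name: 'mygit')!
}
```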

65 lib/clients/giteaclient/giteaclient_model.v (new file)
@@ -0,0 +1,65 @@
// File: lib/clients/giteaclient/giteaclient_model.v
module giteaclient

import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.core.httpconnection
import os

pub const version = '0.0.0'

@[heap]
pub struct GiteaClient {
pub mut:
	name   string = 'default'
	user   string
	url    string = 'https://git.ourworld.tf'
	secret string
}

fn (mut self GiteaClient) httpclient() !&httpconnection.HTTPConnection {
	mut http_conn := httpconnection.new(
		name: 'giteaclient_${self.name}'
		url:  self.url
	)!

	// Add authentication header if API key is provided
	if self.secret.len > 0 {
		http_conn.default_header.add(.authorization, 'token ${self.secret}')
	}
	return http_conn
}

// your checking & initialization code if needed
fn obj_init(mycfg_ GiteaClient) !GiteaClient {
	mut mycfg := mycfg_
	if mycfg.url == '' {
		return error('url needs to be filled in for ${mycfg.name}')
	}
	if mycfg.url.starts_with('https://') {
		mycfg.url = mycfg.url.replace('https://', '')
	}
	if mycfg.url.starts_with('http://') {
		mycfg.url = mycfg.url.replace('http://', '')
	}
	mycfg.url = mycfg.url.trim_right('/')
	if mycfg.url.ends_with('/api/v1') {
		mycfg.url = mycfg.url.replace('/api/v1', '')
	}
	if mycfg.url.ends_with('/api') {
		mycfg.url = mycfg.url.replace('/api', '')
	}
	mycfg.url = 'https://${mycfg.url}/api/v1'

	if mycfg.secret.len == 0 {
		return error('secret needs to be filled in for ${mycfg.name}')
	}
	return mycfg
}

/////////////NORMALLY NO NEED TO TOUCH

pub fn heroscript_loads(heroscript string) !GiteaClient {
	mut obj := encoderhero.decode[GiteaClient](heroscript)!
	return obj
}
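
The interesting part of `obj_init` is the URL normalization: whatever form the configured `url` takes, it is stripped back to a bare host and then suffixed with `/api/v1`. A small in-module sketch of the expected behaviour (hypothetical helper, only to illustrate the outcome, not part of the committed file):

```vlang
module giteaclient

fn normalization_sketch() ! {
	inputs := ['https://git.ourworld.tf', 'git.ourworld.tf/', 'https://git.ourworld.tf/api/v1']
	for input in inputs {
		cfg := obj_init(GiteaClient{ name: 'demo', url: input, secret: 'x' })!
		// every variant ends up pointing at the same API base
		assert cfg.url == 'https://git.ourworld.tf/api/v1'
	}
}
```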

113 lib/clients/giteaclient/methods.v (new file)
@@ -0,0 +1,113 @@
// File: lib/clients/giteaclient/methods.v
module giteaclient

import freeflowuniverse.herolib.core.httpconnection
import json
import net.http

// List a user's own repositories
pub fn (mut client GiteaClient) user_list_repos() ![]Repository {
	req := httpconnection.Request{
		method: .get
		prefix: '/user/repos'
	}
	mut http_client := client.httpclient()!
	r := http_client.get_json_list_generic[Repository](req)!
	return r
}

// Get a repository
pub fn (mut client GiteaClient) get_repo(owner string, repo string) !Repository {
	req := httpconnection.Request{
		method: .get
		prefix: '/repos/${owner}/${repo}'
	}
	mut http_client := client.httpclient()!
	return http_client.get_json_generic[Repository](req)!
}

// Create a repository for the authenticated user.
pub fn (mut client GiteaClient) create_current_user_repo(args CreateRepoOption) !Repository {
	req := httpconnection.Request{
		method:     .post
		prefix:     '/user/repos'
		data:       json.encode(args)
		dataformat: .json
	}
	mut http_client := client.httpclient()!
	return http_client.post_json_generic[Repository](req)!
}

//
// Issue Operations
//

// List a repository's issues
pub fn (mut client GiteaClient) list_repo_issues(owner string, repo string) ![]Issue {
	req := httpconnection.Request{
		method: .get
		prefix: '/repos/${owner}/${repo}/issues'
	}
	mut http_client := client.httpclient()!
	return http_client.get_json_list_generic[Issue](req)!
}

// Get an issue
pub fn (mut client GiteaClient) get_issue(owner string, repo string, index i64) !Issue {
	req := httpconnection.Request{
		method: .get
		prefix: '/repos/${owner}/${repo}/issues/${index}'
	}
	mut http_client := client.httpclient()!
	return http_client.get_json_generic[Issue](req)!
}

// Create an issue
pub fn (mut client GiteaClient) create_issue(owner string, repo string, args CreateIssueOption) !Issue {
	req := httpconnection.Request{
		method:     .post
		prefix:     '/repos/${owner}/${repo}/issues'
		data:       json.encode(args)
		dataformat: .json
	}
	mut http_client := client.httpclient()!
	return http_client.post_json_generic[Issue](req)!
}

//
// User Operations
//

// get_user gets a user by username
pub fn (mut client GiteaClient) get_user(username string) !User {
	req := httpconnection.Request{
		method: .get
		prefix: '/users/${username}'
	}
	mut http_client := client.httpclient()!
	return http_client.get_json_generic[User](req)!
}

// get_current_user gets the authenticated user
pub fn (mut client GiteaClient) get_current_user() !User {
	req := httpconnection.Request{
		method: .get
		prefix: '/user'
	}
	mut http_client := client.httpclient()!
	return http_client.get_json_generic[User](req)!
}

//
// Admin Operations
//

// list_users lists all users
pub fn (mut client GiteaClient) admin_list_users() ![]User {
	req := httpconnection.Request{
		method: .get
		prefix: '/admin/users'
	}
	mut http_client := client.httpclient()!
	return http_client.get_json_list_generic[User](req)!
}
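
Beyond what the readme further down in this change demonstrates, the repository and issue helpers compose the same way. A short sketch, assuming a configured default instance and a hypothetical owner/repo pair:

```vlang
import freeflowuniverse.herolib.clients.giteaclient

fn repo_issue_example() ! {
	mut client := giteaclient.get()! // assumes a default instance was configured before
	repo := client.get_repo('ourworld_holding', 'docs_owh')! // hypothetical owner/repo
	println('${repo.full_name}: ${repo.open_issues_count} open issues on ${repo.default_branch}')
	issue := client.get_issue('ourworld_holding', 'docs_owh', 1)!
	println('#${issue.number}: ${issue.title} [${issue.state}]')
}
```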

508 lib/clients/giteaclient/models.v (new file)
@@ -0,0 +1,508 @@
module giteaclient

import time

pub struct APIError {
pub:
	message string
	url string
}

pub struct AccessToken {
pub:
	id i64
	name string
	scopes []string
	sha1 string
	token_last_eight string
}

pub struct ActionVariable {
pub:
	owner_id i64
	repo_id i64
	name string
	data string
}

pub struct Activity {
pub:
	act_user User
	act_user_id i64
	comment Comment
	comment_id i64
	content string
	created time.Time
	id i64
	is_private bool
	op_type string
	ref_name string
	repo Repository
	repo_id i64
	user_id i64
}

pub struct AddCollaboratorOption {
pub:
	permission string
}

pub struct AddTimeOption {
pub:
	time i64
	created time.Time
	user_name string
}

pub struct AnnotatedTagObject {
pub:
	sha string
	typ string @[json: 'type'] // `type` is a keyword in V
	url string
}

pub struct AnnotatedTag {
pub:
	message string
	object AnnotatedTagObject
	sha string
	tag string
	tagger CommitUser
	url string
	verification PayloadCommitVerification
}

pub struct Attachment {
pub:
	browser_download_url string
	created_at time.Time
	download_count i64
	id i64
	name string
	size i64
	uuid string
}

pub struct Badge {
pub:
	id i64
	slug string
	description string
	image_url string
}

pub struct Branch {
pub:
	commit PayloadCommit
	effective_branch_protection_name string
	enable_status_check bool
	name string
	protected bool
	required_approvals i64
	status_check_contexts []string
	user_can_merge bool
	user_can_push bool
}

pub struct BranchProtection {
pub:
	branch_name string
	rule_name string
	enable_push bool
	enable_push_whitelist bool
	push_whitelist_usernames []string
	push_whitelist_teams []string
	push_whitelist_deploy_keys bool
	enable_merge_whitelist bool
	merge_whitelist_usernames []string
	merge_whitelist_teams []string
	enable_status_check bool
	status_check_contexts []string
	required_approvals i64
	enable_approvals_whitelist bool
	approvals_whitelist_username []string
	approvals_whitelist_teams []string
	block_on_rejected_reviews bool
	block_on_official_review_requests bool
	block_on_outdated_branch bool
	dismiss_stale_approvals bool
	ignore_stale_approvals bool
	require_signed_commits bool
	protected_file_patterns string
	unprotected_file_patterns string
	created_at time.Time
	updated_at time.Time
}

pub struct ChangeFileOperation {
pub:
	operation string // "create", "update", "delete"
	path string
	content string // base64 encoded
	from_path string
	sha string
}

pub struct ChangeFilesOptions {
pub:
	author Identity
	branch string
	committer Identity
	dates CommitDateOptions
	files []ChangeFileOperation
	message string
	new_branch string
	signoff bool
}

pub struct ChangedFile {
pub:
	additions i64
	changes i64
	contents_url string
	deletions i64
	filename string
	html_url string
	previous_filename string
	raw_url string
	status string
}

pub struct Commit {
pub:
	author User
	commit RepoCommit
	committer User
	created time.Time
	files []CommitAffectedFiles
	html_url string
	parents []CommitMeta
	sha string
	stats CommitStats
	url string
}

pub struct CommitAffectedFiles {
pub:
	filename string
	status string
}

pub struct CommitDateOptions {
pub:
	author time.Time
	committer time.Time
}

pub struct CommitMeta {
pub:
	created time.Time
	sha string
	url string
}

pub struct CommitStats {
pub:
	additions i64
	deletions i64
	total i64
}

pub struct CommitUser {
pub:
	date string
	email string
	name string
}

pub struct Comment {
pub:
	assets []Attachment
	body string
	created_at time.Time
	html_url string
	id i64
	issue_url string
	original_author string
	original_author_id i64
	pull_request_url string
	updated_at time.Time
	user User
}

pub struct CreateIssueOption {
pub:
	title string
	assignee string
	assignees []string
	body string
	closed bool
	due_date time.Time
	labels []i64
	milestone i64
	ref string
}

pub struct CreateRepoOption {
pub:
	name string
	auto_init bool
	default_branch string
	description string
	gitignores string
	issue_labels string
	license string
	object_format_name string // "sha1" or "sha256"
	private bool
	readme string
	template bool
	trust_model string // "default", "collaborator", "committer", "collaboratorcommitter"
}

pub struct Identity {
pub:
	email string
	name string
}

pub struct InternalTracker {
pub:
	allow_only_contributors_to_track_time bool
	enable_issue_dependencies bool
	enable_time_tracker bool
}

pub struct Issue {
pub:
	id i64
	url string
	html_url string
	number i64
	user User
	original_author string
	original_author_id i64
	title string
	body string
	ref string
	labels []Label
	milestone Milestone
	assignee User
	assignees []User
	state string // StateType
	is_locked bool
	comments i64
	created_at time.Time
	updated_at time.Time
	closed_at time.Time
	due_date time.Time
	pull_request PullRequestMeta
	repository RepositoryMeta
	assets []Attachment
	pin_order i64
}

pub struct Label {
pub:
	id i64
	name string
	exclusive bool
	is_archived bool
	color string
	description string
	url string
}

pub struct Milestone {
pub:
	id i64
	title string
	description string
	state string // StateType
	open_issues i64
	closed_issues i64
	created_at time.Time
	updated_at time.Time
	closed_at time.Time
	due_on time.Time
}

pub struct Organization {
pub:
	avatar_url string
	description string
	email string
	full_name string
	id i64
	location string
	name string
	repo_admin_change_team_access bool
	username string
	visibility string
	website string
}

pub struct PayloadCommitVerification {
pub:
	payload string
	reason string
	signature string
	signer PayloadUser
	verified bool
}

pub struct PayloadCommit {
pub:
	added []string
	author PayloadUser
	committer PayloadUser
	id string
	message string
	modified []string
	removed []string
	timestamp time.Time
	url string
	verification PayloadCommitVerification
}

pub struct PayloadUser {
pub:
	email string
	name string
	username string
}

pub struct Permission {
pub:
	admin bool
	pull bool
	push bool
}

pub struct PullRequestMeta {
pub:
	merged bool
	merged_at time.Time
	draft bool
	html_url string
}

pub struct RepoCommit {
pub:
	author CommitUser
	committer CommitUser
	message string
	tree CommitMeta
	url string
	verification PayloadCommitVerification
}

pub struct Repository {
pub:
	id i64
	owner User
	name string
	full_name string
	description string
	empty bool
	private bool
	fork bool
	template bool
	parent_id i64
	mirror bool
	size i64
	language string
	languages_url string
	html_url string
	url string
	link string
	ssh_url string
	clone_url string
	website string
	stars_count i64
	forks_count i64
	watchers_count i64
	open_issues_count i64
	open_pr_counter i64
	release_counter i64
	default_branch string
	archived bool
	created_at time.Time
	updated_at time.Time
	archived_at time.Time
	permissions Permission
	has_issues bool
	internal_tracker InternalTracker
	has_wiki bool
	has_pull_requests bool
	has_projects bool
	has_releases bool
	has_packages bool
	has_actions bool
	ignore_whitespace_conflicts bool
	allow_merge_commits bool
	allow_rebase bool
	allow_rebase_explicit bool
	allow_squash_merge bool
	allow_fast_forward_only_merge bool
	allow_rebase_update bool
	default_delete_branch_after_merge bool
	default_merge_style string
	default_allow_maintainer_edit bool
	avatar_url string
	internal bool
	mirror_interval string
	mirror_updated time.Time
	repo_transfer RepoTransfer
}

pub struct RepositoryMeta {
pub:
	id i64
	name string
	owner string
	full_name string
}

pub struct Team {
pub:
	can_create_org_repo bool
	description string
	id i64
	includes_all_repositories bool
	name string
	organization Organization
	permission string
	units []string
	units_map map[string]string
}

pub struct RepoTransfer {
pub:
	doer User
	recipient User
	teams []Team
}

pub struct User {
pub:
	id i64
	login string
	full_name string
	email string
	avatar_url string
	language string
	is_admin bool
	last_login time.Time
	created time.Time
	restricted bool
	active bool
	prohibit_login bool
	location string
	website string
	description string
	visibility string
	followers_count i64
	following_count i64
	starred_repos_count i64
	username string
}

62 lib/clients/giteaclient/readme.md (new file)
@@ -0,0 +1,62 @@
// File: lib/clients/giteaclient/readme.md
# giteaclient

This library provides a client for interacting with the Gitea API.

## Configuration

You can configure the client using a HeroScript file:

```hero
!!giteaclient.configure
    name: 'default' // optional, 'default' is the default instance name
    url: 'https://git.ourworld.tf'
    secret: 'your-gitea-api-token'
```

## Usage Example

Here's how to get the client and use its methods.

```vlang
import freeflowuniverse.herolib.clients.giteaclient
import freeflowuniverse.herolib.core.base

fn main() ! {
    // Make sure hero is initialized
    base.init()!

    // Example configuration (can also be loaded from file)
    heroscript_config := "!!giteaclient.configure url:'https://gitea.com' secret:'...your_token...'"
    mut plbook := playbook.new(text: heroscript_config)!
    giteaclient.play(mut plbook)!

    // Get the default configured client
    mut client := giteaclient.get()!

    // Get the authenticated user
    user := client.get_current_user()!
    println('Authenticated as: ${user.login}')

    // List repositories for the authenticated user
    repos := client.user_list_repos()!
    println('Found ${repos.len} repositories:')
    for repo in repos {
        println('- ${repo.full_name}')
    }

    // Get a specific repository's issues
    owner := 'gitea'
    repo_name := 'gitea'
    println('\nFetching issues for ${owner}/${repo_name}...')
    issues := client.list_repo_issues(owner, repo_name)!
    println('Found ${issues.len} issues.')
    for issue in issues[..5] { // print first 5 issues
        println('  #${issue.number}: ${issue.title}')
    }
}
```

## tips

- to see the admin api: https://git.ourworld.tf/api/swagger

25728 lib/clients/giteaclient/swagger.json (new file)
File diff suppressed because it is too large.

@@ -3,6 +3,7 @@ module ipapi
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json
 
 __global (
 	ipapi_global map[string]&IPApi
@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name   string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
 
-fn args_get(args_ ArgsGet) ArgsGet {
-	mut args := args_
-	if args.name == '' {
-		args.name = 'default'
-	}
-	return args
-}
-
-pub fn get(args_ ArgsGet) !&IPApi {
-	mut context := base.context()!
-	mut args := args_get(args_)
+pub fn new(args ArgsGet) !&IPApi {
 	mut obj := IPApi{
 		name: args.name
 	}
-	if args.name !in ipapi_global {
-		if !exists(args)! {
-			set(obj)!
+	set(obj)!
+	return get(name: args.name)!
+}
+
+pub fn get(args ArgsGet) !&IPApi {
+	mut context := base.context()!
+	ipapi_default = args.name
+	if args.fromdb || args.name !in ipapi_global {
+		mut r := context.redis()!
+		if r.hexists('context:ipapi', args.name)! {
+			data := r.hget('context:ipapi', args.name)!
+			if data.len == 0 {
+				return error('IPApi with name: ipapi does not exist, prob bug.')
+			}
+			mut obj := json.decode(IPApi, data)!
+			set_in_mem(obj)!
 		} else {
-			heroscript := context.hero_config_get('ipapi', args.name)!
-			mut obj_ := heroscript_loads(heroscript)!
-			set_in_mem(obj_)!
+			if args.create {
+				new(args)!
+			} else {
+				return error("IPApi with name 'ipapi' does not exist")
+			}
 		}
+		return get(name: args.name)! // no longer from db nor create
 	}
 	return ipapi_global[args.name] or {
-		println(ipapi_global)
-		// bug if we get here because should be in globals
-		panic('could not get config for ipapi with name, is bug:${args.name}')
+		return error('could not get config for ipapi with name:ipapi')
 	}
 }
 
 // register the config for the future
 pub fn set(o IPApi) ! {
-	set_in_mem(o)!
+	mut o2 := set_in_mem(o)!
+	ipapi_default = o2.name
 	mut context := base.context()!
-	heroscript := heroscript_dumps(o)!
-	context.hero_config_set('ipapi', o.name, heroscript)!
+	mut r := context.redis()!
+	r.hset('context:ipapi', o2.name, json.encode(o2))!
 }
 
 // does the config exists?
-pub fn exists(args_ ArgsGet) !bool {
+pub fn exists(args ArgsGet) !bool {
 	mut context := base.context()!
-	mut args := args_get(args_)
-	return context.hero_config_exists('ipapi', args.name)
+	mut r := context.redis()!
+	return r.hexists('context:ipapi', args.name)!
 }
 
-pub fn delete(args_ ArgsGet) ! {
-	mut args := args_get(args_)
+pub fn delete(args ArgsGet) ! {
 	mut context := base.context()!
-	context.hero_config_delete('ipapi', args.name)!
-	if args.name in ipapi_global {
-		// del ipapi_global[args.name]
-	}
+	mut r := context.redis()!
+	r.hdel('context:ipapi', args.name)!
+}
+
+@[params]
+pub struct ArgsList {
+pub mut:
+	fromdb bool // will load from filesystem
+}
+
+// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
+pub fn list(args ArgsList) ![]&IPApi {
+	mut res := []&IPApi{}
+	mut context := base.context()!
+	if args.fromdb {
+		// reset what is in mem
+		ipapi_global = map[string]&IPApi{}
+		ipapi_default = ''
+	}
+	if args.fromdb {
+		mut r := context.redis()!
+		mut l := r.hkeys('context:ipapi')!
+
+		for name in l {
+			res << get(name: name, fromdb: true)!
+		}
+		return res
+	} else {
+		// load from memory
+		for _, client in ipapi_global {
+			res << client
+		}
+	}
+	return res
 }
 
 // only sets in mem, does not set as config
-fn set_in_mem(o IPApi) ! {
+fn set_in_mem(o IPApi) !IPApi {
 	mut o2 := obj_init(o)!
-	ipapi_global[o.name] = &o2
-	ipapi_default = o.name
+	ipapi_global[o2.name] = &o2
+	ipapi_default = o2.name
+	return o2
 }
 
 pub fn play(mut plbook PlayBook) ! {
+	if !plbook.exists(filter: 'ipapi.') {
+		return
+	}
 	mut install_actions := plbook.find(filter: 'ipapi.configure')!
 	if install_actions.len > 0 {
 		for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	ipapi_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
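
The regenerated ipapi factory now also exposes `list()`, which either serves the in-memory instances or, with `fromdb`, re-reads everything stored in redis and resets the cache. A minimal sketch, assuming the module is importable under the usual clients path and that `IPApi` exposes a `name` field (both assumptions, not confirmed by this change):

```vlang
import freeflowuniverse.herolib.clients.ipapi // assumed import path

fn list_sketch() ! {
	// fromdb: true reloads every stored instance from redis (hash context:ipapi)
	for inst in ipapi.list(fromdb: true)! {
		println('ipapi instance: ${inst.name}')
	}
}
```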
@@ -282,7 +282,6 @@ pub fn (mut j Jina) list_classifiers() ![]Classifier {
 		method: .get
 		prefix: 'v1/classifiers'
 	}
-
 	mut httpclient := j.httpclient()!
 	response := httpclient.get(req)!
 	classifiers := json.decode([]Classifier, response)!
@@ -40,7 +40,7 @@ pub fn jina_model_from_string(s string) !JinaModel {
 		'jina-embeddings-v2-base-zh' { JinaModel.jina_embeddings_v2_base_zh }
 		'jina-embeddings-v2-base-code' { JinaModel.jina_embeddings_v2_base_code }
 		'jina-embeddings-v3' { JinaModel.jina_embeddings_v3 }
-		else { error('Invalid Jina model string: ${s}') }
+		else { return error('Invalid Jina model string: ${s}') }
 	}
 }
 
@@ -66,7 +66,7 @@ pub fn truncate_type_from_string(s string) !TruncateType {
 		'NONE' { TruncateType.none_ }
 		'START' { TruncateType.start }
 		'END' { TruncateType.end }
-		else { error('Invalid truncate type string: ${s}') }
+		else { return error('Invalid truncate type string: ${s}') }
 	}
 }
 
@@ -95,7 +95,7 @@ pub fn embedding_type_from_string(s string) !EmbeddingType {
 		'base64' { EmbeddingType.base64 }
 		'binary' { EmbeddingType.binary }
 		'ubinary' { EmbeddingType.ubinary }
-		else { error('Invalid embedding type string: ${s}') }
+		else { return error('Invalid embedding type string: ${s}') }
 	}
 }
 
@@ -116,7 +116,7 @@ pub fn task_type_from_string(s string) !TaskType {
 		'text-matching' { TaskType.text_matching }
 		'classification' { TaskType.classification }
 		'separation' { TaskType.separation }
-		else { error('Invalid task type string: ${s}') }
+		else { return error('Invalid task type string: ${s}') }
 	}
 }
@@ -3,6 +3,7 @@ module jina
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
jina_global map[string]&Jina
|
jina_global map[string]&Jina
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
-	mut args := args_
-	if args.name == '' {
-		args.name = 'default'
-	}
-	return args
-}
-
-pub fn get(args_ ArgsGet) !&Jina {
-	mut context := base.context()!
-	mut args := args_get(args_)
+pub fn new(args ArgsGet) !&Jina {
 	mut obj := Jina{
 		name: args.name
 	}
-	if args.name !in jina_global {
-		if !exists(args)! {
-			set(obj)!
+	set(obj)!
+	return get(name: args.name)!
+}
+
+pub fn get(args ArgsGet) !&Jina {
+	mut context := base.context()!
+	jina_default = args.name
+	if args.fromdb || args.name !in jina_global {
+		mut r := context.redis()!
+		if r.hexists('context:jina', args.name)! {
+			data := r.hget('context:jina', args.name)!
+			if data.len == 0 {
+				return error('Jina with name: jina does not exist, prob bug.')
+			}
+			mut obj := json.decode(Jina, data)!
+			set_in_mem(obj)!
 		} else {
-			heroscript := context.hero_config_get('jina', args.name)!
-			mut obj_ := heroscript_loads(heroscript)!
-			set_in_mem(obj_)!
+			if args.create {
+				new(args)!
+			} else {
+				return error("Jina with name 'jina' does not exist")
+			}
 		}
+		return get(name: args.name)! // no longer from db nor create
 	}
 	return jina_global[args.name] or {
-		println(jina_global)
-		// bug if we get here because should be in globals
-		panic('could not get config for jina with name, is bug:${args.name}')
+		return error('could not get config for jina with name:jina')
 	}
 }

 // register the config for the future
 pub fn set(o Jina) ! {
-	set_in_mem(o)!
+	mut o2 := set_in_mem(o)!
+	jina_default = o2.name
 	mut context := base.context()!
-	heroscript := heroscript_dumps(o)!
-	context.hero_config_set('jina', o.name, heroscript)!
+	mut r := context.redis()!
+	r.hset('context:jina', o2.name, json.encode(o2))!
 }

 // does the config exists?
-pub fn exists(args_ ArgsGet) !bool {
+pub fn exists(args ArgsGet) !bool {
 	mut context := base.context()!
-	mut args := args_get(args_)
-	return context.hero_config_exists('jina', args.name)
+	mut r := context.redis()!
+	return r.hexists('context:jina', args.name)!
 }

-pub fn delete(args_ ArgsGet) ! {
-	mut args := args_get(args_)
+pub fn delete(args ArgsGet) ! {
 	mut context := base.context()!
-	context.hero_config_delete('jina', args.name)!
-	if args.name in jina_global {
-		// del jina_global[args.name]
-	}
+	mut r := context.redis()!
+	r.hdel('context:jina', args.name)!
 }

+@[params]
+pub struct ArgsList {
+pub mut:
+	fromdb bool // will load from filesystem
+}
+
+// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
+pub fn list(args ArgsList) ![]&Jina {
+	mut res := []&Jina{}
+	mut context := base.context()!
+	if args.fromdb {
+		// reset what is in mem
+		jina_global = map[string]&Jina{}
+		jina_default = ''
+	}
+	if args.fromdb {
+		mut r := context.redis()!
+		mut l := r.hkeys('context:jina')!
+		for name in l {
+			res << get(name: name, fromdb: true)!
+		}
+		return res
+	} else {
+		// load from memory
+		for _, client in jina_global {
+			res << client
+		}
+	}
+	return res
+}
+
 // only sets in mem, does not set as config
-fn set_in_mem(o Jina) ! {
+fn set_in_mem(o Jina) !Jina {
 	mut o2 := obj_init(o)!
-	jina_global[o.name] = &o2
-	jina_default = o.name
+	jina_global[o2.name] = &o2
+	jina_default = o2.name
+	return o2
 }

 pub fn play(mut plbook PlayBook) ! {
+	if !plbook.exists(filter: 'jina.') {
+		return
+	}
 	mut install_actions := plbook.find(filter: 'jina.configure')!
 	if install_actions.len > 0 {
 		for install_action in install_actions {

@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	jina_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
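After the rework every client module exposes the same small lifecycle surface (new/get/set/exists/delete/list/switch). A minimal usage sketch, assuming the module is importable as `freeflowuniverse.herolib.clients.jina` and that a redis-backed context is available; only the functions shown in the diff above are relied on:

```v
import freeflowuniverse.herolib.clients.jina

fn example() ! {
	// create: true falls back to new() when 'default' is in neither
	// memory nor the redis hash 'context:jina'
	mut j := jina.get(name: 'default', create: true)!
	println(j.name)

	// exists() now asks redis directly
	assert jina.exists(name: 'default')!

	// fromdb: true resets the in-memory cache and reloads every instance
	for client in jina.list(fromdb: true)! {
		println(client.name)
	}

	// delete() removes the field from the redis hash
	jina.delete(name: 'default')!
}
```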
@@ -63,7 +63,7 @@ pub fn jina_rerank_model_from_string(s string) !JinaRerankModel {
 		'jina-reranker-v1-tiny-en' { JinaRerankModel.reranker_v1_tiny_en }
 		'jina-reranker-v1-turbo-en' { JinaRerankModel.reranker_v1_turbo_en }
 		'jina-colbert-v1-en' { JinaRerankModel.colbert_v1_en }
-		else { error('Invalid JinaRerankModel string: ${s}') }
+		else { return error('Invalid JinaRerankModel string: ${s}') }
 	}
 }
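With `return` added, the invalid-string arm now actually propagates the error instead of evaluating it and falling through. A short sketch of the caller-side effect, using only the model names from the match above (the import path is assumed):

```v
import freeflowuniverse.herolib.clients.jina

fn pick_model() ! {
	m := jina.jina_rerank_model_from_string('jina-colbert-v1-en')!
	println(m) // colbert_v1_en

	// unknown names now surface as an error instead of slipping past the match
	m2 := jina.jina_rerank_model_from_string('no-such-model') or {
		println('rejected: ${err}')
		return
	}
	println(m2)
}
```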
@@ -3,6 +3,7 @@ module livekit
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	livekit_global map[string]&LivekitClient

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → LivekitClient, jina_global → livekit_global, jina_default → livekit_default and 'context:jina' → 'context:livekit' …]

@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	livekit_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
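The storage move is easy to verify: every module now keeps one redis hash named `context:<module>`, with one field per instance holding the JSON written by set(). A small inspection sketch using only the calls that appear in these diffs:

```v
import freeflowuniverse.herolib.core.base

fn inspect_livekit_configs() ! {
	mut context := base.context()!
	mut r := context.redis()!
	// every configured livekit instance is one field in this hash
	for name in r.hkeys('context:livekit')! {
		data := r.hget('context:livekit', name)!
		println('${name}: ${data}') // raw JSON, as written by set()
	}
}
```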
@@ -3,6 +3,7 @@ module mailclient
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	mailclient_global map[string]&MailClient

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → MailClient, jina_global → mailclient_global, jina_default → mailclient_default and 'context:jina' → 'context:mailclient' …]

@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	mailclient_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
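new() and get(create: true) converge on the same state but differ in intent: new() unconditionally registers a fresh default object for the name and then re-gets it, while get(create: true) only falls back to new() when the name is missing from both memory and redis. A sketch (import path assumed):

```v
import freeflowuniverse.herolib.clients.mailclient

fn ensure_clients() ! {
	// always (re)writes a default MailClient under the name 'default'
	d := mailclient.new(name: 'default')!
	println(d.name)

	// only creates 'work' if it is not already stored in memory or redis
	work := mailclient.get(name: 'work', create: true)!
	println(work.name)
}
```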
@@ -3,6 +3,7 @@ module meilisearch
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	meilisearch_global map[string]&MeilisearchClient

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → MeilisearchClient, jina_global → meilisearch_global, jina_default → meilisearch_default and 'context:jina' → 'context:meilisearch' …]

@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	meilisearch_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
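list() has two modes: without arguments it only walks the in-memory map of the current process, while fromdb: true first clears that map and then repopulates it from the context:meilisearch hash. A sketch (import path assumed):

```v
import freeflowuniverse.herolib.clients.meilisearch

fn compare_views() ! {
	cached := meilisearch.list()!             // what this process has touched so far
	stored := meilisearch.list(fromdb: true)! // authoritative view reloaded from redis
	println('cached: ${cached.len}, stored: ${stored.len}')
}
```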
@@ -3,6 +3,7 @@ module mycelium
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	mycelium_global map[string]&Mycelium

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → Mycelium, jina_global → mycelium_global, jina_default → mycelium_default and 'context:jina' → 'context:mycelium' …]

@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
 // switch instance to be used for mycelium
 pub fn switch(name string) {
 	mycelium_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
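switch() only flips the module-level default name; it never touches redis, and get() itself still falls back to the literal name 'default' when no name is passed. A sketch of juggling two named instances (import path assumed):

```v
import freeflowuniverse.herolib.clients.mycelium

fn two_nodes() ! {
	nodea := mycelium.get(name: 'nodea', create: true)!
	nodeb := mycelium.get(name: 'nodeb', create: true)!

	// record 'nodeb' as the module-wide default (mycelium_default)
	mycelium.switch('nodeb')

	// explicit names keep working regardless of the switch
	println('${nodea.name} / ${nodeb.name}')
}
```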
@@ -3,6 +3,7 @@ module mycelium_rpc
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	mycelium_rpc_global map[string]&MyceliumRPC

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → MyceliumRPC, jina_global → mycelium_rpc_global, jina_default → mycelium_rpc_default and 'context:jina' → 'context:mycelium_rpc' …]

@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
 // switch instance to be used for mycelium_rpc
 pub fn switch(name string) {
 	mycelium_rpc_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
@@ -17,7 +17,7 @@ pub struct MyceliumRPC {
 pub mut:
 	name string = 'default'
 	url string = default_url // RPC server URL
-	rpc_client ?&jsonrpc.Client @[skip]
+	// rpc_client ?&jsonrpc.Client @[skip]
 }

 // your checking & initialization code if needed
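Commenting out the rpc_client reference keeps MyceliumRPC round-trippable through json.encode/json.decode, which set() and get() now depend on; a live RPC handle has no sensible JSON form. A hedged sketch of that round trip, using a stand-in struct with only the fields shown in this hunk (the literal URL replaces default_url here):

```v
import json

struct RpcConfigSketch {
	name string = 'default'
	url  string = 'http://localhost:8990' // stand-in for default_url
}

fn roundtrip() ! {
	obj := RpcConfigSketch{
		name: 'main'
	}
	data := json.encode(obj) // what set() stores in the redis hash
	back := json.decode(RpcConfigSketch, data)! // what get() rebuilds
	assert back.name == obj.name
}
```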
@@ -3,6 +3,7 @@ module openai
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	openai_global map[string]&OpenAI

@@ -14,75 +15,114 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → OpenAI, jina_global → openai_global, jina_default → openai_default and 'context:jina' → 'context:openai' …]
 		for install_action in install_actions {
-			// println('install_action: ${install_action}')
 			heroscript := install_action.heroscript()
 			mut obj2 := heroscript_loads(heroscript)!
 			set(obj2)!

@@ -94,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	openai_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
@@ -15,7 +15,7 @@ pub mut:
 	api_key string
 	url string = 'https://openrouter.ai/api/v1'
 	model_default string = 'gpt-oss-120b'
-	conn ?&httpconnection.HTTPConnection @[skip; str: skip]
+	// conn ?&httpconnection.HTTPConnection @[skip; str: skip]
 }

 // your checking & initialization code if needed
@@ -50,20 +50,15 @@ fn obj_init(mycfg_ OpenAI) !OpenAI {
 }

 pub fn (mut client OpenAI) connection() !&httpconnection.HTTPConnection {
-	mut c := client.conn or {
-		mut c2 := httpconnection.new(
-			name: 'openaiconnection_${client.name}'
-			url: client.url
-			cache: false
-			retry: 20
-		)!
-		c2
-	}
+	mut c2 := httpconnection.new(
+		name: 'openaiconnection_${client.name}'
+		url: client.url
+		cache: false
+		retry: 20
+	)!
+
 	// Authorization: 'Bearer <OPENROUTER_API_KEY>',
-	c.default_header.set(.authorization, 'Bearer ${client.api_key}')
-	client.conn = c
-	return c
+	c2.default_header.set(.authorization, 'Bearer ${client.api_key}')
+	return c2
 }

 /////////////NORMALLY NO NEED TO TOUCH
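connection() no longer caches anything on the client (the conn field above is commented out); each call builds a fresh httpconnection with the bearer token already set. A usage sketch, assuming the import paths shown and using only calls visible in this diff:

```v
import freeflowuniverse.herolib.clients.openai
import freeflowuniverse.herolib.core.httpconnection

fn warm_up() !&httpconnection.HTTPConnection {
	mut client := openai.get(name: 'default', create: true)!
	// a fresh connection every call: name, url, retry and the
	// Authorization header all come from the stored config
	return client.connection()!
}
```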
@@ -3,6 +3,7 @@ module postgresql_client
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	postgresql_client_global map[string]&PostgresqlClient

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → PostgresqlClient, jina_global → postgresql_client_global, jina_default → postgresql_client_default and 'context:jina' → 'context:postgresql_client' …]

@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	postgresql_client_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
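set() is still the way to register a config explicitly; it now updates the in-memory map and writes the JSON into redis in one go, so a later get() in a fresh process finds it again. A sketch, assuming PostgresqlClient's other connection fields are optional and its name field is publicly settable:

```v
import freeflowuniverse.herolib.clients.postgresql_client

fn register_analytics() ! {
	db := postgresql_client.PostgresqlClient{
		name: 'analytics'
	}
	postgresql_client.set(db)! // hset into 'context:postgresql_client'

	// later, possibly in another process: loaded back from redis on first use
	db2 := postgresql_client.get(name: 'analytics')!
	println(db2.name)
}
```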
@@ -3,6 +3,7 @@ module qdrant
 import freeflowuniverse.herolib.core.base
 import freeflowuniverse.herolib.core.playbook { PlayBook }
 import freeflowuniverse.herolib.ui.console
+import json

 __global (
 	qdrant_global map[string]&QDrantClient

@@ -14,71 +15,111 @@ __global (
 @[params]
 pub struct ArgsGet {
 pub mut:
-	name string
+	name string = 'default'
+	fromdb bool // will load from filesystem
+	create bool // default will not create if not exist
 }
[… identical factory rework to the jina diff above, substituting Jina → QDrantClient, jina_global → qdrant_global, jina_default → qdrant_default and 'context:jina' → 'context:qdrant' …]

@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
 pub fn switch(name string) {
 	qdrant_default = name
 }
-
-// helpers
-
-@[params]
-pub struct DefaultConfigArgs {
-	instance string = 'default'
-}
@@ -3,6 +3,7 @@ module rclone
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
rclone_global map[string]&RCloneClient
|
rclone_global map[string]&RCloneClient
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
pub fn new(args ArgsGet) !&RCloneClient {
|
||||||
mut args := args_
|
|
||||||
if args.name == '' {
|
|
||||||
args.name = 'default'
|
|
||||||
}
|
|
||||||
return args
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(args_ ArgsGet) !&RCloneClient {
|
|
||||||
mut context := base.context()!
|
|
||||||
mut args := args_get(args_)
|
|
||||||
mut obj := RCloneClient{
|
mut obj := RCloneClient{
|
||||||
name: args.name
|
name: args.name
|
||||||
}
|
}
|
||||||
if args.name !in rclone_global {
|
set(obj)!
|
||||||
if !exists(args)! {
|
return get(name: args.name)!
|
||||||
set(obj)!
|
}
|
||||||
|
|
||||||
|
pub fn get(args ArgsGet) !&RCloneClient {
|
||||||
|
mut context := base.context()!
|
||||||
|
rclone_default = args.name
|
||||||
|
if args.fromdb || args.name !in rclone_global {
|
||||||
|
mut r := context.redis()!
|
||||||
|
if r.hexists('context:rclone', args.name)! {
|
||||||
|
data := r.hget('context:rclone', args.name)!
|
||||||
|
if data.len == 0 {
|
||||||
|
return error('RCloneClient with name: rclone does not exist, prob bug.')
|
||||||
|
}
|
||||||
|
mut obj := json.decode(RCloneClient, data)!
|
||||||
|
set_in_mem(obj)!
|
||||||
} else {
|
} else {
|
||||||
heroscript := context.hero_config_get('rclone', args.name)!
|
if args.create {
|
||||||
mut obj_ := heroscript_loads(heroscript)!
|
new(args)!
|
||||||
set_in_mem(obj_)!
|
} else {
|
||||||
|
return error("RCloneClient with name 'rclone' does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return get(name: args.name)! // no longer from db nor create
|
||||||
}
|
}
|
||||||
return rclone_global[args.name] or {
|
return rclone_global[args.name] or {
|
||||||
println(rclone_global)
|
return error('could not get config for rclone with name:rclone')
|
||||||
// bug if we get here because should be in globals
|
|
||||||
panic('could not get config for rclone with name, is bug:${args.name}')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// register the config for the future
|
// register the config for the future
|
||||||
pub fn set(o RCloneClient) ! {
|
pub fn set(o RCloneClient) ! {
|
||||||
set_in_mem(o)!
|
mut o2 := set_in_mem(o)!
|
||||||
|
rclone_default = o2.name
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
heroscript := heroscript_dumps(o)!
|
mut r := context.redis()!
|
||||||
context.hero_config_set('rclone', o.name, heroscript)!
|
r.hset('context:rclone', o2.name, json.encode(o2))!
|
||||||
}
|
}
|
||||||
|
|
||||||
// does the config exists?
|
// does the config exists?
|
||||||
pub fn exists(args_ ArgsGet) !bool {
|
pub fn exists(args ArgsGet) !bool {
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
mut args := args_get(args_)
|
mut r := context.redis()!
|
||||||
return context.hero_config_exists('rclone', args.name)
|
return r.hexists('context:rclone', args.name)!
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete(args_ ArgsGet) ! {
|
pub fn delete(args ArgsGet) ! {
|
||||||
mut args := args_get(args_)
|
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
context.hero_config_delete('rclone', args.name)!
|
mut r := context.redis()!
|
||||||
if args.name in rclone_global {
|
r.hdel('context:rclone', args.name)!
|
||||||
// del rclone_global[args.name]
|
}
|
||||||
|
|
||||||
|
@[params]
|
||||||
|
pub struct ArgsList {
|
||||||
|
pub mut:
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
}
|
||||||
|
|
||||||
|
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
|
||||||
|
pub fn list(args ArgsList) ![]&RCloneClient {
|
||||||
|
mut res := []&RCloneClient{}
|
||||||
|
mut context := base.context()!
|
||||||
|
if args.fromdb {
|
||||||
|
// reset what is in mem
|
||||||
|
rclone_global = map[string]&RCloneClient{}
|
||||||
|
rclone_default = ''
|
||||||
}
|
}
|
||||||
|
if args.fromdb {
|
||||||
|
mut r := context.redis()!
|
||||||
|
mut l := r.hkeys('context:rclone')!
|
||||||
|
|
||||||
|
for name in l {
|
||||||
|
res << get(name: name, fromdb: true)!
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} else {
|
||||||
|
// load from memory
|
||||||
|
for _, client in rclone_global {
|
||||||
|
res << client
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
// only sets in mem, does not set as config
|
// only sets in mem, does not set as config
|
||||||
fn set_in_mem(o RCloneClient) ! {
|
fn set_in_mem(o RCloneClient) !RCloneClient {
|
||||||
mut o2 := obj_init(o)!
|
mut o2 := obj_init(o)!
|
||||||
rclone_global[o.name] = &o2
|
rclone_global[o2.name] = &o2
|
||||||
rclone_default = o.name
|
rclone_default = o2.name
|
||||||
|
return o2
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn play(mut plbook PlayBook) ! {
|
pub fn play(mut plbook PlayBook) ! {
|
||||||
|
if !plbook.exists(filter: 'rclone.') {
|
||||||
|
return
|
||||||
|
}
|
||||||
mut install_actions := plbook.find(filter: 'rclone.configure')!
|
mut install_actions := plbook.find(filter: 'rclone.configure')!
|
||||||
if install_actions.len > 0 {
|
if install_actions.len > 0 {
|
||||||
for install_action in install_actions {
|
for install_action in install_actions {
|
||||||
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
|
|||||||
|
|
||||||
// switch instance to be used for rclone
|
// switch instance to be used for rclone
|
||||||
pub fn switch(name string) {
|
pub fn switch(name string) {
|
||||||
rclone_default = name
|
|
||||||
}
|
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct DefaultConfigArgs {
|
|
||||||
instance string = 'default'
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ module runpod
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
runpod_global map[string]&RunPod
|
runpod_global map[string]&RunPod
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
pub fn new(args ArgsGet) !&RunPod {
|
||||||
mut args := args_
|
|
||||||
if args.name == '' {
|
|
||||||
args.name = 'default'
|
|
||||||
}
|
|
||||||
return args
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(args_ ArgsGet) !&RunPod {
|
|
||||||
mut context := base.context()!
|
|
||||||
mut args := args_get(args_)
|
|
||||||
mut obj := RunPod{
|
mut obj := RunPod{
|
||||||
name: args.name
|
name: args.name
|
||||||
}
|
}
|
||||||
if args.name !in runpod_global {
|
set(obj)!
|
||||||
if !exists(args)! {
|
return get(name: args.name)!
|
||||||
set(obj)!
|
}
|
||||||
|
|
||||||
|
pub fn get(args ArgsGet) !&RunPod {
|
||||||
|
mut context := base.context()!
|
||||||
|
runpod_default = args.name
|
||||||
|
if args.fromdb || args.name !in runpod_global {
|
||||||
|
mut r := context.redis()!
|
||||||
|
if r.hexists('context:runpod', args.name)! {
|
||||||
|
data := r.hget('context:runpod', args.name)!
|
||||||
|
if data.len == 0 {
|
||||||
|
return error('RunPod with name: runpod does not exist, prob bug.')
|
||||||
|
}
|
||||||
|
mut obj := json.decode(RunPod, data)!
|
||||||
|
set_in_mem(obj)!
|
||||||
} else {
|
} else {
|
||||||
heroscript := context.hero_config_get('runpod', args.name)!
|
if args.create {
|
||||||
mut obj_ := heroscript_loads(heroscript)!
|
new(args)!
|
||||||
set_in_mem(obj_)!
|
} else {
|
||||||
|
return error("RunPod with name 'runpod' does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return get(name: args.name)! // no longer from db nor create
|
||||||
}
|
}
|
||||||
return runpod_global[args.name] or {
|
return runpod_global[args.name] or {
|
||||||
println(runpod_global)
|
return error('could not get config for runpod with name:runpod')
|
||||||
// bug if we get here because should be in globals
|
|
||||||
panic('could not get config for runpod with name, is bug:${args.name}')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// register the config for the future
|
// register the config for the future
|
||||||
pub fn set(o RunPod) ! {
|
pub fn set(o RunPod) ! {
|
||||||
set_in_mem(o)!
|
mut o2 := set_in_mem(o)!
|
||||||
|
runpod_default = o2.name
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
heroscript := heroscript_dumps(o)!
|
mut r := context.redis()!
|
||||||
context.hero_config_set('runpod', o.name, heroscript)!
|
r.hset('context:runpod', o2.name, json.encode(o2))!
|
||||||
}
|
}
|
||||||
|
|
||||||
// does the config exists?
|
// does the config exists?
|
||||||
pub fn exists(args_ ArgsGet) !bool {
|
pub fn exists(args ArgsGet) !bool {
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
mut args := args_get(args_)
|
mut r := context.redis()!
|
||||||
return context.hero_config_exists('runpod', args.name)
|
return r.hexists('context:runpod', args.name)!
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete(args_ ArgsGet) ! {
|
pub fn delete(args ArgsGet) ! {
|
||||||
mut args := args_get(args_)
|
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
context.hero_config_delete('runpod', args.name)!
|
mut r := context.redis()!
|
||||||
if args.name in runpod_global {
|
r.hdel('context:runpod', args.name)!
|
||||||
// del runpod_global[args.name]
|
}
|
||||||
|
|
||||||
|
@[params]
|
||||||
|
pub struct ArgsList {
|
||||||
|
pub mut:
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
}
|
||||||
|
|
||||||
|
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
|
||||||
|
pub fn list(args ArgsList) ![]&RunPod {
|
||||||
|
mut res := []&RunPod{}
|
||||||
|
mut context := base.context()!
|
||||||
|
if args.fromdb {
|
||||||
|
// reset what is in mem
|
||||||
|
runpod_global = map[string]&RunPod{}
|
||||||
|
runpod_default = ''
|
||||||
}
|
}
|
||||||
|
if args.fromdb {
|
||||||
|
mut r := context.redis()!
|
||||||
|
mut l := r.hkeys('context:runpod')!
|
||||||
|
|
||||||
|
for name in l {
|
||||||
|
res << get(name: name, fromdb: true)!
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} else {
|
||||||
|
// load from memory
|
||||||
|
for _, client in runpod_global {
|
||||||
|
res << client
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
// only sets in mem, does not set as config
|
// only sets in mem, does not set as config
|
||||||
fn set_in_mem(o RunPod) ! {
|
fn set_in_mem(o RunPod) !RunPod {
|
||||||
mut o2 := obj_init(o)!
|
mut o2 := obj_init(o)!
|
||||||
runpod_global[o.name] = &o2
|
runpod_global[o2.name] = &o2
|
||||||
runpod_default = o.name
|
runpod_default = o2.name
|
||||||
|
return o2
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn play(mut plbook PlayBook) ! {
|
pub fn play(mut plbook PlayBook) ! {
|
||||||
|
if !plbook.exists(filter: 'runpod.') {
|
||||||
|
return
|
||||||
|
}
|
||||||
mut install_actions := plbook.find(filter: 'runpod.configure')!
|
mut install_actions := plbook.find(filter: 'runpod.configure')!
|
||||||
if install_actions.len > 0 {
|
if install_actions.len > 0 {
|
||||||
for install_action in install_actions {
|
for install_action in install_actions {
|
||||||
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
|
|||||||
pub fn switch(name string) {
|
pub fn switch(name string) {
|
||||||
runpod_default = name
|
runpod_default = name
|
||||||
}
|
}
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct DefaultConfigArgs {
|
|
||||||
instance string = 'default'
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ module sendgrid
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
sendgrid_global map[string]&SendGrid
|
sendgrid_global map[string]&SendGrid
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
pub fn new(args ArgsGet) !&SendGrid {
|
||||||
mut args := args_
|
|
||||||
if args.name == '' {
|
|
||||||
args.name = 'default'
|
|
||||||
}
|
|
||||||
return args
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(args_ ArgsGet) !&SendGrid {
|
|
||||||
mut context := base.context()!
|
|
||||||
mut args := args_get(args_)
|
|
||||||
mut obj := SendGrid{
|
mut obj := SendGrid{
|
||||||
name: args.name
|
name: args.name
|
||||||
}
|
}
|
||||||
if args.name !in sendgrid_global {
|
set(obj)!
|
||||||
if !exists(args)! {
|
return get(name: args.name)!
|
||||||
set(obj)!
|
}
|
||||||
|
|
||||||
|
pub fn get(args ArgsGet) !&SendGrid {
|
||||||
|
mut context := base.context()!
|
||||||
|
sendgrid_default = args.name
|
||||||
|
if args.fromdb || args.name !in sendgrid_global {
|
||||||
|
mut r := context.redis()!
|
||||||
|
if r.hexists('context:sendgrid', args.name)! {
|
||||||
|
data := r.hget('context:sendgrid', args.name)!
|
||||||
|
if data.len == 0 {
|
||||||
|
return error('SendGrid with name: sendgrid does not exist, prob bug.')
|
||||||
|
}
|
||||||
|
mut obj := json.decode(SendGrid, data)!
|
||||||
|
set_in_mem(obj)!
|
||||||
} else {
|
} else {
|
||||||
heroscript := context.hero_config_get('sendgrid', args.name)!
|
if args.create {
|
||||||
mut obj_ := heroscript_loads(heroscript)!
|
new(args)!
|
||||||
set_in_mem(obj_)!
|
} else {
|
||||||
|
return error("SendGrid with name 'sendgrid' does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return get(name: args.name)! // no longer from db nor create
|
||||||
}
|
}
|
||||||
return sendgrid_global[args.name] or {
|
return sendgrid_global[args.name] or {
|
||||||
println(sendgrid_global)
|
return error('could not get config for sendgrid with name:sendgrid')
|
||||||
// bug if we get here because should be in globals
|
|
||||||
panic('could not get config for sendgrid with name, is bug:${args.name}')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// register the config for the future
|
// register the config for the future
|
||||||
pub fn set(o SendGrid) ! {
|
pub fn set(o SendGrid) ! {
|
||||||
set_in_mem(o)!
|
mut o2 := set_in_mem(o)!
|
||||||
|
sendgrid_default = o2.name
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
heroscript := heroscript_dumps(o)!
|
mut r := context.redis()!
|
||||||
context.hero_config_set('sendgrid', o.name, heroscript)!
|
r.hset('context:sendgrid', o2.name, json.encode(o2))!
|
||||||
}
|
}
|
||||||
|
|
||||||
// does the config exists?
|
// does the config exists?
|
||||||
pub fn exists(args_ ArgsGet) !bool {
|
pub fn exists(args ArgsGet) !bool {
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
mut args := args_get(args_)
|
mut r := context.redis()!
|
||||||
return context.hero_config_exists('sendgrid', args.name)
|
return r.hexists('context:sendgrid', args.name)!
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete(args_ ArgsGet) ! {
|
pub fn delete(args ArgsGet) ! {
|
||||||
mut args := args_get(args_)
|
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
context.hero_config_delete('sendgrid', args.name)!
|
mut r := context.redis()!
|
||||||
if args.name in sendgrid_global {
|
r.hdel('context:sendgrid', args.name)!
|
||||||
// del sendgrid_global[args.name]
|
}
|
||||||
|
|
||||||
|
@[params]
|
||||||
|
pub struct ArgsList {
|
||||||
|
pub mut:
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
}
|
||||||
|
|
||||||
|
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
|
||||||
|
pub fn list(args ArgsList) ![]&SendGrid {
|
||||||
|
mut res := []&SendGrid{}
|
||||||
|
mut context := base.context()!
|
||||||
|
if args.fromdb {
|
||||||
|
// reset what is in mem
|
||||||
|
sendgrid_global = map[string]&SendGrid{}
|
||||||
|
sendgrid_default = ''
|
||||||
}
|
}
|
||||||
|
if args.fromdb {
|
||||||
|
mut r := context.redis()!
|
||||||
|
mut l := r.hkeys('context:sendgrid')!
|
||||||
|
|
||||||
|
for name in l {
|
||||||
|
res << get(name: name, fromdb: true)!
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} else {
|
||||||
|
// load from memory
|
||||||
|
for _, client in sendgrid_global {
|
||||||
|
res << client
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
// only sets in mem, does not set as config
|
// only sets in mem, does not set as config
|
||||||
fn set_in_mem(o SendGrid) ! {
|
fn set_in_mem(o SendGrid) !SendGrid {
|
||||||
mut o2 := obj_init(o)!
|
mut o2 := obj_init(o)!
|
||||||
sendgrid_global[o.name] = &o2
|
sendgrid_global[o2.name] = &o2
|
||||||
sendgrid_default = o.name
|
sendgrid_default = o2.name
|
||||||
|
return o2
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn play(mut plbook PlayBook) ! {
|
pub fn play(mut plbook PlayBook) ! {
|
||||||
|
if !plbook.exists(filter: 'sendgrid.') {
|
||||||
|
return
|
||||||
|
}
|
||||||
mut install_actions := plbook.find(filter: 'sendgrid.configure')!
|
mut install_actions := plbook.find(filter: 'sendgrid.configure')!
|
||||||
if install_actions.len > 0 {
|
if install_actions.len > 0 {
|
||||||
for install_action in install_actions {
|
for install_action in install_actions {
|
||||||
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
|
|||||||
|
|
||||||
// switch instance to be used for sendgrid
|
// switch instance to be used for sendgrid
|
||||||
pub fn switch(name string) {
|
pub fn switch(name string) {
|
||||||
sendgrid_default = name
|
|
||||||
}
|
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct DefaultConfigArgs {
|
|
||||||
instance string = 'default'
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ module vastai
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
vastai_global map[string]&VastAI
|
vastai_global map[string]&VastAI
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
pub fn new(args ArgsGet) !&VastAI {
|
||||||
mut args := args_
|
|
||||||
if args.name == '' {
|
|
||||||
args.name = 'default'
|
|
||||||
}
|
|
||||||
return args
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(args_ ArgsGet) !&VastAI {
|
|
||||||
mut context := base.context()!
|
|
||||||
mut args := args_get(args_)
|
|
||||||
mut obj := VastAI{
|
mut obj := VastAI{
|
||||||
name: args.name
|
name: args.name
|
||||||
}
|
}
|
||||||
if args.name !in vastai_global {
|
set(obj)!
|
||||||
if !exists(args)! {
|
return get(name: args.name)!
|
||||||
set(obj)!
|
}
|
||||||
|
|
||||||
|
pub fn get(args ArgsGet) !&VastAI {
|
||||||
|
mut context := base.context()!
|
||||||
|
vastai_default = args.name
|
||||||
|
if args.fromdb || args.name !in vastai_global {
|
||||||
|
mut r := context.redis()!
|
||||||
|
if r.hexists('context:vastai', args.name)! {
|
||||||
|
data := r.hget('context:vastai', args.name)!
|
||||||
|
if data.len == 0 {
|
||||||
|
return error('VastAI with name: vastai does not exist, prob bug.')
|
||||||
|
}
|
||||||
|
mut obj := json.decode(VastAI, data)!
|
||||||
|
set_in_mem(obj)!
|
||||||
} else {
|
} else {
|
||||||
heroscript := context.hero_config_get('vastai', args.name)!
|
if args.create {
|
||||||
mut obj_ := heroscript_loads(heroscript)!
|
new(args)!
|
||||||
set_in_mem(obj_)!
|
} else {
|
||||||
|
return error("VastAI with name 'vastai' does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return get(name: args.name)! // no longer from db nor create
|
||||||
}
|
}
|
||||||
return vastai_global[args.name] or {
|
return vastai_global[args.name] or {
|
||||||
println(vastai_global)
|
return error('could not get config for vastai with name:vastai')
|
||||||
// bug if we get here because should be in globals
|
|
||||||
panic('could not get config for vastai with name, is bug:${args.name}')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// register the config for the future
|
// register the config for the future
|
||||||
pub fn set(o VastAI) ! {
|
pub fn set(o VastAI) ! {
|
||||||
set_in_mem(o)!
|
mut o2 := set_in_mem(o)!
|
||||||
|
vastai_default = o2.name
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
heroscript := heroscript_dumps(o)!
|
mut r := context.redis()!
|
||||||
context.hero_config_set('vastai', o.name, heroscript)!
|
r.hset('context:vastai', o2.name, json.encode(o2))!
|
||||||
}
|
}
|
||||||
|
|
||||||
// does the config exists?
|
// does the config exists?
|
||||||
pub fn exists(args_ ArgsGet) !bool {
|
pub fn exists(args ArgsGet) !bool {
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
mut args := args_get(args_)
|
mut r := context.redis()!
|
||||||
return context.hero_config_exists('vastai', args.name)
|
return r.hexists('context:vastai', args.name)!
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete(args_ ArgsGet) ! {
|
pub fn delete(args ArgsGet) ! {
|
||||||
mut args := args_get(args_)
|
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
context.hero_config_delete('vastai', args.name)!
|
mut r := context.redis()!
|
||||||
if args.name in vastai_global {
|
r.hdel('context:vastai', args.name)!
|
||||||
// del vastai_global[args.name]
|
}
|
||||||
|
|
||||||
|
@[params]
|
||||||
|
pub struct ArgsList {
|
||||||
|
pub mut:
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
}
|
||||||
|
|
||||||
|
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
|
||||||
|
pub fn list(args ArgsList) ![]&VastAI {
|
||||||
|
mut res := []&VastAI{}
|
||||||
|
mut context := base.context()!
|
||||||
|
if args.fromdb {
|
||||||
|
// reset what is in mem
|
||||||
|
vastai_global = map[string]&VastAI{}
|
||||||
|
vastai_default = ''
|
||||||
}
|
}
|
||||||
|
if args.fromdb {
|
||||||
|
mut r := context.redis()!
|
||||||
|
mut l := r.hkeys('context:vastai')!
|
||||||
|
|
||||||
|
for name in l {
|
||||||
|
res << get(name: name, fromdb: true)!
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} else {
|
||||||
|
// load from memory
|
||||||
|
for _, client in vastai_global {
|
||||||
|
res << client
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
// only sets in mem, does not set as config
|
// only sets in mem, does not set as config
|
||||||
fn set_in_mem(o VastAI) ! {
|
fn set_in_mem(o VastAI) !VastAI {
|
||||||
mut o2 := obj_init(o)!
|
mut o2 := obj_init(o)!
|
||||||
vastai_global[o.name] = &o2
|
vastai_global[o2.name] = &o2
|
||||||
vastai_default = o.name
|
vastai_default = o2.name
|
||||||
|
return o2
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn play(mut plbook PlayBook) ! {
|
pub fn play(mut plbook PlayBook) ! {
|
||||||
|
if !plbook.exists(filter: 'vastai.') {
|
||||||
|
return
|
||||||
|
}
|
||||||
mut install_actions := plbook.find(filter: 'vastai.configure')!
|
mut install_actions := plbook.find(filter: 'vastai.configure')!
|
||||||
if install_actions.len > 0 {
|
if install_actions.len > 0 {
|
||||||
for install_action in install_actions {
|
for install_action in install_actions {
|
||||||
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
|
|||||||
|
|
||||||
// switch instance to be used for vastai
|
// switch instance to be used for vastai
|
||||||
pub fn switch(name string) {
|
pub fn switch(name string) {
|
||||||
vastai_default = name
|
|
||||||
}
|
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct DefaultConfigArgs {
|
|
||||||
instance string = 'default'
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ module wireguard
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
wireguard_global map[string]&WireGuard
|
wireguard_global map[string]&WireGuard
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
pub fn new(args ArgsGet) !&WireGuard {
|
||||||
mut args := args_
|
|
||||||
if args.name == '' {
|
|
||||||
args.name = 'default'
|
|
||||||
}
|
|
||||||
return args
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(args_ ArgsGet) !&WireGuard {
|
|
||||||
mut context := base.context()!
|
|
||||||
mut args := args_get(args_)
|
|
||||||
mut obj := WireGuard{
|
mut obj := WireGuard{
|
||||||
name: args.name
|
name: args.name
|
||||||
}
|
}
|
||||||
if args.name !in wireguard_global {
|
set(obj)!
|
||||||
if !exists(args)! {
|
return get(name: args.name)!
|
||||||
set(obj)!
|
}
|
||||||
|
|
||||||
|
pub fn get(args ArgsGet) !&WireGuard {
|
||||||
|
mut context := base.context()!
|
||||||
|
wireguard_default = args.name
|
||||||
|
if args.fromdb || args.name !in wireguard_global {
|
||||||
|
mut r := context.redis()!
|
||||||
|
if r.hexists('context:wireguard', args.name)! {
|
||||||
|
data := r.hget('context:wireguard', args.name)!
|
||||||
|
if data.len == 0 {
|
||||||
|
return error('WireGuard with name: wireguard does not exist, prob bug.')
|
||||||
|
}
|
||||||
|
mut obj := json.decode(WireGuard, data)!
|
||||||
|
set_in_mem(obj)!
|
||||||
} else {
|
} else {
|
||||||
heroscript := context.hero_config_get('wireguard', args.name)!
|
if args.create {
|
||||||
mut obj_ := heroscript_loads(heroscript)!
|
new(args)!
|
||||||
set_in_mem(obj_)!
|
} else {
|
||||||
|
return error("WireGuard with name 'wireguard' does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return get(name: args.name)! // no longer from db nor create
|
||||||
}
|
}
|
||||||
return wireguard_global[args.name] or {
|
return wireguard_global[args.name] or {
|
||||||
println(wireguard_global)
|
return error('could not get config for wireguard with name:wireguard')
|
||||||
// bug if we get here because should be in globals
|
|
||||||
panic('could not get config for wireguard with name, is bug:${args.name}')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// register the config for the future
|
// register the config for the future
|
||||||
pub fn set(o WireGuard) ! {
|
pub fn set(o WireGuard) ! {
|
||||||
set_in_mem(o)!
|
mut o2 := set_in_mem(o)!
|
||||||
|
wireguard_default = o2.name
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
heroscript := heroscript_dumps(o)!
|
mut r := context.redis()!
|
||||||
context.hero_config_set('wireguard', o.name, heroscript)!
|
r.hset('context:wireguard', o2.name, json.encode(o2))!
|
||||||
}
|
}
|
||||||
|
|
||||||
// does the config exists?
|
// does the config exists?
|
||||||
pub fn exists(args_ ArgsGet) !bool {
|
pub fn exists(args ArgsGet) !bool {
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
mut args := args_get(args_)
|
mut r := context.redis()!
|
||||||
return context.hero_config_exists('wireguard', args.name)
|
return r.hexists('context:wireguard', args.name)!
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete(args_ ArgsGet) ! {
|
pub fn delete(args ArgsGet) ! {
|
||||||
mut args := args_get(args_)
|
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
context.hero_config_delete('wireguard', args.name)!
|
mut r := context.redis()!
|
||||||
if args.name in wireguard_global {
|
r.hdel('context:wireguard', args.name)!
|
||||||
// del wireguard_global[args.name]
|
}
|
||||||
|
|
||||||
|
@[params]
|
||||||
|
pub struct ArgsList {
|
||||||
|
pub mut:
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
}
|
||||||
|
|
||||||
|
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
|
||||||
|
pub fn list(args ArgsList) ![]&WireGuard {
|
||||||
|
mut res := []&WireGuard{}
|
||||||
|
mut context := base.context()!
|
||||||
|
if args.fromdb {
|
||||||
|
// reset what is in mem
|
||||||
|
wireguard_global = map[string]&WireGuard{}
|
||||||
|
wireguard_default = ''
|
||||||
}
|
}
|
||||||
|
if args.fromdb {
|
||||||
|
mut r := context.redis()!
|
||||||
|
mut l := r.hkeys('context:wireguard')!
|
||||||
|
|
||||||
|
for name in l {
|
||||||
|
res << get(name: name, fromdb: true)!
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} else {
|
||||||
|
// load from memory
|
||||||
|
for _, client in wireguard_global {
|
||||||
|
res << client
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
// only sets in mem, does not set as config
|
// only sets in mem, does not set as config
|
||||||
fn set_in_mem(o WireGuard) ! {
|
fn set_in_mem(o WireGuard) !WireGuard {
|
||||||
mut o2 := obj_init(o)!
|
mut o2 := obj_init(o)!
|
||||||
wireguard_global[o.name] = &o2
|
wireguard_global[o2.name] = &o2
|
||||||
wireguard_default = o.name
|
wireguard_default = o2.name
|
||||||
|
return o2
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn play(mut plbook PlayBook) ! {
|
pub fn play(mut plbook PlayBook) ! {
|
||||||
|
if !plbook.exists(filter: 'wireguard.') {
|
||||||
|
return
|
||||||
|
}
|
||||||
mut install_actions := plbook.find(filter: 'wireguard.configure')!
|
mut install_actions := plbook.find(filter: 'wireguard.configure')!
|
||||||
if install_actions.len > 0 {
|
if install_actions.len > 0 {
|
||||||
for install_action in install_actions {
|
for install_action in install_actions {
|
||||||
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
|
|||||||
pub fn switch(name string) {
|
pub fn switch(name string) {
|
||||||
wireguard_default = name
|
wireguard_default = name
|
||||||
}
|
}
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct DefaultConfigArgs {
|
|
||||||
instance string = 'default'
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ module zerodb_client
|
|||||||
import freeflowuniverse.herolib.core.base
|
import freeflowuniverse.herolib.core.base
|
||||||
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
import freeflowuniverse.herolib.core.playbook { PlayBook }
|
||||||
import freeflowuniverse.herolib.ui.console
|
import freeflowuniverse.herolib.ui.console
|
||||||
|
import json
|
||||||
|
|
||||||
__global (
|
__global (
|
||||||
zerodb_client_global map[string]&ZeroDBClient
|
zerodb_client_global map[string]&ZeroDBClient
|
||||||
@@ -14,71 +15,111 @@ __global (
|
|||||||
@[params]
|
@[params]
|
||||||
pub struct ArgsGet {
|
pub struct ArgsGet {
|
||||||
pub mut:
|
pub mut:
|
||||||
name string
|
name string = 'default'
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
create bool // default will not create if not exist
|
||||||
}
|
}
|
||||||
|
|
||||||
fn args_get(args_ ArgsGet) ArgsGet {
|
pub fn new(args ArgsGet) !&ZeroDBClient {
|
||||||
mut args := args_
|
|
||||||
if args.name == '' {
|
|
||||||
args.name = 'default'
|
|
||||||
}
|
|
||||||
return args
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(args_ ArgsGet) !&ZeroDBClient {
|
|
||||||
mut context := base.context()!
|
|
||||||
mut args := args_get(args_)
|
|
||||||
mut obj := ZeroDBClient{
|
mut obj := ZeroDBClient{
|
||||||
name: args.name
|
name: args.name
|
||||||
}
|
}
|
||||||
if args.name !in zerodb_client_global {
|
set(obj)!
|
||||||
if !exists(args)! {
|
return get(name: args.name)!
|
||||||
set(obj)!
|
}
|
||||||
|
|
||||||
|
pub fn get(args ArgsGet) !&ZeroDBClient {
|
||||||
|
mut context := base.context()!
|
||||||
|
zerodb_client_default = args.name
|
||||||
|
if args.fromdb || args.name !in zerodb_client_global {
|
||||||
|
mut r := context.redis()!
|
||||||
|
if r.hexists('context:zerodb_client', args.name)! {
|
||||||
|
data := r.hget('context:zerodb_client', args.name)!
|
||||||
|
if data.len == 0 {
|
||||||
|
return error('ZeroDBClient with name: zerodb_client does not exist, prob bug.')
|
||||||
|
}
|
||||||
|
mut obj := json.decode(ZeroDBClient, data)!
|
||||||
|
set_in_mem(obj)!
|
||||||
} else {
|
} else {
|
||||||
heroscript := context.hero_config_get('zerodb_client', args.name)!
|
if args.create {
|
||||||
mut obj_ := heroscript_loads(heroscript)!
|
new(args)!
|
||||||
set_in_mem(obj_)!
|
} else {
|
||||||
|
return error("ZeroDBClient with name 'zerodb_client' does not exist")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return get(name: args.name)! // no longer from db nor create
|
||||||
}
|
}
|
||||||
return zerodb_client_global[args.name] or {
|
return zerodb_client_global[args.name] or {
|
||||||
println(zerodb_client_global)
|
return error('could not get config for zerodb_client with name:zerodb_client')
|
||||||
// bug if we get here because should be in globals
|
|
||||||
panic('could not get config for zerodb_client with name, is bug:${args.name}')
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// register the config for the future
|
// register the config for the future
|
||||||
pub fn set(o ZeroDBClient) ! {
|
pub fn set(o ZeroDBClient) ! {
|
||||||
set_in_mem(o)!
|
mut o2 := set_in_mem(o)!
|
||||||
|
zerodb_client_default = o2.name
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
heroscript := heroscript_dumps(o)!
|
mut r := context.redis()!
|
||||||
context.hero_config_set('zerodb_client', o.name, heroscript)!
|
r.hset('context:zerodb_client', o2.name, json.encode(o2))!
|
||||||
}
|
}
|
||||||
|
|
||||||
// does the config exists?
|
// does the config exists?
|
||||||
pub fn exists(args_ ArgsGet) !bool {
|
pub fn exists(args ArgsGet) !bool {
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
mut args := args_get(args_)
|
mut r := context.redis()!
|
||||||
return context.hero_config_exists('zerodb_client', args.name)
|
return r.hexists('context:zerodb_client', args.name)!
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn delete(args_ ArgsGet) ! {
|
pub fn delete(args ArgsGet) ! {
|
||||||
mut args := args_get(args_)
|
|
||||||
mut context := base.context()!
|
mut context := base.context()!
|
||||||
context.hero_config_delete('zerodb_client', args.name)!
|
mut r := context.redis()!
|
||||||
if args.name in zerodb_client_global {
|
r.hdel('context:zerodb_client', args.name)!
|
||||||
// del zerodb_client_global[args.name]
|
}
|
||||||
|
|
||||||
|
@[params]
|
||||||
|
pub struct ArgsList {
|
||||||
|
pub mut:
|
||||||
|
fromdb bool // will load from filesystem
|
||||||
|
}
|
||||||
|
|
||||||
|
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
|
||||||
|
pub fn list(args ArgsList) ![]&ZeroDBClient {
|
||||||
|
mut res := []&ZeroDBClient{}
|
||||||
|
mut context := base.context()!
|
||||||
|
if args.fromdb {
|
||||||
|
// reset what is in mem
|
||||||
|
zerodb_client_global = map[string]&ZeroDBClient{}
|
||||||
|
zerodb_client_default = ''
|
||||||
}
|
}
|
||||||
|
if args.fromdb {
|
||||||
|
mut r := context.redis()!
|
||||||
|
mut l := r.hkeys('context:zerodb_client')!
|
||||||
|
|
||||||
|
for name in l {
|
||||||
|
res << get(name: name, fromdb: true)!
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} else {
|
||||||
|
// load from memory
|
||||||
|
for _, client in zerodb_client_global {
|
||||||
|
res << client
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
// only sets in mem, does not set as config
|
// only sets in mem, does not set as config
|
||||||
fn set_in_mem(o ZeroDBClient) ! {
|
fn set_in_mem(o ZeroDBClient) !ZeroDBClient {
|
||||||
mut o2 := obj_init(o)!
|
mut o2 := obj_init(o)!
|
||||||
zerodb_client_global[o.name] = &o2
|
zerodb_client_global[o2.name] = &o2
|
||||||
zerodb_client_default = o.name
|
zerodb_client_default = o2.name
|
||||||
|
return o2
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn play(mut plbook PlayBook) ! {
|
pub fn play(mut plbook PlayBook) ! {
|
||||||
|
if !plbook.exists(filter: 'zerodb_client.') {
|
||||||
|
return
|
||||||
|
}
|
||||||
mut install_actions := plbook.find(filter: 'zerodb_client.configure')!
|
mut install_actions := plbook.find(filter: 'zerodb_client.configure')!
|
||||||
if install_actions.len > 0 {
|
if install_actions.len > 0 {
|
||||||
for install_action in install_actions {
|
for install_action in install_actions {
|
||||||
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
|
|||||||
|
|
||||||
// switch instance to be used for zerodb_client
|
// switch instance to be used for zerodb_client
|
||||||
pub fn switch(name string) {
|
pub fn switch(name string) {
|
||||||
zerodb_client_default = name
|
|
||||||
}
|
|
||||||
|
|
||||||
// helpers
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct DefaultConfigArgs {
|
|
||||||
instance string = 'default'
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
|
|
||||||
!!hero_code.generate_client
|
!!hero_code.generate_client
|
||||||
name:'zinit_rpc'
|
name:'zinit'
|
||||||
classname:'ZinitRPC'
|
classname:'ZinitRPC'
|
||||||
singleton:1
|
singleton:0
|
||||||
default:0
|
default:1
|
||||||
hasconfig:1
|
hasconfig:1
|
||||||
reset:0
|
reset:0
|
||||||
@@ -1,152 +1,219 @@
|
|||||||
# Zinit OpenRPC Client
|
# Zinit RPC Client
|
||||||
|
|
||||||
This is a V language client for the Zinit service manager, implementing the OpenRPC specification.
|
This is a V language client for the Zinit process manager, implementing the JSON-RPC API specification for service management operations.
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
Zinit is a service manager that allows you to manage and monitor services on your system. This client provides a comprehensive API to interact with Zinit via its JSON-RPC interface.
|
Zinit is a process manager that provides service monitoring, dependency management, and system control capabilities. This client provides a comprehensive API to interact with Zinit via its JSON-RPC interface for administrative tasks such as:
|
||||||
|
|
||||||
|
- Service lifecycle management (start, stop, monitor, forget)
|
||||||
|
- Service configuration management (create, delete, get)
|
||||||
|
- Service status and statistics monitoring
|
||||||
|
- System operations (shutdown, reboot, HTTP server control)
|
||||||
|
- Log streaming and monitoring
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- Complete implementation of all methods in the Zinit OpenRPC specification
|
- **✅ 100% API Coverage**: Complete implementation of all 18 methods in the Zinit JSON-RPC specification
|
||||||
- Type-safe API with proper error handling
|
- **✅ Production Tested**: All methods tested and working against real Zinit instances
|
||||||
- Comprehensive documentation
|
- **✅ Type-safe API**: Proper V struct definitions with comprehensive error handling
|
||||||
- Helper functions for common operations
|
- **✅ Subscription Support**: Proper handling of streaming/subscription methods
|
||||||
- Example code for all operations
|
- **✅ Unix Socket Transport**: Reliable communication via Unix domain sockets
|
||||||
|
- **✅ Comprehensive Documentation**: Extensive documentation with working examples
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
### Basic Example
|
### Basic Example
|
||||||
|
|
||||||
```v
|
```v
|
||||||
import freeflowuniverse.heroweb.clients.zinit
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
|
||||||
fn main() {
|
// Create a new client
|
||||||
// Create a new client with the default socket path
|
mut client := zinit.get(create:true)!
|
||||||
mut client := zinit.new_default_client()
|
|
||||||
|
// List all services
|
||||||
// List all services
|
services := client.service_list()!
|
||||||
services := client.service_list() or {
|
for service_name, state in services {
|
||||||
println('Error: ${err}')
|
println('Service: ${service_name}, State: ${state}')
|
||||||
return
|
}
|
||||||
}
|
|
||||||
|
// Get detailed status of a specific service
|
||||||
// Print the services
|
status := client.service_status('redis')!
|
||||||
for name, state in services {
|
println('Service: ${status.name}')
|
||||||
println('${name}: ${state}')
|
println('PID: ${status.pid}')
|
||||||
}
|
println('State: ${status.state}')
|
||||||
|
println('Target: ${status.target}')
|
||||||
// Get status of a specific service
|
|
||||||
if services.len > 0 {
|
// Start a service
|
||||||
service_name := services.keys()[0]
|
client.service_start('redis')!
|
||||||
status := client.service_status(service_name) or {
|
|
||||||
println('Error: ${err}')
|
// Stop a service
|
||||||
return
|
client.service_stop('redis')!
|
||||||
}
|
```
|
||||||
|
|
||||||
println('Service: ${status.name}')
|
### Service Configuration Management
|
||||||
println('State: ${status.state}')
|
|
||||||
println('PID: ${status.pid}')
|
```v
|
||||||
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
|
||||||
|
mut client := zinit.new_client()!
|
||||||
|
|
||||||
|
// Create a new service configuration
|
||||||
|
config := zinit.ServiceConfig{
|
||||||
|
exec: '/usr/bin/redis-server'
|
||||||
|
oneshot: false
|
||||||
|
log: 'stdout'
|
||||||
|
env: {
|
||||||
|
'REDIS_PORT': '6379'
|
||||||
|
'REDIS_HOST': '0.0.0.0'
|
||||||
}
|
}
|
||||||
|
shutdown_timeout: 30
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the service
|
||||||
|
path := client.service_create('redis', config)!
|
||||||
|
println('Service created at: ${path}')
|
||||||
|
|
||||||
|
// Get service configuration
|
||||||
|
retrieved_config := client.service_get('redis')!
|
||||||
|
println('Service exec: ${retrieved_config.exec}')
|
||||||
|
|
||||||
|
// Delete service configuration
|
||||||
|
result := client.service_delete('redis')!
|
||||||
|
println('Delete result: ${result}')
|
||||||
|
```
|
||||||
|
|
||||||
|
### Service Statistics
|
||||||
|
|
||||||
|
```v
|
||||||
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
|
||||||
|
mut client := zinit.new_client()!
|
||||||
|
|
||||||
|
// Get service statistics
|
||||||
|
stats := client.service_stats('redis')!
|
||||||
|
println('Service: ${stats.name}')
|
||||||
|
println('PID: ${stats.pid}')
|
||||||
|
println('Memory Usage: ${stats.memory_usage} bytes')
|
||||||
|
println('CPU Usage: ${stats.cpu_usage}%')
|
||||||
|
|
||||||
|
// Print child process statistics
|
||||||
|
for child in stats.children {
|
||||||
|
println('Child PID: ${child.pid}, Memory: ${child.memory_usage}, CPU: ${child.cpu_usage}%')
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Creating and Managing Services
|
### Log Streaming
|
||||||
|
|
||||||
```v
|
```v
|
||||||
import freeflowuniverse.heroweb.clients.zinit
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
|
||||||
fn main() {
|
mut client := zinit.new_client()!
|
||||||
mut client := zinit.new_default_client()
|
|
||||||
|
// Get current logs for all services
|
||||||
// Create a new service configuration
|
logs := client.stream_current_logs(name: '')!
|
||||||
config := zinit.ServiceConfig{
|
for log in logs {
|
||||||
exec: '/bin/echo "Hello, World!"'
|
println(log)
|
||||||
oneshot: true
|
|
||||||
log: zinit.log_stdout
|
|
||||||
env: {
|
|
||||||
'ENV_VAR': 'value'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create the service
|
|
||||||
client.service_create('hello', config) or {
|
|
||||||
println('Error creating service: ${err}')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Start the service
|
|
||||||
client.service_start('hello') or {
|
|
||||||
println('Error starting service: ${err}')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the service logs
|
|
||||||
logs := client.stream_current_logs('hello') or {
|
|
||||||
println('Error getting logs: ${err}')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
for log in logs {
|
|
||||||
println(log)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clean up
|
|
||||||
client.service_stop('hello') or {}
|
|
||||||
client.service_forget('hello') or {}
|
|
||||||
client.service_delete('hello') or {}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get current logs for a specific service
|
||||||
|
redis_logs := client.stream_current_logs(name: 'redis')!
|
||||||
|
for log in redis_logs {
|
||||||
|
println('Redis: ${log}')
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subscribe to log stream (returns subscription ID)
|
||||||
|
subscription_id := client.stream_subscribe_logs(name: 'redis')!
|
||||||
|
println('Subscribed to logs with ID: ${subscription_id}')
|
||||||
```
|
```
|
||||||
|
|
||||||
## API Reference
|
## API Reference
|
||||||
|
|
||||||
### Client Creation
|
### Service Management Methods
|
||||||
|
|
||||||
- `new_client(socket_path string) &Client` - Create a new client with a custom socket path
|
- `service_list()` - List all services and their states
|
||||||
- `new_default_client() &Client` - Create a new client with the default socket path (`/tmp/zinit.sock`)
|
- `service_status(name)` - Get detailed status of a service
|
||||||
|
- `service_start(name)` - Start a service
|
||||||
|
- `service_stop(name)` - Stop a service
|
||||||
|
- `service_monitor(name)` - Start monitoring a service
|
||||||
|
- `service_forget(name)` - Stop monitoring a service
|
||||||
|
- `service_kill(name, signal)` - Send signal to a service
|
||||||
|
|
||||||
### Service Management
|
### Service Configuration Methods
|
||||||
|
|
||||||
- `service_list() !map[string]string` - List all services and their states
|
- `service_create(name, config)` - Create service configuration
|
||||||
- `service_status(name string) !ServiceStatus` - Get detailed status of a service
|
- `service_delete(name)` - Delete service configuration
|
||||||
- `service_start(name string) !` - Start a service
|
- `service_get(name)` - Get service configuration
|
||||||
- `service_stop(name string) !` - Stop a service
|
|
||||||
- `service_monitor(name string) !` - Start monitoring a service
|
|
||||||
- `service_forget(name string) !` - Stop monitoring a service
|
|
||||||
- `service_kill(name string, signal string) !` - Send a signal to a service
|
|
||||||
- `service_create(name string, config ServiceConfig) !string` - Create a new service
|
|
||||||
- `service_delete(name string) !string` - Delete a service
|
|
||||||
- `service_get(name string) !ServiceConfig` - Get a service configuration
|
|
||||||
- `service_stats(name string) !ServiceStats` - Get memory and CPU usage statistics
|
|
||||||
|
|
||||||
### System Operations
|
### Monitoring Methods
|
||||||
|
|
||||||
- `system_shutdown() !` - Stop all services and power off the system
|
- `service_stats(name)` - Get service statistics
|
||||||
- `system_reboot() !` - Stop all services and reboot the system
|
|
||||||
- `system_start_http_server(address string) !string` - Start an HTTP/RPC server
|
|
||||||
- `system_stop_http_server() !` - Stop the HTTP/RPC server
|
|
||||||
|
|
||||||
### Logs
|
### System Methods
|
||||||
|
|
||||||
- `stream_current_logs(name ?string) ![]string` - Get current logs
|
- `system_shutdown()` - Shutdown the system
|
||||||
- `stream_subscribe_logs(name ?string) !string` - Subscribe to log messages
|
- `system_reboot()` - Reboot the system
|
||||||
|
- `system_start_http_server(address)` - Start HTTP server
|
||||||
|
- `system_stop_http_server()` - Stop HTTP server
|
||||||
|
|
||||||
## Constants
|
### Streaming Methods
|
||||||
|
|
||||||
- `default_socket_path` - Default Unix socket path (`/tmp/zinit.sock`)
|
- `stream_current_logs(args)` - Get current logs (returns array of log lines)
|
||||||
- `state_running`, `state_success`, `state_error`, etc. - Common service states
|
- `stream_subscribe_logs(args)` - Subscribe to log stream (returns subscription ID)
|
||||||
- `target_up`, `target_down` - Common service targets
|
|
||||||
- `log_null`, `log_ring`, `log_stdout` - Common log types
|
|
||||||
- `signal_term`, `signal_kill`, etc. - Common signals
|
|
||||||
|
|
||||||
## Helper Functions
|
### Discovery Methods
|
||||||
|
|
||||||
- `new_service_config(exec string) ServiceConfig` - Create a basic service configuration
|
- `rpc_discover()` - Get OpenRPC specification
|
||||||
- `new_oneshot_service_config(exec string) ServiceConfig` - Create a oneshot service configuration
|
|
||||||
- `is_service_not_found_error(err IError) bool` - Check if an error is a "service not found" error
|
## Configuration
|
||||||
- `format_memory_usage(bytes i64) string` - Format memory usage in human-readable format
|
|
||||||
- `format_cpu_usage(cpu_percent f64) string` - Format CPU usage
|
### Using the Factory Pattern
|
||||||
|
|
||||||
|
```v
|
||||||
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
|
||||||
|
// Get client using factory (recommended)
|
||||||
|
mut client := zinit.get()!
|
||||||
|
|
||||||
|
// Use the client
|
||||||
|
services := client.service_list()!
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example Heroscript Configuration
|
||||||
|
|
||||||
|
```hero
|
||||||
|
!!zinit.configure
|
||||||
|
name: 'production'
|
||||||
|
socket_path: '/tmp/zinit.sock'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
The client provides comprehensive error handling for all Zinit-specific error codes:
|
||||||
|
|
||||||
|
- `-32000`: Service not found
|
||||||
|
- `-32001`: Service already monitored
|
||||||
|
- `-32002`: Service is up
|
||||||
|
- `-32003`: Service is down
|
||||||
|
- `-32004`: Invalid signal
|
||||||
|
- `-32005`: Config error
|
||||||
|
- `-32006`: Shutting down
|
||||||
|
- `-32007`: Service already exists
|
||||||
|
- `-32008`: Service file error
|
||||||
|
|
||||||
|
```v
|
||||||
|
import freeflowuniverse.herolib.clients.zinit
|
||||||
|
|
||||||
|
mut client := zinit.new_client()!
|
||||||
|
|
||||||
|
// Handle specific errors
|
||||||
|
client.service_start('nonexistent') or {
|
||||||
|
if err.msg().contains('Service not found') {
|
||||||
|
println('Service does not exist')
|
||||||
|
} else {
|
||||||
|
println('Other error: ${err}')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
MIT
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
module zinit
|
|
||||||
|
|
||||||
// Request Types for Zinit API
|
|
||||||
//
|
|
||||||
// This file contains all the request types used by the Zinit API.
|
|
||||||
|
|
||||||
// ZinitError represents an error returned by the zinit API
|
|
||||||
pub struct ZinitError {
|
|
||||||
pub mut:
|
|
||||||
code int // Error code
|
|
||||||
message string // Error message
|
|
||||||
data string // Additional error data
|
|
||||||
}
|
|
||||||
|
|
||||||
// Error implements the error interface for ZinitError
|
|
||||||
pub fn (e ZinitError) msg() string {
|
|
||||||
return 'Zinit Error ${e.code}: ${e.message} - ${e.data}'
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
module zinit
|
|
||||||
|
|
||||||
import freeflowuniverse.herolib.schemas.jsonrpc
|
|
||||||
|
|
||||||
// Client is an OpenRPC client for Zinit
|
|
||||||
pub struct Client {
|
|
||||||
mut:
|
|
||||||
rpc_client &jsonrpc.Client
|
|
||||||
}
|
|
||||||
|
|
||||||
@[params]
|
|
||||||
pub struct ClientParams {
|
|
||||||
path string = '/tmp/zinit.sock' // Path to the Zinit RPC socket
|
|
||||||
}
|
|
||||||
|
|
||||||
// new_client creates a new Zinit RPC client with a custom socket path
|
|
||||||
pub fn new_client(args_ ClientParams) &Client {
|
|
||||||
mut args := args_
|
|
||||||
mut cl := jsonrpc.new_unix_socket_client(args.path)
|
|
||||||
return &Client{
|
|
||||||
rpc_client: cl
|
|
||||||
}
|
|
||||||
}
|
|
||||||
20 lib/clients/zinit/instruct.md Normal file
@@ -0,0 +1,20 @@
| RPC Call | Example In | Example Out | 1-Sentence Description |
|---------|-----------|------------|------------------------|
| `rpc.discover` | `{}` | `{ "openrpc": "1.2.6", "info": { "version": "1.0.0", "title": "Zinit JSON-RPC API" } }` | Returns the full OpenRPC specification of the Zinit API. |
| `service_list` | `{}` | `{ "service1": "Running", "service2": "Success", "service3": "Error" }` | Lists all managed services and their current states. |
| `service_status` | `{ "name": "redis" }` | `{ "name": "redis", "pid": 1234, "state": "Running", "target": "Up", "after": { "dependency1": "Success", "dependency2": "Running" } }` | Returns detailed status including PID, state, dependencies, and target. |
| `service_start` | `{ "name": "redis" }` | `null` | Starts a specified service; returns no result on success. |
| `service_stop` | `{ "name": "redis" }` | `null` | Stops a specified service; returns no result on success. |
| `service_monitor` | `{ "name": "redis" }` | `null` | Starts monitoring a service using its configuration from the config directory. |
| `service_forget` | `{ "name": "redis" }` | `null` | Stops monitoring a service; only allowed for stopped services. |
| `service_kill` | `{ "name": "redis", "signal": "SIGTERM" }` | `null` | Sends a signal (e.g., SIGTERM) to a running service. |
| `system_shutdown` | `{}` | `null` | Stops all services and powers off the system. |
| `system_reboot` | `{}` | `null` | Stops all services and reboots the system. |
| `service_create` | `{ "name": "redis", "content": { "exec": "redis-server", "oneshot": false, "after": ["network"], "log": "stdout", "env": { "REDIS_PASSWORD": "secret" }, "shutdown_timeout": 30 } }` | `"service_config/redis"` | Creates a new service configuration file with specified settings. |
| `service_delete` | `{ "name": "redis" }` | `"service deleted"` | Deletes a service configuration file. |
| `service_get` | `{ "name": "redis" }` | `{ "exec": "redis-server", "oneshot": false, "after": ["network"] }` | Retrieves the configuration content of a service. |
| `service_stats` | `{ "name": "redis" }` | `{ "name": "redis", "pid": 1234, "memory_usage": 10485760, "cpu_usage": 2.5, "children": [ { "pid": 1235, "memory_usage": 5242880, "cpu_usage": 1.2 } ] }` | Returns memory and CPU usage statistics for a running service. |
| `system_start_http_server` | `{ "address": "127.0.0.1:8080" }` | `"HTTP server started at 127.0.0.1:8080"` | Starts an HTTP/RPC server on the specified network address. |
| `system_stop_http_server` | `{}` | `null` | Stops the currently running HTTP/RPC server. |
| `stream_currentLogs` | `{ "name": "redis" }` | `["2023-01-01T12:00:00 redis: Starting service", "2023-01-01T12:00:02 redis: Service started"]` | Returns current logs; optionally filtered by service name. |
| `stream_subscribeLogs` | `{ "name": "redis" }` | `"2023-01-01T12:00:00 redis: Service started"` | Subscribes to real-time log messages, optionally filtered by service. |
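For orientation, a rough sketch of driving one of these RPC calls over the Unix socket with the herolib JSON-RPC helpers that the client code in this diff already uses; the socket path and the result shape follow the table above, and the error handling is illustrative only.

```v
module main

import freeflowuniverse.herolib.schemas.jsonrpc

fn main() {
	// Default socket path used by the zinit client in this diff.
	mut cl := jsonrpc.new_unix_socket_client('/tmp/zinit.sock')

	// `service_list` takes no parameters and returns a map of
	// service name -> state, e.g. {"service1": "Running"}.
	request := jsonrpc.new_request_generic('service_list', map[string]string{})
	services := cl.send[map[string]string, map[string]string](request) or {
		eprintln('zinit call failed: ${err}')
		return
	}
	println(services)
}
```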
@@ -1,73 +0,0 @@
module zinit

// ServiceCreateResponse represents the response from service_create
pub struct ServiceCreateResponse {
pub mut:
	path string // Path to the created service file
}

// ServiceDeleteResponse represents the response from service_delete
pub struct ServiceDeleteResponse {
pub mut:
	result string // Result of the delete operation
}

// SystemStartHttpServerResponse represents the response from system_start_http_server
pub struct SystemStartHttpServerResponse {
pub mut:
	result string // Result of starting the HTTP server
}

// StreamCurrentLogsResponse represents the response from stream_currentLogs
pub struct StreamCurrentLogsResponse {
pub mut:
	logs []string // Log entries
}

// StreamSubscribeLogsResponse represents the response from stream_subscribeLogs
pub struct StreamSubscribeLogsResponse {
pub mut:
	subscription_id string // ID of the log subscription
}

// Module version information
pub const version = '1.0.0'
pub const author = 'Hero Code'
pub const license = 'MIT'

// Default socket path for zinit
pub const default_socket_path = '/tmp/zinit.sock'

// Common service states
pub const state_running = 'Running'
pub const state_success = 'Success'
pub const state_error = 'Error'
pub const state_stopped = 'Stopped'
pub const state_failed = 'Failed'

// Common service targets
pub const target_up = 'Up'
pub const target_down = 'Down'

// Common log types
pub const log_null = 'null'
pub const log_ring = 'ring'
pub const log_stdout = 'stdout'

// Common signals
pub const signal_term = 'SIGTERM'
pub const signal_kill = 'SIGKILL'
pub const signal_hup = 'SIGHUP'
pub const signal_usr1 = 'SIGUSR1'
pub const signal_usr2 = 'SIGUSR2'

// JSON-RPC error codes as defined in the OpenRPC specification
pub const error_service_not_found = -32000
pub const error_service_already_monitored = -32001
pub const error_service_is_up = -32002
pub const error_service_is_down = -32003
pub const error_invalid_signal = -32004
pub const error_config_error = -32005
pub const error_shutting_down = -32006
pub const error_service_already_exists = -32007
pub const error_service_file_error = -32008
63 lib/clients/zinit/model_openrpc.v Normal file
@@ -0,0 +1,63 @@
module zinit

// ServiceStatus represents detailed status information for a service
pub struct ServiceStatus {
pub mut:
	name   string            // Service name
	pid    u32               // Process ID of the running service (if running)
	state  string            // Current state of the service (Running, Success, Error, etc.)
	target string            // Target state of the service (Up, Down)
	after  map[string]string // Dependencies of the service and their states
}

// ServiceConfig represents the configuration for a zinit service
pub struct ServiceConfig {
pub mut:
	exec             string            // Command to run
	test             string            // Test command (optional)
	oneshot          bool              // Whether the service runs only once and is not restarted (maps to one_shot in Zinit)
	after            []string          // Services that must be running before this one starts
	log              string            // How to handle service output (null, ring, stdout)
	env              map[string]string // Environment variables for the service
	dir              string            // Working directory for the service
	shutdown_timeout u64               // Maximum time to wait for the service to stop during shutdown
}

// ServiceStats represents memory and CPU usage statistics for a service
pub struct ServiceStats {
pub mut:
	name         string       // Service name
	pid          u32          // Process ID of the service
	memory_usage u64          // Memory usage in bytes
	cpu_usage    f32          // CPU usage as a percentage (0-100)
	children     []ChildStats // Stats for child processes
}

// ChildStats represents statistics for a child process
pub struct ChildStats {
pub mut:
	pid          u32 // Process ID of the child process
	memory_usage u64 // Memory usage in bytes
	cpu_usage    f32 // CPU usage as a percentage (0-100)
}

// ServiceCreateParams represents parameters for the service_create method
pub struct ServiceCreateParams {
pub mut:
	name    string        // Name of the service to create
	content ServiceConfig // Configuration for the service
}

// ServiceKillParams represents parameters for the service_kill method
pub struct ServiceKillParams {
pub mut:
	name   string // Name of the service to kill
	signal string // Signal to send (e.g., SIGTERM, SIGKILL)
}

// LogParams represents parameters for log streaming methods
@[params]
pub struct LogParams {
pub mut:
	name string // Optional service name filter
}
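As a usage sketch, here is how the create parameters corresponding to the `service_create` row in instruct.md might be built; the struct definitions below are trimmed local stand-ins (only the fields used here) so the snippet compiles on its own, rather than the module's actual types.

```v
module main

// Trimmed stand-ins for the module's ServiceConfig / ServiceCreateParams,
// keeping only the fields used in this example.
struct ServiceConfig {
	exec             string
	oneshot          bool
	after            []string
	log              string
	env              map[string]string
	shutdown_timeout u64
}

struct ServiceCreateParams {
	name    string
	content ServiceConfig
}

fn main() {
	// Mirrors the service_create example payload from instruct.md.
	params := ServiceCreateParams{
		name:    'redis'
		content: ServiceConfig{
			exec:    'redis-server'
			oneshot: false
			after:   ['network']
			log:     'stdout'
			env:     {
				'REDIS_PASSWORD': 'secret'
			}
			shutdown_timeout: 30
		}
	}
	println(params)
}
```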
@@ -1,175 +0,0 @@
module zinit

import freeflowuniverse.herolib.schemas.jsonrpc

// ServiceConfig represents the configuration for a zinit service
pub struct ServiceConfig {
pub mut:
	exec             string            // Command to run
	oneshot          bool              // Whether the service should be restarted
	after            []string          // Services that must be running before this one starts
	log              string            // How to handle service output (null, ring, stdout)
	env              map[string]string // Environment variables for the service
	shutdown_timeout int               // Maximum time to wait for service to stop during shutdown
}

// KillParams represents the parameters for the service_kill method
pub struct KillParams {
pub:
	name   string // Name of the service to kill
	signal string // Signal to send (e.g., SIGTERM, SIGKILL)
}

// RpcDiscoverResponse represents the response from rpc.discover
pub struct RpcDiscoverResponse {
pub mut:
	spec map[string]string // OpenRPC specification
}

// rpc_discover returns the OpenRPC specification for the API
pub fn (mut c Client) rpc_discover() !RpcDiscoverResponse {
	request := jsonrpc.new_request_generic('rpc.discover', []string{})
	response := c.rpc_client.send[[]string, map[string]string](request)!
	return RpcDiscoverResponse{
		spec: response
	}
}

// // Response Models for Zinit API
// //
// // This file contains all the response models used by the Zinit API.
// // These models are used as type parameters in the response generics.

// // ServiceListResponse represents the response from service_list
// pub struct ServiceListResponse {
// pub mut:
//	// Map of service names to their current states
//	services map[string]string
// }

// service_list lists all services managed by Zinit
// Returns a map of service names to their current states
pub fn (mut c Client) service_list() !map[string]string {
	request := jsonrpc.new_request_generic('service_list', map[string]string{})
	services := c.rpc_client.send[map[string]string, map[string]string](request)!
	// return ServiceListResponse{
	//	services: services
	// }
	return services
}

// ServiceStatusResponse represents the response from service_status
pub struct ServiceStatusResponse {
pub mut:
	name   string            // Service name
	pid    int               // Process ID of the running service (if running)
	state  string            // Current state of the service (Running, Success, Error, etc.)
	target string            // Target state of the service (Up, Down)
	after  map[string]string // Dependencies of the service and their states
}

// service_status shows detailed status information for a specific service
// name: the name of the service
pub fn (mut c Client) service_status(name string) !ServiceStatusResponse {
	request := jsonrpc.new_request_generic('service_status', name)

	// Use a direct struct mapping instead of manual conversion
	return c.rpc_client.send[string, ServiceStatusResponse](request)!
}

// service_start starts a service
// name: the name of the service to start
pub fn (mut c Client) service_start(name string) ! {
	request := jsonrpc.new_request_generic('service_start', name)
	c.rpc_client.send[string, string](request)!
}

// service_stop stops a service
// name: the name of the service to stop
pub fn (mut c Client) service_stop(name string) ! {
	request := jsonrpc.new_request_generic('service_stop', name)
	c.rpc_client.send[string, string](request)!
}

// service_monitor starts monitoring a service
// The service configuration is loaded from the config directory
// name: the name of the service to monitor
pub fn (mut c Client) service_monitor(name string) ! {
	request := jsonrpc.new_request_generic('service_monitor', name)
	c.rpc_client.send[string, string](request)!
}

// service_delete deletes a service configuration file
// name: the name of the service to delete
pub fn (mut c Client) service_delete(name string) !ServiceDeleteResponse {
	request := jsonrpc.new_request_generic('service_delete', name)
	result := c.rpc_client.send[string, string](request)!
	return ServiceDeleteResponse{
		result: result
	}
}

// service_forget stops monitoring a service
// You can only forget a stopped service
// name: the name of the service to forget
pub fn (mut c Client) service_forget(name string) ! {
	request := jsonrpc.new_request_generic('service_forget', name)
	c.rpc_client.send[string, string](request)!
}

// TODO: make sure the signal is a valid signal and enumerator do as @[params] so its optional

// service_kill sends a signal to a running service
// name: the name of the service to send the signal to
// signal: the signal to send (e.g., SIGTERM, SIGKILL)
pub fn (mut c Client) service_kill(name string, signal string) ! {
	params := KillParams{
		name:   name
		signal: signal
	}

	request := jsonrpc.new_request_generic('service_kill', params)
	c.rpc_client.send[KillParams, string](request)!
}

// CreateServiceParams represents the parameters for the service_create method
struct CreateServiceParams {
	name    string        // Name of the service to create
	content ServiceConfig // Configuration for the service
}

// service_create creates a new service configuration file
// name: the name of the service to create
// config: the service configuration
pub fn (mut c Client) service_create(name string, config ServiceConfig) !ServiceCreateResponse {
	params := CreateServiceParams{
		name:    name
		content: config
	}

	request := jsonrpc.new_request_generic('service_create', params)
	path := c.rpc_client.send[CreateServiceParams, string](request)!
	return ServiceCreateResponse{
		path: path
	}
}

// service_get gets a service configuration file
// name: the name of the service to get
pub fn (mut c Client) service_get(name string) !ServiceConfigResponse {
	request := jsonrpc.new_request_generic('service_get', {
		'name': name
	})

	// We need to handle the conversion from ServiceConfig to ServiceConfigResponse
	config := c.rpc_client.send[map[string]string, ServiceConfig](request)!

	return ServiceConfigResponse{
		exec:             config.exec
		oneshot:          config.oneshot
		after:            config.after
		log:              config.log
		env:              config.env
		shutdown_timeout: config.shutdown_timeout
	}
}
@@ -1,33 +0,0 @@
module zinit

pub struct ServiceConfigResponse {
pub mut:
	exec             string            // Command to run
	oneshot          bool              // Whether the service should be restarted
	after            []string          // Services that must be running before this one starts
	log              string            // How to handle service output (null, ring, stdout)
	env              map[string]string // Environment variables for the service
	shutdown_timeout int               // Maximum time to wait for service to stop during shutdown
}

// Helper function to create a basic service configuration
pub fn new_service_config(exec string) ServiceConfig {
	return ServiceConfig{
		exec:             exec
		oneshot:          false
		log:              log_stdout
		env:              map[string]string{}
		shutdown_timeout: 30
	}
}

// Helper function to create a oneshot service configuration
pub fn new_oneshot_service_config(exec string) ServiceConfig {
	return ServiceConfig{
		exec:             exec
		oneshot:          true
		log:              log_stdout
		env:              map[string]string{}
		shutdown_timeout: 30
	}
}
Some files were not shown because too many files have changed in this diff.