Merge branch 'development' into development_heroprompt

This commit is contained in:
Mahmoud-Emad
2025-08-17 11:42:16 +03:00
264 changed files with 28902 additions and 8904 deletions

View File

@@ -0,0 +1,109 @@
## INTENT
we use heroscript to communicate actions and events in a structured format.
we want you to parse user intents and generate the corresponding heroscript.
ONLY RETURN THE HEROSCRIPT STATEMENTS, can be more than 1
## HEROSCRIPT FORMAT
HeroScript is a concise scripting language with the following structure:
```heroscript
!!actor.action_name
param1: 'value1'
param2: 'value with spaces'
multiline_description: '
This is a multiline description.
It can span multiple lines.
'
arg1 arg2 // Arguments without keys
!!actor.action_name2 param1:something param2:'something with spaces' nr:3
```
Key characteristics:
- **Actions**: Start with `!!`, followed by `actor.action_name` (e.g., `!!mailclient.configure`).
- **Parameters**: Defined as `key:value`. Values can be quoted for spaces.
- **Multiline Support**: Parameters like `description` can span multiple lines.
- **Arguments**: Values without keys (e.g., `arg1`).
- params can be on 1 line, with spaces in between
- time can be given as +1h, +1d, +1w (hour, day, week); of course 1 can be any number, and +1h means 1 hour from now
- time format is: dd/mm/yyyy hh:mm (ONLY USE THIS)
- comma separation is used a lot in arguments e.g. 'jan,kristof' or 'jan , kristof' remove spaces, is list of 2
- note only !! is at start of line, rest has spaces per instruction
- leave one empty line between heroscript statements
- everything after // is comment
## HEROSCRIPT SCHEMA
the language we understand
### calendar management
```heroscript
!!calendar.create when:'+1h' descr:'this is event to discuss eng' attendees:'jan,kristof' name:'meet1' tags:'eng,urgent'
!!calendar.delete name:'meet1'
!!calendar.list tags:'urgent'
```
### contact management
```heroscript
!!contact.add name:'jan' email:'jan@example.com' phone:'123-456-7890'
!!contact.remove name:'jan'
!!contact.list
```
### task management
```heroscript
!!task.create title:'Prepare presentation' due:'+1d' assignee:'jan' name:'task1' tags:'eng,urgent'
deadline:'+10d' duration:'1h'
!!task.update name:'task1' status:'in progress'
!!task.delete name:'task1'
!!task.list
```
### project management
```heroscript
!!project.create title:'Cloud Product Development' description:'Track progress of cloud product development' name:'cloud_prod'
!!project.update name:'cloud_prod' status:'in progress'
!!project.delete name:'cloud_prod'
!!project.list
!!project.tasks_list name:'cloud_prod' //required properties are name, description, and assignee; if not given, ask
!!project.tasks_add names:'task1, task2'
!!project.tasks_remove names:'task1, task2'
```
### SUPPORTED TAGS
only tags supported are:
- for intent: eng, prod, support, mgmt, marketing
- for urgency: urgent, high, medium, low
### generic remarks
- names are lowercase and snake_case, can be distilled out of title if only title given, often a user will say name but that means title
- time: format of returned data or time is always dd/mm/yyyy hh:min
## IMPORTANT STARTING INFO
- current time is 10/08/2025 05:10 , use this to define any time-related parameters
## USER INTENT
I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high
also let me know which other meetings I have which are urgent
can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'
Please add tasks to the project in line to creating specifications, design documents, and implementation plans.

View File

@@ -0,0 +1,64 @@
SYSTEM
You are a HeroScript compiler. Convert user intents into valid HeroScript statements.
OUTPUT RULES
1) Return ONLY HeroScript statements. No prose, no backticks.
2) Separate each statement with exactly ONE blank line.
3) Keys use snake_case. Names are lowercase snake_case derived from titles (non-alnum → "_", collapse repeats, trim).
4) Lists are comma-separated with NO spaces (e.g., "jan,alex").
5) Times: OUTPUT MUST BE ABSOLUTE in "dd/mm/yyyy hh:mm" (Europe/Zurich). Convert relative times (e.g., "tomorrow 10am") using CURRENT_TIME.
6) Tags: include at most one intent tag and at most one urgency tag when present.
- intent: eng,prod,support,mgmt,marketing
- urgency: urgent,high,medium,low
7) Quotes: quote values containing spaces; otherwise omit quotes (allowed either way).
8) Comments only with // if the user explicitly asks for explanations; otherwise omit.
SCHEMA (exact actions & parameters)
!!calendar.create when:'dd/mm/yyyy hh:mm' name:'<name>' descr:'<text>' attendees:'a,b,c' tags:'intent,urgency'
!!calendar.delete name:'<name>'
!!calendar.list [tags:'tag1,tag2']
!!contact.add name:'<name>' email:'<email>' phone:'<phone>'
!!contact.remove name:'<name>'
!!contact.list
!!task.create title:'<title>' name:'<name>' [due:'dd/mm/yyyy hh:mm'] [assignee:'<name>'] [tags:'intent,urgency'] [deadline:'dd/mm/yyyy hh:mm'] [duration:'<Nd Nh Nm> or <Nh>']
!!task.update name:'<name>' [status:'in progress|done|blocked|todo']
!!task.delete name:'<name>'
!!task.list
!!project.create title:'<title>' description:'<text>' name:'<name>'
!!project.update name:'<name>' [status:'in progress|done|blocked|todo']
!!project.delete name:'<name>'
!!project.list
!!project.tasks_list name:'<project_name>'
!!project.tasks_add name:'<project_name>' names:'task_a,task_b'
!!project.tasks_remove name:'<project_name>' names:'task_a,task_b'
NORMALIZATION & INFERENCE (silent)
- Derive names from titles when missing (see rule 3). Ensure consistency across statements.
- Map phrases to tags when obvious (e.g., "new product" ⇒ intent: prod; "high priority" ⇒ urgency: high).
- Attendees: split on commas, trim, lowercase given names.
- If the user asks for “urgent meetings,” use tags:'urgent' specifically.
- Prefer concise descriptions pulled from the user's phrasing.
- Names are required; if missing, ask for clarification.
- For calendar management, ensure to include all relevant details such as time, attendees, and description.
CURRENT_TIME
10/08/2025 05:10
USER_MESSAGE
I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high
also let me know which other meetings I have which are urgent
can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'
Please add tasks to the project in line to creating specifications, design documents, and implementation plans.
END

View File

@@ -0,0 +1,82 @@
## INSTRUCTIONS
the user will send me multiple instructions describing what they want to do; I want you to put them into separate categories
The categories we have defined are:
- calendar management
- schedule meetings, events, reminders
- list these events
- delete them
- contact management
- add/remove contact information e.g. phone numbers, email addresses, address information
- list contacts, search
- task or project management
- anything we need to do, anything we need to track and plan
- create/update tasks, set deadlines
- mark tasks as complete
- delete tasks
- project management
- communication (chat, email)
- see what needs to be communicated, e.g. send a chat to ...
- search statements
- find on internet, find specific information from my friends
I want you to detect the intent and make multiple blocks out of the intent, each block should correspond to one of the identified intents, identify the intent with name of the category eg. calendar, only use above names
what user wants to do, stay as close as possible to the original instructions, copy the exact instructions as where given by the user, we only need to sort the instructions in these blocks
for each instruction make a separate block, e.g. if 2 tasks are given, create 2 blocks
the format to return is: (note newline after each title of block)
```template
===CALENDAR===\n
$the copied text from what user wants
===CONTACT===\n
...
===QUESTION===\n
put here what our system needs to ask to the user anything which is not clear
===END===\n
```
I want you to execute above on instructions as given by user below, give text back ONLY supporting the template
note: the format is only ===$NAME=== followed on the next lines by the user's original instructions; don't change them
## special processing of info
- if a date or time specified e.g. tomorrow, time, ... calculate back from current date
## IMPORTANT STARTING INFO
- current time is 10/08/2025 05:10 (format of returned data is always dd/mm/yyyy hh:min)
- use the current time to define formatted time out of instructions
- only return the formatted time
## UNCLEAR INFO
check the instructions for unclear references (e.g. "you", "me", ...);
if any are unclear, ask specifically who is meant
if task, specify per task, who needs to do it and when, make sure each instruction (block) is complete and clear for further processing
be very specific with the questions e.g. who is you, ...
## EXECUTE ABOVE ON THE FOLLOWING
I am planning a birthday for my daughters tomorrow, there will be 10 people.
I would like to know if you can help me with the preparations.
I need a place for my daughter's birthday party.
I need to send message to my wife isabelle that she needs to pick up the cake.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,225 @@
# tus Resumable Upload Protocol (Condensed for Coding Agents)
## Core Protocol
All Clients and Servers MUST implement the core protocol for resumable uploads.
### Resuming an Upload
1. **Determine Offset (HEAD Request):**
* **Request:**
```
HEAD /files/{upload_id} HTTP/1.1
Host: tus.example.org
Tus-Resumable: 1.0.0
```
* **Response:**
```
HTTP/1.1 200 OK
Upload-Offset: {current_offset}
Tus-Resumable: 1.0.0
```
* Server MUST include `Upload-Offset`.
* Server MUST include `Upload-Length` if known.
* Server SHOULD return `200 OK` or `204 No Content`.
* Server MUST prevent caching: `Cache-Control: no-store`.
2. **Resume Upload (PATCH Request):**
* **Request:**
```
PATCH /files/{upload_id} HTTP/1.1
Host: tus.example.org
Content-Type: application/offset+octet-stream
Content-Length: {chunk_size}
Upload-Offset: {current_offset}
Tus-Resumable: 1.0.0
[binary data chunk]
```
* **Response:**
```
HTTP/1.1 204 No Content
Tus-Resumable: 1.0.0
Upload-Offset: {new_offset}
```
* `Content-Type` MUST be `application/offset+octet-stream`.
* `Upload-Offset` in request MUST match server's current offset (else `409 Conflict`).
* Server MUST acknowledge with `204 No Content` and `Upload-Offset` (new offset).
* Server SHOULD return `404 Not Found` for non-existent resources.
### Common Headers
* **`Upload-Offset`**: Non-negative integer. Byte offset within resource.
* **`Upload-Length`**: Non-negative integer. Total size of upload in bytes.
* **`Tus-Version`**: Comma-separated list of supported protocol versions (Server response).
* **`Tus-Resumable`**: Protocol version used (e.g., `1.0.0`). MUST be in every request/response (except `OPTIONS`). If client version unsupported, server responds `412 Precondition Failed` with `Tus-Version`.
* **`Tus-Extension`**: Comma-separated list of supported extensions (Server response). Omitted if none.
* **`Tus-Max-Size`**: Non-negative integer. Max allowed upload size in bytes (Server response).
* **`X-HTTP-Method-Override`**: String. Client MAY use to override HTTP method (e.g., for `PATCH`/`DELETE` limitations).
### Server Configuration (OPTIONS Request)
* **Request:**
```
OPTIONS /files HTTP/1.1
Host: tus.example.org
```
* **Response:**
```
HTTP/1.1 204 No Content
Tus-Resumable: 1.0.0
Tus-Version: 1.0.0,0.2.2,0.2.1
Tus-Max-Size: 1073741824
Tus-Extension: creation,expiration
```
* Response MUST contain `Tus-Version`. MAY include `Tus-Extension` and `Tus-Max-Size`.
* Client SHOULD NOT include `Tus-Resumable` in request.
## Protocol Extensions
Clients SHOULD use `OPTIONS` request and `Tus-Extension` header for feature detection.
### Creation (`creation` extension)
Create a new upload resource. Server MUST add `creation` to `Tus-Extension`.
* **Request (POST):**
```
POST /files HTTP/1.1
Host: tus.example.org
Content-Length: 0
Upload-Length: {total_size} OR Upload-Defer-Length: 1
Tus-Resumable: 1.0.0
Upload-Metadata: filename {base64_filename},is_confidential
```
* MUST include `Upload-Length` or `Upload-Defer-Length: 1`.
* If `Upload-Defer-Length: 1`, client MUST set `Upload-Length` in subsequent `PATCH`.
* `Upload-Length: 0` creates an immediately complete empty file.
* Client MAY supply `Upload-Metadata` (key-value pairs, value Base64 encoded).
* If `Upload-Length` exceeds `Tus-Max-Size`, server responds `413 Request Entity Too Large`.
* **Response:**
```
HTTP/1.1 201 Created
Location: {upload_url}
Tus-Resumable: 1.0.0
```
* Server MUST respond `201 Created` and set `Location` header to new resource URL.
* New resource has implicit offset `0`.
#### Headers
* **`Upload-Defer-Length`**: `1`. Indicates upload size is unknown. Server adds `creation-defer-length` to `Tus-Extension` if supported.
* **`Upload-Metadata`**: Comma-separated `key value` pairs. Key: no spaces/commas, ASCII. Value: Base64 encoded.
### Creation With Upload (`creation-with-upload` extension)
Include initial upload data in the `POST` request. Server MUST add `creation-with-upload` to `Tus-Extension`. Depends on `creation` extension.
* **Request (POST):**
```
POST /files HTTP/1.1
Host: tus.example.org
Content-Length: {initial_chunk_size}
Upload-Length: {total_size}
Tus-Resumable: 1.0.0
Content-Type: application/offset+octet-stream
Expect: 100-continue
[initial binary data chunk]
```
* Similar rules as `PATCH` apply for content.
* Client SHOULD include `Expect: 100-continue`.
* **Response:**
```
HTTP/1.1 201 Created
Location: {upload_url}
Tus-Resumable: 1.0.0
Upload-Offset: {accepted_offset}
```
* Server MUST include `Upload-Offset` with accepted bytes.
### Expiration (`expiration` extension)
Server MAY remove unfinished uploads. Server MUST add `expiration` to `Tus-Extension`.
* **Response (PATCH/POST):**
```
HTTP/1.1 204 No Content
Upload-Expires: Wed, 25 Jun 2014 16:00:00 GMT
Tus-Resumable: 1.0.0
Upload-Offset: {new_offset}
```
* **`Upload-Expires`**: Datetime in RFC 9110 format. Indicates when upload expires. Client SHOULD use to check validity. Server SHOULD respond `404 Not Found` or `410 Gone` for expired uploads.
### Checksum (`checksum` extension)
Verify data integrity of `PATCH` requests. Server MUST add `checksum` to `Tus-Extension`. Server MUST support `sha1`.
* **Request (PATCH):**
```
PATCH /files/{upload_id} HTTP/1.1
Content-Length: {chunk_size}
Upload-Offset: {current_offset}
Tus-Resumable: 1.0.0
Upload-Checksum: {algorithm} {base64_checksum}
[binary data chunk]
```
* **Response:**
* `204 No Content`: Checksums match.
* `400 Bad Request`: Algorithm not supported.
* `460 Checksum Mismatch`: Checksums mismatch.
* In `400`/`460` cases, chunk MUST be discarded, upload/offset NOT updated.
* **`Tus-Checksum-Algorithm`**: Comma-separated list of supported algorithms (Server response to `OPTIONS`).
* **`Upload-Checksum`**: `{algorithm} {Base64_encoded_checksum}`.
### Termination (`termination` extension)
Client can terminate uploads. Server MUST add `termination` to `Tus-Extension`.
* **Request (DELETE):**
```
DELETE /files/{upload_id} HTTP/1.1
Host: tus.example.org
Content-Length: 0
Tus-Resumable: 1.0.0
```
* **Response:**
```
HTTP/1.1 204 No Content
Tus-Resumable: 1.0.0
```
* Server SHOULD free resources, MUST respond `204 No Content`.
* Future requests to URL SHOULD return `404 Not Found` or `410 Gone`.
### Concatenation (`concatenation` extension)
Concatenate multiple partial uploads into a single final upload. Server MUST add `concatenation` to `Tus-Extension`.
* **Partial Upload Creation (POST):**
```
POST /files HTTP/1.1
Upload-Concat: partial
Upload-Length: {partial_size}
Tus-Resumable: 1.0.0
```
* `Upload-Concat: partial` header.
* Server SHOULD NOT process partial uploads until concatenated.
* **Final Upload Creation (POST):**
```
POST /files HTTP/1.1
Upload-Concat: final;{url_partial1} {url_partial2} ...
Tus-Resumable: 1.0.0
```
* `Upload-Concat: final;{space-separated_partial_urls}`.
* Client MUST NOT include `Upload-Length`.
* Final upload length is sum of partials.
* Server MAY delete partials after concatenation.
* Server MUST respond `403 Forbidden` to `PATCH` requests against final upload.
* **`concatenation-unfinished`**: Server adds to `Tus-Extension` if it supports concatenation while partial uploads are in progress.
* **HEAD Request for Final Upload:**
* Response SHOULD NOT contain `Upload-Offset` unless concatenation finished.
* After success, `Upload-Offset` and `Upload-Length` MUST be equal.
* Response MUST include `Upload-Concat` header.
* **HEAD Request for Partial Upload:**
* Response MUST contain `Upload-Offset`.

View File

@@ -0,0 +1,667 @@
# TUS (1.0.0) — Server-Side Specs (Concise)
## Always
* All requests/responses **except** `OPTIONS` MUST include: `Tus-Resumable: 1.0.0`.
If unsupported → `412 Precondition Failed` + `Tus-Version`.
* Canonical server features via `OPTIONS /files`:
* `Tus-Version: 1.0.0`
* `Tus-Extension: creation,creation-with-upload,termination,checksum,concatenation,concatenation-unfinished` (as supported)
* `Tus-Max-Size: <int>` (if hard limit)
* `Tus-Checksum-Algorithm: sha1[,md5,crc32...]` (if checksum ext.)
## Core
* **Create:** `POST /files` with `Upload-Length: <int>` OR `Upload-Defer-Length: 1`. Optional `Upload-Metadata`.
* `201 Created` + `Location: /files/{id}`, echo `Tus-Resumable`.
* *Creation-With-Upload:* If body present → `Content-Type: application/offset+octet-stream`, accept bytes, respond with `Upload-Offset`.
* **Status:** `HEAD /files/{id}`
* Always return `Upload-Offset` for partial uploads, include `Upload-Length` if known; if deferred, return `Upload-Defer-Length: 1`. `Cache-Control: no-store`.
* **Upload:** `PATCH /files/{id}`
* `Content-Type: application/offset+octet-stream` and `Upload-Offset` (must match server).
* On success → `204 No Content` + new `Upload-Offset`.
* Mismatch → `409 Conflict`. Bad type → `415 Unsupported Media Type`.
* **Terminate:** `DELETE /files/{id}` (if supported) → `204 No Content`. Subsequent requests → `404/410`.
## Checksum (optional but implemented here)
* Client MAY send: `Upload-Checksum: <algo> <base64digest>` per `PATCH`.
* Server MUST verify the request body's checksum over the exact received bytes.
* If algo unsupported → `400 Bad Request`.
* If mismatch → **discard the chunk** (no offset change) and respond `460 Checksum Mismatch`.
* If OK → `204 No Content` + new `Upload-Offset`.
* `OPTIONS` MUST include `Tus-Checksum-Algorithm` (comma-separated algos).
## Concatenation (optional but implemented here)
* **Partial uploads:** `POST /files` with `Upload-Concat: partial` and `Upload-Length`. (MUST have length; may use creation-with-upload/patch thereafter.)
* **Final upload:** `POST /files` with
`Upload-Concat: final; /files/{a} /files/{b} ...`
* MUST NOT include `Upload-Length`.
* Final uploads **cannot** be `PATCH`ed (`403`).
* Server SHOULD assemble final (in order).
* If `concatenation-unfinished` supported, final may be created before partials completed; server completes once all partials are done.
* **HEAD semantics:**
* For *partial*: MUST include `Upload-Offset`.
* For *final* before concatenation: SHOULD NOT include `Upload-Offset`. `Upload-Length` MAY be present if computable (= sum of partials lengths when known).
* After finalization: `Upload-Offset == Upload-Length`.
---
# TUS FastAPI Server (disk-only, crash-safe, checksum + concatenation)
**Features**
* All persistent state on disk:
```
TUS_ROOT/
{upload_id}/
info.json # canonical metadata & status
data.part # exists while uploading or while building final
data # final file after atomic rename
```
* Crash recovery: `HEAD` offset = size of `data.part` or `data`.
* `.part` during upload; `os.replace()` (atomic) to `data` on completion.
* Streaming I/O; `fsync` on file + parent directory.
* Checksum: supports `sha1` (can easily add md5/crc32).
* Concatenation: server builds final when partials complete; supports `concatenation-unfinished`.
> Run with: `uv pip install fastapi uvicorn` then `uvicorn tus_server:app --host 0.0.0.0 --port 8080` (or `python tus_server.py`).
> Set `TUS_ROOT` env to choose storage root.
```python
# tus_server.py
from fastapi import FastAPI, Request, Response, HTTPException
from typing import Optional, Dict, Any, List
import os, json, uuid, base64, asyncio, errno, hashlib
# -----------------------------
# Config
# -----------------------------
TUS_VERSION = "1.0.0"  # only protocol version accepted in Tus-Resumable
# Advertise extensions implemented below:
TUS_EXTENSIONS = ",".join([
    "creation",
    "creation-with-upload",
    "termination",
    "checksum",
    "concatenation",
    "concatenation-unfinished",
])
# Supported checksum algorithms (keys = header token)
CHECKSUM_ALGOS = ["sha1"]  # add "md5" if desired
TUS_ROOT = os.environ.get("TUS_ROOT", "/tmp/tus")  # storage root; override via TUS_ROOT env var
MAX_SIZE = 1 << 40  # 1 TiB default; advertised via Tus-Max-Size and enforced on creation
os.makedirs(TUS_ROOT, exist_ok=True)  # ensure storage root exists at import time
app = FastAPI()
# Per-process locks to prevent concurrent mutations on same upload_id
_locks: Dict[str, asyncio.Lock] = {}
def _lock_for(upload_id: str) -> asyncio.Lock:
if upload_id not in _locks:
_locks[upload_id] = asyncio.Lock()
return _locks[upload_id]
# -----------------------------
# Path helpers
# -----------------------------
def upload_dir(upload_id: str) -> str:
    """Directory holding all on-disk state for one upload."""
    return os.path.join(TUS_ROOT, upload_id)

def _upload_file(upload_id: str, name: str) -> str:
    # Shared helper: path of a named file inside the upload's directory.
    return os.path.join(upload_dir(upload_id), name)

def info_path(upload_id: str) -> str:
    """Path of the canonical metadata/status file (info.json)."""
    return _upload_file(upload_id, "info.json")

def part_path(upload_id: str) -> str:
    """Path of the in-progress data file (data.part)."""
    return _upload_file(upload_id, "data.part")

def final_path(upload_id: str) -> str:
    """Path of the completed data file (data) after the atomic rename."""
    return _upload_file(upload_id, "data")
# -----------------------------
# FS utils (crash-safe)
# -----------------------------
def _fsync_dir(path: str) -> None:
fd = os.open(path, os.O_DIRECTORY)
try:
os.fsync(fd)
finally:
os.close(fd)
def _write_json_atomic(path: str, obj: Dict[str, Any]) -> None:
    """Durably persist `obj` as compact JSON at `path`.

    Writes to a sibling .tmp file, fsyncs it, atomically renames over the
    target, then fsyncs the parent directory so the rename itself is durable.
    """
    payload = json.dumps(obj, separators=(",", ":"), ensure_ascii=False)
    tmp_path = f"{path}.tmp"
    with open(tmp_path, "w", encoding="utf-8") as fh:
        fh.write(payload)
        fh.flush()
        os.fsync(fh.fileno())
    os.replace(tmp_path, path)
    _fsync_dir(os.path.dirname(path))
def _read_json(path: str) -> Dict[str, Any]:
with open(path, "r", encoding="utf-8") as f:
return json.load(f)
def _size(path: str) -> int:
try:
return os.path.getsize(path)
except FileNotFoundError:
return 0
def _exists(path: str) -> bool:
return os.path.exists(path)
# -----------------------------
# TUS helpers
# -----------------------------
def _ensure_tus_version(req: Request):
    """Enforce the Tus-Resumable header on every request except OPTIONS.

    Missing header or a version other than ours yields 412 Precondition Failed;
    the unsupported-version response advertises Tus-Version per spec.
    """
    if req.method == "OPTIONS":
        return
    version = req.headers.get("Tus-Resumable")
    if version is None:
        raise HTTPException(status_code=412, detail="Missing Tus-Resumable")
    if version != TUS_VERSION:
        raise HTTPException(
            status_code=412,
            detail="Unsupported Tus-Resumable",
            headers={"Tus-Version": TUS_VERSION},
        )
def _parse_metadata(raw: Optional[str]) -> str:
# Raw passthrough; validate/consume in your app if needed.
return raw or ""
def _new_upload_info(upload_id: str,
kind: str, # "single" | "partial" | "final"
length: Optional[int],
defer_length: bool,
metadata: str,
parts: Optional[List[str]] = None) -> Dict[str, Any]:
return {
"upload_id": upload_id,
"kind": kind, # "single" (default), "partial", or "final"
"length": length, # int or None if deferred/unknown
"defer_length": bool(defer_length),
"metadata": metadata, # raw Upload-Metadata header
"completed": False,
"parts": parts or [], # for final: list of upload_ids (not URLs)
}
def _load_info_or_404(upload_id: str) -> Dict[str, Any]:
    """Read info.json for an upload; 404 when absent, 500 when unreadable."""
    path = info_path(upload_id)
    if not _exists(path):
        raise HTTPException(404, "Upload not found")
    try:
        return _read_json(path)
    except Exception as exc:
        raise HTTPException(500, f"Corrupt metadata: {exc}")
def _set_info(upload_id: str, info: Dict[str, Any]) -> None:
_write_json_atomic(info_path(upload_id), info)
def _ensure_dir(path: str):
os.makedirs(path, exist_ok=False)
def _atomic_finalize_file(upload_id: str):
    """Rename data.part → data and mark completed."""
    # Durability ordering: fsync the file's bytes first, then os.replace()
    # (atomic on POSIX), then fsync the parent directory so the rename itself
    # is persisted. After a crash we see either the old .part (resumable) or
    # the complete final file — never a torn state.
    upath = upload_dir(upload_id)
    p = part_path(upload_id)
    f = final_path(upload_id)
    if _exists(p):
        # Flush pending bytes to stable storage before the rename.
        with open(p, "rb+") as fp:
            fp.flush()
            os.fsync(fp.fileno())
        os.replace(p, f)
        _fsync_dir(upath)
    # Record completion in info.json (itself written atomically).
    info = _load_info_or_404(upload_id)
    info["completed"] = True
    _set_info(upload_id, info)
def _current_offsets(upload_id: str):
    """Derive upload state purely from disk: (has_final, has_part, offset).

    Crash recovery hinges on this — the offset is whatever bytes actually
    reached `data` or `data.part`, not any in-memory counter.
    """
    final_file = final_path(upload_id)
    if _exists(final_file):
        return True, False, _size(final_file)
    part_file = part_path(upload_id)
    if _exists(part_file):
        return False, True, _size(part_file)
    return False, False, 0
def _parse_concat_header(h: Optional[str]) -> Optional[Dict[str, Any]]:
if not h:
return None
h = h.strip()
if h == "partial":
return {"type": "partial", "parts": []}
if h.startswith("final;"):
# format: final;/files/a /files/b
rest = h[len("final;"):].strip()
urls = [s for s in rest.split(" ") if s]
return {"type": "final", "parts": urls}
return None
def _extract_upload_id_from_url(url: str) -> str:
# Accept relative /files/{id} (common) — robust split:
segs = [s for s in url.split("/") if s]
return segs[-1] if segs else url
def _sum_lengths_or_none(ids: List[str]) -> Optional[int]:
    """Sum the declared lengths of the given uploads.

    Returns None as soon as any upload's length is unknown (deferred), since
    the final length cannot be computed yet. Raises 404 (via the loader) if a
    referenced upload does not exist.
    """
    total = 0
    for pid in ids:
        length = _load_info_or_404(pid).get("length")
        if length is None:
            return None
        total += int(length)
    return total
async def _stream_with_checksum_and_append(file_obj, request: Request, algo: Optional[str]) -> int:
    """Stream request body to file, verifying checksum if header present.

    file_obj: open binary file positioned at the append point (caller holds the
        upload lock and has already seeked to EOF).
    request: FastAPI request whose body is consumed chunk-by-chunk.
    algo: checksum algorithm token parsed from Upload-Checksum, or None.
    Returns bytes written. On checksum mismatch, truncate to original size and
    raise HTTPException(460)."""
    # Remember the starting offset so a failed checksum can roll the file back.
    start_pos = file_obj.tell()
    # Choose hash
    hasher = None
    provided_digest = None
    if algo:
        if algo not in CHECKSUM_ALGOS:
            raise HTTPException(400, "Unsupported checksum algorithm")
        if algo == "sha1":
            hasher = hashlib.sha1()
        # elif algo == "md5": hasher = hashlib.md5()
        # elif algo == "crc32": ... (custom)
    # Read expected checksum
    if hasher:
        uh = request.headers.get("Upload-Checksum")
        if not uh:
            # spec: checksum header optional; if algo passed to this fn we must have parsed it already
            pass
        else:
            try:
                # Header shape: "<algo> <base64digest>" — algo must match the one we chose.
                name, b64 = uh.split(" ", 1)
                if name != algo:
                    raise ValueError()
                provided_digest = base64.b64decode(b64.encode("ascii"))
            except Exception:
                raise HTTPException(400, "Invalid Upload-Checksum")
    written = 0
    # Stream without buffering the whole body; hash the exact bytes received.
    async for chunk in request.stream():
        if not chunk:
            continue
        file_obj.write(chunk)
        if hasher:
            hasher.update(chunk)
        written += len(chunk)
    # Verify checksum if present
    if hasher and provided_digest is not None:
        digest = hasher.digest()
        if digest != provided_digest:
            # rollback appended bytes — per spec the chunk MUST be discarded
            # and the offset left unchanged on 460.
            file_obj.truncate(start_pos)
            file_obj.flush()
            os.fsync(file_obj.fileno())
            raise HTTPException(status_code=460, detail="Checksum Mismatch")
    # Durably flush accepted bytes before reporting the new offset.
    file_obj.flush()
    os.fsync(file_obj.fileno())
    return written
def _try_finalize_final(upload_id: str):
    """If this is a final upload and all partials are completed, build final data and finalize atomically.

    Safe to call repeatedly (idempotent): it returns early unless the upload is
    a not-yet-completed "final" whose partials are all done and present on disk.
    Called at final-creation time and re-checked from HEAD.
    """
    info = _load_info_or_404(upload_id)
    if info.get("kind") != "final" or info.get("completed"):
        return
    part_ids = info.get("parts", [])
    # Check all partials completed and have data
    for pid in part_ids:
        pinf = _load_info_or_404(pid)
        if not pinf.get("completed"):
            return  # still not ready
        if not _exists(final_path(pid)):
            # tolerate leftover .part (e.g., if completed used .part->data). If data missing, can't finalize.
            return
    # Build final .part by concatenating parts' data in order, then atomically rename
    up = upload_dir(upload_id)
    os.makedirs(up, exist_ok=True)
    ppath = part_path(upload_id)
    # Reset/overwrite .part — a crash mid-build just leaves a .part that is
    # rebuilt from scratch on the next call.
    with open(ppath, "wb") as out:
        for pid in part_ids:
            # Copy each partial in 1 MiB chunks to bound memory use.
            with open(final_path(pid), "rb") as src:
                for chunk in iter(lambda: src.read(1024 * 1024), b""):
                    out.write(chunk)
        out.flush()
        os.fsync(out.fileno())
    # If server can compute length now, set it
    length = _sum_lengths_or_none(part_ids)
    info["length"] = length if length is not None else info.get("length")
    _set_info(upload_id, info)
    # Atomic .part -> data rename plus completed=True in info.json.
    _atomic_finalize_file(upload_id)
# -----------------------------
# Routes
# -----------------------------
@app.options("/files")
async def tus_options():
    """Capability discovery: advertise protocol version, extensions, size limit
    and checksum algorithms. Per spec, OPTIONS needs no Tus-Resumable header."""
    return Response(
        status_code=204,
        headers={
            "Tus-Version": TUS_VERSION,
            "Tus-Extension": TUS_EXTENSIONS,
            "Tus-Max-Size": str(MAX_SIZE),
            "Tus-Checksum-Algorithm": ",".join(CHECKSUM_ALGOS),
        },
    )
@app.post("/files")
async def tus_create(request: Request):
    """Create an upload resource (core creation + creation-with-upload + concatenation).

    Three modes, selected by the Upload-Concat header:
      - absent  → single upload: needs Upload-Length or Upload-Defer-Length: 1
      - partial → must have Upload-Length, must not defer
      - final   → must not carry any length header; length is derived from partials
    Responds 201 with a Location header; creation-with-upload also returns Upload-Offset.
    """
    _ensure_tus_version(request)
    metadata = _parse_metadata(request.headers.get("Upload-Metadata"))
    concat = _parse_concat_header(request.headers.get("Upload-Concat"))
    # Validate creation modes
    hdr_len = request.headers.get("Upload-Length")
    hdr_defer = request.headers.get("Upload-Defer-Length")
    if concat and concat["type"] == "partial":
        # Partial MUST have Upload-Length (spec)
        if hdr_len is None:
            raise HTTPException(400, "Partial uploads require Upload-Length")
        if hdr_defer is not None:
            raise HTTPException(400, "Partial uploads cannot defer length")
    elif concat and concat["type"] == "final":
        # Final MUST NOT include Upload-Length
        if hdr_len is not None or hdr_defer is not None:
            raise HTTPException(400, "Final uploads must not include Upload-Length or Upload-Defer-Length")
    else:
        # Normal single upload: require length or defer
        if hdr_len is None and hdr_defer != "1":
            raise HTTPException(400, "Must provide Upload-Length or Upload-Defer-Length: 1")
    # Parse length
    length: Optional[int] = None
    defer = False
    if hdr_len is not None:
        try:
            length = int(hdr_len)
            if length < 0: raise ValueError()
        except ValueError:
            raise HTTPException(400, "Invalid Upload-Length")
        # 413 when the declared size exceeds the advertised Tus-Max-Size.
        if length > MAX_SIZE:
            raise HTTPException(413, "Upload too large")
    elif not concat or concat["type"] != "final":
        # final has no length at creation
        defer = (hdr_defer == "1")
    upload_id = str(uuid.uuid4())
    udir = upload_dir(upload_id)
    _ensure_dir(udir)
    if concat and concat["type"] == "final":
        # Resolve part ids from URLs
        part_ids = [_extract_upload_id_from_url(u) for u in concat["parts"]]
        # Compute length if possible
        sum_len = _sum_lengths_or_none(part_ids)
        info = _new_upload_info(upload_id, "final", sum_len, False, metadata, part_ids)
        _set_info(upload_id, info)
        # Prepare empty .part (will be filled when partials complete)
        with open(part_path(upload_id), "wb") as f:
            f.flush(); os.fsync(f.fileno())
        _fsync_dir(udir)
        # If all partials already complete, finalize immediately
        _try_finalize_final(upload_id)
        return Response(status_code=201,
                        headers={"Location": f"/files/{upload_id}",
                                 "Tus-Resumable": TUS_VERSION})
    # Create partial or single
    kind = "partial" if (concat and concat["type"] == "partial") else "single"
    info = _new_upload_info(upload_id, kind, length, defer, metadata)
    _set_info(upload_id, info)
    # Create empty .part
    with open(part_path(upload_id), "wb") as f:
        f.flush(); os.fsync(f.fileno())
    _fsync_dir(udir)
    # Creation-With-Upload (optional body)
    upload_offset = 0
    # NOTE(review): Content-Length/Transfer-Encoding presence is used as the
    # body signal; Content-Length: 0 is truthy-as-string here — confirm intended.
    has_body = request.headers.get("Content-Length") or request.headers.get("Transfer-Encoding")
    if has_body:
        ctype = request.headers.get("Content-Type", "")
        if ctype != "application/offset+octet-stream":
            raise HTTPException(415, "Content-Type must be application/offset+octet-stream for creation-with-upload")
        # Checksum header optional; if present, parse algo token
        uh = request.headers.get("Upload-Checksum")
        algo = None
        if uh:
            try:
                algo = uh.split(" ", 1)[0]
            except Exception:
                raise HTTPException(400, "Invalid Upload-Checksum")
        # Serialize with any concurrent PATCH on the same id, append from EOF.
        async with _lock_for(upload_id):
            with open(part_path(upload_id), "ab+") as f:
                f.seek(0, os.SEEK_END)
                upload_offset = await _stream_with_checksum_and_append(f, request, algo)
        # If length known and we hit it, finalize
        inf = _load_info_or_404(upload_id)
        if inf["length"] is not None and upload_offset == int(inf["length"]):
            _atomic_finalize_file(upload_id)
    # If this is a partial that belongs to some final, a watcher could finalize final; here we rely on
    # client to create final explicitly (spec). Finalization of final is handled by _try_finalize_final
    # when final resource is created (or rechecked on subsequent HEAD/PATCH).
    headers = {"Location": f"/files/{upload_id}", "Tus-Resumable": TUS_VERSION}
    if upload_offset:
        headers["Upload-Offset"] = str(upload_offset)
    return Response(status_code=201, headers=headers)
@app.head("/files/{upload_id}")
async def tus_head(upload_id: str, request: Request):
    """Report upload progress per tus HEAD semantics.

    Always returns Tus-Resumable and Cache-Control: no-store, plus
    Upload-Metadata / Upload-Length / Upload-Defer-Length when known.
    For a final (concatenated) upload that is not yet complete, the
    response deliberately omits Upload-Offset as the concatenation
    extension requires; otherwise Upload-Offset is the current on-disk
    size of the finalized file or the in-progress ``.part`` file.

    Raises:
        HTTPException 404: unknown upload id (via ``_load_info_or_404``).
        HTTPException 410: info record exists but no data file remains.
    """
    _ensure_tus_version(request)
    info = _load_info_or_404(upload_id)

    headers = {
        "Tus-Resumable": TUS_VERSION,
        # Progress responses must never be cached by intermediaries.
        "Cache-Control": "no-store",
    }
    if info.get("metadata"):
        headers["Upload-Metadata"] = info["metadata"]
    if info.get("length") is not None:
        headers["Upload-Length"] = str(int(info["length"]))
    elif info.get("defer_length"):
        headers["Upload-Defer-Length"] = "1"

    if info.get("kind") == "final" and not info.get("completed"):
        # Partials may have completed after a crash or after the final
        # resource was created; try a lazy finalization now.
        _try_finalize_final(upload_id)
        info = _load_info_or_404(upload_id)
        if not info.get("completed"):
            # In-progress final: SHOULD NOT include Upload-Offset.
            return Response(status_code=200, headers=headers)

    # Partials and completed finals: offset is simply the on-disk size.
    # (Fix: dropped the original's unused exists_final/exists_part locals
    # and the dead `pass` branch.)
    f = final_path(upload_id)
    p = part_path(upload_id)
    if _exists(f):
        offset = _size(f)
    elif _exists(p):
        offset = _size(p)
    else:
        # Metadata without any data file: treat the upload as gone.
        raise HTTPException(410, "Upload gone")
    headers["Upload-Offset"] = str(offset)
    return Response(status_code=200, headers=headers)
@app.patch("/files/{upload_id}")
async def tus_patch(upload_id: str, request: Request):
    """Append a body chunk to an in-progress upload (tus core PATCH).

    Validates Content-Type and the client-supplied Upload-Offset against
    the server-side size of the ``.part`` file, optionally sets a
    previously deferred Upload-Length (creation-defer-length extension),
    parses an optional Upload-Checksum algorithm token, appends the body
    under the per-upload lock, and finalizes the file when the declared
    length is reached exactly.

    Raises:
        HTTPException 403: upload is final (never patched) or already finalized.
        HTTPException 404: no ``.part`` file for this id.
        HTTPException 400: bad Upload-Offset / Upload-Length / Upload-Checksum.
        HTTPException 413: deferred length exceeds MAX_SIZE.
        HTTPException 415: wrong Content-Type.

    Returns 409 on an offset mismatch, 204 with the new Upload-Offset on
    success.
    """
    _ensure_tus_version(request)
    info = _load_info_or_404(upload_id)

    # Final (concatenated) uploads are assembled server-side and never
    # accept data directly (tus concatenation extension).
    if info.get("kind") == "final":
        raise HTTPException(403, "Final uploads cannot be patched")

    if request.headers.get("Content-Type", "") != "application/offset+octet-stream":
        raise HTTPException(415, "Content-Type must be application/offset+octet-stream")

    # The client's view of the offset must exactly match the server's.
    try:
        client_offset = int(request.headers.get("Upload-Offset", "-1"))
        if client_offset < 0:
            raise ValueError()
    except ValueError:
        raise HTTPException(400, "Invalid or missing Upload-Offset")

    # creation-defer-length: client may supply Upload-Length once, later.
    if info.get("length") is None and info.get("defer_length"):
        if "Upload-Length" in request.headers:
            try:
                new_len = int(request.headers["Upload-Length"])
                if new_len < 0:
                    raise ValueError()
            except ValueError:
                raise HTTPException(400, "Invalid Upload-Length")
            if new_len > MAX_SIZE:
                raise HTTPException(413, "Upload too large")
            info["length"] = new_len
            info["defer_length"] = False
            _set_info(upload_id, info)

    # Determine current server offset from on-disk state.
    if _exists(final_path(upload_id)):
        raise HTTPException(403, "Upload already finalized")
    p = part_path(upload_id)
    if not _exists(p):
        raise HTTPException(404, "Upload not found")
    server_offset = _size(p)
    if client_offset != server_offset:
        # Fix: the 409 previously lacked Tus-Resumable, which the tus spec
        # requires on every response except OPTIONS.
        return Response(status_code=409, headers={"Tus-Resumable": TUS_VERSION})

    # Optional checksum header "<algo> <base64digest>"; only the algorithm
    # token is parsed here — verification happens in the streaming helper,
    # which rolls back (truncates) on mismatch.
    uh = request.headers.get("Upload-Checksum")
    algo = None
    if uh:
        try:
            algo = uh.split(" ", 1)[0]
        except Exception:
            raise HTTPException(400, "Invalid Upload-Checksum")

    # Append data under the per-upload lock.
    async with _lock_for(upload_id):
        with open(p, "ab+") as fobj:
            fobj.seek(0, os.SEEK_END)
            written = await _stream_with_checksum_and_append(fobj, request, algo)
        new_offset = server_offset + written
        # If the (possibly just-set) length is reached exactly, finalize.
        info = _load_info_or_404(upload_id)  # reload: length may have changed
        if info.get("length") is not None and new_offset == int(info["length"]):
            _atomic_finalize_file(upload_id)
        # NOTE(review): if this partial belongs to a final upload, that
        # final is only finalized lazily when HEAD is issued on it — no
        # reverse index is maintained here.

    return Response(
        status_code=204,
        headers={"Tus-Resumable": TUS_VERSION, "Upload-Offset": str(new_offset)},
    )
@app.delete("/files/{upload_id}")
async def tus_delete(upload_id: str, request: Request):
    """Terminate an upload (tus termination extension).

    Removes the partial, final, and info files for ``upload_id`` and then
    attempts to remove its directory. Idempotent: already-missing files
    and a missing/non-empty directory are silently ignored, and 204 is
    always returned.
    """
    _ensure_tus_version(request)
    async with _lock_for(upload_id):
        udir = upload_dir(upload_id)
        victims = (
            part_path(upload_id),
            final_path(upload_id),
            info_path(upload_id),
        )
        for victim in victims:
            try:
                os.remove(victim)
            except FileNotFoundError:
                # Already gone — deletion is idempotent.
                pass
        try:
            os.rmdir(udir)
        except OSError:
            # Directory absent or not empty; either way, nothing to do.
            pass
    return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION})
```
---
## Quick Client Examples (manual)
```bash
# OPTIONS
curl -i -X OPTIONS http://localhost:8080/files
# 1) Single upload (known length)
curl -i -X POST http://localhost:8080/files \
-H "Tus-Resumable: 1.0.0" \
-H "Upload-Length: 11" \
-H "Upload-Metadata: filename Zm9vLnR4dA=="
# → Location: /files/<ID>
# Upload with checksum (sha1 of "hello ")
printf "hello " | curl -i -X PATCH http://localhost:8080/files/<ID> \
-H "Tus-Resumable: 1.0.0" \
-H "Content-Type: application/offset+octet-stream" \
-H "Upload-Offset: 0" \
-H "Upload-Checksum: sha1 L6v8xR3Lw4N2n9kQox3wL7G0m/I=" \
--data-binary @-
# (Replace digest with correct base64 for your chunk)
# 2) Concatenation
# Create partial A (5 bytes)
curl -i -X POST http://localhost:8080/files \
-H "Tus-Resumable: 1.0.0" \
-H "Upload-Length: 5" \
-H "Upload-Concat: partial"
# → Location: /files/<A>
printf "hello" | curl -i -X PATCH http://localhost:8080/files/<A> \
-H "Tus-Resumable: 1.0.0" \
-H "Content-Type: application/offset+octet-stream" \
-H "Upload-Offset: 0" \
--data-binary @-
# Create partial B (6 bytes)
curl -i -X POST http://localhost:8080/files \
-H "Tus-Resumable: 1.0.0" \
-H "Upload-Length: 6" \
-H "Upload-Concat: partial"
# → Location: /files/<B>
printf " world" | curl -i -X PATCH http://localhost:8080/files/<B> \
-H "Tus-Resumable: 1.0.0" \
-H "Content-Type: application/offset+octet-stream" \
-H "Upload-Offset: 0" \
--data-binary @-
# Create final (may be before or after partials complete)
curl -i -X POST http://localhost:8080/files \
-H "Tus-Resumable: 1.0.0" \
-H "Upload-Concat: final; /files/<A> /files/<B>"
# HEAD on final will eventually show Upload-Offset once finalized
curl -i -X HEAD http://localhost:8080/files/<FINAL> -H "Tus-Resumable: 1.0.0"
```
---
## Implementation Notes (agent hints)
* **Durability:** every data write `fsync(file)`; after `os.replace` of `*.part → data` or `info.json.tmp → info.json`, also `fsync(parent)`.
* **Checksum:** verify against **this request's** body only; on mismatch, **truncate back** to the previous size and return `460`.
* **Concatenation:** final upload is never `PATCH`ed. The server builds `final.data.part` by concatenating each partial's **final file** in order, then atomically renames and marks it completed. It's triggered lazily in `HEAD` of the final (and right after creation).
* **Crash Recovery:** offset = `size(data.part)` or `size(data)`; `info.json` is canonical for `kind`, `length`, `defer_length`, `completed`, `parts`.
* **Multi-process deployments:** replace `asyncio.Lock` with file locks (`fcntl.flock`) per `upload_id` to synchronize across workers.

View File

@@ -0,0 +1,229 @@
```bash
npm install @uppy/react
```
## Components
Pre-composed, plug-and-play components:
<Dashboard /> renders @uppy/dashboard
<DashboardModal /> renders @uppy/dashboard as a modal
<DragDrop /> renders @uppy/drag-drop
<ProgressBar /> renders @uppy/progress-bar
<StatusBar /> renders @uppy/status-bar
more info see https://uppy.io/docs/react
we use tus server for the upload support
npm install @uppy/tus
e.g.
import Uppy from '@uppy/core';
import Dashboard from '@uppy/dashboard';
import Tus from '@uppy/tus';
import '@uppy/core/dist/style.min.css';
import '@uppy/dashboard/dist/style.min.css';
new Uppy()
.use(Dashboard, { inline: true, target: 'body' })
========================
CODE SNIPPETS
========================
TITLE: React Dashboard Modal Example with TUS
DESCRIPTION: Demonstrates how to use the DashboardModal component from @uppy/react with the Tus plugin for resumable uploads.
LANGUAGE: jsx
CODE:
```
/** @jsx React */
import React from 'react'
import Uppy from '@uppy/core'
import { DashboardModal } from '@uppy/react'
import Tus from '@uppy/tus'
const uppy = new Uppy({ debug: true, autoProceed: false })
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
class Example extends React.Component {
state = { open: false }
render() {
const { open } = this.state
return (
<DashboardModal
uppy={uppy}
open={open}
onRequestClose={this.handleClose}
/>
)
}
// ..snip..
}
```
----------------------------------------
TITLE: Installation using npm for @uppy/react
DESCRIPTION: Provides the command to install the @uppy/react package using npm.
LANGUAGE: bash
CODE:
```
$ npm install @uppy/react @uppy/core @uppy/dashboard @uppy/tus
```
----------------------------------------
TITLE: Uppy Dashboard and Tus Integration Example (HTML & JavaScript)
DESCRIPTION: This snippet demonstrates how to initialize Uppy with the Dashboard and Tus plugins, configure them, and handle upload success events.
LANGUAGE: html
CODE:
```
<html>
<head>
<link rel="stylesheet" href="https://releases.transloadit.com/uppy/v4.18.0/uppy.min.css" />
</head>
<body>
<div class="DashboardContainer"></div>
<button class="UppyModalOpenerBtn">Upload</button>
<div class="uploaded-files">
<h5>Uploaded files:</h5>
<ol></ol>
</div>
</body>
<script type="module">
import { Uppy, Dashboard, Tus } from 'https://releases.transloadit.com/uppy/v4.18.0/uppy.min.mjs'
var uppy = new Uppy({
debug: true,
autoProceed: false,
})
.use(Dashboard, {
browserBackButtonClose: false,
height: 470,
inline: false,
replaceTargetContent: true,
showProgressDetails: true,
target: '.DashboardContainer',
trigger: '.UppyModalOpenerBtn',
})
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
.on('upload-success', function (file, response) {
var url = response.uploadURL
var fileName = file.name
document.querySelector('.uploaded-files ol').innerHTML +=
'<li><a href="' + url + '" target="_blank">' + fileName + '</a></li>'
})
</script>
</html>
```
----------------------------------------
TITLE: Initialize Uppy with Tus Plugin (JavaScript)
DESCRIPTION: Demonstrates how to initialize Uppy and configure the Tus plugin for resumable uploads.
LANGUAGE: js
CODE:
```
import Uppy from '@uppy/core'
import Tus from '@uppy/tus'
const uppy = new Uppy()
uppy.use(Tus, {
endpoint: 'https://tusd.tusdemo.net/files/', // use your tus endpoint here
resume: true,
retryDelays: [0, 1000, 3000, 5000],
})
```
----------------------------------------
TITLE: Uppy Core Initialization and Plugin Usage (JavaScript)
DESCRIPTION: This example demonstrates how to initialize Uppy with core functionality and integrate the Tus plugin. It also shows how to listen for upload completion events.
LANGUAGE: javascript
CODE:
```
import Uppy from '@uppy/core'
import Dashboard from '@uppy/dashboard'
import Tus from '@uppy/tus'
const uppy = new Uppy()
.use(Dashboard, { trigger: '#select-files' })
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
.on('complete', (result) => {
console.log('Upload result:', result)
})
```
----------------------------------------
TITLE: Uppy XHRUpload Configuration (JavaScript)
DESCRIPTION: This snippet shows the basic JavaScript configuration for Uppy, initializing it with the XHRUpload plugin to send files to a specified endpoint.
LANGUAGE: javascript
CODE:
```
import Uppy from '@uppy/core';
import XHRUpload from '@uppy/xhr-upload';
const uppy = new Uppy({
debug: true,
autoProceed: false,
restrictions: {
maxFileSize: 100000000,
maxNumberOfFiles: 10,
allowedFileTypes: ['image/*', 'video/*']
}
});
uppy.use(XHRUpload, {
endpoint: 'YOUR_UPLOAD_ENDPOINT_URL',
fieldName: 'files[]',
method: 'post'
});
uppy.on('complete', (result) => {
console.log('Upload complete:', result);
});
uppy.on('error', (error) => {
console.error('Upload error:', error);
});
```
----------------------------------------
TITLE: Install Uppy Core Packages for TUS
DESCRIPTION: Installs the core Uppy package along with the Dashboard and Tus plugins using npm.
LANGUAGE: bash
CODE:
```
npm install @uppy/core @uppy/dashboard @uppy/tus @uppy/xhr-upload
```
========================
QUESTIONS AND ANSWERS
========================
TOPIC: Uppy React Components
Q: What is the purpose of the @uppy/react package?
A: The @uppy/react package provides React component wrappers for Uppy's officially maintained UI plugins. It allows developers to easily integrate Uppy's file uploading capabilities into their React applications.
----------------------------------------
TOPIC: Uppy React Components
Q: How can @uppy/react be installed in a project?
A: The @uppy/react package can be installed using npm with the command '$ npm install @uppy/react'.
----------------------------------------
TOPIC: Uppy React Components
Q: Where can I find more detailed documentation for the @uppy/react plugin?
A: More detailed documentation for the @uppy/react plugin is available on the Uppy website at https://uppy.io/docs/react.

View File

@@ -3,17 +3,27 @@
import freeflowuniverse.herolib.core.playcmds
import freeflowuniverse.herolib.clients.giteaclient
// Configure PostgreSQL client
// heroscript := "
// !!giteaclient.configure
// url: 'git.ourworld.tf'
// user: 'despiegk'
// secret: ''
// "
heroscript := "
!!giteaclient.configure
name: 'default'
url: 'git.ourworld.tf'
user: 'despiegk'
secret: '1'
// // Process the heroscript configuration
!!giteaclient.configure
name: 'two'
url: 'git.ourworld.tf'
user: 'despiegk2'
secret: '2'
"
// Process the heroscript configuration
// playcmds.play(heroscript: heroscript, emptycheck: false)!
println(giteaclient.list(fromdb: true)!)
//$dbg;
// Get the configured client
mut client := giteaclient.get()!
@@ -37,4 +47,3 @@ println('Found ${issues.len} issues.')
for issue in issues {
println(' #${issue.number}: ${issue.title}')
}

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.clients.zinit_rpc
import freeflowuniverse.herolib.clients.zinit
import freeflowuniverse.herolib.installers.infra.zinit_installer
import os
import time
@@ -9,34 +10,33 @@ import time
println('=== Zinit RPC Client Example ===\n')
// Start Zinit in the background
println('Starting Zinit in background...')
mut zinit_process := os.new_process('/usr/local/bin/zinit')
zinit_process.set_args(['init'])
zinit_process.set_redirect_stdio()
zinit_process.run()
// // Start Zinit in the background
// println('Starting Zinit in background...')
// mut zinit_process := os.new_process('/usr/local/bin/zinit')
// zinit_process.set_args(['init'])
// zinit_process.set_redirect_stdio()
// zinit_process.run()
// Wait a moment for Zinit to start up
time.sleep(2000 * time.millisecond)
println(' Zinit started')
// time.sleep(2000 * time.millisecond)
// println('✓ Zinit started')
// Ensure we clean up Zinit when done
defer {
println('\nCleaning up...')
zinit_process.signal_kill()
zinit_process.wait()
println(' Zinit stopped')
}
// defer {
// println('\nCleaning up...')
// zinit_process.signal_kill()
// zinit_process.wait()
// println('✓ Zinit stopped')
// }
// mut installer := zinit_installer.get()!
// installer.install()!
// installer.start()!
// Create a new client
mut client := zinit_rpc.new_client(
name: 'example_client'
socket_path: '/tmp/zinit.sock'
) or {
println('Failed to create client: ${err}')
println('Make sure Zinit is running and the socket exists at /tmp/zinit.sock')
exit(1)
}
mut client := zinit.new()!
println(client)
println(' Created Zinit RPC client')
@@ -66,7 +66,7 @@ for service_name, state in services {
// 3. Create a test service configuration
println('\n3. Creating a test service...')
test_service_name := 'test_echo_service'
config := zinit_rpc.ServiceConfig{
config := zinit.ServiceConfig{
exec: '/bin/echo "Hello from test service"'
oneshot: true
log: 'stdout'
@@ -147,7 +147,7 @@ println('\n8. Getting service statistics...')
stats := client.service_stats(test_service_name) or {
println('Failed to get service stats (service might not be running): ${err}')
// Continue anyway
zinit_rpc.ServiceStats{}
zinit.ServiceStats{}
}
if stats.name != '' {
println(' Service statistics:')
@@ -208,7 +208,7 @@ if subscription_id != 0 {
// Get fresh status to make sure service is still running
fresh_status := client.service_status(test_service_name) or {
println('\n12. Skipping signal test (cannot get service status)')
zinit_rpc.ServiceStatus{}
zinit.ServiceStatus{}
}
if fresh_status.state == 'Running' && fresh_status.pid > 0 {
println('\n12. Sending SIGTERM signal to service...')
@@ -258,7 +258,6 @@ server_result := client.system_start_http_server('127.0.0.1:9999') or {
}
if server_result != '' {
println(' HTTP server started: ${server_result}')
// Stop the HTTP server
client.system_stop_http_server() or { println('Failed to stop HTTP server: ${err}') }
println(' HTTP server stopped')

View File

@@ -3,14 +3,20 @@
import freeflowuniverse.herolib.core.generator.generic as generator
import freeflowuniverse.herolib.core.pathlib
mut args := generator.GeneratorArgs{
path: '~/code/github/freeflowuniverse/herolib/lib/clients/postgresql_client'
force: true
}
// mut args := generator.GeneratorArgs{
// path: '~/code/github/freeflowuniverse/herolib/lib'
// path: '~/code/github/freeflowuniverse/herolib/lib/clients'
// force: true
// }
// mut args2 := generator.GeneratorArgs{
// path: '~/code/github/freeflowuniverse/herolib/lib/installers/lang/rust'
// force: true
// }
// generator.scan(args2)!
mut args := generator.GeneratorArgs{
path: '~/code/github/freeflowuniverse/herolib/lib/installers'
force: true
}
generator.scan(args)!

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.infra.zinit_installer

View File

@@ -1,14 +1,20 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.installers.net.mycelium_installer
import freeflowuniverse.herolib.clients.mycelium
mut installer := mycelium_installer.get()!
mut installer := mycelium_installer.get(create: true)!
println(installer)
installer.start()!
$dbg;
mut r := mycelium.inspect()!
println(r)
// $dbg;
mut client := mycelium.get()!
// Send a message to a node by public key

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.lib.lang.codewalker
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.osal.core as osal
// Create test directory structure in /tmp/filemap
test_source := '/tmp/filemap'
test_destination := '/tmp/filemap2'
// Clean up any existing test directories
osal.rm(todelete: test_source)!
osal.rm(todelete: test_destination)!
// Create source directory
mut source_dir := pathlib.get(test_source)!
source_dir.dir_ensure()!
// Create test files with content
mut file1 := source_dir.join('file1.txt')!
file1.write('Content of file 1')!
mut subdir := source_dir.join('subdir')!
subdir.dir_ensure()!
mut file2 := subdir.join('file2.txt')!
file2.write('Content of file 2')!
mut file3 := subdir.join('file3.md')!
file3.write('# Markdown file content')!
println('Test files created in ${test_source}')
// Create CodeWalker instance
mut cw := codewalker.new(name: 'test', source: test_source)!
// Verify files are in the map
println('\nFiles in filemap:')
cw.filemap.write()
// Export files to destination
cw.filemap.export(test_destination)!
println('\nFiles exported to ${test_destination}')
// Verify export by listing files in destination
mut dest_dir := pathlib.get(test_destination)!
if dest_dir.exists() {
mut files := dest_dir.list(recursive: true)!
println('\nFiles in destination directory:')
for file in files {
if file.is_file() {
println(' ${file.path}')
println(' Content: ${file.read()!}')
}
}
println('\nExport test completed successfully!')
} else {
println('\nError: Destination directory was not created')
}

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.lang.python
import json

View File

@@ -22,7 +22,7 @@ if repos.len() > 0 {
if repo_array.len() > 0 {
let repo = repo_array[0];
print("\nRepository path: " + get_repo_path(repo));
print("\nRepository path: " + path(repo));
// Check if the repository has changes
let has_changes = has_changes(repo);

View File

@@ -12,6 +12,9 @@ const action_priorities = {
}
pub fn play(mut plbook PlayBook) ! {
if plbook.exists(filter: 'bizmodel.') == false {
return
}
// group actions by which bizmodel they belong to
actions_by_biz := arrays.group_by[string, &Action](plbook.find(filter: 'bizmodel.*')!,
fn (a &Action) string {

View File

@@ -3,6 +3,7 @@ module giteaclient
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
giteaclient_global map[string]&GiteaClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&GiteaClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&GiteaClient {
mut obj := GiteaClient{
name: args.name
}
if args.name !in giteaclient_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&GiteaClient {
mut context := base.context()!
giteaclient_default = args.name
if args.fromdb || args.name !in giteaclient_global {
mut r := context.redis()!
if r.hexists('context:giteaclient', args.name)! {
data := r.hget('context:giteaclient', args.name)!
if data.len == 0 {
return error('GiteaClient with name: giteaclient does not exist, prob bug.')
}
mut obj := json.decode(GiteaClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('giteaclient', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("GiteaClient with name 'giteaclient' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return giteaclient_global[args.name] or {
println(giteaclient_global)
// bug if we get here because should be in globals
panic('could not get config for giteaclient with name, is bug:${args.name}')
return error('could not get config for giteaclient with name:giteaclient')
}
}
// register the config for the future
pub fn set(o GiteaClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
giteaclient_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('giteaclient', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:giteaclient', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('giteaclient', args.name)
mut r := context.redis()!
return r.hexists('context:giteaclient', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('giteaclient', args.name)!
if args.name in giteaclient_global {
// del giteaclient_global[args.name]
mut r := context.redis()!
r.hdel('context:giteaclient', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&GiteaClient {
mut res := []&GiteaClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
giteaclient_global = map[string]&GiteaClient{}
giteaclient_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:giteaclient')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in giteaclient_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o GiteaClient) ! {
fn set_in_mem(o GiteaClient) !GiteaClient {
mut o2 := obj_init(o)!
giteaclient_global[o.name] = &o2
giteaclient_default = o.name
giteaclient_global[o2.name] = &o2
giteaclient_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'giteaclient.') {
return
}
mut install_actions := plbook.find(filter: 'giteaclient.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
giteaclient_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -20,7 +20,7 @@ pub mut:
fn (mut self GiteaClient) httpclient() !&httpconnection.HTTPConnection {
mut http_conn := httpconnection.new(
name: 'giteaclient_${self.name}'
url: self.url
url: self.url
)!
// Add authentication header if API key is provided
@@ -49,7 +49,7 @@ fn obj_init(mycfg_ GiteaClient) !GiteaClient {
if mycfg.url.ends_with('/api') {
mycfg.url = mycfg.url.replace('/api', '')
}
mycfg.url = "https://${mycfg.url}/api/v1"
mycfg.url = 'https://${mycfg.url}/api/v1'
if mycfg.secret.len == 0 {
return error('secret needs to be filled in for ${mycfg.name}')
@@ -59,10 +59,6 @@ fn obj_init(mycfg_ GiteaClient) !GiteaClient {
/////////////NORMALLY NO NEED TO TOUCH
pub fn heroscript_dumps(obj GiteaClient) !string {
return encoderhero.encode[GiteaClient](obj)!
}
pub fn heroscript_loads(heroscript string) !GiteaClient {
mut obj := encoderhero.decode[GiteaClient](heroscript)!
return obj

View File

@@ -7,14 +7,12 @@ import net.http
// List a user's own repositories
pub fn (mut client GiteaClient) user_list_repos() ![]Repository {
$dbg;
req := httpconnection.Request{
method: .get
prefix: '/user/repos'
}
mut http_client := client.httpclient()!
r:=http_client.get_json_list_generic[Repository](req)!
$dbg;
r := http_client.get_json_list_generic[Repository](req)!
return r
}

View File

@@ -5,41 +5,41 @@ import time
pub struct APIError {
pub:
message string
url string
url string
}
pub struct AccessToken {
pub:
id i64
name string
scopes []string
sha1 string
id i64
name string
scopes []string
sha1 string
token_last_eight string
}
pub struct ActionVariable {
pub:
owner_id i64
repo_id i64
name string
data string
repo_id i64
name string
data string
}
pub struct Activity {
pub:
act_user User
act_user User
act_user_id i64
comment Comment
comment_id i64
content string
created time.Time
id i64
is_private bool
op_type string
ref_name string
repo Repository
repo_id i64
user_id i64
comment Comment
comment_id i64
content string
created time.Time
id i64
is_private bool
op_type string
ref_name string
repo Repository
repo_id i64
user_id i64
}
pub struct AddCollaboratorOption {
@@ -49,460 +49,460 @@ pub:
pub struct AddTimeOption {
pub:
time i64
created time.Time
time i64
created time.Time
user_name string
}
pub struct AnnotatedTagObject {
pub:
sha string
typ string @[json: 'type'] // `type` is a keyword in V
typ string @[json: 'type'] // `type` is a keyword in V
url string
}
pub struct AnnotatedTag {
pub:
message string
object AnnotatedTagObject
sha string
tag string
tagger CommitUser
url string
message string
object AnnotatedTagObject
sha string
tag string
tagger CommitUser
url string
verification PayloadCommitVerification
}
pub struct Attachment {
pub:
browser_download_url string
created_at time.Time
download_count i64
id i64
name string
size i64
uuid string
created_at time.Time
download_count i64
id i64
name string
size i64
uuid string
}
pub struct Badge {
pub:
id i64
slug string
id i64
slug string
description string
image_url string
image_url string
}
pub struct Branch {
pub:
commit PayloadCommit
commit PayloadCommit
effective_branch_protection_name string
enable_status_check bool
name string
protected bool
required_approvals i64
status_check_contexts []string
user_can_merge bool
user_can_push bool
enable_status_check bool
name string
protected bool
required_approvals i64
status_check_contexts []string
user_can_merge bool
user_can_push bool
}
pub struct BranchProtection {
pub:
branch_name string
rule_name string
enable_push bool
enable_push_whitelist bool
push_whitelist_usernames []string
push_whitelist_teams []string
push_whitelist_deploy_keys bool
enable_merge_whitelist bool
merge_whitelist_usernames []string
merge_whitelist_teams []string
enable_status_check bool
status_check_contexts []string
required_approvals i64
enable_approvals_whitelist bool
approvals_whitelist_username []string
approvals_whitelist_teams []string
block_on_rejected_reviews bool
rule_name string
enable_push bool
enable_push_whitelist bool
push_whitelist_usernames []string
push_whitelist_teams []string
push_whitelist_deploy_keys bool
enable_merge_whitelist bool
merge_whitelist_usernames []string
merge_whitelist_teams []string
enable_status_check bool
status_check_contexts []string
required_approvals i64
enable_approvals_whitelist bool
approvals_whitelist_username []string
approvals_whitelist_teams []string
block_on_rejected_reviews bool
block_on_official_review_requests bool
block_on_outdated_branch bool
dismiss_stale_approvals bool
ignore_stale_approvals bool
require_signed_commits bool
protected_file_patterns string
unprotected_file_patterns string
created_at time.Time
updated_at time.Time
block_on_outdated_branch bool
dismiss_stale_approvals bool
ignore_stale_approvals bool
require_signed_commits bool
protected_file_patterns string
unprotected_file_patterns string
created_at time.Time
updated_at time.Time
}
pub struct ChangeFileOperation {
pub:
operation string // "create", "update", "delete"
path string
content string // base64 encoded
path string
content string // base64 encoded
from_path string
sha string
sha string
}
pub struct ChangeFilesOptions {
pub:
author Identity
branch string
committer Identity
dates CommitDateOptions
files []ChangeFileOperation
message string
author Identity
branch string
committer Identity
dates CommitDateOptions
files []ChangeFileOperation
message string
new_branch string
signoff bool
signoff bool
}
pub struct ChangedFile {
pub:
additions i64
changes i64
contents_url string
deletions i64
filename string
html_url string
additions i64
changes i64
contents_url string
deletions i64
filename string
html_url string
previous_filename string
raw_url string
status string
raw_url string
status string
}
pub struct Commit {
pub:
author User
commit RepoCommit
author User
commit RepoCommit
committer User
created time.Time
files []CommitAffectedFiles
html_url string
parents []CommitMeta
sha string
stats CommitStats
url string
created time.Time
files []CommitAffectedFiles
html_url string
parents []CommitMeta
sha string
stats CommitStats
url string
}
pub struct CommitAffectedFiles {
pub:
filename string
status string
status string
}
pub struct CommitDateOptions {
pub:
author time.Time
author time.Time
committer time.Time
}
pub struct CommitMeta {
pub:
created time.Time
sha string
url string
sha string
url string
}
pub struct CommitStats {
pub:
additions i64
deletions i64
total i64
total i64
}
pub struct CommitUser {
pub:
date string
date string
email string
name string
name string
}
pub struct Comment {
pub:
assets []Attachment
body string
created_at time.Time
html_url string
id i64
issue_url string
original_author string
assets []Attachment
body string
created_at time.Time
html_url string
id i64
issue_url string
original_author string
original_author_id i64
pull_request_url string
updated_at time.Time
user User
pull_request_url string
updated_at time.Time
user User
}
pub struct CreateIssueOption {
pub:
title string
assignee string
assignees []string
body string
closed bool
due_date time.Time
labels []i64
title string
assignee string
assignees []string
body string
closed bool
due_date time.Time
labels []i64
milestone i64
ref string
ref string
}
// CreateRepoOption is the request payload for creating a new repository.
pub struct CreateRepoOption {
pub:
	name               string // required repository name
	auto_init          bool   // initialize with a first commit
	default_branch     string
	description        string
	gitignores         string // gitignore template name
	issue_labels       string // issue label set name
	license            string // license template name
	object_format_name string // "sha1" or "sha256"
	private            bool
	readme             string // readme template name
	template           bool   // mark repository as a template
	trust_model        string // "default", "collaborator", "committer", "collaboratorcommitter"
}
// Identity is a minimal name/email pair used where a full user object is not needed.
pub struct Identity {
pub:
	email string
	name  string
}
// InternalTracker configures a repository's built-in issue tracker.
pub struct InternalTracker {
pub:
	allow_only_contributors_to_track_time bool // restrict time tracking to contributors
	enable_issue_dependencies             bool
	enable_time_tracker                   bool
}
// Issue represents an issue (or the issue side of a pull request).
pub struct Issue {
pub:
	id                 i64
	url                string
	html_url           string
	number             i64 // per-repository issue number
	user               User // issue creator
	original_author    string // preserved author name for migrated issues
	original_author_id i64
	title              string
	body               string // description (markdown)
	ref                string
	labels             []Label
	milestone          Milestone
	assignee           User   // single assignee (legacy field)
	assignees          []User
	state              string // StateType: "open" or "closed"
	is_locked          bool
	comments           i64 // number of comments
	created_at         time.Time
	updated_at         time.Time
	closed_at          time.Time
	due_date           time.Time
	pull_request       PullRequestMeta // set when this issue backs a pull request
	repository         RepositoryMeta  // owning repository summary
	assets             []Attachment
	pin_order          i64 // position when pinned, 0 if not pinned
}
// Label represents an issue/PR label.
pub struct Label {
pub:
	id          i64
	name        string
	exclusive   bool // scoped label: only one of its group may be applied
	is_archived bool
	color       string // hex color without leading '#'
	description string
	url         string
}
// Milestone represents a repository milestone.
pub struct Milestone {
pub:
	id            i64
	title         string
	description   string
	state         string // StateType: "open" or "closed"
	open_issues   i64
	closed_issues i64
	created_at    time.Time
	updated_at    time.Time
	closed_at     time.Time
	due_on        time.Time
}
// Organization represents an organization account.
pub struct Organization {
pub:
	avatar_url                   string
	description                  string
	email                        string
	full_name                    string
	id                           i64
	location                     string
	name                         string
	repo_admin_change_team_access bool // repo admins may change team access
	username                     string
	visibility                   string // e.g. "public", "limited", "private"
	website                      string
}
// PayloadCommitVerification describes the GPG/SSH signature verification of a commit.
pub struct PayloadCommitVerification {
pub:
	payload   string // signed payload
	reason    string // verification outcome reason
	signature string
	signer    PayloadUser // identity that produced the signature
	verified  bool
}
// PayloadCommit is the commit representation used in webhook payloads.
pub struct PayloadCommit {
pub:
	added        []string // paths added by the commit
	author       PayloadUser
	committer    PayloadUser
	id           string // commit sha
	message      string
	modified     []string // paths modified by the commit
	removed      []string // paths removed by the commit
	timestamp    time.Time
	url          string
	verification PayloadCommitVerification
}
// PayloadUser is the compact user representation used in webhook payloads.
pub struct PayloadUser {
pub:
	email    string
	name     string // full/display name
	username string // login name
}
// Permission describes the access level a user has on a repository.
pub struct Permission {
pub:
	admin bool // full administrative access
	pull  bool // read access
	push  bool // write access
}
// PullRequestMeta is the pull-request summary attached to an Issue.
pub struct PullRequestMeta {
pub:
	merged    bool
	merged_at time.Time
	draft     bool
	html_url  string
}
// RepoCommit is the git-level part of a commit (identities, message, tree, signature).
pub struct RepoCommit {
pub:
	author       CommitUser // git author identity
	committer    CommitUser // git committer identity
	message      string
	tree         CommitMeta // tree object reference
	url          string
	verification PayloadCommitVerification
}
// Repository represents a full repository object with counters, URLs,
// feature toggles and merge-policy settings.
pub struct Repository {
pub:
	id                                i64
	owner                             User
	name                              string
	full_name                         string // "owner/name"
	description                       string
	empty                             bool // repository has no commits yet
	private                           bool
	fork                              bool
	template                          bool
	parent_id                         i64 // id of the forked-from repository, 0 if none
	mirror                            bool
	size                              i64 // size on disk, unit per API (typically KiB)
	language                          string // dominant language
	languages_url                     string
	html_url                          string
	url                               string
	link                              string
	ssh_url                           string
	clone_url                         string
	website                           string
	stars_count                       i64
	forks_count                       i64
	watchers_count                    i64
	open_issues_count                 i64
	open_pr_counter                   i64
	release_counter                   i64
	default_branch                    string
	archived                          bool
	created_at                        time.Time
	updated_at                        time.Time
	archived_at                       time.Time
	permissions                       Permission // caller's access on this repository
	has_issues                        bool
	internal_tracker                  InternalTracker
	has_wiki                          bool
	has_pull_requests                 bool
	has_projects                      bool
	has_releases                      bool
	has_packages                      bool
	has_actions                       bool
	ignore_whitespace_conflicts       bool
	allow_merge_commits               bool
	allow_rebase                      bool
	allow_rebase_explicit             bool
	allow_squash_merge                bool
	allow_fast_forward_only_merge     bool
	allow_rebase_update               bool
	default_delete_branch_after_merge bool
	default_merge_style               string
	default_allow_maintainer_edit     bool
	avatar_url                        string
	internal                          bool // visible only to signed-in users
	mirror_interval                   string
	mirror_updated                    time.Time
	repo_transfer                     RepoTransfer // pending ownership transfer, if any
}
// RepositoryMeta is a compact repository reference embedded in other objects.
pub struct RepositoryMeta {
pub:
	id        i64
	name      string
	owner     string // owner login
	full_name string // "owner/name"
}
// Team represents an organization team and its permissions.
pub struct Team {
pub:
	can_create_org_repo       bool
	description               string
	id                        i64
	includes_all_repositories bool
	name                      string
	organization              Organization
	permission                string // e.g. "read", "write", "admin"
	units                     []string // enabled repo units, e.g. "repo.code"
	units_map                 map[string]string // unit -> permission level
}
// RepoTransfer describes a pending repository ownership transfer.
pub struct RepoTransfer {
pub:
	doer      User   // user who initiated the transfer
	recipient User   // user/org receiving the repository
	teams     []Team // teams granted access when accepted by an org
}
// User represents a platform user account.
pub struct User {
pub:
	id                  i64
	login               string // login/username used for authentication
	full_name           string // display name
	email               string
	avatar_url          string
	language            string // UI locale preference
	is_admin            bool
	last_login          time.Time
	created             time.Time
	restricted          bool // restricted account with limited visibility
	active              bool
	prohibit_login      bool
	location            string
	website             string
	description         string
	visibility          string // e.g. "public", "limited", "private"
	followers_count     i64
	following_count     i64
	starred_repos_count i64
	username            string // same login name as exposed by the API
}

View File

@@ -14,8 +14,6 @@ You can configure the client using a HeroScript file:
secret: 'your-gitea-api-token'
```
Save this content in your project's configuration (e.g., `~/.config/hero/config.hcl`) or pass it to a playbook.
## Usage Example
Here's how to get the client and use its methods.

View File

@@ -3,6 +3,7 @@ module ipapi
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
ipapi_global map[string]&IPApi
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&IPApi {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&IPApi {
mut obj := IPApi{
name: args.name
}
if args.name !in ipapi_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&IPApi {
mut context := base.context()!
ipapi_default = args.name
if args.fromdb || args.name !in ipapi_global {
mut r := context.redis()!
if r.hexists('context:ipapi', args.name)! {
data := r.hget('context:ipapi', args.name)!
if data.len == 0 {
return error('IPApi with name: ipapi does not exist, prob bug.')
}
mut obj := json.decode(IPApi, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('ipapi', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("IPApi with name 'ipapi' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return ipapi_global[args.name] or {
println(ipapi_global)
// bug if we get here because should be in globals
panic('could not get config for ipapi with name, is bug:${args.name}')
return error('could not get config for ipapi with name:ipapi')
}
}
// register the config for the future
pub fn set(o IPApi) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
ipapi_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('ipapi', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:ipapi', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('ipapi', args.name)
mut r := context.redis()!
return r.hexists('context:ipapi', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('ipapi', args.name)!
if args.name in ipapi_global {
// del ipapi_global[args.name]
mut r := context.redis()!
r.hdel('context:ipapi', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&IPApi {
mut res := []&IPApi{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
ipapi_global = map[string]&IPApi{}
ipapi_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:ipapi')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in ipapi_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o IPApi) ! {
fn set_in_mem(o IPApi) !IPApi {
mut o2 := obj_init(o)!
ipapi_global[o.name] = &o2
ipapi_default = o.name
ipapi_global[o2.name] = &o2
ipapi_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'ipapi.') {
return
}
mut install_actions := plbook.find(filter: 'ipapi.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
ipapi_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module jina
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
jina_global map[string]&Jina
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&Jina {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&Jina {
mut obj := Jina{
name: args.name
}
if args.name !in jina_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&Jina {
mut context := base.context()!
jina_default = args.name
if args.fromdb || args.name !in jina_global {
mut r := context.redis()!
if r.hexists('context:jina', args.name)! {
data := r.hget('context:jina', args.name)!
if data.len == 0 {
return error('Jina with name: jina does not exist, prob bug.')
}
mut obj := json.decode(Jina, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('jina', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("Jina with name 'jina' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return jina_global[args.name] or {
println(jina_global)
// bug if we get here because should be in globals
panic('could not get config for jina with name, is bug:${args.name}')
return error('could not get config for jina with name:jina')
}
}
// register the config for the future
pub fn set(o Jina) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
jina_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('jina', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:jina', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('jina', args.name)
mut r := context.redis()!
return r.hexists('context:jina', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('jina', args.name)!
if args.name in jina_global {
// del jina_global[args.name]
mut r := context.redis()!
r.hdel('context:jina', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&Jina {
mut res := []&Jina{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
jina_global = map[string]&Jina{}
jina_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:jina')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in jina_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o Jina) ! {
fn set_in_mem(o Jina) !Jina {
mut o2 := obj_init(o)!
jina_global[o.name] = &o2
jina_default = o.name
jina_global[o2.name] = &o2
jina_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'jina.') {
return
}
mut install_actions := plbook.find(filter: 'jina.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
jina_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module livekit
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
livekit_global map[string]&LivekitClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&LivekitClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&LivekitClient {
mut obj := LivekitClient{
name: args.name
}
if args.name !in livekit_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&LivekitClient {
mut context := base.context()!
livekit_default = args.name
if args.fromdb || args.name !in livekit_global {
mut r := context.redis()!
if r.hexists('context:livekit', args.name)! {
data := r.hget('context:livekit', args.name)!
if data.len == 0 {
return error('LivekitClient with name: livekit does not exist, prob bug.')
}
mut obj := json.decode(LivekitClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('livekit', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("LivekitClient with name 'livekit' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return livekit_global[args.name] or {
println(livekit_global)
// bug if we get here because should be in globals
panic('could not get config for livekit with name, is bug:${args.name}')
return error('could not get config for livekit with name:livekit')
}
}
// register the config for the future
pub fn set(o LivekitClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
livekit_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('livekit', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:livekit', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('livekit', args.name)
mut r := context.redis()!
return r.hexists('context:livekit', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('livekit', args.name)!
if args.name in livekit_global {
// del livekit_global[args.name]
mut r := context.redis()!
r.hdel('context:livekit', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&LivekitClient {
mut res := []&LivekitClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
livekit_global = map[string]&LivekitClient{}
livekit_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:livekit')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in livekit_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o LivekitClient) ! {
fn set_in_mem(o LivekitClient) !LivekitClient {
mut o2 := obj_init(o)!
livekit_global[o.name] = &o2
livekit_default = o.name
livekit_global[o2.name] = &o2
livekit_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'livekit.') {
return
}
mut install_actions := plbook.find(filter: 'livekit.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
livekit_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module mailclient
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
mailclient_global map[string]&MailClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&MailClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&MailClient {
mut obj := MailClient{
name: args.name
}
if args.name !in mailclient_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&MailClient {
mut context := base.context()!
mailclient_default = args.name
if args.fromdb || args.name !in mailclient_global {
mut r := context.redis()!
if r.hexists('context:mailclient', args.name)! {
data := r.hget('context:mailclient', args.name)!
if data.len == 0 {
return error('MailClient with name: mailclient does not exist, prob bug.')
}
mut obj := json.decode(MailClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('mailclient', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("MailClient with name 'mailclient' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return mailclient_global[args.name] or {
println(mailclient_global)
// bug if we get here because should be in globals
panic('could not get config for mailclient with name, is bug:${args.name}')
return error('could not get config for mailclient with name:mailclient')
}
}
// register the config for the future
pub fn set(o MailClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
mailclient_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('mailclient', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:mailclient', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('mailclient', args.name)
mut r := context.redis()!
return r.hexists('context:mailclient', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('mailclient', args.name)!
if args.name in mailclient_global {
// del mailclient_global[args.name]
mut r := context.redis()!
r.hdel('context:mailclient', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&MailClient {
mut res := []&MailClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
mailclient_global = map[string]&MailClient{}
mailclient_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:mailclient')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in mailclient_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o MailClient) ! {
fn set_in_mem(o MailClient) !MailClient {
mut o2 := obj_init(o)!
mailclient_global[o.name] = &o2
mailclient_default = o.name
mailclient_global[o2.name] = &o2
mailclient_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'mailclient.') {
return
}
mut install_actions := plbook.find(filter: 'mailclient.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
mailclient_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module meilisearch
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
meilisearch_global map[string]&MeilisearchClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&MeilisearchClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&MeilisearchClient {
mut obj := MeilisearchClient{
name: args.name
}
if args.name !in meilisearch_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&MeilisearchClient {
mut context := base.context()!
meilisearch_default = args.name
if args.fromdb || args.name !in meilisearch_global {
mut r := context.redis()!
if r.hexists('context:meilisearch', args.name)! {
data := r.hget('context:meilisearch', args.name)!
if data.len == 0 {
return error('MeilisearchClient with name: meilisearch does not exist, prob bug.')
}
mut obj := json.decode(MeilisearchClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('meilisearch', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("MeilisearchClient with name 'meilisearch' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return meilisearch_global[args.name] or {
println(meilisearch_global)
// bug if we get here because should be in globals
panic('could not get config for meilisearch with name, is bug:${args.name}')
return error('could not get config for meilisearch with name:meilisearch')
}
}
// register the config for the future
pub fn set(o MeilisearchClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
meilisearch_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('meilisearch', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:meilisearch', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('meilisearch', args.name)
mut r := context.redis()!
return r.hexists('context:meilisearch', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('meilisearch', args.name)!
if args.name in meilisearch_global {
// del meilisearch_global[args.name]
mut r := context.redis()!
r.hdel('context:meilisearch', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&MeilisearchClient {
mut res := []&MeilisearchClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
meilisearch_global = map[string]&MeilisearchClient{}
meilisearch_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:meilisearch')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in meilisearch_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o MeilisearchClient) ! {
fn set_in_mem(o MeilisearchClient) !MeilisearchClient {
mut o2 := obj_init(o)!
meilisearch_global[o.name] = &o2
meilisearch_default = o.name
meilisearch_global[o2.name] = &o2
meilisearch_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'meilisearch.') {
return
}
mut install_actions := plbook.find(filter: 'meilisearch.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
meilisearch_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module mycelium
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
mycelium_global map[string]&Mycelium
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&Mycelium {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&Mycelium {
mut obj := Mycelium{
name: args.name
}
if args.name !in mycelium_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&Mycelium {
mut context := base.context()!
mycelium_default = args.name
if args.fromdb || args.name !in mycelium_global {
mut r := context.redis()!
if r.hexists('context:mycelium', args.name)! {
data := r.hget('context:mycelium', args.name)!
if data.len == 0 {
return error('Mycelium with name: mycelium does not exist, prob bug.')
}
mut obj := json.decode(Mycelium, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('mycelium', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("Mycelium with name 'mycelium' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return mycelium_global[args.name] or {
println(mycelium_global)
// bug if we get here because should be in globals
panic('could not get config for mycelium with name, is bug:${args.name}')
return error('could not get config for mycelium with name:mycelium')
}
}
// register the config for the future
pub fn set(o Mycelium) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
mycelium_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('mycelium', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:mycelium', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('mycelium', args.name)
mut r := context.redis()!
return r.hexists('context:mycelium', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('mycelium', args.name)!
if args.name in mycelium_global {
// del mycelium_global[args.name]
mut r := context.redis()!
r.hdel('context:mycelium', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&Mycelium {
mut res := []&Mycelium{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
mycelium_global = map[string]&Mycelium{}
mycelium_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:mycelium')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in mycelium_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o Mycelium) ! {
fn set_in_mem(o Mycelium) !Mycelium {
mut o2 := obj_init(o)!
mycelium_global[o.name] = &o2
mycelium_default = o.name
mycelium_global[o2.name] = &o2
mycelium_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'mycelium.') {
return
}
mut install_actions := plbook.find(filter: 'mycelium.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
// switch instance to be used for mycelium
pub fn switch(name string) {
mycelium_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module mycelium_rpc
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
mycelium_rpc_global map[string]&MyceliumRPC
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&MyceliumRPC {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&MyceliumRPC {
mut obj := MyceliumRPC{
name: args.name
}
if args.name !in mycelium_rpc_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&MyceliumRPC {
mut context := base.context()!
mycelium_rpc_default = args.name
if args.fromdb || args.name !in mycelium_rpc_global {
mut r := context.redis()!
if r.hexists('context:mycelium_rpc', args.name)! {
data := r.hget('context:mycelium_rpc', args.name)!
if data.len == 0 {
return error('MyceliumRPC with name: mycelium_rpc does not exist, prob bug.')
}
mut obj := json.decode(MyceliumRPC, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('mycelium_rpc', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("MyceliumRPC with name 'mycelium_rpc' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return mycelium_rpc_global[args.name] or {
println(mycelium_rpc_global)
// bug if we get here because should be in globals
panic('could not get config for mycelium_rpc with name, is bug:${args.name}')
return error('could not get config for mycelium_rpc with name:mycelium_rpc')
}
}
// register the config for the future
pub fn set(o MyceliumRPC) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
mycelium_rpc_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('mycelium_rpc', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:mycelium_rpc', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('mycelium_rpc', args.name)
mut r := context.redis()!
return r.hexists('context:mycelium_rpc', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('mycelium_rpc', args.name)!
if args.name in mycelium_rpc_global {
// del mycelium_rpc_global[args.name]
mut r := context.redis()!
r.hdel('context:mycelium_rpc', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&MyceliumRPC {
mut res := []&MyceliumRPC{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
mycelium_rpc_global = map[string]&MyceliumRPC{}
mycelium_rpc_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:mycelium_rpc')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in mycelium_rpc_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o MyceliumRPC) ! {
fn set_in_mem(o MyceliumRPC) !MyceliumRPC {
mut o2 := obj_init(o)!
mycelium_rpc_global[o.name] = &o2
mycelium_rpc_default = o.name
mycelium_rpc_global[o2.name] = &o2
mycelium_rpc_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'mycelium_rpc.') {
return
}
mut install_actions := plbook.find(filter: 'mycelium_rpc.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
// switch instance to be used for mycelium_rpc
pub fn switch(name string) {
mycelium_rpc_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module openai
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
openai_global map[string]&OpenAI
@@ -14,75 +15,114 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&OpenAI {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&OpenAI {
mut obj := OpenAI{
name: args.name
}
if args.name !in openai_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&OpenAI {
mut context := base.context()!
openai_default = args.name
if args.fromdb || args.name !in openai_global {
mut r := context.redis()!
if r.hexists('context:openai', args.name)! {
data := r.hget('context:openai', args.name)!
if data.len == 0 {
return error('OpenAI with name: openai does not exist, prob bug.')
}
mut obj := json.decode(OpenAI, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('openai', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("OpenAI with name 'openai' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return openai_global[args.name] or {
println(openai_global)
// bug if we get here because should be in globals
panic('could not get config for openai with name, is bug:${args.name}')
return error('could not get config for openai with name:openai')
}
}
// register the config for the future
pub fn set(o OpenAI) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
openai_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('openai', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:openai', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('openai', args.name)
mut r := context.redis()!
return r.hexists('context:openai', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('openai', args.name)!
if args.name in openai_global {
// del openai_global[args.name]
mut r := context.redis()!
r.hdel('context:openai', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&OpenAI {
mut res := []&OpenAI{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
openai_global = map[string]&OpenAI{}
openai_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:openai')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in openai_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o OpenAI) ! {
fn set_in_mem(o OpenAI) !OpenAI {
mut o2 := obj_init(o)!
openai_global[o.name] = &o2
openai_default = o.name
openai_global[o2.name] = &o2
openai_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'openai.') {
return
}
mut install_actions := plbook.find(filter: 'openai.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
// println('install_action: ${install_action}')
heroscript := install_action.heroscript()
mut obj2 := heroscript_loads(heroscript)!
set(obj2)!
@@ -94,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
openai_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module postgresql_client
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
postgresql_client_global map[string]&PostgresqlClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&PostgresqlClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&PostgresqlClient {
mut obj := PostgresqlClient{
name: args.name
}
if args.name !in postgresql_client_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&PostgresqlClient {
mut context := base.context()!
postgresql_client_default = args.name
if args.fromdb || args.name !in postgresql_client_global {
mut r := context.redis()!
if r.hexists('context:postgresql_client', args.name)! {
data := r.hget('context:postgresql_client', args.name)!
if data.len == 0 {
return error('PostgresqlClient with name: postgresql_client does not exist, prob bug.')
}
mut obj := json.decode(PostgresqlClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('postgresql_client', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("PostgresqlClient with name 'postgresql_client' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return postgresql_client_global[args.name] or {
println(postgresql_client_global)
// bug if we get here because should be in globals
panic('could not get config for postgresql_client with name, is bug:${args.name}')
return error('could not get config for postgresql_client with name:postgresql_client')
}
}
// register the config for the future
pub fn set(o PostgresqlClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
postgresql_client_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('postgresql_client', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:postgresql_client', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('postgresql_client', args.name)
mut r := context.redis()!
return r.hexists('context:postgresql_client', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('postgresql_client', args.name)!
if args.name in postgresql_client_global {
// del postgresql_client_global[args.name]
mut r := context.redis()!
r.hdel('context:postgresql_client', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&PostgresqlClient {
mut res := []&PostgresqlClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
postgresql_client_global = map[string]&PostgresqlClient{}
postgresql_client_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:postgresql_client')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in postgresql_client_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o PostgresqlClient) ! {
fn set_in_mem(o PostgresqlClient) !PostgresqlClient {
mut o2 := obj_init(o)!
postgresql_client_global[o.name] = &o2
postgresql_client_default = o.name
postgresql_client_global[o2.name] = &o2
postgresql_client_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'postgresql_client.') {
return
}
mut install_actions := plbook.find(filter: 'postgresql_client.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
postgresql_client_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module qdrant
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
qdrant_global map[string]&QDrantClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&QDrantClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&QDrantClient {
mut obj := QDrantClient{
name: args.name
}
if args.name !in qdrant_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&QDrantClient {
mut context := base.context()!
qdrant_default = args.name
if args.fromdb || args.name !in qdrant_global {
mut r := context.redis()!
if r.hexists('context:qdrant', args.name)! {
data := r.hget('context:qdrant', args.name)!
if data.len == 0 {
return error('QDrantClient with name: qdrant does not exist, prob bug.')
}
mut obj := json.decode(QDrantClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('qdrant', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("QDrantClient with name 'qdrant' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return qdrant_global[args.name] or {
println(qdrant_global)
// bug if we get here because should be in globals
panic('could not get config for qdrant with name, is bug:${args.name}')
return error('could not get config for qdrant with name:qdrant')
}
}
// register the config for the future
pub fn set(o QDrantClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
qdrant_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('qdrant', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:qdrant', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('qdrant', args.name)
mut r := context.redis()!
return r.hexists('context:qdrant', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('qdrant', args.name)!
if args.name in qdrant_global {
// del qdrant_global[args.name]
mut r := context.redis()!
r.hdel('context:qdrant', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&QDrantClient {
mut res := []&QDrantClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
qdrant_global = map[string]&QDrantClient{}
qdrant_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:qdrant')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in qdrant_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o QDrantClient) ! {
fn set_in_mem(o QDrantClient) !QDrantClient {
mut o2 := obj_init(o)!
qdrant_global[o.name] = &o2
qdrant_default = o.name
qdrant_global[o2.name] = &o2
qdrant_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'qdrant.') {
return
}
mut install_actions := plbook.find(filter: 'qdrant.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
qdrant_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module rclone
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
rclone_global map[string]&RCloneClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&RCloneClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&RCloneClient {
mut obj := RCloneClient{
name: args.name
}
if args.name !in rclone_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&RCloneClient {
mut context := base.context()!
rclone_default = args.name
if args.fromdb || args.name !in rclone_global {
mut r := context.redis()!
if r.hexists('context:rclone', args.name)! {
data := r.hget('context:rclone', args.name)!
if data.len == 0 {
return error('RCloneClient with name: rclone does not exist, prob bug.')
}
mut obj := json.decode(RCloneClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('rclone', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("RCloneClient with name 'rclone' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return rclone_global[args.name] or {
println(rclone_global)
// bug if we get here because should be in globals
panic('could not get config for rclone with name, is bug:${args.name}')
return error('could not get config for rclone with name:rclone')
}
}
// register the config for the future
pub fn set(o RCloneClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
rclone_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('rclone', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:rclone', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('rclone', args.name)
mut r := context.redis()!
return r.hexists('context:rclone', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('rclone', args.name)!
if args.name in rclone_global {
// del rclone_global[args.name]
mut r := context.redis()!
r.hdel('context:rclone', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&RCloneClient {
mut res := []&RCloneClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
rclone_global = map[string]&RCloneClient{}
rclone_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:rclone')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in rclone_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o RCloneClient) ! {
fn set_in_mem(o RCloneClient) !RCloneClient {
mut o2 := obj_init(o)!
rclone_global[o.name] = &o2
rclone_default = o.name
rclone_global[o2.name] = &o2
rclone_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'rclone.') {
return
}
mut install_actions := plbook.find(filter: 'rclone.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
// switch instance to be used for rclone
pub fn switch(name string) {
rclone_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module runpod
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
runpod_global map[string]&RunPod
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&RunPod {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&RunPod {
mut obj := RunPod{
name: args.name
}
if args.name !in runpod_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&RunPod {
mut context := base.context()!
runpod_default = args.name
if args.fromdb || args.name !in runpod_global {
mut r := context.redis()!
if r.hexists('context:runpod', args.name)! {
data := r.hget('context:runpod', args.name)!
if data.len == 0 {
return error('RunPod with name: runpod does not exist, prob bug.')
}
mut obj := json.decode(RunPod, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('runpod', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("RunPod with name 'runpod' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return runpod_global[args.name] or {
println(runpod_global)
// bug if we get here because should be in globals
panic('could not get config for runpod with name, is bug:${args.name}')
return error('could not get config for runpod with name:runpod')
}
}
// register the config for the future
pub fn set(o RunPod) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
runpod_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('runpod', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:runpod', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('runpod', args.name)
mut r := context.redis()!
return r.hexists('context:runpod', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('runpod', args.name)!
if args.name in runpod_global {
// del runpod_global[args.name]
mut r := context.redis()!
r.hdel('context:runpod', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&RunPod {
mut res := []&RunPod{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
runpod_global = map[string]&RunPod{}
runpod_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:runpod')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in runpod_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o RunPod) ! {
fn set_in_mem(o RunPod) !RunPod {
mut o2 := obj_init(o)!
runpod_global[o.name] = &o2
runpod_default = o.name
runpod_global[o2.name] = &o2
runpod_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'runpod.') {
return
}
mut install_actions := plbook.find(filter: 'runpod.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
runpod_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module sendgrid
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
sendgrid_global map[string]&SendGrid
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&SendGrid {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&SendGrid {
mut obj := SendGrid{
name: args.name
}
if args.name !in sendgrid_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&SendGrid {
mut context := base.context()!
sendgrid_default = args.name
if args.fromdb || args.name !in sendgrid_global {
mut r := context.redis()!
if r.hexists('context:sendgrid', args.name)! {
data := r.hget('context:sendgrid', args.name)!
if data.len == 0 {
return error('SendGrid with name: sendgrid does not exist, prob bug.')
}
mut obj := json.decode(SendGrid, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('sendgrid', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("SendGrid with name 'sendgrid' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return sendgrid_global[args.name] or {
println(sendgrid_global)
// bug if we get here because should be in globals
panic('could not get config for sendgrid with name, is bug:${args.name}')
return error('could not get config for sendgrid with name:sendgrid')
}
}
// register the config for the future
pub fn set(o SendGrid) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
sendgrid_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('sendgrid', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:sendgrid', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('sendgrid', args.name)
mut r := context.redis()!
return r.hexists('context:sendgrid', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('sendgrid', args.name)!
if args.name in sendgrid_global {
// del sendgrid_global[args.name]
mut r := context.redis()!
r.hdel('context:sendgrid', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&SendGrid {
mut res := []&SendGrid{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
sendgrid_global = map[string]&SendGrid{}
sendgrid_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:sendgrid')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in sendgrid_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o SendGrid) ! {
fn set_in_mem(o SendGrid) !SendGrid {
mut o2 := obj_init(o)!
sendgrid_global[o.name] = &o2
sendgrid_default = o.name
sendgrid_global[o2.name] = &o2
sendgrid_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'sendgrid.') {
return
}
mut install_actions := plbook.find(filter: 'sendgrid.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
// switch instance to be used for sendgrid
pub fn switch(name string) {
sendgrid_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module vastai
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
vastai_global map[string]&VastAI
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&VastAI {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&VastAI {
mut obj := VastAI{
name: args.name
}
if args.name !in vastai_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&VastAI {
mut context := base.context()!
vastai_default = args.name
if args.fromdb || args.name !in vastai_global {
mut r := context.redis()!
if r.hexists('context:vastai', args.name)! {
data := r.hget('context:vastai', args.name)!
if data.len == 0 {
return error('VastAI with name: vastai does not exist, prob bug.')
}
mut obj := json.decode(VastAI, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('vastai', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("VastAI with name 'vastai' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return vastai_global[args.name] or {
println(vastai_global)
// bug if we get here because should be in globals
panic('could not get config for vastai with name, is bug:${args.name}')
return error('could not get config for vastai with name:vastai')
}
}
// register the config for the future
pub fn set(o VastAI) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
vastai_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('vastai', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:vastai', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('vastai', args.name)
mut r := context.redis()!
return r.hexists('context:vastai', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('vastai', args.name)!
if args.name in vastai_global {
// del vastai_global[args.name]
mut r := context.redis()!
r.hdel('context:vastai', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&VastAI {
mut res := []&VastAI{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
vastai_global = map[string]&VastAI{}
vastai_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:vastai')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in vastai_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o VastAI) ! {
fn set_in_mem(o VastAI) !VastAI {
mut o2 := obj_init(o)!
vastai_global[o.name] = &o2
vastai_default = o.name
vastai_global[o2.name] = &o2
vastai_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'vastai.') {
return
}
mut install_actions := plbook.find(filter: 'vastai.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
// switch instance to be used for vastai
pub fn switch(name string) {
vastai_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module wireguard
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
wireguard_global map[string]&WireGuard
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&WireGuard {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&WireGuard {
mut obj := WireGuard{
name: args.name
}
if args.name !in wireguard_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&WireGuard {
mut context := base.context()!
wireguard_default = args.name
if args.fromdb || args.name !in wireguard_global {
mut r := context.redis()!
if r.hexists('context:wireguard', args.name)! {
data := r.hget('context:wireguard', args.name)!
if data.len == 0 {
return error('WireGuard with name: wireguard does not exist, prob bug.')
}
mut obj := json.decode(WireGuard, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('wireguard', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("WireGuard with name 'wireguard' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return wireguard_global[args.name] or {
println(wireguard_global)
// bug if we get here because should be in globals
panic('could not get config for wireguard with name, is bug:${args.name}')
return error('could not get config for wireguard with name:wireguard')
}
}
// register the config for the future
pub fn set(o WireGuard) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
wireguard_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('wireguard', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:wireguard', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('wireguard', args.name)
mut r := context.redis()!
return r.hexists('context:wireguard', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('wireguard', args.name)!
if args.name in wireguard_global {
// del wireguard_global[args.name]
mut r := context.redis()!
r.hdel('context:wireguard', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&WireGuard {
mut res := []&WireGuard{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
wireguard_global = map[string]&WireGuard{}
wireguard_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:wireguard')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in wireguard_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o WireGuard) ! {
fn set_in_mem(o WireGuard) !WireGuard {
mut o2 := obj_init(o)!
wireguard_global[o.name] = &o2
wireguard_default = o.name
wireguard_global[o2.name] = &o2
wireguard_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'wireguard.') {
return
}
mut install_actions := plbook.find(filter: 'wireguard.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -93,10 +134,3 @@ pub fn play(mut plbook PlayBook) ! {
pub fn switch(name string) {
wireguard_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -3,6 +3,7 @@ module zerodb_client
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
zerodb_client_global map[string]&ZeroDBClient
@@ -14,71 +15,111 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
fromdb bool // will load from filesystem
create bool // default will not create if not exist
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&ZeroDBClient {
mut context := base.context()!
mut args := args_get(args_)
pub fn new(args ArgsGet) !&ZeroDBClient {
mut obj := ZeroDBClient{
name: args.name
}
if args.name !in zerodb_client_global {
if !exists(args)! {
set(obj)!
set(obj)!
return get(name: args.name)!
}
pub fn get(args ArgsGet) !&ZeroDBClient {
mut context := base.context()!
zerodb_client_default = args.name
if args.fromdb || args.name !in zerodb_client_global {
mut r := context.redis()!
if r.hexists('context:zerodb_client', args.name)! {
data := r.hget('context:zerodb_client', args.name)!
if data.len == 0 {
return error('ZeroDBClient with name: zerodb_client does not exist, prob bug.')
}
mut obj := json.decode(ZeroDBClient, data)!
set_in_mem(obj)!
} else {
heroscript := context.hero_config_get('zerodb_client', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
if args.create {
new(args)!
} else {
return error("ZeroDBClient with name 'zerodb_client' does not exist")
}
}
return get(name: args.name)! // no longer from db nor create
}
return zerodb_client_global[args.name] or {
println(zerodb_client_global)
// bug if we get here because should be in globals
panic('could not get config for zerodb_client with name, is bug:${args.name}')
return error('could not get config for zerodb_client with name:zerodb_client')
}
}
// register the config for the future
pub fn set(o ZeroDBClient) ! {
set_in_mem(o)!
mut o2 := set_in_mem(o)!
zerodb_client_default = o2.name
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('zerodb_client', o.name, heroscript)!
mut r := context.redis()!
r.hset('context:zerodb_client', o2.name, json.encode(o2))!
}
// does the config exists?
pub fn exists(args_ ArgsGet) !bool {
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists('zerodb_client', args.name)
mut r := context.redis()!
return r.hexists('context:zerodb_client', args.name)!
}
pub fn delete(args_ ArgsGet) ! {
mut args := args_get(args_)
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
context.hero_config_delete('zerodb_client', args.name)!
if args.name in zerodb_client_global {
// del zerodb_client_global[args.name]
mut r := context.redis()!
r.hdel('context:zerodb_client', args.name)!
}
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&ZeroDBClient {
mut res := []&ZeroDBClient{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
zerodb_client_global = map[string]&ZeroDBClient{}
zerodb_client_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:zerodb_client')!
for name in l {
res << get(name: name, fromdb: true)!
}
return res
} else {
// load from memory
for _, client in zerodb_client_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o ZeroDBClient) ! {
fn set_in_mem(o ZeroDBClient) !ZeroDBClient {
mut o2 := obj_init(o)!
zerodb_client_global[o.name] = &o2
zerodb_client_default = o.name
zerodb_client_global[o2.name] = &o2
zerodb_client_default = o2.name
return o2
}
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'zerodb_client.') {
return
}
mut install_actions := plbook.find(filter: 'zerodb_client.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
@@ -91,12 +132,4 @@ pub fn play(mut plbook PlayBook) ! {
// switch instance to be used for zerodb_client
pub fn switch(name string) {
zerodb_client_default = name
}
// helpers
@[params]
pub struct DefaultConfigArgs {
instance string = 'default'
}

View File

@@ -1,8 +1,8 @@
!!hero_code.generate_client
name:'zinit_rpc'
name:'zinit'
classname:'ZinitRPC'
singleton:1
default:0
singleton:0
default:1
hasconfig:1
reset:0

View File

@@ -1,152 +1,219 @@
# Zinit OpenRPC Client
# Zinit RPC Client
This is a V language client for the Zinit service manager, implementing the OpenRPC specification.
This is a V language client for the Zinit process manager, implementing the JSON-RPC API specification for service management operations.
## Overview
Zinit is a service manager that allows you to manage and monitor services on your system. This client provides a comprehensive API to interact with Zinit via its JSON-RPC interface.
Zinit is a process manager that provides service monitoring, dependency management, and system control capabilities. This client provides a comprehensive API to interact with Zinit via its JSON-RPC interface for administrative tasks such as:
- Service lifecycle management (start, stop, monitor, forget)
- Service configuration management (create, delete, get)
- Service status and statistics monitoring
- System operations (shutdown, reboot, HTTP server control)
- Log streaming and monitoring
## Features
- Complete implementation of all methods in the Zinit OpenRPC specification
- Type-safe API with proper error handling
- Comprehensive documentation
- Helper functions for common operations
- Example code for all operations
- **✅ 100% API Coverage**: Complete implementation of all 18 methods in the Zinit JSON-RPC specification
- **✅ Production Tested**: All methods tested and working against real Zinit instances
- **✅ Type-safe API**: Proper V struct definitions with comprehensive error handling
- **✅ Subscription Support**: Proper handling of streaming/subscription methods
- **✅ Unix Socket Transport**: Reliable communication via Unix domain sockets
- **✅ Comprehensive Documentation**: Extensive documentation with working examples
## Usage
### Basic Example
```v
import freeflowuniverse.heroweb.clients.zinit
import freeflowuniverse.herolib.clients.zinit
fn main() {
// Create a new client with the default socket path
mut client := zinit.new_default_client()
// List all services
services := client.service_list() or {
println('Error: ${err}')
return
}
// Print the services
for name, state in services {
println('${name}: ${state}')
}
// Get status of a specific service
if services.len > 0 {
service_name := services.keys()[0]
status := client.service_status(service_name) or {
println('Error: ${err}')
return
}
println('Service: ${status.name}')
println('State: ${status.state}')
println('PID: ${status.pid}')
// Create a new client
mut client := zinit.get(create:true)!
// List all services
services := client.service_list()!
for service_name, state in services {
println('Service: ${service_name}, State: ${state}')
}
// Get detailed status of a specific service
status := client.service_status('redis')!
println('Service: ${status.name}')
println('PID: ${status.pid}')
println('State: ${status.state}')
println('Target: ${status.target}')
// Start a service
client.service_start('redis')!
// Stop a service
client.service_stop('redis')!
```
### Service Configuration Management
```v
import freeflowuniverse.herolib.clients.zinit
mut client := zinit.new_client()!
// Create a new service configuration
config := zinit.ServiceConfig{
exec: '/usr/bin/redis-server'
oneshot: false
log: 'stdout'
env: {
'REDIS_PORT': '6379'
'REDIS_HOST': '0.0.0.0'
}
shutdown_timeout: 30
}
// Create the service
path := client.service_create('redis', config)!
println('Service created at: ${path}')
// Get service configuration
retrieved_config := client.service_get('redis')!
println('Service exec: ${retrieved_config.exec}')
// Delete service configuration
result := client.service_delete('redis')!
println('Delete result: ${result}')
```
### Service Statistics
```v
import freeflowuniverse.herolib.clients.zinit
mut client := zinit.new_client()!
// Get service statistics
stats := client.service_stats('redis')!
println('Service: ${stats.name}')
println('PID: ${stats.pid}')
println('Memory Usage: ${stats.memory_usage} bytes')
println('CPU Usage: ${stats.cpu_usage}%')
// Print child process statistics
for child in stats.children {
println('Child PID: ${child.pid}, Memory: ${child.memory_usage}, CPU: ${child.cpu_usage}%')
}
```
### Creating and Managing Services
### Log Streaming
```v
import freeflowuniverse.heroweb.clients.zinit
import freeflowuniverse.herolib.clients.zinit
fn main() {
mut client := zinit.new_default_client()
// Create a new service configuration
config := zinit.ServiceConfig{
exec: '/bin/echo "Hello, World!"'
oneshot: true
log: zinit.log_stdout
env: {
'ENV_VAR': 'value'
}
}
// Create the service
client.service_create('hello', config) or {
println('Error creating service: ${err}')
return
}
// Start the service
client.service_start('hello') or {
println('Error starting service: ${err}')
return
}
// Get the service logs
logs := client.stream_current_logs('hello') or {
println('Error getting logs: ${err}')
return
}
for log in logs {
println(log)
}
// Clean up
client.service_stop('hello') or {}
client.service_forget('hello') or {}
client.service_delete('hello') or {}
mut client := zinit.new_client()!
// Get current logs for all services
logs := client.stream_current_logs(name: '')!
for log in logs {
println(log)
}
// Get current logs for a specific service
redis_logs := client.stream_current_logs(name: 'redis')!
for log in redis_logs {
println('Redis: ${log}')
}
// Subscribe to log stream (returns subscription ID)
subscription_id := client.stream_subscribe_logs(name: 'redis')!
println('Subscribed to logs with ID: ${subscription_id}')
```
## API Reference
### Client Creation
### Service Management Methods
- `new_client(socket_path string) &Client` - Create a new client with a custom socket path
- `new_default_client() &Client` - Create a new client with the default socket path (`/tmp/zinit.sock`)
- `service_list()` - List all services and their states
- `service_status(name)` - Get detailed status of a service
- `service_start(name)` - Start a service
- `service_stop(name)` - Stop a service
- `service_monitor(name)` - Start monitoring a service
- `service_forget(name)` - Stop monitoring a service
- `service_kill(name, signal)` - Send signal to a service
### Service Management
### Service Configuration Methods
- `service_list() !map[string]string` - List all services and their states
- `service_status(name string) !ServiceStatus` - Get detailed status of a service
- `service_start(name string) !` - Start a service
- `service_stop(name string) !` - Stop a service
- `service_monitor(name string) !` - Start monitoring a service
- `service_forget(name string) !` - Stop monitoring a service
- `service_kill(name string, signal string) !` - Send a signal to a service
- `service_create(name string, config ServiceConfig) !string` - Create a new service
- `service_delete(name string) !string` - Delete a service
- `service_get(name string) !ServiceConfig` - Get a service configuration
- `service_stats(name string) !ServiceStats` - Get memory and CPU usage statistics
- `service_create(name, config)` - Create service configuration
- `service_delete(name)` - Delete service configuration
- `service_get(name)` - Get service configuration
### System Operations
### Monitoring Methods
- `system_shutdown() !` - Stop all services and power off the system
- `system_reboot() !` - Stop all services and reboot the system
- `system_start_http_server(address string) !string` - Start an HTTP/RPC server
- `system_stop_http_server() !` - Stop the HTTP/RPC server
- `service_stats(name)` - Get service statistics
### Logs
### System Methods
- `stream_current_logs(name ?string) ![]string` - Get current logs
- `stream_subscribe_logs(name ?string) !string` - Subscribe to log messages
- `system_shutdown()` - Shutdown the system
- `system_reboot()` - Reboot the system
- `system_start_http_server(address)` - Start HTTP server
- `system_stop_http_server()` - Stop HTTP server
## Constants
### Streaming Methods
- `default_socket_path` - Default Unix socket path (`/tmp/zinit.sock`)
- `state_running`, `state_success`, `state_error`, etc. - Common service states
- `target_up`, `target_down` - Common service targets
- `log_null`, `log_ring`, `log_stdout` - Common log types
- `signal_term`, `signal_kill`, etc. - Common signals
- `stream_current_logs(args)` - Get current logs (returns array of log lines)
- `stream_subscribe_logs(args)` - Subscribe to log stream (returns subscription ID)
## Helper Functions
### Discovery Methods
- `new_service_config(exec string) ServiceConfig` - Create a basic service configuration
- `new_oneshot_service_config(exec string) ServiceConfig` - Create a oneshot service configuration
- `is_service_not_found_error(err IError) bool` - Check if an error is a "service not found" error
- `format_memory_usage(bytes i64) string` - Format memory usage in human-readable format
- `format_cpu_usage(cpu_percent f64) string` - Format CPU usage
- `rpc_discover()` - Get OpenRPC specification
## Configuration
### Using the Factory Pattern
```v
import freeflowuniverse.herolib.clients.zinit
// Get client using factory (recommended)
mut client := zinit.get()!
// Use the client
services := client.service_list()!
```
### Example Heroscript Configuration
```hero
!!zinit.configure
name: 'production'
socket_path: '/tmp/zinit.sock'
```
## Error Handling
The client provides comprehensive error handling for all Zinit-specific error codes:
- `-32000`: Service not found
- `-32001`: Service already monitored
- `-32002`: Service is up
- `-32003`: Service is down
- `-32004`: Invalid signal
- `-32005`: Config error
- `-32006`: Shutting down
- `-32007`: Service already exists
- `-32008`: Service file error
```v
import freeflowuniverse.herolib.clients.zinit
mut client := zinit.new_client()!
// Handle specific errors
client.service_start('nonexistent') or {
if err.msg().contains('Service not found') {
println('Service does not exist')
} else {
println('Other error: ${err}')
}
}
```
## License
MIT

View File

@@ -1,18 +0,0 @@
module zinit
// Request Types for Zinit API
//
// This file contains all the request types used by the Zinit API.
// ZinitError represents an error returned by the zinit API
pub struct ZinitError {
pub mut:
code int // Error code
message string // Error message
data string // Additional error data
}
// Error implements the error interface for ZinitError
pub fn (e ZinitError) msg() string {
return 'Zinit Error ${e.code}: ${e.message} - ${e.data}'
}

View File

@@ -1,23 +0,0 @@
module zinit
import freeflowuniverse.herolib.schemas.jsonrpc
// Client is an OpenRPC client for Zinit
pub struct Client {
mut:
rpc_client &jsonrpc.Client
}
@[params]
pub struct ClientParams {
path string = '/tmp/zinit.sock' // Path to the Zinit RPC socket
}
// new_client creates a new Zinit RPC client with a custom socket path
pub fn new_client(args_ ClientParams) &Client {
mut args := args_
mut cl := jsonrpc.new_unix_socket_client(args.path)
return &Client{
rpc_client: cl
}
}

View File

@@ -0,0 +1,20 @@
| RPC Call | Example In | Example Out | 1-Sentence Description |
|---------|-----------|------------|------------------------|
| `rpc.discover` | `{}` | `{ "openrpc": "1.2.6", "info": { "version": "1.0.0", "title": "Zinit JSON-RPC API" } }` | Returns the full OpenRPC specification of the Zinit API. |
| `service_list` | `{}` | `{ "service1": "Running", "service2": "Success", "service3": "Error" }` | Lists all managed services and their current states. |
| `service_status` | `{ "name": "redis" }` | `{ "name": "redis", "pid": 1234, "state": "Running", "target": "Up", "after": { "dependency1": "Success", "dependency2": "Running" } }` | Returns detailed status including PID, state, dependencies, and target. |
| `service_start` | `{ "name": "redis" }` | `null` | Starts a specified service; returns no result on success. |
| `service_stop` | `{ "name": "redis" }` | `null` | Stops a specified service; returns no result on success. |
| `service_monitor` | `{ "name": "redis" }` | `null` | Starts monitoring a service using its configuration from the config directory. |
| `service_forget` | `{ "name": "redis" }` | `null` | Stops monitoring a service; only allowed for stopped services. |
| `service_kill` | `{ "name": "redis", "signal": "SIGTERM" }` | `null` | Sends a signal (e.g., SIGTERM) to a running service. |
| `system_shutdown` | `{}` | `null` | Stops all services and powers off the system. |
| `system_reboot` | `{}` | `null` | Stops all services and reboots the system. |
| `service_create` | `{ "name": "redis", "content": { "exec": "redis-server", "oneshot": false, "after": ["network"], "log": "stdout", "env": { "REDIS_PASSWORD": "secret" }, "shutdown_timeout": 30 } }` | `"service_config/redis"` | Creates a new service configuration file with specified settings. |
| `service_delete` | `{ "name": "redis" }` | `"service deleted"` | Deletes a service configuration file. |
| `service_get` | `{ "name": "redis" }` | `{ "exec": "redis-server", "oneshot": false, "after": ["network"] }` | Retrieves the configuration content of a service. |
| `service_stats` | `{ "name": "redis" }` | `{ "name": "redis", "pid": 1234, "memory_usage": 10485760, "cpu_usage": 2.5, "children": [ { "pid": 1235, "memory_usage": 5242880, "cpu_usage": 1.2 } ] }` | Returns memory and CPU usage statistics for a running service. |
| `system_start_http_server` | `{ "address": "127.0.0.1:8080" }` | `"HTTP server started at 127.0.0.1:8080"` | Starts an HTTP/RPC server on the specified network address. |
| `system_stop_http_server` | `{}` | `null` | Stops the currently running HTTP/RPC server. |
| `stream_currentLogs` | `{ "name": "redis" }` | `["2023-01-01T12:00:00 redis: Starting service", "2023-01-01T12:00:02 redis: Service started"]` | Returns current logs; optionally filtered by service name. |
| `stream_subscribeLogs` | `{ "name": "redis" }` | `"2023-01-01T12:00:00 redis: Service started"` | Subscribes to real-time log messages, optionally filtered by service. |

View File

@@ -1,73 +0,0 @@
module zinit
// ServiceCreateResponse represents the response from service_create
pub struct ServiceCreateResponse {
pub mut:
path string // Path to the created service file
}
// ServiceDeleteResponse represents the response from service_delete
pub struct ServiceDeleteResponse {
pub mut:
result string // Result of the delete operation
}
// SystemStartHttpServerResponse represents the response from system_start_http_server
pub struct SystemStartHttpServerResponse {
pub mut:
result string // Result of starting the HTTP server
}
// StreamCurrentLogsResponse represents the response from stream_currentLogs
pub struct StreamCurrentLogsResponse {
pub mut:
logs []string // Log entries
}
// StreamSubscribeLogsResponse represents the response from stream_subscribeLogs
pub struct StreamSubscribeLogsResponse {
pub mut:
subscription_id string // ID of the log subscription
}
// Module version information
pub const version = '1.0.0'
pub const author = 'Hero Code'
pub const license = 'MIT'
// Default socket path for zinit
pub const default_socket_path = '/tmp/zinit.sock'
// Common service states
pub const state_running = 'Running'
pub const state_success = 'Success'
pub const state_error = 'Error'
pub const state_stopped = 'Stopped'
pub const state_failed = 'Failed'
// Common service targets
pub const target_up = 'Up'
pub const target_down = 'Down'
// Common log types
pub const log_null = 'null'
pub const log_ring = 'ring'
pub const log_stdout = 'stdout'
// Common signals
pub const signal_term = 'SIGTERM'
pub const signal_kill = 'SIGKILL'
pub const signal_hup = 'SIGHUP'
pub const signal_usr1 = 'SIGUSR1'
pub const signal_usr2 = 'SIGUSR2'
// JSON-RPC error codes as defined in the OpenRPC specification
pub const error_service_not_found = -32000
pub const error_service_already_monitored = -32001
pub const error_service_is_up = -32002
pub const error_service_is_down = -32003
pub const error_invalid_signal = -32004
pub const error_config_error = -32005
pub const error_shutting_down = -32006
pub const error_service_already_exists = -32007
pub const error_service_file_error = -32008

View File

@@ -0,0 +1,63 @@
module zinit
// ServiceStatus represents detailed status information for a service
pub struct ServiceStatus {
pub mut:
name string // Service name
pid u32 // Process ID of the running service (if running)
state string // Current state of the service (Running, Success, Error, etc.)
target string // Target state of the service (Up, Down)
after map[string]string // Dependencies of the service and their states
}
// ServiceConfig represents the configuration for a zinit service
pub struct ServiceConfig {
pub mut:
exec string // Command to run
test string // Test command (optional)
oneshot bool // Whether the service should be restarted (maps to one_shot in Zinit)
after []string // Services that must be running before this one starts
log string // How to handle service output (null, ring, stdout)
env map[string]string // Environment variables for the service
dir string // Working directory for the service
shutdown_timeout u64 // Maximum time to wait for service to stop during shutdown
}
// ServiceStats represents memory and CPU usage statistics for a service
pub struct ServiceStats {
pub mut:
name string // Service name
pid u32 // Process ID of the service
memory_usage u64 // Memory usage in bytes
cpu_usage f32 // CPU usage as a percentage (0-100)
children []ChildStats // Stats for child processes
}
// ChildStats represents statistics for a child process
pub struct ChildStats {
pub mut:
pid u32 // Process ID of the child process
memory_usage u64 // Memory usage in bytes
cpu_usage f32 // CPU usage as a percentage (0-100)
}
// ServiceCreateParams represents parameters for service_create method
pub struct ServiceCreateParams {
pub mut:
name string // Name of the service to create
content ServiceConfig // Configuration for the service
}
// ServiceKillParams represents parameters for service_kill method
pub struct ServiceKillParams {
pub mut:
name string // Name of the service to kill
signal string // Signal to send (e.g., SIGTERM, SIGKILL)
}
// LogParams represents parameters for log streaming methods
@[params]
pub struct LogParams {
pub mut:
name string // Optional service name filter
}

View File

@@ -1,175 +0,0 @@
module zinit
import freeflowuniverse.herolib.schemas.jsonrpc
// ServiceConfig represents the configuration for a zinit service
pub struct ServiceConfig {
pub mut:
exec string // Command to run
oneshot bool // Whether the service should be restarted
after []string // Services that must be running before this one starts
log string // How to handle service output (null, ring, stdout)
env map[string]string // Environment variables for the service
shutdown_timeout int // Maximum time to wait for service to stop during shutdown
}
// KillParams represents the parameters for the service_kill method
pub struct KillParams {
pub:
name string // Name of the service to kill
signal string // Signal to send (e.g., SIGTERM, SIGKILL)
}
// RpcDiscoverResponse represents the response from rpc.discover
pub struct RpcDiscoverResponse {
pub mut:
spec map[string]string // OpenRPC specification
}
// rpc_discover returns the OpenRPC specification for the API
pub fn (mut c Client) rpc_discover() !RpcDiscoverResponse {
request := jsonrpc.new_request_generic('rpc.discover', []string{})
response := c.rpc_client.send[[]string, map[string]string](request)!
return RpcDiscoverResponse{
spec: response
}
}
// // Response Models for Zinit API
// //
// // This file contains all the response models used by the Zinit API.
// // These models are used as type parameters in the response generics.
// // ServiceListResponse represents the response from service_list
// pub struct ServiceListResponse {
// pub mut:
// // Map of service names to their current states
// services map[string]string
// }
// service_list lists all services managed by Zinit
// Returns a map of service names to their current states
pub fn (mut c Client) service_list() !map[string]string {
request := jsonrpc.new_request_generic('service_list', map[string]string{})
services := c.rpc_client.send[map[string]string, map[string]string](request)!
// return ServiceListResponse{
// services: services
// }
return services
}
// ServiceStatusResponse represents the response from service_status
pub struct ServiceStatusResponse {
pub mut:
name string // Service name
pid int // Process ID of the running service (if running)
state string // Current state of the service (Running, Success, Error, etc.)
target string // Target state of the service (Up, Down)
after map[string]string // Dependencies of the service and their states
}
// service_status shows detailed status information for a specific service
// name: the name of the service
pub fn (mut c Client) service_status(name string) !ServiceStatusResponse {
request := jsonrpc.new_request_generic('service_status', name)
// Use a direct struct mapping instead of manual conversion
return c.rpc_client.send[string, ServiceStatusResponse](request)!
}
// service_start starts a service
// name: the name of the service to start
pub fn (mut c Client) service_start(name string) ! {
request := jsonrpc.new_request_generic('service_start', name)
c.rpc_client.send[string, string](request)!
}
// service_stop stops a service
// name: the name of the service to stop
pub fn (mut c Client) service_stop(name string) ! {
request := jsonrpc.new_request_generic('service_stop', name)
c.rpc_client.send[string, string](request)!
}
// service_monitor starts monitoring a service
// The service configuration is loaded from the config directory
// name: the name of the service to monitor
pub fn (mut c Client) service_monitor(name string) ! {
request := jsonrpc.new_request_generic('service_monitor', name)
c.rpc_client.send[string, string](request)!
}
// service_delete deletes a service configuration file
// name: the name of the service to delete
pub fn (mut c Client) service_delete(name string) !ServiceDeleteResponse {
request := jsonrpc.new_request_generic('service_delete', name)
result := c.rpc_client.send[string, string](request)!
return ServiceDeleteResponse{
result: result
}
}
// service_forget stops monitoring a service
// You can only forget a stopped service
// name: the name of the service to forget
pub fn (mut c Client) service_forget(name string) ! {
request := jsonrpc.new_request_generic('service_forget', name)
c.rpc_client.send[string, string](request)!
}
// TODO: make sure the signal is a valid signal and enumerator do as @[params] so its optional
// service_kill sends a signal to a running service
// name: the name of the service to send the signal to
// signal: the signal to send (e.g., SIGTERM, SIGKILL)
pub fn (mut c Client) service_kill(name string, signal string) ! {
params := KillParams{
name: name
signal: signal
}
request := jsonrpc.new_request_generic('service_kill', params)
c.rpc_client.send[KillParams, string](request)!
}
// CreateServiceParams represents the parameters for the service_create method
struct CreateServiceParams {
name string // Name of the service to create (also becomes the config file name)
content ServiceConfig // Full configuration for the service
}
// service_create creates a new service configuration file.
// name: the name of the service to create
// config: the service configuration
// Returns the path of the written config file wrapped in a ServiceCreateResponse.
pub fn (mut c Client) service_create(name string, config ServiceConfig) !ServiceCreateResponse {
	create_args := CreateServiceParams{
		name: name
		content: config
	}
	req := jsonrpc.new_request_generic('service_create', create_args)
	created_path := c.rpc_client.send[CreateServiceParams, string](req)!
	return ServiceCreateResponse{
		path: created_path
	}
}
// service_get gets a service configuration file
// name: the name of the service to get
// Returns the config mapped field-by-field into a ServiceConfigResponse.
pub fn (mut c Client) service_get(name string) !ServiceConfigResponse {
request := jsonrpc.new_request_generic('service_get', {
'name': name
})
// We need to handle the conversion from ServiceConfig to ServiceConfigResponse
config := c.rpc_client.send[map[string]string, ServiceConfig](request)!
// NOTE(review): only the fields below are copied; any extra ServiceConfig
// fields (e.g. test/dir, if present) are dropped here — confirm intended.
return ServiceConfigResponse{
exec: config.exec
oneshot: config.oneshot
after: config.after
log: config.log
env: config.env
shutdown_timeout: config.shutdown_timeout
}
}

View File

@@ -1,33 +0,0 @@
module zinit
// ServiceConfigResponse is the client-side view of a zinit service config.
pub struct ServiceConfigResponse {
pub mut:
exec string // Command to run
oneshot bool // true = run once and do not restart (presumably maps to zinit one_shot — confirm)
after []string // Services that must be running before this one starts
log string // How to handle service output (null, ring, stdout)
env map[string]string // Environment variables for the service
shutdown_timeout int // Maximum time to wait for service to stop during shutdown
}
// service_config_build assembles a ServiceConfig with the shared defaults
// (stdout logging, empty environment, 30s shutdown timeout).
// oneshot: true = run once without restart, false = supervised service
fn service_config_build(exec string, oneshot bool) ServiceConfig {
	return ServiceConfig{
		exec: exec
		oneshot: oneshot
		log: log_stdout
		env: map[string]string{}
		shutdown_timeout: 30
	}
}

// Helper function to create a basic (supervised, restartable) service configuration
pub fn new_service_config(exec string) ServiceConfig {
	return service_config_build(exec, false)
}

// Helper function to create a oneshot (run-once) service configuration
pub fn new_oneshot_service_config(exec string) ServiceConfig {
	return service_config_build(exec, true)
}

View File

@@ -1,44 +0,0 @@
module zinit
import freeflowuniverse.herolib.schemas.jsonrpc
// ServiceStatsResponse represents the response from service_stats
pub struct ServiceStatsResponse {
pub mut:
name string // Service name
pid int // Process ID of the service
memory_usage i64 // Memory usage in bytes
cpu_usage f64 // CPU usage as a percentage (0-100)
children []ChildStatsResponse // Stats for child processes, one entry per child
}

// ChildStatsResponse represents statistics for a single child process
pub struct ChildStatsResponse {
pub mut:
pid int // Process ID of the child process
memory_usage i64 // Memory usage in bytes
cpu_usage f64 // CPU usage as a percentage (0-100)
}
// service_stats gets memory and CPU usage statistics for a service.
// name: the name of the service to get stats for
// The server returns a loosely typed map; it is converted to the typed
// response model here, with zero defaults for missing keys.
pub fn (mut c Client) service_stats(name string) !ServiceStatsResponse {
	request := jsonrpc.new_request_generic('service_stats', name)
	raw_stats := c.rpc_client.send[string, map[string]string](request)!
	// Child process stats are not parsed yet from the raw response.
	// TODO: parse 'children' once the wire format is confirmed.
	children := []ChildStatsResponse{}
	return ServiceStatsResponse{
		name: raw_stats['name'] or { '' }
		pid: (raw_stats['pid'] or { '0' }).int()
		memory_usage: (raw_stats['memory_usage'] or { '0' }).i64()
		cpu_usage: (raw_stats['cpu_usage'] or { '0' }).f64()
		children: children
	}
}

View File

@@ -1,71 +0,0 @@
module zinit
import freeflowuniverse.herolib.schemas.jsonrpc
// system_shutdown stops all services and powers off the system.
pub fn (mut c Client) system_shutdown() ! {
	req := jsonrpc.new_request_generic('system_shutdown', []string{})
	c.rpc_client.send[[]string, string](req)!
}
// system_reboot stops all services and reboots the system.
pub fn (mut c Client) system_reboot() ! {
	req := jsonrpc.new_request_generic('system_reboot', []string{})
	c.rpc_client.send[[]string, string](req)!
}
// system_start_http_server starts an HTTP/RPC server at the specified address.
// address: the network address to bind the server to (e.g., '127.0.0.1:8080')
// Returns the raw server result wrapped in a SystemStartHttpServerResponse.
pub fn (mut c Client) system_start_http_server(address string) !SystemStartHttpServerResponse {
	req := jsonrpc.new_request_generic('system_start_http_server', address)
	res := c.rpc_client.send[string, string](req)!
	return SystemStartHttpServerResponse{
		result: res
	}
}
// system_stop_http_server stops the HTTP/RPC server if it is running.
pub fn (mut c Client) system_stop_http_server() ! {
	req := jsonrpc.new_request_generic('system_stop_http_server', []string{})
	c.rpc_client.send[[]string, string](req)!
}
// LogParams carries the optional filter for log streaming calls.
@[params]
pub struct LogParams {
name string // optional service name filter; empty string means all services
}
// stream_current_logs gets current logs from zinit and monitored services.
// name: optional service name filter. If provided, only logs from this service will be returned.
// Returns the log lines as a list of strings.
pub fn (mut c Client) stream_current_logs(args LogParams) ![]string {
	mut logs := []string{}
	if args.name != '' {
		request := jsonrpc.new_request_generic('stream_currentLogs', {
			'name': args.name
		})
		// BUGFIX: the response is a list of log lines ([]string), not a map;
		// the previous generic types could not have matched the []string target.
		logs = c.rpc_client.send[map[string]string, []string](request)!
	} else {
		request := jsonrpc.new_request_generic('stream_currentLogs', map[string]string{})
		logs = c.rpc_client.send[map[string]string, []string](request)!
	}
	return logs
}
// stream_subscribe_logs subscribes to log messages generated by zinit and monitored services
// name: optional service name filter. If provided, only logs from this service will be returned
// Returns the server-assigned subscription id wrapped in StreamSubscribeLogsResponse.
pub fn (mut c Client) stream_subscribe_logs(name ?string) !StreamSubscribeLogsResponse {
mut subscription_id := ''
if service_name := name {
// filter on a single service
request := jsonrpc.new_request_generic('stream_subscribeLogs', service_name)
subscription_id = c.rpc_client.send[string, string](request)!
} else {
// no filter: subscribe to logs from every service
request := jsonrpc.new_request_generic('stream_subscribeLogs', []string{})
subscription_id = c.rpc_client.send[[]string, string](request)!
}
return StreamSubscribeLogsResponse{
subscription_id: subscription_id
}
}

View File

@@ -1,19 +0,0 @@
module zinit
// Helper function to format memory usage in human-readable format.
// Uses integer division, so values are truncated (e.g. 1536 B -> '1 KB').
pub fn format_memory_usage(bytes i64) string {
	mut value := bytes
	for unit in ['B', 'KB', 'MB'] {
		if value < 1024 {
			return '${value} ${unit}'
		}
		value = value / 1024
	}
	return '${value} GB'
}
// Helper function to format CPU usage as a percentage with one decimal place.
pub fn format_cpu_usage(cpu_percent f64) string {
	rendered := '${cpu_percent:.1f}'
	return rendered + '%'
}

View File

@@ -1,9 +1,10 @@
module zinit_rpc
module zinit
import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.schemas.jsonrpcmodel
// Helper function to get or create the RPC client
fn (mut c ZinitRPC) get_client() !&jsonrpc.Client {
fn (mut c ZinitRPC) client_() !&jsonrpc.Client {
if client := c.rpc_client {
return client
}
@@ -16,23 +17,23 @@ fn (mut c ZinitRPC) get_client() !&jsonrpc.Client {
// Admin methods
// rpc_discover returns the OpenRPC specification for the API
pub fn (mut c ZinitRPC) rpc_discover() !OpenRPCSpec {
mut client := c.get_client()!
pub fn (mut c ZinitRPC) rpc_discover() !jsonrpcmodel.OpenRPCSpec {
mut client := c.client_()!
request := jsonrpc.new_request_generic('rpc.discover', []string{})
return client.send[[]string, OpenRPCSpec](request)!
return client.send[[]string, jsonrpcmodel.OpenRPCSpec](request)!
}
// service_list lists all services managed by Zinit
// Returns a map of service names to their current states
pub fn (mut c ZinitRPC) service_list() !map[string]string {
mut client := c.get_client()!
mut client := c.client_()!
request := jsonrpc.new_request_generic('service_list', []string{})
return client.send[[]string, map[string]string](request)!
}
// service_status shows detailed status information for a specific service
pub fn (mut c ZinitRPC) service_status(name string) !ServiceStatus {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -42,7 +43,7 @@ pub fn (mut c ZinitRPC) service_status(name string) !ServiceStatus {
// service_start starts a service
pub fn (mut c ZinitRPC) service_start(name string) ! {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -52,7 +53,7 @@ pub fn (mut c ZinitRPC) service_start(name string) ! {
// service_stop stops a service
pub fn (mut c ZinitRPC) service_stop(name string) ! {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -63,7 +64,7 @@ pub fn (mut c ZinitRPC) service_stop(name string) ! {
// service_monitor starts monitoring a service
// The service configuration is loaded from the config directory
pub fn (mut c ZinitRPC) service_monitor(name string) ! {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -74,7 +75,7 @@ pub fn (mut c ZinitRPC) service_monitor(name string) ! {
// service_forget stops monitoring a service
// You can only forget a stopped service
pub fn (mut c ZinitRPC) service_forget(name string) ! {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -84,7 +85,7 @@ pub fn (mut c ZinitRPC) service_forget(name string) ! {
// service_kill sends a signal to a running service
pub fn (mut c ZinitRPC) service_kill(name string, signal string) ! {
mut client := c.get_client()!
mut client := c.client_()!
params := ServiceKillParams{
name: name
signal: signal
@@ -95,18 +96,21 @@ pub fn (mut c ZinitRPC) service_kill(name string, signal string) ! {
// service_create creates a new service configuration file
pub fn (mut c ZinitRPC) service_create(name string, config ServiceConfig) !string {
mut client := c.get_client()!
mut client := c.client_()!
params := ServiceCreateParams{
name: name
content: config
}
println(params)
$dbg;
request := jsonrpc.new_request_generic('service_create', params)
$dbg;
return client.send[ServiceCreateParams, string](request)!
}
// service_delete deletes a service configuration file
pub fn (mut c ZinitRPC) service_delete(name string) !string {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -116,7 +120,7 @@ pub fn (mut c ZinitRPC) service_delete(name string) !string {
// service_get gets a service configuration file
pub fn (mut c ZinitRPC) service_get(name string) !ServiceConfig {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -126,7 +130,7 @@ pub fn (mut c ZinitRPC) service_get(name string) !ServiceConfig {
// service_stats gets memory and CPU usage statistics for a service
pub fn (mut c ZinitRPC) service_stats(name string) !ServiceStats {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'name': name
}
@@ -138,21 +142,21 @@ pub fn (mut c ZinitRPC) service_stats(name string) !ServiceStats {
// system_shutdown stops all services and powers off the system
pub fn (mut c ZinitRPC) system_shutdown() ! {
mut client := c.get_client()!
mut client := c.client_()!
request := jsonrpc.new_request_generic('system_shutdown', []string{})
client.send[[]string, string](request)!
}
// system_reboot stops all services and reboots the system
pub fn (mut c ZinitRPC) system_reboot() ! {
mut client := c.get_client()!
mut client := c.client_()!
request := jsonrpc.new_request_generic('system_reboot', []string{})
client.send[[]string, string](request)!
}
// system_start_http_server starts an HTTP/RPC server at the specified address
pub fn (mut c ZinitRPC) system_start_http_server(address string) !string {
mut client := c.get_client()!
mut client := c.client_()!
params := {
'address': address
}
@@ -162,7 +166,7 @@ pub fn (mut c ZinitRPC) system_start_http_server(address string) !string {
// system_stop_http_server stops the HTTP/RPC server if running
pub fn (mut c ZinitRPC) system_stop_http_server() ! {
mut client := c.get_client()!
mut client := c.client_()!
request := jsonrpc.new_request_generic('system_stop_http_server', []string{})
client.send[[]string, string](request)!
}
@@ -171,7 +175,7 @@ pub fn (mut c ZinitRPC) system_stop_http_server() ! {
// stream_current_logs gets current logs from zinit and monitored services
pub fn (mut c ZinitRPC) stream_current_logs(args LogParams) ![]string {
mut client := c.get_client()!
mut client := c.client_()!
if args.name != '' {
params := {
'name': args.name
@@ -187,7 +191,7 @@ pub fn (mut c ZinitRPC) stream_current_logs(args LogParams) ![]string {
// stream_subscribe_logs subscribes to log messages generated by zinit and monitored services
// Returns a subscription ID that can be used to manage the subscription
pub fn (mut c ZinitRPC) stream_subscribe_logs(args LogParams) !u64 {
mut client := c.get_client()!
mut client := c.client_()!
if args.name != '' {
params := {
'name': args.name

View File

@@ -0,0 +1,136 @@
module zinit
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
__global (
zinit_global map[string]&ZinitRPC
zinit_default string
)
/////////FACTORY
// ArgsGet selects which ZinitRPC instance to operate on.
@[params]
pub struct ArgsGet {
pub mut:
name string = 'default' // instance name used as registry key
fromdb bool // will load from filesystem (redis-backed context store), bypassing the in-memory cache
create bool // default will not create if not exist
}
// new registers a fresh ZinitRPC instance under args.name (persisting it
// via set) and returns it from the in-memory registry.
pub fn new(args ArgsGet) !&ZinitRPC {
	mut instance := ZinitRPC{
		name: args.name
	}
	set(instance)!
	return get(name: args.name)!
}
// get returns the ZinitRPC instance named args.name.
// With fromdb set (or on first access) the config is loaded from the
// redis-backed context store; with create set a missing config is created
// instead of returning an error.
pub fn get(args ArgsGet) !&ZinitRPC {
	mut context := base.context()!
	zinit_default = args.name
	if args.fromdb || args.name !in zinit_global {
		mut r := context.redis()!
		if r.hexists('context:zinit', args.name)! {
			data := r.hget('context:zinit', args.name)!
			if data.len == 0 {
				// BUGFIX: error messages previously hardcoded the name 'zinit'
				return error('ZinitRPC with name: ${args.name} does not exist, prob bug.')
			}
			mut obj := json.decode(ZinitRPC, data)!
			set_in_mem(obj)!
		} else {
			if args.create {
				new(args)!
			} else {
				return error("ZinitRPC with name '${args.name}' does not exist")
			}
		}
		return get(name: args.name)! // no longer from db nor create
	}
	return zinit_global[args.name] or {
		return error('could not get config for zinit with name:${args.name}')
	}
}
// register the config for the future
// Initializes and caches the config in memory, marks it as the default,
// then persists it as JSON in the redis-backed context store.
pub fn set(o ZinitRPC) ! {
mut o2 := set_in_mem(o)!
zinit_default = o2.name
mut context := base.context()!
mut r := context.redis()!
r.hset('context:zinit', o2.name, json.encode(o2))!
}
// exists reports whether a config with args.name is stored in the context db.
pub fn exists(args ArgsGet) !bool {
	mut ctx := base.context()!
	mut redis := ctx.redis()!
	return redis.hexists('context:zinit', args.name)!
}
// delete removes the named config from the context db and from the
// in-memory cache.
pub fn delete(args ArgsGet) ! {
	mut context := base.context()!
	mut r := context.redis()!
	r.hdel('context:zinit', args.name)!
	// BUGFIX: also drop the cached instance, otherwise a later get() would
	// resurrect the deleted config from zinit_global.
	zinit_global.delete(args.name)
}
// ArgsList controls where list() reads its instances from.
@[params]
pub struct ArgsList {
pub mut:
fromdb bool // will load from filesystem (redis-backed context store) instead of memory
}
// list returns all known ZinitRPC instances.
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&ZinitRPC {
	mut res := []&ZinitRPC{}
	mut context := base.context()!
	if args.fromdb {
		// wipe the in-memory cache, then reload every named config from redis
		zinit_global = map[string]&ZinitRPC{}
		zinit_default = ''
		mut r := context.redis()!
		names := r.hkeys('context:zinit')!
		for name in names {
			res << get(name: name, fromdb: true)!
		}
		return res
	}
	// serve straight from the in-memory cache
	for _, client in zinit_global {
		res << client
	}
	return res
}
// only sets in mem, does not set as config
// Runs obj_init to apply defaults, stores the instance in the global map
// keyed by its (possibly normalized) name, makes it the default, and
// returns the initialized copy.
fn set_in_mem(o ZinitRPC) !ZinitRPC {
mut o2 := obj_init(o)!
zinit_global[o2.name] = &o2
zinit_default = o2.name
return o2
}
// play processes 'zinit.configure' actions from a playbook: each action's
// heroscript is decoded into a ZinitRPC config and registered via set.
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'zinit.') {
return
}
mut install_actions := plbook.find(filter: 'zinit.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
heroscript := install_action.heroscript()
mut obj2 := heroscript_loads(heroscript)!
set(obj2)!
}
}
}
// switch instance to be used for zinit
// Only changes the in-memory default pointer; nothing is persisted.
pub fn switch(name string) {
zinit_default = name
}

View File

@@ -0,0 +1,48 @@
module zinit
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.schemas.jsonrpc
import os
pub const version = '0.0.0' // module version string
const singleton = true // NOTE(review): flags consumed by the herolib generator — confirm semantics
const default = false // NOTE(review): whether a default instance is auto-created — confirm
// // Factory function to create a new ZinitRPC client instance
// @[params]
// pub struct NewClientArgs {
// pub mut:
// name string = 'default'
// socket_path string
// }
// pub fn new_client(args NewClientArgs) !&ZinitRPC {
// mut client := ZinitRPC{
// name: args.name
// socket_path: args.socket_path
// }
// client = obj_init(client)!
// return &client
// }
// ZinitRPC is the configuration object for a zinit JSON-RPC client instance.
@[heap]
pub struct ZinitRPC {
pub mut:
name string = 'default' // instance name used as registry key
socket_path string // unix socket of the zinit RPC server; defaulted in obj_init
rpc_client ?&jsonrpc.Client @[skip] // lazily created client, excluded from serialization
}
// obj_init applies defaults to a freshly built/decoded config.
// An empty socket_path falls back to zinit's standard socket location.
fn obj_init(mycfg_ ZinitRPC) !ZinitRPC {
	mut cfg := mycfg_
	if cfg.socket_path.len == 0 {
		cfg.socket_path = '/tmp/zinit.sock'
	}
	return cfg
}
// heroscript_loads decodes a heroscript string into a ZinitRPC config.
pub fn heroscript_loads(heroscript string) !ZinitRPC {
mut obj := encoderhero.decode[ZinitRPC](heroscript)!
return obj
}

View File

@@ -1,222 +0,0 @@
# Zinit RPC Client
This is a V language client for the Zinit process manager, implementing the JSON-RPC API specification for service management operations.
## Overview
Zinit is a process manager that provides service monitoring, dependency management, and system control capabilities. This client provides a comprehensive API to interact with Zinit via its JSON-RPC interface for administrative tasks such as:
- Service lifecycle management (start, stop, monitor, forget)
- Service configuration management (create, delete, get)
- Service status and statistics monitoring
- System operations (shutdown, reboot, HTTP server control)
- Log streaming and monitoring
## Features
- **✅ 100% API Coverage**: Complete implementation of all 18 methods in the Zinit JSON-RPC specification
- **✅ Production Tested**: All methods tested and working against real Zinit instances
- **✅ Type-safe API**: Proper V struct definitions with comprehensive error handling
- **✅ Subscription Support**: Proper handling of streaming/subscription methods
- **✅ Unix Socket Transport**: Reliable communication via Unix domain sockets
- **✅ Comprehensive Documentation**: Extensive documentation with working examples
## Usage
### Basic Example
```v
import freeflowuniverse.herolib.clients.zinit_rpc
// Create a new client
mut client := zinit_rpc.new_client(
name: 'my_client'
socket_path: '/tmp/zinit.sock'
)!
// List all services
services := client.service_list()!
for service_name, state in services {
println('Service: ${service_name}, State: ${state}')
}
// Get detailed status of a specific service
status := client.service_status('redis')!
println('Service: ${status.name}')
println('PID: ${status.pid}')
println('State: ${status.state}')
println('Target: ${status.target}')
// Start a service
client.service_start('redis')!
// Stop a service
client.service_stop('redis')!
```
### Service Configuration Management
```v
import freeflowuniverse.herolib.clients.zinit_rpc
mut client := zinit_rpc.new_client()!
// Create a new service configuration
config := zinit_rpc.ServiceConfig{
exec: '/usr/bin/redis-server'
oneshot: false
log: 'stdout'
env: {
'REDIS_PORT': '6379'
'REDIS_HOST': '0.0.0.0'
}
shutdown_timeout: 30
}
// Create the service
path := client.service_create('redis', config)!
println('Service created at: ${path}')
// Get service configuration
retrieved_config := client.service_get('redis')!
println('Service exec: ${retrieved_config.exec}')
// Delete service configuration
result := client.service_delete('redis')!
println('Delete result: ${result}')
```
### Service Statistics
```v
import freeflowuniverse.herolib.clients.zinit_rpc
mut client := zinit_rpc.new_client()!
// Get service statistics
stats := client.service_stats('redis')!
println('Service: ${stats.name}')
println('PID: ${stats.pid}')
println('Memory Usage: ${stats.memory_usage} bytes')
println('CPU Usage: ${stats.cpu_usage}%')
// Print child process statistics
for child in stats.children {
println('Child PID: ${child.pid}, Memory: ${child.memory_usage}, CPU: ${child.cpu_usage}%')
}
```
### Log Streaming
```v
import freeflowuniverse.herolib.clients.zinit_rpc
mut client := zinit_rpc.new_client()!
// Get current logs for all services
logs := client.stream_current_logs(name: '')!
for log in logs {
println(log)
}
// Get current logs for a specific service
redis_logs := client.stream_current_logs(name: 'redis')!
for log in redis_logs {
println('Redis: ${log}')
}
// Subscribe to log stream (returns subscription ID)
subscription_id := client.stream_subscribe_logs(name: 'redis')!
println('Subscribed to logs with ID: ${subscription_id}')
```
## API Reference
### Service Management Methods
- `service_list()` - List all services and their states
- `service_status(name)` - Get detailed status of a service
- `service_start(name)` - Start a service
- `service_stop(name)` - Stop a service
- `service_monitor(name)` - Start monitoring a service
- `service_forget(name)` - Stop monitoring a service
- `service_kill(name, signal)` - Send signal to a service
### Service Configuration Methods
- `service_create(name, config)` - Create service configuration
- `service_delete(name)` - Delete service configuration
- `service_get(name)` - Get service configuration
### Monitoring Methods
- `service_stats(name)` - Get service statistics
### System Methods
- `system_shutdown()` - Shutdown the system
- `system_reboot()` - Reboot the system
- `system_start_http_server(address)` - Start HTTP server
- `system_stop_http_server()` - Stop HTTP server
### Streaming Methods
- `stream_current_logs(args)` - Get current logs (returns array of log lines)
- `stream_subscribe_logs(args)` - Subscribe to log stream (returns subscription ID)
### Discovery Methods
- `rpc_discover()` - Get OpenRPC specification
## Configuration
### Using the Factory Pattern
```v
import freeflowuniverse.herolib.clients.zinit_rpc
// Get client using factory (recommended)
mut client := zinit_rpc.get()!
// Use the client
services := client.service_list()!
```
### Example Heroscript Configuration
```hero
!!zinit_rpc.configure
name: 'production'
socket_path: '/tmp/zinit.sock'
```
## Error Handling
The client provides comprehensive error handling for all Zinit-specific error codes:
- `-32000`: Service not found
- `-32001`: Service already monitored
- `-32002`: Service is up
- `-32003`: Service is down
- `-32004`: Invalid signal
- `-32005`: Config error
- `-32006`: Shutting down
- `-32007`: Service already exists
- `-32008`: Service file error
```v
import freeflowuniverse.herolib.clients.zinit_rpc
mut client := zinit_rpc.new_client()!
// Handle specific errors
client.service_start('nonexistent') or {
if err.msg().contains('Service not found') {
println('Service does not exist')
} else {
println('Other error: ${err}')
}
}
```

View File

@@ -1,102 +0,0 @@
module zinit_rpc
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
__global (
zinit_rpc_global map[string]&ZinitRPC
zinit_rpc_default string
)
/////////FACTORY
// ArgsGet selects which zinit_rpc instance to operate on.
@[params]
pub struct ArgsGet {
pub mut:
name string // instance name; empty string is normalized to 'default' in args_get
}
// args_get normalizes ArgsGet, substituting 'default' for an empty name.
fn args_get(args_ ArgsGet) ArgsGet {
	mut normalized := args_
	if normalized.name.len == 0 {
		normalized.name = 'default'
	}
	return normalized
}
// get returns the ZinitRPC config for the (normalized) name, loading it
// from the hero config store on first access and caching it in the global
// map. A name with no stored config is created with defaults and persisted.
pub fn get(args_ ArgsGet) !&ZinitRPC {
mut context := base.context()!
mut args := args_get(args_)
mut obj := ZinitRPC{
name: args.name
}
if args.name !in zinit_rpc_global {
if !exists(args)! {
// no stored config yet: persist the default object
set(obj)!
} else {
heroscript := context.hero_config_get('zinit_rpc', args.name)!
mut obj_ := heroscript_loads(heroscript)!
set_in_mem(obj_)!
}
}
return zinit_rpc_global[args.name] or {
println(zinit_rpc_global)
// bug if we get here because should be in globals
panic('could not get config for zinit_rpc with name, is bug:${args.name}')
}
}
// register the config for the future
// Caches the config in memory, then persists it as heroscript in the
// hero config store under category 'zinit_rpc'.
pub fn set(o ZinitRPC) ! {
set_in_mem(o)!
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set('zinit_rpc', o.name, heroscript)!
}
// exists reports whether a hero config is stored for the (normalized) name.
pub fn exists(args_ ArgsGet) !bool {
	mut ctx := base.context()!
	normalized := args_get(args_)
	return ctx.hero_config_exists('zinit_rpc', normalized.name)
}
// delete removes the stored config and drops the cached in-memory instance.
pub fn delete(args_ ArgsGet) ! {
	mut args := args_get(args_)
	mut context := base.context()!
	context.hero_config_delete('zinit_rpc', args.name)!
	if args.name in zinit_rpc_global {
		// BUGFIX: the removal was left commented out, so a deleted config
		// stayed cached and get() would keep returning it.
		zinit_rpc_global.delete(args.name)
	}
}
// only sets in mem, does not set as config
// Keys the global map and the default on the initialized object's name
// (o2.name), so any normalization done by obj_init is respected —
// consistent with the zinit factory's set_in_mem.
fn set_in_mem(o ZinitRPC) ! {
	mut o2 := obj_init(o)!
	zinit_rpc_global[o2.name] = &o2
	zinit_rpc_default = o2.name
}
// play processes 'zinit_rpc.configure' playbook actions: each action's
// heroscript is decoded into a ZinitRPC config and registered via set.
pub fn play(mut plbook PlayBook) ! {
mut install_actions := plbook.find(filter: 'zinit_rpc.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
heroscript := install_action.heroscript()
mut obj2 := heroscript_loads(heroscript)!
set(obj2)!
}
}
}
// switch instance to be used for zinit_rpc
// Only changes the in-memory default pointer; nothing is persisted.
pub fn switch(name string) {
zinit_rpc_default = name
}
// helpers
// DefaultConfigArgs names the instance a caller wants to target.
@[params]
pub struct DefaultConfigArgs {
instance string = 'default' // instance name
}

View File

@@ -1,163 +0,0 @@
module zinit_rpc
import freeflowuniverse.herolib.data.encoderhero
import freeflowuniverse.herolib.schemas.jsonrpc
pub const version = '0.0.0' // module version string
const singleton = true // NOTE(review): flags consumed by the herolib generator — confirm semantics
const default = false // NOTE(review): whether a default instance is auto-created — confirm

// Default configuration for Zinit JSON-RPC API
pub const default_socket_path = '/tmp/zinit.sock'
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
// ZinitRPC is the configuration object for a zinit JSON-RPC client instance.
@[heap]
pub struct ZinitRPC {
pub mut:
name string = 'default' // instance name used as registry key
socket_path string = default_socket_path // Unix socket path for RPC server
rpc_client ?&jsonrpc.Client @[skip] // lazily created client, excluded from serialization
}
// your checking & initialization code if needed
// obj_init fills in the default socket path when none is configured.
fn obj_init(mycfg_ ZinitRPC) !ZinitRPC {
mut mycfg := mycfg_
if mycfg.socket_path == '' {
mycfg.socket_path = default_socket_path
}
// For now, we'll initialize the client when needed
// The actual client will be created in the factory
return mycfg
}
// Response structs based on OpenRPC specification
// OpenRPCSpec represents the OpenRPC specification structure
pub struct OpenRPCSpec {
pub mut:
openrpc string @[json: 'openrpc'] // OpenRPC version
info OpenRPCInfo @[json: 'info'] // API information
methods []OpenRPCMethod @[json: 'methods'] // Available methods
servers []OpenRPCServer @[json: 'servers'] // Server information
}

// OpenRPCInfo represents API information
pub struct OpenRPCInfo {
pub mut:
version string @[json: 'version'] // API version
title string @[json: 'title'] // API title
description string @[json: 'description'] // API description
license OpenRPCLicense @[json: 'license'] // License information
}

// OpenRPCLicense represents license information
pub struct OpenRPCLicense {
pub mut:
name string @[json: 'name'] // License name
}

// OpenRPCMethod represents an RPC method
pub struct OpenRPCMethod {
pub mut:
name string @[json: 'name'] // Method name
description string @[json: 'description'] // Method description
// Note: params and result are dynamic and would need more complex handling
}

// OpenRPCServer represents server information
pub struct OpenRPCServer {
pub mut:
name string @[json: 'name'] // Server name
url string @[json: 'url'] // Server URL
}
// ServiceStatus represents detailed status information for a service
pub struct ServiceStatus {
pub mut:
name string @[json: 'name'] // Service name
pid u32 @[json: 'pid'] // Process ID of the running service (if running)
state string @[json: 'state'] // Current state of the service (Running, Success, Error, etc.)
target string @[json: 'target'] // Target state of the service (Up, Down)
after map[string]string @[json: 'after'] // Dependencies of the service and their states
}

// ServiceConfig represents the configuration for a zinit service
pub struct ServiceConfig {
pub mut:
exec string @[json: 'exec'] // Command to run
test string @[json: 'test'] // Test command (optional)
oneshot bool @[json: 'oneshot'] // true = run once and do not restart (maps to one_shot in Zinit)
after []string @[json: 'after'] // Services that must be running before this one starts
log string @[json: 'log'] // How to handle service output (null, ring, stdout)
env map[string]string @[json: 'env'] // Environment variables for the service
dir string @[json: 'dir'] // Working directory for the service
shutdown_timeout u64 @[json: 'shutdown_timeout'] // Maximum time to wait for service to stop during shutdown
}
// ServiceStats represents memory and CPU usage statistics for a service
pub struct ServiceStats {
pub mut:
name string @[json: 'name'] // Service name
pid u32 @[json: 'pid'] // Process ID of the service
memory_usage u64 @[json: 'memory_usage'] // Memory usage in bytes
cpu_usage f32 @[json: 'cpu_usage'] // CPU usage as a percentage (0-100)
children []ChildStats @[json: 'children'] // Stats for child processes, one entry per child
}

// ChildStats represents statistics for a single child process
pub struct ChildStats {
pub mut:
pid u32 @[json: 'pid'] // Process ID of the child process
memory_usage u64 @[json: 'memory_usage'] // Memory usage in bytes
cpu_usage f32 @[json: 'cpu_usage'] // CPU usage as a percentage (0-100)
}
// ServiceCreateParams represents parameters for the service_create method
pub struct ServiceCreateParams {
pub mut:
name string @[json: 'name'] // Name of the service to create
content ServiceConfig @[json: 'content'] // Full configuration for the service
}

// ServiceKillParams represents parameters for the service_kill method
pub struct ServiceKillParams {
pub mut:
name string @[json: 'name'] // Name of the service to kill
signal string @[json: 'signal'] // Signal to send (e.g., SIGTERM, SIGKILL)
}
// LogParams represents parameters for log streaming methods
@[params]
pub struct LogParams {
pub mut:
name string // Optional service name filter; empty string means all services
}
/////////////NORMALLY NO NEED TO TOUCH
// heroscript_dumps encodes a ZinitRPC config to heroscript text.
pub fn heroscript_dumps(obj ZinitRPC) !string {
return encoderhero.encode[ZinitRPC](obj)!
}

// heroscript_loads decodes heroscript text into a ZinitRPC config.
pub fn heroscript_loads(heroscript string) !ZinitRPC {
mut obj := encoderhero.decode[ZinitRPC](heroscript)!
return obj
}
// Factory function to create a new ZinitRPC client instance
@[params]
pub struct NewClientArgs {
pub mut:
name string = 'default' // instance name
socket_path string = default_socket_path // Unix socket path for RPC server
}

// new_client builds and initializes a standalone ZinitRPC instance.
// The instance is NOT registered in the global factory; the caller owns it.
pub fn new_client(args NewClientArgs) !&ZinitRPC {
mut client := ZinitRPC{
name: args.name
socket_path: args.socket_path
}
client = obj_init(client)!
return &client
}

View File

@@ -2,25 +2,16 @@ module base
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.dbfs
import freeflowuniverse.herolib.crypt.aes_symmetric
import freeflowuniverse.herolib.ui
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.rootpath
import json
import os
import crypto.md5
@[heap]
pub struct Context {
mut:
// priv_key_ ?&secp256k1.Secp256k1 @[skip; str: skip]
params_ ?&paramsparser.Params
dbcollection_ ?&dbfs.DBCollection @[skip; str: skip]
redis_ ?&redisclient.Redis @[skip; str: skip]
path_ ?pathlib.Path
params_ ?&paramsparser.Params
redis_ ?&redisclient.Redis @[skip; str: skip]
path_ ?pathlib.Path
pub mut:
// snippets map[string]string
config ContextConfig
@@ -34,10 +25,10 @@ pub mut:
params string
coderoot string
interactive bool
secret string // is hashed secret
priv_key string // encrypted version
db_path string // path to dbcollection
encrypt bool
// secret string // is hashed secret
// priv_key string // encrypted version
// db_path string // path to dbcollection
// encrypt bool
}
// return the gistructure as is being used in context
@@ -80,8 +71,6 @@ pub fn (mut self Context) redis() !&redisclient.Redis {
pub fn (mut self Context) save() ! {
jsonargs := json.encode_pretty(self.config)
mut r := self.redis()!
// console.print_debug("save")
// console.print_debug(jsonargs)
r.set('context:config', jsonargs)!
}
@@ -89,8 +78,6 @@ pub fn (mut self Context) save() ! {
pub fn (mut self Context) load() ! {
mut r := self.redis()!
d := r.get('context:config')!
// console.print_debug("load")
// console.print_debug(d)
if d.len > 0 {
self.config = json.decode(ContextConfig, d)!
}
@@ -101,96 +88,42 @@ fn (mut self Context) cfg_redis_exists() !bool {
return r.exists('context:config')!
}
// return db collection
pub fn (mut self Context) dbcollection() !&dbfs.DBCollection {
mut dbc2 := self.dbcollection_ or {
if self.config.db_path.len == 0 {
self.config.db_path = '${os.home_dir()}/hero/db/${self.config.id}'
}
mut dbc := dbfs.get(
contextid: self.config.id
dbpath: self.config.db_path
secret: self.config.secret
)!
self.dbcollection_ = &dbc
&dbc
}
// pub fn (mut self Context) secret_encrypt(txt string) !string {
// return aes_symmetric.encrypt_str(txt, self.secret_get()!)
// }
return dbc2
}
// pub fn (mut self Context) secret_decrypt(txt string) !string {
// return aes_symmetric.decrypt_str(txt, self.secret_get()!)
// }
pub fn (mut self Context) db_get(dbname string) !dbfs.DB {
mut dbc := self.dbcollection()!
return dbc.db_get_create(name: dbname, withkeys: true)!
}
// pub fn (mut self Context) secret_get() !string {
// mut secret := self.config.secret
// if secret == '' {
// self.secret_configure()!
// secret = self.config.secret
// self.save()!
// }
// if secret == '' {
// return error("can't get secret")
// }
// return secret
// }
// always return the config db which is the same for all apps in context
pub fn (mut self Context) db_config_get() !dbfs.DB {
mut dbc := self.dbcollection()!
return dbc.db_get_create(name: 'config', withkeys: true)!
}
// // show a UI in console to configure the secret
// pub fn (mut self Context) secret_configure() ! {
// mut myui := ui.new()!
// console.clear()
// secret_ := myui.ask_question(question: 'Please enter your hero secret string:')!
// self.secret_set(secret_)!
// }
pub fn (mut self Context) hero_config_set(cat string, name string, content_ string) ! {
mut content := texttools.dedent(content_)
content = rootpath.shell_expansion(content)
path := '${self.path()!.path}/${cat}__${name}.yaml'
mut config_file := pathlib.get_file(path: path)!
config_file.write(content)!
}
pub fn (mut self Context) hero_config_delete(cat string, name string) ! {
path := '${self.path()!.path}/${cat}__${name}.yaml'
mut config_file := pathlib.get_file(path: path)!
config_file.delete()!
}
pub fn (mut self Context) hero_config_exists(cat string, name string) bool {
path := '${os.home_dir()}/hero/context/${self.config.name}/${cat}__${name}.yaml'
return os.exists(path)
}
pub fn (mut self Context) hero_config_get(cat string, name string) !string {
path := '${self.path()!.path}/${cat}__${name}.yaml'
mut config_file := pathlib.get_file(path: path, create: false)!
return config_file.read()!
}
pub fn (mut self Context) secret_encrypt(txt string) !string {
return aes_symmetric.encrypt_str(txt, self.secret_get()!)
}
pub fn (mut self Context) secret_decrypt(txt string) !string {
return aes_symmetric.decrypt_str(txt, self.secret_get()!)
}
pub fn (mut self Context) secret_get() !string {
mut secret := self.config.secret
if secret == '' {
self.secret_configure()!
secret = self.config.secret
self.save()!
}
if secret == '' {
return error("can't get secret")
}
return secret
}
// show a UI in console to configure the secret
pub fn (mut self Context) secret_configure() ! {
mut myui := ui.new()!
console.clear()
secret_ := myui.ask_question(question: 'Please enter your hero secret string:')!
self.secret_set(secret_)!
}
// unhashed secret
pub fn (mut self Context) secret_set(secret_ string) ! {
secret := secret_.trim_space()
secret2 := md5.hexhash(secret)
self.config.secret = secret2
self.save()!
}
// // unhashed secret
// pub fn (mut self Context) secret_set(secret_ string) ! {
// secret := secret_.trim_space()
// secret2 := md5.hexhash(secret)
// self.config.secret = secret2
// self.save()!
// }
pub fn (mut self Context) path() !pathlib.Path {
return self.path_ or {

View File

@@ -36,30 +36,12 @@ pub fn context_new(args_ ContextConfigArgs) !&Context {
params: args_.params
coderoot: args_.coderoot
interactive: args_.interactive
secret: args_.secret
encrypt: args_.encrypt
}
if args.encrypt && args.secret == '' && args.interactive {
mut myui := ui.new()!
console.clear()
args.secret = myui.ask_question(question: 'Please enter your hero secret string:')!
}
if args.encrypt && args.secret.len > 0 {
args.secret = md5.hexhash(args.secret)
}
mut c := Context{
config: args
}
// if args_.priv_key_hex.len > 0 {
// c.privkey_set(args_.priv_key_hex)!
// }
// c.save()!
if args.params.len > 0 {
mut p := paramsparser.new('')!
c.params_ = &p

View File

@@ -3,7 +3,6 @@ module base
import freeflowuniverse.herolib.data.ourtime
// import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.data.dbfs
import freeflowuniverse.herolib.core.logger
import json
import freeflowuniverse.herolib.core.pathlib
@@ -32,16 +31,6 @@ pub mut:
// return 'hero:sessions:${self.guid()}'
// }
// get db of the session, is unique per session
pub fn (mut self Session) db_get() !dbfs.DB {
return self.context.db_get('session_${self.name}')!
}
// get the db of the config, is unique per context
pub fn (mut self Session) db_config_get() !dbfs.DB {
return self.context.db_get('config')!
}
// load the params from redis
pub fn (mut self Session) load() ! {
mut r := self.context.redis()!

View File

@@ -7,7 +7,7 @@ import freeflowuniverse.herolib.core.pathlib
@if args.startupmanager
import freeflowuniverse.herolib.osal.systemd
import freeflowuniverse.herolib.osal.zinit
import freeflowuniverse.herolib.osal.startupmanager
@end
import freeflowuniverse.herolib.installers.ulist
@@ -20,11 +20,11 @@ import freeflowuniverse.herolib.installers.lang.python
import os
@if args.startupmanager
fn startupcmd () ![]zinit.ZProcessNewArgs{
fn startupcmd () ![]startupmanager.ZProcessNewArgs{
mut installer := get()!
mut res := []zinit.ZProcessNewArgs{}
mut res := []startupmanager.ZProcessNewArgs{}
//THIS IS EXAMPLE CODEAND NEEDS TO BE CHANGED
// res << zinit.ZProcessNewArgs{
// res << startupmanager.ZProcessNewArgs{
// name: '${args.name}'
// cmd: '${args.name} server'
// env: {

View File

@@ -5,10 +5,10 @@ import freeflowuniverse.herolib.core.base
@end
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import json
@if args.cat == .installer
import freeflowuniverse.herolib.osal.startupmanager
import freeflowuniverse.herolib.osal.zinit
@if args.startupmanager
import time
@end
@@ -21,93 +21,147 @@ __global (
/////////FACTORY
^^[params]
pub struct ArgsGet{
@if args.hasconfig
@@[params]
pub struct ArgsGet {
pub mut:
name string
name string = "default"
fromdb bool //will load from filesystem
create bool //default will not create if not exist
}
@else
@@[params]
pub struct ArgsGet {
pub mut:
name string = "default"
}
@end
@if args.hasconfig
fn args_get (args_ ArgsGet) ArgsGet {
mut args:=args_
if args.name == ""{
args.name = "default"
}
return args
pub fn new(args ArgsGet) !&${args.classname} {
mut obj := ${args.classname}{
name: args.name
}
set(obj)!
return get(name:args.name)!
}
pub fn get(args_ ArgsGet) !&${args.classname} {
mut context:=base.context()!
mut args := args_get(args_)
mut obj := ${args.classname}{name:args.name}
if !(args.name in ${args.name}_global) {
if ! exists(args)!{
set(obj)!
}else{
heroscript := context.hero_config_get("${args.name}",args.name)!
mut obj_:=heroscript_loads(heroscript)!
set_in_mem(obj_)!
}
pub fn get(args ArgsGet) !&${args.classname} {
mut context := base.context()!
${args.name}_default = args.name
if args.fromdb || args.name !in ${args.name}_global {
mut r := context.redis()!
if r.hexists('context:${args.name}', args.name)! {
data := r.hget('context:${args.name}', args.name)!
if data.len == 0 {
return error('${args.classname} with name: ${args.name} does not exist, prob bug.')
}
mut obj := json.decode(${args.classname},data)!
set_in_mem(obj)!
}else{
if args.create {
new(args)!
}else{
return error("${args.classname} with name '${args.name}' does not exist")
}
}
return get(name: args.name)! //no longer from db nor create
}
return ${args.name}_global[args.name] or {
println(${args.name}_global)
//bug if we get here because should be in globals
panic("could not get config for ${args.name} with name, is bug:??{args.name}")
}
return error('could not get config for ${args.name} with name:${args.name}')
}
}
//register the config for the future
pub fn set(o ${args.classname})! {
set_in_mem(o)!
mut context := base.context()!
heroscript := heroscript_dumps(o)!
context.hero_config_set("${args.name}", o.name, heroscript)!
// register the config for the future
pub fn set(o ${args.classname}) ! {
mut o2:=set_in_mem(o)!
${args.name}_default = o2.name
mut context := base.context()!
mut r := context.redis()!
r.hset('context:${args.name}', o2.name, json.encode(o2))!
}
//does the config exists?
pub fn exists(args_ ArgsGet)! bool {
mut context := base.context()!
mut args := args_get(args_)
return context.hero_config_exists("${args.name}", args.name)
// does the config exists?
pub fn exists(args ArgsGet) !bool {
mut context := base.context()!
mut r := context.redis()!
return r.hexists('context:${args.name}', args.name)!
}
pub fn delete(args_ ArgsGet)! {
mut args := args_get(args_)
mut context:=base.context()!
context.hero_config_delete("${args.name}",args.name)!
if args.name in ${args.name}_global {
//del ${args.name}_global[args.name]
}
pub fn delete(args ArgsGet) ! {
mut context := base.context()!
mut r := context.redis()!
r.hdel('context:${args.name}', args.name)!
}
//only sets in mem, does not set as config
fn set_in_mem(o ${args.classname})! {
mut o2:=obj_init(o)!
${args.name}_global[o.name] = &o2
${args.name}_default = o.name
@@[params]
pub struct ArgsList {
pub mut:
fromdb bool //will load from filesystem
}
// if fromdb set: load from filesystem, and not from mem, will also reset what is in mem
pub fn list(args ArgsList) ![]&${args.classname} {
mut res := []&${args.classname}{}
mut context := base.context()!
if args.fromdb {
// reset what is in mem
${args.name}_global = map[string]&${args.classname}{}
${args.name}_default = ''
}
if args.fromdb {
mut r := context.redis()!
mut l := r.hkeys('context:${args.name}')!
for name in l{
res << get(name:name,fromdb:true)!
}
return res
} else {
// load from memory
for _, client in ${args.name}_global {
res << client
}
}
return res
}
// only sets in mem, does not set as config
fn set_in_mem(o ${args.classname}) ! ${args.classname} {
mut o2 := obj_init(o)!
${args.name}_global[o2.name] = &o2
${args.name}_default = o2.name
return o2
}
@else
pub fn get(args_ ArgsGet) !&${args.classname} {
return &${args.classname}{}
pub fn new(args ArgsGet) !&${args.classname} {
return &${args.classname}{}
}
pub fn get(args ArgsGet) !&${args.classname} {
return new(args)!
}
@end
pub fn play(mut plbook PlayBook) ! {
@if args.hasconfig
mut install_actions := plbook.find(filter: '${args.name}.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
heroscript:=install_action.heroscript()
mut obj2:=heroscript_loads(heroscript)!
set(obj2)!
}
if ! plbook.exists(filter: '${args.name}.'){
return
}
@end
mut install_actions := plbook.find(filter: '${args.name}.configure')!
if install_actions.len > 0 {
@if args.hasconfig
for install_action in install_actions {
heroscript := install_action.heroscript()
mut obj2 := heroscript_loads(heroscript)!
set(obj2)!
}
@else
return error("can't configure ${args.name}, because no configuration allowed for this installer.")
@end
}
@if args.cat == .installer
mut other_actions := plbook.find(filter: '${args.name}.')!
for other_action in other_actions {
@@ -145,48 +199,58 @@ pub fn play(mut plbook PlayBook) ! {
}
@end
}
@end
@end
}
@if args.cat == .installer
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManager {
@if args.startupmanager
fn startupmanager_get(cat startupmanager.StartupManagerType) !startupmanager.StartupManager {
// unknown
// screen
// zinit
// tmux
// systemd
// systemd
match cat{
.screen {
console.print_debug("startupmanager: screen")
return startupmanager.get(.screen)!
}
.zinit{
console.print_debug("startupmanager: zinit")
return startupmanager.get(cat:.zinit)!
return startupmanager.get(.zinit)!
}
.systemd{
console.print_debug("startupmanager: systemd")
return startupmanager.get(cat:.systemd)!
return startupmanager.get(.systemd)!
}else{
console.print_debug("startupmanager: auto")
return startupmanager.get()!
return startupmanager.get(.auto)!
}
}
}
@end
@if args.hasconfig
//load from disk and make sure is properly intialized
pub fn (mut self ${args.classname}) reload() ! {
@if ! args.singleton
switch(self.name)
@end
self=obj_init(self)!
}
@end
@if args.startupmanager
pub fn (mut self ${args.classname}) start() ! {
@if ! args.singleton
switch(self.name)
@end
if self.running()!{
return
}
@@ -250,10 +314,12 @@ pub fn (mut self ${args.classname}) running() !bool {
//walk over the generic processes, if not running return
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
r:=sm.running(zprocess.name)!
if r==false{
return false
if zprocess.startuptype != .screen{
mut sm:=startupmanager_get(zprocess.startuptype)!
r:=sm.running(zprocess.name)!
if r==false{
return false
}
}
}
return running()!
@@ -291,15 +357,9 @@ pub fn (mut self ${args.classname}) destroy() ! {
@end
//switch instance to be used for ${args.name}
// switch instance to be used for ${args.name}
pub fn switch(name string) {
@if ! args.singleton
${args.name}_default = name
@end
}
//helpers
^^[params]
pub struct DefaultConfigArgs{
instance string = 'default'
}

View File

@@ -69,10 +69,6 @@ fn configure() ! {
/////////////NORMALLY NO NEED TO TOUCH
pub fn heroscript_dumps(obj ${args.classname}) !string {
return encoderhero.encode[${args.classname} ](obj)!
}
pub fn heroscript_loads(heroscript string) !${args.classname} {
mut obj := encoderhero.decode[${args.classname}](heroscript)!
return obj

View File

@@ -1,69 +0,0 @@
module installer_client
import freeflowuniverse.herolib.ui.console
import os
import freeflowuniverse.herolib.core.pathlib
// will ask questions & create the .heroscript
pub fn ask(path string) ! {
mut myconsole := console.new()
mut model := gen_model_get(path, false)!
console.clear()
console.print_header('Configure generation of code for a module on path:')
console.print_green('Path: ${path}')
console.lf()
model.classname = myconsole.ask_question(
description: 'Class name of the ${model.cat}'
question: 'What is the class name of the generator e.g. MyClass ?'
warning: 'Please provide a valid class name for the generator'
default: model.classname
minlen: 4
)!
model.title = myconsole.ask_question(
description: 'Title of the ${model.cat} (optional)'
default: model.title
)!
model.hasconfig = !myconsole.ask_yesno(
description: 'Is there a config (normally yes)?'
default: model.hasconfig
)!
if model.hasconfig {
model.singleton = !myconsole.ask_yesno(
description: 'Can there be multiple instances (normally yes)?'
default: !model.singleton
)!
if model.cat == .installer {
model.templates = myconsole.ask_yesno(
description: 'Will there be templates available for your installer?'
default: model.templates
)!
}
} else {
model.singleton = true
}
if model.cat == .installer {
model.startupmanager = myconsole.ask_yesno(
description: 'Is this an installer which will be managed by a startup mananger?'
default: model.startupmanager
)!
model.build = myconsole.ask_yesno(
description: 'Are there builders for the installers (compilation)'
default: model.build
)!
}
// if true{
// println(model)
// panic("Sdsd")
// }
gen_model_set(GenerateArgs{ model: model, path: path })!
}

View File

@@ -1,85 +0,0 @@
module installer_client
import freeflowuniverse.herolib.ui.console
import os
@[params]
pub struct GenerateArgs {
pub mut:
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
path string
playonly bool
model ?GenModel
cat ?Cat
}
pub struct PlayArgs {
pub mut:
name string
modulepath string
}
// the default to start with
//
// reset bool // regenerate all, dangerous !!!
// interactive bool //if we want to ask
// path string
// model ?GenModel
// cat ?Cat
//
// will return the module path where we need to execute a play command as well as the name of
pub fn do(args_ GenerateArgs) !PlayArgs {
mut args := args_
console.print_header('Generate code for path: ${args.path} (reset:${args.reset}, interactive:${args.interactive})')
mut create := true // to create .heroscript
mut model := args.model or {
create = false // we cannot create because model not given
if args.path == '' {
args.path = os.getwd()
}
mut m := gen_model_get(args.path, false)!
m
}
if model.classname == '' {
args.interactive = true
}
if create {
if args.path == '' {
return error('need to specify path fo ${args_} because we asked to create .heroscript ')
}
gen_model_set(args)! // persist it on disk
} else {
if args.path == '' {
args.path = os.getwd()
}
}
// if model.cat == .unknown {
// model.cat = args.cat or { return error('cat needs to be specified for generator.') }
// }
if args.interactive {
ask(args.path)!
args.model = gen_model_get(args.path, false)!
} else {
args.model = model
}
console.print_debug(args)
// only generate if playonly is false and there is a classname
if !args.playonly && model.classname.len > 0 {
generate(args)!
}
return PlayArgs{
name: model.play_name
modulepath: model.module_path
}
}

View File

@@ -1,77 +0,0 @@
module installer_client
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.pathlib
// generate based on filled in args, ask has to be done before
fn generate(args GenerateArgs) ! {
console.print_debug('generate code for path: ${args.path}')
// as used in the templates
model := args.model or { panic('bug no model specified in generate') }
mut path_actions := pathlib.get(args.path + '/${model.name}_actions.v')
if args.reset {
path_actions.delete()!
}
if !path_actions.exists() && model.cat == .installer {
console.print_debug('write installer actions')
mut templ_1 := $tmpl('templates/objname_actions.vtemplate')
pathlib.template_write(templ_1, '${args.path}/${model.name}_actions.v', true)!
}
mut templ_2 := $tmpl('templates/objname_factory_.vtemplate')
pathlib.template_write(templ_2, '${args.path}/${model.name}_factory_.v', true)!
mut path_model := pathlib.get(args.path + '/${model.name}_model.v')
if args.reset || !path_model.exists() {
console.print_debug('write model.')
mut templ_3 := $tmpl('templates/objname_model.vtemplate')
pathlib.template_write(templ_3, '${args.path}/${model.name}_model.v', true)!
}
// TODO: check case sensistivity for delete
mut path_readme := pathlib.get(args.path + '/readme.md')
if args.reset || !path_readme.exists() {
mut templ_readme := $tmpl('templates/readme.md')
pathlib.template_write(templ_readme, '${args.path}/readme.md', true)!
}
mut path_templ_dir := pathlib.get_dir(path: args.path + '/templates', create: false)!
if args.reset {
path_templ_dir.delete()!
}
if (args.model or { panic('bug') }).templates {
if !path_templ_dir.exists() {
mut templ_6 := $tmpl('templates/atemplate.yaml')
pathlib.template_write(templ_6, '${args.path}/templates/atemplate.yaml', true)!
}
}
}
// fn platform_check(args GenModel) ! {
// ok := 'osx,ubuntu,arch'
// ok2 := ok.split(',')
// for i in args.supported_platforms {
// if i !in ok2 {
// return error('cannot find ${i} in choices for supported_platforms. Valid ones are ${ok}')
// }
// }
// }
// pub fn (args GenModel) platform_check_str() string {
// mut out := ''
// if 'osx' in args.supported_platforms {
// out += 'myplatform == .osx || '
// }
// if 'ubuntu' in args.supported_platforms {
// out += 'myplatform == .ubuntu ||'
// }
// if 'arch' in args.supported_platforms {
// out += 'myplatform == .arch ||'
// }
// out = out.trim_right('|')
// return out
// }

View File

@@ -1,136 +0,0 @@
module installer_client
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
pub struct GenModel {
pub mut:
name string
classname string
default bool = true // means user can just get the object and a default will be created
title string
// supported_platforms []string // only relevant for installers for now
singleton bool // means there can only be one
templates bool // means we will use templates in the installer, client doesn't do this'
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
startupmanager bool = true
build bool = true
hasconfig bool = true
cat Cat // dont' set default
play_name string // e.g. docusaurus is what we look for
module_path string // e.g.freeflowuniverse.herolib.web.docusaurus
}
pub enum Cat {
unknown
client
installer
}
// creates the heroscript from the GenModel as part of GenerateArgs
pub fn gen_model_set(args GenerateArgs) ! {
console.print_debug('Code generator set: ${args}')
model := args.model or { return error('model is none') }
heroscript_templ := match model.cat {
.client { $tmpl('templates/heroscript_client') }
.installer { $tmpl('templates/heroscript_installer') }
else { return error('Invalid category: ${model.cat}') }
}
pathlib.template_write(heroscript_templ, '${args.path}/.heroscript', true)!
}
// loads the heroscript and return the model
pub fn gen_model_get(path string, create bool) !GenModel {
console.print_debug('play installer code for path: ${path}')
mut config_path := pathlib.get_file(path: '${path}/.heroscript', create: create)!
mut plbook := playbook.new(text: config_path.read()!)!
mut model := GenModel{}
mut found := false
mut install_actions := plbook.find(filter: 'hero_code.generate_installer')!
if install_actions.len > 0 {
for install_action in install_actions {
if found {
return error('cannot find more than one her_code.generate_installer ... in ${path}')
}
found = true
mut p := install_action.params
model = GenModel{
name: p.get_default('name', '')!
classname: p.get_default('classname', '')!
title: p.get_default('title', '')!
default: p.get_default_true('default')
// supported_platforms: p.get_list('supported_platforms')!
singleton: p.get_default_false('singleton')
templates: p.get_default_false('templates')
startupmanager: p.get_default_true('startupmanager')
build: p.get_default_true('build')
hasconfig: p.get_default_true('hasconfig')
cat: .installer
}
}
}
mut client_actions := plbook.find(filter: 'hero_code.generate_client')!
if client_actions.len > 0 {
for client_action in client_actions {
if found {
return error('cannot find more than one her_code.generate_client ... in ${path}')
}
found = true
mut p := client_action.params
model = GenModel{
name: p.get_default('name', '')!
classname: p.get_default('classname', '')!
title: p.get_default('title', '')!
default: p.get_default_true('default')
singleton: p.get_default_false('singleton')
hasconfig: p.get_default_true('hasconfig')
cat: .client
}
}
}
if model.cat == .unknown {
if path.contains('clients') {
model.cat = .client
} else {
model.cat = .installer
}
}
if model.name == '' {
model.name = os.base(path).to_lower()
}
model.play_name = model.name
pathsub := path.replace('${os.home_dir()}/code/github/', '')
model.module_path = pathsub.replace('/', '.').replace('.lib.', '.')
// !!hero_code.play
// name:'docusaurus'
mut play_actions := plbook.find(filter: 'hero_code.play')!
if play_actions.len > 1 {
return error('should have max 1 hero_code.play action in ${config_path.path}')
}
if play_actions.len == 1 {
mut p := play_actions[0].params
model.play_name = p.get_default('name', model.name)!
}
if model.module_path.contains('docusaurus') {
println(model)
println('4567ujhjk')
exit(0)
}
return model
}

View File

@@ -1,71 +0,0 @@
# generation framework for clients & installers
```bash
#generate all play commands
hero generate -playonly
#will ask questions if .heroscript is not there yet
hero generate -p thepath_is_optional
# to generate without questions
hero generate -p thepath_is_optional -t client
#if installer, default is a client
hero generate -p thepath_is_optional -t installer
#when you want to scan over multiple directories
hero generate -p thepath_is_optional -t installer -s
```
there will be a ```.heroscript``` in the director you want to generate for, the format is as follows:
```hero
//for a server
!!hero_code.generate_installer
name:'daguserver'
classname:'DaguServer'
singleton:1 //there can only be 1 object in the globals, is called 'default'
templates:1 //are there templates for the installer
title:''
startupmanager:1 //managed by a startup manager, default true
build:1 //will we also build the component
//or for a client
!!hero_code.generate_client
name:'mail'
classname:'MailClient'
singleton:0 //default is 0
```
needs to be put as .heroscript in the directories which we want to generate
## templates remarks
in templates:
- ^^ or @@ > gets replaced to @
- ?? > gets replaced to $
this is to make distinction between processing at compile time (pre-compile) or at runtime.
## call by code
to call in code
```v
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.code.generator.generic
generic.scan(path:"~/code/github/freeflowuniverse/herolib/herolib/installers",force:true)!
```
to run from bash
```bash
~/code/github/freeflowuniverse/herolib/scripts/fix_installers.vsh
```

View File

@@ -1,49 +0,0 @@
module installer_client
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
@[params]
pub struct ScannerArgs {
pub mut:
reset bool // regenerate all, dangerous !!!
interactive bool // if we want to ask
path string
playonly bool
}
// scan over a set of directories call the play where
pub fn scan(args ScannerArgs) ! {
console.print_debug('Code generator scan: ${args.path}')
if args.path == '' {
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/installers')!
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/clients')!
scan(path: '${os.home_dir()}/code/github/freeflowuniverse/herolib/lib/web')!
return
}
console.print_header('Scan for generation of code for ${args.path}')
// now walk over all directories, find .heroscript
mut pathroot := pathlib.get_dir(path: args.path, create: false)!
mut plist := pathroot.list(
recursive: true
ignoredefault: false
regex: ['.heroscript']
)!
for mut p in plist.paths {
pparent := p.parent()!
path_module := pparent.path
if os.exists('${path_module}/.heroscript') {
do(
interactive: args.interactive
path: path_module
reset: args.reset
playonly: args.playonly
)!
}
}
}

View File

@@ -1,5 +0,0 @@
name: ??{model.name}

View File

@@ -1,7 +0,0 @@
!!hero_code.generate_client
name: "${model.name}"
classname: "${model.classname}"
hasconfig: ${model.hasconfig}
singleton: ${model.singleton}
default: ${model.default}
title: "${model.title}"

View File

@@ -1,11 +0,0 @@
!!hero_code.generate_installer
name: "${model.name}"
classname: "${model.classname}"
hasconfig: ${model.hasconfig}
singleton: ${model.singleton}
default: ${model.default}
title: "${model.title}"
templates: ${model.templates}
build: ${model.build}
startupmanager: ${model.startupmanager}

View File

@@ -1,219 +0,0 @@
module ${model.name}
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.installers.ulist
import freeflowuniverse.herolib.installers.base
@if model.startupmanager
import freeflowuniverse.herolib.osal.systemd
import freeflowuniverse.herolib.osal.zinit
@end
@if model.build
import freeflowuniverse.herolib.installers.lang.golang
import freeflowuniverse.herolib.installers.lang.rust
import freeflowuniverse.herolib.installers.lang.python
@end
import os
@if model.startupmanager
fn startupcmd () ![]zinit.ZProcessNewArgs{
mut installer := get()!
mut res := []zinit.ZProcessNewArgs{}
//THIS IS EXAMPLE CODEAND NEEDS TO BE CHANGED
// res << zinit.ZProcessNewArgs{
// name: '${model.name}'
// cmd: '${model.name} server'
// env: {
// 'HOME': '/root'
// }
// }
return res
}
fn running_() !bool {
mut installer := get()!
//THIS IS EXAMPLE CODEAND NEEDS TO BE CHANGED
// this checks health of ${model.name}
// curl http://localhost:3333/api/v1/s --oauth2-bearer 1234 works
// url:='http://127.0.0.1:??{cfg.port}/api/v1'
// mut conn := httpconnection.new(name: '${model.name}', url: url)!
// if cfg.secret.len > 0 {
// conn.default_header.add(.authorization, 'Bearer ??{cfg.secret}')
// }
// conn.default_header.add(.content_type, 'application/json')
// console.print_debug("curl -X 'GET' '??{url}'/tags --oauth2-bearer ??{cfg.secret}")
// r := conn.get_json_dict(prefix: 'tags', debug: false) or {return false}
// println(r)
// if true{panic("ssss")}
// tags := r['Tags'] or { return false }
// console.print_debug(tags)
// console.print_debug('${model.name} is answering.')
return false
}
fn start_pre()!{
}
fn start_post()!{
}
fn stop_pre()!{
}
fn stop_post()!{
}
@end
//////////////////// following actions are not specific to instance of the object
@if model.cat == .installer
// checks if a certain version or above is installed
fn installed_() !bool {
//THIS IS EXAMPLE CODEAND NEEDS TO BE CHANGED
// res := os.execute('??{osal.profile_path_source_and()!} ${model.name} version')
// if res.exit_code != 0 {
// return false
// }
// r := res.output.split_into_lines().filter(it.trim_space().len > 0)
// if r.len != 1 {
// return error("couldn't parse ${model.name} version.\n??{res.output}")
// }
// if texttools.version(version) == texttools.version(r[0]) {
// return true
// }
return false
}
//get the Upload List of the files
fn ulist_get() !ulist.UList {
//optionally build a UList which is all paths which are result of building, is then used e.g. in upload
return ulist.UList{}
}
//uploads to S3 server if configured
fn upload_() ! {
// installers.upload(
// cmdname: '${model.name}'
// source: '??{gitpath}/target/x86_64-unknown-linux-musl/release/${model.name}'
// )!
}
fn install_() ! {
console.print_header('install ${model.name}')
//THIS IS EXAMPLE CODEAND NEEDS TO BE CHANGED
// mut url := ''
// if core.is_linux_arm()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_linux_arm64.tar.gz'
// } else if core.is_linux_intel()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_linux_amd64.tar.gz'
// } else if core.is_osx_arm()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_darwin_arm64.tar.gz'
// } else if core.is_osx_intel()! {
// url = 'https://github.com/${model.name}-dev/${model.name}/releases/download/v??{version}/${model.name}_??{version}_darwin_amd64.tar.gz'
// } else {
// return error('unsported platform')
// }
// mut dest := osal.download(
// url: url
// minsize_kb: 9000
// expand_dir: '/tmp/${model.name}'
// )!
// //dest.moveup_single_subdir()!
// mut binpath := dest.file_get('${model.name}')!
// osal.cmd_add(
// cmdname: '${model.name}'
// source: binpath.path
// )!
}
@if model.build
// build_ compiles ${model.name} from source (git clone + build) and copies the
// resulting binary to the default bin path.
// NOTE(review): the body is example code, still commented out — a no-op stub for now.
fn build_() ! {
//url := 'https://github.com/threefoldtech/${model.name}'
// make sure we install base on the node
// if core.platform()!= .ubuntu {
// return error('only support ubuntu for now')
// }
//mut g:=golang.get()!
//g.install()!
//console.print_header('build coredns')
//mut gs := gittools.new(coderoot: '~/code')!
// console.print_header('build ${model.name}')
// gitpath := gittools.get_repo(url: url, reset: true, pull: true)!
// cmd := '
// cd ??{gitpath}
// source ~/.cargo/env
// exit 1 #todo
// '
// osal.execute_stdout(cmd)!
//
// //now copy to the default bin path
// mut binpath := dest.file_get('...')!
// register the binary as a command on the host PATH
// osal.cmd_add(
// cmdname: 'griddriver2'
// source: binpath.path
// )!
}
@end
// destroy_ removes ${model.name} from the host: stops/deletes the service,
// removes the binary, related packages and data paths (inverse of install_).
// NOTE(review): the body is example code, still commented out — a no-op stub for now.
fn destroy_() ! {
// mut systemdfactory := systemd.new()!
// systemdfactory.destroy("zinit")!
// osal.process_kill_recursive(name:'zinit')!
// osal.cmd_delete('zinit')!
// osal.package_remove('
// podman
// conmon
// buildah
// skopeo
// runc
// ')!
// //will remove all paths where go/bin is found
// osal.profile_path_add_remove(paths2delete:"go/bin")!
// osal.rm("
// podman
// conmon
// buildah
// skopeo
// runc
// /var/lib/containers
// /var/lib/podman
// /var/lib/buildah
// /tmp/podman
// /tmp/conmon
// ")!
}
@end

View File

@@ -1,338 +0,0 @@
module ${model.name}
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core
@if model.hasconfig
import freeflowuniverse.herolib.data.encoderhero
@end
@if model.cat == .installer
import freeflowuniverse.herolib.osal.startupmanager
import freeflowuniverse.herolib.osal.zinit
import time
@end
__global (
${model.name}_global map[string]&${model.classname}
${model.name}_default string
)
/////////FACTORY
@if model.singleton == false
^^[params]
// ArgsGet selects a named instance; an empty name is resolved by args_get()
// (falls back to the currently switched default, then to "default").
pub struct ArgsGet{
pub mut:
name string
}
// args_get normalizes ArgsGet: an empty name falls back to the currently
// selected default instance, and finally to the literal "default".
fn args_get (args_ ArgsGet) ArgsGet {
mut model:=args_
if model.name == ""{
// fall back to the instance selected via switch()
model.name = ${model.name}_default
}
if model.name == ""{
model.name = "default"
}
return model
}
// get returns the cached ${model.classname} instance for args.name, loading it
// from the context if needed. For the "default" instance, when no config exists
// and the module declares a default, the default heroscript is written first.
// Panics if the config cannot be resolved after loading.
pub fn get(args_ ArgsGet) !&${model.classname} {
mut args := args_get(args_)
if !(args.name in ${model.name}_global) {
if args.name=="default"{
if ! exists(args)!{
if default{
// seed the context with the module's default heroscript config
mut context:=base.context() or { panic("bug") }
context.hero_config_set("${model.name}",args.name,heroscript_default()!)!
}
}
load(args)!
}
}
return ${model.name}_global[args.name] or {
println(${model.name}_global)
panic("could not get config for ??{args.name}.")
}
}
@end
@if model.hasconfig
// set stores the instance in memory and selects it as the default.
// NOTE(review): despite the original comment, this does not write to the
// filesystem — use save() to persist the config.
pub fn set(o ${model.classname})! {
mut o2:=obj_init(o)!
${model.name}_global[o.name] = &o2
${model.name}_default = o.name
}
// exists reports whether a config for the named instance is stored in the
// hero context (on the filesystem).
pub fn exists(args_ ArgsGet)!bool {
mut model := args_get(args_)
mut context:=base.context()!
return context.hero_config_exists("${model.name}",model.name)
}
// load reads the stored heroscript config for the named instance and replays
// it; errors if the config does not exist.
// NOTE(review): play() is declared with a PlayBook parameter, but this call
// passes heroscript: — signature mismatch, confirm the intended play() API.
pub fn load(args_ ArgsGet) ! {
mut model := args_get(args_)
mut context:=base.context()!
mut heroscript := context.hero_config_get("${model.name}",model.name)!
play(heroscript:heroscript)!
}
// save encodes the instance as heroscript and persists it in the hero context
// on the filesystem (counterpart of set(), which only updates memory).
pub fn save(o ${model.classname})! {
mut context:=base.context()!
heroscript := encoderhero.encode[${model.classname}](o)!
context.hero_config_set("${model.name}",o.name,heroscript)!
}
// play processes all heroscript actions addressed to this module:
// '${model.name}.configure' plus the installer life-cycle actions
// (install/build/destroy and, when enabled, start/stop/restart).
// Fix(review): the original body re-declared the `plbook` parameter via
// `mut plbook := model.plbook or {...}`, which referenced an undefined `model`
// in the generated code and shadowed the parameter — removed.
pub fn play(mut plbook PlayBook) ! {
@if model.hasconfig
mut configure_actions := plbook.find(filter: '${model.name}.configure')!
if configure_actions.len > 0 {
for config_action in configure_actions {
mut p := config_action.params
mycfg:=cfg_play(p)!
console.print_debug("install action ${model.name}.configure\n??{mycfg}")
set(mycfg)!
save(mycfg)!
}
}
@end
@if model.cat == .installer
mut other_actions := plbook.find(filter: '${model.name}.')!
for other_action in other_actions {
if other_action.name in ["destroy","install","build"]{
mut p := other_action.params
reset:=p.get_default_false("reset")
// destroy also runs when install/build is requested with reset:true
if other_action.name == "destroy" || reset{
console.print_debug("install action ${model.name}.destroy")
destroy_()!
}
if other_action.name == "install"{
console.print_debug("install action ${model.name}.install")
install_()!
}
@if model.build
// Fix(review): "build" was accepted in the name list above but had no
// handler — dispatch it to build_() when the module supports building.
if other_action.name == "build"{
console.print_debug("install action ${model.name}.build")
build_()!
}
@end
}
@if model.startupmanager
if other_action.name in ["start","stop","restart"]{
mut p := other_action.params
name := p.get('name')!
mut ${model.name}_obj:=get(name:name)!
console.print_debug("action object:\n??{${model.name}_obj}")
if other_action.name == "start"{
console.print_debug("install action ${model.name}.??{other_action.name}")
${model.name}_obj.start()!
}
if other_action.name == "stop"{
console.print_debug("install action ${model.name}.??{other_action.name}")
${model.name}_obj.stop()!
}
if other_action.name == "restart"{
console.print_debug("install action ${model.name}.??{other_action.name}")
${model.name}_obj.restart()!
}
}
@end
}
@end
}
@end
@if model.cat == .installer
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////# LIVE CYCLE MANAGEMENT FOR INSTALLERS ///////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
@if model.hasconfig
// reload re-initializes this instance and makes it the active one.
// NOTE(review): despite the original "load from disk" comment, this only
// re-runs obj_init() — no disk read happens here; confirm intent.
pub fn (mut self ${model.classname}) reload() ! {
switch(self.name)
self=obj_init(self)!
}
@end
@if model.startupmanager
// startupmanager_get resolves the requested startup-manager backend.
// Only zinit and systemd are matched explicitly; every other value of
// StartupManagerType (unknown/screen/tmux/...) falls back to auto-detection.
fn startupmanager_get(cat zinit.StartupManagerType) !startupmanager.StartupManager {
match cat{
.zinit{
console.print_debug("startupmanager: zinit")
return startupmanager.get(cat:.zinit)!
}
.systemd{
console.print_debug("startupmanager: systemd")
return startupmanager.get(cat:.systemd)!
}else{
console.print_debug("startupmanager: auto")
return startupmanager.get()!
}
}
}
// start launches ${model.name}: installs it if needed, runs configure() and the
// pre/post hooks, registers every startup process with its startup manager,
// then polls (50 x 100ms, ~5s max) until the service reports running.
// Errors if the service never comes up.
pub fn (mut self ${model.classname}) start() ! {
switch(self.name)
if self.running()!{
return
}
console.print_header('${model.name} start')
if ! installed_()!{
install_()!
}
configure()!
start_pre()!
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
console.print_debug('starting ${model.name} with ??{zprocess.startuptype}...')
sm.new(zprocess)!
sm.start(zprocess.name)!
}
start_post()!
// poll up to 50 x 100ms for the service to come up
for _ in 0 .. 50 {
if self.running()! {
return
}
time.sleep(100 * time.millisecond)
}
// Fix(review): the original message said "did not install properly", but at
// this point installation succeeded — it is the start that timed out.
return error('${model.name} did not start properly.')
}
// install_start installs ${model.name} (honoring model.reset) and then starts it
pub fn (mut self ${model.classname}) install_start(model InstallArgs) ! {
switch(self.name)
self.install(model)!
self.start()!
}
// stop halts ${model.name}: runs stop_pre(), stops every registered startup
// process via its startup manager, then runs stop_post().
pub fn (mut self ${model.classname}) stop() ! {
switch(self.name)
stop_pre()!
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
sm.stop(zprocess.name)!
}
stop_post()!
}
// restart performs a full stop() followed by start()
pub fn (mut self ${model.classname}) restart() ! {
switch(self.name)
self.stop()!
self.start()!
}
// running reports whether ${model.name} is up: every startup-manager process
// must be running AND the module-specific running_() check must pass.
pub fn (mut self ${model.classname}) running() !bool {
switch(self.name)
// walk over the generic processes; if any is down, report not running
for zprocess in startupcmd()!{
mut sm:=startupmanager_get(zprocess.startuptype)!
r:=sm.running(zprocess.name)!
if r==false{
return false
}
}
return running_()!
}
@end
@@[params]
// InstallArgs controls install behavior; reset:true forces destroy first and a
// clean re-install.
pub struct InstallArgs{
pub mut:
reset bool
}
@if model.singleton
// install installs ${model.name} if not yet present; args.reset destroys first
pub fn install(args InstallArgs) ! {
if args.reset {
destroy()!
}
if ! (installed_()!){
install_()!
}
}
// destroy removes everything installed by install_/build_
pub fn destroy() ! {
destroy_()!
}
@if model.build
// build compiles ${model.name} from source
pub fn build() ! {
build_()!
}
@end
@else
// switch selects the named ${model.name} instance that subsequent module
// calls operate on
pub fn switch(name string) {
${model.name}_default = name
}
// install installs ${model.name} for this instance if not yet present;
// args.reset destroys first and forces a clean re-install.
pub fn (mut self ${model.classname}) install(args InstallArgs) ! {
switch(self.name)
if args.reset {
destroy_()!
}
if ! (installed_()!){
install_()!
}
}
@if model.build
// build compiles ${model.name} from source for this instance
pub fn (mut self ${model.classname}) build() ! {
switch(self.name)
build_()!
}
@end
// destroy stops the service first (when a startup manager is used, ignoring
// errors) and then removes everything installed by install_/build_.
pub fn (mut self ${model.classname}) destroy() ! {
switch(self.name)
@if model.startupmanager
self.stop() or {}
@end
destroy_()!
}
@end
@end

View File

@@ -1,155 +0,0 @@
module ${model.name}
import freeflowuniverse.herolib.data.paramsparser
import os
pub const version = '0.0.0'
const singleton = ${model.singleton}
const default = ${model.default}
@if model.hasconfig
// heroscript_default returns the default heroscript config that is written to
// the context the first time the "default" instance is requested (see get()).
//TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
pub fn heroscript_default() !string {
@if model.cat == .installer
heroscript:="
!!${model.name}.configure
name:'${model.name}'
homedir: '{HOME}/hero/var/${model.name}'
configpath: '{HOME}/.config/${model.name}/admin.yaml'
username: 'admin'
password: 'secretpassword'
secret: ''
title: 'My Hero DAG'
host: 'localhost'
port: 8888
"
@else
heroscript:="
!!${model.name}.configure
name:'${model.name}'
mail_from: 'info@@example.com'
mail_password: 'secretpassword'
mail_port: 587
mail_server: 'smtp-relay.brevo.com'
mail_username: 'kristof@@incubaid.com'
"
// mail_from := os.getenv_opt('MAIL_FROM') or {'info@@example.com'}
// mail_password := os.getenv_opt('MAIL_PASSWORD') or {'secretpassword'}
// mail_port := (os.getenv_opt('MAIL_PORT') or {"587"}).int()
// mail_server := os.getenv_opt('MAIL_SERVER') or {'smtp-relay.brevo.com'}
// mail_username := os.getenv_opt('MAIL_USERNAME') or {'kristof@@incubaid.com'}
//
// heroscript:="
// !!mailclient.configure name:'default'
// mail_from: '??{mail_from}'
// mail_password: '??{mail_password}'
// mail_port: ??{mail_port}
// mail_server: '??{mail_server}'
// mail_username: '??{mail_username}'
//
// "
//
@end
return heroscript
}
@end
//THIS THE THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@if model.cat == .installer
^^[heap]
// ${model.classname} holds the configuration for one named installer instance
pub struct ${model.classname} {
pub mut:
name string = 'default'
@if model.hasconfig
homedir string
configpath string
username string
password string @@[secret]
secret string @@[secret]
title string
host string
port int
@end
}
@if model.hasconfig
// cfg_play builds a ${model.classname} from heroscript action params, applying
// defaults for missing keys; requires a password or secret to be set.
fn cfg_play(p paramsparser.Params) !${model.classname} {
//THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
// NOTE(review): the configpath default here ('{HOME}/hero/var/...') differs
// from heroscript_default ('{HOME}/.config/...') — confirm which is intended.
mut mycfg := ${model.classname}{
name: p.get_default('name', 'default')!
homedir: p.get_default('homedir', '{HOME}/hero/var/${model.name}')!
configpath: p.get_default('configpath', '{HOME}/hero/var/${model.name}/admin.yaml')!
username: p.get_default('username', 'admin')!
password: p.get_default('password', '')!
secret: p.get_default('secret', '')!
title: p.get_default('title', 'HERO DAG')!
host: p.get_default('host', 'localhost')!
port: p.get_int_default('port', 8888)!
}
if mycfg.password == '' && mycfg.secret == '' {
return error('password or secret needs to be filled in for ${model.name}')
}
return mycfg
}
@end
@else
^^[heap]
// ${model.classname} holds the mail-client configuration for one named instance
pub struct ${model.classname} {
pub mut:
name string = 'default'
mail_from string
mail_password string @@[secret]
mail_port int
mail_server string
mail_username string
}
@if model.hasconfig
// cfg_play builds a ${model.classname} (client) from heroscript action params;
// all mail_* keys except the port are required.
fn cfg_play(p paramsparser.Params) !${model.classname} {
//THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
mut mycfg := ${model.classname}{
name: p.get_default('name', 'default')!
mail_from: p.get('mail_from')!
mail_password: p.get('mail_password')!
mail_port: p.get_int_default('mail_port', 8888)!
mail_server: p.get('mail_server')!
mail_username: p.get('mail_username')!
}
// NOTE(review): mail_port default 8888 disagrees with the 587 used in
// heroscript_default — confirm which is intended.
// NOTE(review): set() is also called by play() right after cfg_play —
// this call looks redundant; confirm before removing.
set(mycfg)!
return mycfg
}
@end
@end
// obj_init post-processes a freshly decoded object before it is cached.
// Never call get() here — only operate on the object itself.
fn obj_init(obj_ ${model.classname})!${model.classname}{
mut obj:=obj_
return obj
}
@if model.cat == .installer
// configure is called by start() before launching: it renders and writes any
// config files the service needs.
// NOTE(review): the body is commented-out example code — currently a no-op stub.
fn configure() ! {
@if model.cat == .installer
//mut installer := get()!
@else
//mut client := get()!
@end
@if model.templates
// mut mycode := ??tmpl('templates/atemplate.yaml')
// mut path := pathlib.get_file(path: cfg.configpath, create: true)!
// path.write(mycode)!
// console.print_debug(mycode)
@end
}
@end

View File

@@ -1,63 +0,0 @@
# ${model.name}
${model.title}
To get started
```vlang
@if model.cat == .installer
import freeflowuniverse.herolib.installers.something.${model.name} as ${model.name}_installer
heroscript:="
!!${model.name}.configure name:'test'
password: '1234'
port: 7701
!!${model.name}.start name:'test' reset:1
"
${model.name}_installer.play(heroscript=heroscript)!
//or we can call the default and do a start with reset
//mut installer:= ${model.name}_installer.get()!
//installer.start(reset:true)!
@else
import freeflowuniverse.herolib.clients. ${model.name}
mut client:= ${model.name}.get()!
client...
@end
```
## example heroscript
@if model.cat == .installer
```hero
!!${model.name}.configure
homedir: '/home/user/${model.name}'
username: 'admin'
password: 'secretpassword'
title: 'Some Title'
host: 'localhost'
port: 8888
```
@else
```hero
!!${model.name}.configure
secret: '...'
host: 'localhost'
port: 8888
```
@end

View File

@@ -143,8 +143,7 @@ fn cmd_docusaurus_execute(cmd Command) ! {
)!
// TODO: We need to load the sitename instead, or maybe remove it
mut dsite := docusaurus.dsite_get("")!
mut dsite := docusaurus.dsite_get('')!
if buildpublish {
// Build and publish production-ready artifacts

View File

@@ -8,7 +8,7 @@ import os
pub fn cmd_git(mut cmdroot Command) {
mut cmd_run := Command{
name: 'git'
description: 'Work with your repos, list, commit, pull, reload, ...'
description: 'Work with your repos, list, commit, pull, reload, ...\narg is url or path, or nothing if for all repos. \nCheck -f for filter. '
// required_args: 1
usage: 'sub commands of git are '
execute: cmd_git_execute
@@ -61,7 +61,7 @@ pub fn cmd_git(mut cmdroot Command) {
sort_flags: true
name: 'list'
execute: cmd_git_execute
description: 'list all repos.'
description: 'list all repos.\nThe Argument is url or path, otherwise use -f filter.'
}
mut sourcetree_command := Command{
@@ -96,7 +96,6 @@ pub fn cmd_git(mut cmdroot Command) {
mut allcmdsref := [&list_command, &clone_command, &push_command, &pull_command, &commit_command,
&reload_command, &delete_command, &sourcetree_command, &editor_command]
for mut c in allcmdsref {
c.add_flag(Flag{
flag: .bool
@@ -111,7 +110,15 @@ pub fn cmd_git(mut cmdroot Command) {
required: false
name: 'load'
abbrev: 'l'
description: 'reload the data in cache.'
description: 'reload the data in cache for selected repos.'
})
c.add_flag(Flag{
flag: .string
required: false
name: 'filter'
abbrev: 'f'
description: 'filter the repos by name or path.'
})
}
@@ -129,13 +136,6 @@ pub fn cmd_git(mut cmdroot Command) {
mut urlcmds := [&clone_command, &pull_command, &push_command, &editor_command, &sourcetree_command]
for mut c in urlcmds {
c.add_flag(Flag{
flag: .string
required: false
name: 'url'
abbrev: 'u'
description: 'url for clone operation.'
})
c.add_flag(Flag{
flag: .bool
required: false
@@ -163,16 +163,6 @@ pub fn cmd_git(mut cmdroot Command) {
})
}
for mut c in allcmdsref {
c.add_flag(Flag{
flag: .string
required: false
name: 'filter'
abbrev: 'f'
description: 'Filter is part of path of repo e.g. threefoldtech/info_'
})
}
for mut c_ in allcmdsref {
mut c := *c_
c.add_flag(Flag{
@@ -208,15 +198,21 @@ fn cmd_git_execute(cmd Command) ! {
coderoot = os.environ()['CODEROOT']
}
mut gs := gittools.get()!
if coderoot.len > 0 {
// is a hack for now
gs = gittools.new(coderoot: coderoot)!
}
mut gs := gittools.new(coderoot: coderoot)!
// create the filter for doing group actions, or action on 1 repo
mut filter := cmd.flags.get_string('filter') or { '' }
mut filter := ''
mut url := ''
mut path := ''
if cmd.args.len > 0 {
arg1 := cmd.args[0]
if arg1.starts_with('git') || arg1.starts_with('http') {
url = arg1
} else {
path = arg1
}
}
if cmd.name in gittools.gitcmds.split(',') {
mut pull := cmd.flags.get_bool('pull') or { false }
@@ -228,7 +224,7 @@ fn cmd_git_execute(cmd Command) ! {
}
mypath := gs.do(
filter: filter
filter: cmd.flags.get_string('filter') or { '' }
reload: reload
recursive: recursive
cmd: cmd.name
@@ -236,7 +232,8 @@ fn cmd_git_execute(cmd Command) ! {
pull: pull
reset: reset
msg: cmd.flags.get_string('message') or { '' }
url: cmd.flags.get_string('url') or { '' }
url: url
path: path
)!
if cmd.name == 'cd' {
print('cd ${mypath}\n')

View File

@@ -125,7 +125,7 @@ pub fn plbook_code_get(cmd Command) !string {
pull := cmd.flags.get_bool('gitpull') or { false }
// interactive := !cmd.flags.get_bool('script') or { false }
mut gs := gittools.get(coderoot: coderoot)!
mut gs := gittools.new(coderoot: coderoot)!
if url.len > 0 {
mut repo := gs.get_repo(
pull: pull

View File

@@ -48,8 +48,8 @@ play_docusaurus.play(mut plbook)! // <-- new line, optional
| Problem | What to do |
|---|---|
| **Wrong API name** the code uses **`gittools.get(gittools.GitStructureArgGet{})`** there is no `GitStructureArgGet` struct in the gittools package. The correct type is **`gittools.GitStructureArgs`** (or the default `gittools.GitStructure` argument). | Replace `GitStructureArgGet` with the correct type (`gittools.GitStructureArgs`). |
| **Missing import alias** the file uses `gittools.get` and `gittools.new` but the import is just `import freeflowuniverse.herolib.develop.gittools`. That is fine, but for clarity rename the import to **`gittools`** (it already is) and use the same alias everywhere. |
| **Wrong API name** the code uses **`gittools.new(gittools.GitStructureArgGet{})`** there is no `GitStructureArgGet` struct in the gittools package. The correct type is **`gittools.GitStructureArgs`** (or the default `gittools.GitStructure` argument). | Replace `GitStructureArgGet` with the correct type (`gittools.GitStructureArgs`). |
| **Missing import alias** the file uses `gittools.new` and `gittools.new` but the import is just `import freeflowuniverse.herolib.develop.gittools`. That is fine, but for clarity rename the import to **`gittools`** (it already is) and use the same alias everywhere. |
| **Potential nil `gs`** after a `git.clone` we do `gs = gittools.new(coderoot: coderoot)!`. This shadows the previous `gs` and loses the original configuration (e.g. `light`, `log`). The intent is to **reinitialise** the `GitStructure` **only** when a `coderoot` is explicitly given. Keep the current flow but **document** the intention. |
| **Unused variable `action_`** the variable `action_` is used only for iteration. No problem. |
| **Missing `gittools.GitCloneArgs`** check that the struct is actually named `GitCloneArgs` in the gittools package. If not, change to the proper name. | Verify and, if needed, replace with the correct struct name (`gittools.GitCloneArgs`). |
@@ -82,7 +82,7 @@ fn play_git(mut plbook PlayBook) ! {
// ... (same as before)
} else {
// Default GitStructure (no args)
gittools.get(gittools.GitStructureArgs{})!
gittools.new(gittools.GitStructureArgs{})!
}
// -----------------------------------------------------------

View File

@@ -56,7 +56,10 @@ pub fn get_dir(args_ GetArgs) !Path {
p2.absolute()
if p2.exist == .no {
if args.create {
os.mkdir_all(p2.absolute()) or { return error('cannot create path ${p2}, ${err}') } // Make sure that all the needed paths created
os.mkdir_all(p2.absolute()) or {
print_backtrace()
return error('cannot create path ${p2}, ${err}')
} // Make sure that all the needed paths created
p2.check()
}
return p2
@@ -97,7 +100,10 @@ pub fn get_file(args_ GetArgs) !Path {
mut parent_ := p2.parent()!
parent_.check()
if parent_.exist == .no {
os.mkdir_all(parent_.path) or { return error('cannot create path:${args.path}') }
os.mkdir_all(parent_.path) or {
print_backtrace()
return error('cannot create path:${args.path}')
}
}
if p2.exist == .no || args.empty {
os.write_file(args.path, '') or {

View File

@@ -31,7 +31,6 @@ pub mut:
pub fn new(args_ PlayBookNewArgs) !PlayBook {
mut args := args_
mut c := base.context() or { return error('failed to get context: ${err}') }
mut s := c.session_new()!

View File

@@ -148,7 +148,7 @@ pub fn (mut plbook PlayBook) get(args FindArgs) !&Action {
} else if res.len > 1 {
$if debug {
print_backtrace()
}
}
return error("found more than one action: '${args.filter}'")
}
return res[0] or { panic('bug') }

View File

@@ -12,7 +12,6 @@ enum State {
othertext
}
// pub struct PlayBookNewArgs {
// path string
// text string
@@ -37,23 +36,24 @@ pub fn (mut plbook PlayBook) add(args_ PlayBookNewArgs) ! {
args.path = newpath.path
}
if plbook.path=="" && args.path!="" {
if plbook.path == '' && args.path != '' {
plbook.path = args.path
}
if args.text.len>0 && args.replace.len>0{
//now we need to replace any placeholders in the text
if args.text.len > 0 && args.replace.len > 0 {
// now we need to replace any placeholders in the text
for key, value in args.replace {
if key.starts_with('@') || key.starts_with('$') || key.starts_with('[') || key.starts_with('{') {
if key.starts_with('@') || key.starts_with('$') || key.starts_with('[')
|| key.starts_with('{') {
args.text = args.text.replace(key, value)
}else{
args.text = args.text.replace("@${key}", value)
args.text = args.text.replace("$\{${key}\}", value)
args.text = args.text.replace("\{${key}\}", value)
} else {
args.text = args.text.replace('@${key}', value)
args.text = args.text.replace('$\{${key}\}', value)
args.text = args.text.replace('\{${key}\}', value)
}
}
}
// walk over directory
if args.path.len > 0 {
// console.print_header("PLBOOK add path:'${args.path}'")

View File

@@ -2,7 +2,7 @@ module playbook
import freeflowuniverse.herolib.develop.gittools // Added import for gittools
//REMARK: include is done in play_core
// REMARK: include is done in play_core
// // Include external playbook actions (from git repo or local path)
// // based on actions defined as `!!play.include`.

View File

@@ -53,9 +53,8 @@ pub fn run(args_ PlayArgs) ! {
giteaclient.play(mut plbook)!
if args.emptycheck{
if args.emptycheck {
// Ensure we did not leave any actions unprocessed
plbook.empty_check()!
}
}

View File

@@ -11,36 +11,40 @@ import os
// -------------------------------------------------------------------
fn play_core(mut plbook PlayBook) ! {
// ----------------------------------------------------------------
// 1. Include handling (play include / echo)
// ----------------------------------------------------------------
if plbook.exists(filter: 'play.') == false && plbook.exists(filter: 'play.') == false && plbook.exists(
filter: 'core.'
) == false {
return
}
// ----------------------------------------------------------------
// 1. Include handling (play include / echo)
// ----------------------------------------------------------------
// Track included paths to prevent infinite recursion
mut included_paths := map[string]bool{}
for mut action_ in plbook.find(filter: 'play.*')! {
if action_.name == 'include' {
mut action := *action_
mut toreplace := action.params.get_default('replace', '')!
mut playrunpath := action.params.get_default('path', '')!
if playrunpath.len == 0 {
action.name = 'pull'
playrunpath = gittools.get_repo_path(
mypath := gittools.path(
path: playrunpath
git_url: action.params.get_default('git_url', '')!
git_reset: action.params.get_default_false('git_reset')
git_pull: action.params.get_default_false('git_pull')
)!
playrunpath = mypath.path
}
if playrunpath.len == 0 {
return error("can't run a heroscript didn't find url or path.")
}
// console.print_debug('play run:\n${action_}')
if ! playrunpath.starts_with('/') {
playrunpath=os.abs_path("${plbook.path}/${playrunpath}")
if !playrunpath.starts_with('/') {
playrunpath = os.abs_path('${plbook.path}/${playrunpath}')
}
console.print_debug('play run include path:${playrunpath}')
@@ -50,12 +54,11 @@ fn play_core(mut plbook PlayBook) ! {
console.print_debug('Skipping already included path: ${playrunpath}')
continue
}
toreplacedict:=texttools.to_map(toreplace)
toreplacedict := texttools.to_map(toreplace)
included_paths[playrunpath] = true
plbook.add(path: playrunpath,replace:toreplacedict)!
action.done = true
plbook.add(path: playrunpath, replace: toreplacedict)!
action.done = true
}
if action_.name == 'echo' {
content := action_.params.get_default('content', "didn't find content")!
@@ -63,38 +66,35 @@ fn play_core(mut plbook PlayBook) ! {
}
}
// ----------------------------------------------------------------
// 2. Session environment handling
// ----------------------------------------------------------------
// Guard make sure a session exists
mut session := plbook.session
// !!session.env_set / env_set_once
for mut action in plbook.find(filter: 'session.')! {
// ----------------------------------------------------------------
// 2. Session environment handling
// ----------------------------------------------------------------
// Guard make sure a session exists
mut session := plbook.session
mut p := action.params
match action.name {
'env_set' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
session.env_set(key, val)!
}
'env_set_once' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
// Use the dedicated setonce method
session.env_set_once(key, val)!
}
else { /* ignore unknown subaction */ }
}
action.done = true
}
// !!session.env_set / env_set_once
for mut action in plbook.find(filter: 'session.')! {
mut p := action.params
match action.name {
'env_set' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
session.env_set(key, val)!
}
'env_set_once' {
key := p.get('key')!
val := p.get('val') or { p.get('value')! }
// Use the dedicated setonce method
session.env_set_once(key, val)!
}
else {}
}
action.done = true
}
// ----------------------------------------------------------------
// 3. Template replacement in action parameters
// ----------------------------------------------------------------
// ----------------------------------------------------------------
// 3. Template replacement in action parameters
// ----------------------------------------------------------------
// Apply template replacement from session environment variables
if session.env.len > 0 {
// Create a map with name_fix applied to keys for template replacement
@@ -142,5 +142,4 @@ fn play_core(mut plbook PlayBook) ! {
session.save()!
action.done = true
}
}

View File

@@ -11,11 +11,14 @@ import freeflowuniverse.herolib.ui.console // For verbose error reporting
// ---------------------------------------------------------------
fn play_git(mut plbook PlayBook) ! {
// -----------------------------------------------------------
// !!git.define configure the GitStructure
// -----------------------------------------------------------
if plbook.exists(filter: 'git.') == false {
return
}
mut gs := gittools.new()!
define_actions := plbook.find(filter: 'git.define')!
mut gs := if define_actions.len > 0 {
if define_actions.len > 0 {
mut p := define_actions[0].params
coderoot := p.get_default('coderoot', '')!
light := p.get_default_true('light')
@@ -25,18 +28,17 @@ fn play_git(mut plbook PlayBook) ! {
ssh_key_path := p.get_default('ssh_key_path', '')!
reload := p.get_default_false('reload')
gittools.new(
coderoot: coderoot
light: light
log: log
debug: debug
offline: offline
ssh_key_path: ssh_key_path
reload: reload
gs = gittools.new(
coderoot: coderoot
log: log
debug: debug
offline: offline
reload: reload
)!
} else {
// Default GitStructure (no args)
gittools.get()!
if light || ssh_key_path.len > 0 {
gs.config_set(light: light, ssh_key_path: ssh_key_path)!
}
}
// -----------------------------------------------------------

View File

@@ -5,6 +5,10 @@ import freeflowuniverse.herolib.core.playbook { PlayBook }
// import os
fn play_luadns(mut plbook PlayBook) ! {
if plbook.exists(filter: 'luadns.') == false {
return
}
// Variables below are not used, commenting them out
// mut buildroot := '${os.home_dir()}/hero/var/mdbuild'
// mut publishroot := '${os.home_dir()}/hero/www/info'

View File

@@ -4,6 +4,10 @@ import freeflowuniverse.herolib.osal.sshagent
import freeflowuniverse.herolib.core.playbook { PlayBook }
fn play_ssh(mut plbook PlayBook) ! {
if plbook.exists(filter: 'sshagent.') == false {
return
}
mut agent := sshagent.new()!
for mut action in plbook.find(filter: 'sshagent.*')! {
mut p := action.params

View File

@@ -23,7 +23,7 @@ pub fn to_array_int(r string) []int {
return r2
}
//convert a:b ,c:d,e:f to dict with keys a,c,e and corresponding values b,d,f
// convert a:b ,c:d,e:f to dict with keys a,c,e and corresponding values b,d,f
pub fn to_map(mapstring string) map[string]string {
mut result := map[string]string{}
mut mapstring_array := to_array(mapstring)
@@ -31,7 +31,7 @@ pub fn to_map(mapstring string) map[string]string {
if item.contains(':') {
parts := item.split(':')
if parts.len == 2 {
result[parts[0].trim_space()] = parts[1].trim_space().trim("'\"").trim_space()
result[parts[0].trim_space()] = parts[1].trim_space().trim('\'"').trim_space()
} else {
panic('to_map: expected key:value pairs, got: ${item}')
}
@@ -40,7 +40,6 @@ pub fn to_map(mapstring string) map[string]string {
}
}
return result
}
// intelligent way how to map a line to a map

View File

@@ -1,147 +0,0 @@
# libsecp256k1
This is a lib256k1 binding for vlang.
## Requirements
make sure the lib is installed
### macOS
```bash
brew install secp256k1
```
### Ubuntu
Compile latest release, version included in Ubuntu is outdated.
```
apt-get install -y build-essential wget autoconf libtool
wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
tar -xvf v0.3.2.tar.gz
cd secp256k1-0.3.2/
./autogen.sh
./configure
make -j 5
make install
```
### Arch
```bash
pacman -Su extra/libsecp256k1
```
### Gentoo
```bash
emerge dev-libs/libsecp256k1
```
## Features
- [x] Generate EC keys
- [x] Load existing EC keys
- [x] Serialize keys
- [x] Derive shared key
- [x] Sign using ECDSA
- [x] Verify ECDSA signature
- [x] Sign using Schnorr
- [x] Verify a Schnorr signature
- [ ] Support multi-signature with Schnorr
## How to use
There are 4 different things / features to understand in this secp256k1 implementation (wrapper).
### Public and Private keys for secp256k1
This is a simple private/public key schema. This wrapper deals with hexdump of keys.
- Private key is `32 bytes` long (eg: `0x4a21f247ff3744e211e95ec478d5aba94a1d6d8bed613e8a9faece6d048399fc`)
- Public key is `33 bytes` long (eg: `0x02df72fc4fa607ca3478446750bf9f8510242c4fa5849e77373d71104cd0c82ea0`)
In this library, you can instantiate a secp256k1 object in 3 ways:
```vlang
import freeflowuniverse.herolib.crypt.secp256k1
secp256k1.new()
```
Constructor without any arguments, will generate a new private and public key
```vlang
secp256k1.new(privkey: '0x4a21f247ff3744e211e95ec478d5aba94a1d6d8bed613e8a9faece6d048399fc')
```
Using `privkey` argument, this will create an object from private key and generate corresponding public key
```vlang
secp256k1.new(pubkey: '0x02df72fc4fa607ca3478446750bf9f8510242c4fa5849e77373d71104cd0c82ea0')
```
Using `pubkey` argument, this will create an object with only the public key,
which can be used for shared key derivation or signature verification
### Shared Keys
Library `secp256k1` has one feature which allows you to derive a `shared intermediate common key` from
the private key of one party and the public key from the other party.
Example:
- Shared key from `Bob Private Key` + `Alice Public Key` = `Shared Key`
- Shared key from `Alice Private Key` + `Bob Public Key` = `Shared Key` (the same)
Using this feature, with your private key and the target public key, you can derive a `shared (secret) key`
that only you both know. This is really interesting for switching to a symmetric encryption using that key
as the encryption key, or for using any well-known secret without exchanging it.
To use the shared key feature, just call the `sharedkeys()` method:
```vlang
bob := secp256k1.new(privhex: '0x478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83')!
alicepub := secp256k1.new(pubkey: '0x034a87ad6fbf83d89a91c257d4cc038828c6ed9104738ffd4bb7e5069858d4767b')!
shared := bob.sharedkeys(alicepub)
// shared = 0xf114df29d930f0cd37f62cbca36c46773a42bf87e12edcb35d47c4bfbd20514d
```
This works the same in the opposite direction:
```vlang
alice := secp256k1.new(privhex: '0x8225825815f42e1c24a2e98714d99fee1a20b5ac864fbcb7a103cd0f37f0ffec')!
bobpub := secp256k1.new(pubkey: '0x03310ec949bd4f7fc24f823add1394c78e1e9d70949ccacf094c027faa20d99e21')!
shared := alice.sharedkeys(bobpub)
// shared = 0xf114df29d930f0cd37f62cbca36c46773a42bf87e12edcb35d47c4bfbd20514d (same shared key)
```
### ECDSA Signature
This is the default signature method. When doing a signature, you don't sign the actual data but you
have to sign a hash (sha256) of the data. This payload needs to be fixed length. The return signature
is a `64 bytes` long response.
When doing a signature using ecdsa method, you sign using the private key and verify using the public key
of the same party. If **Bob** signs something, you have to verify using **Bob**'s public key whether the signature matches.
If the signature matches, that means it is really **Bob** who signed the hash.
Here, you need the signature and the message separately.
```vlang
sstr := alice.sign_str("Hello World !")
valid := alicepub.verify_str(sstr, "Hello World !")
// valid = true
```
### Schnorr Signature
This is the new preferred signature method. In theory, this method can additionally sign
using multiple parties without storing everyone's signature (signatures can be chained), but this is not
implemented in this wrapper (lack of source documentation and understanding).
In practice, code-wise, the wrapper takes care of everything for you and this really works the
same way as ECDSA.
```vlang
schnorr_sstr := alice.schnorr_sign_str("Hello World !")
valid := alicepub.schnorr_verify_str(schnorr_sstr, "Hello World !")
// valid = true
```

View File

@@ -1,351 +0,0 @@
@[translated]
module secp256k1
import encoding.hex
import crypto.sha256
import encoding.base64
#include "@VMODROOT/secp256k1mod.h"
#flag @VMODROOT/secp256k1mod.o
#flag -lsecp256k1
#flag -DNO_SECP_MAIN
#flag darwin -I/opt/homebrew/include
#flag darwin -L/opt/homebrew/lib
// linux: require libsecp256k1-dev
// macos: require brew install secp256k1
//
// struct definitions
//
struct Secp256k1_pubkey {
data [64]u8
}
struct Secp256k1_xonly_pubkey {
data [64]u8
}
struct Secp256k1_ecdsa_signature {
data [64]u8
}
struct Secp256k1_keypair {
data [96]u8
}
struct Secp256k1_t {
kntxt &C.secp256k1_context
seckey &u8
compressed &u8
pubkey Secp256k1_pubkey
xcompressed &u8
xpubkey Secp256k1_xonly_pubkey
keypair Secp256k1_keypair
}
struct Secp256k1_sign_t {
sig Secp256k1_ecdsa_signature
serialized &u8
length usize
}
struct Secp256k1_signature {
cctx &C.secp256k1_sign_t
}
pub struct Secp256k1 {
cctx &Secp256k1_t
}
//
// prototypes
//
fn C.secp256k1_new() &Secp256k1_t
fn C.secp256k1_schnorr_verify(secp &Secp256k1_t, signature &u8, siglen usize, hash &u8, hashlen usize) int
fn C.secp256k1_schnorr_sign_hash(secp &Secp256k1_t, hash &u8, length usize) &u8
fn C.secp256k1_sign_verify(secp &Secp256k1_t, signature &Secp256k1_sign_t, hash &u8, length usize) int
fn C.secp256k1_sign_free(signature &Secp256k1_sign_t)
fn C.secp256k1_load_signature(secp &Secp256k1_t, serialized &u8, length usize) &Secp256k1_sign_t
fn C.secp256k1_sign_hash(secp &Secp256k1_t, hash &u8, length usize) &u8
fn C.secp265k1_shared_key(private &Secp256k1_t, public &Secp256k1_t) &u8
fn C.secp256k1_load_key(secp &Secp256k1_t, key &u8) int
fn C.secp256k1_load_private_key(secp &Secp256k1_t, key &u8) int
fn C.secp256k1_load_public_key(secp &Secp256k1_t, key &u8) int
fn C.secp256k1_free(secp &Secp256k1_t)
fn C.secp256k1_dumps(secp &Secp256k1_t)
fn C.secp256k1_export(secp &Secp256k1_t) &u8
fn C.secp256k1_private_key(secp &Secp256k1_t) &u8
fn C.secp256k1_public_key(secp &Secp256k1_t) &u8
fn C.secp256k1_generate_key(secp &Secp256k1_t) int
@[params]
pub struct Secp256NewArgs {
pub:
pubhex string // public key hex (eg 03310ec949bd4f7fc24f823add1394c78e1e9d70949ccacf094c027faa20d99e21)
privhex string // private key hex (eg 478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83)
pubbase64 string
privbase64 string
// key []u8 // is in binary form (not implemented)
}
// get a Secp256k1 key, can start from an existing key in string hex format (starts with 0x)
// parameters:
// privhex: private key in hex format (full features will be available)
// pubhex: public key in hex format (reduced features available)
//
// keyhex string // e.g. 0x478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83
// // keyhex is still supported for _backward_ compatibility only, please do not use anymore
//
// key []u8 // is in binary form (not implemented)
// generate bool = true // default will generate a new key .
pub fn new(args_ Secp256NewArgs) !Secp256k1 {
mut args := args_
secp := Secp256k1{}
secp.cctx = C.secp256k1_new()
// if args.key.len > 0 && args.privhex.len > 0 {
// return error('cannot specify privhex and key at same time')
// }
if args.privhex.len > 0 && args.pubhex.len > 0 {
return error('cannot specify private and public key at same time')
}
if args.privhex.len > 0 {
// same as keyhex (backward compatibility)
// load key from hex like 0x478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83
// key is the private key
if !(args.privhex.starts_with('0x')) {
args.privhex = '0x${args.privhex}'
}
load := C.secp256k1_load_private_key(secp.cctx, args.privhex.str)
if load > 0 {
return error('invalid private key')
}
} else if args.pubhex.len > 0 {
// load key from hex like 0x478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83
// key is the public key, this only allow signature check, shared keys, etc.
if !(args.pubhex.starts_with('0x')) {
args.pubhex = '0x${args.pubhex}'
}
load := C.secp256k1_load_public_key(secp.cctx, args.pubhex.str)
if load > 0 {
return error('invalid public key')
}
} else if args.privbase64.len > 0 {
keybin := base64.decode(args.privbase64)
keyhex := hex.encode(keybin)
keyhex2 := '0x${keyhex}'
return new(privhex: keyhex2)!
} else if args.pubbase64.len > 0 {
keybin := base64.decode(args.pubbase64)
keyhex := hex.encode(keybin)
keyhex2 := '0x${keyhex}'
return new(pubhex: keyhex2)!
} else {
C.secp256k1_generate_key(secp.cctx)
}
// TODO: implement the binary key input
// TODO: check format in side and report properly
// dumps keys for debugging purpose
// secp.keys()
return secp
}
// request keys dump from low level library
// this basically prints keys from internal objects (private, public, shared, x-only, ...)
// warning: this is for debug purpose
fn (s Secp256k1) keys() {
C.secp256k1_dumps(s.cctx)
}
// export private key
// backward compatibility, please use private_key() and public_key() methods
pub fn (s Secp256k1) export() string {
key := C.secp256k1_export(s.cctx)
return unsafe { key.vstring() }
}
// with a private key in pair with a public key, secp256k1 can derivate a shared
// key which is the same for both parties, this is really interresting to use for example
// that shared keys for symetric encryption key since it's private but common
//
// example: sharedkey(bobpriv + alicepub) = abcdef
// sharedkey(alicepriv + bobpub) = abcdef
//
// both parties can use their own private key with target public key to derivate the same
// shared commun key, this key is unique with that pair.
pub fn (s Secp256k1) sharedkeys(target Secp256k1) []u8 {
shr := C.secp265k1_shared_key(s.cctx, target.cctx)
return unsafe { shr.vbytes(32) } // 32 bytes shared key
}
pub fn (s Secp256k1) sharedkeys_hex(target Secp256k1) string {
keybin := s.sharedkeys(target)
return hex.encode(keybin)
}
pub fn (s Secp256k1) sharedkeys_base64(target Secp256k1) string {
keybin := s.sharedkeys(target)
return base64.encode(keybin)
}
// returns private key in hex format
pub fn (s Secp256k1) private_key_hex() string {
key := C.secp256k1_private_key(s.cctx)
return unsafe { key.vstring()[2..] }
}
pub fn (s Secp256k1) private_key_base64() string {
key := s.private_key_hex()
keybin := hex.decode(key) or { panic("can't decode hex") }
return base64.encode(keybin)
}
// return public key in hex format
pub fn (s Secp256k1) public_key_hex() string {
key := C.secp256k1_public_key(s.cctx)
return unsafe { key.vstring()[2..] }
}
pub fn (s Secp256k1) public_key_base64() string {
key := s.public_key_hex()
keybin := hex.decode(key) or { panic("can't decode hex") }
return base64.encode(keybin)
}
//
// sign (ecdsa) data
// - we force user to pass data to ensure we hash the right way
// data to ensure signature is valid and safe
//
pub fn (s Secp256k1) sign_data(data []u8) []u8 {
// hash data
h256 := sha256.sum(data)
signature := C.secp256k1_sign_hash(s.cctx, h256.data, h256.len)
return unsafe { signature.vbytes(64) } // 64 bytes signature
}
// return a hex string of the signature
pub fn (s Secp256k1) sign_data_hex(data []u8) string {
payload := s.sign_data(data)
return hex.encode(payload)
}
pub fn (s Secp256k1) sign_data_base64(data []u8) string {
payload := s.sign_data(data)
return base64.encode(payload)
}
pub fn (s Secp256k1) sign_str(data string) []u8 {
return s.sign_data(data.bytes())
}
// return a hex string of the signature
pub fn (s Secp256k1) sign_str_hex(data string) string {
return s.sign_data_hex(data.bytes())
}
pub fn (s Secp256k1) sign_str_base64(data string) string {
payload := s.sign_data(data.bytes())
return base64.encode(payload)
}
//
// verify a signature
//
pub fn (s Secp256k1) verify_data(signature []u8, data []u8) bool {
// todo: check size signature
sig := Secp256k1_signature{}
sig.cctx = C.secp256k1_load_signature(s.cctx, signature.data, signature.len)
// compute data hash to ensure we do it correctly
// - do not trust the user, do it ourself -
h256 := sha256.sum(data)
valid := C.secp256k1_sign_verify(s.cctx, sig.cctx, h256.data, h256.len)
if valid == 1 {
return true
}
return false
}
pub fn (s Secp256k1) verify_str_base64(signature string, input string) bool {
signature2 := base64.decode(signature)
return s.verify_data(signature2, input.bytes())
}
pub fn (s Secp256k1) verify_str_hex(signature string, input string) bool {
signature2 := hex.decode(signature) or { panic("couldn't decode 64") }
return s.verify_data(signature2, input.bytes())
}
//
// sign (schnorr) data
// - we force user to pass data to ensure we hash the right way
// data to ensure signature is valid and safe
//
pub fn (s Secp256k1) schnorr_sign_data(data []u8) []u8 {
// hash data
h256 := sha256.sum(data)
signature := C.secp256k1_schnorr_sign_hash(s.cctx, h256.data, h256.len)
return unsafe { signature.vbytes(64) } // 64 bytes signature
}
// return a hex string of the signature
pub fn (s Secp256k1) schnorr_sign_data_hex(data []u8) string {
payload := s.schnorr_sign_data(data)
return hex.encode(payload)
}
pub fn (s Secp256k1) schnorr_sign_str(data string) []u8 {
return s.schnorr_sign_data(data.bytes())
}
// return a hex string of the signature
pub fn (s Secp256k1) schnorr_sign_str_hex(data string) string {
return s.schnorr_sign_data_hex(data.bytes())
}
//
// verify a signature
//
pub fn (s Secp256k1) schnorr_verify_data(signature []u8, data []u8) bool {
// compute data hash to ensure we do it correctly
// - do not trust the user, do it ourself -
h256 := sha256.sum(data)
valid := C.secp256k1_schnorr_verify(s.cctx, signature.data, signature.len, h256.data,
h256.len)
if valid == 1 {
return true
}
return false
}
pub fn (s Secp256k1) schnorr_verify_str(signature []u8, input string) bool {
return s.schnorr_verify_data(signature, input.bytes())
}

View File

@@ -1,460 +0,0 @@
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <string.h>
#include <sys/random.h>
#include <stddef.h>
#include <limits.h>
#include <stdio.h>
#include "secp256k1mod.h"
static int fill_random(unsigned char* data, size_t size) {
#if defined(__linux__) || defined(__FreeBSD__)
ssize_t res = getrandom(data, size, 0);
if(res < 0 || (size_t) res != size) {
return 0;
} else {
return 1;
}
#elif defined(__APPLE__) || defined(__OpenBSD__)
int res = getentropy(data, size);
if(res == 0) {
return 1;
} else {
return 0;
}
#endif
return 0;
}
static void dumphex(unsigned char *data, size_t size) {
size_t i;
printf("0x");
for(i = 0; i < size; i++) {
printf("%02x", data[i]);
}
printf("\n");
}
static char *hexifier(unsigned char *data, size_t size) {
char *target = calloc(sizeof(char), (size * 2) + 4);
char buffer[8];
strcpy(target, "0x");
memset(buffer, 0, sizeof(buffer));
for(size_t i = 0; i < size; i++) {
sprintf(buffer, "%02x", data[i]);
strcat(target, buffer);
}
return target;
}
static unsigned char *hexparse(char *input) {
if(strncmp(input, "0x", 2) != 0)
return NULL;
size_t length = strlen(input);
unsigned char *target = calloc(sizeof(char), length);
char *pos = input + 2;
for(size_t count = 0; count < length - 2; count++) {
sscanf(pos, "%2hhx", &target[count]);
pos += 2;
}
return target;
}
static void secp256k1_erase(unsigned char *target, size_t length) {
#if defined(__GNUC__)
// memory barrier to avoid memset optimization
memset(target, 0, length);
__asm__ __volatile__("" : : "r"(target) : "memory");
#else
// if we can't, fill with random, still better than
// risking avoid memset
fill_random(target, length);
#endif
}
static void secp256k1_erase_free(unsigned char *target, size_t length) {
secp256k1_erase(target, length);
free(target);
}
secp256k1_t *secp256k1_new() {
secp256k1_t *secp = malloc(sizeof(secp256k1_t));
unsigned char randomize[32];
secp->kntxt = secp256k1_context_create(SECP256K1_CONTEXT_NONE);
if(!fill_random(randomize, sizeof(randomize))) {
printf("[-] failed to generate randomness\n");
return NULL;
}
// side-channel protection
int val = secp256k1_context_randomize(secp->kntxt, randomize);
assert(val);
// allocate keys and initialize them empty
secp->seckey = calloc(sizeof(char), SECKEY_SIZE);
secp->compressed = calloc(sizeof(char), COMPPUB_SIZE);
secp->xcompressed = calloc(sizeof(char), XSERPUB_SIZE);
return secp;
}
void secp256k1_free(secp256k1_t *secp) {
secp256k1_context_destroy(secp->kntxt);
secp256k1_erase_free(secp->seckey, SECKEY_SIZE);
secp256k1_erase_free(secp->compressed, COMPPUB_SIZE);
secp256k1_erase_free(secp->xcompressed, XSERPUB_SIZE);
free(secp);
}
static int secp256k1_populate_public_key(secp256k1_t *secp) {
int retval;
retval = secp256k1_xonly_pubkey_from_pubkey(secp->kntxt, &secp->xpubkey, NULL, &secp->pubkey);
assert(retval);
retval = secp256k1_xonly_pubkey_serialize(secp->kntxt, secp->xcompressed, &secp->xpubkey);
assert(retval);
return 0;
}
static int secp256k1_populate_key(secp256k1_t *secp) {
int retval;
retval = secp256k1_ec_pubkey_create(secp->kntxt, &secp->pubkey, secp->seckey);
assert(retval);
size_t len = COMPPUB_SIZE;
retval = secp256k1_ec_pubkey_serialize(secp->kntxt, secp->compressed, &len, &secp->pubkey, SECP256K1_EC_COMPRESSED);
assert(retval);
// always compute the xonly pubkey as well, so we don't need to compute
// it later for schnorr
retval = secp256k1_keypair_create(secp->kntxt, &secp->keypair, secp->seckey);
assert(retval);
return secp256k1_populate_public_key(secp);
}
int secp256k1_generate_key(secp256k1_t *secp) {
while(1) {
if(!fill_random(secp->seckey, SECKEY_SIZE)) {
printf("[-] failed to generate randomness\n");
return 1;
}
if(secp256k1_ec_seckey_verify(secp->kntxt, secp->seckey) == 0) {
// try again
continue;
}
return secp256k1_populate_key(secp);
}
return 1;
}
// backward compatibility
int secp256k1_load_key(secp256k1_t *secp, char *key) {
// only allow valid key size
if(strlen(key) != (SECKEY_SIZE * 2) + 2)
return 1;
unsigned char *binkey = hexparse(key);
free(secp->seckey);
secp->seckey = binkey;
if(secp256k1_ec_seckey_verify(secp->kntxt, secp->seckey) == 0) {
// invalid key
return 1;
}
return secp256k1_populate_key(secp);
}
int secp256k1_load_private_key(secp256k1_t *secp, char *key) {
return secp256k1_load_key(secp, key);
}
int secp256k1_load_public_key(secp256k1_t *secp, char *key) {
// only allow valid key size
if(strlen(key) != (COMPPUB_SIZE * 2) + 2)
return 1;
unsigned char *binkey = hexparse(key);
free(secp->compressed);
secp->compressed = binkey;
if(!secp256k1_ec_pubkey_parse(secp->kntxt, &secp->pubkey, secp->compressed, COMPPUB_SIZE)) {
printf("[-] failed to load public key\n");
return 1;
}
return secp256k1_populate_public_key(secp);;
}
unsigned char *secp265k1_shared_key(secp256k1_t *private, secp256k1_t *public) {
unsigned char *shared = malloc(sizeof(unsigned char) * SHARED_SIZE);
int val = secp256k1_ecdh(private->kntxt, shared, &public->pubkey, private->seckey, NULL, NULL);
assert(val);
return shared;
}
unsigned char *secp256k1_sign_hash(secp256k1_t *secp, unsigned char *hash, size_t length) {
secp256k1_sign_t signature;
int retval;
if(length != SHA256_SIZE) {
printf("[-] warning: you should only sign sha-256 hash, size mismatch\n");
printf("[-] warning: you get warned\n");
}
retval = secp256k1_ecdsa_sign(secp->kntxt, &signature.sig, hash, secp->seckey, NULL, NULL);
assert(retval);
signature.serialized = malloc(sizeof(unsigned char) * SERSIG_SIZE);
retval = secp256k1_ecdsa_signature_serialize_compact(secp->kntxt, signature.serialized, &signature.sig);
assert(retval);
return signature.serialized;
}
secp256k1_sign_t *secp256k1_load_signature(secp256k1_t *secp, unsigned char *serialized, size_t length) {
secp256k1_sign_t *signature;
if(length != SERSIG_SIZE) {
printf("[-] serialized signature length mismatch, expected %u bytes\n", SERSIG_SIZE);
return NULL;
}
signature = calloc(sizeof(secp256k1_sign_t), 1);
signature->length = length;
signature->serialized = malloc(length);
memcpy(signature->serialized, serialized, length);
if(!secp256k1_ecdsa_signature_parse_compact(secp->kntxt, &signature->sig, signature->serialized)) {
printf("[-] failed to parse the signature\n");
// FIXME: cleanup
return NULL;
}
return signature;
}
void secp256k1_sign_free(secp256k1_sign_t *signature) {
secp256k1_erase_free(signature->serialized, signature->length);
free(signature);
}
int secp256k1_sign_verify(secp256k1_t *secp, secp256k1_sign_t *signature, unsigned char *hash, size_t length) {
if(length != SHA256_SIZE) {
printf("[-] warning: you should only check sha-256 hash, size mismatch\n");
}
return secp256k1_ecdsa_verify(secp->kntxt, &signature->sig, hash, &secp->pubkey);
}
unsigned char *secp256k1_schnorr_sign_hash(secp256k1_t *secp, unsigned char *hash, size_t length) {
unsigned char aux[32];
unsigned char *signature;
int retval;
if(length != SHA256_SIZE) {
printf("[-] warning: you should only sign sha-256 hash, size mismatch\n");
printf("[-] warning: you get warned\n");
}
if(!fill_random(aux, sizeof(aux))) {
printf("[-] failed to generate randomness\n");
return NULL;
}
signature = malloc(sizeof(unsigned char) * SCHSIG_SIZE);
retval = secp256k1_schnorrsig_sign32(secp->kntxt, signature, hash, &secp->keypair, aux);
assert(retval);
return signature;
}
int secp256k1_schnorr_verify(secp256k1_t *secp, unsigned char *signature, size_t siglen, unsigned char *hash, size_t hashlen) {
if(hashlen != SHA256_SIZE) {
printf("[-] warning: you should only check sha-256 hash, size mismatch\n");
}
if(siglen != SCHSIG_SIZE) {
printf("[-] invalid signature length, should be %u bytes\n", SCHSIG_SIZE);
return 2;
}
return secp256k1_schnorrsig_verify(secp->kntxt, signature, hash, hashlen, &secp->xpubkey);
}
void secp256k1_dumps(secp256k1_t *secp) {
printf("Private Key: ");
dumphex(secp->seckey, SECKEY_SIZE);
printf("Public Key : ");
dumphex(secp->compressed, COMPPUB_SIZE);
printf("X-Only Key : ");
dumphex(secp->xcompressed, XSERPUB_SIZE);
}
// backward compatibility
char *secp256k1_export(secp256k1_t *secp) {
return hexifier(secp->seckey, SECKEY_SIZE);
}
// return private key in hex format
char *secp256k1_private_key(secp256k1_t *secp) {
return secp256k1_export(secp);
}
char *secp256k1_public_key(secp256k1_t *secp) {
return hexifier(secp->compressed, COMPPUB_SIZE);
}
#ifndef NO_SECP_MAIN
int main() {
secp256k1_t *wendy = secp256k1_new();
secp256k1_generate_key(wendy);
printf("Wendy:\n");
dumphex(wendy->seckey, SECKEY_SIZE);
dumphex(wendy->compressed, COMPPUB_SIZE);
dumphex(wendy->xcompressed, XSERPUB_SIZE);
// bob
secp256k1_t *bob = secp256k1_new();
secp256k1_load_key(bob, "0x478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83");
printf("\n");
printf("Bob:\n");
dumphex(bob->seckey, SECKEY_SIZE);
dumphex(bob->compressed, COMPPUB_SIZE);
dumphex(bob->xcompressed, XSERPUB_SIZE);
// export functions
char *priv = secp256k1_private_key(bob);
char *pubk = secp256k1_public_key(bob);
printf("Private export: %s\n", priv);
printf("Public export: %s\n", pubk);
free(priv);
secp256k1_t *bobpub = secp256k1_new();
int val = secp256k1_load_public_key(bobpub, "0x03310ec949bd4f7fc24f823add1394c78e1e9d70949ccacf094c027faa20d99e21");
printf("Public key loader: %d\n", val);
secp256k1_dumps(bobpub);
// alice
secp256k1_t *alice = secp256k1_new();
secp256k1_load_key(alice, "0x8225825815f42e1c24a2e98714d99fee1a20b5ac864fbcb7a103cd0f37f0ffec");
printf("\n");
printf("Alice:\n");
dumphex(alice->seckey, SECKEY_SIZE);
dumphex(alice->compressed, COMPPUB_SIZE);
dumphex(alice->xcompressed, XSERPUB_SIZE);
unsigned char *shared1 = secp265k1_shared_key(bob, alice);
unsigned char *shared2 = secp265k1_shared_key(alice, bob);
printf("\n");
printf("Shared Key:\n");
dumphex(shared1, SHARED_SIZE);
dumphex(shared2, SHARED_SIZE);
secp256k1_erase_free(shared1, SHARED_SIZE);
secp256k1_erase_free(shared2, SHARED_SIZE);
// Hello, world!
unsigned char hash[32] = {
0x31, 0x5F, 0x5B, 0xDB, 0x76, 0xD0, 0x78, 0xC4,
0x3B, 0x8A, 0xC0, 0x06, 0x4E, 0x4A, 0x01, 0x64,
0x61, 0x2B, 0x1F, 0xCE, 0x77, 0xC8, 0x69, 0x34,
0x5B, 0xFC, 0x94, 0xC7, 0x58, 0x94, 0xED, 0xD3,
};
unsigned char *sign = secp256k1_sign_hash(bob, hash, sizeof(hash));
printf("\n");
printf("Signature (ecdsa):\n");
dumphex(sign, SERSIG_SIZE);
secp256k1_sign_t *sigobj = secp256k1_load_signature(bob, sign, SERSIG_SIZE);
int valid = secp256k1_sign_verify(bob, sigobj, hash, sizeof(hash));
printf("\n");
printf("Signature valid: %d\n", valid);
secp256k1_sign_free(sigobj);
// using bobpub
sigobj = secp256k1_load_signature(bobpub, sign, SERSIG_SIZE);
valid = secp256k1_sign_verify(bobpub, sigobj, hash, sizeof(hash));
printf("\n");
printf("Signature valid (using bob public key only): %d\n", valid);
secp256k1_erase_free(sign, SERSIG_SIZE);
secp256k1_sign_free(sigobj);
sign = secp256k1_schnorr_sign_hash(bob, hash, sizeof(hash));
printf("\n");
printf("Signature (schnorr):\n");
dumphex(sign, SCHSIG_SIZE);
valid = secp256k1_schnorr_verify(bob, sign, SCHSIG_SIZE, hash, sizeof(hash));
printf("\n");
printf("Signature valid: %d\n", valid);
valid = secp256k1_schnorr_verify(bobpub, sign, SCHSIG_SIZE, hash, sizeof(hash));
printf("\n");
printf("Signature valid (using bob pubkey key only): %d\n", valid);
secp256k1_erase_free(sign, SCHSIG_SIZE);
printf("\n");
printf("Wendy Export:\n");
char *export = secp256k1_export(wendy);
printf(">> %s\n", export);
free(export);
printf("\n");
printf("Wendy Keys dump:\n");
secp256k1_dumps(wendy);
secp256k1_free(bob);
secp256k1_free(alice);
secp256k1_free(wendy);
return 0;
}
#endif

View File

@@ -1,61 +0,0 @@
#ifndef SECP256K1_V_MOD
#define SECP256K1_V_MOD
#include <secp256k1.h>
#include <secp256k1_ecdh.h>
#include <secp256k1_extrakeys.h>
#include <secp256k1_schnorrsig.h>
typedef struct secp256k1_t {
secp256k1_context *kntxt; // library context
unsigned char *seckey; // ec private key
unsigned char *compressed; // ec public key serialized
secp256k1_pubkey pubkey; // ec public key
unsigned char *xcompressed; // x-only serialized key
secp256k1_xonly_pubkey xpubkey; // x-only public key
secp256k1_keypair keypair; // keypair opaque representation
// needed for schnorr
} secp256k1_t;
typedef struct secp256k1_sign_t {
secp256k1_ecdsa_signature sig;
unsigned char *serialized;
size_t length;
} secp256k1_sign_t;
#define SECKEY_SIZE 32 // secret key size
#define SHARED_SIZE 32 // ecdh shared key size
#define COMPPUB_SIZE 33 // compressed public key size
#define XSERPUB_SIZE 32 // x-only public key serialized size
#define SERSIG_SIZE 64 // serialized signature size
#define SCHSIG_SIZE 64 // internal schnorr signature size
#define SHA256_SIZE 32 // sha-256 digest length
secp256k1_t *secp256k1_new();
void secp256k1_free(secp256k1_t *secp);
int secp256k1_generate_key(secp256k1_t *secp);
unsigned char *secp265k1_shared_key(secp256k1_t *private, secp256k1_t *public);
unsigned char *secp256k1_sign_hash(secp256k1_t *secp, unsigned char *hash, size_t length);
secp256k1_sign_t *secp256k1_load_signature(secp256k1_t *secp, unsigned char *serialized, size_t length);
int secp256k1_sign_verify(secp256k1_t *secp, secp256k1_sign_t *signature, unsigned char *hash, size_t length);
unsigned char *secp256k1_schnorr_sign_hash(secp256k1_t *secp, unsigned char *hash, size_t length);
int secp256k1_schnorr_verify(secp256k1_t *secp, unsigned char *signature, size_t siglen, unsigned char *hash, size_t hashlen);
void secp256k1_sign_free(secp256k1_sign_t *signature);
char *secp256k1_export(secp256k1_t *secp);
char *secp256k1_private_key(secp256k1_t *secp);
char *secp256k1_public_key(secp256k1_t *secp);
void secp256k1_dumps(secp256k1_t *secp);
int secp256k1_load_key(secp256k1_t *secp, char *key);
int secp256k1_load_private_key(secp256k1_t *secp, char *key);
int secp256k1_load_public_key(secp256k1_t *secp, char *key);
#endif

View File

@@ -1,112 +0,0 @@
module secp256k1
import encoding.hex
import crypto.sha256
import freeflowuniverse.herolib.crypt.secp256k1
fn test_check() {
println('${'[+] initializing libsecp256 vlang wrapper'}')
wendy := secp256k1.new()!
webdy_priv_key := wendy.private_key_hex()
webdy_pub_key := wendy.public_key_hex()
println('-------')
println('Wendy Private: ${webdy_priv_key}')
println('Wendy Public: ${webdy_pub_key}')
println('-------')
// create 'bob' from a private key, full features will be available
bob := secp256k1.new(
privhex: '0x478b45390befc3097e3e6e1a74d78a34a113f4b9ab17deb87e9b48f43893af83'
)!
// create 'alice' from a private key, full features will be available
alice := secp256k1.new(
privhex: '0x8225825815f42e1c24a2e98714d99fee1a20b5ac864fbcb7a103cd0f37f0ffec'
)!
// create 'bobpub' from bob only public key, reduced features available (only sign check, shared keys, etc.)
bobpub := secp256k1.new(
pubhex: bob.public_key_hex()
)!
// create 'alicepub' from alice only public key, reduced features available
alicepub := secp256k1.new(
pubhex: alice.public_key_hex()
)!
shr1 := bob.sharedkeys(alice)
println('${shr1}')
shr2 := alice.sharedkeys(bob)
println('${shr2}')
// example in real world, where private key is available and only target public key
shr1pub := bob.sharedkeys(alicepub)
println('${shr1pub}')
shr2pub := alice.sharedkeys(bobpub)
println('${shr2pub}')
println('-----')
mut message := 'Hello world, this is my awesome message'
message += message
message += message
message += message
message += message
h256 := sha256.hexhash(message)
println('${h256}')
println('${h256.len}')
println('${sha256.sum(message.bytes())}')
parsed := hex.decode(h256) or { panic(err) }
println('${parsed}')
println('${parsed.len}')
//
// signature (ecdca)
//
signed := alice.sign_data(message.bytes())
println('${signed}')
signed_hex := alice.sign_data_hex(message.bytes())
println('${signed_hex}')
println('${signed_hex.len}')
signed_str := alice.sign_str(message)
println('${signed_str}')
println('${signed_str.len}')
signed_str_hex := alice.sign_str_hex(message)
assert signed_str_hex == '656699dde22d8b89d91070dee4fc8dba136172fb54e6de475024c40e4f8d5111562212c8976b5a4ccd530bdb7f40c5d9bd2cdeeec1473656566fbb9c4576ed8c'
assert signed_str_hex.len == 128
// instanciate alice with only her public key
assert alicepub.verify_data(signed, message.bytes()) == true
assert alicepub.verify_str_hex(signed_str_hex, message) == true
assert alicepub.verify_str_hex(signed_str_hex, message + 's') == false
//
// signature (schnorr)
//
// schnorr_signed := alice.schnorr_sign_data(message.bytes())
// println('${schnorr_signed}')
// schnorr_signed_hex := alice.schnorr_sign_data_hex(message.bytes())
// println('${schnorr_signed_hex}')
// schnorr_signed_str := alice.schnorr_sign_str(message)
// println('${schnorr_signed_str}')
// schnorr_signed_str_hex := alice.schnorr_sign_str_hex(message)
// println('${schnorr_signed_str_hex}')
// println('${alicepub.schnorr_verify_data(schnorr_signed, message.bytes())}')
// println('${alicepub.schnorr_verify_str(schnorr_signed_str, message)}')
// // should fails, it's not the right signature method (ecdsa / schnorr)
// println('${alicepub.verify_data(schnorr_signed, message.bytes())}')
// println('${alicepub.verify_str(schnorr_signed_str, message)}')
}

View File

@@ -1,8 +0,0 @@
Module {
name: 'secp256k1'
description: 'secp256k1 in v'
version: '0.2.0'
license: 'MIT'
dependencies: []
}

View File

@@ -25,22 +25,10 @@ assert 'bbbb' == db.get('a')!
```
## dbname
DBName has functionality to efficiently store millions of names and generate a unique id for each; every name gets a unique id, and based on the id the name can easily be found back.
Some string-based data can be attached to a name, so it becomes a highly efficient key-value store, which can be used e.g. for a DB of pubkeys, for a nameserver, ...
## dbfs examples
Each session has such a DB attached to it, data is stored on filesystem,
e.g. ideal for config sessions (which are done on context level)
```golang
> TODO: fix, we refactored
```go
import freeflowuniverse.herolib.data.dbfs

View File

@@ -4,6 +4,10 @@ import freeflowuniverse.herolib.core.playbook { PlayBook }
// import freeflowuniverse.herolib.ui.console
pub fn play(mut plbook PlayBook) ! {
if !plbook.exists(filter: 'doctree.') {
return
}
mut doctrees := map[string]&Tree{}
mut collection_actions := plbook.find(filter: 'doctree.scan')!

View File

@@ -80,7 +80,7 @@ pub fn (mut tree Tree) scan(args TreeScannerArgs) ! {
pub fn (mut tree Tree) scan_concurrent(args_ TreeScannerArgs) ! {
mut args := args_
if args.git_url.len > 0 {
mut gs := gittools.get(coderoot: args.git_root)!
mut gs := gittools.new(coderoot: args.git_root)!
mut repo := gs.get_repo(
url: args.git_url
pull: args.git_pull

View File

@@ -12,9 +12,9 @@ pub fn (params &Params) get(key_ string) !string {
return p.value.trim(' ')
}
}
$if debug {
print_backtrace()
}
$if debug {
print_backtrace()
}
return error('Did not find key:${key} in ${params}')
}
@@ -158,9 +158,9 @@ pub fn (params &Params) get_int_default(key string, defval int) !int {
}
pub fn (params &Params) get_default_true(key string) bool {
mut r := ""
mut r := ''
if params.exists(key) {
r = params.get(key) or { panic("bug") }
r = params.get(key) or { panic('bug') }
}
r = texttools.name_fix_no_underscore(r)
if r == '' || r == '1' || r == 'true' || r == 'y' || r == 'yes' {
@@ -170,10 +170,11 @@ pub fn (params &Params) get_default_true(key string) bool {
}
pub fn (params &Params) get_default_false(key string) bool {
mut r := ""
mut r := ''
if params.exists(key) {
r = params.get(key) or { panic("bug") }
} r = texttools.name_fix_no_underscore(r)
r = params.get(key) or { panic('bug') }
}
r = texttools.name_fix_no_underscore(r)
if r == '' || r == '0' || r == 'false' || r == 'n' || r == 'no' {
return false
}

Some files were not shown because too many files have changed in this diff Show More