...
This commit is contained in:
109
aiprompts/ai_instruct/processing/heroscript.md
Normal file
109
aiprompts/ai_instruct/processing/heroscript.md
Normal file
@@ -0,0 +1,109 @@
|
||||
## INTENT
|
||||
|
||||
we use heroscript to communicate actions and events in a structured format.
|
||||
we want you to parse user intents and generate the corresponding heroscript.
|
||||
|
||||
ONLY RETURN THE HEROSCRIPT STATEMENTS, can be more than 1
|
||||
|
||||
## HEROSCRIPT FORMAT
|
||||
|
||||
HeroScript is a concise scripting language with the following structure:
|
||||
|
||||
```heroscript
|
||||
!!actor.action_name
|
||||
param1: 'value1'
|
||||
param2: 'value with spaces'
|
||||
multiline_description: '
|
||||
This is a multiline description.
|
||||
It can span multiple lines.
|
||||
'
|
||||
arg1 arg2 // Arguments without keys
|
||||
|
||||
!!actor.action_name2 param1:something param2:'something with spaces' nr:3
|
||||
```
|
||||
|
||||
Key characteristics:
|
||||
|
||||
- **Actions**: Start with `!!`, followed by `actor.action_name` (e.g., `!!mailclient.configure`).
|
||||
- **Parameters**: Defined as `key:value`. Values can be quoted for spaces.
|
||||
- **Multiline Support**: Parameters like `description` can span multiple lines.
|
||||
- **Arguments**: Values without keys (e.g., `arg1`).
|
||||
- params can be on 1 line, with spaces in between
|
||||
- time can be given as +1h, +1d, +1w (hour, day, week); of course 1 can be any number, and +1h means 1 hour from now
|
||||
- time format is: dd/mm/yyyy hh:mm (ONLY USE THIS)
|
||||
- comma separation is used a lot in arguments, e.g. 'jan,kristof' or 'jan , kristof'; remove the spaces — both are a list of 2
|
||||
- note only !! is at start of line, rest has spaces per instruction
|
||||
- leave one empty line between heroscript statements
|
||||
- everything after // is comment
|
||||
|
||||
## HEROSCRIPT SCHEMA
|
||||
|
||||
the language we understand
|
||||
|
||||
### calendar management
|
||||
|
||||
```heroscript
|
||||
!!calendar.create when:'+1h' descr:'this is event to discuss eng' attendees:'jan,kristof' name:'meet1' tags:'eng,urgent'
|
||||
!!calendar.delete name:'meet1'
|
||||
!!calendar.list tags:'urgent'
|
||||
|
||||
```
|
||||
|
||||
### contact management
|
||||
|
||||
```heroscript
|
||||
!!contact.add name:'jan' email:'jan@example.com' phone:'123-456-7890'
|
||||
!!contact.remove name:'jan'
|
||||
!!contact.list
|
||||
|
||||
```
|
||||
|
||||
### task management
|
||||
|
||||
```heroscript
|
||||
!!task.create title:'Prepare presentation' due:'+1d' assignee:'jan' name:'task1' tags:'eng,urgent'
|
||||
deadline:'+10d' duration:'1h'
|
||||
!!task.update name:'task1' status:'in progress'
|
||||
!!task.delete name:'task1'
|
||||
!!task.list
|
||||
|
||||
```
|
||||
|
||||
### project management
|
||||
|
||||
```heroscript
|
||||
!!project.create title:'Cloud Product Development' description:'Track progress of cloud product development' name:'cloud_prod'
|
||||
!!project.update name:'cloud_prod' status:'in progress'
|
||||
!!project.delete name:'cloud_prod'
|
||||
!!project.list
|
||||
!!project.tasks_list name:'cloud_prod' //required properties are name, description, and assignee; if not given, ask
|
||||
!!project.tasks_add names:'task1, task2'
|
||||
!!project.tasks_remove names:'task1, task2'
|
||||
|
||||
```
|
||||
|
||||
### SUPPORTED TAGS
|
||||
|
||||
only tags supported are:
|
||||
|
||||
- for intent: eng, prod, support, mgmt, marketing
|
||||
- for urgency: urgent, high, medium, low
|
||||
|
||||
### generic remarks
|
||||
|
||||
- names are lowercase and snake_case, can be distilled out of title if only title given, often a user will say name but that means title
|
||||
- time: format of returned data or time is always dd/mm/yyyy hh:min
|
||||
|
||||
## IMPORTANT STARTING INFO
|
||||
|
||||
- current time is 10/08/2025 05:10 , use this to define any time-related parameters
|
||||
|
||||
## USER INTENT
|
||||
|
||||
I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high
|
||||
|
||||
also let me know which other meetings I have which are urgent
|
||||
|
||||
can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'
|
||||
|
||||
Please add tasks to the project in line to creating specifications, design documents, and implementation plans.
|
||||
64
aiprompts/ai_instruct/processing/heroscript2.md
Normal file
64
aiprompts/ai_instruct/processing/heroscript2.md
Normal file
@@ -0,0 +1,64 @@
|
||||
SYSTEM
|
||||
You are a HeroScript compiler. Convert user intents into valid HeroScript statements.
|
||||
|
||||
OUTPUT RULES
|
||||
|
||||
1) Return ONLY HeroScript statements. No prose, no backticks.
|
||||
2) Separate each statement with exactly ONE blank line.
|
||||
3) Keys use snake_case. Names are lowercase snake_case derived from titles (non-alnum → "_", collapse repeats, trim).
|
||||
4) Lists are comma-separated with NO spaces (e.g., "jan,alex").
|
||||
5) Times: OUTPUT MUST BE ABSOLUTE in "dd/mm/yyyy hh:mm" (Europe/Zurich). Convert relative times (e.g., "tomorrow 10am") using CURRENT_TIME.
|
||||
6) Tags: include at most one intent tag and at most one urgency tag when present.
|
||||
- intent: eng,prod,support,mgmt,marketing
|
||||
- urgency: urgent,high,medium,low
|
||||
7) Quotes: quote values containing spaces; otherwise omit quotes (allowed either way).
|
||||
8) Comments only with // if the user explicitly asks for explanations; otherwise omit.
|
||||
|
||||
SCHEMA (exact actions & parameters)
|
||||
|
||||
!!calendar.create when:'dd/mm/yyyy hh:mm' name:'<name>' descr:'<text>' attendees:'a,b,c' tags:'intent,urgency'
|
||||
!!calendar.delete name:'<name>'
|
||||
!!calendar.list [tags:'tag1,tag2']
|
||||
|
||||
!!contact.add name:'<name>' email:'<email>' phone:'<phone>'
|
||||
!!contact.remove name:'<name>'
|
||||
!!contact.list
|
||||
|
||||
!!task.create title:'<title>' name:'<name>' [due:'dd/mm/yyyy hh:mm'] [assignee:'<name>'] [tags:'intent,urgency'] [deadline:'dd/mm/yyyy hh:mm'] [duration:'<Nd Nh Nm> or <Nh>']
|
||||
!!task.update name:'<name>' [status:'in progress|done|blocked|todo']
|
||||
!!task.delete name:'<name>'
|
||||
!!task.list
|
||||
|
||||
!!project.create title:'<title>' description:'<text>' name:'<name>'
|
||||
!!project.update name:'<name>' [status:'in progress|done|blocked|todo']
|
||||
!!project.delete name:'<name>'
|
||||
!!project.list
|
||||
!!project.tasks_list name:'<project_name>'
|
||||
!!project.tasks_add name:'<project_name>' names:'task_a,task_b'
|
||||
!!project.tasks_remove name:'<project_name>' names:'task_a,task_b'
|
||||
|
||||
NORMALIZATION & INFERENCE (silent)
|
||||
- Derive names from titles when missing (see rule 3). Ensure consistency across statements.
|
||||
- Map phrases to tags when obvious (e.g., "new product" ⇒ intent: prod; "high priority" ⇒ urgency: high).
|
||||
- Attendees: split on commas, trim, lowercase given names.
|
||||
- If the user asks for “urgent meetings,” use tags:'urgent' specifically.
|
||||
- Prefer concise descriptions pulled from the user’s phrasing.
|
||||
- Names are required; if missing, ask for clarification.
|
||||
- For calendar management, ensure to include all relevant details such as time, attendees, and description.
|
||||
|
||||
|
||||
CURRENT_TIME
|
||||
|
||||
10/08/2025 05:10
|
||||
|
||||
USER_MESSAGE
|
||||
|
||||
I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high
|
||||
|
||||
also let me know which other meetings I have which are urgent
|
||||
|
||||
can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'
|
||||
|
||||
Please add tasks to the project in line to creating specifications, design documents, and implementation plans.
|
||||
|
||||
END
|
||||
82
aiprompts/ai_instruct/processing/intent.md
Normal file
82
aiprompts/ai_instruct/processing/intent.md
Normal file
@@ -0,0 +1,82 @@
|
||||
## INSTRUCTIONS
|
||||
|
||||
the user will send me multiple instructions describing what they want to do; I want you to put them in separate categories
|
||||
|
||||
The categories we have defined are:
|
||||
|
||||
- calendar management
|
||||
- schedule meetings, events, reminders
|
||||
- list these events
|
||||
- delete them
|
||||
- contact management
|
||||
- add/remove contact information e.g. phone numbers, email addresses, address information
|
||||
- list contacts, search
|
||||
- task or project management
|
||||
- anything we need to do, anything we need to track and plan
|
||||
- create/update tasks, set deadlines
|
||||
- mark tasks as complete
|
||||
- delete tasks
|
||||
- project management
|
||||
- communication (chat, email)
|
||||
- see what needs to be communicated, e.g. send a chat to ...
|
||||
- search statements
|
||||
- find on internet, find specific information from my friends
|
||||
|
||||
I want you to detect the intent and make multiple blocks out of the intent, each block should correspond to one of the identified intents, identify the intent with name of the category eg. calendar, only use above names
|
||||
|
||||
|
||||
|
||||
what the user wants to do; stay as close as possible to the original instructions and copy them exactly as they were given by the user — we only need to sort the instructions into these blocks
|
||||
|
||||
for each instruction make a separate block, e.g. if 2 tasks are given, create 2 blocks
|
||||
|
||||
the format to return is: (note newline after each title of block)
|
||||
|
||||
```template
|
||||
===CALENDAR===\n
|
||||
|
||||
$the copied text from what user wants
|
||||
|
||||
===CONTACT===\n
|
||||
...
|
||||
|
||||
===QUESTION===\n
|
||||
|
||||
put here what our system needs to ask to the user anything which is not clear
|
||||
|
||||
===END===\n
|
||||
|
||||
```
|
||||
|
||||
I want you to execute above on instructions as given by user below, give text back ONLY supporting the template
|
||||
|
||||
note for format is only ===$NAME=== and then on next lines the original instructions from the user, don't change
|
||||
|
||||
## special processing of info
|
||||
|
||||
- if a date or time specified e.g. tomorrow, time, ... calculate back from current date
|
||||
|
||||
## IMPORTANT STARTING INFO
|
||||
|
||||
- current time is 10/08/2025 05:10 (format of returned data is always dd/mm/yyyy hh:min)
|
||||
- use the current time to define formatted time out of instructions
|
||||
- only return the formatted time
|
||||
|
||||
## UNCLEAR INFO
|
||||
|
||||
check in instructions e.g. things specified like you, me, ...
|
||||
if they are not clear, ask specifically who is meant
|
||||
|
||||
if task, specify per task, who needs to do it and when, make sure each instruction (block) is complete and clear for further processing
|
||||
|
||||
be very specific with the questions e.g. who is you, ...
|
||||
|
||||
## EXECUTE ABOVE ON THE FOLLOWING
|
||||
|
||||
I am planning a birthday for my daughters tomorrow, there will be 10 people.
|
||||
|
||||
I would like to know if you can help me with the preparations.
|
||||
|
||||
I need a place for my daughter's birthday party.
|
||||
|
||||
I need to send message to my wife isabelle that she needs to pick up the cake.
|
||||
16344
aiprompts/ai_instruct/uppy/fastapi.md
Normal file
16344
aiprompts/ai_instruct/uppy/fastapi.md
Normal file
File diff suppressed because it is too large
Load Diff
1544
aiprompts/ai_instruct/uppy/fastapi_mcp.md
Normal file
1544
aiprompts/ai_instruct/uppy/fastapi_mcp.md
Normal file
File diff suppressed because it is too large
Load Diff
225
aiprompts/ai_instruct/uppy/tus.md
Normal file
225
aiprompts/ai_instruct/uppy/tus.md
Normal file
@@ -0,0 +1,225 @@
|
||||
# tus Resumable Upload Protocol (Condensed for Coding Agents)
|
||||
|
||||
## Core Protocol
|
||||
|
||||
All Clients and Servers MUST implement the core protocol for resumable uploads.
|
||||
|
||||
### Resuming an Upload
|
||||
|
||||
1. **Determine Offset (HEAD Request):**
|
||||
* **Request:**
|
||||
```
|
||||
HEAD /files/{upload_id} HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 200 OK
|
||||
Upload-Offset: {current_offset}
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* Server MUST include `Upload-Offset`.
|
||||
* Server MUST include `Upload-Length` if known.
|
||||
* Server SHOULD return `200 OK` or `204 No Content`.
|
||||
* Server MUST prevent caching: `Cache-Control: no-store`.
|
||||
|
||||
2. **Resume Upload (PATCH Request):**
|
||||
* **Request:**
|
||||
```
|
||||
PATCH /files/{upload_id} HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Type: application/offset+octet-stream
|
||||
Content-Length: {chunk_size}
|
||||
Upload-Offset: {current_offset}
|
||||
Tus-Resumable: 1.0.0
|
||||
|
||||
[binary data chunk]
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Offset: {new_offset}
|
||||
```
|
||||
* `Content-Type` MUST be `application/offset+octet-stream`.
|
||||
* `Upload-Offset` in request MUST match server's current offset (else `409 Conflict`).
|
||||
* Server MUST acknowledge with `204 No Content` and `Upload-Offset` (new offset).
|
||||
* Server SHOULD return `404 Not Found` for non-existent resources.
|
||||
|
||||
### Common Headers
|
||||
|
||||
* **`Upload-Offset`**: Non-negative integer. Byte offset within resource.
|
||||
* **`Upload-Length`**: Non-negative integer. Total size of upload in bytes.
|
||||
* **`Tus-Version`**: Comma-separated list of supported protocol versions (Server response).
|
||||
* **`Tus-Resumable`**: Protocol version used (e.g., `1.0.0`). MUST be in every request/response (except `OPTIONS`). If client version unsupported, server responds `412 Precondition Failed` with `Tus-Version`.
|
||||
* **`Tus-Extension`**: Comma-separated list of supported extensions (Server response). Omitted if none.
|
||||
* **`Tus-Max-Size`**: Non-negative integer. Max allowed upload size in bytes (Server response).
|
||||
* **`X-HTTP-Method-Override`**: String. Client MAY use to override HTTP method (e.g., for `PATCH`/`DELETE` limitations).
|
||||
|
||||
### Server Configuration (OPTIONS Request)
|
||||
|
||||
* **Request:**
|
||||
```
|
||||
OPTIONS /files HTTP/1.1
|
||||
Host: tus.example.org
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Tus-Resumable: 1.0.0
|
||||
Tus-Version: 1.0.0,0.2.2,0.2.1
|
||||
Tus-Max-Size: 1073741824
|
||||
Tus-Extension: creation,expiration
|
||||
```
|
||||
* Response MUST contain `Tus-Version`. MAY include `Tus-Extension` and `Tus-Max-Size`.
|
||||
* Client SHOULD NOT include `Tus-Resumable` in request.
|
||||
|
||||
## Protocol Extensions
|
||||
|
||||
Clients SHOULD use `OPTIONS` request and `Tus-Extension` header for feature detection.
|
||||
|
||||
### Creation (`creation` extension)
|
||||
|
||||
Create a new upload resource. Server MUST add `creation` to `Tus-Extension`.
|
||||
|
||||
* **Request (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Length: 0
|
||||
Upload-Length: {total_size} OR Upload-Defer-Length: 1
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Metadata: filename {base64_filename},is_confidential
|
||||
```
|
||||
* MUST include `Upload-Length` or `Upload-Defer-Length: 1`.
|
||||
* If `Upload-Defer-Length: 1`, client MUST set `Upload-Length` in subsequent `PATCH`.
|
||||
* `Upload-Length: 0` creates an immediately complete empty file.
|
||||
* Client MAY supply `Upload-Metadata` (key-value pairs, value Base64 encoded).
|
||||
* If `Upload-Length` exceeds `Tus-Max-Size`, server responds `413 Request Entity Too Large`.
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 201 Created
|
||||
Location: {upload_url}
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* Server MUST respond `201 Created` and set `Location` header to new resource URL.
|
||||
* New resource has implicit offset `0`.
|
||||
|
||||
#### Headers
|
||||
|
||||
* **`Upload-Defer-Length`**: `1`. Indicates upload size is unknown. Server adds `creation-defer-length` to `Tus-Extension` if supported.
|
||||
* **`Upload-Metadata`**: Comma-separated `key value` pairs. Key: no spaces/commas, ASCII. Value: Base64 encoded.
|
||||
|
||||
### Creation With Upload (`creation-with-upload` extension)
|
||||
|
||||
Include initial upload data in the `POST` request. Server MUST add `creation-with-upload` to `Tus-Extension`. Depends on `creation` extension.
|
||||
|
||||
* **Request (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Length: {initial_chunk_size}
|
||||
Upload-Length: {total_size}
|
||||
Tus-Resumable: 1.0.0
|
||||
Content-Type: application/offset+octet-stream
|
||||
Expect: 100-continue
|
||||
|
||||
[initial binary data chunk]
|
||||
```
|
||||
* Similar rules as `PATCH` apply for content.
|
||||
* Client SHOULD include `Expect: 100-continue`.
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 201 Created
|
||||
Location: {upload_url}
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Offset: {accepted_offset}
|
||||
```
|
||||
* Server MUST include `Upload-Offset` with accepted bytes.
|
||||
|
||||
### Expiration (`expiration` extension)
|
||||
|
||||
Server MAY remove unfinished uploads. Server MUST add `expiration` to `Tus-Extension`.
|
||||
|
||||
* **Response (PATCH/POST):**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Upload-Expires: Wed, 25 Jun 2014 16:00:00 GMT
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Offset: {new_offset}
|
||||
```
|
||||
* **`Upload-Expires`**: Datetime in RFC 9110 format. Indicates when upload expires. Client SHOULD use to check validity. Server SHOULD respond `404 Not Found` or `410 Gone` for expired uploads.
|
||||
|
||||
### Checksum (`checksum` extension)
|
||||
|
||||
Verify data integrity of `PATCH` requests. Server MUST add `checksum` to `Tus-Extension`. Server MUST support `sha1`.
|
||||
|
||||
* **Request (PATCH):**
|
||||
```
|
||||
PATCH /files/{upload_id} HTTP/1.1
|
||||
Content-Length: {chunk_size}
|
||||
Upload-Offset: {current_offset}
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Checksum: {algorithm} {base64_checksum}
|
||||
|
||||
[binary data chunk]
|
||||
```
|
||||
* **Response:**
|
||||
* `204 No Content`: Checksums match.
|
||||
* `400 Bad Request`: Algorithm not supported.
|
||||
* `460 Checksum Mismatch`: Checksums mismatch.
|
||||
* In `400`/`460` cases, chunk MUST be discarded, upload/offset NOT updated.
|
||||
* **`Tus-Checksum-Algorithm`**: Comma-separated list of supported algorithms (Server response to `OPTIONS`).
|
||||
* **`Upload-Checksum`**: `{algorithm} {Base64_encoded_checksum}`.
|
||||
|
||||
### Termination (`termination` extension)
|
||||
|
||||
Client can terminate uploads. Server MUST add `termination` to `Tus-Extension`.
|
||||
|
||||
* **Request (DELETE):**
|
||||
```
|
||||
DELETE /files/{upload_id} HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Length: 0
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* Server SHOULD free resources, MUST respond `204 No Content`.
|
||||
* Future requests to URL SHOULD return `404 Not Found` or `410 Gone`.
|
||||
|
||||
### Concatenation (`concatenation` extension)
|
||||
|
||||
Concatenate multiple partial uploads into a single final upload. Server MUST add `concatenation` to `Tus-Extension`.
|
||||
|
||||
* **Partial Upload Creation (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Upload-Concat: partial
|
||||
Upload-Length: {partial_size}
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* `Upload-Concat: partial` header.
|
||||
* Server SHOULD NOT process partial uploads until concatenated.
|
||||
* **Final Upload Creation (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Upload-Concat: final;{url_partial1} {url_partial2} ...
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* `Upload-Concat: final;{space-separated_partial_urls}`.
|
||||
* Client MUST NOT include `Upload-Length`.
|
||||
* Final upload length is sum of partials.
|
||||
* Server MAY delete partials after concatenation.
|
||||
* Server MUST respond `403 Forbidden` to `PATCH` requests against final upload.
|
||||
* **`concatenation-unfinished`**: Server adds to `Tus-Extension` if it supports concatenation while partial uploads are in progress.
|
||||
* **HEAD Request for Final Upload:**
|
||||
* Response SHOULD NOT contain `Upload-Offset` unless concatenation finished.
|
||||
* After success, `Upload-Offset` and `Upload-Length` MUST be equal.
|
||||
* Response MUST include `Upload-Concat` header.
|
||||
* **HEAD Request for Partial Upload:**
|
||||
* Response MUST contain `Upload-Offset`.
|
||||
667
aiprompts/ai_instruct/uppy/tus_implementation.md
Normal file
667
aiprompts/ai_instruct/uppy/tus_implementation.md
Normal file
@@ -0,0 +1,667 @@
|
||||
|
||||
# TUS (1.0.0) — Server-Side Specs (Concise)
|
||||
|
||||
## Always
|
||||
|
||||
* All requests/responses **except** `OPTIONS` MUST include: `Tus-Resumable: 1.0.0`.
|
||||
If unsupported → `412 Precondition Failed` + `Tus-Version`.
|
||||
* Canonical server features via `OPTIONS /files`:
|
||||
|
||||
* `Tus-Version: 1.0.0`
|
||||
* `Tus-Extension: creation,creation-with-upload,termination,checksum,concatenation,concatenation-unfinished` (as supported)
|
||||
* `Tus-Max-Size: <int>` (if hard limit)
|
||||
* `Tus-Checksum-Algorithm: sha1[,md5,crc32...]` (if checksum ext.)
|
||||
|
||||
## Core
|
||||
|
||||
* **Create:** `POST /files` with `Upload-Length: <int>` OR `Upload-Defer-Length: 1`. Optional `Upload-Metadata`.
|
||||
|
||||
* `201 Created` + `Location: /files/{id}`, echo `Tus-Resumable`.
|
||||
* *Creation-With-Upload:* If body present → `Content-Type: application/offset+octet-stream`, accept bytes, respond with `Upload-Offset`.
|
||||
* **Status:** `HEAD /files/{id}`
|
||||
|
||||
* Always return `Upload-Offset` for partial uploads, include `Upload-Length` if known; if deferred, return `Upload-Defer-Length: 1`. `Cache-Control: no-store`.
|
||||
* **Upload:** `PATCH /files/{id}`
|
||||
|
||||
* `Content-Type: application/offset+octet-stream` and `Upload-Offset` (must match server).
|
||||
* On success → `204 No Content` + new `Upload-Offset`.
|
||||
* Mismatch → `409 Conflict`. Bad type → `415 Unsupported Media Type`.
|
||||
* **Terminate:** `DELETE /files/{id}` (if supported) → `204 No Content`. Subsequent requests → `404/410`.
|
||||
|
||||
## Checksum (optional but implemented here)
|
||||
|
||||
* Client MAY send: `Upload-Checksum: <algo> <base64digest>` per `PATCH`.
|
||||
|
||||
* Server MUST verify request body’s checksum of the exact received bytes.
|
||||
* If algo unsupported → `400 Bad Request`.
|
||||
* If mismatch → **discard the chunk** (no offset change) and respond `460 Checksum Mismatch`.
|
||||
* If OK → `204 No Content` + new `Upload-Offset`.
|
||||
* `OPTIONS` MUST include `Tus-Checksum-Algorithm` (comma-separated algos).
|
||||
|
||||
## Concatenation (optional but implemented here)
|
||||
|
||||
* **Partial uploads:** `POST /files` with `Upload-Concat: partial` and `Upload-Length`. (MUST have length; may use creation-with-upload/patch thereafter.)
|
||||
* **Final upload:** `POST /files` with
|
||||
`Upload-Concat: final; /files/{a} /files/{b} ...`
|
||||
|
||||
* MUST NOT include `Upload-Length`.
|
||||
* Final uploads **cannot** be `PATCH`ed (`403`).
|
||||
* Server SHOULD assemble final (in order).
|
||||
* If `concatenation-unfinished` supported, final may be created before partials completed; server completes once all partials are done.
|
||||
* **HEAD semantics:**
|
||||
|
||||
* For *partial*: MUST include `Upload-Offset`.
|
||||
* For *final* before concatenation: SHOULD NOT include `Upload-Offset`. `Upload-Length` MAY be present if computable (= sum of partials’ lengths when known).
|
||||
* After finalization: `Upload-Offset == Upload-Length`.
|
||||
|
||||
---
|
||||
|
||||
# TUS FastAPI Server (disk-only, crash-safe, checksum + concatenation)
|
||||
|
||||
**Features**
|
||||
|
||||
* All persistent state on disk:
|
||||
|
||||
```
|
||||
TUS_ROOT/
|
||||
{upload_id}/
|
||||
info.json # canonical metadata & status
|
||||
data.part # exists while uploading or while building final
|
||||
data # final file after atomic rename
|
||||
```
|
||||
* Crash recovery: `HEAD` offset = size of `data.part` or `data`.
|
||||
* `.part` during upload; `os.replace()` (atomic) to `data` on completion.
|
||||
* Streaming I/O; `fsync` on file + parent directory.
|
||||
* Checksum: supports `sha1` (can easily add md5/crc32).
|
||||
* Concatenation: server builds final when partials complete; supports `concatenation-unfinished`.
|
||||
|
||||
> Run with: `uv pip install fastapi uvicorn` then `uvicorn tus_server:app --host 0.0.0.0 --port 8080` (or `python tus_server.py`).
|
||||
> Set `TUS_ROOT` env to choose storage root.
|
||||
|
||||
```python
|
||||
# tus_server.py
|
||||
from fastapi import FastAPI, Request, Response, HTTPException
|
||||
from typing import Optional, Dict, Any, List
|
||||
import os, json, uuid, base64, asyncio, errno, hashlib
|
||||
|
||||
# -----------------------------
|
||||
# Config
|
||||
# -----------------------------
|
||||
TUS_VERSION = "1.0.0"  # value sent/expected in the Tus-Resumable header
# Advertise extensions implemented below:
TUS_EXTENSIONS = ",".join([
    "creation",
    "creation-with-upload",
    "termination",
    "checksum",
    "concatenation",
    "concatenation-unfinished",
])
# Supported checksum algorithms (keys = header token)
CHECKSUM_ALGOS = ["sha1"]  # add "md5" if desired

# Storage root for all upload state; override via the TUS_ROOT env var.
TUS_ROOT = os.environ.get("TUS_ROOT", "/tmp/tus")
MAX_SIZE = 1 << 40  # 1 TiB default upper bound on upload size

os.makedirs(TUS_ROOT, exist_ok=True)
app = FastAPI()

# Per-process locks to prevent concurrent mutations on same upload_id
# (asyncio locks: guards concurrent requests within this process only)
_locks: Dict[str, asyncio.Lock] = {}
|
||||
def _lock_for(upload_id: str) -> asyncio.Lock:
    """Return the per-upload asyncio.Lock, creating it on first use."""
    lock = _locks.get(upload_id)
    if lock is None:
        lock = asyncio.Lock()
        _locks[upload_id] = lock
    return lock
|
||||
|
||||
# -----------------------------
|
||||
# Path helpers
|
||||
# -----------------------------
|
||||
def upload_dir(upload_id: str) -> str:
    """Directory holding all on-disk state for one upload."""
    return os.path.join(TUS_ROOT, upload_id)


def info_path(upload_id: str) -> str:
    """Path of the canonical metadata file (info.json) for the upload."""
    base = upload_dir(upload_id)
    return os.path.join(base, "info.json")


def part_path(upload_id: str) -> str:
    """Path of the in-progress data file (data.part) for the upload."""
    base = upload_dir(upload_id)
    return os.path.join(base, "data.part")


def final_path(upload_id: str) -> str:
    """Path of the completed data file (data) for the upload."""
    base = upload_dir(upload_id)
    return os.path.join(base, "data")
|
||||
|
||||
# -----------------------------
|
||||
# FS utils (crash-safe)
|
||||
# -----------------------------
|
||||
def _fsync_dir(path: str) -> None:
|
||||
fd = os.open(path, os.O_DIRECTORY)
|
||||
try:
|
||||
os.fsync(fd)
|
||||
finally:
|
||||
os.close(fd)
|
||||
|
||||
def _write_json_atomic(path: str, obj: Dict[str, Any]) -> None:
    """Durably write `obj` as compact JSON to `path`.

    Writes to a sibling `<path>.tmp`, fsyncs it, atomically renames it over
    `path` with os.replace, then fsyncs the parent directory so the rename
    itself survives a crash.

    Fix: on any failure before the rename the temp file used to be leaked;
    it is now removed (best-effort) before the exception propagates.
    """
    tmp = f"{path}.tmp"
    data = json.dumps(obj, separators=(",", ":"), ensure_ascii=False)
    try:
        with open(tmp, "w", encoding="utf-8") as f:
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        os.replace(tmp, path)  # atomic on POSIX
    except Exception:
        # Best-effort cleanup of the orphaned temp file, then re-raise.
        try:
            os.remove(tmp)
        except FileNotFoundError:
            pass
        raise
    _fsync_dir(os.path.dirname(path))
|
||||
|
||||
def _read_json(path: str) -> Dict[str, Any]:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
def _size(path: str) -> int:
|
||||
try:
|
||||
return os.path.getsize(path)
|
||||
except FileNotFoundError:
|
||||
return 0
|
||||
|
||||
def _exists(path: str) -> bool:
    # Thin wrapper over os.path.exists; kept for symmetry with _size().
    return os.path.exists(path)
|
||||
|
||||
# -----------------------------
|
||||
# TUS helpers
|
||||
# -----------------------------
|
||||
def _ensure_tus_version(req: Request):
    """Enforce the Tus-Resumable protocol-version header.

    Per the tus core protocol, every request except OPTIONS must carry
    `Tus-Resumable`; otherwise the server responds 412 Precondition Failed
    and advertises its supported version via the `Tus-Version` header.

    Fix: the missing-header 412 previously omitted the `Tus-Version`
    response header that the spec requires on every 412.
    """
    if req.method == "OPTIONS":
        return  # OPTIONS is exempt; clients SHOULD NOT send Tus-Resumable here
    v = req.headers.get("Tus-Resumable")
    if v is None:
        raise HTTPException(status_code=412, detail="Missing Tus-Resumable",
                            headers={"Tus-Version": TUS_VERSION})
    if v != TUS_VERSION:
        raise HTTPException(status_code=412, detail="Unsupported Tus-Resumable",
                            headers={"Tus-Version": TUS_VERSION})
|
||||
|
||||
def _parse_metadata(raw: Optional[str]) -> str:
|
||||
# Raw passthrough; validate/consume in your app if needed.
|
||||
return raw or ""
|
||||
|
||||
def _new_upload_info(upload_id: str,
|
||||
kind: str, # "single" | "partial" | "final"
|
||||
length: Optional[int],
|
||||
defer_length: bool,
|
||||
metadata: str,
|
||||
parts: Optional[List[str]] = None) -> Dict[str, Any]:
|
||||
return {
|
||||
"upload_id": upload_id,
|
||||
"kind": kind, # "single" (default), "partial", or "final"
|
||||
"length": length, # int or None if deferred/unknown
|
||||
"defer_length": bool(defer_length),
|
||||
"metadata": metadata, # raw Upload-Metadata header
|
||||
"completed": False,
|
||||
"parts": parts or [], # for final: list of upload_ids (not URLs)
|
||||
}
|
||||
|
||||
def _load_info_or_404(upload_id: str) -> Dict[str, Any]:
    """Read the upload's info.json; 404 if absent, 500 if unreadable."""
    meta_file = info_path(upload_id)
    if not _exists(meta_file):
        raise HTTPException(404, "Upload not found")
    try:
        return _read_json(meta_file)
    except Exception as e:
        # Metadata on disk is corrupt/truncated — surface as a server error.
        raise HTTPException(500, f"Corrupt metadata: {e}")
|
||||
|
||||
def _set_info(upload_id: str, info: Dict[str, Any]) -> None:
    # Persist the upload's metadata atomically (temp file + rename; crash-safe).
    _write_json_atomic(info_path(upload_id), info)
|
||||
|
||||
def _ensure_dir(path: str):
    # exist_ok=False: raises FileExistsError when the directory already exists.
    # NOTE(review): the name suggests exist_ok=True, but the strict behavior is
    # presumably intentional (creation must be the first claim on an upload_id)
    # — confirm against callers before changing.
    os.makedirs(path, exist_ok=False)
|
||||
|
||||
def _atomic_finalize_file(upload_id: str):
    """Rename data.part → data and mark completed.

    Order matters for crash-safety: flush+fsync the .part contents first,
    then atomically rename, then fsync the parent directory so the rename
    itself is durable, and only afterwards flip `completed` in info.json.
    If data.part is already gone (e.g. crash after a previous rename), the
    function still marks the upload completed.
    """
    upath = upload_dir(upload_id)
    p = part_path(upload_id)
    f = final_path(upload_id)
    if _exists(p):
        with open(p, "rb+") as fp:
            fp.flush()
            os.fsync(fp.fileno())
        os.replace(p, f)  # atomic within the same filesystem
        _fsync_dir(upath)
    info = _load_info_or_404(upload_id)
    info["completed"] = True
    _set_info(upload_id, info)
|
||||
|
||||
def _current_offsets(upload_id: str):
    """Inspect on-disk state: returns (is_final, is_partial, offset_bytes).

    Crash recovery relies on this: the offset is simply the size of whichever
    data file exists, with the completed 'data' file taking precedence over
    the in-progress 'data.part'.
    """
    done_file = final_path(upload_id)
    if _exists(done_file):
        return True, False, _size(done_file)
    in_progress = part_path(upload_id)
    if _exists(in_progress):
        return False, True, _size(in_progress)
    return False, False, 0
|
||||
|
||||
def _parse_concat_header(h: Optional[str]) -> Optional[Dict[str, Any]]:
|
||||
if not h:
|
||||
return None
|
||||
h = h.strip()
|
||||
if h == "partial":
|
||||
return {"type": "partial", "parts": []}
|
||||
if h.startswith("final;"):
|
||||
# format: final;/files/a /files/b
|
||||
rest = h[len("final;"):].strip()
|
||||
urls = [s for s in rest.split(" ") if s]
|
||||
return {"type": "final", "parts": urls}
|
||||
return None
|
||||
|
||||
def _extract_upload_id_from_url(url: str) -> str:
|
||||
# Accept relative /files/{id} (common) — robust split:
|
||||
segs = [s for s in url.split("/") if s]
|
||||
return segs[-1] if segs else url
|
||||
|
||||
def _sum_lengths_or_none(ids: List[str]) -> Optional[int]:
    """Sum the declared lengths of the given uploads; None if any is unknown."""
    total = 0
    for upload_id in ids:
        meta = _load_info_or_404(upload_id)
        declared = meta.get("length")
        if declared is None:
            # A deferred/unknown length makes the total incomputable.
            return None
        total += int(declared)
    return total
|
||||
|
||||
async def _stream_with_checksum_and_append(file_obj, request: Request, algo: Optional[str]) -> int:
    """Append the request body to `file_obj`, optionally verifying a checksum.

    Streams the body chunk-by-chunk (never buffering it whole), hashing as it
    goes when `algo` is given. Returns the number of bytes written. On a
    checksum mismatch the appended bytes are rolled back (truncate back to the
    starting position) and HTTPException(460) is raised; an unsupported
    algorithm or a malformed Upload-Checksum header raises HTTPException(400).

    NOTE(review): assumes file_obj is a binary file opened for update and
    positioned at the current upload offset — confirm at call sites.
    """
    start_pos = file_obj.tell()  # remember where we started, for rollback
    # Choose hash implementation for the requested algorithm (sha1 only today)
    hasher = None
    provided_digest = None
    if algo:
        if algo not in CHECKSUM_ALGOS:
            raise HTTPException(400, "Unsupported checksum algorithm")
        if algo == "sha1":
            hasher = hashlib.sha1()
        # elif algo == "md5": hasher = hashlib.md5()
        # elif algo == "crc32": ... (custom)
    # Parse the expected digest from the Upload-Checksum header, if present
    if hasher:
        uh = request.headers.get("Upload-Checksum")
        if not uh:
            # spec: checksum header optional; if algo passed to this fn we must have parsed it already
            pass
        else:
            try:
                # Header format: "<algo> <base64digest>"
                name, b64 = uh.split(" ", 1)
                if name != algo:
                    raise ValueError()
                provided_digest = base64.b64decode(b64.encode("ascii"))
            except Exception:
                raise HTTPException(400, "Invalid Upload-Checksum")
    written = 0
    # Stream the body: write each chunk immediately, folding it into the hash
    async for chunk in request.stream():
        if not chunk:
            continue
        file_obj.write(chunk)
        if hasher:
            hasher.update(chunk)
        written += len(chunk)
    # Verify checksum if a digest was supplied by the client
    if hasher and provided_digest is not None:
        digest = hasher.digest()
        if digest != provided_digest:
            # rollback appended bytes so the stored offset is unchanged
            # (tus requires the chunk be discarded on mismatch)
            file_obj.truncate(start_pos)
            file_obj.flush()
            os.fsync(file_obj.fileno())
            raise HTTPException(status_code=460, detail="Checksum Mismatch")
    # Durability: flush + fsync before reporting the new offset to the client
    file_obj.flush()
    os.fsync(file_obj.fileno())
    return written
|
||||
|
||||
def _try_finalize_final(upload_id: str):
|
||||
"""If this is a final upload and all partials are completed, build final data and finalize atomically."""
|
||||
info = _load_info_or_404(upload_id)
|
||||
if info.get("kind") != "final" or info.get("completed"):
|
||||
return
|
||||
part_ids = info.get("parts", [])
|
||||
# Check all partials completed and have data
|
||||
for pid in part_ids:
|
||||
pinf = _load_info_or_404(pid)
|
||||
if not pinf.get("completed"):
|
||||
return # still not ready
|
||||
if not _exists(final_path(pid)):
|
||||
# tolerate leftover .part (e.g., if completed used .part->data). If data missing, can't finalize.
|
||||
return
|
||||
# Build final .part by concatenating parts' data in order, then atomically rename
|
||||
up = upload_dir(upload_id)
|
||||
os.makedirs(up, exist_ok=True)
|
||||
ppath = part_path(upload_id)
|
||||
# Reset/overwrite .part
|
||||
with open(ppath, "wb") as out:
|
||||
for pid in part_ids:
|
||||
with open(final_path(pid), "rb") as src:
|
||||
for chunk in iter(lambda: src.read(1024 * 1024), b""):
|
||||
out.write(chunk)
|
||||
out.flush()
|
||||
os.fsync(out.fileno())
|
||||
# If server can compute length now, set it
|
||||
length = _sum_lengths_or_none(part_ids)
|
||||
info["length"] = length if length is not None else info.get("length")
|
||||
_set_info(upload_id, info)
|
||||
_atomic_finalize_file(upload_id)
|
||||
|
||||
# -----------------------------
|
||||
# Routes
|
||||
# -----------------------------
|
||||
@app.options("/files")
|
||||
async def tus_options():
|
||||
headers = {
|
||||
"Tus-Version": TUS_VERSION,
|
||||
"Tus-Extension": TUS_EXTENSIONS,
|
||||
"Tus-Max-Size": str(MAX_SIZE),
|
||||
"Tus-Checksum-Algorithm": ",".join(CHECKSUM_ALGOS),
|
||||
}
|
||||
return Response(status_code=204, headers=headers)
|
||||
|
||||
@app.post("/files")
|
||||
async def tus_create(request: Request):
|
||||
_ensure_tus_version(request)
|
||||
|
||||
metadata = _parse_metadata(request.headers.get("Upload-Metadata"))
|
||||
concat = _parse_concat_header(request.headers.get("Upload-Concat"))
|
||||
|
||||
# Validate creation modes
|
||||
hdr_len = request.headers.get("Upload-Length")
|
||||
hdr_defer = request.headers.get("Upload-Defer-Length")
|
||||
|
||||
if concat and concat["type"] == "partial":
|
||||
# Partial MUST have Upload-Length (spec)
|
||||
if hdr_len is None:
|
||||
raise HTTPException(400, "Partial uploads require Upload-Length")
|
||||
if hdr_defer is not None:
|
||||
raise HTTPException(400, "Partial uploads cannot defer length")
|
||||
elif concat and concat["type"] == "final":
|
||||
# Final MUST NOT include Upload-Length
|
||||
if hdr_len is not None or hdr_defer is not None:
|
||||
raise HTTPException(400, "Final uploads must not include Upload-Length or Upload-Defer-Length")
|
||||
else:
|
||||
# Normal single upload: require length or defer
|
||||
if hdr_len is None and hdr_defer != "1":
|
||||
raise HTTPException(400, "Must provide Upload-Length or Upload-Defer-Length: 1")
|
||||
|
||||
# Parse length
|
||||
length: Optional[int] = None
|
||||
defer = False
|
||||
if hdr_len is not None:
|
||||
try:
|
||||
length = int(hdr_len)
|
||||
if length < 0: raise ValueError()
|
||||
except ValueError:
|
||||
raise HTTPException(400, "Invalid Upload-Length")
|
||||
if length > MAX_SIZE:
|
||||
raise HTTPException(413, "Upload too large")
|
||||
elif not concat or concat["type"] != "final":
|
||||
# final has no length at creation
|
||||
defer = (hdr_defer == "1")
|
||||
|
||||
upload_id = str(uuid.uuid4())
|
||||
udir = upload_dir(upload_id)
|
||||
_ensure_dir(udir)
|
||||
|
||||
if concat and concat["type"] == "final":
|
||||
# Resolve part ids from URLs
|
||||
part_ids = [_extract_upload_id_from_url(u) for u in concat["parts"]]
|
||||
# Compute length if possible
|
||||
sum_len = _sum_lengths_or_none(part_ids)
|
||||
info = _new_upload_info(upload_id, "final", sum_len, False, metadata, part_ids)
|
||||
_set_info(upload_id, info)
|
||||
|
||||
# Prepare empty .part (will be filled when partials complete)
|
||||
with open(part_path(upload_id), "wb") as f:
|
||||
f.flush(); os.fsync(f.fileno())
|
||||
_fsync_dir(udir)
|
||||
|
||||
# If all partials already complete, finalize immediately
|
||||
_try_finalize_final(upload_id)
|
||||
|
||||
return Response(status_code=201,
|
||||
headers={"Location": f"/files/{upload_id}",
|
||||
"Tus-Resumable": TUS_VERSION})
|
||||
|
||||
# Create partial or single
|
||||
kind = "partial" if (concat and concat["type"] == "partial") else "single"
|
||||
info = _new_upload_info(upload_id, kind, length, defer, metadata)
|
||||
_set_info(upload_id, info)
|
||||
|
||||
# Create empty .part
|
||||
with open(part_path(upload_id), "wb") as f:
|
||||
f.flush(); os.fsync(f.fileno())
|
||||
_fsync_dir(udir)
|
||||
|
||||
# Creation-With-Upload (optional body)
|
||||
upload_offset = 0
|
||||
has_body = request.headers.get("Content-Length") or request.headers.get("Transfer-Encoding")
|
||||
if has_body:
|
||||
ctype = request.headers.get("Content-Type", "")
|
||||
if ctype != "application/offset+octet-stream":
|
||||
raise HTTPException(415, "Content-Type must be application/offset+octet-stream for creation-with-upload")
|
||||
# Checksum header optional; if present, parse algo token
|
||||
uh = request.headers.get("Upload-Checksum")
|
||||
algo = None
|
||||
if uh:
|
||||
try:
|
||||
algo = uh.split(" ", 1)[0]
|
||||
except Exception:
|
||||
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||
|
||||
async with _lock_for(upload_id):
|
||||
with open(part_path(upload_id), "ab+") as f:
|
||||
f.seek(0, os.SEEK_END)
|
||||
upload_offset = await _stream_with_checksum_and_append(f, request, algo)
|
||||
|
||||
# If length known and we hit it, finalize
|
||||
inf = _load_info_or_404(upload_id)
|
||||
if inf["length"] is not None and upload_offset == int(inf["length"]):
|
||||
_atomic_finalize_file(upload_id)
|
||||
# If this is a partial that belongs to some final, a watcher could finalize final; here we rely on
|
||||
# client to create final explicitly (spec). Finalization of final is handled by _try_finalize_final
|
||||
# when final resource is created (or rechecked on subsequent HEAD/PATCH).
|
||||
headers = {"Location": f"/files/{upload_id}", "Tus-Resumable": TUS_VERSION}
|
||||
if upload_offset:
|
||||
headers["Upload-Offset"] = str(upload_offset)
|
||||
return Response(status_code=201, headers=headers)
|
||||
|
||||
@app.head("/files/{upload_id}")
|
||||
async def tus_head(upload_id: str, request: Request):
|
||||
_ensure_tus_version(request)
|
||||
info = _load_info_or_404(upload_id)
|
||||
is_final = info.get("kind") == "final"
|
||||
|
||||
headers = {
|
||||
"Tus-Resumable": TUS_VERSION,
|
||||
"Cache-Control": "no-store",
|
||||
}
|
||||
if info.get("metadata"):
|
||||
headers["Upload-Metadata"] = info["metadata"]
|
||||
|
||||
if info.get("length") is not None:
|
||||
headers["Upload-Length"] = str(int(info["length"]))
|
||||
elif info.get("defer_length"):
|
||||
headers["Upload-Defer-Length"] = "1"
|
||||
|
||||
exists_final, exists_part, offset = False, False, 0
|
||||
if is_final and not info.get("completed"):
|
||||
# BEFORE concatenation completes: SHOULD NOT include Upload-Offset
|
||||
# Try to see if we can finalize now (e.g., partials completed after crash)
|
||||
_try_finalize_final(upload_id)
|
||||
info = _load_info_or_404(upload_id)
|
||||
if info.get("completed"):
|
||||
# fallthrough to completed case
|
||||
pass
|
||||
else:
|
||||
# For in-progress final, no Upload-Offset; include Upload-Length if computable (already handled above)
|
||||
return Response(status_code=200, headers=headers)
|
||||
|
||||
# For partials or completed finals
|
||||
f = final_path(upload_id)
|
||||
p = part_path(upload_id)
|
||||
if _exists(f):
|
||||
exists_final, offset = True, _size(f)
|
||||
elif _exists(p):
|
||||
exists_part, offset = True, _size(p)
|
||||
else:
|
||||
# if info exists but no data, consider gone
|
||||
raise HTTPException(410, "Upload gone")
|
||||
|
||||
headers["Upload-Offset"] = str(offset)
|
||||
return Response(status_code=200, headers=headers)
|
||||
|
||||
@app.patch("/files/{upload_id}")
|
||||
async def tus_patch(upload_id: str, request: Request):
|
||||
_ensure_tus_version(request)
|
||||
info = _load_info_or_404(upload_id)
|
||||
|
||||
if info.get("kind") == "final":
|
||||
raise HTTPException(403, "Final uploads cannot be patched")
|
||||
|
||||
ctype = request.headers.get("Content-Type", "")
|
||||
if ctype != "application/offset+octet-stream":
|
||||
raise HTTPException(415, "Content-Type must be application/offset+octet-stream")
|
||||
|
||||
# Client offset must match server
|
||||
try:
|
||||
client_offset = int(request.headers.get("Upload-Offset", "-1"))
|
||||
if client_offset < 0: raise ValueError()
|
||||
except ValueError:
|
||||
raise HTTPException(400, "Invalid or missing Upload-Offset")
|
||||
|
||||
# If length deferred, client may now set Upload-Length (once)
|
||||
if info.get("length") is None and info.get("defer_length"):
|
||||
if "Upload-Length" in request.headers:
|
||||
try:
|
||||
new_len = int(request.headers["Upload-Length"])
|
||||
if new_len < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise HTTPException(400, "Invalid Upload-Length")
|
||||
if new_len > MAX_SIZE:
|
||||
raise HTTPException(413, "Upload too large")
|
||||
info["length"] = new_len
|
||||
info["defer_length"] = False
|
||||
_set_info(upload_id, info)
|
||||
|
||||
# Determine current server offset
|
||||
f = final_path(upload_id)
|
||||
p = part_path(upload_id)
|
||||
if _exists(f):
|
||||
raise HTTPException(403, "Upload already finalized")
|
||||
if not _exists(p):
|
||||
raise HTTPException(404, "Upload not found")
|
||||
|
||||
server_offset = _size(p)
|
||||
if client_offset != server_offset:
|
||||
return Response(status_code=409)
|
||||
|
||||
# Optional checksum
|
||||
uh = request.headers.get("Upload-Checksum")
|
||||
algo = None
|
||||
if uh:
|
||||
try:
|
||||
algo = uh.split(" ", 1)[0]
|
||||
except Exception:
|
||||
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||
|
||||
# Append data (with rollback on checksum mismatch)
|
||||
async with _lock_for(upload_id):
|
||||
with open(p, "ab+") as fobj:
|
||||
fobj.seek(0, os.SEEK_END)
|
||||
written = await _stream_with_checksum_and_append(fobj, request, algo)
|
||||
|
||||
new_offset = server_offset + written
|
||||
|
||||
# If length known and reached exactly, finalize
|
||||
info = _load_info_or_404(upload_id) # reload
|
||||
if info.get("length") is not None and new_offset == int(info["length"]):
|
||||
_atomic_finalize_file(upload_id)
|
||||
|
||||
# If this is a partial, a corresponding final may exist and be now completable
|
||||
# We don't maintain reverse index; finalization is triggered when HEAD on final is called.
|
||||
# (Optional: scan for finals to proactively finalize.)
|
||||
|
||||
return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION, "Upload-Offset": str(new_offset)})
|
||||
|
||||
@app.delete("/files/{upload_id}")
|
||||
async def tus_delete(upload_id: str, request: Request):
|
||||
_ensure_tus_version(request)
|
||||
async with _lock_for(upload_id):
|
||||
udir = upload_dir(upload_id)
|
||||
for p in (part_path(upload_id), final_path(upload_id), info_path(upload_id)):
|
||||
try:
|
||||
os.remove(p)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
try:
|
||||
os.rmdir(udir)
|
||||
except OSError:
|
||||
pass
|
||||
return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION})
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Client Examples (manual)
|
||||
|
||||
```bash
|
||||
# OPTIONS
|
||||
curl -i -X OPTIONS http://localhost:8080/files
|
||||
|
||||
# 1) Single upload (known length)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Length: 11" \
|
||||
-H "Upload-Metadata: filename Zm9vLnR4dA=="
|
||||
# → Location: /files/<ID>
|
||||
|
||||
# Upload with checksum (sha1 of "hello ")
|
||||
printf "hello " | curl -i -X PATCH http://localhost:8080/files/<ID> \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Content-Type: application/offset+octet-stream" \
|
||||
-H "Upload-Offset: 0" \
|
||||
-H "Upload-Checksum: sha1 L6v8xR3Lw4N2n9kQox3wL7G0m/I=" \
|
||||
--data-binary @-
|
||||
# (Replace digest with correct base64 for your chunk)
|
||||
|
||||
# 2) Concatenation
|
||||
# Create partial A (5 bytes)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Length: 5" \
|
||||
-H "Upload-Concat: partial"
|
||||
# → Location: /files/<A>
|
||||
printf "hello" | curl -i -X PATCH http://localhost:8080/files/<A> \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Content-Type: application/offset+octet-stream" \
|
||||
-H "Upload-Offset: 0" \
|
||||
--data-binary @-
|
||||
|
||||
# Create partial B (6 bytes)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Length: 6" \
|
||||
-H "Upload-Concat: partial"
|
||||
# → Location: /files/<B>
|
||||
printf " world" | curl -i -X PATCH http://localhost:8080/files/<B> \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Content-Type: application/offset+octet-stream" \
|
||||
-H "Upload-Offset: 0" \
|
||||
--data-binary @-
|
||||
|
||||
# Create final (may be before or after partials complete)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Concat: final; /files/<A> /files/<B>"
|
||||
# HEAD on final will eventually show Upload-Offset once finalized
|
||||
curl -i -X HEAD http://localhost:8080/files/<FINAL> -H "Tus-Resumable: 1.0.0"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation Notes (agent hints)
|
||||
|
||||
* **Durability:** every data write `fsync(file)`; after `os.replace` of `*.part → data` or `info.json.tmp → info.json`, also `fsync(parent)`.
|
||||
* **Checksum:** verify against **this request’s** body only; on mismatch, **truncate back** to previous size and return `460`.
|
||||
* **Concatenation:** final upload is never `PATCH`ed. Server builds `final.data.part` by concatenating each partial’s **final file** in order, then atomically renames and marks completed. It’s triggered lazily in `HEAD` of final (and right after creation).
|
||||
* **Crash Recovery:** offset = `size(data.part)` or `size(data)`; `info.json` is canonical for `kind`, `length`, `defer_length`, `completed`, `parts`.
|
||||
* **Multi-process deployments:** replace `asyncio.Lock` with file locks (`fcntl.flock`) per `upload_id` to synchronize across workers.
|
||||
|
||||
|
||||
229
aiprompts/ai_instruct/uppy/uppy.md
Normal file
229
aiprompts/ai_instruct/uppy/uppy.md
Normal file
@@ -0,0 +1,229 @@
|
||||
```bash
|
||||
unpm install @uppy/react
|
||||
```
|
||||
|
||||
## Components
|
||||
|
||||
Pre-composed, plug-and-play components:
|
||||
|
||||
<Dashboard /> renders @uppy/dashboard
|
||||
<DashboardModal /> renders @uppy/dashboard as a modal
|
||||
<DragDrop /> renders @uppy/drag-drop
|
||||
<ProgressBar /> renders @uppy/progress-bar
|
||||
<StatusBar /> renders @uppy/status-bar
|
||||
|
||||
more info see https://uppy.io/docs/react
|
||||
|
||||
|
||||
we use tus server for the upload support
|
||||
|
||||
npm install @uppy/tus
|
||||
|
||||
e.g.
|
||||
|
||||
import Uppy from '@uppy/core';
|
||||
import Dashboard from '@uppy/dashboard';
|
||||
import Tus from '@uppy/tus';
|
||||
|
||||
import '@uppy/core/dist/style.min.css';
|
||||
import '@uppy/dashboard/dist/style.min.css';
|
||||
|
||||
new Uppy()
|
||||
.use(Dashboard, { inline: true, target: 'body' })
|
||||
|
||||
|
||||
|
||||
========================
|
||||
CODE SNIPPETS
|
||||
========================
|
||||
|
||||
TITLE: React Dashboard Modal Example with TUS
|
||||
DESCRIPTION: Demonstrates how to use the DashboardModal component from @uppy/react with the Tus plugin for resumable uploads.
|
||||
LANGUAGE: jsx
|
||||
CODE:
|
||||
```
|
||||
/** @jsx React */
|
||||
import React from 'react'
|
||||
import Uppy from '@uppy/core'
|
||||
import { DashboardModal } from '@uppy/react'
|
||||
import Tus from '@uppy/tus'
|
||||
|
||||
const uppy = new Uppy({ debug: true, autoProceed: false })
|
||||
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
|
||||
|
||||
class Example extends React.Component {
|
||||
state = { open: false }
|
||||
|
||||
render() {
|
||||
const { open } = this.state
|
||||
return (
|
||||
<DashboardModal
|
||||
uppy={uppy}
|
||||
open={open}
|
||||
onRequestClose={this.handleClose}
|
||||
/>
|
||||
)
|
||||
}
|
||||
// ..snip..
|
||||
}
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Installation using npm for @uppy/react
|
||||
DESCRIPTION: Provides the command to install the @uppy/react package using npm.
|
||||
LANGUAGE: bash
|
||||
CODE:
|
||||
```
|
||||
$ npm install @uppy/react @uppy/core @uppy/dashboard @uppy/tus
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Uppy Dashboard and Tus Integration Example (HTML & JavaScript)
|
||||
DESCRIPTION: This snippet demonstrates how to initialize Uppy with the Dashboard and Tus plugins, configure them, and handle upload success events.
|
||||
LANGUAGE: html
|
||||
CODE:
|
||||
```
|
||||
<html>
|
||||
<head>
|
||||
<link rel="stylesheet" href="https://releases.transloadit.com/uppy/v4.18.0/uppy.min.css" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="DashboardContainer"></div>
|
||||
<button class="UppyModalOpenerBtn">Upload</button>
|
||||
<div class="uploaded-files">
|
||||
<h5>Uploaded files:</h5>
|
||||
<ol></ol>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
<script type="module">
|
||||
import { Uppy, Dashboard, Tus } from 'https://releases.transloadit.com/uppy/v4.18.0/uppy.min.mjs'
|
||||
var uppy = new Uppy({
|
||||
debug: true,
|
||||
autoProceed: false,
|
||||
})
|
||||
.use(Dashboard, {
|
||||
browserBackButtonClose: false,
|
||||
height: 470,
|
||||
inline: false,
|
||||
replaceTargetContent: true,
|
||||
showProgressDetails: true,
|
||||
target: '.DashboardContainer',
|
||||
trigger: '.UppyModalOpenerBtn',
|
||||
})
|
||||
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
|
||||
.on('upload-success', function (file, response) {
|
||||
var url = response.uploadURL
|
||||
var fileName = file.name
|
||||
|
||||
document.querySelector('.uploaded-files ol').innerHTML +=
|
||||
'<li><a href="' + url + '" target="_blank">' + fileName + '</a></li>'
|
||||
})
|
||||
</script>
|
||||
</html>
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Initialize Uppy with Tus Plugin (JavaScript)
|
||||
DESCRIPTION: Demonstrates how to initialize Uppy and configure the Tus plugin for resumable uploads.
|
||||
LANGUAGE: js
|
||||
CODE:
|
||||
```
|
||||
import Uppy from '@uppy/core'
|
||||
import Tus from '@uppy/tus'
|
||||
|
||||
const uppy = new Uppy()
|
||||
uppy.use(Tus, {
|
||||
endpoint: 'https://tusd.tusdemo.net/files/', // use your tus endpoint here
|
||||
resume: true,
|
||||
retryDelays: [0, 1000, 3000, 5000],
|
||||
})
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Uppy Core Initialization and Plugin Usage (JavaScript)
|
||||
DESCRIPTION: This example demonstrates how to initialize Uppy with core functionality and integrate the Tus plugin. It also shows how to listen for upload completion events.
|
||||
LANGUAGE: javascript
|
||||
CODE:
|
||||
```
|
||||
import Uppy from '@uppy/core'
|
||||
import Dashboard from '@uppy/dashboard'
|
||||
import Tus from '@uppy/tus'
|
||||
|
||||
const uppy = new Uppy()
|
||||
.use(Dashboard, { trigger: '#select-files' })
|
||||
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
|
||||
.on('complete', (result) => {
|
||||
console.log('Upload result:', result)
|
||||
})
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Uppy XHRUpload Configuration (JavaScript)
|
||||
DESCRIPTION: This snippet shows the basic JavaScript configuration for Uppy, initializing it with the XHRUpload plugin to send files to a specified endpoint.
|
||||
LANGUAGE: javascript
|
||||
CODE:
|
||||
```
|
||||
import Uppy from '@uppy/core';
|
||||
import XHRUpload from '@uppy/xhr-upload';
|
||||
|
||||
const uppy = new Uppy({
|
||||
debug: true,
|
||||
autoProceed: false,
|
||||
restrictions: {
|
||||
maxFileSize: 100000000,
|
||||
maxNumberOfFiles: 10,
|
||||
allowedFileTypes: ['image/*', 'video/*']
|
||||
}
|
||||
});
|
||||
|
||||
uppy.use(XHRUpload, {
|
||||
endpoint: 'YOUR_UPLOAD_ENDPOINT_URL',
|
||||
fieldName: 'files[]',
|
||||
method: 'post'
|
||||
});
|
||||
|
||||
uppy.on('complete', (result) => {
|
||||
console.log('Upload complete:', result);
|
||||
});
|
||||
|
||||
uppy.on('error', (error) => {
|
||||
console.error('Upload error:', error);
|
||||
});
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Install Uppy Core Packages for TUS
|
||||
DESCRIPTION: Installs the core Uppy package along with the Dashboard and Tus plugins using npm.
|
||||
LANGUAGE: bash
|
||||
CODE:
|
||||
```
|
||||
npm install @uppy/core @uppy/dashboard @uppy/tus @uppy/xhr-upload
|
||||
```
|
||||
|
||||
========================
|
||||
QUESTIONS AND ANSWERS
|
||||
========================
|
||||
|
||||
TOPIC: Uppy React Components
|
||||
Q: What is the purpose of the @uppy/react package?
|
||||
A: The @uppy/react package provides React component wrappers for Uppy's officially maintained UI plugins. It allows developers to easily integrate Uppy's file uploading capabilities into their React applications.
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TOPIC: Uppy React Components
|
||||
Q: How can @uppy/react be installed in a project?
|
||||
A: The @uppy/react package can be installed using npm with the command '$ npm install @uppy/react'.
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TOPIC: Uppy React Components
|
||||
Q: Where can I find more detailed documentation for the @uppy/react plugin?
|
||||
A: More detailed documentation for the @uppy/react plugin is available on the Uppy website at https://uppy.io/docs/react.
|
||||
59
examples/lang/python/codewalker.vsh
Executable file
59
examples/lang/python/codewalker.vsh
Executable file
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
import freeflowuniverse.herolib.lib.lang.codewalker
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
import freeflowuniverse.herolib.osal.core as osal
|
||||
|
||||
// Create test directory structure in /tmp/filemap
|
||||
test_source := '/tmp/filemap'
|
||||
test_destination := '/tmp/filemap2'
|
||||
|
||||
// Clean up any existing test directories
|
||||
osal.rm(todelete: test_source)!
|
||||
osal.rm(todelete: test_destination)!
|
||||
|
||||
// Create source directory
|
||||
mut source_dir := pathlib.get(test_source)!
|
||||
source_dir.dir_ensure()!
|
||||
|
||||
// Create test files with content
|
||||
mut file1 := source_dir.join('file1.txt')!
|
||||
file1.write('Content of file 1')!
|
||||
|
||||
mut subdir := source_dir.join('subdir')!
|
||||
subdir.dir_ensure()!
|
||||
|
||||
mut file2 := subdir.join('file2.txt')!
|
||||
file2.write('Content of file 2')!
|
||||
|
||||
mut file3 := subdir.join('file3.md')!
|
||||
file3.write('# Markdown file content')!
|
||||
|
||||
println('Test files created in ${test_source}')
|
||||
|
||||
// Create CodeWalker instance
|
||||
mut cw := codewalker.new(name: 'test', source: test_source)!
|
||||
|
||||
// Verify files are in the map
|
||||
println('\nFiles in filemap:')
|
||||
cw.filemap.write()
|
||||
|
||||
// Export files to destination
|
||||
cw.filemap.export(test_destination)!
|
||||
|
||||
println('\nFiles exported to ${test_destination}')
|
||||
|
||||
// Verify export by listing files in destination
|
||||
mut dest_dir := pathlib.get(test_destination)!
|
||||
if dest_dir.exists() {
|
||||
mut files := dest_dir.list(recursive: true)!
|
||||
println('\nFiles in destination directory:')
|
||||
for file in files {
|
||||
if file.is_file() {
|
||||
println(' ${file.path}')
|
||||
println(' Content: ${file.read()!}')
|
||||
}
|
||||
}
|
||||
println('\nExport test completed successfully!')
|
||||
} else {
|
||||
println('\nError: Destination directory was not created')
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.lang.python
|
||||
import json
|
||||
|
||||
20
lib/clients/zinit/instruct.md
Normal file
20
lib/clients/zinit/instruct.md
Normal file
@@ -0,0 +1,20 @@
|
||||
| RPC Call | Example In | Example Out | 1-Sentence Description |
|
||||
|---------|-----------|------------|------------------------|
|
||||
| `rpc.discover` | `{}` | `{ "openrpc": "1.2.6", "info": { "version": "1.0.0", "title": "Zinit JSON-RPC API" } }` | Returns the full OpenRPC specification of the Zinit API. |
|
||||
| `service_list` | `{}` | `{ "service1": "Running", "service2": "Success", "service3": "Error" }` | Lists all managed services and their current states. |
|
||||
| `service_status` | `{ "name": "redis" }` | `{ "name": "redis", "pid": 1234, "state": "Running", "target": "Up", "after": { "dependency1": "Success", "dependency2": "Running" } }` | Returns detailed status including PID, state, dependencies, and target. |
|
||||
| `service_start` | `{ "name": "redis" }` | `null` | Starts a specified service; returns no result on success. |
|
||||
| `service_stop` | `{ "name": "redis" }` | `null` | Stops a specified service; returns no result on success. |
|
||||
| `service_monitor` | `{ "name": "redis" }` | `null` | Starts monitoring a service using its configuration from the config directory. |
|
||||
| `service_forget` | `{ "name": "redis" }` | `null` | Stops monitoring a service; only allowed for stopped services. |
|
||||
| `service_kill` | `{ "name": "redis", "signal": "SIGTERM" }` | `null` | Sends a signal (e.g., SIGTERM) to a running service. |
|
||||
| `system_shutdown` | `{}` | `null` | Stops all services and powers off the system. |
|
||||
| `system_reboot` | `{}` | `null` | Stops all services and reboots the system. |
|
||||
| `service_create` | `{ "name": "redis", "content": { "exec": "redis-server", "oneshot": false, "after": ["network"], "log": "stdout", "env": { "REDIS_PASSWORD": "secret" }, "shutdown_timeout": 30 } }` | `"service_config/redis"` | Creates a new service configuration file with specified settings. |
|
||||
| `service_delete` | `{ "name": "redis" }` | `"service deleted"` | Deletes a service configuration file. |
|
||||
| `service_get` | `{ "name": "redis" }` | `{ "exec": "redis-server", "oneshot": false, "after": ["network"] }` | Retrieves the configuration content of a service. |
|
||||
| `service_stats` | `{ "name": "redis" }` | `{ "name": "redis", "pid": 1234, "memory_usage": 10485760, "cpu_usage": 2.5, "children": [ { "pid": 1235, "memory_usage": 5242880, "cpu_usage": 1.2 } ] }` | Returns memory and CPU usage statistics for a running service. |
|
||||
| `system_start_http_server` | `{ "address": "127.0.0.1:8080" }` | `"HTTP server started at 127.0.0.1:8080"` | Starts an HTTP/RPC server on the specified network address. |
|
||||
| `system_stop_http_server` | `{}` | `null` | Stops the currently running HTTP/RPC server. |
|
||||
| `stream_currentLogs` | `{ "name": "redis" }` | `["2023-01-01T12:00:00 redis: Starting service", "2023-01-01T12:00:02 redis: Service started"]` | Returns current logs; optionally filtered by service name. |
|
||||
| `stream_subscribeLogs` | `{ "name": "redis" }` | `"2023-01-01T12:00:00 redis: Service started"` | Subscribes to real-time log messages, optionally filtered by service. |
|
||||
@@ -19,10 +19,15 @@ fn startupcmd() ![]startupmanager.ZProcessNewArgs {
|
||||
mut peers_str := installer.peers.join(' ')
|
||||
mut tun_name := 'tun${installer.tun_nr}'
|
||||
|
||||
mut cmd:='mycelium --key-file ${osal.hero_path()!}/cfg/priv_key.bin --peers ${peers_str} --tun-name ${tun_name}'
|
||||
if core.is_osx()! {
|
||||
cmd = "sudo ${cmd}"
|
||||
}
|
||||
|
||||
res << startupmanager.ZProcessNewArgs{
|
||||
name: 'mycelium'
|
||||
startuptype: .zinit
|
||||
cmd: 'mycelium --key-file ${osal.hero_path()!}/cfg/priv_key.bin --peers ${peers_str} --tun-name ${tun_name}'
|
||||
cmd: cmd
|
||||
env: {
|
||||
'HOME': os.home_dir()
|
||||
}
|
||||
|
||||
44
lib/lang/codewalker/README.md
Normal file
44
lib/lang/codewalker/README.md
Normal file
@@ -0,0 +1,44 @@
|
||||
# CodeWalker Module
|
||||
|
||||
The CodeWalker module provides functionality to walk through directories and create a map of files with their content. It's particularly useful for processing code directories while respecting gitignore patterns.
|
||||
|
||||
## Features
|
||||
|
||||
- Walk through directories recursively
|
||||
- Respect gitignore patterns to exclude files
|
||||
- Store file content in memory
|
||||
- Export files back to a directory structure
|
||||
|
||||
## Usage
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.lib.lang.codewalker
|
||||
|
||||
mut cw := codewalker.new('/tmp/adir')!
|
||||
|
||||
// Get content of a specific file
|
||||
content := cw.filemap.get('path/to/file.txt')!
|
||||
|
||||
// return output again
|
||||
cw.filemap.content()
|
||||
|
||||
// Export all files to a destination directory
|
||||
cw.filemap.export('/tmp/exported_files')!
|
||||
|
||||
```
|
||||
|
||||
### format of filemap
|
||||
|
||||
```
|
||||
|
||||
text before will be ignored
|
||||
|
||||
===FILE:filename===
|
||||
code
|
||||
===FILE:filename===
|
||||
code
|
||||
===END===
|
||||
|
||||
text behind will be ignored
|
||||
|
||||
```
|
||||
177
lib/lang/codewalker/codewalker.v
Normal file
177
lib/lang/codewalker/codewalker.v
Normal file
@@ -0,0 +1,177 @@
|
||||
module codewalker
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
pub struct CodeWalker {
|
||||
pub mut:
|
||||
source string
|
||||
gitignore_patterns []string
|
||||
errors []Error
|
||||
}
|
||||
|
||||
fn (cw CodeWalker) default_gitignore() []string {
|
||||
return [
|
||||
'__pycache__/',
|
||||
'*.py[cod]',
|
||||
'*\$py.class',
|
||||
'*.so',
|
||||
'.Python',
|
||||
'build/',
|
||||
'develop-eggs/',
|
||||
'dist/',
|
||||
'downloads/',
|
||||
'eggs/',
|
||||
'.eggs/',
|
||||
'lib/',
|
||||
'lib64/',
|
||||
'parts/',
|
||||
'sdist/',
|
||||
'var/',
|
||||
'wheels/',
|
||||
'*.egg-info/',
|
||||
'.installed.cfg',
|
||||
'*.egg',
|
||||
'.env',
|
||||
'.venv',
|
||||
'venv/',
|
||||
'.tox/',
|
||||
'.nox/',
|
||||
'.coverage',
|
||||
'.coveragerc',
|
||||
'coverage.xml',
|
||||
'*.cover',
|
||||
'*.gem',
|
||||
'*.pyc',
|
||||
'.cache',
|
||||
'.pytest_cache/',
|
||||
'.mypy_cache/',
|
||||
'.hypothesis/',
|
||||
]
|
||||
}
|
||||
|
||||
// walk scans cw.source recursively and returns a FileMap mapping the
// relative path of every non-ignored file to its content.
// Returns an error when source is unset or does not exist.
pub fn (mut cw CodeWalker) walk() !FileMap {
	if cw.source == '' {
		return error('Source path is not set')
	}
	mut dir := pathlib.get(cw.source)
	if !dir.exists() {
		return error('Source directory "${cw.source}" does not exist')
	}
	mut files := dir.list(recursive: true)!
	mut fm := FileMap{
		source: cw.source
	}
	for mut file in files.paths {
		if !file.is_file() {
			continue
		}
		relpath := file.path_relative(cw.source)!
		if cw.is_ignored(relpath) {
			continue
		}
		fm.content[relpath] = file.read()!
	}
	return fm
}

// is_ignored reports whether relpath matches any of cw.gitignore_patterns.
// Bug fix: previously every pattern was compared with contains(), so glob
// patterns like '*.so' could never match (the literal '*' never appears in a
// path) and directory patterns like 'lib/' matched any substring, ignoring
// unrelated files such as 'library.v'.
fn (cw CodeWalker) is_ignored(relpath string) bool {
	for pattern in cw.gitignore_patterns {
		if pattern.starts_with('*') {
			// glob suffix pattern, e.g. '*.so' -> match on the file suffix.
			// NOTE(review): character classes like '*.py[cod]' are still
			// matched literally, not expanded as globs.
			if relpath.ends_with(pattern.trim_left('*')) {
				return true
			}
			continue
		}
		if pattern.ends_with('/') {
			// directory pattern: match as a leading or inner path component
			name := pattern.trim_right('/')
			if relpath.starts_with(pattern) || relpath.contains('/${name}/') {
				return true
			}
			continue
		}
		// plain name: keep the original substring behavior
		if relpath.contains(pattern) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// error records a parse/walk problem on cw.errors. When fail is true the
// call aborts with an error message aggregating every error recorded so far.
pub fn (mut cw CodeWalker) error(msg string, linenr int, category string, fail bool) ! {
	cw.errors << Error{
		message: msg
		linenr: linenr
		category: category
	}
	if fail {
		mut errormsg := ''
		for e in cw.errors {
			errormsg += '${e.message} (line ${e.linenr}, category: ${e.category})\n'
		}
		// bug fix: the aggregated message was built but then discarded,
		// the code returned error(msg) instead of error(errormsg)
		return error(errormsg)
	}
}
|
||||
|
||||
// filename_get extracts the filename from a marker line of the documented
// form ===FILE:name=== (or ===END===). Returns the bare name; the literal
// 'END' marks the end of the filemap.
fn (mut cw CodeWalker) filename_get(line string, linenr int) !string {
	parts := line.split('===')
	if parts.len < 2 {
		cw.error('Invalid filename line: ${line}.', linenr, 'filename_get', true)!
		return error('invalid filename line') // unreachable: error() above always fails
	}
	// bug fix: trim_space() returns a plain string, the previous
	// `or { panic("bug") }` was invalid V
	mut name := parts[1].trim_space()
	// the documented marker format is ===FILE:name===; strip the prefix
	if name.starts_with('FILE:') {
		name = name.trim_string_left('FILE:').trim_space()
	}
	if name.len < 2 {
		cw.error('Invalid filename, < 2 chars: ${name}.', linenr, 'filename_get', true)!
		return error('invalid filename') // unreachable: error() above always fails
	}
	return name
}
|
||||
|
||||
// ParseState tracks where the filemap parser is within the input text.
enum ParseState {
	start      // before the first ===FILE:...=== marker; leading text is ignored
	blockfound // a file marker was just seen; its content lines follow
	in_block   // currently collecting content lines of the open file
	end        // ===END=== was reached; trailing text is ignored
}
|
||||
|
||||
// parse parses a filemap text of the documented form:
//
//   ===FILE:name===
//   ...content...
//   ===FILE:name2===
//   ...content...
//   ===END===
//
// Text before the first marker and after ===END=== is ignored.
// Bug fixes: `filenamenew` was used undeclared, `status`/`output` were
// undefined names (meant `state`/`block`), and the state machine never left
// .start, so no content was ever collected.
pub fn (mut cw CodeWalker) parse(content string) !FileMap {
	mut fm := FileMap{
		source: cw.source
	}
	mut filename := ''
	mut block := []string{}
	mut state := ParseState.start
	mut linenr := 0
	for line in content.split_into_lines() {
		linenr += 1
		line2 := line.trim_space()
		// a marker line looks like ===...=== with something in between
		is_marker := line2.starts_with('===') && line2.ends_with('===') && line2.len > 6
		if state == .end {
			// everything after ===END=== is ignored
			continue
		}
		if state == .start {
			if is_marker {
				filename = cw.filename_get(line2, linenr)!
				if filename == 'END' {
					cw.error('END found in ${line} at start not good.', linenr, 'parse', true)!
				}
				state = .in_block
			}
			// any other leading text is ignored
			continue
		}
		// state == .in_block: either a new marker closes the current file,
		// or the line belongs to the current file's content
		if is_marker {
			filenamenew := cw.filename_get(line2, linenr)!
			fm.content[filename] = block.join_lines()
			if filenamenew == 'END' {
				state = .end
				continue
			}
			block = []string{}
			filename = filenamenew
			continue
		}
		block << line
	}
	return fm
}
|
||||
26
lib/lang/codewalker/factory.v
Normal file
26
lib/lang/codewalker/factory.v
Normal file
@@ -0,0 +1,26 @@
|
||||
module codewalker
|
||||
|
||||
|
||||
// CodeWalkerArgs are the optional arguments for new().
// Provide either source (a directory to walk) or content (a filemap text to parse).
@[params]
pub struct CodeWalkerArgs {
	source  string // root directory to scan recursively
	content string // pre-rendered filemap text to parse instead of walking disk
}
|
||||
|
||||
// new creates a CodeWalker. When args.content is set, the text is parsed as
// a filemap; otherwise args.source is walked on disk.
pub fn new(args CodeWalkerArgs) !CodeWalker {
	mut cw := CodeWalker{
		source: args.source
	}
	// bug fix: patterns must be loaded BEFORE walking, otherwise walk()
	// runs with an empty gitignore_patterns list
	cw.gitignore_patterns = cw.default_gitignore()
	// bug fix: `if args.content {` is invalid truthiness on a string, and
	// CodeWalker has no `filemap` field to assign to — parse the text instead
	if args.content.len > 0 {
		cw.parse(args.content)!
	} else {
		cw.walk()!
	}
	return cw
}
|
||||
48
lib/lang/codewalker/filemap.v
Normal file
48
lib/lang/codewalker/filemap.v
Normal file
@@ -0,0 +1,48 @@
|
||||
module codewalker
|
||||
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
// FileMap holds a set of files as relative path -> content, together with
// the source root they were collected from.
pub struct FileMap {
pub mut:
	source  string            // root directory the files were read from
	content map[string]string // relative file path -> file content
}
|
||||
|
||||
// content prints the filemap to stdout in the documented filemap format
// (===FILE:name=== markers terminated by ===END===), so the output can be
// fed back into CodeWalker.parse.
// NOTE(review): this method shares its name with the `content` field on
// FileMap; confirm V accepts the duplicate, otherwise it needs a rename.
pub fn (mut fm FileMap) content() {
	for filepath, filecontent in fm.content {
		// bug fix: the documented format is ===FILE:name===, the FILE:
		// prefix was previously missing
		println('===FILE:${filepath}===')
		println(filecontent)
	}
	println('===END===')
}
|
||||
|
||||
// export writes every file in the map below the given destination directory,
// creating intermediate files as needed.
pub fn (mut fm FileMap) export(path string) ! {
	for filepath, filecontent in fm.content {
		// bug fix: files were previously written under fm.source, which
		// ignored the requested destination entirely
		dest := '${path}/${filepath}'
		mut fileout := pathlib.get_file(path: dest, create: true)!
		fileout.write(filecontent)!
	}
}
|
||||
|
||||
|
||||
// get returns the content stored under relpath, or an error when the path
// is not present in the map.
pub fn (fm FileMap) get(relpath string) !string {
	if relpath in fm.content {
		return fm.content[relpath]
	}
	return error('File not found: ${relpath}')
}
|
||||
|
||||
// set stores (or overwrites) the content for relpath.
pub fn (mut fm FileMap) set(relpath string, content string) {
	fm.content[relpath] = content
}
|
||||
|
||||
// delete removes relpath from the map; removing a missing key is a no-op.
pub fn (mut fm FileMap) delete(relpath string) {
	fm.content.delete(relpath)
}
|
||||
|
||||
// find returns every stored path that starts with the given prefix.
pub fn (fm FileMap) find(path string) []string {
	return fm.content.keys().filter(it.starts_with(path))
}
|
||||
8
lib/lang/codewalker/model.v
Normal file
8
lib/lang/codewalker/model.v
Normal file
@@ -0,0 +1,8 @@
|
||||
module codewalker
|
||||
|
||||
// Error is a recorded CodeWalker parse/walk problem.
pub struct Error {
pub:
	message  string // human readable description of the problem
	linenr   int    // line number in the input where it was found
	category string // which operation reported it, e.g. 'parse' or 'filename_get'
}
|
||||
33
lib/lang/python/templates/openrpc/env.sh
Executable file
33
lib/lang/python/templates/openrpc/env.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/bin/bash
# Prepare and activate a uv-managed Python virtual environment next to this
# script, then sync dependencies from pyproject.toml.

# bug fix: SCRIPT_DIR was used below (PATH export) but never defined
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
cd "$SCRIPT_DIR"

# Check if uv is installed
if ! command -v uv &> /dev/null; then
    echo "❌ uv is not installed. Please install uv first:"
    echo "   curl -LsSf https://astral.sh/uv/install.sh | sh"
    echo "   or visit: https://docs.astral.sh/uv/getting-started/installation/"
    exit 1
fi

echo "✅ uv found: $(uv --version)"

# Create virtual environment if it doesn't exist
if [ ! -d ".venv" ]; then
    echo "📦 Creating Python virtual environment..."
    uv venv
    echo "✅ Virtual environment created"
else
    echo "✅ Virtual environment already exists"
fi

export PATH="$SCRIPT_DIR/.venv/bin:$PATH"

# Activate virtual environment
echo "🔄 Activating virtual environment..."
source .venv/bin/activate

echo "✅ Virtual environment activated"

# Install/sync project dependencies
uv sync
|
||||
|
||||
126
lib/lang/python/templates/openrpc/main.py
Normal file
126
lib/lang/python/templates/openrpc/main.py
Normal file
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import json
|
||||
import signal
|
||||
import asyncio
|
||||
from typing import Union
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI, Response, WebSocket, WebSocketDisconnect
|
||||
from jsonrpcobjects.objects import (
|
||||
ErrorResponse,
|
||||
Notification,
|
||||
ParamsNotification,
|
||||
ParamsRequest,
|
||||
Request,
|
||||
ResultResponse,
|
||||
)
|
||||
from openrpc import RPCServer
|
||||
|
||||
# ---------- FastAPI + OpenRPC ----------
|
||||
app = FastAPI(title="Calculator JSON-RPC (HTTP + UDS)")
|
||||
RequestType = Union[ParamsRequest, Request, ParamsNotification, Notification]
|
||||
rpc = RPCServer(title="Calculator API", version="1.0.0")
|
||||
|
||||
# Calculator methods
|
||||
@rpc.method()
async def add(a: float, b: float) -> float:
    """Return the sum of the two operands."""
    result = a + b
    return result
|
||||
|
||||
@rpc.method()
async def subtract(a: float, b: float) -> float:
    """Return a minus b."""
    result = a - b
    return result
|
||||
|
||||
@rpc.method()
async def multiply(a: float, b: float) -> float:
    """Return the product of the two operands."""
    result = a * b
    return result
|
||||
|
||||
@rpc.method()
async def divide(a: float, b: float) -> float:
    """Return a divided by b.

    Raises ValueError on b == 0; the RPC library turns the exception into a
    JSON-RPC error response.
    """
    if b == 0:
        raise ValueError("Division by zero")
    return a / b
|
||||
|
||||
# Expose the generated OpenRPC spec as REST (proxy to rpc.discover)
|
||||
@app.get("/openrpc.json")
async def openrpc_json() -> Response:
    """Serve the generated OpenRPC spec as REST by proxying rpc.discover."""
    discover_req = '{"jsonrpc":"2.0","id":1,"method":"rpc.discover"}'
    raw = await rpc.process_request_async(discover_req)  # JSON string
    spec = json.loads(raw)["result"]
    return Response(content=json.dumps(spec), media_type="application/json")
|
||||
|
||||
# JSON-RPC over WebSocket
|
||||
@app.websocket("/rpc")
async def ws_process_rpc(websocket: WebSocket) -> None:
    """Serve JSON-RPC over a WebSocket.

    Each incoming text frame is processed concurrently; the response (if any)
    is sent back on the same socket.
    """
    await websocket.accept()
    # bug fix: asyncio.create_task results were discarded; the event loop
    # keeps only weak references, so un-referenced tasks can be garbage
    # collected mid-execution. Hold strong refs until each task finishes.
    pending: set = set()
    try:
        async def _process_rpc(request: str) -> None:
            json_rpc_response = await rpc.process_request_async(request)
            if json_rpc_response is not None:
                await websocket.send_text(json_rpc_response)

        while True:
            data = await websocket.receive_text()
            task = asyncio.create_task(_process_rpc(data))
            pending.add(task)
            task.add_done_callback(pending.discard)
    except WebSocketDisconnect:
        # bug fix: the socket is already closed once WebSocketDisconnect is
        # raised; calling websocket.close() again errors. Nothing to do.
        pass
|
||||
|
||||
# JSON-RPC over HTTP POST
|
||||
@app.post("/rpc", response_model=Union[ErrorResponse, ResultResponse, None])
async def http_process_rpc(request: RequestType) -> Response:
    """Process a single JSON-RPC request delivered via HTTP POST."""
    payload = request.model_dump_json()
    rpc_response = await rpc.process_request_async(payload)
    return Response(content=rpc_response, media_type="application/json")
|
||||
|
||||
|
||||
# ---------- Run BOTH: TCP:7766 and UDS:/tmp/server1 ----------
|
||||
async def serve_both():
    """Run the same FastAPI app on both TCP 127.0.0.1:7766 and a unix socket.

    Handles SIGINT/SIGTERM itself so both uvicorn servers shut down together,
    and removes the socket file afterwards.
    """
    uds_path = "/tmp/server1"

    # Clean stale socket path (if previous run crashed); the isfile() guard
    # avoids deleting a regular file that happens to sit at this path
    try:
        if os.path.exists(uds_path) and not os.path.isfile(uds_path):
            os.unlink(uds_path)
    except FileNotFoundError:
        pass

    # Create two uvicorn servers sharing the same FastAPI app
    tcp_config = uvicorn.Config(app=app, host="127.0.0.1", port=7766, log_level="info")
    uds_config = uvicorn.Config(app=app, uds=uds_path, log_level="info")

    tcp_server = uvicorn.Server(tcp_config)
    uds_server = uvicorn.Server(uds_config)

    # We'll handle signals ourselves (avoid conflicts between two servers)
    # NOTE(review): on recent uvicorn versions install_signal_handlers is a
    # method; confirm this attribute override still disables the handlers
    tcp_server.install_signal_handlers = False
    uds_server.install_signal_handlers = False

    loop = asyncio.get_running_loop()

    def _graceful_shutdown():
        # Flag both servers; their serve() loops exit on the next iteration
        tcp_server.should_exit = True
        uds_server.should_exit = True

    for sig in (signal.SIGINT, signal.SIGTERM):
        try:
            loop.add_signal_handler(sig, _graceful_shutdown)
        except NotImplementedError:
            # e.g., on Windows; best-effort
            pass

    try:
        # Run both listeners concurrently until both shut down
        await asyncio.gather(
            tcp_server.serve(),
            uds_server.serve(),
        )
    finally:
        # Cleanup the socket file on exit
        try:
            if os.path.exists(uds_path) and not os.path.isfile(uds_path):
                os.unlink(uds_path)
        except Exception:
            pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run both listeners (TCP :7766 and the unix socket) until shutdown.
    asyncio.run(serve_both())
|
||||
38
lib/lang/python/templates/openrpc/methods.py
Normal file
38
lib/lang/python/templates/openrpc/methods.py
Normal file
@@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import json
|
||||
import signal
|
||||
import asyncio
|
||||
from typing import Union
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI, Response, WebSocket, WebSocketDisconnect
|
||||
from jsonrpcobjects.objects import (
|
||||
ErrorResponse,
|
||||
Notification,
|
||||
ParamsNotification,
|
||||
ParamsRequest,
|
||||
Request,
|
||||
ResultResponse,
|
||||
)
|
||||
from openrpc import RPCServer
|
||||
|
||||
# Calculator methods
# NOTE(review): `rpc` is not defined anywhere in this module — these handlers
# appear to be a template fragment that assumes an `rpc = RPCServer(...)` is
# in scope where this file's contents are spliced in; importing this module
# as-is would raise NameError. Confirm intended usage.
@rpc.method()
async def add(a: float, b: float) -> float:
    """Return the sum of the two operands."""
    return a + b

@rpc.method()
async def subtract(a: float, b: float) -> float:
    """Return a minus b."""
    return a - b

@rpc.method()
async def multiply(a: float, b: float) -> float:
    """Return the product of the two operands."""
    return a * b

@rpc.method()
async def divide(a: float, b: float) -> float:
    """Return a / b; raises ValueError when b is zero."""
    if b == 0:
        # Keep it simple; library turns this into a JSON-RPC error
        raise ValueError("Division by zero")
    return a / b
|
||||
28
lib/lang/python/templates/openrpc/pyproject.toml
Normal file
28
lib/lang/python/templates/openrpc/pyproject.toml
Normal file
@@ -0,0 +1,28 @@
|
||||
[project]
|
||||
name = "openrpc-server-1"
|
||||
version = "0.1.0"
|
||||
description = "Example openrpc server"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"fastapi>=0.104.0",
|
||||
"uvicorn[standard]>=0.24.0",
|
||||
"pydantic>=2.5.0",
|
||||
"httpx>=0.25.0",
|
||||
"fastapi-mcp>=0.1.0",
|
||||
"pydantic-settings>=2.1.0",
|
||||
"python-multipart>=0.0.6",
|
||||
"jinja2>=3.1.2",
|
||||
"click>=8.1.0",
|
||||
"openrpc>=10.4.0"
|
||||
]
|
||||
|
||||
|
||||
[tool.uv]
|
||||
dev-dependencies = [
|
||||
"pytest>=7.4.0",
|
||||
"pytest-asyncio>=0.21.0",
|
||||
"black>=23.0.0",
|
||||
"isort>=5.12.0",
|
||||
"mypy>=1.7.0",
|
||||
]
|
||||
0
lib/lang/python/templates/openrpc/readme.md
Normal file
0
lib/lang/python/templates/openrpc/readme.md
Normal file
9
lib/lang/python/templates/openrpc/start.sh
Executable file
9
lib/lang/python/templates/openrpc/start.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
# Launch the openrpc example server from this script's directory.

set -e  # Exit on any error

SCRIPT_DIR="$(dirname "$0")"
cd "$SCRIPT_DIR"

# Prepare/activate the uv virtual environment
source env.sh

# Start the JSON-RPC server (TCP :7766 + unix socket)
python main.py
|
||||
44
lib/lang/python/templates/openrpc/test.sh
Executable file
44
lib/lang/python/templates/openrpc/test.sh
Executable file
@@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env bash
# Smoke-test the running JSON-RPC server over both HTTP (TCP :7766) and the
# unix domain socket. Requires curl and jq; the server must already be up.
set -euo pipefail

HTTP_URL="http://127.0.0.1:7766/rpc"
HTTP_SPEC="http://127.0.0.1:7766/openrpc.json"
UDS_PATH="/tmp/server1"
# Host part is irrelevant when curl talks through --unix-socket
UDS_URL="http://nothing/rpc"
UDS_SPEC="http://nothing/openrpc.json"

# Print a failure message and abort the whole test run
fail() {
    echo "❌ Test failed: $1"
    exit 1
}

echo "🔎 Testing HTTP endpoint..."
resp_http=$(curl -s -H 'content-type: application/json' \
    -d '{"jsonrpc":"2.0","id":1,"method":"add","params":{"a":2,"b":3}}' \
    "$HTTP_URL")

# NOTE(review): the expected string "5.0" depends on jq's float formatting;
# some jq versions print 5.0 as "5" — confirm against the installed jq.
val_http=$(echo "$resp_http" | jq -r '.result')
[[ "$val_http" == "5.0" ]] || fail "HTTP add(2,3) expected 5, got '$val_http'"

echo "✅ HTTP add works"

# The spec's "openrpc" field carries the spec version, expected to be 1.x
spec_http=$(curl -s "$HTTP_SPEC" | jq -r '.openrpc')
[[ "$spec_http" =~ ^1\..* ]] || fail "HTTP spec invalid"
echo "✅ HTTP spec available"

echo "🔎 Testing UDS endpoint..."
resp_uds=$(curl -s --unix-socket "$UDS_PATH" \
    -H 'content-type: application/json' \
    -d '{"jsonrpc":"2.0","id":2,"method":"add","params":{"a":10,"b":4}}' \
    "$UDS_URL")

val_uds=$(echo "$resp_uds" | jq -r '.result')
[[ "$val_uds" == "14.0" ]] || fail "UDS add(10,4) expected 14, got '$val_uds'"

echo "✅ UDS add works"

spec_uds=$(curl -s --unix-socket "$UDS_PATH" "$UDS_SPEC" | jq -r '.openrpc')
[[ "$spec_uds" =~ ^1\..* ]] || fail "UDS spec invalid"
echo "✅ UDS spec available"

echo "🎉 All tests passed successfully"
|
||||
@@ -80,7 +80,7 @@ pub fn (mut sm StartupManager) new(args ZProcessNewArgs) ! {
|
||||
test: args.cmd_test // Direct mapping
|
||||
oneshot: args.oneshot // Use the oneshot flag directly
|
||||
after: args.after // Direct mapping
|
||||
// log: "" // Default to zinit's default or add a field to ZProcessNewArgs
|
||||
log: "ring"
|
||||
env: args.env // Direct mapping
|
||||
dir: args.workdir // Direct mapping
|
||||
shutdown_timeout: 0 // Default, or add to ZProcessNewArgs if needed
|
||||
|
||||
Reference in New Issue
Block a user