2025-04-21 05:37:45 +02:00
parent f386c67acf
commit de78c229ce
20 changed files with 1830 additions and 4098 deletions

View File

@@ -1,260 +0,0 @@
module models
import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.data.encoder
// Agent represents a service provider that can execute jobs
pub struct Agent {
pub mut:
id u32
pubkey string // pubkey using ed25519
address string // where we can find the agent
port u16 // default 9999
description string // optional
status AgentStatus
services []AgentService
signature string // signature as done by private key of $address+$port+$description+$status
}
@[params]
pub struct ServiceParams {
pub mut:
actor string
description string
}
// new_service creates a new AgentService for this agent
pub fn (mut self Agent) new_service(args ServiceParams) &AgentService {
mut service := AgentService{
actor: args.actor
description: args.description
status: .ok
public: true
}
self.services << service
return &self.services[self.services.len - 1]
}
@[params]
pub struct ActionParams {
pub mut:
action string
description string
}
// new_action creates a new AgentServiceAction for the specified service
pub fn (mut service AgentService) new_action(args ActionParams) &AgentServiceAction {
mut service_action := AgentServiceAction{
action: args.action
description: args.description
status: .ok
public: true
}
service.actions << service_action
return &service.actions[service.actions.len - 1]
}
// AgentStatus represents the current state of an agent
pub struct AgentStatus {
pub mut:
guid string // unique id for the job
timestamp_first ourtime.OurTime // when the agent came online
timestamp_last ourtime.OurTime // last time the agent let us know that it is working
status AgentState // current state of the agent
}
// AgentService represents a service provided by an agent
pub struct AgentService {
pub mut:
actor string // name of the actor providing the service
actions []AgentServiceAction // available actions for this service
description string // optional description
status AgentServiceState // current state of the service
public bool // true if everyone can use it; false means use is restricted to certain people
}
// AgentServiceAction represents an action that can be performed by a service
pub struct AgentServiceAction {
pub mut:
action string // which action
description string // optional description
params map[string]string // e.g. name:'name of the vm' ...
params_example map[string]string // e.g. name:'myvm'
status AgentServiceState // current state of the action
public bool // true if everyone can use it; false means use is restricted to certain people
}
// AgentState represents the possible states of an agent
pub enum AgentState {
ok // agent is functioning normally
down // agent is not responding
error // agent encountered an error
halted // agent has been manually stopped
}
// AgentServiceState represents the possible states of an agent service or action
pub enum AgentServiceState {
ok // service/action is functioning normally
down // service/action is not available
error // service/action encountered an error
halted // service/action has been manually stopped
}
pub fn (self Agent) index_keys() map[string]string {
return {
'pubkey': self.pubkey
}
}
// dumps serializes the Agent struct to binary format using the encoder
pub fn (self Agent) dumps() ![]u8 {
mut e := encoder.new()
// Add unique encoding ID to identify this type of data
e.add_u16(100)
// Encode Agent fields
e.add_string(self.pubkey)
e.add_string(self.address)
e.add_u16(self.port)
e.add_string(self.description)
// Encode AgentStatus
e.add_string(self.status.guid)
e.add_ourtime(self.status.timestamp_first)
e.add_ourtime(self.status.timestamp_last)
e.add_u8(u8(self.status.status))
// Encode services array
e.add_u16(u16(self.services.len))
for service in self.services {
// Encode AgentService fields
e.add_string(service.actor)
e.add_string(service.description)
e.add_u8(u8(service.status))
e.add_u8(u8(service.public))
// Encode actions array
e.add_u16(u16(service.actions.len))
for action in service.actions {
// Encode AgentServiceAction fields
e.add_string(action.action)
e.add_string(action.description)
e.add_u8(u8(action.status))
e.add_u8(u8(action.public))
// Encode params map
e.add_map_string(action.params)
// Encode params_example map
e.add_map_string(action.params_example)
}
}
// Encode signature
e.add_string(self.signature)
return e.data
}
// agent_loads deserializes binary data into an Agent struct
pub fn agent_loads(data []u8) !Agent {
mut d := encoder.decoder_new(data)
// Check encoding ID to verify this is the correct type of data
encoding_id := d.get_u16()!
if encoding_id != 100 {
return error('Wrong file type: expected encoding ID 100, got ${encoding_id}, for agent')
}
mut self := Agent{}
// Decode Agent fields
self.pubkey = d.get_string()!
self.address = d.get_string()!
self.port = d.get_u16()!
self.description = d.get_string()!
// Decode AgentStatus
self.status.guid = d.get_string()!
self.status.timestamp_first = d.get_ourtime()!
self.status.timestamp_last = d.get_ourtime()!
status_val := d.get_u8()!
self.status.status = match status_val {
0 { AgentState.ok }
1 { AgentState.down }
2 { AgentState.error }
3 { AgentState.halted }
else { return error('Invalid AgentState value: ${status_val}') }
}
// Decode services array
services_len := d.get_u16()!
self.services = []AgentService{len: int(services_len)}
for i in 0 .. services_len {
mut service := AgentService{}
// Decode AgentService fields
service.actor = d.get_string()!
service.description = d.get_string()!
service_status_val := d.get_u8()!
service.status = match service_status_val {
0 { AgentServiceState.ok }
1 { AgentServiceState.down }
2 { AgentServiceState.error }
3 { AgentServiceState.halted }
else { return error('Invalid AgentServiceState value: ${service_status_val}') }
}
service.public = d.get_u8()! == 1
// Decode actions array
actions_len := d.get_u16()!
service.actions = []AgentServiceAction{len: int(actions_len)}
for j in 0 .. actions_len {
mut action := AgentServiceAction{}
// Decode AgentServiceAction fields
action.action = d.get_string()!
action.description = d.get_string()!
action_status_val := d.get_u8()!
action.status = match action_status_val {
0 { AgentServiceState.ok }
1 { AgentServiceState.down }
2 { AgentServiceState.error }
3 { AgentServiceState.halted }
else { return error('Invalid AgentServiceState value: ${action_status_val}') }
}
action.public = d.get_u8()! == 1
// Decode params map
action.params = d.get_map_string()!
// Decode params_example map
action.params_example = d.get_map_string()!
service.actions[j] = action
}
self.services[i] = service
}
// Decode signature
self.signature = d.get_string()!
return self
}
// loads deserializes binary data into the Agent struct
pub fn (mut self Agent) loads(data []u8) ! {
loaded := agent_loads(data)!
// Copy all fields from loaded to self
self.id = loaded.id
self.pubkey = loaded.pubkey
self.address = loaded.address
self.port = loaded.port
self.description = loaded.description
self.status = loaded.status
self.services = loaded.services
self.signature = loaded.signature
}
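A minimal usage sketch (hypothetical, written as if it lived in the same models module) showing how the new_service/new_action helpers and the dumps/agent_loads round trip fit together:
fn example_agent_roundtrip() ! {
	mut agent := Agent{
		pubkey: 'ed25519:example-key'
		address: '10.0.0.1'
		port: 9999
	}
	// register a service and one action through the helper constructors
	mut svc := agent.new_service(actor: 'vm', description: 'VM management')
	svc.new_action(action: 'create', description: 'Create a VM')
	// serialize to the binary format and read it back
	data := agent.dumps()!
	restored := agent_loads(data)!
	assert restored.services[0].actor == 'vm'
	assert restored.services[0].actions[0].action == 'create'
}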

View File

@@ -1,324 +0,0 @@
module models
import freeflowuniverse.herolib.data.ourtime
fn test_agent_dumps_loads() {
// Create a test agent with some sample data
mut agent := Agent{
pubkey: 'ed25519:1234567890abcdef'
address: '192.168.1.100'
port: 9999
description: 'Test agent for binary encoding'
status: AgentStatus{
guid: 'agent-123'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: AgentState.ok
}
signature: 'signature-data-here'
}
// Add a service
mut service := AgentService{
actor: 'vm'
description: 'Virtual machine management'
status: AgentServiceState.ok
public: true
}
// Add an action to the service
action := AgentServiceAction{
action: 'create'
description: 'Create a new virtual machine'
status: AgentServiceState.ok
public: true
params: {
'name': 'Name of the VM'
'memory': 'Memory in MB'
'cpu': 'Number of CPU cores'
}
params_example: {
'name': 'my-test-vm'
'memory': '2048'
'cpu': '2'
}
}
service.actions << action
// Add another action
action2 := AgentServiceAction{
action: 'delete'
description: 'Delete a virtual machine'
status: AgentServiceState.ok
public: false
params: {
'name': 'Name of the VM to delete'
}
params_example: {
'name': 'my-test-vm'
}
}
service.actions << action2
agent.services << service
// Test binary encoding
binary_data := agent.dumps() or {
assert false, 'Failed to encode agent: ${err}'
return
}
// Test binary decoding
mut decoded_agent := Agent{}
decoded_agent.loads(binary_data) or {
assert false, 'Failed to decode agent: ${err}'
return
}
// Verify the decoded data matches the original
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.description == agent.description
assert decoded_agent.signature == agent.signature
// Verify status
assert decoded_agent.status.guid == agent.status.guid
assert decoded_agent.status.status == agent.status.status
// Verify services
assert decoded_agent.services.len == agent.services.len
if decoded_agent.services.len > 0 {
service1 := decoded_agent.services[0]
original_service := agent.services[0]
assert service1.actor == original_service.actor
assert service1.description == original_service.description
assert service1.status == original_service.status
assert service1.public == original_service.public
// Verify actions
assert service1.actions.len == original_service.actions.len
if service1.actions.len > 0 {
action1 := service1.actions[0]
original_action := original_service.actions[0]
assert action1.action == original_action.action
assert action1.description == original_action.description
assert action1.status == original_action.status
assert action1.public == original_action.public
// Verify params
assert action1.params.len == original_action.params.len
for key, value in original_action.params {
assert key in action1.params
assert action1.params[key] == value
}
// Verify params_example
assert action1.params_example.len == original_action.params_example.len
for key, value in original_action.params_example {
assert key in action1.params_example
assert action1.params_example[key] == value
}
}
}
println('Agent binary encoding/decoding test passed successfully')
}
fn test_agent_complex_structure() {
// Create a more complex agent with multiple services and actions
mut agent := Agent{
pubkey: 'ed25519:complex-test-key'
address: '10.0.0.5'
port: 8080
description: 'Complex test agent'
status: AgentStatus{
guid: 'complex-agent-456'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: AgentState.ok
}
signature: 'complex-signature-data'
}
// Add first service - VM management
mut vm_service := AgentService{
actor: 'vm'
description: 'VM management service'
status: AgentServiceState.ok
public: true
}
// Add actions to VM service
vm_service.actions << AgentServiceAction{
action: 'create'
description: 'Create VM'
status: AgentServiceState.ok
public: true
params: {
'name': 'VM name'
'size': 'VM size'
}
params_example: {
'name': 'test-vm'
'size': 'medium'
}
}
vm_service.actions << AgentServiceAction{
action: 'start'
description: 'Start VM'
status: AgentServiceState.ok
public: true
params: {
'name': 'VM name'
}
params_example: {
'name': 'test-vm'
}
}
// Add second service - Storage management
mut storage_service := AgentService{
actor: 'storage'
description: 'Storage management service'
status: AgentServiceState.ok
public: false
}
// Add actions to storage service
storage_service.actions << AgentServiceAction{
action: 'create_volume'
description: 'Create storage volume'
status: AgentServiceState.ok
public: false
params: {
'name': 'Volume name'
'size': 'Volume size in GB'
}
params_example: {
'name': 'data-vol'
'size': '100'
}
}
storage_service.actions << AgentServiceAction{
action: 'attach_volume'
description: 'Attach volume to VM'
status: AgentServiceState.ok
public: false
params: {
'volume': 'Volume name'
'vm': 'VM name'
'mount_point': 'Mount point'
}
params_example: {
'volume': 'data-vol'
'vm': 'test-vm'
'mount_point': '/data'
}
}
// Add services to agent
agent.services << vm_service
agent.services << storage_service
// Test binary encoding
binary_data := agent.dumps() or {
assert false, 'Failed to encode complex agent: ${err}'
return
}
// Test binary decoding
mut decoded_agent := Agent{}
decoded_agent.loads(binary_data) or {
assert false, 'Failed to decode complex agent: ${err}'
return
}
// Verify the decoded data
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.services.len == agent.services.len
// Verify first service (VM)
if decoded_agent.services.len > 0 {
vm := decoded_agent.services[0]
assert vm.actor == 'vm'
assert vm.actions.len == 2
// Check VM create action
create_action := vm.actions[0]
assert create_action.action == 'create'
assert create_action.params.len == 2
assert create_action.params['name'] == 'VM name'
// Check VM start action
start_action := vm.actions[1]
assert start_action.action == 'start'
assert start_action.params.len == 1
}
// Verify second service (Storage)
if decoded_agent.services.len > 1 {
storage := decoded_agent.services[1]
assert storage.actor == 'storage'
assert storage.public == false
assert storage.actions.len == 2
// Check storage attach action
attach_action := storage.actions[1]
assert attach_action.action == 'attach_volume'
assert attach_action.params.len == 3
assert attach_action.params['mount_point'] == 'Mount point'
assert attach_action.params_example['mount_point'] == '/data'
}
println('Complex agent binary encoding/decoding test passed successfully')
}
fn test_agent_empty_structures() {
// Test with empty arrays and maps
mut agent := Agent{
pubkey: 'ed25519:empty-test'
address: '127.0.0.1'
port: 7777
description: ''
status: AgentStatus{
guid: 'empty-agent'
timestamp_first: ourtime.now()
timestamp_last: ourtime.now()
status: AgentState.down
}
signature: ''
services: []
}
// Test binary encoding
binary_data := agent.dumps() or {
assert false, 'Failed to encode empty agent: ${err}'
return
}
// Test binary decoding
mut decoded_agent := Agent{}
decoded_agent.loads(binary_data) or {
assert false, 'Failed to decode empty agent: ${err}'
return
}
// Verify the decoded data
assert decoded_agent.pubkey == agent.pubkey
assert decoded_agent.address == agent.address
assert decoded_agent.port == agent.port
assert decoded_agent.description == ''
assert decoded_agent.signature == ''
assert decoded_agent.services.len == 0
assert decoded_agent.status.status == AgentState.down
println('Empty agent binary encoding/decoding test passed successfully')
}

View File

@@ -1,817 +0,0 @@
openapi: 3.1.0
info:
title: Herolib Circles Core API
description: API for managing Circles, Agents, and Names in the Herolib framework
version: 1.0.0
servers:
- url: https://api.example.com/v1
description: Main API server
components:
schemas:
# Agent related schemas
AgentState:
type: string
enum:
- ok
- down
- error
- halted
description: Represents the possible states of an agent
AgentServiceState:
type: string
enum:
- ok
- down
- error
- halted
description: Represents the possible states of an agent service or action
AgentStatus:
type: object
properties:
guid:
type: string
description: Unique id for the job
timestamp_first:
type: string
format: date-time
description: When agent came online
timestamp_last:
type: string
format: date-time
description: Last time agent let us know that it is working
status:
$ref: '#/components/schemas/AgentState'
required:
- guid
- timestamp_first
- timestamp_last
- status
AgentServiceAction:
type: object
properties:
action:
type: string
description: Which action
description:
type: string
description: Optional description
params:
type: object
additionalProperties:
type: string
description: Parameters for the action
params_example:
type: object
additionalProperties:
type: string
description: Example parameters
status:
$ref: '#/components/schemas/AgentServiceState'
public:
type: boolean
description: True if everyone can use it; false means use is restricted to certain people
required:
- action
- status
- public
AgentService:
type: object
properties:
actor:
type: string
description: Name of the actor providing the service
actions:
type: array
items:
$ref: '#/components/schemas/AgentServiceAction'
description: Available actions for this service
description:
type: string
description: Optional description
status:
$ref: '#/components/schemas/AgentServiceState'
public:
type: boolean
description: True if everyone can use it; false means use is restricted to certain people
required:
- actor
- actions
- status
- public
Agent:
type: object
properties:
id:
type: integer
format: uint32
description: Unique identifier
pubkey:
type: string
description: Public key using ed25519
address:
type: string
description: Where we can find the agent
port:
type: integer
format: uint16
description: Default 9999
description:
type: string
description: Optional description
status:
$ref: '#/components/schemas/AgentStatus'
services:
type: array
items:
$ref: '#/components/schemas/AgentService'
signature:
type: string
description: Signature as done by private key of $address+$port+$description+$status
required:
- id
- pubkey
- address
- port
- status
- services
- signature
ServiceParams:
type: object
properties:
actor:
type: string
description:
type: string
ActionParams:
type: object
properties:
action:
type: string
description:
type: string
# Circle related schemas
Role:
type: string
enum:
- admin
- stakeholder
- member
- contributor
- guest
description: Represents the role of a member in a circle
Member:
type: object
properties:
pubkeys:
type: array
items:
type: string
description: Public keys of the member
emails:
type: array
items:
type: string
description: List of emails
name:
type: string
description: Name of the member
description:
type: string
description: Optional description
role:
$ref: '#/components/schemas/Role'
required:
- pubkeys
- emails
- name
- role
Circle:
type: object
properties:
id:
type: integer
format: uint32
description: Unique id
name:
type: string
description: Name of the circle
description:
type: string
description: Optional description
members:
type: array
items:
$ref: '#/components/schemas/Member'
description: Members of the circle
required:
- id
- name
- members
# Name related schemas
RecordType:
type: string
enum:
- a
- aaaa
- cname
- mx
- ns
- ptr
- soa
- srv
- txt
description: Record types for a DNS record
Record:
type: object
properties:
name:
type: string
description: Name of the record
text:
type: string
category:
$ref: '#/components/schemas/RecordType'
addr:
type: array
items:
type: string
description: Multiple IP addresses for this record
required:
- name
- category
Name:
type: object
properties:
id:
type: integer
format: uint32
description: Unique id
domain:
type: string
description: Domain name
description:
type: string
description: Optional description
records:
type: array
items:
$ref: '#/components/schemas/Record'
description: DNS records
admins:
type: array
items:
type: string
description: Public keys of admins who can change it
required:
- id
- domain
- records
paths:
# Agent endpoints
/agents:
get:
summary: List all agents
description: Returns all agent IDs
operationId: listAgents
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
type: integer
format: uint32
post:
summary: Create a new agent
description: Creates a new agent
operationId: createAgent
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
responses:
'201':
description: Agent created successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
/agents/all:
get:
summary: Get all agents
description: Returns all agents
operationId: getAllAgents
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Agent'
/agents/{id}:
get:
summary: Get agent by ID
description: Returns a single agent
operationId: getAgentById
parameters:
- name: id
in: path
description: ID of agent to return
required: true
schema:
type: integer
format: uint32
responses:
'200':
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
'404':
description: Agent not found
put:
summary: Update an agent
description: Updates an existing agent
operationId: updateAgent
parameters:
- name: id
in: path
description: ID of agent to update
required: true
schema:
type: integer
format: uint32
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
responses:
'200':
description: Agent updated successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
'404':
description: Agent not found
delete:
summary: Delete an agent
description: Deletes an agent
operationId: deleteAgent
parameters:
- name: id
in: path
description: ID of agent to delete
required: true
schema:
type: integer
format: uint32
responses:
'204':
description: Agent deleted successfully
'404':
description: Agent not found
/agents/pubkey/{pubkey}:
get:
summary: Get agent by public key
description: Returns a single agent by its public key
operationId: getAgentByPubkey
parameters:
- name: pubkey
in: path
description: Public key of agent to return
required: true
schema:
type: string
responses:
'200':
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
'404':
description: Agent not found
delete:
summary: Delete an agent by public key
description: Deletes an agent by its public key
operationId: deleteAgentByPubkey
parameters:
- name: pubkey
in: path
description: Public key of agent to delete
required: true
schema:
type: string
responses:
'204':
description: Agent deleted successfully
'404':
description: Agent not found
/agents/pubkey/{pubkey}/status:
put:
summary: Update agent status
description: Updates just the status of an agent
operationId: updateAgentStatus
parameters:
- name: pubkey
in: path
description: Public key of agent to update
required: true
schema:
type: string
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/AgentState'
responses:
'200':
description: Agent status updated successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Agent'
'404':
description: Agent not found
/agents/pubkeys:
get:
summary: Get all agent public keys
description: Returns all agent public keys
operationId: getAllAgentPubkeys
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
type: string
/agents/service:
get:
summary: Get agents by service
description: Returns all agents that provide a specific service
operationId: getAgentsByService
parameters:
- name: actor
in: query
description: Actor name
required: true
schema:
type: string
- name: action
in: query
description: Action name
required: true
schema:
type: string
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Agent'
# Circle endpoints
/circles:
get:
summary: List all circles
description: Returns all circle IDs
operationId: listCircles
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
type: integer
format: uint32
post:
summary: Create a new circle
description: Creates a new circle
operationId: createCircle
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Circle'
responses:
'201':
description: Circle created successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Circle'
/circles/all:
get:
summary: Get all circles
description: Returns all circles
operationId: getAllCircles
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Circle'
/circles/{id}:
get:
summary: Get circle by ID
description: Returns a single circle
operationId: getCircleById
parameters:
- name: id
in: path
description: ID of circle to return
required: true
schema:
type: integer
format: uint32
responses:
'200':
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/Circle'
'404':
description: Circle not found
put:
summary: Update a circle
description: Updates an existing circle
operationId: updateCircle
parameters:
- name: id
in: path
description: ID of circle to update
required: true
schema:
type: integer
format: uint32
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Circle'
responses:
'200':
description: Circle updated successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Circle'
'404':
description: Circle not found
delete:
summary: Delete a circle
description: Deletes a circle
operationId: deleteCircle
parameters:
- name: id
in: path
description: ID of circle to delete
required: true
schema:
type: integer
format: uint32
responses:
'204':
description: Circle deleted successfully
'404':
description: Circle not found
/circles/name/{name}:
get:
summary: Get circle by name
description: Returns a single circle by its name
operationId: getCircleByName
parameters:
- name: name
in: path
description: Name of circle to return
required: true
schema:
type: string
responses:
'200':
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/Circle'
'404':
description: Circle not found
delete:
summary: Delete a circle by name
description: Deletes a circle by its name
operationId: deleteCircleByName
parameters:
- name: name
in: path
description: Name of circle to delete
required: true
schema:
type: string
responses:
'204':
description: Circle deleted successfully
'404':
description: Circle not found
# Name endpoints
/names:
get:
summary: List all names
description: Returns all name IDs
operationId: listNames
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
type: integer
format: uint32
post:
summary: Create a new name
description: Creates a new name
operationId: createName
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Name'
responses:
'201':
description: Name created successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Name'
/names/all:
get:
summary: Get all names
description: Returns all names
operationId: getAllNames
responses:
'200':
description: Successful operation
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Name'
/names/{id}:
get:
summary: Get name by ID
description: Returns a single name
operationId: getNameById
parameters:
- name: id
in: path
description: ID of name to return
required: true
schema:
type: integer
format: uint32
responses:
'200':
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/Name'
'404':
description: Name not found
put:
summary: Update a name
description: Updates an existing name
operationId: updateName
parameters:
- name: id
in: path
description: ID of name to update
required: true
schema:
type: integer
format: uint32
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/Name'
responses:
'200':
description: Name updated successfully
content:
application/json:
schema:
$ref: '#/components/schemas/Name'
'404':
description: Name not found
delete:
summary: Delete a name
description: Deletes a name
operationId: deleteName
parameters:
- name: id
in: path
description: ID of name to delete
required: true
schema:
type: integer
format: uint32
responses:
'204':
description: Name deleted successfully
'404':
description: Name not found
/names/domain/{domain}:
get:
summary: Get name by domain
description: Returns a single name by its domain
operationId: getNameByDomain
parameters:
- name: domain
in: path
description: Domain of name to return
required: true
schema:
type: string
responses:
'200':
description: Successful operation
content:
application/json:
schema:
$ref: '#/components/schemas/Name'
'404':
description: Name not found
delete:
summary: Delete a name by domain
description: Deletes a name by its domain
operationId: deleteNameByDomain
parameters:
- name: domain
in: path
description: Domain of name to delete
required: true
schema:
type: string
responses:
'204':
description: Name deleted successfully
'404':
description: Name not found
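A hypothetical client sketch against the spec above; the base URL, the use of V's net.http and json modules, and the helper name list_agent_ids are assumptions for illustration, not part of this commit:
import net.http
import json
// Fetches /agents and decodes the JSON array of agent IDs it returns.
fn list_agent_ids(base string) ![]int {
	resp := http.get('${base}/agents')!
	if resp.status_code != 200 {
		return error('unexpected status ${resp.status_code}')
	}
	return json.decode([]int, resp.body)!
}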

View File

@@ -1,58 +0,0 @@
module mail
import freeflowuniverse.herolib.baobab.osis { OSIS }
pub struct HeroLibCirclesMailAPI {
mut:
osis OSIS
}
pub fn new_herolibcirclesmailapi() !HeroLibCirclesMailAPI {
return HeroLibCirclesMailAPI{osis: osis.new()!}
}
// Returns a list of all emails in the system
pub fn (mut h HeroLibCirclesMailAPI) list_emails(mailbox string) ![]Email {
panic('implement')
}
// Creates a new email in the system
pub fn (mut h HeroLibCirclesMailAPI) create_email(data EmailCreate) !Email {
panic('implement')
}
// Returns a single email by ID
pub fn (mut h HeroLibCirclesMailAPI) get_email_by_id(id u32) !Email {
panic('implement')
}
// Updates an existing email
pub fn (mut h HeroLibCirclesMailAPI) update_email(id u32, data EmailUpdate) !Email {
panic('implement')
}
// Deletes an email
pub fn (mut h HeroLibCirclesMailAPI) delete_email(id u32) ! {
panic('implement')
}
// Search for emails by various criteria
pub fn (mut h HeroLibCirclesMailAPI) search_emails(subject string, from string, to string, content string, date_from i64, date_to i64, has_attachments bool) ![]Email {
panic('implement')
}
// Returns all emails in a specific mailbox
pub fn (mut h HeroLibCirclesMailAPI) get_emails_by_mailbox(mailbox string) ![]Email {
panic('implement')
}
pub struct UpdateEmailFlags {
flags []string
}
// Update the flags of an email by its UID
pub fn (mut h HeroLibCirclesMailAPI) update_email_flags(uid u32, data UpdateEmailFlags) !Email {
panic('implement')
}

View File

@@ -1,55 +0,0 @@
module mail
import freeflowuniverse.herolib.baobab.osis { OSIS }
import x.json2 as json
pub struct HeroLibCirclesMailAPIExample {
osis OSIS
}
pub fn new_hero_lib_circles_mail_a_p_i_example() !HeroLibCirclesMailAPIExample {
return HeroLibCirclesMailAPIExample{osis: osis.new()!}
}
// Returns a list of all emails in the system
pub fn (mut h HeroLibCirclesMailAPIExample) list_emails(mailbox string) ![]Email {
json_str := '[]'
return json.decode[[]Email](json_str)!
}
// Creates a new email in the system
pub fn (mut h HeroLibCirclesMailAPIExample) create_email(data EmailCreate) !Email {
json_str := '{}'
return json.decode[Email](json_str)!
}
// Returns a single email by ID
pub fn (mut h HeroLibCirclesMailAPIExample) get_email_by_id(id u32) !Email {
json_str := '{}'
return json.decode[Email](json_str)!
}
// Updates an existing email
pub fn (mut h HeroLibCirclesMailAPIExample) update_email(id u32, data EmailUpdate) !Email {
json_str := '{}'
return json.decode[Email](json_str)!
}
// Deletes an email
pub fn (mut h HeroLibCirclesMailAPIExample) delete_email(id u32) ! {
// Implementation would go here
}
// Search for emails by various criteria
pub fn (mut h HeroLibCirclesMailAPIExample) search_emails(subject string, from string, to string, content string, date_from i64, date_to i64, has_attachments bool) ![]Email {
json_str := '[]'
return json.decode[[]Email](json_str)!
}
// Returns all emails in a specific mailbox
pub fn (mut h HeroLibCirclesMailAPIExample) get_emails_by_mailbox(mailbox string) ![]Email {
json_str := '[]'
return json.decode[[]Email](json_str)!
}
// Update the flags of an email by its UID
pub fn (mut h HeroLibCirclesMailAPIExample) update_email_flags(uid u32, data UpdateEmailFlags) !Email {
json_str := '{}'
return json.decode[Email](json_str)!
}

View File

@@ -1,295 +0,0 @@
module mail
import freeflowuniverse.herolib.baobab.osis { OSIS }
import x.json2 as json
pub struct HeroLibCirclesMailAPIExample {
osis OSIS
}
pub fn new_hero_lib_circles_mail_api_example() !HeroLibCirclesMailAPIExample {
return HeroLibCirclesMailAPIExample{osis: osis.new()!}
}
// Returns a list of all emails in the system
pub fn (mut h HeroLibCirclesMailAPIExample) list_emails(mailbox string) ![]Email {
// Example data from the OpenAPI spec
example_email1 := Email{
id: 1
uid: 101
seq_num: 1
mailbox: 'INBOX'
message: 'Hello, this is a test email.'
attachments: []
flags: ['\\Seen']
internal_date: 1647356400
size: 256
envelope: Envelope{
date: 1647356400
subject: 'Test Email'
from: ['sender@example.com']
sender: ['sender@example.com']
reply_to: ['sender@example.com']
to: ['recipient@example.com']
cc: []
bcc: []
in_reply_to: ''
message_id: '<abc123@example.com>'
}
}
example_email2 := Email{
id: 2
uid: 102
seq_num: 2
mailbox: 'INBOX'
message: 'This is another test email with an attachment.'
attachments: [
Attachment{
filename: 'document.pdf'
content_type: 'application/pdf'
data: 'base64encodeddata'
}
]
flags: []
internal_date: 1647442800
size: 1024
envelope: Envelope{
date: 1647442800
subject: 'Email with Attachment'
from: ['sender2@example.com']
sender: ['sender2@example.com']
reply_to: ['sender2@example.com']
to: ['recipient@example.com']
cc: ['cc@example.com']
bcc: []
in_reply_to: ''
message_id: '<def456@example.com>'
}
}
// Filter by mailbox if provided
if mailbox != '' && mailbox != 'INBOX' {
return []Email{}
}
return [example_email1, example_email2]
}
// Creates a new email in the system
pub fn (mut h HeroLibCirclesMailAPIExample) create_email(data EmailCreate) !Email {
// Example created email from OpenAPI spec
return Email{
id: 3
uid: 103
seq_num: 3
mailbox: data.mailbox
message: data.message
attachments: data.attachments
flags: data.flags
internal_date: 1647529200
size: 128
envelope: Envelope{
date: 1647529200
subject: data.envelope.subject
from: data.envelope.from
sender: data.envelope.from
reply_to: data.envelope.from
to: data.envelope.to
cc: data.envelope.cc
bcc: data.envelope.bcc
in_reply_to: ''
message_id: '<ghi789@example.com>'
}
}
}
// Returns a single email by ID
pub fn (mut h HeroLibCirclesMailAPIExample) get_email_by_id(id u32) !Email {
// Example email from OpenAPI spec
if id == 1 {
return Email{
id: 1
uid: 101
seq_num: 1
mailbox: 'INBOX'
message: 'Hello, this is a test email.'
attachments: []
flags: ['\\Seen']
internal_date: 1647356400
size: 256
envelope: Envelope{
date: 1647356400
subject: 'Test Email'
from: ['sender@example.com']
sender: ['sender@example.com']
reply_to: ['sender@example.com']
to: ['recipient@example.com']
cc: []
bcc: []
in_reply_to: ''
message_id: '<abc123@example.com>'
}
}
}
return error('Email not found')
}
// Updates an existing email
pub fn (mut h HeroLibCirclesMailAPIExample) update_email(id u32, data EmailUpdate) !Email {
// Example updated email from OpenAPI spec
if id == 1 {
return Email{
id: 1
uid: 101
seq_num: 1
mailbox: data.mailbox
message: data.message
attachments: data.attachments
flags: data.flags
internal_date: 1647356400
size: 300
envelope: Envelope{
date: 1647356400
subject: data.envelope.subject
from: data.envelope.from
sender: data.envelope.from
reply_to: data.envelope.from
to: data.envelope.to
cc: data.envelope.cc
bcc: data.envelope.bcc
in_reply_to: ''
message_id: '<abc123@example.com>'
}
}
}
return error('Email not found')
}
// Deletes an email
pub fn (mut h HeroLibCirclesMailAPIExample) delete_email(id u32) ! {
if id < 1 {
return error('Email not found')
}
// In a real implementation, this would delete the email
}
// Search for emails by various criteria
pub fn (mut h HeroLibCirclesMailAPIExample) search_emails(subject string, from string, to string, content string, date_from i64, date_to i64, has_attachments bool) ![]Email {
// Example search results from OpenAPI spec
return [
Email{
id: 1
uid: 101
seq_num: 1
mailbox: 'INBOX'
message: 'Hello, this is a test email with search terms.'
attachments: []
flags: ['\\Seen']
internal_date: 1647356400
size: 256
envelope: Envelope{
date: 1647356400
subject: 'Test Email Search'
from: ['sender@example.com']
sender: ['sender@example.com']
reply_to: ['sender@example.com']
to: ['recipient@example.com']
cc: []
bcc: []
in_reply_to: ''
message_id: '<abc123@example.com>'
}
}
]
}
// Returns all emails in a specific mailbox
pub fn (mut h HeroLibCirclesMailAPIExample) get_emails_by_mailbox(mailbox string) ![]Email {
// Example mailbox emails from OpenAPI spec
if mailbox == 'INBOX' {
return [
Email{
id: 1
uid: 101
seq_num: 1
mailbox: 'INBOX'
message: 'Hello, this is a test email in INBOX.'
attachments: []
flags: ['\\Seen']
internal_date: 1647356400
size: 256
envelope: Envelope{
date: 1647356400
subject: 'Test Email INBOX'
from: ['sender@example.com']
sender: ['sender@example.com']
reply_to: ['sender@example.com']
to: ['recipient@example.com']
cc: []
bcc: []
in_reply_to: ''
message_id: '<abc123@example.com>'
}
},
Email{
id: 2
uid: 102
seq_num: 2
mailbox: 'INBOX'
message: 'This is another test email in INBOX.'
attachments: []
flags: []
internal_date: 1647442800
size: 200
envelope: Envelope{
date: 1647442800
subject: 'Another Test Email INBOX'
from: ['sender2@example.com']
sender: ['sender2@example.com']
reply_to: ['sender2@example.com']
to: ['recipient@example.com']
cc: []
bcc: []
in_reply_to: ''
message_id: '<def456@example.com>'
}
}
]
}
return error('Mailbox not found')
}
// Update the flags of an email by its UID
pub fn (mut h HeroLibCirclesMailAPIExample) update_email_flags(uid u32, data UpdateEmailFlags) !Email {
// Example updated flags from OpenAPI spec
if uid == 101 {
return Email{
id: 1
uid: 101
seq_num: 1
mailbox: 'INBOX'
message: 'Hello, this is a test email.'
attachments: []
flags: data.flags
internal_date: 1647356400
size: 256
envelope: Envelope{
date: 1647356400
subject: 'Test Email'
from: ['sender@example.com']
sender: ['sender@example.com']
reply_to: ['sender@example.com']
to: ['recipient@example.com']
cc: []
bcc: []
in_reply_to: ''
message_id: '<abc123@example.com>'
}
}
}
return error('Email not found')
}

View File

@@ -1,18 +0,0 @@
module mail
import freeflowuniverse.herolib.baobab.osis { OSIS }
// Interface for Mail API
pub interface IHeroLibCirclesMailAPI {
mut:
list_emails(string) ![]Email
create_email(EmailCreate) !Email
get_email_by_id(u32) !Email
update_email(u32, EmailUpdate) !Email
delete_email(u32) !
search_emails(string, string, string, string, i64, i64, bool) ![]Email
get_emails_by_mailbox(string) ![]Email
update_email_flags(u32, UpdateEmailFlags) !Email
}
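A hypothetical sketch (same mail module) of depending on the interface rather than a concrete implementation, so the stub and the example implementation stay interchangeable:
// Works with any IHeroLibCirclesMailAPI implementation.
fn print_inbox(mut api IHeroLibCirclesMailAPI) ! {
	emails := api.list_emails('INBOX')!
	for email in emails {
		println('${email.uid}: ${email.envelope.subject}')
	}
}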

View File

@@ -1,59 +0,0 @@
module mail
pub struct Email {
id int // Database ID (assigned by DBHandler)
uid int // Unique identifier of the message (in the circle)
seq_num int // IMAP sequence number (in the mailbox)
mailbox string // The mailbox this email belongs to
message string // The email body content
attachments []Attachment // Any file attachments
flags []string // IMAP flags like \Seen, \Deleted, etc.
internal_date int // Unix timestamp when the email was received
size int // Size of the message in bytes
envelope Envelope
}
pub struct EmailCreate {
mailbox string // The mailbox this email belongs to
message string // The email body content
attachments []Attachment // Any file attachments
flags []string // IMAP flags like \Seen, \Deleted, etc.
envelope EnvelopeCreate
}
pub struct EmailUpdate {
mailbox string // The mailbox this email belongs to
message string // The email body content
attachments []Attachment // Any file attachments
flags []string // IMAP flags like \Seen, \Deleted, etc.
envelope EnvelopeCreate
}
pub struct Attachment {
filename string // Name of the attached file
content_type string // MIME type of the attachment
data string // Base64 encoded binary data
}
pub struct Envelope {
date int // Unix timestamp of the email date
subject string // Email subject
from []string // From addresses
sender []string // Sender addresses
reply_to []string // Reply-To addresses
to []string // To addresses
cc []string // CC addresses
bcc []string // BCC addresses
in_reply_to string // Message ID this email is replying to
message_id string // Unique message ID
}
pub struct EnvelopeCreate {
subject string // Email subject
from []string // From addresses
to []string // To addresses
cc []string // CC addresses
bcc []string // BCC addresses
}
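For illustration, a hypothetical sketch (same mail module) of building the EmailCreate payload expected by create_email; the addresses and text are made up:
fn example_email_create() EmailCreate {
	return EmailCreate{
		mailbox: 'INBOX'
		message: 'Hello from the sketch'
		envelope: EnvelopeCreate{
			subject: 'Greetings'
			from: ['sender@example.com']
			to: ['recipient@example.com']
		}
	}
}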

File diff suppressed because it is too large

View File

@@ -1,841 +0,0 @@
{
"openrpc": "1.2.6",
"info": {
"title": "HeroLib Circles Mail API",
"description": "API for Mail functionality of HeroLib Circles MCC module. This API provides endpoints for managing emails.",
"version": "1.0.0",
"contact": {
"name": "FreeFlow Universe",
"url": "https://freeflowuniverse.org"
}
},
"servers": [
{
"url": "https://api.example.com/v1",
"name": "Production server"
},
{
"url": "https://dev-api.example.com/v1",
"name": "Development server"
}
],
"methods": [
{
"name": "listEmails",
"summary": "List all emails",
"description": "Returns a list of all emails in the system",
"params": [
{
"name": "mailbox",
"description": "Filter emails by mailbox",
"required": false,
"schema": {
"type": "string"
}
}
],
"result": {
"name": "emails",
"description": "A list of emails",
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Email"
}
},
"examples": [
{
"name": "listEmails",
"value": [
{
"id": 1,
"uid": 101,
"seq_num": 1,
"mailbox": "INBOX",
"message": "Hello, this is a test email.",
"attachments": [],
"flags": ["\\Seen"],
"internal_date": 1647356400,
"size": 256,
"envelope": {
"date": 1647356400,
"subject": "Test Email",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": [],
"in_reply_to": "",
"message_id": "<abc123@example.com>"
}
},
{
"id": 2,
"uid": 102,
"seq_num": 2,
"mailbox": "INBOX",
"message": "This is another test email with an attachment.",
"attachments": [
{
"filename": "document.pdf",
"content_type": "application/pdf",
"data": "base64encodeddata"
}
],
"flags": [],
"internal_date": 1647442800,
"size": 1024,
"envelope": {
"date": 1647442800,
"subject": "Email with Attachment",
"from": ["sender2@example.com"],
"sender": ["sender2@example.com"],
"reply_to": ["sender2@example.com"],
"to": ["recipient@example.com"],
"cc": ["cc@example.com"],
"bcc": [],
"in_reply_to": "",
"message_id": "<def456@example.com>"
}
}
]
}
]
}
},
{
"name": "createEmail",
"summary": "Create a new email",
"description": "Creates a new email in the system",
"params": [
{
"name": "data",
"description": "Email data to create",
"required": true,
"schema": {
"$ref": "#/components/schemas/EmailCreate"
},
"examples": [
{
"name": "createEmail",
"value": {
"mailbox": "INBOX",
"message": "This is a new email message.",
"attachments": [],
"flags": [],
"envelope": {
"subject": "New Email",
"from": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": []
}
}
}
]
}
],
"result": {
"name": "email",
"description": "The created email",
"schema": {
"$ref": "#/components/schemas/Email"
},
"examples": [
{
"name": "createdEmail",
"value": {
"id": 3,
"uid": 103,
"seq_num": 3,
"mailbox": "INBOX",
"message": "This is a new email message.",
"attachments": [],
"flags": [],
"internal_date": 1647529200,
"size": 128,
"envelope": {
"date": 1647529200,
"subject": "New Email",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": [],
"in_reply_to": "",
"message_id": "<ghi789@example.com>"
}
}
}
]
}
},
{
"name": "getEmailById",
"summary": "Get email by ID",
"description": "Returns a single email by ID",
"params": [
{
"name": "id",
"description": "ID of the email to retrieve",
"required": true,
"schema": {
"type": "integer",
"format": "uint32"
}
}
],
"result": {
"name": "email",
"description": "The requested email",
"schema": {
"$ref": "#/components/schemas/Email"
},
"examples": [
{
"name": "emailById",
"value": {
"id": 1,
"uid": 101,
"seq_num": 1,
"mailbox": "INBOX",
"message": "Hello, this is a test email.",
"attachments": [],
"flags": ["\\Seen"],
"internal_date": 1647356400,
"size": 256,
"envelope": {
"date": 1647356400,
"subject": "Test Email",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": [],
"in_reply_to": "",
"message_id": "<abc123@example.com>"
}
}
}
]
}
},
{
"name": "updateEmail",
"summary": "Update email",
"description": "Updates an existing email",
"params": [
{
"name": "id",
"description": "ID of the email to update",
"required": true,
"schema": {
"type": "integer",
"format": "uint32"
}
},
{
"name": "data",
"description": "Updated email data",
"required": true,
"schema": {
"$ref": "#/components/schemas/EmailUpdate"
},
"examples": [
{
"name": "updateEmail",
"value": {
"mailbox": "INBOX",
"message": "This is an updated email message.",
"attachments": [],
"flags": ["\\Seen"],
"envelope": {
"subject": "Updated Email",
"from": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": ["cc@example.com"],
"bcc": []
}
}
}
]
}
],
"result": {
"name": "email",
"description": "The updated email",
"schema": {
"$ref": "#/components/schemas/Email"
},
"examples": [
{
"name": "updatedEmail",
"value": {
"id": 1,
"uid": 101,
"seq_num": 1,
"mailbox": "INBOX",
"message": "This is an updated email message.",
"attachments": [],
"flags": ["\\Seen"],
"internal_date": 1647356400,
"size": 300,
"envelope": {
"date": 1647356400,
"subject": "Updated Email",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": ["cc@example.com"],
"bcc": [],
"in_reply_to": "",
"message_id": "<abc123@example.com>"
}
}
}
]
}
},
{
"name": "deleteEmail",
"summary": "Delete email",
"description": "Deletes an email",
"params": [
{
"name": "id",
"description": "ID of the email to delete",
"required": true,
"schema": {
"type": "integer",
"format": "uint32"
}
}
],
"result": {
"name": "success",
"description": "Success status",
"schema": {
"type": "null"
}
}
},
{
"name": "searchEmails",
"summary": "Search emails",
"description": "Search for emails by various criteria",
"params": [
{
"name": "subject",
"description": "Search in email subject",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "from",
"description": "Search in from field",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "to",
"description": "Search in to field",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "content",
"description": "Search in email content",
"required": false,
"schema": {
"type": "string"
}
},
{
"name": "date_from",
"description": "Filter by date from (Unix timestamp)",
"required": false,
"schema": {
"type": "integer",
"format": "int64"
}
},
{
"name": "date_to",
"description": "Filter by date to (Unix timestamp)",
"required": false,
"schema": {
"type": "integer",
"format": "int64"
}
},
{
"name": "has_attachments",
"description": "Filter by presence of attachments",
"required": false,
"schema": {
"type": "boolean"
}
}
],
"result": {
"name": "emails",
"description": "Search results",
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Email"
}
},
"examples": [
{
"name": "searchResults",
"value": [
{
"id": 1,
"uid": 101,
"seq_num": 1,
"mailbox": "INBOX",
"message": "Hello, this is a test email with search terms.",
"attachments": [],
"flags": ["\\Seen"],
"internal_date": 1647356400,
"size": 256,
"envelope": {
"date": 1647356400,
"subject": "Test Email Search",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": [],
"in_reply_to": "",
"message_id": "<abc123@example.com>"
}
}
]
}
]
}
},
{
"name": "getEmailsByMailbox",
"summary": "Get emails by mailbox",
"description": "Returns all emails in a specific mailbox",
"params": [
{
"name": "mailbox",
"description": "Name of the mailbox",
"required": true,
"schema": {
"type": "string"
}
}
],
"result": {
"name": "emails",
"description": "Emails in the mailbox",
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Email"
}
},
"examples": [
{
"name": "mailboxEmails",
"value": [
{
"id": 1,
"uid": 101,
"seq_num": 1,
"mailbox": "INBOX",
"message": "Hello, this is a test email.",
"attachments": [],
"flags": ["\\Seen"],
"internal_date": 1647356400,
"size": 256,
"envelope": {
"date": 1647356400,
"subject": "Test Email",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": [],
"in_reply_to": "",
"message_id": "<abc123@example.com>"
}
},
{
"id": 2,
"uid": 102,
"seq_num": 2,
"mailbox": "INBOX",
"message": "This is another test email with an attachment.",
"attachments": [
{
"filename": "document.pdf",
"content_type": "application/pdf",
"data": "base64encodeddata"
}
],
"flags": [],
"internal_date": 1647442800,
"size": 1024,
"envelope": {
"date": 1647442800,
"subject": "Email with Attachment",
"from": ["sender2@example.com"],
"sender": ["sender2@example.com"],
"reply_to": ["sender2@example.com"],
"to": ["recipient@example.com"],
"cc": ["cc@example.com"],
"bcc": [],
"in_reply_to": "",
"message_id": "<def456@example.com>"
}
}
]
}
]
}
},
{
"name": "updateEmailFlags",
"summary": "Update email flags",
"description": "Update the flags of an email by its UID",
"params": [
{
"name": "uid",
"description": "UID of the email to update flags",
"required": true,
"schema": {
"type": "integer",
"format": "uint32"
}
},
{
"name": "data",
"description": "Flag update data",
"required": true,
"schema": {
"$ref": "#/components/schemas/UpdateEmailFlags"
},
"examples": [
{
"name": "updateFlags",
"value": {
"flags": ["\\Seen", "\\Flagged"]
}
}
]
}
],
"result": {
"name": "email",
"description": "The email with updated flags",
"schema": {
"$ref": "#/components/schemas/Email"
},
"examples": [
{
"name": "updatedFlags",
"value": {
"id": 1,
"uid": 101,
"seq_num": 1,
"mailbox": "INBOX",
"message": "Hello, this is a test email.",
"attachments": [],
"flags": ["\\Seen", "\\Flagged"],
"internal_date": 1647356400,
"size": 256,
"envelope": {
"date": 1647356400,
"subject": "Test Email",
"from": ["sender@example.com"],
"sender": ["sender@example.com"],
"reply_to": ["sender@example.com"],
"to": ["recipient@example.com"],
"cc": [],
"bcc": [],
"in_reply_to": "",
"message_id": "<abc123@example.com>"
}
}
}
]
}
}
],
"components": {
"schemas": {
"Email": {
"type": "object",
"properties": {
"id": {
"type": "integer",
"format": "uint32",
"description": "Unique identifier for the email"
},
"uid": {
"type": "integer",
"format": "uint32",
"description": "IMAP UID of the email"
},
"seq_num": {
"type": "integer",
"format": "uint32",
"description": "Sequence number of the email"
},
"mailbox": {
"type": "string",
"description": "Mailbox the email belongs to"
},
"message": {
"type": "string",
"description": "Content of the email"
},
"attachments": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Attachment"
},
"description": "List of attachments"
},
"flags": {
"type": "array",
"items": {
"type": "string"
},
"description": "Email flags"
},
"internal_date": {
"type": "integer",
"format": "int64",
"description": "Internal date of the email (Unix timestamp)"
},
"size": {
"type": "integer",
"format": "uint32",
"description": "Size of the email in bytes"
},
"envelope": {
"$ref": "#/components/schemas/Envelope",
"description": "Email envelope information"
}
},
"required": ["id", "uid", "mailbox", "message", "envelope"]
},
"EmailCreate": {
"type": "object",
"properties": {
"mailbox": {
"type": "string",
"description": "Mailbox to create the email in"
},
"message": {
"type": "string",
"description": "Content of the email"
},
"attachments": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Attachment"
},
"description": "List of attachments"
},
"flags": {
"type": "array",
"items": {
"type": "string"
},
"description": "Email flags"
},
"envelope": {
"$ref": "#/components/schemas/EnvelopeCreate",
"description": "Email envelope information"
}
},
"required": ["mailbox", "message", "envelope"]
},
"EmailUpdate": {
"type": "object",
"properties": {
"mailbox": {
"type": "string",
"description": "Mailbox to move the email to"
},
"message": {
"type": "string",
"description": "Updated content of the email"
},
"attachments": {
"type": "array",
"items": {
"$ref": "#/components/schemas/Attachment"
},
"description": "Updated list of attachments"
},
"flags": {
"type": "array",
"items": {
"type": "string"
},
"description": "Updated email flags"
},
"envelope": {
"$ref": "#/components/schemas/EnvelopeCreate",
"description": "Updated email envelope information"
}
}
},
"Attachment": {
"type": "object",
"properties": {
"filename": {
"type": "string",
"description": "Name of the attachment file"
},
"content_type": {
"type": "string",
"description": "MIME type of the attachment"
},
"data": {
"type": "string",
"description": "Base64 encoded attachment data"
}
},
"required": ["filename", "content_type", "data"]
},
"Envelope": {
"type": "object",
"properties": {
"date": {
"type": "integer",
"format": "int64",
"description": "Date of the email (Unix timestamp)"
},
"subject": {
"type": "string",
"description": "Subject of the email"
},
"from": {
"type": "array",
"items": {
"type": "string"
},
"description": "From addresses"
},
"sender": {
"type": "array",
"items": {
"type": "string"
},
"description": "Sender addresses"
},
"reply_to": {
"type": "array",
"items": {
"type": "string"
},
"description": "Reply-To addresses"
},
"to": {
"type": "array",
"items": {
"type": "string"
},
"description": "To addresses"
},
"cc": {
"type": "array",
"items": {
"type": "string"
},
"description": "CC addresses"
},
"bcc": {
"type": "array",
"items": {
"type": "string"
},
"description": "BCC addresses"
},
"in_reply_to": {
"type": "string",
"description": "Message ID this email is replying to"
},
"message_id": {
"type": "string",
"description": "Unique message ID"
}
},
"required": ["date", "subject", "from", "to"]
},
"EnvelopeCreate": {
"type": "object",
"properties": {
"subject": {
"type": "string",
"description": "Subject of the email"
},
"from": {
"type": "array",
"items": {
"type": "string"
},
"description": "From addresses"
},
"to": {
"type": "array",
"items": {
"type": "string"
},
"description": "To addresses"
},
"cc": {
"type": "array",
"items": {
"type": "string"
},
"description": "CC addresses"
},
"bcc": {
"type": "array",
"items": {
"type": "string"
},
"description": "BCC addresses"
}
},
"required": ["subject", "from", "to"]
},
"UpdateEmailFlags": {
"type": "object",
"properties": {
"flags": {
"type": "array",
"items": {
"type": "string"
},
"description": "New flags to set on the email"
}
},
"required": ["flags"]
}
}
}
}
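A hypothetical V client sketch for the OpenRPC document above; the endpoint URL and the use of net.http.post_json are assumptions for illustration, not part of this commit:
import net.http
// Sends a listEmails JSON-RPC request and returns the raw response body.
fn list_emails_rpc(endpoint string) !string {
	request := '{"jsonrpc":"2.0","id":1,"method":"listEmails","params":{"mailbox":"INBOX"}}'
	resp := http.post_json(endpoint, request)!
	if resp.status_code != 200 {
		return error('unexpected status ${resp.status_code}')
	}
	return resp.body
}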

View File

@@ -1,34 +1,115 @@
module models
import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime
pub struct Contact {
pub mut:
// Database ID
id u32 // Database ID (assigned by DBHandler)
// Content fields
created_at i64 // Unix epoch timestamp
modified_at i64 // Unix epoch timestamp
created_at ourtime.OurTime
modified_at ourtime.OurTime
first_name string
last_name string
email string
group string
group string // Reference to a DNS name; each group has a globally unique DNS name
groups []u32 // Groups this contact belongs to (references Circle IDs)
}
// dumps serializes the CalendarEvent to a byte array
pub fn (event Contact) dumps() ![]u8 {
// add_group adds a group to this contact
pub fn (mut contact Contact) add_group(group_id u32) {
if group_id !in contact.groups {
contact.groups << group_id
}
}
// remove_group removes a group from this contact
pub fn (mut contact Contact) remove_group(group_id u32) {
contact.groups = contact.groups.filter(it != group_id)
}
// filter_by_groups returns true if this contact belongs to any of the specified groups
pub fn (contact Contact) filter_by_groups(groups []u32) bool {
for g in groups {
if g in contact.groups {
return true
}
}
return false
}
// search_by_name returns true if the name contains the query (case-insensitive)
pub fn (contact Contact) search_by_name(query string) bool {
full_name := contact.full_name().to_lower()
query_words := query.to_lower().split(' ')
for word in query_words {
if !full_name.contains(word) {
return false
}
}
return true
}
// search_by_email returns true if the email contains the query (case-insensitive)
pub fn (contact Contact) search_by_email(query string) bool {
return contact.email.to_lower().contains(query.to_lower())
}
// update updates the contact's information
pub fn (mut contact Contact) update(first_name string, last_name string, email string, group string) {
if first_name != '' {
contact.first_name = first_name
}
if last_name != '' {
contact.last_name = last_name
}
if email != '' {
contact.email = email
}
if group != '' {
contact.group = group
}
contact.modified_at = i64(ourtime.now().unix)
}
// update_groups updates the contact's groups
pub fn (mut contact Contact) update_groups(groups []u32) {
contact.groups = groups.clone()
contact.modified_at = i64(ourtime.now().unix)
}
// full_name returns the full name of the contact
pub fn (contact Contact) full_name() string {
return '${contact.first_name} ${contact.last_name}'
}
// dumps serializes the Contact to a byte array
pub fn (contact Contact) dumps() ![]u8 {
mut enc := encoder.new()
// Add unique encoding ID to identify this type of data
enc.add_u16(303) // Unique ID for CalendarEvent type
enc.add_u16(303) // Unique ID for Contact type
enc.add_u32(event.id)
enc.add_i64(event.created_at)
enc.add_i64(event.modified_at)
enc.add_string(event.first_name)
enc.add_string(event.last_name)
enc.add_string(event.email)
enc.add_string(event.group)
enc.add_u32(contact.id)
enc.add_i64(contact.created_at)
enc.add_i64(contact.modified_at)
enc.add_string(contact.first_name)
enc.add_string(contact.last_name)
enc.add_string(contact.email)
enc.add_string(contact.group)
// Add groups array
enc.add_u32(u32(contact.groups.len))
for group_id in contact.groups {
enc.add_u32(group_id)
}
return enc.data
}
@@ -36,32 +117,55 @@ pub fn (event Contact) dumps() ![]u8 {
// loads deserializes a byte array to a Contact
pub fn contact_event_loads(data []u8) !Contact {
mut d := encoder.decoder_new(data)
mut event := Contact{}
mut contact := Contact{}
// Check encoding ID to verify this is the correct type of data
encoding_id := d.get_u16()!
if encoding_id != 303 {
return error('Wrong file type: expected encoding ID 303, got ${encoding_id}, for calendar event')
return error('Wrong file type: expected encoding ID 303, got ${encoding_id}, for contact')
}
// Decode Contact fields
event.id = d.get_u32()!
event.created_at = d.get_i64()!
event.modified_at = d.get_i64()!
event.first_name = d.get_string()!
event.last_name = d.get_string()!
event.email = d.get_string()!
event.group = d.get_string()!
contact.id = d.get_u32()!
contact.created_at = d.get_i64()!
contact.modified_at = d.get_i64()!
contact.first_name = d.get_string()!
contact.last_name = d.get_string()!
contact.email = d.get_string()!
contact.group = d.get_string()!
// Check if there's more data (for backward compatibility)
// Try to read the groups array, but handle potential errors if no more data
contact.groups = []u32{}
// We need to handle the case where older data might not have groups
// Try to read the groups length, but catch any errors if we're at the end of data
groups_len := d.get_u32() or {
// No more data, which is fine for backward compatibility
return contact
}
// If we successfully read the groups length, try to read the groups
if groups_len > 0 {
contact.groups = []u32{cap: int(groups_len)}
for _ in 0..groups_len {
group_id := d.get_u32() or {
// If we can't read a group ID, just return what we have so far
break
}
contact.groups << group_id
}
}
return event
return contact
}
// index_keys returns the keys to be indexed for this event
pub fn (event Contact) index_keys() map[string]string {
// index_keys returns the keys to be indexed for this contact
pub fn (contact Contact) index_keys() map[string]string {
mut keys := map[string]string{}
keys['id'] = event.id.str()
// if event.caldav_uid != '' {
// keys['caldav_uid'] = event.caldav_uid
// }
keys['id'] = contact.id.str()
keys['email'] = contact.email
keys['name'] = contact.full_name()
keys['group'] = contact.group
return keys
}

View File

@@ -10,6 +10,7 @@ fn test_contact_serialization_deserialization() {
last_name: 'Doe'
email: 'john.doe@example.com'
group: 'Friends'
groups: [u32(1), 2, 3]
}
// Serialize the Contact
@@ -32,6 +33,7 @@ fn test_contact_serialization_deserialization() {
assert deserialized.last_name == original.last_name, 'last_name mismatch'
assert deserialized.email == original.email, 'email mismatch'
assert deserialized.group == original.group, 'group mismatch'
assert deserialized.groups == original.groups, 'groups mismatch'
}
fn test_contact_deserialization_with_wrong_encoding_id() {
@@ -41,6 +43,7 @@ fn test_contact_deserialization_with_wrong_encoding_id() {
first_name: 'John'
last_name: 'Doe'
email: 'john.doe@example.com'
groups: [u32(1), 2]
}
// Serialize the Contact
@@ -76,6 +79,7 @@ fn test_contact_with_empty_fields() {
last_name: ''
email: ''
group: ''
groups: []u32{}
}
// Serialize the Contact
@@ -98,6 +102,7 @@ fn test_contact_with_empty_fields() {
assert deserialized.last_name == original.last_name, 'last_name mismatch'
assert deserialized.email == original.email, 'email mismatch'
assert deserialized.group == original.group, 'group mismatch'
assert deserialized.groups == original.groups, 'groups mismatch'
}
fn test_contact_serialization_size() {
@@ -110,6 +115,7 @@ fn test_contact_serialization_size() {
last_name: 'Doe'
email: 'john.doe@example.com'
group: 'Friends'
groups: [u32(1), 2, 3]
}
// Serialize the Contact
@@ -129,3 +135,118 @@ fn test_contact_serialization_size() {
assert serialized.len >= expected_min_size, 'Serialized data size is suspiciously small'
}
fn test_contact_new_constructor() {
// Test the new_contact constructor
contact := new_contact(42, 'John', 'Doe', 'john.doe@example.com', 'Friends')
assert contact.id == 42
assert contact.first_name == 'John'
assert contact.last_name == 'Doe'
assert contact.email == 'john.doe@example.com'
assert contact.group == 'Friends'
assert contact.groups.len == 0
// Check that timestamps were set
assert contact.created_at > 0
assert contact.modified_at > 0
assert contact.created_at == contact.modified_at
}
fn test_contact_groups_management() {
// Test adding and removing groups
mut contact := new_contact(42, 'John', 'Doe', 'john.doe@example.com', 'Friends')
// Initially empty
assert contact.groups.len == 0
// Add groups
contact.add_group(1)
contact.add_group(2)
contact.add_group(3)
assert contact.groups.len == 3
assert u32(1) in contact.groups
assert u32(2) in contact.groups
assert u32(3) in contact.groups
// Adding duplicate should not change anything
contact.add_group(1)
assert contact.groups.len == 3
// Remove a group
contact.remove_group(2)
assert contact.groups.len == 2
assert u32(1) in contact.groups
assert u32(2) !in contact.groups
assert u32(3) in contact.groups
// Update all groups
contact.update_groups([u32(5), 6])
assert contact.groups.len == 2
assert u32(5) in contact.groups
assert u32(6) in contact.groups
assert u32(1) !in contact.groups
assert u32(3) !in contact.groups
}
fn test_contact_filter_and_search() {
// Test filtering and searching
mut contact := Contact{
id: 42
first_name: 'John'
last_name: 'Doe'
email: 'john.doe@example.com'
group: 'Friends'
groups: [u32(1), 2, 3]
}
// Test filter_by_groups
assert contact.filter_by_groups([u32(1), 5]) == true
assert contact.filter_by_groups([u32(5), 6]) == false
// Test search_by_name
assert contact.search_by_name('john') == true
assert contact.search_by_name('doe') == true
assert contact.search_by_name('john doe') == true
assert contact.search_by_name('JOHN') == true // Case insensitive
assert contact.search_by_name('smith') == false
// Test search_by_email
assert contact.search_by_email('john') == true
assert contact.search_by_email('example') == true
assert contact.search_by_email('EXAMPLE') == true // Case insensitive
assert contact.search_by_email('gmail') == false
}
fn test_contact_update() {
// Test updating contact information
mut contact := new_contact(42, 'John', 'Doe', 'john.doe@example.com', 'Friends')
mut original_modified_at := contact.modified_at
// Update individual fields
contact.update('Jane', '', '', '')
assert contact.first_name == 'Jane'
assert contact.last_name == 'Doe' // Unchanged
assert contact.modified_at > original_modified_at
original_modified_at = contact.modified_at
// Update multiple fields
contact.update('', 'Smith', 'jane.smith@example.com', '')
assert contact.first_name == 'Jane' // Unchanged
assert contact.last_name == 'Smith'
assert contact.email == 'jane.smith@example.com'
assert contact.group == 'Friends' // Unchanged
assert contact.modified_at > original_modified_at
}
fn test_contact_full_name() {
// Test full_name method
contact := Contact{
first_name: 'John'
last_name: 'Doe'
}
assert contact.full_name() == 'John Doe'
}

View File

@@ -1,27 +1,32 @@
module models
// import freeflowuniverse.herolib.data.ourtime
import freeflowuniverse.herolib.data.encoder
// import strings
// import strconv
import time
// Email represents an email message with all its metadata and content
pub struct Email {
pub mut:
// Database ID
id u32 // Database ID (assigned by DBHandler)
// Content fields
uid u32 // Unique identifier of the message (in the circle)
seq_num u32 // IMAP sequence number (in the mailbox)
mailbox string // The mailbox this email belongs to
message_id string // Unique identifier for the email
folder string // The folder this email belongs to (inbox, sent, drafts, etc.)
message string // The email body content
attachments []Attachment // Any file attachments
// IMAP specific fields
flags []string // IMAP flags like \Seen, \Deleted, etc.
internal_date i64 // Unix timestamp when the email was received
size u32 // Size of the message in bytes
envelope ?Envelope // IMAP envelope information (contains From, To, Subject, etc.)
date i64 // Unix timestamp when the email was sent/received
size u32 // Size of the message in bytes
read bool // Whether the email has been read
flagged bool // Whether the email has been flagged/starred
// Header information
subject string
from []string
sender []string
reply_to []string
to []string
cc []string
bcc []string
in_reply_to string
}
// Attachment represents an email attachment
@@ -32,24 +37,9 @@ pub mut:
data string // Base64 encoded binary data
}
// Envelope represents an IMAP envelope structure
pub struct Envelope {
pub mut:
date i64
subject string
from []string
sender []string
reply_to []string
to []string
cc []string
bcc []string
in_reply_to string
message_id string
}
pub fn (e Email) index_keys() map[string]string {
return {
'uid': e.uid.str()
'message_id': e.message_id
}
}
@@ -63,9 +53,8 @@ pub fn (e Email) dumps() ![]u8 {
// Encode Email fields
enc.add_u32(e.id)
enc.add_u32(e.uid)
enc.add_u32(e.seq_num)
enc.add_string(e.mailbox)
enc.add_string(e.message_id)
enc.add_string(e.folder)
enc.add_string(e.message)
// Encode attachments array
@@ -76,62 +65,51 @@ pub fn (e Email) dumps() ![]u8 {
enc.add_string(attachment.data)
}
// Encode flags array
enc.add_u16(u16(e.flags.len))
for flag in e.flags {
enc.add_string(flag)
}
enc.add_i64(e.internal_date)
enc.add_i64(e.date)
enc.add_u32(e.size)
// Encode envelope (optional)
if envelope := e.envelope {
enc.add_u8(1) // Has envelope
enc.add_i64(envelope.date)
enc.add_string(envelope.subject)
// Encode from addresses
enc.add_u16(u16(envelope.from.len))
for addr in envelope.from {
enc.add_string(addr)
}
// Encode sender addresses
enc.add_u16(u16(envelope.sender.len))
for addr in envelope.sender {
enc.add_string(addr)
}
// Encode reply_to addresses
enc.add_u16(u16(envelope.reply_to.len))
for addr in envelope.reply_to {
enc.add_string(addr)
}
// Encode to addresses
enc.add_u16(u16(envelope.to.len))
for addr in envelope.to {
enc.add_string(addr)
}
// Encode cc addresses
enc.add_u16(u16(envelope.cc.len))
for addr in envelope.cc {
enc.add_string(addr)
}
// Encode bcc addresses
enc.add_u16(u16(envelope.bcc.len))
for addr in envelope.bcc {
enc.add_string(addr)
}
enc.add_string(envelope.in_reply_to)
enc.add_string(envelope.message_id)
} else {
enc.add_u8(0) // No envelope
enc.add_u8(if e.read { 1 } else { 0 })
enc.add_u8(if e.flagged { 1 } else { 0 })
// Encode header information
enc.add_string(e.subject)
// Encode from addresses
enc.add_u16(u16(e.from.len))
for addr in e.from {
enc.add_string(addr)
}
// Encode sender addresses
enc.add_u16(u16(e.sender.len))
for addr in e.sender {
enc.add_string(addr)
}
// Encode reply_to addresses
enc.add_u16(u16(e.reply_to.len))
for addr in e.reply_to {
enc.add_string(addr)
}
// Encode to addresses
enc.add_u16(u16(e.to.len))
for addr in e.to {
enc.add_string(addr)
}
// Encode cc addresses
enc.add_u16(u16(e.cc.len))
for addr in e.cc {
enc.add_string(addr)
}
// Encode bcc addresses
enc.add_u16(u16(e.bcc.len))
for addr in e.bcc {
enc.add_string(addr)
}
enc.add_string(e.in_reply_to)
return enc.data
}
@@ -149,9 +127,8 @@ pub fn email_loads(data []u8) !Email {
// Decode Email fields
email.id = d.get_u32()!
email.uid = d.get_u32()!
email.seq_num = d.get_u32()!
email.mailbox = d.get_string()!
email.message_id = d.get_string()!
email.folder = d.get_string()!
email.message = d.get_string()!
// Decode attachments array
@@ -165,82 +142,67 @@ pub fn email_loads(data []u8) !Email {
email.attachments[i] = attachment
}
// Decode flags array
flags_len := d.get_u16()!
email.flags = []string{len: int(flags_len)}
for i in 0 .. flags_len {
email.flags[i] = d.get_string()!
}
email.internal_date = d.get_i64()!
email.date = d.get_i64()!
email.size = d.get_u32()!
// Decode envelope (optional)
has_envelope := d.get_u8()!
if has_envelope == 1 {
mut envelope := Envelope{}
envelope.date = d.get_i64()!
envelope.subject = d.get_string()!
// Decode from addresses
from_len := d.get_u16()!
envelope.from = []string{len: int(from_len)}
for i in 0 .. from_len {
envelope.from[i] = d.get_string()!
}
// Decode sender addresses
sender_len := d.get_u16()!
envelope.sender = []string{len: int(sender_len)}
for i in 0 .. sender_len {
envelope.sender[i] = d.get_string()!
}
// Decode reply_to addresses
reply_to_len := d.get_u16()!
envelope.reply_to = []string{len: int(reply_to_len)}
for i in 0 .. reply_to_len {
envelope.reply_to[i] = d.get_string()!
}
// Decode to addresses
to_len := d.get_u16()!
envelope.to = []string{len: int(to_len)}
for i in 0 .. to_len {
envelope.to[i] = d.get_string()!
}
// Decode cc addresses
cc_len := d.get_u16()!
envelope.cc = []string{len: int(cc_len)}
for i in 0 .. cc_len {
envelope.cc[i] = d.get_string()!
}
// Decode bcc addresses
bcc_len := d.get_u16()!
envelope.bcc = []string{len: int(bcc_len)}
for i in 0 .. bcc_len {
envelope.bcc[i] = d.get_string()!
}
envelope.in_reply_to = d.get_string()!
envelope.message_id = d.get_string()!
email.envelope = envelope
email.read = d.get_u8()! == 1
email.flagged = d.get_u8()! == 1
// Decode header information
email.subject = d.get_string()!
// Decode from addresses
from_len := d.get_u16()!
email.from = []string{len: int(from_len)}
for i in 0 .. from_len {
email.from[i] = d.get_string()!
}
// Decode sender addresses
sender_len := d.get_u16()!
email.sender = []string{len: int(sender_len)}
for i in 0 .. sender_len {
email.sender[i] = d.get_string()!
}
// Decode reply_to addresses
reply_to_len := d.get_u16()!
email.reply_to = []string{len: int(reply_to_len)}
for i in 0 .. reply_to_len {
email.reply_to[i] = d.get_string()!
}
// Decode to addresses
to_len := d.get_u16()!
email.to = []string{len: int(to_len)}
for i in 0 .. to_len {
email.to[i] = d.get_string()!
}
// Decode cc addresses
cc_len := d.get_u16()!
email.cc = []string{len: int(cc_len)}
for i in 0 .. cc_len {
email.cc[i] = d.get_string()!
}
// Decode bcc addresses
bcc_len := d.get_u16()!
email.bcc = []string{len: int(bcc_len)}
for i in 0 .. bcc_len {
email.bcc[i] = d.get_string()!
}
email.in_reply_to = d.get_string()!
return email
}
// sender returns the first sender address or an empty string if not available
pub fn (e Email) sender() string {
if envelope := e.envelope {
if envelope.sender.len > 0 {
return envelope.sender[0]
} else if envelope.from.len > 0 {
return envelope.from[0]
}
if e.sender.len > 0 {
return e.sender[0]
} else if e.from.len > 0 {
return e.from[0]
}
return ''
}
@@ -248,13 +210,9 @@ pub fn (e Email) sender() string {
// recipients returns all recipient addresses (to, cc, bcc)
pub fn (e Email) recipients() []string {
mut recipients := []string{}
if envelope := e.envelope {
recipients << envelope.to
recipients << envelope.cc
recipients << envelope.bcc
}
recipients << e.to
recipients << e.cc
recipients << e.bcc
return recipients
}
@@ -263,24 +221,6 @@ pub fn (e Email) has_attachments() bool {
return e.attachments.len > 0
}
// is_read returns true if the email has been marked as read
pub fn (e Email) is_read() bool {
return '\\Seen' in e.flags
}
// is_flagged returns true if the email has been flagged
pub fn (e Email) is_flagged() bool {
return '\\Flagged' in e.flags
}
// date returns the date when the email was sent
pub fn (e Email) date() i64 {
if envelope := e.envelope {
return envelope.date
}
return e.internal_date
}
// calculate_size calculates the total size of the email in bytes
pub fn (e Email) calculate_size() u32 {
mut size := u32(e.message.len)
@@ -290,25 +230,23 @@ pub fn (e Email) calculate_size() u32 {
size += u32(attachment.data.len)
}
// Add estimated size of envelope data if available
if envelope := e.envelope {
size += u32(envelope.subject.len)
size += u32(envelope.message_id.len)
size += u32(envelope.in_reply_to.len)
// Add size of header data
size += u32(e.subject.len)
size += u32(e.message_id.len)
size += u32(e.in_reply_to.len)
// Add size of address fields
for addr in envelope.from {
size += u32(addr.len)
}
for addr in envelope.to {
size += u32(addr.len)
}
for addr in envelope.cc {
size += u32(addr.len)
}
for addr in envelope.bcc {
size += u32(addr.len)
}
// Add size of address fields
for addr in e.from {
size += u32(addr.len)
}
for addr in e.to {
size += u32(addr.len)
}
for addr in e.cc {
size += u32(addr.len)
}
for addr in e.bcc {
size += u32(addr.len)
}
return size
@@ -322,149 +260,70 @@ fn count_lines(s string) int {
return s.count('\n') + 1
}
// body_structure generates and returns a description of the MIME structure of the email
// This can be used by IMAP clients to understand the structure of the message
pub fn (e Email) body_structure() string {
// If there are no attachments, return a simple text structure
// get_mime_type returns the MIME type of the email
pub fn (e Email) get_mime_type() string {
if e.attachments.len == 0 {
return '("text" "plain" ("charset" "utf-8") NIL NIL "7bit" ' +
'${e.message.len} ${count_lines(e.message)}' + ' NIL NIL NIL)'
return 'text/plain'
}
// For emails with attachments, create a multipart/mixed structure
mut result := '("multipart" "mixed" NIL NIL NIL "7bit" NIL NIL ('
// Add the text part
result += '("text" "plain" ("charset" "utf-8") NIL NIL "7bit" ' +
'${e.message.len} ${count_lines(e.message)}' + ' NIL NIL NIL)'
// Add each attachment
for attachment in e.attachments {
// Default to application/octet-stream if content type is empty
mut content_type := attachment.content_type
if content_type == '' {
content_type = 'application/octet-stream'
}
// Split content type into type and subtype
parts := content_type.split('/')
mut subtype := 'octet-stream'
if parts.len == 2 {
subtype = parts[1]
}
// Add the attachment part
result += ' ("application" "${subtype}" ("name" "${attachment.filename}") NIL NIL "base64" ${attachment.data.len} NIL ("attachment" ("filename" "${attachment.filename}")) NIL)'
}
// Close the structure
result += ')'
return result
return 'multipart/mixed'
}
// Helper methods to access fields from the Envelope
// from returns the From address from the Envelope
pub fn (e Email) from() string {
if envelope := e.envelope {
if envelope.from.len > 0 {
return envelope.from[0]
}
}
return ''
// format_date returns the date formatted as a string
pub fn (e Email) format_date() string {
return time.unix(e.date).format_rfc3339()
}
// to returns the To addresses from the Envelope
pub fn (e Email) to() []string {
if envelope := e.envelope {
return envelope.to
}
return []string{}
}
// cc returns the Cc addresses from the Envelope
pub fn (e Email) cc() []string {
if envelope := e.envelope {
return envelope.cc
}
return []string{}
}
// bcc returns the Bcc addresses from the Envelope
pub fn (e Email) bcc() []string {
if envelope := e.envelope {
return envelope.bcc
}
return []string{}
}
// subject returns the Subject from the Envelope
pub fn (e Email) subject() string {
if envelope := e.envelope {
return envelope.subject
}
return ''
}
// ensure_envelope ensures that the email has an envelope, creating one if needed
pub fn (mut e Email) ensure_envelope() {
if e.envelope == none {
e.envelope = Envelope{
from: []string{}
sender: []string{}
reply_to: []string{}
to: []string{}
cc: []string{}
bcc: []string{}
}
}
}
// set_from sets the From address in the Envelope
// set_from sets the From address
pub fn (mut e Email) set_from(from string) {
e.ensure_envelope()
mut envelope := e.envelope or { Envelope{} }
envelope.from = [from]
e.envelope = envelope
e.from = [from]
}
// set_to sets the To addresses in the Envelope
// set_to sets the To addresses
pub fn (mut e Email) set_to(to []string) {
e.ensure_envelope()
mut envelope := e.envelope or { Envelope{} }
envelope.to = to.clone()
e.envelope = envelope
e.to = to.clone()
}
// set_cc sets the Cc addresses in the Envelope
// set_cc sets the Cc addresses
pub fn (mut e Email) set_cc(cc []string) {
e.ensure_envelope()
mut envelope := e.envelope or { Envelope{} }
envelope.cc = cc.clone()
e.envelope = envelope
e.cc = cc.clone()
}
// set_bcc sets the Bcc addresses in the Envelope
// set_bcc sets the Bcc addresses
pub fn (mut e Email) set_bcc(bcc []string) {
e.ensure_envelope()
mut envelope := e.envelope or { Envelope{} }
envelope.bcc = bcc.clone()
e.envelope = envelope
e.bcc = bcc.clone()
}
// set_subject sets the Subject in the Envelope
// set_subject sets the Subject
pub fn (mut e Email) set_subject(subject string) {
e.ensure_envelope()
mut envelope := e.envelope or { Envelope{} }
envelope.subject = subject
e.envelope = envelope
e.subject = subject
}
// set_date sets the Date in the Envelope
// set_date sets the Date
pub fn (mut e Email) set_date(date i64) {
e.ensure_envelope()
mut envelope := e.envelope or { Envelope{} }
envelope.date = date
e.envelope = envelope
e.date = date
}
// mark_as_read marks the email as read
pub fn (mut e Email) mark_as_read() {
e.read = true
}
// mark_as_unread marks the email as unread
pub fn (mut e Email) mark_as_unread() {
e.read = false
}
// toggle_flag toggles the flagged status of the email
pub fn (mut e Email) toggle_flag() {
e.flagged = !e.flagged
}
// add_attachment adds an attachment to the email
pub fn (mut e Email) add_attachment(filename string, content_type string, data string) {
e.attachments << Attachment{
filename: filename
content_type: content_type
data: data
}
e.size = e.calculate_size()
}

1
lib/circles/rustmodels Symbolic link
View File

@@ -0,0 +1 @@
../../../../../git.ourworld.tf/herocode/db/herodb/src/models

235
lib/data/tst/README.md Normal file
View File

@@ -0,0 +1,235 @@
# Ternary Search Tree (TST) Implementation Plan
## Overview
A Ternary Search Tree (TST) is a specialized tree structure where each node has three children:
- Left: For characters less than the current node's character
- Middle: For characters equal to the current node's character (leads to the next character in the sequence)
- Right: For characters greater than the current node's character
In this implementation, we'll focus on the core TST functionality without any balancing mechanisms. Nodes will be inserted in their natural position based on character comparisons.
## Data Structures
```mermaid
classDiagram
class Node {
+char character
+bool is_end_of_string
+[]u8 value
+u32 left_id
+u32 middle_id
+u32 right_id
}
class TST {
+&ourdb.OurDB db
+u32 root_id
+set(key string, value []u8)
+get(key string) []u8
+delete(key string)
+list(prefix string) []string
+getall(prefix string) [][]u8
}
TST --> Node : contains
```
## Core Components
1. **Node Structure** (sketched in V after this list):
- `character`: The character stored at this node
- `is_end_of_string`: Flag indicating if this node represents the end of a key
- `value`: The value associated with the key (if this node is the end of a key)
- `left_id`, `middle_id`, `right_id`: Database IDs for the three children
2. **TST Structure**:
- `db`: Reference to the ourdb database for persistence
- `root_id`: Database ID of the root node
3. **Serialization/Deserialization**:
- Use the same encoder/decoder as the radix tree
- Serialize node properties efficiently
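A minimal V sketch of these two structures, following the plan above (field names mirror the plan; the final `tst.v` may differ in attributes and visibility):
```v
module tst

import freeflowuniverse.herolib.data.ourdb

// Sketch only: one node of the ternary search tree.
struct Node {
mut:
	character        u8   // character stored at this node
	is_end_of_string bool // true if this node terminates a key
	value            []u8 // value for the key ending here (empty otherwise)
	left_id          u32  // child for characters < character
	middle_id        u32  // child continuing the key (equal character)
	right_id         u32  // child for characters > character
}

// Sketch only: the tree itself, backed by ourdb for persistence.
@[heap]
pub struct TST {
mut:
	db      &ourdb.OurDB // database storing serialized nodes
	root_id u32          // database ID of the root node
}
```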
## Key Operations
### 1. Insertion
```mermaid
flowchart TD
A[Start Insertion] --> B{Is Tree Empty?}
B -->|Yes| C[Create Root Node]
B -->|No| D[Start at Root]
D --> E{Current Char vs Node Char}
E -->|Less Than| F{Left Child Exists?}
F -->|Yes| G[Go to Left Child]
F -->|No| H[Create Left Child]
E -->|Equal To| I{End of Key?}
I -->|Yes| J[Set End of String Flag]
I -->|No| K{Middle Child Exists?}
K -->|Yes| L[Go to Middle Child]
K -->|No| M[Create Middle Child]
E -->|Greater Than| N{Right Child Exists?}
N -->|Yes| O[Go to Right Child]
N -->|No| P[Create Right Child]
G --> E
L --> Q[Move to Next Char]
Q --> E
O --> E
H --> R[Insertion Complete]
J --> R
M --> Q
P --> R
C --> R
```
The insertion algorithm will (see the sketch after this list):
1. Navigate the tree based on character comparisons
2. Create new nodes as needed
3. Set the end-of-string flag when the entire key is inserted
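A condensed V sketch of that three-way comparison, building on the Node/TST sketch above and the planned `serialize_node`/`deserialize_node` helpers (the function name is illustrative; the real insert also persists parent nodes after every change):
```v
// Sketch: the comparison logic driving insertion (simplified).
fn (mut t TST) insert_sketch(node_id u32, key string, pos int, value []u8) !u32 {
	if node_id == 0 {
		// no node here yet: create one for key[pos]
		n := Node{
			character:        key[pos]
			is_end_of_string: pos == key.len - 1
			value:            if pos == key.len - 1 { value } else { []u8{} }
		}
		new_id := t.db.set(data: serialize_node(n))!
		return new_id
	}
	mut n := deserialize_node(t.db.get(node_id)!)!
	if key[pos] < n.character {
		n.left_id = t.insert_sketch(n.left_id, key, pos, value)! // go left, same position
	} else if key[pos] > n.character {
		n.right_id = t.insert_sketch(n.right_id, key, pos, value)! // go right, same position
	} else if pos == key.len - 1 {
		n.is_end_of_string = true // end of key reached on a matching character
		n.value = value
	} else {
		n.middle_id = t.insert_sketch(n.middle_id, key, pos + 1, value)! // descend to next character
	}
	t.db.set(id: node_id, data: serialize_node(n))!
	return node_id
}
```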
### 2. Search
```mermaid
flowchart TD
A[Start Search] --> B{Is Tree Empty?}
B -->|Yes| C[Return Not Found]
B -->|No| D[Start at Root]
D --> E{Current Char vs Node Char}
E -->|Less Than| F{Left Child Exists?}
F -->|Yes| G[Go to Left Child]
F -->|No| C
E -->|Equal To| H{End of Key?}
H -->|Yes| I{Is End of String?}
I -->|Yes| J[Return Value]
I -->|No| C
H -->|No| K{Middle Child Exists?}
K -->|Yes| L[Go to Middle Child]
K -->|No| C
E -->|Greater Than| M{Right Child Exists?}
M -->|Yes| N[Go to Right Child]
M -->|No| C
G --> E
L --> O[Move to Next Char]
O --> E
N --> E
```
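Lookup follows the same three-way comparison; a condensed sketch (name illustrative, same assumptions as the insertion sketch):
```v
// Sketch: recursive lookup (simplified).
fn (mut t TST) search_sketch(node_id u32, key string, pos int) ![]u8 {
	if node_id == 0 {
		return error('Key not found')
	}
	n := deserialize_node(t.db.get(node_id)!)!
	if key[pos] < n.character {
		return t.search_sketch(n.left_id, key, pos)!
	} else if key[pos] > n.character {
		return t.search_sketch(n.right_id, key, pos)!
	}
	if pos == key.len - 1 {
		if n.is_end_of_string {
			return n.value
		}
		return error('Key not found') // node exists but never terminated a key
	}
	return t.search_sketch(n.middle_id, key, pos + 1)!
}
```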
### 3. Deletion
```mermaid
flowchart TD
A[Start Deletion] --> B{Is Tree Empty?}
B -->|Yes| C[Return Not Found]
B -->|No| D[Find Node]
D --> E{Node Found?}
E -->|No| C
E -->|Yes| F{Is End of String?}
F -->|No| C
F -->|Yes| G{Has Children?}
G -->|Yes| H[Mark Not End of String]
G -->|No| I[Remove Node]
H --> J[Deletion Complete]
I --> J
```
For deletion, we'll use a simple approach (sketched after this list):
1. If the node has children, just mark it as not end-of-string
2. If the node has no children, remove it from its parent
3. No rebalancing is performed
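Condensed into V, the decision for the node that ends the key looks roughly like this (hypothetical helper name; locating the node and clearing the parent's child pointer are omitted):
```v
// Sketch: handling the node that terminates the key being deleted.
fn (mut t TST) delete_found(node_id u32, mut node Node) ! {
	if node.left_id == 0 && node.middle_id == 0 && node.right_id == 0 {
		// leaf node: remove the record entirely
		t.db.delete(node_id)!
	} else {
		// interior node: keep it, just stop marking it as a key end
		node.is_end_of_string = false
		node.value = []u8{}
		t.db.set(id: node_id, data: serialize_node(node))!
	}
}
```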
### 4. Prefix Search
```mermaid
flowchart TD
A[Start Prefix Search] --> B{Is Tree Empty?}
B -->|Yes| C[Return Empty List]
B -->|No| D[Navigate to Prefix Node]
D --> E{Prefix Found?}
E -->|No| C
E -->|Yes| F[Collect All Keys from Subtree]
F --> G[Return Collected Keys]
```
The prefix search will (see the usage sketch after this list):
1. Navigate to the node representing the end of the prefix
2. Collect all keys in the subtree rooted at that node
3. Return the collected keys
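With the planned public API, prefix queries would behave roughly like this (a sketch; paths and keys are illustrative, and result order follows tree traversal):
```v
import freeflowuniverse.herolib.data.tst

fn prefix_demo() ! {
	mut tree := tst.new(path: '/tmp/tst_prefix_demo.db', reset: true)!
	tree.set('tea', 'v1'.bytes())!
	tree.set('team', 'v2'.bytes())!
	tree.set('test', 'v3'.bytes())!

	keys := tree.list('te')! // all three keys share the prefix 'te'
	assert keys.len == 3

	values := tree.getall('tea')! // values for 'tea' and 'team'
	assert values.len == 2
}
```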
### 5. Serialization/Deserialization
```mermaid
flowchart TD
A[Serialize Node] --> B[Add Version Byte]
B --> C[Add Character]
C --> D[Add is_end_of_string Flag]
D --> E[Add Value if End of String]
E --> F[Add Child IDs]
F --> G[Return Serialized Data]
H[Deserialize Node] --> I[Read Version Byte]
I --> J[Read Character]
J --> K[Read is_end_of_string Flag]
K --> L[Read Value if End of String]
L --> M[Read Child IDs]
M --> N[Return Node Object]
```
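Spelled out as a byte layout (assuming `add_bytes` writes a u32 length prefix followed by the raw bytes, which is what the matching decoder expects; when the node is not the end of a key a zero length is written so the layout stays fixed):
```
[ version          : u8  ]
[ character        : u8  ]
[ is_end_of_string : u8  ]  0 or 1
[ value            : u32 length + bytes ]  length is 0 when not end of key
[ left_id          : u32 ]
[ middle_id        : u32 ]
[ right_id         : u32 ]
```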
## Implementation Steps
1. **Create Basic Structure (tst.v)**:
- Define the Node and TST structures
- Implement constructor function
2. **Implement Serialization (serialize.v)**:
- Create serialize_node and deserialize_node functions
- Ensure compatibility with the encoder
3. **Implement Core Operations**:
- set (insert)
- get (search)
- delete
- list (prefix search)
- getall (get all values with prefix)
4. **Create Test Files**:
- Basic functionality tests
- Prefix search tests
- Serialization tests
- Performance tests for various dataset sizes
5. **Add Documentation**:
- Create README.md with usage examples
- Add inline documentation
## File Structure
```
tst/
├── tst.v # Main implementation
├── serialize.v # Serialization functions
├── tst_test.v # Basic tests
├── prefix_test.v # Prefix search tests
├── serialize_test.v # Serialization tests
└── README.md # Documentation
```
## Performance Considerations
1. **Unbalanced Tree Characteristics**:
- Performance may degrade over time as the tree becomes unbalanced
- Worst-case time complexity for operations could approach O(n) instead of O(log n)
- Best for datasets where keys are inserted in a somewhat random order
2. **Optimizations**:
- Efficient node serialization to minimize storage requirements
- Careful memory management during traversal operations
- Optimized string handling for prefix operations
3. **Database Interaction**:
- Minimize database reads/writes
- Only create new nodes when necessary
4. **Large Dataset Handling**:
- Efficient prefix search algorithm
- Optimize node structure for millions of entries
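## Usage Example
A minimal sketch of the planned public API (paths, keys and values are illustrative):
```v
import freeflowuniverse.herolib.data.tst

fn run() ! {
	// open (or reset) a tree backed by ourdb
	mut tree := tst.new(path: '/tmp/tst_example.db', reset: true)!

	// insert a few key/value pairs
	tree.set('hello', 'world'.bytes())!
	tree.set('help', 'me'.bytes())!

	// exact lookup
	value := tree.get('hello')!
	println(value.bytestr()) // world

	// prefix listing
	keys := tree.list('hel')!
	println(keys) // ['hello', 'help'] (order follows tree traversal)

	// delete a key
	tree.delete('help')!
}

fn main() {
	run() or { panic(err) }
}
```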

198
lib/data/tst/prefix_test.v Normal file
View File

@@ -0,0 +1,198 @@
module tst
import os
// Define a struct for test cases
struct PrefixTestCase {
prefix string
expected_count int
}
// Test more complex prefix search scenarios
fn test_complex_prefix_search() {
mut tree := new(path: 'testdata/test_complex_prefix.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Insert a larger set of keys with various prefixes
keys := [
'a', 'ab', 'abc', 'abcd', 'abcde',
'b', 'bc', 'bcd', 'bcde',
'c', 'cd', 'cde',
'x', 'xy', 'xyz',
'test', 'testing', 'tested', 'tests',
'team', 'teammate', 'teams',
'tech', 'technology', 'technical'
]
// Insert all keys
for i, key in keys {
value := 'value-${i}'.bytes()
tree.set(key, value) or {
assert false, 'Failed to set key "${key}": ${err}'
return
}
}
// Test various prefix searches
test_cases := [
// prefix, expected_count
PrefixTestCase{'a', 5},
PrefixTestCase{'ab', 4},
PrefixTestCase{'abc', 3},
PrefixTestCase{'abcd', 2},
PrefixTestCase{'abcde', 1},
PrefixTestCase{'b', 4},
PrefixTestCase{'bc', 3},
PrefixTestCase{'t', 10},
PrefixTestCase{'te', 7},
PrefixTestCase{'tes', 4},
PrefixTestCase{'test', 4},
PrefixTestCase{'team', 3},
PrefixTestCase{'tech', 3},
PrefixTestCase{'x', 3},
PrefixTestCase{'xy', 2},
PrefixTestCase{'xyz', 1},
PrefixTestCase{'z', 0}, // No matches
PrefixTestCase{'', keys.len} // All keys
]
for test_case in test_cases {
prefix := test_case.prefix
expected_count := test_case.expected_count
result := tree.list(prefix) or {
if expected_count > 0 {
assert false, 'Failed to list keys with prefix "${prefix}": ${err}'
}
[]string{}
}
assert result.len == expected_count, 'For prefix "${prefix}": expected ${expected_count} keys, got ${result.len}'
// Verify each result starts with the prefix
for key in result {
assert key.starts_with(prefix), 'Key "${key}" does not start with prefix "${prefix}"'
}
}
}
// Test prefix search with longer strings and special characters
fn test_special_prefix_search() {
mut tree := new(path: 'testdata/test_special_prefix.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Insert keys with special characters and longer strings
special_keys := [
'user:1:profile', 'user:1:settings', 'user:1:posts',
'user:2:profile', 'user:2:settings',
'config:app:name', 'config:app:version', 'config:app:debug',
'config:db:host', 'config:db:port',
'data:2023:01:01', 'data:2023:01:02', 'data:2023:02:01',
'very:long:key:with:multiple:segments:and:special:characters:!@#$%^&*()',
'another:very:long:key:with:different:segments'
]
// Insert all keys
for i, key in special_keys {
value := 'special-value-${i}'.bytes()
tree.set(key, value) or {
assert false, 'Failed to set key "${key}": ${err}'
return
}
}
// Test various prefix searches
special_test_cases := [
// prefix, expected_count
PrefixTestCase{'user:', 5},
PrefixTestCase{'user:1:', 3},
PrefixTestCase{'user:2:', 2},
PrefixTestCase{'config:', 5},
PrefixTestCase{'config:app:', 3},
PrefixTestCase{'config:db:', 2},
PrefixTestCase{'data:2023:', 3},
PrefixTestCase{'data:2023:01:', 2},
PrefixTestCase{'very:', 1},
PrefixTestCase{'another:', 1},
PrefixTestCase{'nonexistent:', 0}
]
for test_case in special_test_cases {
prefix := test_case.prefix
expected_count := test_case.expected_count
result := tree.list(prefix) or {
if expected_count > 0 {
assert false, 'Failed to list keys with prefix "${prefix}": ${err}'
}
[]string{}
}
assert result.len == expected_count, 'For prefix "${prefix}": expected ${expected_count} keys, got ${result.len}'
// Verify each result starts with the prefix
for key in result {
assert key.starts_with(prefix), 'Key "${key}" does not start with prefix "${prefix}"'
}
}
}
// Test prefix search performance with a larger dataset
fn test_prefix_search_performance() {
mut tree := new(path: 'testdata/test_performance.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Generate a larger dataset (1000 keys)
prefixes := ['user', 'config', 'data', 'app', 'service', 'api', 'test', 'dev', 'prod', 'staging']
mut large_keys := []string{}
for prefix in prefixes {
for i in 0..100 {
large_keys << '${prefix}:${i}:name'
}
}
// Insert all keys
for i, key in large_keys {
value := 'performance-value-${i}'.bytes()
tree.set(key, value) or {
assert false, 'Failed to set key "${key}": ${err}'
return
}
}
// Test prefix search performance
for prefix in prefixes {
result := tree.list(prefix + ':') or {
assert false, 'Failed to list keys with prefix "${prefix}:": ${err}'
return
}
assert result.len == 100, 'For prefix "${prefix}:": expected 100 keys, got ${result.len}'
// Verify each result starts with the prefix
for key in result {
assert key.starts_with(prefix + ':'), 'Key "${key}" does not start with prefix "${prefix}:"'
}
}
// Test more specific prefixes
for prefix in prefixes {
for i in 0..10 {
specific_prefix := '${prefix}:${i}'
result := tree.list(specific_prefix) or {
assert false, 'Failed to list keys with prefix "${specific_prefix}": ${err}'
return
}
assert result.len == 1, 'For prefix "${specific_prefix}": expected 1 key, got ${result.len}'
assert result[0] == '${specific_prefix}:name', 'Expected "${specific_prefix}:name", got "${result[0]}"'
}
}
}

72
lib/data/tst/serialize.v Normal file
View File

@@ -0,0 +1,72 @@
module tst
import freeflowuniverse.herolib.data.encoder
const version = u8(1) // Current binary format version
// Serializes a node to bytes for storage
fn serialize_node(node Node) []u8 {
mut e := encoder.new()
// Add version byte
e.add_u8(version)
// Add character
e.add_u8(node.character)
// Add is_end_of_string flag
e.add_u8(if node.is_end_of_string { u8(1) } else { u8(0) })
// Add value if this is the end of a string
if node.is_end_of_string {
e.add_bytes(node.value)
} else {
e.add_u32(0) // Empty value length
}
// Add child IDs
e.add_u32(node.left_id)
e.add_u32(node.middle_id)
e.add_u32(node.right_id)
return e.data
}
// Deserializes bytes to a node
fn deserialize_node(data []u8) !Node {
mut d := encoder.decoder_new(data)
// Read and verify version
version_byte := d.get_u8()!
if version_byte != version {
return error('Invalid version byte: expected ${version}, got ${version_byte}')
}
// Read character
character := d.get_u8()!
// Read is_end_of_string flag
is_end_of_string := d.get_u8()! == 1
// Read value if this is the end of a string
mut value := []u8{}
if is_end_of_string {
value = d.get_bytes()!
} else {
_ = d.get_u32()! // Skip empty value length
}
// Read child IDs
left_id := d.get_u32()!
middle_id := d.get_u32()!
right_id := d.get_u32()!
return Node{
character: character
is_end_of_string: is_end_of_string
value: value
left_id: left_id
middle_id: middle_id
right_id: right_id
}
}

View File

@@ -0,0 +1,174 @@
module tst
// Test serialization and deserialization of nodes
fn test_node_serialization() {
// Create a leaf node (end of string)
leaf_node := Node{
character: `a`
is_end_of_string: true
value: 'test value'.bytes()
left_id: 0
middle_id: 0
right_id: 0
}
// Serialize the leaf node
leaf_data := serialize_node(leaf_node)
// Deserialize and verify
deserialized_leaf := deserialize_node(leaf_data) or {
assert false, 'Failed to deserialize leaf node: ${err}'
return
}
assert deserialized_leaf.character == leaf_node.character, 'Character mismatch'
assert deserialized_leaf.is_end_of_string == leaf_node.is_end_of_string, 'is_end_of_string mismatch'
assert deserialized_leaf.value.bytestr() == leaf_node.value.bytestr(), 'Value mismatch'
assert deserialized_leaf.left_id == leaf_node.left_id, 'left_id mismatch'
assert deserialized_leaf.middle_id == leaf_node.middle_id, 'middle_id mismatch'
assert deserialized_leaf.right_id == leaf_node.right_id, 'right_id mismatch'
// Create an internal node (not end of string)
internal_node := Node{
character: `b`
is_end_of_string: false
value: []u8{}
left_id: 10
middle_id: 20
right_id: 30
}
// Serialize the internal node
internal_data := serialize_node(internal_node)
// Deserialize and verify
deserialized_internal := deserialize_node(internal_data) or {
assert false, 'Failed to deserialize internal node: ${err}'
return
}
assert deserialized_internal.character == internal_node.character, 'Character mismatch'
assert deserialized_internal.is_end_of_string == internal_node.is_end_of_string, 'is_end_of_string mismatch'
assert deserialized_internal.value.len == 0, 'Value should be empty'
assert deserialized_internal.left_id == internal_node.left_id, 'left_id mismatch'
assert deserialized_internal.middle_id == internal_node.middle_id, 'middle_id mismatch'
assert deserialized_internal.right_id == internal_node.right_id, 'right_id mismatch'
// Create a root node
root_node := Node{
character: 0 // null character for root
is_end_of_string: false
value: []u8{}
left_id: 5
middle_id: 15
right_id: 25
}
// Serialize the root node
root_data := serialize_node(root_node)
// Deserialize and verify
deserialized_root := deserialize_node(root_data) or {
assert false, 'Failed to deserialize root node: ${err}'
return
}
assert deserialized_root.character == root_node.character, 'Character mismatch'
assert deserialized_root.is_end_of_string == root_node.is_end_of_string, 'is_end_of_string mismatch'
assert deserialized_root.value.len == 0, 'Value should be empty'
assert deserialized_root.left_id == root_node.left_id, 'left_id mismatch'
assert deserialized_root.middle_id == root_node.middle_id, 'middle_id mismatch'
assert deserialized_root.right_id == root_node.right_id, 'right_id mismatch'
}
// Test serialization with special characters and larger values
fn test_special_serialization() {
// Create a node with special character
special_node := Node{
character: `!` // special character
is_end_of_string: true
value: 'special value with spaces and symbols: !@#$%^&*()'.bytes()
left_id: 42
middle_id: 99
right_id: 123
}
// Serialize the special node
special_data := serialize_node(special_node)
// Deserialize and verify
deserialized_special := deserialize_node(special_data) or {
assert false, 'Failed to deserialize special node: ${err}'
return
}
assert deserialized_special.character == special_node.character, 'Character mismatch'
assert deserialized_special.is_end_of_string == special_node.is_end_of_string, 'is_end_of_string mismatch'
assert deserialized_special.value.bytestr() == special_node.value.bytestr(), 'Value mismatch'
assert deserialized_special.left_id == special_node.left_id, 'left_id mismatch'
assert deserialized_special.middle_id == special_node.middle_id, 'middle_id mismatch'
assert deserialized_special.right_id == special_node.right_id, 'right_id mismatch'
// Create a node with a large value
mut large_value := []u8{len: 1000}
for i in 0..1000 {
large_value[i] = u8(i % 256)
}
large_node := Node{
character: `z`
is_end_of_string: true
value: large_value
left_id: 1
middle_id: 2
right_id: 3
}
// Serialize the large node
large_data := serialize_node(large_node)
// Deserialize and verify
deserialized_large := deserialize_node(large_data) or {
assert false, 'Failed to deserialize large node: ${err}'
return
}
assert deserialized_large.character == large_node.character, 'Character mismatch'
assert deserialized_large.is_end_of_string == large_node.is_end_of_string, 'is_end_of_string mismatch'
assert deserialized_large.value.len == large_node.value.len, 'Value length mismatch'
// Check each byte of the large value
for i in 0..large_node.value.len {
assert deserialized_large.value[i] == large_node.value[i], 'Value byte mismatch at index ${i}'
}
assert deserialized_large.left_id == large_node.left_id, 'left_id mismatch'
assert deserialized_large.middle_id == large_node.middle_id, 'middle_id mismatch'
assert deserialized_large.right_id == large_node.right_id, 'right_id mismatch'
}
// Test serialization version handling
fn test_version_handling() {
// Create a valid node
valid_node := Node{
character: `a`
is_end_of_string: true
value: 'test'.bytes()
left_id: 0
middle_id: 0
right_id: 0
}
// Serialize the node
mut valid_data := serialize_node(valid_node)
// Corrupt the version byte
valid_data[0] = 99 // Invalid version
// Attempt to deserialize with invalid version
deserialize_node(valid_data) or {
assert err.str().contains('Invalid version byte'), 'Expected version error, got: ${err}'
return
}
assert false, 'Expected error for invalid version byte'
}

479
lib/data/tst/tst.v Normal file
View File

@@ -0,0 +1,479 @@
module tst
import freeflowuniverse.herolib.data.ourdb
// Represents a node in the ternary search tree
struct Node {
mut:
character u8 // The character stored at this node
is_end_of_string bool // Flag indicating if this node represents the end of a key
value []u8 // The value associated with the key (if this node is the end of a key)
left_id u32 // Database ID for left child (character < node.character)
middle_id u32 // Database ID for middle child (character == node.character)
right_id u32 // Database ID for right child (character > node.character)
}
// TST represents a ternary search tree data structure
@[heap]
pub struct TST {
mut:
db &ourdb.OurDB // Database for persistent storage
root_id u32 // Database ID of the root node
}
@[params]
pub struct NewArgs {
pub mut:
path string
reset bool
}
// Creates a new ternary search tree with the specified database path
pub fn new(args NewArgs) !TST {
println('Creating new TST with path: ${args.path}, reset: ${args.reset}')
mut db := ourdb.new(
path: args.path
record_size_max: 1024 * 1024 // 1MB
incremental_mode: true
reset: args.reset
)!
mut root_id := u32(1) // First ID in ourdb is now 1 instead of 0
if db.get_next_id()! == 1 {
// Create a new root node if the database is empty
// We'll use a null character (0) for the root node
println('Creating new root node')
root := Node{
character: 0
is_end_of_string: false
value: []u8{}
left_id: 0
middle_id: 0
right_id: 0
}
root_id = db.set(data: serialize_node(root))!
println('Root node created with ID: ${root_id}')
assert root_id == 1 // First ID is now 1
} else {
// Database already exists, just get the root node
println('Database already exists, getting root node')
root_data := db.get(1)! // Get root node with ID 1
root := deserialize_node(root_data)!
println('Root node retrieved: character=${root.character}, is_end=${root.is_end_of_string}, left=${root.left_id}, middle=${root.middle_id}, right=${root.right_id}')
}
return TST{
db: &db
root_id: root_id
}
}
// Sets a key-value pair in the tree
pub fn (mut self TST) set(key string, value []u8) ! {
println('Setting key: "${key}"')
if key.len == 0 {
return error('Empty key not allowed')
}
// If the tree is empty, create a root node
if self.root_id == 0 {
println('Tree is empty, creating root node')
root := Node{
character: 0
is_end_of_string: false
value: []u8{}
left_id: 0
middle_id: 0
right_id: 0
}
self.root_id = self.db.set(data: serialize_node(root))!
println('Root node created with ID: ${self.root_id}')
}
self.insert_recursive(self.root_id, key, 0, value)!
println('Key "${key}" inserted successfully')
}
// Recursive helper function for insertion
fn (mut self TST) insert_recursive(node_id u32, key string, pos int, value []u8) !u32 {
// If we've reached the end of the tree, create a new node
if node_id == 0 {
println('Creating new node for character: ${key[pos]} (${key[pos].ascii_str()}) at position ${pos}')
new_node := Node{
character: key[pos]
is_end_of_string: pos == key.len - 1
value: if pos == key.len - 1 { value } else { []u8{} }
left_id: 0
middle_id: 0
right_id: 0
}
new_id := self.db.set(data: serialize_node(new_node))!
println('New node created with ID: ${new_id}, character: ${key[pos]} (${key[pos].ascii_str()}), is_end: ${pos == key.len - 1}')
return new_id
}
// Get the current node
mut node := deserialize_node(self.db.get(node_id)!)!
println('Node ${node_id}: character=${node.character} (${node.character.ascii_str()}), is_end=${node.is_end_of_string}, left=${node.left_id}, middle=${node.middle_id}, right=${node.right_id}')
// Compare the current character with the node's character
if key[pos] < node.character {
println('Going left for character: ${key[pos]} (${key[pos].ascii_str()}) < ${node.character} (${node.character.ascii_str()})')
// Go left
node.left_id = self.insert_recursive(node.left_id, key, pos, value)!
self.db.set(id: node_id, data: serialize_node(node))!
} else if key[pos] > node.character {
println('Going right for character: ${key[pos]} (${key[pos].ascii_str()}) > ${node.character} (${node.character.ascii_str()})')
// Go right
node.right_id = self.insert_recursive(node.right_id, key, pos, value)!
self.db.set(id: node_id, data: serialize_node(node))!
} else {
// Equal - go middle
if pos == key.len - 1 {
println('End of key reached, setting is_end_of_string=true')
// We've reached the end of the key
node.is_end_of_string = true
node.value = value
self.db.set(id: node_id, data: serialize_node(node))!
} else {
println('Going middle for next character: ${key[pos+1]} (${key[pos+1].ascii_str()})')
// Move to the next character in the key
node.middle_id = self.insert_recursive(node.middle_id, key, pos + 1, value)!
self.db.set(id: node_id, data: serialize_node(node))!
}
}
return node_id
}
// Gets a value by key from the tree
pub fn (mut self TST) get(key string) ![]u8 {
println('Getting key: "${key}"')
if key.len == 0 {
return error('Empty key not allowed')
}
if self.root_id == 0 {
return error('Tree is empty')
}
return self.search_recursive(self.root_id, key, 0)!
}
// Recursive helper function for search
fn (mut self TST) search_recursive(node_id u32, key string, pos int) ![]u8 {
if node_id == 0 {
println('Node ID is 0, key not found')
return error('Key not found')
}
node := deserialize_node(self.db.get(node_id)!)!
println('Searching node ${node_id}: character=${node.character} (${node.character.ascii_str()}), is_end=${node.is_end_of_string}, left=${node.left_id}, middle=${node.middle_id}, right=${node.right_id}, pos=${pos}')
if key[pos] < node.character {
println('Going left: ${key[pos]} (${key[pos].ascii_str()}) < ${node.character} (${node.character.ascii_str()})')
// Go left
return self.search_recursive(node.left_id, key, pos)!
} else if key[pos] > node.character {
println('Going right: ${key[pos]} (${key[pos].ascii_str()}) > ${node.character} (${node.character.ascii_str()})')
// Go right
return self.search_recursive(node.right_id, key, pos)!
} else {
// Equal
println('Character matches: ${key[pos]} (${key[pos].ascii_str()}) == ${node.character} (${node.character.ascii_str()})')
if pos == key.len - 1 {
// We've reached the end of the key
if node.is_end_of_string {
println('End of key reached and is_end_of_string=true, returning value')
return node.value
} else {
println('End of key reached but is_end_of_string=false, key not found')
return error('Key not found')
}
} else {
// Move to the next character in the key
println('Moving to next character: ${key[pos+1]} (${key[pos+1].ascii_str()})')
return self.search_recursive(node.middle_id, key, pos + 1)!
}
}
}
// Deletes a key from the tree
pub fn (mut self TST) delete(key string) ! {
println('Deleting key: "${key}"')
if key.len == 0 {
return error('Empty key not allowed')
}
if self.root_id == 0 {
return error('Tree is empty')
}
self.delete_recursive(self.root_id, key, 0)!
println('Key "${key}" deleted successfully')
}
// Recursive helper function for deletion
fn (mut self TST) delete_recursive(node_id u32, key string, pos int) !bool {
if node_id == 0 {
println('Node ID is 0, key not found')
return error('Key not found')
}
mut node := deserialize_node(self.db.get(node_id)!)!
println('Deleting from node ${node_id}: character=${node.character} (${node.character.ascii_str()}), is_end=${node.is_end_of_string}, left=${node.left_id}, middle=${node.middle_id}, right=${node.right_id}, pos=${pos}')
mut deleted := false
if key[pos] < node.character {
println('Going left: ${key[pos]} (${key[pos].ascii_str()}) < ${node.character} (${node.character.ascii_str()})')
// Go left
deleted = self.delete_recursive(node.left_id, key, pos)!
if deleted && node.left_id != 0 {
// Check if the left child has been deleted
if _ := self.db.get(node.left_id) {
// Child still exists
println('Left child still exists')
} else {
// Child has been deleted
println('Left child has been deleted, updating node')
node.left_id = 0
self.db.set(id: node_id, data: serialize_node(node))!
}
}
} else if key[pos] > node.character {
println('Going right: ${key[pos]} (${key[pos].ascii_str()}) > ${node.character} (${node.character.ascii_str()})')
// Go right
deleted = self.delete_recursive(node.right_id, key, pos)!
if deleted && node.right_id != 0 {
// Check if the right child has been deleted
if _ := self.db.get(node.right_id) {
// Child still exists
println('Right child still exists')
} else {
// Child has been deleted
println('Right child has been deleted, updating node')
node.right_id = 0
self.db.set(id: node_id, data: serialize_node(node))!
}
}
} else {
// Equal
println('Character matches: ${key[pos]} (${key[pos].ascii_str()}) == ${node.character} (${node.character.ascii_str()})')
if pos == key.len - 1 {
// We've reached the end of the key
if node.is_end_of_string {
// Found the key
println('End of key reached and is_end_of_string=true, found the key')
if node.left_id == 0 && node.middle_id == 0 && node.right_id == 0 {
// Node has no children, delete it
println('Node has no children, deleting it')
self.db.delete(node_id)!
return true
} else {
// Node has children, just mark it as not end of string
println('Node has children, marking it as not end of string')
node.is_end_of_string = false
node.value = []u8{}
self.db.set(id: node_id, data: serialize_node(node))!
return false
}
} else {
println('End of key reached but is_end_of_string=false, key not found')
return error('Key not found')
}
} else {
// Move to the next character in the key
println('Moving to next character: ${key[pos+1]} (${key[pos+1].ascii_str()})')
deleted = self.delete_recursive(node.middle_id, key, pos + 1)!
if deleted && node.middle_id != 0 {
// Check if the middle child has been deleted
if _ := self.db.get(node.middle_id) {
// Child still exists
println('Middle child still exists')
} else {
// Child has been deleted
println('Middle child has been deleted, updating node')
node.middle_id = 0
self.db.set(id: node_id, data: serialize_node(node))!
}
}
}
}
// If this node has no children and is not the end of a string, delete it
if node.left_id == 0 && node.middle_id == 0 && node.right_id == 0 && !node.is_end_of_string {
println('Node has no children and is not end of string, deleting it')
self.db.delete(node_id)!
return true
}
return deleted
}
// Lists all keys with a given prefix
pub fn (mut self TST) list(prefix string) ![]string {
println('Listing keys with prefix: "${prefix}"')
mut result := []string{}
// Handle empty prefix case - will return all keys
if prefix.len == 0 {
println('Empty prefix, collecting all keys')
self.collect_all_keys(self.root_id, '', mut result)!
println('Found ${result.len} keys: ${result}')
return result
}
// Find the node corresponding to the prefix
println('Finding node for prefix: "${prefix}"')
// Start from the root and traverse to the node corresponding to the last character of the prefix
mut node_id := self.root_id
mut pos := 0
mut current_path := ''
// Traverse the tree to find the node corresponding to the prefix
for pos < prefix.len && node_id != 0 {
node := deserialize_node(self.db.get(node_id)!)!
println('Examining node ${node_id}: character=${node.character} (${node.character.ascii_str()}), is_end=${node.is_end_of_string}, left=${node.left_id}, middle=${node.middle_id}, right=${node.right_id}, pos=${pos}, current_path="${current_path}"')
if prefix[pos] < node.character {
println('Going left: ${prefix[pos]} (${prefix[pos].ascii_str()}) < ${node.character} (${node.character.ascii_str()})')
node_id = node.left_id
} else if prefix[pos] > node.character {
println('Going right: ${prefix[pos]} (${prefix[pos].ascii_str()}) > ${node.character} (${node.character.ascii_str()})')
node_id = node.right_id
} else {
// Character matches
println('Character matches: ${prefix[pos]} (${prefix[pos].ascii_str()}) == ${node.character} (${node.character.ascii_str()})')
// Update the current path
if node.character != 0 { // Skip the root node character
current_path += node.character.ascii_str()
println('Updated path: "${current_path}"')
}
if pos == prefix.len - 1 {
// We've reached the end of the prefix
println('Reached end of prefix')
// If this node is the end of a string, add it to the result
if node.is_end_of_string {
println('Node is end of string, adding key: "${current_path}"')
result << current_path
}
// Collect all keys from the middle child
if node.middle_id != 0 {
println('Collecting from middle child with path: "${current_path}"')
self.collect_keys_with_prefix(node.middle_id, current_path, prefix, mut result)!
}
break
} else {
// Move to the next character in the prefix
println('Moving to next character in prefix: ${prefix[pos+1]} (${prefix[pos+1].ascii_str()})')
node_id = node.middle_id
pos++
}
}
}
if node_id == 0 || pos < prefix.len - 1 {
// Prefix not found or we didn't reach the end of the prefix
println('Prefix not found or didn\'t reach end of prefix, returning empty result')
return []string{}
}
println('Found ${result.len} keys with prefix "${prefix}": ${result}')
return result
}
// Helper function to collect all keys with a given prefix
fn (mut self TST) collect_keys_with_prefix(node_id u32, current_path string, prefix string, mut result []string) ! {
if node_id == 0 {
return
}
node := deserialize_node(self.db.get(node_id)!)!
println('Collecting keys with prefix from node ${node_id}: character=${node.character} (${node.character.ascii_str()}), is_end=${node.is_end_of_string}, left=${node.left_id}, middle=${node.middle_id}, right=${node.right_id}, current_path="${current_path}"')
// Construct the path for this node
path := current_path + node.character.ascii_str()
println('Path for node: "${path}"')
// If this node is the end of a string, add it to the result
if node.is_end_of_string {
println('Node is end of string, adding key: "${path}"')
result << path
}
// Recursively collect keys from the middle child (keys that extend this prefix)
if node.middle_id != 0 {
println('Collecting from middle child with path: "${path}"')
self.collect_keys_with_prefix(node.middle_id, path, prefix, mut result)!
}
// Also collect keys from left and right children
// This is necessary because multiple keys might share the same prefix
if node.left_id != 0 {
println('Collecting from left child with path: "${current_path}"')
self.collect_keys_with_prefix(node.left_id, current_path, prefix, mut result)!
}
if node.right_id != 0 {
println('Collecting from right child with path: "${current_path}"')
self.collect_keys_with_prefix(node.right_id, current_path, prefix, mut result)!
}
}
// Helper function to recursively collect all keys under a node
fn (mut self TST) collect_all_keys(node_id u32, current_path string, mut result []string) ! {
if node_id == 0 {
return
}
node := deserialize_node(self.db.get(node_id)!)!
println('Collecting all from node ${node_id}: character=${node.character} (${node.character.ascii_str()}), is_end=${node.is_end_of_string}, left=${node.left_id}, middle=${node.middle_id}, right=${node.right_id}, current_path="${current_path}"')
// Construct the path for this node
path := current_path + node.character.ascii_str()
println('Path for node: "${path}"')
// If this node is the end of a string, add it to the result
if node.is_end_of_string {
println('Node is end of string, adding key: "${path}"')
result << path
}
// Recursively collect keys from all children
if node.left_id != 0 {
println('Collecting all from left child with path: "${current_path}"')
self.collect_all_keys(node.left_id, current_path, mut result)!
}
if node.middle_id != 0 {
println('Collecting all from middle child with path: "${path}"')
self.collect_all_keys(node.middle_id, path, mut result)!
}
if node.right_id != 0 {
println('Collecting all from right child with path: "${current_path}"')
self.collect_all_keys(node.right_id, current_path, mut result)!
}
}
// Gets all values for keys with a given prefix
pub fn (mut self TST) getall(prefix string) ![][]u8 {
println('Getting all values with prefix: "${prefix}"')
// Get all matching keys
keys := self.list(prefix)!
// Get values for each key
mut values := [][]u8{}
for key in keys {
if value := self.get(key) {
values << value
}
}
println('Found ${values.len} values with prefix "${prefix}"')
return values
}
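// Minimal usage sketch for the prefix API above (illustrative only, assuming the
// new/set/list/getall signatures shown in this file; the database path is
// hypothetical):
//
//	mut tree := new(path: '/tmp/example_tst.db', reset: true)!
//	tree.set('hello', 'world'.bytes())!
//	tree.set('help', 'me'.bytes())!
//	keys := tree.list('hel')!     // expected: ['hello', 'help'] (order may vary)
//	values := tree.getall('hel')! // raw []u8 values for every matching key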

235
lib/data/tst/tst_test.v Normal file
View File

@@ -0,0 +1,235 @@
module tst
import os
fn testsuite_begin() {
// Clean up any test files from previous runs
if os.exists('testdata') {
os.rmdir_all('testdata') or {}
}
os.mkdir('testdata') or {}
}
fn testsuite_end() {
// Clean up test files
if os.exists('testdata') {
os.rmdir_all('testdata') or {}
}
}
// Test basic set and get operations
fn test_set_get() {
mut tree := new(path: 'testdata/test_set_get.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Test setting and getting values
tree.set('hello', 'world'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('help', 'me'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('test', 'value'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
// Test getting values
value1 := tree.get('hello') or {
assert false, 'Failed to get key: ${err}'
return
}
assert value1.bytestr() == 'world', 'Expected "world", got "${value1.bytestr()}"'
value2 := tree.get('help') or {
assert false, 'Failed to get key: ${err}'
return
}
assert value2.bytestr() == 'me', 'Expected "me", got "${value2.bytestr()}"'
value3 := tree.get('test') or {
assert false, 'Failed to get key: ${err}'
return
}
assert value3.bytestr() == 'value', 'Expected "value", got "${value3.bytestr()}"'
// Test getting a non-existent key
tree.get('nonexistent') or {
assert err.str() == 'Key not found', 'Expected "Key not found", got "${err}"'
return
}
assert false, 'Expected error for non-existent key'
}
// Test deletion
fn test_delete() {
mut tree := new(path: 'testdata/test_delete.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Set some keys
tree.set('hello', 'world'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('help', 'me'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('test', 'value'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
// Delete a key
tree.delete('hello') or {
assert false, 'Failed to delete key: ${err}'
return
}
// Verify the key was deleted
tree.get('hello') or {
assert err.str() == 'Key not found', 'Expected "Key not found", got "${err}"'
return
}
assert false, 'Expected error for deleted key'
}
// Test prefix search
fn test_list_prefix() {
mut tree := new(path: 'testdata/test_list_prefix.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Set some keys with common prefixes
tree.set('hello', 'world'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('help', 'me'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('test', 'value'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('testing', 'another'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('tested', 'past'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
// Test listing keys with prefix 'hel'
hel_keys := tree.list('hel') or {
assert false, 'Failed to list keys with prefix: ${err}'
return
}
assert hel_keys.len == 2, 'Expected 2 keys with prefix "hel", got ${hel_keys.len}'
assert 'hello' in hel_keys, 'Expected "hello" in keys with prefix "hel"'
assert 'help' in hel_keys, 'Expected "help" in keys with prefix "hel"'
// Test listing keys with prefix 'test'
test_keys := tree.list('test') or {
assert false, 'Failed to list keys with prefix: ${err}'
return
}
assert test_keys.len == 3, 'Expected 3 keys with prefix "test", got ${test_keys.len}'
assert 'test' in test_keys, 'Expected "test" in keys with prefix "test"'
assert 'testing' in test_keys, 'Expected "testing" in keys with prefix "test"'
assert 'tested' in test_keys, 'Expected "tested" in keys with prefix "test"'
// Test listing all keys
all_keys := tree.list('') or {
assert false, 'Failed to list all keys: ${err}'
return
}
assert all_keys.len == 5, 'Expected 5 keys in total, got ${all_keys.len}'
}
// Test getall function
fn test_getall() {
mut tree := new(path: 'testdata/test_getall.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Set some keys with common prefixes
tree.set('hello', 'world'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('help', 'me'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('test', 'value'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
// Test getting all values with prefix 'hel'
hel_values := tree.getall('hel') or {
assert false, 'Failed to get values with prefix: ${err}'
return
}
assert hel_values.len == 2, 'Expected 2 values with prefix "hel", got ${hel_values.len}'
// Convert byte arrays to strings for easier comparison
mut hel_strings := []string{}
for val in hel_values {
hel_strings << val.bytestr()
}
assert 'world' in hel_strings, 'Expected "world" in values with prefix "hel"'
assert 'me' in hel_strings, 'Expected "me" in values with prefix "hel"'
}
// Test persistence
fn test_persistence() {
// Create a new TST and add some data
{
mut tree := new(path: 'testdata/test_persistence.db', reset: true) or {
assert false, 'Failed to create TST: ${err}'
return
}
tree.set('hello', 'world'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
tree.set('test', 'value'.bytes()) or {
assert false, 'Failed to set key: ${err}'
return
}
}
// Create a new TST with the same path but don't reset
{
mut tree := new(path: 'testdata/test_persistence.db', reset: false) or {
assert false, 'Failed to create TST: ${err}'
return
}
// Verify the data persisted
value1 := tree.get('hello') or {
assert false, 'Failed to get key: ${err}'
return
}
assert value1.bytestr() == 'world', 'Expected "world", got "${value1.bytestr()}"'
value2 := tree.get('test') or {
assert false, 'Failed to get key: ${err}'
return
}
assert value2.bytestr() == 'value', 'Expected "value", got "${value2.bytestr()}"'
}
}
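// To run this test file (assuming a standard V toolchain and the module path
// lib/data/tst shown above):
//
//	v test lib/data/tst/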