style: improve code formatting; refactor module imports

- Apply consistent alignment for struct fields and parameters
- Standardize string literal delimiters to single quotes
- Refactor module import strategy in `models` package
- Enhance asset formatting for precise decimal display
- Remove unused imports and redundant `+}` syntax artifacts
Mahmoud-Emad
2025-09-03 11:36:02 +03:00
parent 4a82bde192
commit dd400ba6fa
55 changed files with 1518 additions and 1503 deletions
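The conventions listed in the commit message are easiest to see in one place. The following is a minimal, hypothetical V sketch (not taken from any of the changed files) illustrating the target style applied throughout the diffs below: single-quoted string literals, vfmt-style spacing inside `or { }` blocks, and vertically aligned struct fields.

// Hypothetical example of the formatting style this commit standardizes on.
module main

struct Record {
pub mut:
	id          u32
	name        string
	description string
}

fn main() {
	// before: name: "example"   id: args.id or {0}
	// after:  name: 'example'   id: args.id or { 0 }
	r := Record{
		id:          1
		name:        'example'
		description: 'fields aligned the way vfmt prints them'
	}
	println('record ${r.id}: ${r.name}')
}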

View File

@@ -52,7 +52,6 @@ println(' - API title: ${spec.info.title}')
println(' - API version: ${spec.info.version}') println(' - API version: ${spec.info.version}')
println(' - Methods available: ${spec.methods.len}') println(' - Methods available: ${spec.methods.len}')
// 2. List all services // 2. List all services
println('\n2. Listing all services...') println('\n2. Listing all services...')
services := client.service_list() or { services := client.service_list() or {

View File

@@ -3,9 +3,7 @@ module builder
import os import os
import freeflowuniverse.herolib.core.texttools import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.core.pathlib import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.ui.console import freeflowuniverse.herolib.ui.console
import v.embed_file
const heropath_ = os.dir(@FILE) + '/../' const heropath_ = os.dir(@FILE) + '/../'
@@ -52,10 +50,10 @@ pub mut:
pub fn (mut node Node) hero_install(args HeroInstallArgs) ! { pub fn (mut node Node) hero_install(args HeroInstallArgs) ! {
console.print_debug('install hero') console.print_debug('install hero')
mut bs := bootstrapper() bootstrapper()
myenv := node.environ_get()! myenv := node.environ_get()!
homedir := myenv['HOME'] or { return error("can't find HOME in env") } _ := myenv['HOME'] or { return error("can't find HOME in env") }
mut todo := []string{} mut todo := []string{}
if !args.compile { if !args.compile {

View File

@@ -156,8 +156,7 @@ pub fn plbook_run(cmd Command) !(&playbook.PlayBook, string) {
mut plbook := if heroscript.len > 0 { mut plbook := if heroscript.len > 0 {
playbook.new(text: heroscript)! playbook.new(text: heroscript)!
} else { } else {
path = plbook_code_get(cmd)!
if path.len == 0 { if path.len == 0 {
return error(cmd.help_message()) return error(cmd.help_message())
} }

View File

@@ -175,7 +175,7 @@ fn test_get_u64_default() {
assert params.get_u64_default('key3', 17)! == 17 assert params.get_u64_default('key3', 17)! == 17
} }
fn test_get_u32()! { fn test_get_u32() ! {
text := ' text := '
key1: val1 key1: val1
key2: 19 key2: 19

View File

@@ -6,68 +6,68 @@ import time
// Calendar represents a collection of events // Calendar represents a collection of events
@[heap] @[heap]
pub struct Calendar { pub struct Calendar {
Base Base
pub mut: pub mut:
group_id u32 // Associated group for permissions group_id u32 // Associated group for permissions
events []u32 // IDs of calendar events (changed to u32 to match CalendarEvent) events []u32 // IDs of calendar events (changed to u32 to match CalendarEvent)
color string // Hex color code color string // Hex color code
timezone string timezone string
is_public bool is_public bool
} }
@[params] @[params]
pub struct CalendarArgs { pub struct CalendarArgs {
BaseArgs BaseArgs
pub mut: pub mut:
group_id u32 group_id u32
events []u32 events []u32
color string color string
timezone string timezone string
is_public bool is_public bool
} }
pub fn calendar_new(args CalendarArgs) !Calendar { pub fn calendar_new(args CalendarArgs) !Calendar {
mut commentids:=[]u32{} mut commentids := []u32{}
for comment in args.comments{ for comment in args.comments {
// Convert CommentArg to CommentArgExtended // Convert CommentArg to CommentArgExtended
extended_comment := CommentArgExtended{ extended_comment := CommentArgExtended{
comment: comment.comment comment: comment.comment
parent: 0 parent: 0
author: 0 author: 0
} }
commentids << comment_set(extended_comment)! commentids << comment_set(extended_comment)!
} }
mut obj := Calendar{ mut obj := Calendar{
id: args.id or {0} // Will be set by DB? id: args.id or { 0 } // Will be set by DB?
name: args.name name: args.name
description: args.description description: args.description
created_at: ourtime.now().unix() created_at: ourtime.now().unix()
updated_at: ourtime.now().unix() updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or {0} securitypolicy: args.securitypolicy or { 0 }
tags: tags2id(args.tags)! tags: tags2id(args.tags)!
comments: commentids comments: commentids
group_id: args.group_id group_id: args.group_id
events: args.events events: args.events
color: args.color color: args.color
timezone: args.timezone timezone: args.timezone
is_public: args.is_public is_public: args.is_public
} }
return obj return obj
} }
pub fn (mut c Calendar) add_event(event_id u32) { // Changed event_id to u32 pub fn (mut c Calendar) add_event(event_id u32) { // Changed event_id to u32
if event_id !in c.events { if event_id !in c.events {
c.events << event_id c.events << event_id
c.updated_at = ourtime.now().unix() // Use Base's updated_at c.updated_at = ourtime.now().unix() // Use Base's updated_at
} }
} }
pub fn (mut c Calendar) dump() []u8 { pub fn (mut c Calendar) dump() []u8 {
//TODO: implement based on lib/data/encoder/readme.md // TODO: implement based on lib/data/encoder/readme.md
return []u8{} return []u8{}
} }
pub fn calendar_load(data []u8) Calendar { pub fn calendar_load(data []u8) Calendar {
//TODO: implement based on lib/data/encoder/readme.md // TODO: implement based on lib/data/encoder/readme.md
return Calendar{} return Calendar{}
} }

View File

@@ -9,256 +9,253 @@ import freeflowuniverse.herolib.core.redisclient
// CalendarEvent represents a single event in a calendar // CalendarEvent represents a single event in a calendar
@[heap] @[heap]
pub struct CalendarEvent { pub struct CalendarEvent {
Base Base
pub mut: pub mut:
title string title string
start_time i64 // Unix timestamp start_time i64 // Unix timestamp
end_time i64 // Unix timestamp end_time i64 // Unix timestamp
location string location string
attendees []u32 // IDs of user groups attendees []u32 // IDs of user groups
fs_items []u32 // IDs of linked files or dirs fs_items []u32 // IDs of linked files or dirs
calendar_id u32 // Associated calendar calendar_id u32 // Associated calendar
status EventStatus status EventStatus
is_all_day bool is_all_day bool
is_recurring bool is_recurring bool
recurrence []RecurrenceRule //normally empty recurrence []RecurrenceRule // normally empty
reminder_mins []int // Minutes before event for reminders reminder_mins []int // Minutes before event for reminders
color string // Hex color code color string // Hex color code
timezone string timezone string
} }
pub struct Attendee { pub struct Attendee {
pub mut: pub mut:
user_id u32 user_id u32
status AttendanceStatus status AttendanceStatus
role AttendeeRole role AttendeeRole
} }
pub enum AttendanceStatus { pub enum AttendanceStatus {
no_response no_response
accepted accepted
declined declined
tentative tentative
} }
pub enum AttendeeRole { pub enum AttendeeRole {
required required
optional optional
organizer organizer
} }
pub enum EventStatus { pub enum EventStatus {
draft draft
published published
cancelled cancelled
completed completed
} }
pub struct RecurrenceRule { pub struct RecurrenceRule {
pub mut: pub mut:
frequency RecurrenceFreq frequency RecurrenceFreq
interval int // Every N frequencies interval int // Every N frequencies
until i64 // End date (Unix timestamp) until i64 // End date (Unix timestamp)
count int // Number of occurrences count int // Number of occurrences
by_weekday []int // Days of week (0=Sunday) by_weekday []int // Days of week (0=Sunday)
by_monthday []int // Days of month by_monthday []int // Days of month
} }
pub enum RecurrenceFreq { pub enum RecurrenceFreq {
none none
daily daily
weekly weekly
monthly monthly
yearly yearly
} }
@[params] @[params]
pub struct CalendarEventArgs { pub struct CalendarEventArgs {
BaseArgs BaseArgs
pub mut: pub mut:
title string title string
start_time string // use ourtime module to go from string to epoch start_time string // use ourtime module to go from string to epoch
end_time string // use ourtime module to go from string to epoch end_time string // use ourtime module to go from string to epoch
location string location string
attendees []u32 // IDs of user groups attendees []u32 // IDs of user groups
fs_items []u32 // IDs of linked files or dirs fs_items []u32 // IDs of linked files or dirs
calendar_id u32 // Associated calendar calendar_id u32 // Associated calendar
status EventStatus status EventStatus
is_all_day bool is_all_day bool
is_recurring bool is_recurring bool
recurrence []RecurrenceRule recurrence []RecurrenceRule
reminder_mins []int // Minutes before event for reminders reminder_mins []int // Minutes before event for reminders
color string // Hex color code color string // Hex color code
timezone string timezone string
} }
pub fn calendar_event_new(args CalendarEventArgs) !CalendarEvent { pub fn calendar_event_new(args CalendarEventArgs) !CalendarEvent {
// Convert tags to u32 ID // Convert tags to u32 ID
tags_id := tags2id(args.tags)! tags_id := tags2id(args.tags)!
return CalendarEvent{
// Base fields
id: args.id or { 0 }
name: args.name
description: args.description
created_at: ourtime.now().unix()
updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or { 0 }
tags: tags_id
comments: comments2ids(args.comments)!
// CalendarEvent specific fields
title: args.title
start_time: ourtime.new(args.start_time)!.unix()
end_time: ourtime.new(args.end_time)!.unix()
location: args.location
attendees: args.attendees
fs_items: args.fs_items
calendar_id: args.calendar_id
status: args.status
is_all_day: args.is_all_day
is_recurring: args.is_recurring
recurrence: args.recurrence
reminder_mins: args.reminder_mins
color: args.color
timezone: args.timezone
}
} }
pub fn (mut e CalendarEvent) dump() ![]u8 { pub fn (mut e CalendarEvent) dump() ![]u8 {
// Create a new encoder // Create a new encoder
mut enc := encoder.new() mut enc := encoder.new()
// Add version byte // Add version byte
enc.add_u8(1) enc.add_u8(1)
// Encode Base fields // Encode Base fields
enc.add_u32(e.id) enc.add_u32(e.id)
enc.add_string(e.name) enc.add_string(e.name)
enc.add_string(e.description) enc.add_string(e.description)
enc.add_i64(e.created_at) enc.add_i64(e.created_at)
enc.add_i64(e.updated_at) enc.add_i64(e.updated_at)
enc.add_u32(e.securitypolicy) enc.add_u32(e.securitypolicy)
enc.add_u32(e.tags) enc.add_u32(e.tags)
enc.add_list_u32(e.comments) enc.add_list_u32(e.comments)
// Encode CalendarEvent specific fields // Encode CalendarEvent specific fields
enc.add_string(e.title) enc.add_string(e.title)
enc.add_string(e.description) enc.add_string(e.description)
enc.add_i64(e.start_time) enc.add_i64(e.start_time)
enc.add_i64(e.end_time) enc.add_i64(e.end_time)
enc.add_string(e.location) enc.add_string(e.location)
enc.add_list_u32(e.attendees) enc.add_list_u32(e.attendees)
enc.add_list_u32(e.fs_items) enc.add_list_u32(e.fs_items)
enc.add_u32(e.calendar_id) enc.add_u32(e.calendar_id)
enc.add_u8(u8(e.status)) enc.add_u8(u8(e.status))
enc.add_bool(e.is_all_day) enc.add_bool(e.is_all_day)
enc.add_bool(e.is_recurring) enc.add_bool(e.is_recurring)
// Encode recurrence array // Encode recurrence array
enc.add_u16(u16(e.recurrence.len)) enc.add_u16(u16(e.recurrence.len))
for rule in e.recurrence { for rule in e.recurrence {
enc.add_u8(u8(rule.frequency)) enc.add_u8(u8(rule.frequency))
enc.add_int(rule.interval) enc.add_int(rule.interval)
enc.add_i64(rule.until) enc.add_i64(rule.until)
enc.add_int(rule.count) enc.add_int(rule.count)
enc.add_list_int(rule.by_weekday) enc.add_list_int(rule.by_weekday)
enc.add_list_int(rule.by_monthday) enc.add_list_int(rule.by_monthday)
} }
enc.add_list_int(e.reminder_mins) enc.add_list_int(e.reminder_mins)
enc.add_string(e.color) enc.add_string(e.color)
enc.add_string(e.timezone) enc.add_string(e.timezone)
return enc.data return enc.data
} }
pub fn (ce CalendarEvent) load(data []u8) !CalendarEvent { pub fn (ce CalendarEvent) load(data []u8) !CalendarEvent {
// Create a new decoder // Create a new decoder
mut dec := encoder.decoder_new(data) mut dec := encoder.decoder_new(data)
// Read version byte // Read version byte
version := dec.get_u8()! version := dec.get_u8()!
if version != 1 { if version != 1 {
return error('wrong version in calendar event load') return error('wrong version in calendar event load')
} }
// Decode Base fields // Decode Base fields
id := dec.get_u32()! id := dec.get_u32()!
name := dec.get_string()! name := dec.get_string()!
description := dec.get_string()! description := dec.get_string()!
created_at := dec.get_i64()! created_at := dec.get_i64()!
updated_at := dec.get_i64()! updated_at := dec.get_i64()!
securitypolicy := dec.get_u32()! securitypolicy := dec.get_u32()!
tags := dec.get_u32()! tags := dec.get_u32()!
comments := dec.get_list_u32()! comments := dec.get_list_u32()!
// Decode CalendarEvent specific fields // Decode CalendarEvent specific fields
title := dec.get_string()! title := dec.get_string()!
description2 := dec.get_string()! // Second description field description2 := dec.get_string()! // Second description field
start_time := dec.get_i64()! start_time := dec.get_i64()!
end_time := dec.get_i64()! end_time := dec.get_i64()!
location := dec.get_string()! location := dec.get_string()!
attendees := dec.get_list_u32()! attendees := dec.get_list_u32()!
fs_items := dec.get_list_u32()! fs_items := dec.get_list_u32()!
calendar_id := dec.get_u32()! calendar_id := dec.get_u32()!
status := unsafe { EventStatus(dec.get_u8()!) } status := unsafe { EventStatus(dec.get_u8()!) }
is_all_day := dec.get_bool()! is_all_day := dec.get_bool()!
is_recurring := dec.get_bool()! is_recurring := dec.get_bool()!
// Decode recurrence array // Decode recurrence array
recurrence_len := dec.get_u16()! recurrence_len := dec.get_u16()!
mut recurrence := []RecurrenceRule{} mut recurrence := []RecurrenceRule{}
for _ in 0..recurrence_len { for _ in 0 .. recurrence_len {
frequency := unsafe{RecurrenceFreq(dec.get_u8()!)} frequency := unsafe { RecurrenceFreq(dec.get_u8()!) }
interval := dec.get_int()! interval := dec.get_int()!
until := dec.get_i64()! until := dec.get_i64()!
count := dec.get_int()! count := dec.get_int()!
by_weekday := dec.get_list_int()! by_weekday := dec.get_list_int()!
by_monthday := dec.get_list_int()! by_monthday := dec.get_list_int()!
recurrence << RecurrenceRule{ recurrence << RecurrenceRule{
frequency: frequency frequency: frequency
interval: interval interval: interval
until: until until: until
count: count count: count
by_weekday: by_weekday by_weekday: by_weekday
by_monthday: by_monthday by_monthday: by_monthday
} }
} }
reminder_mins := dec.get_list_int()! reminder_mins := dec.get_list_int()!
color := dec.get_string()! color := dec.get_string()!
timezone := dec.get_string()! timezone := dec.get_string()!
return CalendarEvent{ return CalendarEvent{
// Base fields // Base fields
id: id id: id
name: name name: name
description: description description: description
created_at: created_at created_at: created_at
updated_at: updated_at updated_at: updated_at
securitypolicy: securitypolicy securitypolicy: securitypolicy
tags: tags tags: tags
comments: comments comments: comments
// CalendarEvent specific fields // CalendarEvent specific fields
title: title title: title
start_time: start_time start_time: start_time
end_time: end_time end_time: end_time
location: location location: location
attendees: attendees attendees: attendees
fs_items: fs_items fs_items: fs_items
calendar_id: calendar_id calendar_id: calendar_id
status: status status: status
is_all_day: is_all_day is_all_day: is_all_day
is_recurring: is_recurring is_recurring: is_recurring
recurrence: recurrence recurrence: recurrence
reminder_mins: reminder_mins reminder_mins: reminder_mins
color: color color: color
timezone: timezone timezone: timezone
} }
} }

View File

@@ -8,57 +8,57 @@ import json
@[heap] @[heap]
pub struct ChatGroup { pub struct ChatGroup {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
description string description string
group_id string // Associated group for permissions group_id string // Associated group for permissions
chat_type ChatType chat_type ChatType
messages []string // IDs of chat messages messages []string // IDs of chat messages
created_at i64 created_at i64
updated_at i64 updated_at i64
last_activity i64 last_activity i64
is_archived bool is_archived bool
tags []string tags []string
} }
pub enum ChatType { pub enum ChatType {
public_channel public_channel
private_channel private_channel
direct_message direct_message
group_message group_message
} }
pub fn (mut c ChatGroup) calculate_id() { pub fn (mut c ChatGroup) calculate_id() {
content := json.encode(ChatGroupContent{ content := json.encode(ChatGroupContent{
name: c.name name: c.name
description: c.description description: c.description
group_id: c.group_id group_id: c.group_id
chat_type: c.chat_type chat_type: c.chat_type
is_archived: c.is_archived is_archived: c.is_archived
tags: c.tags tags: c.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
c.id = hash.hex()[..48] c.id = hash.hex()[..48]
} }
struct ChatGroupContent { struct ChatGroupContent {
name string name string
description string description string
group_id string group_id string
chat_type ChatType chat_type ChatType
is_archived bool is_archived bool
tags []string tags []string
} }
pub fn new_chat_group(name string, group_id string, chat_type ChatType) ChatGroup { pub fn new_chat_group(name string, group_id string, chat_type ChatType) ChatGroup {
mut chat_group := ChatGroup{ mut chat_group := ChatGroup{
name: name name: name
group_id: group_id group_id: group_id
chat_type: chat_type chat_type: chat_type
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
last_activity: time.now().unix() last_activity: time.now().unix()
} }
chat_group.calculate_id() chat_group.calculate_id()
return chat_group return chat_group
} }

View File

@@ -8,97 +8,97 @@ import json
@[heap] @[heap]
pub struct ChatMessage { pub struct ChatMessage {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
content string content string
chat_group_id string // Associated chat group chat_group_id string // Associated chat group
sender_id string // User ID of sender sender_id string // User ID of sender
parent_messages []MessageLink // Referenced/replied messages parent_messages []MessageLink // Referenced/replied messages
fs_files []string // IDs of linked files fs_files []string // IDs of linked files
message_type MessageType message_type MessageType
status MessageStatus status MessageStatus
created_at i64 created_at i64
updated_at i64 updated_at i64
edited_at i64 edited_at i64
deleted_at i64 deleted_at i64
reactions []MessageReaction reactions []MessageReaction
mentions []string // User IDs mentioned in message mentions []string // User IDs mentioned in message
tags []string tags []string
} }
pub struct MessageLink { pub struct MessageLink {
pub mut: pub mut:
message_id string message_id string
link_type MessageLinkType link_type MessageLinkType
} }
pub enum MessageLinkType { pub enum MessageLinkType {
reply reply
reference reference
forward forward
quote quote
} }
pub enum MessageType { pub enum MessageType {
text text
image image
file file
voice voice
video video
system system
announcement announcement
} }
pub enum MessageStatus { pub enum MessageStatus {
sent sent
delivered delivered
read read
failed failed
deleted deleted
} }
pub struct MessageReaction { pub struct MessageReaction {
pub mut: pub mut:
user_id string user_id string
emoji string emoji string
timestamp i64 timestamp i64
} }
pub fn (mut m ChatMessage) calculate_id() { pub fn (mut m ChatMessage) calculate_id() {
content := json.encode(MessageContent{ content := json.encode(MessageContent{
content: m.content content: m.content
chat_group_id: m.chat_group_id chat_group_id: m.chat_group_id
sender_id: m.sender_id sender_id: m.sender_id
parent_messages: m.parent_messages parent_messages: m.parent_messages
fs_files: m.fs_files fs_files: m.fs_files
message_type: m.message_type message_type: m.message_type
mentions: m.mentions mentions: m.mentions
tags: m.tags tags: m.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
m.id = hash.hex()[..48] m.id = hash.hex()[..48]
} }
struct MessageContent { struct MessageContent {
content string content string
chat_group_id string chat_group_id string
sender_id string sender_id string
parent_messages []MessageLink parent_messages []MessageLink
fs_files []string fs_files []string
message_type MessageType message_type MessageType
mentions []string mentions []string
tags []string tags []string
} }
pub fn new_chat_message(content string, chat_group_id string, sender_id string) ChatMessage { pub fn new_chat_message(content string, chat_group_id string, sender_id string) ChatMessage {
mut message := ChatMessage{ mut message := ChatMessage{
content: content content: content
chat_group_id: chat_group_id chat_group_id: chat_group_id
sender_id: sender_id sender_id: sender_id
message_type: .text message_type: .text
status: .sent status: .sent
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
} }
message.calculate_id() message.calculate_id()
return message return message
} }

View File

@@ -4,42 +4,44 @@ import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.encoder import freeflowuniverse.herolib.data.encoder
pub fn set[T](obj T) ! { pub fn set[T](obj T) ! {
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
id := obj.id id := obj.id
data := encoder.encode(obj)! data := encoder.encode(obj)!
redis.hset("db:${T.name}",id.str(),data.bytestr())! redis.hset('db:${T.name}', id.str(), data.bytestr())!
} }
pub fn get[T](id u32) !T { pub fn get[T](id u32) !T {
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
data := redis.hget("db:${T.name}",id.str())! data := redis.hget('db:${T.name}', id.str())!
t := T{} t := T{}
return encoder.decode[T](data.bytes())! return encoder.decode[T](data.bytes())!
} }
pub fn exists[T](id u32) !bool { pub fn exists[T](id u32) !bool {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
return redis.hexists("db:${name}",id.str())! return redis.hexists('db:${name}', id.str())!
} }
pub fn delete[T](id u32) ! { pub fn delete[T](id u32) ! {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
redis.hdel("db:${name}", id.str())! redis.hdel('db:${name}', id.str())!
} }
pub fn list[T]() ![]T { pub fn list[T]() ![]T {
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
ids := redis.hkeys("db:${name}")! ids := redis.hkeys('db:${name}')!
mut result := []T{} mut result := []T{}
for id in ids { for id in ids {
result << get[T](id.u32())! result << get[T](id.u32())!
} }
return result return result
} }
//make it easy to get a base object // make it easy to get a base object
pub fn new_from_base[T](args BaseArgs) !Base { pub fn new_from_base[T](args BaseArgs) !Base {
return T{
Base: new_base(args)!
}
}

View File

@@ -5,7 +5,6 @@ import json
import freeflowuniverse.herolib.ui.console import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.hero.heromodels.openrpc import freeflowuniverse.herolib.hero.heromodels.openrpc
fn send_request(mut conn unix.StreamConn, request openrpc.JsonRpcRequest) ! { fn send_request(mut conn unix.StreamConn, request openrpc.JsonRpcRequest) ! {
request_json := json.encode(request) request_json := json.encode(request)
conn.write_string(request_json)! conn.write_string(request_json)!
@@ -31,9 +30,9 @@ console.print_item('Connected to server')
console.print_header('Test 1: Discover OpenRPC Specification') console.print_header('Test 1: Discover OpenRPC Specification')
discover_request := openrpc.JsonRpcRequest{ discover_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0' jsonrpc: '2.0'
method: 'discover' method: 'discover'
params: 'null' params: 'null'
id: '1' id: '1'
} }
send_request(mut conn, discover_request)! send_request(mut conn, discover_request)!
@@ -46,9 +45,9 @@ comment_json := '{"comment": "This is a test comment from OpenRPC client", "pare
create_request := openrpc.JsonRpcRequest{ create_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0' jsonrpc: '2.0'
method: 'comment_set' method: 'comment_set'
params: comment_json params: comment_json
id: '2' id: '2'
} }
send_request(mut conn, create_request)! send_request(mut conn, create_request)!
@@ -59,9 +58,9 @@ console.print_item('Comment created: ${create_response}')
console.print_header('Test 3: List All Comments') console.print_header('Test 3: List All Comments')
list_request := openrpc.JsonRpcRequest{ list_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0' jsonrpc: '2.0'
method: 'comment_list' method: 'comment_list'
params: 'null' params: 'null'
id: '3' id: '3'
} }
send_request(mut conn, list_request)! send_request(mut conn, list_request)!
@@ -74,9 +73,9 @@ get_args_json := '{"author": 1}'
get_request := openrpc.JsonRpcRequest{ get_request := openrpc.JsonRpcRequest{
jsonrpc: '2.0' jsonrpc: '2.0'
method: 'comment_get' method: 'comment_get'
params: get_args_json params: get_args_json
id: '4' id: '4'
} }
send_request(mut conn, get_request)! send_request(mut conn, get_request)!
@@ -84,5 +83,3 @@ get_response := read_response(mut conn)!
console.print_item('Comments by author: ${get_response}') console.print_item('Comments by author: ${get_response}')
console.print_header('All tests completed successfully!') console.print_header('All tests completed successfully!')

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run #!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
// Create a user // Create a user
mut user := new_user('John Doe', 'john@example.com') mut user := new_user('John Doe', 'john@example.com')
@@ -18,7 +17,8 @@ mut issue := new_project_issue('Fix login bug', project.id, user.id, .bug)
mut calendar := new_calendar('Team Calendar', group.id) mut calendar := new_calendar('Team Calendar', group.id)
// Create an event // Create an event
mut event := new_calendar_event('Sprint Planning', 1672531200, 1672534800, calendar.id,
	user.id)
calendar.add_event(event.id) calendar.add_event(event.id)
// Create a filesystem // Create a filesystem

View File

@@ -8,45 +8,45 @@ import json
@[heap] @[heap]
pub struct Fs { pub struct Fs {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
description string description string
group_id string // Associated group for permissions group_id string // Associated group for permissions
root_dir_id string // ID of root directory root_dir_id string // ID of root directory
created_at i64 created_at i64
updated_at i64 updated_at i64
quota_bytes i64 // Storage quota in bytes quota_bytes i64 // Storage quota in bytes
used_bytes i64 // Current usage in bytes used_bytes i64 // Current usage in bytes
tags []string tags []string
} }
pub fn (mut f Fs) calculate_id() { pub fn (mut f Fs) calculate_id() {
content := json.encode(FsContent{ content := json.encode(FsContent{
name: f.name name: f.name
description: f.description description: f.description
group_id: f.group_id group_id: f.group_id
quota_bytes: f.quota_bytes quota_bytes: f.quota_bytes
tags: f.tags tags: f.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
f.id = hash.hex()[..48] f.id = hash.hex()[..48]
} }
struct FsContent { struct FsContent {
name string name string
description string description string
group_id string group_id string
quota_bytes i64 quota_bytes i64
tags []string tags []string
} }
pub fn new_fs(name string, group_id string) Fs { pub fn new_fs(name string, group_id string) Fs {
mut fs := Fs{ mut fs := Fs{
name: name name: name
group_id: group_id group_id: group_id
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
} }
fs.calculate_id() fs.calculate_id()
return fs return fs
} }

View File

@@ -7,35 +7,35 @@ import crypto.blake3
@[heap] @[heap]
pub struct FsBlob { pub struct FsBlob {
pub mut: pub mut:
id string // blake192 hash of content id string // blake192 hash of content
data []u8 // Binary data (max 1MB) data []u8 // Binary data (max 1MB)
size_bytes int // Size in bytes size_bytes int // Size in bytes
created_at i64 created_at i64
mime_type string mime_type string
encoding string // e.g., "gzip", "none" encoding string // e.g., "gzip", "none"
} }
pub fn (mut b FsBlob) calculate_id() { pub fn (mut b FsBlob) calculate_id() {
hash := blake3.sum256(b.data) hash := blake3.sum256(b.data)
b.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars b.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars
} }
pub fn new_fs_blob(data []u8) !FsBlob { pub fn new_fs_blob(data []u8) !FsBlob {
if data.len > 1024 * 1024 { // 1MB limit if data.len > 1024 * 1024 { // 1MB limit
return error('Blob size exceeds 1MB limit') return error('Blob size exceeds 1MB limit')
} }
mut blob := FsBlob{ mut blob := FsBlob{
data: data data: data
size_bytes: data.len size_bytes: data.len
created_at: time.now().unix() created_at: time.now().unix()
encoding: 'none' encoding: 'none'
} }
blob.calculate_id() blob.calculate_id()
return blob return blob
} }
pub fn (b FsBlob) verify_integrity() bool { pub fn (b FsBlob) verify_integrity() bool {
hash := blake3.sum256(b.data) hash := blake3.sum256(b.data)
return hash.hex()[..48] == b.id return hash.hex()[..48] == b.id
} }

View File

@@ -8,46 +8,46 @@ import json
@[heap] @[heap]
pub struct FsDir { pub struct FsDir {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
fs_id string // Associated filesystem fs_id string // Associated filesystem
parent_id string // Parent directory ID (empty for root) parent_id string // Parent directory ID (empty for root)
group_id string // Associated group for permissions group_id string // Associated group for permissions
children []string // Child directory and file IDs children []string // Child directory and file IDs
created_at i64 created_at i64
updated_at i64 updated_at i64
tags []string tags []string
} }
pub fn (mut d FsDir) calculate_id() { pub fn (mut d FsDir) calculate_id() {
content := json.encode(DirContent{ content := json.encode(DirContent{
name: d.name name: d.name
fs_id: d.fs_id fs_id: d.fs_id
parent_id: d.parent_id parent_id: d.parent_id
group_id: d.group_id group_id: d.group_id
tags: d.tags tags: d.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
d.id = hash.hex()[..48] d.id = hash.hex()[..48]
} }
struct DirContent { struct DirContent {
name string name string
fs_id string fs_id string
parent_id string parent_id string
group_id string group_id string
tags []string tags []string
} }
pub fn new_fs_dir(name string, fs_id string, parent_id string, group_id string) FsDir { pub fn new_fs_dir(name string, fs_id string, parent_id string, group_id string) FsDir {
mut dir := FsDir{ mut dir := FsDir{
name: name name: name
fs_id: fs_id fs_id: fs_id
parent_id: parent_id parent_id: parent_id
group_id: group_id group_id: group_id
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
} }
dir.calculate_id() dir.calculate_id()
return dir return dir
} }

View File

@@ -8,58 +8,58 @@ import json
@[heap] @[heap]
pub struct FsFile { pub struct FsFile {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
fs_id string // Associated filesystem fs_id string // Associated filesystem
directories []string // Directory IDs where this file exists directories []string // Directory IDs where this file exists
blobs []string // Blake192 IDs of file content blobs blobs []string // Blake192 IDs of file content blobs
size_bytes i64 // Total file size size_bytes i64 // Total file size
mime_type string mime_type string
checksum string // Overall file checksum checksum string // Overall file checksum
created_at i64 created_at i64
updated_at i64 updated_at i64
accessed_at i64 accessed_at i64
tags []string tags []string
metadata map[string]string // Custom metadata metadata map[string]string // Custom metadata
} }
pub fn (mut f FsFile) calculate_id() { pub fn (mut f FsFile) calculate_id() {
content := json.encode(FileContent{ content := json.encode(FileContent{
name: f.name name: f.name
fs_id: f.fs_id fs_id: f.fs_id
directories: f.directories directories: f.directories
blobs: f.blobs blobs: f.blobs
size_bytes: f.size_bytes size_bytes: f.size_bytes
mime_type: f.mime_type mime_type: f.mime_type
checksum: f.checksum checksum: f.checksum
tags: f.tags tags: f.tags
metadata: f.metadata metadata: f.metadata
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
f.id = hash.hex()[..48] f.id = hash.hex()[..48]
} }
struct FileContent { struct FileContent {
name string name string
fs_id string fs_id string
directories []string directories []string
blobs []string blobs []string
size_bytes i64 size_bytes i64
mime_type string mime_type string
checksum string checksum string
tags []string tags []string
metadata map[string]string metadata map[string]string
} }
pub fn new_fs_file(name string, fs_id string, directories []string) FsFile { pub fn new_fs_file(name string, fs_id string, directories []string) FsFile {
mut file := FsFile{ mut file := FsFile{
name: name name: name
fs_id: fs_id fs_id: fs_id
directories: directories directories: directories
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
accessed_at: time.now().unix() accessed_at: time.now().unix()
} }
file.calculate_id() file.calculate_id()
return file return file
} }

View File

@@ -8,54 +8,54 @@ import json
@[heap] @[heap]
pub struct FsSymlink { pub struct FsSymlink {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
fs_id string // Associated filesystem fs_id string // Associated filesystem
parent_id string // Parent directory ID parent_id string // Parent directory ID
target_id string // ID of target file or directory target_id string // ID of target file or directory
target_type SymlinkTargetType target_type SymlinkTargetType
created_at i64 created_at i64
updated_at i64 updated_at i64
tags []string tags []string
} }
pub enum SymlinkTargetType { pub enum SymlinkTargetType {
file file
directory directory
} }
pub fn (mut s FsSymlink) calculate_id() { pub fn (mut s FsSymlink) calculate_id() {
content := json.encode(SymlinkContent{ content := json.encode(SymlinkContent{
name: s.name name: s.name
fs_id: s.fs_id fs_id: s.fs_id
parent_id: s.parent_id parent_id: s.parent_id
target_id: s.target_id target_id: s.target_id
target_type: s.target_type target_type: s.target_type
tags: s.tags tags: s.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
s.id = hash.hex()[..48] s.id = hash.hex()[..48]
} }
struct SymlinkContent { struct SymlinkContent {
name string name string
fs_id string fs_id string
parent_id string parent_id string
target_id string target_id string
target_type SymlinkTargetType target_type SymlinkTargetType
tags []string tags []string
} }
pub fn new_fs_symlink(name string, fs_id string, parent_id string, target_id string, target_type SymlinkTargetType) FsSymlink { pub fn new_fs_symlink(name string, fs_id string, parent_id string, target_id string, target_type SymlinkTargetType) FsSymlink {
mut symlink := FsSymlink{ mut symlink := FsSymlink{
name: name name: name
fs_id: fs_id fs_id: fs_id
parent_id: parent_id parent_id: parent_id
target_id: target_id target_id: target_id
target_type: target_type target_type: target_type
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
} }
symlink.calculate_id() symlink.calculate_id()
return symlink return symlink
} }

View File

@@ -8,74 +8,74 @@ import json
@[heap] @[heap]
pub struct Group { pub struct Group {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
description string description string
members []GroupMember members []GroupMember
subgroups []string // IDs of child groups subgroups []string // IDs of child groups
parent_group string // ID of parent group parent_group string // ID of parent group
created_at i64 created_at i64
updated_at i64 updated_at i64
is_public bool is_public bool
tags []string tags []string
} }
pub struct GroupMember { pub struct GroupMember {
pub mut: pub mut:
user_id string user_id string
role GroupRole role GroupRole
joined_at i64 joined_at i64
} }
pub enum GroupRole { pub enum GroupRole {
reader reader
writer writer
admin admin
owner owner
} }
pub fn (mut g Group) calculate_id() { pub fn (mut g Group) calculate_id() {
content := json.encode(GroupContent{ content := json.encode(GroupContent{
name: g.name name: g.name
description: g.description description: g.description
members: g.members members: g.members
subgroups: g.subgroups subgroups: g.subgroups
parent_group: g.parent_group parent_group: g.parent_group
is_public: g.is_public is_public: g.is_public
tags: g.tags tags: g.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
g.id = hash.hex()[..48] g.id = hash.hex()[..48]
} }
struct GroupContent { struct GroupContent {
name string name string
description string description string
members []GroupMember members []GroupMember
subgroups []string subgroups []string
parent_group string parent_group string
is_public bool is_public bool
tags []string tags []string
} }
pub fn new_group(name string, description string) Group { pub fn new_group(name string, description string) Group {
mut group := Group{ mut group := Group{
name: name name: name
description: description description: description
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
is_public: false is_public: false
} }
group.calculate_id() group.calculate_id()
return group return group
} }
pub fn (mut g Group) add_member(user_id string, role GroupRole) { pub fn (mut g Group) add_member(user_id string, role GroupRole) {
g.members << GroupMember{ g.members << GroupMember{
user_id: user_id user_id: user_id
role: role role: role
joined_at: time.now().unix() joined_at: time.now().unix()
} }
g.updated_at = time.now().unix() g.updated_at = time.now().unix()
g.calculate_id() g.calculate_id()
} }

View File

@@ -50,7 +50,9 @@ pub fn comment_get(params string) !string {
pub fn comment_set(params string) !string { pub fn comment_set(params string) !string {
comment_arg := json.decode(heromodels.CommentArgExtended, params)! comment_arg := json.decode(heromodels.CommentArgExtended, params)!
id := heromodels.comment_set(comment_arg)! id := heromodels.comment_set(comment_arg)!
return json.encode({
'id': id
})
} }
// comment_delete removes a comment by ID // comment_delete removes a comment by ID

View File

@@ -8,96 +8,112 @@ import json
@[heap] @[heap]
pub struct Project { pub struct Project {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
description string description string
group_id string // Associated group for permissions group_id string // Associated group for permissions
swimlanes []Swimlane swimlanes []Swimlane
milestones []Milestone milestones []Milestone
issues []string // IDs of project issues issues []string // IDs of project issues
fs_files []string // IDs of linked files fs_files []string // IDs of linked files
status ProjectStatus status ProjectStatus
start_date i64 start_date i64
end_date i64 end_date i64
created_at i64 created_at i64
updated_at i64 updated_at i64
tags []string tags []string
} }
pub struct Swimlane { pub struct Swimlane {
pub mut: pub mut:
id string id string
name string name string
description string description string
order int order int
color string color string
is_done bool is_done bool
} }
pub struct Milestone { pub struct Milestone {
pub mut: pub mut:
id string id string
name string name string
description string description string
due_date i64 due_date i64
completed bool completed bool
issues []string // IDs of issues in this milestone issues []string // IDs of issues in this milestone
} }
pub enum ProjectStatus { pub enum ProjectStatus {
planning planning
active active
on_hold on_hold
completed completed
cancelled cancelled
} }
pub fn (mut p Project) calculate_id() { pub fn (mut p Project) calculate_id() {
content := json.encode(ProjectContent{ content := json.encode(ProjectContent{
name: p.name name: p.name
description: p.description description: p.description
group_id: p.group_id group_id: p.group_id
swimlanes: p.swimlanes swimlanes: p.swimlanes
milestones: p.milestones milestones: p.milestones
issues: p.issues issues: p.issues
fs_files: p.fs_files fs_files: p.fs_files
status: p.status status: p.status
start_date: p.start_date start_date: p.start_date
end_date: p.end_date end_date: p.end_date
tags: p.tags tags: p.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
p.id = hash.hex()[..48] p.id = hash.hex()[..48]
} }
struct ProjectContent { struct ProjectContent {
name string name string
description string description string
group_id string group_id string
swimlanes []Swimlane swimlanes []Swimlane
milestones []Milestone milestones []Milestone
issues []string issues []string
fs_files []string fs_files []string
status ProjectStatus status ProjectStatus
start_date i64 start_date i64
end_date i64 end_date i64
tags []string tags []string
} }
pub fn new_project(name string, description string, group_id string) Project { pub fn new_project(name string, description string, group_id string) Project {
mut project := Project{ mut project := Project{
name: name name: name
description: description description: description
group_id: group_id group_id: group_id
status: .planning status: .planning
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
swimlanes: [ swimlanes: [
Swimlane{
id: 'todo'
name: 'To Do'
order: 1
color: '#f1c40f'
},
Swimlane{
id: 'in_progress'
name: 'In Progress'
order: 2
color: '#3498db'
},
Swimlane{
id: 'done'
name: 'Done'
order: 3
color: '#2ecc71'
is_done: true
},
]
}
project.calculate_id()
return project
} }

View File

@@ -8,109 +8,109 @@ import json
@[heap] @[heap]
pub struct ProjectIssue { pub struct ProjectIssue {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
title string title string
description string description string
project_id string // Associated project project_id string // Associated project
issue_type IssueType issue_type IssueType
priority IssuePriority priority IssuePriority
status IssueStatus status IssueStatus
swimlane_id string // Current swimlane swimlane_id string // Current swimlane
assignees []string // User IDs assignees []string // User IDs
reporter string // User ID who created the issue reporter string // User ID who created the issue
milestone_id string // Associated milestone milestone_id string // Associated milestone
deadline i64 // Unix timestamp deadline i64 // Unix timestamp
estimate int // Story points or hours estimate int // Story points or hours
fs_files []string // IDs of linked files fs_files []string // IDs of linked files
parent_id string // Parent issue ID (for sub-tasks) parent_id string // Parent issue ID (for sub-tasks)
children []string // Child issue IDs children []string // Child issue IDs
created_at i64 created_at i64
updated_at i64 updated_at i64
tags []string tags []string
} }
pub enum IssueType { pub enum IssueType {
task task
story story
bug bug
question question
epic epic
subtask subtask
} }
pub enum IssuePriority { pub enum IssuePriority {
lowest lowest
low low
medium medium
high high
highest highest
critical critical
} }
pub enum IssueStatus { pub enum IssueStatus {
open open
in_progress in_progress
blocked blocked
review review
testing testing
done done
closed closed
} }
pub fn (mut i ProjectIssue) calculate_id() { pub fn (mut i ProjectIssue) calculate_id() {
content := json.encode(IssueContent{ content := json.encode(IssueContent{
title: i.title title: i.title
description: i.description description: i.description
project_id: i.project_id project_id: i.project_id
issue_type: i.issue_type issue_type: i.issue_type
priority: i.priority priority: i.priority
status: i.status status: i.status
swimlane_id: i.swimlane_id swimlane_id: i.swimlane_id
assignees: i.assignees assignees: i.assignees
reporter: i.reporter reporter: i.reporter
milestone_id: i.milestone_id milestone_id: i.milestone_id
deadline: i.deadline deadline: i.deadline
estimate: i.estimate estimate: i.estimate
fs_files: i.fs_files fs_files: i.fs_files
parent_id: i.parent_id parent_id: i.parent_id
children: i.children children: i.children
tags: i.tags tags: i.tags
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
i.id = hash.hex()[..48] i.id = hash.hex()[..48]
} }
struct IssueContent { struct IssueContent {
title string title string
description string description string
project_id string project_id string
issue_type IssueType issue_type IssueType
priority IssuePriority priority IssuePriority
status IssueStatus status IssueStatus
swimlane_id string swimlane_id string
assignees []string assignees []string
reporter string reporter string
milestone_id string milestone_id string
deadline i64 deadline i64
estimate int estimate int
fs_files []string fs_files []string
parent_id string parent_id string
children []string children []string
tags []string tags []string
} }
pub fn new_project_issue(title string, project_id string, reporter string, issue_type IssueType) ProjectIssue { pub fn new_project_issue(title string, project_id string, reporter string, issue_type IssueType) ProjectIssue {
mut issue := ProjectIssue{ mut issue := ProjectIssue{
title: title title: title
project_id: project_id project_id: project_id
reporter: reporter reporter: reporter
issue_type: issue_type issue_type: issue_type
priority: .medium priority: .medium
status: .open status: .open
swimlane_id: 'todo' swimlane_id: 'todo'
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
} }
issue.calculate_id() issue.calculate_id()
return issue return issue
} }

View File

@@ -8,61 +8,61 @@ import json
@[heap] @[heap]
pub struct User { pub struct User {
pub mut: pub mut:
id string // blake192 hash id string // blake192 hash
name string name string
email string email string
public_key string // for encryption/signing public_key string // for encryption/signing
phone string phone string
address string address string
avatar_url string avatar_url string
bio string bio string
timezone string timezone string
created_at i64 created_at i64
updated_at i64 updated_at i64
status UserStatus status UserStatus
} }
pub enum UserStatus { pub enum UserStatus {
active active
inactive inactive
suspended suspended
pending pending
} }
pub fn (mut u User) calculate_id() { pub fn (mut u User) calculate_id() {
content := json.encode(UserContent{ content := json.encode(UserContent{
name: u.name name: u.name
email: u.email email: u.email
public_key: u.public_key public_key: u.public_key
phone: u.phone phone: u.phone
address: u.address address: u.address
bio: u.bio bio: u.bio
timezone: u.timezone timezone: u.timezone
status: u.status status: u.status
}) })
hash := blake3.sum256(content.bytes()) hash := blake3.sum256(content.bytes())
u.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars u.id = hash.hex()[..48] // blake192 = first 192 bits = 48 hex chars
} }
struct UserContent { struct UserContent {
name string name string
email string email string
public_key string public_key string
phone string phone string
address string address string
bio string bio string
timezone string timezone string
status UserStatus status UserStatus
} }
pub fn new_user(name string, email string) User { pub fn new_user(name string, email string) User {
mut user := User{ mut user := User{
name: name name: name
email: email email: email
created_at: time.now().unix() created_at: time.now().unix()
updated_at: time.now().unix() updated_at: time.now().unix()
status: .active status: .active
} }
user.calculate_id() user.calculate_id()
return user return user
} }

View File

@@ -6,32 +6,32 @@ import time
@[heap] @[heap]
pub struct VersionHistory { pub struct VersionHistory {
pub mut: pub mut:
current_id string // blake192 hash of current version current_id string // blake192 hash of current version
previous_id string // blake192 hash of previous version previous_id string // blake192 hash of previous version
next_id string // blake192 hash of next version (if exists) next_id string // blake192 hash of next version (if exists)
object_type string // Type of object (User, Group, etc.) object_type string // Type of object (User, Group, etc.)
change_type ChangeType change_type ChangeType
changed_by string // User ID who made the change changed_by string // User ID who made the change
changed_at i64 // Unix timestamp changed_at i64 // Unix timestamp
change_notes string // Optional description of changes change_notes string // Optional description of changes
} }
pub enum ChangeType { pub enum ChangeType {
create create
update update
delete delete
restore restore
} }
pub fn new_version_history(current_id string, previous_id string, object_type string, change_type ChangeType, changed_by string) VersionHistory { pub fn new_version_history(current_id string, previous_id string, object_type string, change_type ChangeType, changed_by string) VersionHistory {
return VersionHistory{ return VersionHistory{
current_id: current_id current_id: current_id
previous_id: previous_id previous_id: previous_id
object_type: object_type object_type: object_type
change_type: change_type change_type: change_type
changed_by: changed_by changed_by: changed_by
changed_at: time.now().unix() changed_at: time.now().unix()
} }
} }
// Database indexes needed: // Database indexes needed:

View File

@@ -1,13 +1,5 @@
module linux module linux
// import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.core.texttools
// import freeflowuniverse.herolib.screen
import os
import time
// import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.osal.core as osal
@[heap] @[heap]
pub struct LinuxFactory { pub struct LinuxFactory {
pub mut: pub mut:

View File

@@ -15,10 +15,10 @@ pub fn new() ServerManager {
} }
fn (s ServerManager) execute(command string) bool { fn (s ServerManager) execute(command string) bool {
// console.print_debug(command) console.print_debug(command)
r := os.execute(command) r := os.execute(command)
// console.print_debug(r) console.print_debug(r)
return true return true
} }

View File

@@ -1,7 +1,6 @@
module sshagent module sshagent
import freeflowuniverse.herolib.ui.console import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.builder
// Check if SSH agent is properly configured and all is good // Check if SSH agent is properly configured and all is good
pub fn agent_check(mut agent SSHAgent) ! { pub fn agent_check(mut agent SSHAgent) ! {

View File

@@ -94,6 +94,7 @@ pub fn (mut c Client) send[T, D](request RequestGeneric[T], params SendParams) !
myerror := response.error_ or { myerror := response.error_ or {
return error('Failed to get error from response:\nRequest: ${request.encode()}\nResponse: ${response_json}\n${err}') return error('Failed to get error from response:\nRequest: ${request.encode()}\nResponse: ${response_json}\n${err}')
} }
// print_backtrace() // print_backtrace()
mut myreq := request.encode() mut myreq := request.encode()
if c.transport is UnixSocketTransport { if c.transport is UnixSocketTransport {

View File

@@ -78,11 +78,10 @@ pub fn (mut t UnixSocketTransport) send(request string, params SendParams) !stri
// Append the newly read data to the total response // Append the newly read data to the total response
res_total << res[..n] res_total << res[..n]
//here we need to check we are at end // here we need to check we are at end
if res.bytestr().contains('\n') { if res.bytestr().contains('\n') {
break break
} }
} }
unix.shutdown(socket.sock.handle) unix.shutdown(socket.sock.handle)
socket.close() or {} socket.close() or {}

View File

@@ -3,117 +3,115 @@ module openrpcserver
import freeflowuniverse.herolib.data.encoder import freeflowuniverse.herolib.data.encoder
import freeflowuniverse.herolib.data.ourtime import freeflowuniverse.herolib.data.ourtime
@[heap] @[heap]
pub struct Comment { pub struct Comment {
pub mut: pub mut:
id u32 id u32
comment string comment string
parent u32 //id of parent comment if any, 0 means none parent u32 // id of parent comment if any, 0 means none
updated_at i64 updated_at i64
author u32 //links to user author u32 // links to user
} }
pub fn (self Comment) type_name() string { pub fn (self Comment) type_name() string {
return 'comments' return 'comments'
} }
pub fn (self Comment) load(data []u8) !Comment { pub fn (self Comment) load(data []u8) !Comment {
return comment_load(data)! return comment_load(data)!
} }
pub fn (self Comment) dump() ![]u8{ pub fn (self Comment) dump() ![]u8 {
// Create a new encoder // Create a new encoder
mut e := encoder.new() mut e := encoder.new()
e.add_u8(1) e.add_u8(1)
e.add_u32(self.id) e.add_u32(self.id)
e.add_string(self.comment) e.add_string(self.comment)
e.add_u32(self.parent) e.add_u32(self.parent)
e.add_i64(self.updated_at) e.add_i64(self.updated_at)
e.add_u32(self.author) e.add_u32(self.author)
return e.data return e.data
} }
pub fn comment_load(data []u8) !Comment {
// Create a new decoder
mut e := encoder.decoder_new(data)
version := e.get_u8()!
if version != 1 {
panic('wrong version in comment load')
}
mut comment := Comment{}
comment.id = e.get_u32()!
comment.comment = e.get_string()!
comment.parent = e.get_u32()!
comment.updated_at = e.get_i64()!
comment.author = e.get_u32()!
return comment
}
pub struct CommentArg { pub struct CommentArg {
pub mut: pub mut:
comment string comment string
parent u32 parent u32
author u32 author u32
} }
pub fn comment_multiset(args []CommentArg) ![]u32 { pub fn comment_multiset(args []CommentArg) ![]u32 {
return comments2ids(args)! return comments2ids(args)!
} }
pub fn comments2ids(args []CommentArg) ![]u32 { pub fn comments2ids(args []CommentArg) ![]u32 {
return args.map(comment2id(it.comment)!) return args.map(comment2id(it.comment)!)
} }
pub fn comment2id(comment string) !u32 { pub fn comment2id(comment string) !u32 {
comment_fixed := comment.to_lower_ascii().trim_space() comment_fixed := comment.to_lower_ascii().trim_space()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
return if comment_fixed.len > 0{ return if comment_fixed.len > 0 {
hash := md5.hexhash(comment_fixed) hash := md5.hexhash(comment_fixed)
comment_found := redis.hget("db:comments", hash)! comment_found := redis.hget('db:comments', hash)!
if comment_found == ""{ if comment_found == '' {
id := u32(redis.incr("db:comments:id")!) id := u32(redis.incr('db:comments:id')!)
redis.hset("db:comments", hash, id.str())! redis.hset('db:comments', hash, id.str())!
redis.hset("db:comments", id.str(), comment_fixed)! redis.hset('db:comments', id.str(), comment_fixed)!
id id
}else{ } else {
comment_found.u32() comment_found.u32()
} }
} else { 0 } } else {
0
}
} }
// get new comment, not from the DB
//get new comment, not from the DB pub fn comment_new(args CommentArg) !Comment {
pub fn comment_new(args CommentArg) !Comment{ mut o := Comment{
mut o := Comment { comment: args.comment
comment: args.comment parent: args.parent
parent: args.parent updated_at: ourtime.now().unix()
updated_at: ourtime.now().unix() author: args.author
author: args.author }
} return o
return o
} }
pub fn comment_multiset(args []CommentArg) ![]u32{ pub fn comment_multiset(args []CommentArg) ![]u32 {
mut ids := []u32{} mut ids := []u32{}
for comment in args { for comment in args {
ids << comment_set(comment)! ids << comment_set(comment)!
} }
return ids return ids
} }
pub fn comment_set(args CommentArg) !u32{ pub fn comment_set(args CommentArg) !u32 {
mut o := comment_new(args)! mut o := comment_new(args)!
// Use openrpcserver set function which now returns the ID // Use openrpcserver set function which now returns the ID
return openrpcserver.set[Comment](mut o)! return set[Comment](mut o)!
} }
pub fn comment_exist(id u32) !bool{ pub fn comment_exist(id u32) !bool {
return openrpcserver.exists[Comment](id)! return exists[Comment](id)!
} }
pub fn comment_get(id u32) !Comment{ pub fn comment_get(id u32) !Comment {
return openrpcserver.get[Comment](id)! return get[Comment](id)!
} }
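A minimal usage sketch for the helpers above (assuming a Redis instance reachable through redisclient.core_get, and run from inside this module in a function that returns !):

// create a comment and read it back by id
cid := comment_set(CommentArg{
	comment: 'looks good to me'
	author:  42
})!
c := comment_get(cid)!
println(c.comment)
assert comment_exist(cid)!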

View File

@@ -3,55 +3,57 @@ module openrpcserver
import freeflowuniverse.herolib.core.redisclient import freeflowuniverse.herolib.core.redisclient
pub fn set[T](mut obj T) !u32 { pub fn set[T](mut obj T) !u32 {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
// Generate ID if not set // Generate ID if not set
if obj.id == 0 { if obj.id == 0 {
myid := redis.incr("db:${name}:id")! myid := redis.incr('db:${name}:id')!
obj.id = u32(myid) obj.id = u32(myid)
} }
data := obj.dump()! data := obj.dump()!
redis.hset("db:${name}",obj.id.str(),data.bytestr())! redis.hset('db:${name}', obj.id.str(), data.bytestr())!
return obj.id return obj.id
} }
pub fn get[T](id u32) !T { pub fn get[T](id u32) !T {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
data := redis.hget("db:${name}",id.str())! data := redis.hget('db:${name}', id.str())!
if data.len > 0 { if data.len > 0 {
return T{}.load(data.bytes())! return T{}.load(data.bytes())!
} else { } else {
return error("Can't find ${name} with id: ${id}") return error("Can't find ${name} with id: ${id}")
} }
} }
pub fn exists[T](id u32) !bool { pub fn exists[T](id u32) !bool {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
return redis.hexists("db:${name}",id.str())! return redis.hexists('db:${name}', id.str())!
} }
pub fn delete[T](id u32) ! { pub fn delete[T](id u32) ! {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
redis.hdel("db:${name}", id.str())! redis.hdel('db:${name}', id.str())!
} }
pub fn list[T]() ![]T { pub fn list[T]() ![]T {
name := T{}.type_name() name := T{}.type_name()
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
all_data := redis.hgetall("db:${name}")! all_data := redis.hgetall('db:${name}')!
mut result := []T{} mut result := []T{}
for _, data in all_data { for _, data in all_data {
result << T{}.load(data.bytes())! result << T{}.load(data.bytes())!
} }
return result return result
} }
//make it easy to get a base object // make it easy to get a base object
pub fn new_from_base[T](args BaseArgs) !Base { pub fn new_from_base[T](args BaseArgs) !Base {
return T { Base: new_base(args)! } return T{
Base: new_base(args)!
}
} }
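As a rough usage sketch of these generics (assuming a reachable Redis and the Comment type from this module, which provides the required type_name/dump/load methods; run inside a function returning !):

// persist, fetch, enumerate and delete through the generic helpers
mut c := Comment{
	comment: 'hello'
	author:  1
}
id := set[Comment](mut c)!   // stored in the db:comments hash under its id
loaded := get[Comment](id)!  // decoded via Comment.load
assert exists[Comment](id)!
all := list[Comment]()!      // every stored comment
delete[Comment](id)!
println('${loaded.comment} (${all.len} stored before delete)')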

View File

@@ -1,7 +1,6 @@
module openrpcserver module openrpcserver
import crypto.md5 import crypto.md5
import freeflowuniverse.herolib.core.redisclient import freeflowuniverse.herolib.core.redisclient
import freeflowuniverse.herolib.data.ourtime import freeflowuniverse.herolib.data.ourtime
@@ -9,85 +8,83 @@ import freeflowuniverse.herolib.data.ourtime
@[heap] @[heap]
pub struct Base { pub struct Base {
pub mut: pub mut:
id u32 id u32
name string name string
description string description string
created_at i64 created_at i64
updated_at i64 updated_at i64
securitypolicy u32 securitypolicy u32
tags u32 //when we set/get we always do this as []string, which can then be sorted and md5ed; this gives the unique id of the tags tags u32 // when we set/get we always do this as []string, which can then be sorted and md5ed; this gives the unique id of the tags
comments []u32 comments []u32
} }
@[heap] @[heap]
pub struct SecurityPolicy { pub struct SecurityPolicy {
pub mut: pub mut:
id u32 id u32
read []u32 //links to users & groups read []u32 // links to users & groups
write []u32 //links to users & groups write []u32 // links to users & groups
delete []u32 //links to users & groups delete []u32 // links to users & groups
public bool public bool
md5 string //this sorts the read, write and delete u32 lists and hashes them with md5; this allows going from any read/write/delete/public config to a single hash md5 string // this sorts the read, write and delete u32 lists and hashes them with md5; this allows going from any read/write/delete/public config to a single hash
} }
@[heap] @[heap]
pub struct Tags { pub struct Tags {
pub mut: pub mut:
id u32 id u32
names []string //unique per id names []string // unique per id
md5 string //of sorted names, to make it easy to find the unique id; each name lowercased and made ascii md5 string // of sorted names, to make it easy to find the unique id; each name lowercased and made ascii
} }
///////////////// /////////////////
@[params] @[params]
pub struct BaseArgs { pub struct BaseArgs {
pub mut: pub mut:
id ?u32 id ?u32
name string name string
description string description string
securitypolicy ?u32 securitypolicy ?u32
tags []string tags []string
comments []CommentArg comments []CommentArg
} }
//make it easy to get a base object // make it easy to get a base object
pub fn new_base(args BaseArgs) !Base { pub fn new_base(args BaseArgs) !Base {
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
commentids:=comment_multiset(args.comments)! commentids := comment_multiset(args.comments)!
tags:=tags2id(args.tags)! tags := tags2id(args.tags)!
return Base { return Base{
id: args.id or { 0 } id: args.id or { 0 }
name: args.name name: args.name
description: args.description description: args.description
created_at: ourtime.now().unix() created_at: ourtime.now().unix()
updated_at: ourtime.now().unix() updated_at: ourtime.now().unix()
securitypolicy: args.securitypolicy or { 0 } securitypolicy: args.securitypolicy or { 0 }
tags: tags tags: tags
comments: commentids comments: commentids
} }
} }
pub fn tags2id(tags []string) !u32 { pub fn tags2id(tags []string) !u32 {
mut redis := redisclient.core_get()! mut redis := redisclient.core_get()!
return if tags.len>0{ return if tags.len > 0 {
mut tags_fixed := tags.map(it.to_lower_ascii().trim_space()).filter(it != "") mut tags_fixed := tags.map(it.to_lower_ascii().trim_space()).filter(it != '')
tags_fixed.sort_ignore_case() tags_fixed.sort_ignore_case()
hash :=md5.hexhash(tags_fixed.join(",")) hash := md5.hexhash(tags_fixed.join(','))
tags_found := redis.hget("db:tags", hash)! tags_found := redis.hget('db:tags', hash)!
return if tags_found == ""{ return if tags_found == '' {
id := u32(redis.incr("db:tags:id")!) id := u32(redis.incr('db:tags:id')!)
redis.hset("db:tags", hash, id.str())! redis.hset('db:tags', hash, id.str())!
redis.hset("db:tags", id.str(), tags_fixed.join(","))! redis.hset('db:tags', id.str(), tags_fixed.join(','))!
id id
}else{ } else {
tags_found.u32() tags_found.u32()
} }
} else { } else {
0 0
} }
} }
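A small illustration of the deduplication idea in tags2id (a sketch, assuming Redis is reachable and a surrounding function returning !): tags are lowercased, trimmed, filtered and sorted before hashing, so case and ordering do not affect the resulting id.

a := tags2id(['Urgent', ' backend'])!
b := tags2id(['backend', 'urgent '])!
assert a == b // same normalized tag set -> same md5 -> same id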

View File

@@ -6,7 +6,7 @@ import net.unix
import os import os
import freeflowuniverse.herolib.ui.console import freeflowuniverse.herolib.ui.console
//THIS IS THE DEFAULT NEEDED FOR EACH OPENRPC SERVER WE MAKE // THIS IS THE DEFAULT NEEDED FOR EACH OPENRPC SERVER WE MAKE
pub struct JsonRpcRequest { pub struct JsonRpcRequest {
pub: pub:
@@ -33,10 +33,9 @@ pub:
data string data string
} }
pub struct RPCServer { pub struct RPCServer {
pub mut: pub mut:
listener &unix.StreamListener listener &unix.StreamListener
socket_path string socket_path string
} }
@@ -63,7 +62,7 @@ pub fn new_rpc_server(args RPCServerArgs) !&RPCServer {
listener := unix.listen_stream(args.socket_path, unix.ListenOptions{})! listener := unix.listen_stream(args.socket_path, unix.ListenOptions{})!
return &RPCServer{ return &RPCServer{
listener: listener listener: listener
socket_path: args.socket_path socket_path: args.socket_path
} }
} }
@@ -145,22 +144,22 @@ pub fn (mut server RPCServer) process(method string, params_str string) !string
fn (mut server RPCServer) create_success_response(result string, id string) string { fn (mut server RPCServer) create_success_response(result string, id string) string {
response := JsonRpcResponse{ response := JsonRpcResponse{
jsonrpc: '2.0' jsonrpc: '2.0'
result: result result: result
id: id id: id
} }
return json.encode(response) return json.encode(response)
} }
fn (mut server RPCServer) create_error_response(code int, message string, id string) string { fn (mut server RPCServer) create_error_response(code int, message string, id string) string {
error := JsonRpcError{ error := JsonRpcError{
code: code code: code
message: message message: message
data: 'null' data: 'null'
} }
response := JsonRpcResponse{ response := JsonRpcResponse{
jsonrpc: '2.0' jsonrpc: '2.0'
error: error error: error
id: id id: id
} }
return json.encode(response) return json.encode(response)
} }
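For reference, a hedged sketch of the wire format these two helpers produce (field names follow the structs above; exactly which unset fields json.encode omits is not shown here, and -32601 is simply the standard JSON-RPC "method not found" code used as an example):

// given some 'mut server RPCServer' inside this module, the JSON on the wire
// looks roughly like this:
//
//   server.create_success_response('"pong"', '1')
//     ~> {"jsonrpc":"2.0","result":"\"pong\"","id":"1"}
//
//   server.create_error_response(-32601, 'method not found', '1')
//     ~> {"jsonrpc":"2.0","error":{"code":-32601,"message":"method not found","data":"null"},"id":"1"}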

View File

@@ -41,21 +41,21 @@ pub mut:
// new creates a new Company with default values // new creates a new Company with default values
pub fn Company.new() Company { pub fn Company.new() Company {
return Company{ return Company{
id: 0 id: 0
name: '' name: ''
registration_number: '' registration_number: ''
incorporation_date: 0 incorporation_date: 0
fiscal_year_end: '' fiscal_year_end: ''
email: '' email: ''
phone: '' phone: ''
website: '' website: ''
address: '' address: ''
business_type: .single business_type: .single
industry: '' industry: ''
description: '' description: ''
status: .pending_payment status: .pending_payment
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -34,19 +34,19 @@ pub mut:
pub fn Payment.new(payment_intent_id string, company_id u32, payment_plan string, setup_fee f64, monthly_fee f64, total_amount f64) Payment { pub fn Payment.new(payment_intent_id string, company_id u32, payment_plan string, setup_fee f64, monthly_fee f64, total_amount f64) Payment {
now := time.now().unix_time() now := time.now().unix_time()
return Payment{ return Payment{
id: 0 id: 0
payment_intent_id: payment_intent_id payment_intent_id: payment_intent_id
company_id: company_id company_id: company_id
payment_plan: payment_plan payment_plan: payment_plan
setup_fee: setup_fee setup_fee: setup_fee
monthly_fee: monthly_fee monthly_fee: monthly_fee
total_amount: total_amount total_amount: total_amount
currency: 'usd' currency: 'usd'
status: .pending status: .pending
stripe_customer_id: none stripe_customer_id: none
created_at: now created_at: now
completed_at: none completed_at: none
updated_at: u64(now) updated_at: u64(now)
} }
} }

View File

@@ -23,9 +23,9 @@ pub mut:
// new creates a new ProductComponent with default values // new creates a new ProductComponent with default values
pub fn ProductComponent.new() ProductComponent { pub fn ProductComponent.new() ProductComponent {
return ProductComponent{ return ProductComponent{
name: '' name: ''
description: '' description: ''
quantity: 1 quantity: 1
} }
} }
@@ -51,37 +51,37 @@ pub fn (mut pc ProductComponent) quantity(quantity u32) ProductComponent {
@[heap] @[heap]
pub struct Product { pub struct Product {
pub mut: pub mut:
id u32 // Unique product ID id u32 // Unique product ID
name string // Product name name string // Product name
description string // Product description description string // Product description
price f64 // Product price price f64 // Product price
type_ ProductType // Product type (product or service) type_ ProductType // Product type (product or service)
category string // Product category category string // Product category
status ProductStatus // Product status status ProductStatus // Product status
max_amount u16 // Maximum amount available max_amount u16 // Maximum amount available
purchase_till i64 // Purchase deadline timestamp purchase_till i64 // Purchase deadline timestamp
active_till i64 // Active until timestamp active_till i64 // Active until timestamp
components []ProductComponent // Product components components []ProductComponent // Product components
created_at u64 // Creation timestamp created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp updated_at u64 // Last update timestamp
} }
// new creates a new Product with default values // new creates a new Product with default values
pub fn Product.new() Product { pub fn Product.new() Product {
return Product{ return Product{
id: 0 id: 0
name: '' name: ''
description: '' description: ''
price: 0.0 price: 0.0
type_: .product type_: .product
category: '' category: ''
status: .available status: .available
max_amount: 0 max_amount: 0
purchase_till: 0 purchase_till: 0
active_till: 0 active_till: 0
components: [] components: []
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }
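A brief sketch of how these defaults combine with the chainable setters hinted at above (only the quantity() setter is visible in this diff; the rest are plain field assignments):

mut cpu := ProductComponent.new() // quantity defaults to 1
cpu.name = 'vCPU'
cpu = cpu.quantity(4)

mut p := Product.new()
p.name = 'Small VM'
p.price = 5.0
p.components << cpu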

View File

@@ -10,22 +10,22 @@ pub enum SaleStatus {
// SaleItem represents an individual item within a Sale // SaleItem represents an individual item within a Sale
pub struct SaleItem { pub struct SaleItem {
pub mut: pub mut:
product_id u32 // Product ID product_id u32 // Product ID
name string // Denormalized product name at time of sale name string // Denormalized product name at time of sale
quantity i32 // Quantity purchased quantity i32 // Quantity purchased
unit_price f64 // Price per unit at time of sale unit_price f64 // Price per unit at time of sale
subtotal f64 // Subtotal for this item subtotal f64 // Subtotal for this item
service_active_until ?i64 // Optional: For services, date until this specific purchased instance is active service_active_until ?i64 // Optional: For services, date until this specific purchased instance is active
} }
// new creates a new SaleItem with default values // new creates a new SaleItem with default values
pub fn SaleItem.new() SaleItem { pub fn SaleItem.new() SaleItem {
return SaleItem{ return SaleItem{
product_id: 0 product_id: 0
name: '' name: ''
quantity: 0 quantity: 0
unit_price: 0.0 unit_price: 0.0
subtotal: 0.0 subtotal: 0.0
service_active_until: none service_active_until: none
} }
} }
@@ -91,17 +91,17 @@ pub mut:
// new creates a new Sale with default values // new creates a new Sale with default values
pub fn Sale.new() Sale { pub fn Sale.new() Sale {
return Sale{ return Sale{
id: 0 id: 0
company_id: 0 company_id: 0
buyer_id: 0 buyer_id: 0
transaction_id: 0 transaction_id: 0
total_amount: 0.0 total_amount: 0.0
status: .pending status: .pending
sale_date: 0 sale_date: 0
items: [] items: []
notes: '' notes: ''
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -16,12 +16,12 @@ pub mut:
// new creates a new Comment with default values // new creates a new Comment with default values
pub fn Comment.new() Comment { pub fn Comment.new() Comment {
return Comment{ return Comment{
id: 0 id: 0
user_id: 0 user_id: 0
content: '' content: ''
parent_comment_id: none parent_comment_id: none
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -4,31 +4,31 @@ module finance
@[heap] @[heap]
pub struct Account { pub struct Account {
pub mut: pub mut:
id u32 // Unique account ID id u32 // Unique account ID
name string // Internal name of the account for the user name string // Internal name of the account for the user
user_id u32 // User ID of the owner of the account user_id u32 // User ID of the owner of the account
description string // Optional description of the account description string // Optional description of the account
ledger string // Describes the ledger/blockchain where the account is located ledger string // Describes the ledger/blockchain where the account is located
address string // Address of the account on the blockchain address string // Address of the account on the blockchain
pubkey string // Public key pubkey string // Public key
assets []u32 // List of asset IDs in this account assets []u32 // List of asset IDs in this account
created_at u64 // Creation timestamp created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp updated_at u64 // Last update timestamp
} }
// new creates a new Account with default values // new creates a new Account with default values
pub fn Account.new() Account { pub fn Account.new() Account {
return Account{ return Account{
id: 0 id: 0
name: '' name: ''
user_id: 0 user_id: 0
description: '' description: ''
ledger: '' ledger: ''
address: '' address: ''
pubkey: '' pubkey: ''
assets: [] assets: []
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -26,15 +26,15 @@ pub mut:
// new creates a new Asset with default values // new creates a new Asset with default values
pub fn Asset.new() Asset { pub fn Asset.new() Asset {
return Asset{ return Asset{
id: 0 id: 0
name: '' name: ''
description: '' description: ''
amount: 0.0 amount: 0.0
address: '' address: ''
asset_type: .native asset_type: .native
decimals: 18 decimals: 18
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }
@@ -81,7 +81,31 @@ pub fn (a Asset) formatted_amount() string {
factor *= 10 factor *= 10
} }
formatted_amount := (a.amount * factor).round() / factor formatted_amount := (a.amount * factor).round() / factor
return '${formatted_amount:.${a.decimals}f}' // Format with the specified number of decimal places
if a.decimals == 0 {
return '${formatted_amount:.0f}'
} else if a.decimals == 1 {
return '${formatted_amount:.1f}'
} else if a.decimals == 2 {
return '${formatted_amount:.2f}'
} else if a.decimals == 3 {
return '${formatted_amount:.3f}'
} else if a.decimals == 4 {
return '${formatted_amount:.4f}'
} else {
// For more than 4 decimals, use string manipulation
str_amount := formatted_amount.str()
if str_amount.contains('.') {
parts := str_amount.split('.')
if parts.len == 2 {
decimal_part := parts[1]
if decimal_part.len > a.decimals {
return '${parts[0]}.${decimal_part[..a.decimals]}'
}
}
}
return str_amount
}
} }
// transfer_to transfers amount to another asset // transfer_to transfers amount to another asset
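A quick sketch of what formatted_amount above yields (assuming the elided loop sets factor to 10^decimals): up to four decimals go through the fixed-precision format specifier, anything beyond that falls back to truncating the default f64 string.

mut a := Asset.new()
a.amount = 1.23456789
a.decimals = 2
println(a.formatted_amount()) // '1.23'
a.decimals = 6
println(a.formatted_amount()) // around '1.234568', subject to f64 str() precision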

View File

@@ -40,10 +40,10 @@ pub mut:
pub fn Bid.new() Bid { pub fn Bid.new() Bid {
return Bid{ return Bid{
listing_id: '' listing_id: ''
bidder_id: 0 bidder_id: 0
amount: 0.0 amount: 0.0
currency: '' currency: ''
status: .active status: .active
created_at: u64(time.now().unix_time()) created_at: u64(time.now().unix_time())
} }
} }
@@ -82,50 +82,50 @@ pub fn (mut b Bid) status(status BidStatus) Bid {
@[heap] @[heap]
pub struct Listing { pub struct Listing {
pub mut: pub mut:
id u32 // Unique listing ID id u32 // Unique listing ID
title string // Title of the listing title string // Title of the listing
description string // Description of the listing description string // Description of the listing
asset_id string // ID of the asset being listed asset_id string // ID of the asset being listed
asset_type AssetType // Type of the asset asset_type AssetType // Type of the asset
seller_id string // ID of the user selling the asset seller_id string // ID of the user selling the asset
price f64 // Initial price for fixed price, or starting price for auction price f64 // Initial price for fixed price, or starting price for auction
currency string // Currency of the listing currency string // Currency of the listing
listing_type ListingType // Type of listing (fixed_price, auction, exchange) listing_type ListingType // Type of listing (fixed_price, auction, exchange)
status ListingStatus // Status of the listing status ListingStatus // Status of the listing
expires_at ?u64 // Optional expiration date expires_at ?u64 // Optional expiration date
sold_at ?u64 // Optional date when the item was sold sold_at ?u64 // Optional date when the item was sold
buyer_id ?string // Optional buyer ID buyer_id ?string // Optional buyer ID
sale_price ?f64 // Optional final sale price sale_price ?f64 // Optional final sale price
bids []Bid // List of bids for auction type listings bids []Bid // List of bids for auction type listings
tags []string // Tags for the listing tags []string // Tags for the listing
image_url ?string // Optional image URL image_url ?string // Optional image URL
created_at u64 // Creation timestamp created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp updated_at u64 // Last update timestamp
} }
// new creates a new Listing with default values // new creates a new Listing with default values
pub fn Listing.new() Listing { pub fn Listing.new() Listing {
now := u64(time.now().unix_time()) now := u64(time.now().unix_time())
return Listing{ return Listing{
id: 0 id: 0
title: '' title: ''
description: '' description: ''
asset_id: '' asset_id: ''
asset_type: .native asset_type: .native
seller_id: '' seller_id: ''
price: 0.0 price: 0.0
currency: '' currency: ''
listing_type: .fixed_price listing_type: .fixed_price
status: .active status: .active
expires_at: none expires_at: none
sold_at: none sold_at: none
buyer_id: none buyer_id: none
sale_price: none sale_price: none
bids: [] bids: []
tags: [] tags: []
image_url: none image_url: none
created_at: now created_at: now
updated_at: now updated_at: now
} }
} }

View File

@@ -4,13 +4,13 @@ module flow
@[heap] @[heap]
pub struct Flow { pub struct Flow {
pub mut: pub mut:
id u32 // Unique flow ID id u32 // Unique flow ID
flow_uuid string // A unique UUID for the flow, for external reference flow_uuid string // A unique UUID for the flow, for external reference
name string // Name of the flow name string // Name of the flow
status string // Current status of the flow (e.g., "Pending", "InProgress", "Completed", "Failed") status string // Current status of the flow (e.g., "Pending", "InProgress", "Completed", "Failed")
steps []FlowStep // Steps involved in this flow steps []FlowStep // Steps involved in this flow
created_at u64 // Creation timestamp created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp updated_at u64 // Last update timestamp
} }
// new creates a new Flow // new creates a new Flow
@@ -18,11 +18,11 @@ pub mut:
// The ID is managed by the database // The ID is managed by the database
pub fn Flow.new(flow_uuid string) Flow { pub fn Flow.new(flow_uuid string) Flow {
return Flow{ return Flow{
id: 0 id: 0
flow_uuid: flow_uuid flow_uuid: flow_uuid
name: '' name: ''
status: 'Pending' status: 'Pending'
steps: [] steps: []
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }

View File

@@ -15,12 +15,12 @@ pub mut:
// new creates a new flow step // new creates a new flow step
pub fn FlowStep.new(step_order u32) FlowStep { pub fn FlowStep.new(step_order u32) FlowStep {
return FlowStep{ return FlowStep{
id: 0 id: 0
description: none description: none
step_order: step_order step_order: step_order
status: 'Pending' status: 'Pending'
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -18,15 +18,15 @@ pub mut:
// new creates a new signature requirement // new creates a new signature requirement
pub fn SignatureRequirement.new(flow_step_id u32, public_key string, message string) SignatureRequirement { pub fn SignatureRequirement.new(flow_step_id u32, public_key string, message string) SignatureRequirement {
return SignatureRequirement{ return SignatureRequirement{
id: 0 id: 0
flow_step_id: flow_step_id flow_step_id: flow_step_id
public_key: public_key public_key: public_key
message: message message: message
signed_by: none signed_by: none
signature: none signature: none
status: 'Pending' status: 'Pending'
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -3,45 +3,45 @@ module identity
// IdenfyWebhookEvent represents an iDenfy webhook event structure // IdenfyWebhookEvent represents an iDenfy webhook event structure
pub struct IdenfyWebhookEvent { pub struct IdenfyWebhookEvent {
pub mut: pub mut:
client_id string // Client ID client_id string // Client ID
scan_ref string // Scan reference scan_ref string // Scan reference
status string // Verification status status string // Verification status
platform string // Platform used platform string // Platform used
started_at string // When verification started started_at string // When verification started
finished_at ?string // When verification finished (optional) finished_at ?string // When verification finished (optional)
client_ip ?string // Client IP address (optional) client_ip ?string // Client IP address (optional)
client_location ?string // Client location (optional) client_location ?string // Client location (optional)
data ?IdenfyVerificationData // Verification data (optional) data ?IdenfyVerificationData // Verification data (optional)
} }
// IdenfyVerificationData represents the verification data from iDenfy // IdenfyVerificationData represents the verification data from iDenfy
pub struct IdenfyVerificationData { pub struct IdenfyVerificationData {
pub mut: pub mut:
doc_first_name ?string // First name from document doc_first_name ?string // First name from document
doc_last_name ?string // Last name from document doc_last_name ?string // Last name from document
doc_number ?string // Document number doc_number ?string // Document number
doc_personal_code ?string // Personal code from document doc_personal_code ?string // Personal code from document
doc_expiry ?string // Document expiry date doc_expiry ?string // Document expiry date
doc_dob ?string // Date of birth from document doc_dob ?string // Date of birth from document
doc_type ?string // Document type doc_type ?string // Document type
doc_sex ?string // Sex from document doc_sex ?string // Sex from document
doc_nationality ?string // Nationality from document doc_nationality ?string // Nationality from document
doc_issuing_country ?string // Document issuing country doc_issuing_country ?string // Document issuing country
manually_data_changed ?bool // Whether data was manually changed manually_data_changed ?bool // Whether data was manually changed
} }
// new creates a new IdenfyWebhookEvent // new creates a new IdenfyWebhookEvent
pub fn IdenfyWebhookEvent.new() IdenfyWebhookEvent { pub fn IdenfyWebhookEvent.new() IdenfyWebhookEvent {
return IdenfyWebhookEvent{ return IdenfyWebhookEvent{
client_id: '' client_id: ''
scan_ref: '' scan_ref: ''
status: '' status: ''
platform: '' platform: ''
started_at: '' started_at: ''
finished_at: none finished_at: none
client_ip: none client_ip: none
client_location: none client_location: none
data: none data: none
} }
} }
@@ -102,16 +102,16 @@ pub fn (mut event IdenfyWebhookEvent) data(data ?IdenfyVerificationData) IdenfyW
// new creates a new IdenfyVerificationData // new creates a new IdenfyVerificationData
pub fn IdenfyVerificationData.new() IdenfyVerificationData { pub fn IdenfyVerificationData.new() IdenfyVerificationData {
return IdenfyVerificationData{ return IdenfyVerificationData{
doc_first_name: none doc_first_name: none
doc_last_name: none doc_last_name: none
doc_number: none doc_number: none
doc_personal_code: none doc_personal_code: none
doc_expiry: none doc_expiry: none
doc_dob: none doc_dob: none
doc_type: none doc_type: none
doc_sex: none doc_sex: none
doc_nationality: none doc_nationality: none
doc_issuing_country: none doc_issuing_country: none
manually_data_changed: none manually_data_changed: none
} }
} }

View File

@@ -32,11 +32,11 @@ pub mut:
// new creates a new ContractRevision // new creates a new ContractRevision
pub fn ContractRevision.new(version u32, content string, created_by string) ContractRevision { pub fn ContractRevision.new(version u32, content string, created_by string) ContractRevision {
return ContractRevision{ return ContractRevision{
version: version version: version
content: content content: content
created_at: u64(time.now().unix_time()) created_at: u64(time.now().unix_time())
created_by: created_by created_by: created_by
comments: none comments: none
} }
} }
@@ -49,27 +49,27 @@ pub fn (mut cr ContractRevision) comments(comments string) ContractRevision {
// ContractSigner represents a party involved in signing a contract // ContractSigner represents a party involved in signing a contract
pub struct ContractSigner { pub struct ContractSigner {
pub mut: pub mut:
id string // Unique ID for the signer (UUID string) id string // Unique ID for the signer (UUID string)
name string // Signer's name name string // Signer's name
email string // Signer's email email string // Signer's email
status SignerStatus // Current status status SignerStatus // Current status
signed_at ?u64 // When they signed (optional) signed_at ?u64 // When they signed (optional)
comments ?string // Optional comments from signer comments ?string // Optional comments from signer
last_reminder_mail_sent_at ?u64 // Last reminder timestamp last_reminder_mail_sent_at ?u64 // Last reminder timestamp
signature_data ?string // Base64 encoded signature image data signature_data ?string // Base64 encoded signature image data
} }
// new creates a new ContractSigner // new creates a new ContractSigner
pub fn ContractSigner.new(id string, name string, email string) ContractSigner { pub fn ContractSigner.new(id string, name string, email string) ContractSigner {
return ContractSigner{ return ContractSigner{
id: id id: id
name: name name: name
email: email email: email
status: .pending status: .pending
signed_at: none signed_at: none
comments: none comments: none
last_reminder_mail_sent_at: none last_reminder_mail_sent_at: none
signature_data: none signature_data: none
} }
} }
@@ -139,48 +139,48 @@ pub fn (mut cs ContractSigner) sign(signature_data ?string, comments ?string) {
@[heap] @[heap]
pub struct Contract { pub struct Contract {
pub mut: pub mut:
id u32 // Unique contract ID id u32 // Unique contract ID
contract_id string // Unique UUID for the contract contract_id string // Unique UUID for the contract
title string // Contract title title string // Contract title
description string // Contract description description string // Contract description
contract_type string // Type of contract contract_type string // Type of contract
status ContractStatus // Current status status ContractStatus // Current status
created_by string // Who created the contract created_by string // Who created the contract
terms_and_conditions string // Terms and conditions text terms_and_conditions string // Terms and conditions text
start_date ?u64 // Optional start date start_date ?u64 // Optional start date
end_date ?u64 // Optional end date end_date ?u64 // Optional end date
renewal_period_days ?i32 // Optional renewal period in days renewal_period_days ?i32 // Optional renewal period in days
next_renewal_date ?u64 // Optional next renewal date next_renewal_date ?u64 // Optional next renewal date
signers []ContractSigner // List of signers signers []ContractSigner // List of signers
revisions []ContractRevision // Contract revisions revisions []ContractRevision // Contract revisions
current_version u32 // Current version number current_version u32 // Current version number
last_signed_date ?u64 // Last signing date last_signed_date ?u64 // Last signing date
created_at u64 // Creation timestamp created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp updated_at u64 // Last update timestamp
} }
// new creates a new Contract // new creates a new Contract
pub fn Contract.new(contract_id string) Contract { pub fn Contract.new(contract_id string) Contract {
now := u64(time.now().unix_time()) now := u64(time.now().unix_time())
return Contract{ return Contract{
id: 0 id: 0
contract_id: contract_id contract_id: contract_id
title: '' title: ''
description: '' description: ''
contract_type: '' contract_type: ''
status: .draft status: .draft
created_by: '' created_by: ''
terms_and_conditions: '' terms_and_conditions: ''
start_date: none start_date: none
end_date: none end_date: none
renewal_period_days: none renewal_period_days: none
next_renewal_date: none next_renewal_date: none
signers: [] signers: []
revisions: [] revisions: []
current_version: 0 current_version: 0
last_signed_date: none last_signed_date: none
created_at: now created_at: now
updated_at: now updated_at: now
} }
} }

View File

@@ -4,31 +4,31 @@ module library
@[heap] @[heap]
pub struct Collection { pub struct Collection {
pub mut: pub mut:
id u32 // Unique collection ID id u32 // Unique collection ID
title string // Title of the collection title string // Title of the collection
description ?string // Optional description of the collection description ?string // Optional description of the collection
images []u32 // List of image item IDs belonging to this collection images []u32 // List of image item IDs belonging to this collection
pdfs []u32 // List of PDF item IDs belonging to this collection pdfs []u32 // List of PDF item IDs belonging to this collection
markdowns []u32 // List of Markdown item IDs belonging to this collection markdowns []u32 // List of Markdown item IDs belonging to this collection
books []u32 // List of Book item IDs belonging to this collection books []u32 // List of Book item IDs belonging to this collection
slides []u32 // List of Slides item IDs belonging to this collection slides []u32 // List of Slides item IDs belonging to this collection
created_at u64 // Creation timestamp created_at u64 // Creation timestamp
updated_at u64 // Last update timestamp updated_at u64 // Last update timestamp
} }
// new creates a new Collection with default values // new creates a new Collection with default values
pub fn Collection.new() Collection { pub fn Collection.new() Collection {
return Collection{ return Collection{
id: 0 id: 0
title: '' title: ''
description: none description: none
images: [] images: []
pdfs: [] pdfs: []
markdowns: [] markdowns: []
books: [] books: []
slides: [] slides: []
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -17,14 +17,14 @@ pub mut:
// new creates a new Image with default values // new creates a new Image with default values
pub fn Image.new() Image { pub fn Image.new() Image {
return Image{ return Image{
id: 0 id: 0
title: '' title: ''
description: none description: none
url: '' url: ''
width: 0 width: 0
height: 0 height: 0
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }
@@ -97,13 +97,13 @@ pub mut:
// new creates a new Pdf with default values // new creates a new Pdf with default values
pub fn Pdf.new() Pdf { pub fn Pdf.new() Pdf {
return Pdf{ return Pdf{
id: 0 id: 0
title: '' title: ''
description: none description: none
url: '' url: ''
page_count: 0 page_count: 0
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }
@@ -151,12 +151,12 @@ pub mut:
// new creates a new Markdown document with default values // new creates a new Markdown document with default values
pub fn Markdown.new() Markdown { pub fn Markdown.new() Markdown {
return Markdown{ return Markdown{
id: 0 id: 0
title: '' title: ''
description: none description: none
content: '' content: ''
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }
@@ -200,8 +200,8 @@ pub mut:
// new creates a new TocEntry with default values // new creates a new TocEntry with default values
pub fn TocEntry.new() TocEntry { pub fn TocEntry.new() TocEntry {
return TocEntry{ return TocEntry{
title: '' title: ''
page: 0 page: 0
subsections: [] subsections: []
} }
} }
@@ -245,13 +245,13 @@ pub mut:
// new creates a new Book with default values // new creates a new Book with default values
pub fn Book.new() Book { pub fn Book.new() Book {
return Book{ return Book{
id: 0 id: 0
title: '' title: ''
description: none description: none
table_of_contents: [] table_of_contents: []
pages: [] pages: []
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }
@@ -325,8 +325,8 @@ pub mut:
// new creates a new Slide // new creates a new Slide
pub fn Slide.new() Slide { pub fn Slide.new() Slide {
return Slide{ return Slide{
image_url: '' image_url: ''
title: none title: none
description: none description: none
} }
} }
@@ -374,12 +374,12 @@ pub mut:
// new creates a new Slideshow with default values // new creates a new Slideshow with default values
pub fn Slideshow.new() Slideshow { pub fn Slideshow.new() Slideshow {
return Slideshow{ return Slideshow{
id: 0 id: 0
title: '' title: ''
description: none description: none
slides: [] slides: []
created_at: 0 created_at: 0
updated_at: 0 updated_at: 0
} }
} }

View File

@@ -14,12 +14,12 @@ pub mut:
// new creates a new Address with default values // new creates a new Address with default values
pub fn Address.new() Address { pub fn Address.new() Address {
return Address{ return Address{
street: '' street: ''
city: '' city: ''
state: none state: none
postal_code: '' postal_code: ''
country: '' country: ''
company: none company: none
} }
} }
@@ -169,22 +169,15 @@ pub fn (a Address) get_company_string() string {
// equals compares two addresses for equality // equals compares two addresses for equality
pub fn (a Address) equals(other Address) bool { pub fn (a Address) equals(other Address) bool {
return a.street == other.street && return a.street == other.street && a.city == other.city && a.state == other.state
a.city == other.city && && a.postal_code == other.postal_code && a.country == other.country
a.state == other.state && && a.company == other.company
a.postal_code == other.postal_code &&
a.country == other.country &&
a.company == other.company
} }
// is_empty checks if the address is completely empty // is_empty checks if the address is completely empty
pub fn (a Address) is_empty() bool { pub fn (a Address) is_empty() bool {
return a.street.len == 0 && return a.street.len == 0 && a.city.len == 0 && a.postal_code.len == 0 && a.country.len == 0
a.city.len == 0 && && a.state == none && a.company == none
a.postal_code.len == 0 &&
a.country.len == 0 &&
a.state == none &&
a.company == none
} }
// validate performs basic validation on the address // validate performs basic validation on the address

View File

@@ -9,11 +9,12 @@ module models
// - Payment models (Stripe webhooks) // - Payment models (Stripe webhooks)
// - Location models (addresses) // - Location models (addresses)
// Re-export all model modules for easy access // Import all model modules for easy access
pub use core
pub use finance import freeflowuniverse.herolib.threefold.models.core
pub use flow import freeflowuniverse.herolib.threefold.models.finance
pub use business import freeflowuniverse.herolib.threefold.models.flow
pub use identity import freeflowuniverse.herolib.threefold.models.business
pub use payment import freeflowuniverse.herolib.threefold.models.identity
pub use location import freeflowuniverse.herolib.threefold.models.payment
import freeflowuniverse.herolib.threefold.models.location
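Since V does not re-export imported modules, callers are now expected to import the concrete sub-module they need themselves, along these lines (a hedged sketch using the Account constructor shown earlier in this commit):

import freeflowuniverse.herolib.threefold.models.finance

fn example() {
	mut acc := finance.Account.new()
	acc.name = 'main wallet'
	println(acc.name)
}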

View File

@@ -17,8 +17,8 @@ pub mut:
// StripeEventData represents the data portion of a Stripe event // StripeEventData represents the data portion of a Stripe event
pub struct StripeEventData { pub struct StripeEventData {
pub mut: pub mut:
object string // The main object data (JSON as string for flexibility) object string // The main object data (JSON as string for flexibility)
previous_attributes ?string // Previous attributes if this is an update (JSON as string) previous_attributes ?string // Previous attributes if this is an update (JSON as string)
} }
// StripeEventRequest represents request information for a Stripe event // StripeEventRequest represents request information for a Stripe event
@@ -31,15 +31,15 @@ pub mut:
// new creates a new StripeWebhookEvent // new creates a new StripeWebhookEvent
pub fn StripeWebhookEvent.new() StripeWebhookEvent { pub fn StripeWebhookEvent.new() StripeWebhookEvent {
return StripeWebhookEvent{ return StripeWebhookEvent{
id: '' id: ''
object: 'event' object: 'event'
api_version: none api_version: none
created: 0 created: 0
data: StripeEventData.new() data: StripeEventData.new()
livemode: false livemode: false
pending_webhooks: 0 pending_webhooks: 0
request: none request: none
event_type: '' event_type: ''
} }
} }
@@ -100,7 +100,7 @@ pub fn (mut event StripeWebhookEvent) event_type(event_type string) StripeWebhoo
// new creates a new StripeEventData // new creates a new StripeEventData
pub fn StripeEventData.new() StripeEventData { pub fn StripeEventData.new() StripeEventData {
return StripeEventData{ return StripeEventData{
object: '' object: ''
previous_attributes: none previous_attributes: none
} }
} }
@@ -120,7 +120,7 @@ pub fn (mut data StripeEventData) previous_attributes(previous_attributes ?strin
// new creates a new StripeEventRequest // new creates a new StripeEventRequest
pub fn StripeEventRequest.new() StripeEventRequest { pub fn StripeEventRequest.new() StripeEventRequest {
return StripeEventRequest{ return StripeEventRequest{
id: none id: none
idempotency_key: none idempotency_key: none
} }
} }

View File

@@ -41,7 +41,7 @@ pub fn (mut docsite DocSite) generate_docs() ! {
} }
if gen.errors.len > 0 { if gen.errors.len > 0 {
println("Page List: is header collection and page name per collection.\nAvailable pages:\n${gen.client.list_markdown()!}") println('Page List: is header collection and page name per collection.\nAvailable pages:\n${gen.client.list_markdown()!}')
return error('Errors occurred during site generation:\n${gen.errors.join('\n\n')}\n') return error('Errors occurred during site generation:\n${gen.errors.join('\n\n')}\n')
} }
} }