2025-07-19 15:54:23 +02:00
parent f092095e7b
commit 02ffc71aea
65 changed files with 2424 additions and 2165 deletions

View File

@@ -8,5 +8,3 @@ import freeflowuniverse.herolib.develop.gittools
mut gs := gittools.get(reload: true)!
gs.repos_print()!

View File

@@ -4,6 +4,6 @@ import freeflowuniverse.herolib.installers.infra.dify as dify_installer
mut dify := dify_installer.get()!
dify.install()!
dify.start()!
dify.install()!
dify.start()!
// dify.destroy()!

View File

@@ -6,9 +6,9 @@ import json
fn main() {
// Create a new Zinit client with the default socket path
mut zinit_client := zinit.new_stateless(socket_path: '/tmp/zinit.sock')!
println('Connected to Zinit via OpenRPC')
// Example 1: Get the OpenRPC API specification
println('\n=== Getting API Specification ===')
api_spec := zinit_client.client.discover() or {
@@ -16,7 +16,7 @@ fn main() {
return
}
println('API Specification (first 100 chars): ${api_spec[..100]}...')
// Example 2: List all services
println('\n=== Listing Services ===')
service_list := zinit_client.client.list() or {
@@ -27,17 +27,17 @@ fn main() {
for name, state in service_list {
println('- ${name}: ${state}')
}
// Example 3: Get detailed status of a service (if any exist)
if service_list.len > 0 {
service_name := service_list.keys()[0]
println('\n=== Getting Status for Service: ${service_name} ===')
status := zinit_client.client.status(service_name) or {
println('Error getting status: ${err}')
return
}
println('Service Status:')
println('- Name: ${status.name}')
println('- PID: ${status.pid}')
@@ -47,7 +47,7 @@ fn main() {
for dep_name, dep_state in status.after {
println(' - ${dep_name}: ${dep_state}')
}
// Example 4: Get service stats
println('\n=== Getting Stats for Service: ${service_name} ===')
stats := zinit_client.client.stats(service_name) or {
@@ -55,7 +55,7 @@ fn main() {
println('Note: Stats are only available for running services')
return
}
println('Service Stats:')
println('- Memory Usage: ${stats.memory_usage} bytes')
println('- CPU Usage: ${stats.cpu_usage}%')
@@ -68,7 +68,7 @@ fn main() {
} else {
println('\nNo services found to query')
}
// Example 5: Create a new service (commented out for safety)
/*
println('\n=== Creating a New Service ===')
@@ -121,6 +121,6 @@ fn main() {
}
println('Service deleted')
*/
println('\nZinit OpenRPC client example completed')
}
}

View File

@@ -1,23 +1,23 @@
#!/usr/bin/env -S v -n -w -cg -gc none -d use_openssl -enable-globals run
import freeflowuniverse.herolib.schemas.jsonrpc
import freeflowuniverse.herolib.schemas.openrpc //for the model as used
import freeflowuniverse.herolib.schemas.openrpc // for the model as used
import json
// Define the service status response structure based on the OpenRPC schema
struct ServiceStatus {
name string
pid int
state string
name string
pid int
state string
target string
after map[string]string
after map[string]string
}
// Generic approach: Use a map to handle any complex JSON response
// This is more flexible than creating specific structs for each API
// Create a client using the Unix socket transport
mut cl := jsonrpc.new_unix_socket_client("/tmp/zinit.sock")
mut cl := jsonrpc.new_unix_socket_client('/tmp/zinit.sock')
// Example 1: Discover the API using rpc_discover
// Create a request for rpc_discover method with empty parameters
@@ -65,7 +65,6 @@ println(' - For OpenRPC integration: Extract result as JSON string and pass to
println('\n The core issue (type mismatch) is now completely resolved!')
// Example 2: List all services
// Create a request for service_list method with empty parameters
list_request := jsonrpc.new_request_generic('service_list', []string{})
@@ -86,10 +85,12 @@ if service_list.len > 0 {
// Create a request for service_status method with the service name as parameter
// The parameter for service_status is a single string (service name)
status_request := jsonrpc.new_request_generic('service_status', {"name":service_name})
status_request := jsonrpc.new_request_generic('service_status', {
'name': service_name
})
// Send the request and receive a ServiceStatus object
println('\nSending service_status request for service: $service_name')
println('\nSending service_status request for service: ${service_name}')
service_status := cl.send[map[string]string, ServiceStatus](status_request)!
// Display the service status details
@@ -100,7 +101,7 @@ if service_list.len > 0 {
println('- Target: ${service_status.target}')
println('- Dependencies:')
for dep_name, dep_state in service_status.after {
println(' - $dep_name: $dep_state')
println(' - ${dep_name}: ${dep_state}')
}
} else {
println('\nNo services found to query status')

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import freeflowuniverse.herolib.data.doctree
import freeflowuniverse.herolib.web.doctreeclient
import os
@@ -18,8 +17,8 @@ tree.scan(
)!
tree.export(
destination: '/tmp/doctree_example_export'
reset: true
destination: '/tmp/doctree_example_export'
reset: true
)!
println('Doctree data setup complete.')
@@ -72,7 +71,6 @@ non_existent_page := 'non_existent_page_123'
exists_non_existent := client.page_exists(collection_name, non_existent_page)
println('Page "${non_existent_page}" exists: ${exists_non_existent}')
// Step 7: List images in the collection
println('\n7. Listing images:')
images := client.list_images(collection_name)!
@@ -121,6 +119,4 @@ content := client.get_page_content(collection_name, non_existent_page2) or {
'Error content'
}
println('\nExample completed successfully!')

View File

@@ -2,7 +2,8 @@
import freeflowuniverse.herolib.web.docusaurus
docusaurus.new(heroscript:'
docusaurus.new(
heroscript: '
!!docusaurus.define
path_build: "/tmp/docusaurus_build"
@@ -11,9 +12,10 @@ docusaurus.new(heroscript:'
!!docusaurus.add name:"tfgrid_docs"
git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech"
git_root:"/tmp/code"
git_reset:1
git_pull:1
// git_reset:1
// git_pull:1
!!docusaurus.dev
')!
'
)!

View File

@@ -15,4 +15,3 @@ text := os.read_file(path1)!
println('=== PLAINTEXT RENDERING ===')
println(markdown.to_plain(text))
println('')

View File

@@ -58,8 +58,6 @@ fn (mut m BizModel) revenue_action(action Action) !Action {
extrapolate: true
)!
// Handle revenue_nr parameter (number of revenue items)
mut revenue_nr := m.sheet.row_new(
name: '${name}_nr_sold'
@@ -69,7 +67,6 @@ fn (mut m BizModel) revenue_action(action Action) !Action {
extrapolate: false
)!
mut nr_sold := m.sheet.row_new(
name: '${name}_nr_sold'
growth: action.params.get_default('nr_sold', '0')!
@@ -82,7 +79,6 @@ fn (mut m BizModel) revenue_action(action Action) !Action {
product.has_items = true
}
// Handle revenue_item parameter (singular item)
mut revenue_item := m.sheet.row_new(
name: '${name}_revenue_item_setup'

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
import db.sqlite
import db.pg
import time
@@ -74,20 +73,20 @@ pub enum MemberStatus {
@[params]
pub struct ChatNewArgs {
pub mut:
name string
description ?string
chat_type ChatType
visibility ChatVisibility = .private
owner_id int
project_id ?int
team_id ?int
customer_id ?int
task_id ?int
issue_id ?int
name string
description ?string
chat_type ChatType
visibility ChatVisibility = .private
owner_id int
project_id ?int
team_id ?int
customer_id ?int
task_id ?int
issue_id ?int
milestone_id ?int
sprint_id ?int
agenda_id ?int
created_by int
sprint_id ?int
agenda_id ?int
created_by int
}
@[params]
@@ -117,8 +116,8 @@ pub mut:
@[params]
pub struct ChatListArgs {
pub mut:
chat_type ChatType = ChatType.direct_message // default value, will be ignored if not set
status ChatStatus = ChatStatus.active // default value, will be ignored if not set
chat_type ChatType = ChatType.direct_message // default value, will be ignored if not set
status ChatStatus = ChatStatus.active // default value, will be ignored if not set
owner_id int
limit int = 50
offset int
@@ -135,38 +134,38 @@ pub mut:
@[table: 'chat']
pub struct ChatORM {
pub mut:
id int @[primary; sql: serial]
name string
description ?string
chat_type ChatType
status ChatStatus
visibility ChatVisibility
owner_id int
project_id ?int
team_id ?int
customer_id ?int
task_id ?int
issue_id ?int
milestone_id ?int
sprint_id ?int
agenda_id ?int
last_activity u32
message_count int
file_count int
archived_at u32
created_at u32
updated_at u32
created_by int
updated_by int
version int
deleted_at u32
id int @[primary; sql: serial]
name string
description ?string
chat_type ChatType
status ChatStatus
visibility ChatVisibility
owner_id int
project_id ?int
team_id ?int
customer_id ?int
task_id ?int
issue_id ?int
milestone_id ?int
sprint_id ?int
agenda_id ?int
last_activity u32
message_count int
file_count int
archived_at u32
created_at u32
updated_at u32
created_by int
updated_by int
version int
deleted_at u32
}
// Message model for ORM
@[table: 'message']
pub struct MessageORM {
pub mut:
id int @[primary; sql: serial]
id int @[primary; sql: serial]
chat_id int
sender_id int
content string
@@ -199,20 +198,20 @@ pub mut:
@[table: 'chat_member']
pub struct ChatMemberORM {
pub mut:
id int @[primary; sql: serial]
user_id int
chat_id int
role MemberRole
joined_at u32
last_read_at u32
id int @[primary; sql: serial]
user_id int
chat_id int
role MemberRole
joined_at u32
last_read_at u32
last_read_message_id ?int
status MemberStatus
invited_by ?int
muted bool
muted_until u32
custom_title ?string
created_at u32
updated_at u32
status MemberStatus
invited_by ?int
muted bool
muted_until u32
custom_title ?string
created_at u32
updated_at u32
}
// ChatRepository using V ORM
@@ -230,29 +229,33 @@ mut:
// Initialize SQLite database with ORM
pub fn new_chat_repository(db_path string) !ChatRepository {
mut db := sqlite.connect(db_path)!
// Create tables using ORM
sql db {
create table ChatORM
create table MessageORM
create table ChatMemberORM
}!
return ChatRepository{db: db}
return ChatRepository{
db: db
}
}
// Initialize PostgreSQL database with ORM
pub fn new_chat_repository_pg(host string, port int, user string, password string, dbname string) !ChatRepositoryPG {
mut db := pg.connect(host: host, port: port, user: user, password: password, dbname: dbname)!
// Create tables using ORM
sql db {
create table ChatORM
create table MessageORM
create table ChatMemberORM
}!
return ChatRepositoryPG{db: db}
return ChatRepositoryPG{
db: db
}
}
// Create a new chat using ORM
@@ -260,40 +263,40 @@ pub fn (mut repo ChatRepository) create_chat(args_ ChatNewArgs) !ChatORM {
mut args := args_
now := u32(time.now().unix())
mut chat := ChatORM{
name: args.name
description: args.description
chat_type: args.chat_type
status: .active
visibility: args.visibility
owner_id: args.owner_id
project_id: args.project_id
team_id: args.team_id
customer_id: args.customer_id
task_id: args.task_id
issue_id: args.issue_id
milestone_id: args.milestone_id
sprint_id: args.sprint_id
agenda_id: args.agenda_id
created_by: args.created_by
updated_by: args.created_by
created_at: now
updated_at: now
deleted_at: 0
name: args.name
description: args.description
chat_type: args.chat_type
status: .active
visibility: args.visibility
owner_id: args.owner_id
project_id: args.project_id
team_id: args.team_id
customer_id: args.customer_id
task_id: args.task_id
issue_id: args.issue_id
milestone_id: args.milestone_id
sprint_id: args.sprint_id
agenda_id: args.agenda_id
created_by: args.created_by
updated_by: args.created_by
created_at: now
updated_at: now
deleted_at: 0
last_activity: 0
message_count: 0
file_count: 0
archived_at: 0
version: 1
file_count: 0
archived_at: 0
version: 1
}
// Insert using ORM
sql repo.db {
insert chat into ChatORM
}!
// Get the last inserted ID
chat.id = repo.db.last_id()
return chat
}
@@ -302,11 +305,11 @@ pub fn (repo ChatRepository) get_chat(id int) !ChatORM {
chat := sql repo.db {
select from ChatORM where id == id && deleted_at == 0
}!
if chat.len == 0 {
return error('Chat not found')
}
return chat[0]
}
@@ -315,11 +318,10 @@ pub fn (mut repo ChatRepository) update_chat(mut chat ChatORM, updated_by int) !
chat.updated_at = u32(time.now().unix())
chat.updated_by = updated_by
chat.version++
sql repo.db {
update ChatORM set name = chat.name, description = chat.description,
status = chat.status, updated_at = chat.updated_at,
updated_by = chat.updated_by, version = chat.version
update ChatORM set name = chat.name, description = chat.description, status = chat.status,
updated_at = chat.updated_at, updated_by = chat.updated_by, version = chat.version
where id == chat.id
}!
}
@@ -327,10 +329,9 @@ pub fn (mut repo ChatRepository) update_chat(mut chat ChatORM, updated_by int) !
// Delete chat (soft delete) using ORM
pub fn (mut repo ChatRepository) delete_chat(id int, deleted_by int) ! {
now := u32(time.now().unix())
sql repo.db {
update ChatORM set deleted_at = now, updated_by = deleted_by, updated_at = now
where id == id
update ChatORM set deleted_at = now, updated_by = deleted_by, updated_at = now where id == id
}!
}
@@ -338,19 +339,17 @@ pub fn (mut repo ChatRepository) delete_chat(id int, deleted_by int) ! {
pub fn (repo ChatRepository) list_chats(args_ ChatListArgs) ![]ChatORM {
mut args := args_
mut chats := []ChatORM{}
if args.owner_id > 0 {
chats = sql repo.db {
select from ChatORM where owner_id == args.owner_id && deleted_at == 0
order by updated_at desc limit args.limit offset args.offset
select from ChatORM where owner_id == args.owner_id && deleted_at == 0 order by updated_at desc limit args.limit offset args.offset
}!
} else {
chats = sql repo.db {
select from ChatORM where deleted_at == 0
order by updated_at desc limit args.limit offset args.offset
select from ChatORM where deleted_at == 0 order by updated_at desc limit args.limit offset args.offset
}!
}
return chats
}
@@ -358,30 +357,27 @@ pub fn (repo ChatRepository) list_chats(args_ ChatListArgs) ![]ChatORM {
pub fn (repo ChatRepository) search_chats(args_ ChatSearchArgs) ![]ChatORM {
mut args := args_
chats := sql repo.db {
select from ChatORM where name like '%${args.search_term}%' && deleted_at == 0
order by updated_at desc limit args.limit
select from ChatORM where name like '%${args.search_term}%' && deleted_at == 0 order by updated_at desc limit args.limit
}!
return chats
}
// Get chats by project using ORM
pub fn (repo ChatRepository) get_chats_by_project(project_id int) ![]ChatORM {
chats := sql repo.db {
select from ChatORM where project_id == project_id && deleted_at == 0
order by updated_at desc
select from ChatORM where project_id == project_id && deleted_at == 0 order by updated_at desc
}!
return chats
}
// Get chats by team using ORM
pub fn (repo ChatRepository) get_chats_by_team(team_id int) ![]ChatORM {
chats := sql repo.db {
select from ChatORM where team_id == team_id && deleted_at == 0
order by updated_at desc
select from ChatORM where team_id == team_id && deleted_at == 0 order by updated_at desc
}!
return chats
}
@@ -390,7 +386,7 @@ pub fn (repo ChatRepository) count_chats() !int {
result := sql repo.db {
select count from ChatORM where deleted_at == 0
}!
return result
}
@@ -399,36 +395,35 @@ pub fn (mut repo ChatRepository) add_chat_member(args_ ChatMemberNewArgs) !ChatM
mut args := args_
now := u32(time.now().unix())
mut member := ChatMemberORM{
user_id: args.user_id
chat_id: args.chat_id
role: args.role
invited_by: args.invited_by
joined_at: now
created_at: now
updated_at: now
user_id: args.user_id
chat_id: args.chat_id
role: args.role
invited_by: args.invited_by
joined_at: now
created_at: now
updated_at: now
last_read_at: 0
status: .active
muted: false
muted_until: 0
status: .active
muted: false
muted_until: 0
}
sql repo.db {
insert member into ChatMemberORM
}!
// Get the last inserted ID
member.id = repo.db.last_id()
return member
}
// Get chat members using ORM
pub fn (repo ChatRepository) get_chat_members(chat_id int) ![]ChatMemberORM {
members := sql repo.db {
select from ChatMemberORM where chat_id == chat_id && status == MemberStatus.active
order by joined_at
select from ChatMemberORM where chat_id == chat_id && status == MemberStatus.active order by joined_at
}!
return members
}
@@ -436,8 +431,8 @@ pub fn (repo ChatRepository) get_chat_members(chat_id int) ![]ChatMemberORM {
pub fn (mut repo ChatRepository) remove_chat_member(chat_id int, user_id int) ! {
now := u32(time.now().unix())
sql repo.db {
update ChatMemberORM set status = MemberStatus.inactive, updated_at = now
where chat_id == chat_id && user_id == user_id
update ChatMemberORM set status = MemberStatus.inactive, updated_at = now where
chat_id == chat_id && user_id == user_id
}!
}
@@ -446,52 +441,51 @@ pub fn (mut repo ChatRepository) send_message(args_ MessageNewArgs) !MessageORM
mut args := args_
now := u32(time.now().unix())
mut message := MessageORM{
chat_id: args.chat_id
sender_id: args.sender_id
content: args.content
message_type: args.message_type
thread_id: args.thread_id
reply_to_id: args.reply_to_id
priority: args.priority
scheduled_at: args.scheduled_at or { 0 }
expires_at: args.expires_at or { 0 }
created_by: args.created_by
updated_by: args.created_by
created_at: now
updated_at: now
deleted_at: 0
edited_at: 0
pinned: false
pinned_at: 0
chat_id: args.chat_id
sender_id: args.sender_id
content: args.content
message_type: args.message_type
thread_id: args.thread_id
reply_to_id: args.reply_to_id
priority: args.priority
scheduled_at: args.scheduled_at or { 0 }
expires_at: args.expires_at or { 0 }
created_by: args.created_by
updated_by: args.created_by
created_at: now
updated_at: now
deleted_at: 0
edited_at: 0
pinned: false
pinned_at: 0
delivery_status: .sent
system_message: false
bot_message: false
version: 1
system_message: false
bot_message: false
version: 1
}
sql repo.db {
insert message into MessageORM
}!
// Get the last inserted ID
message.id = repo.db.last_id()
// Update chat message count and last activity
sql repo.db {
update ChatORM set message_count = message_count + 1, last_activity = now, updated_at = now
where id == args.chat_id
}!
return message
}
// Get messages for chat using ORM
pub fn (repo ChatRepository) get_messages(chat_id int, limit int, offset int) ![]MessageORM {
messages := sql repo.db {
select from MessageORM where chat_id == chat_id && deleted_at == 0
order by created_at desc limit limit offset offset
select from MessageORM where chat_id == chat_id && deleted_at == 0 order by created_at desc limit limit offset offset
}!
return messages
}
@@ -500,29 +494,28 @@ pub fn (repo ChatRepository) get_message(id int) !MessageORM {
message := sql repo.db {
select from MessageORM where id == id && deleted_at == 0
}!
if message.len == 0 {
return error('Message not found')
}
return message[0]
}
// Edit message using ORM
pub fn (mut repo ChatRepository) edit_message(id int, new_content string, edited_by int) ! {
now := u32(time.now().unix())
sql repo.db {
update MessageORM set content = new_content, edited_at = now,
edited_by = edited_by, updated_at = now
where id == id
update MessageORM set content = new_content, edited_at = now, edited_by = edited_by,
updated_at = now where id == id
}!
}
// Delete message using ORM
pub fn (mut repo ChatRepository) delete_message(id int, deleted_by int) ! {
now := u32(time.now().unix())
sql repo.db {
update MessageORM set deleted_at = now, deleted_by = deleted_by, updated_at = now
where id == id
@@ -532,10 +525,9 @@ pub fn (mut repo ChatRepository) delete_message(id int, deleted_by int) ! {
// Pin message using ORM
pub fn (mut repo ChatRepository) pin_message(id int, pinned_by int) ! {
now := u32(time.now().unix())
sql repo.db {
update MessageORM set pinned = true, pinned_at = now,
pinned_by = pinned_by, updated_at = now
update MessageORM set pinned = true, pinned_at = now, pinned_by = pinned_by, updated_at = now
where id == id
}!
}
@@ -543,20 +535,19 @@ pub fn (mut repo ChatRepository) pin_message(id int, pinned_by int) ! {
// Get pinned messages using ORM
pub fn (repo ChatRepository) get_pinned_messages(chat_id int) ![]MessageORM {
messages := sql repo.db {
select from MessageORM where chat_id == chat_id && pinned == true && deleted_at == 0
order by pinned_at desc
select from MessageORM where chat_id == chat_id && pinned == true && deleted_at == 0 order by pinned_at desc
}!
return messages
}
// Mark messages as read using ORM
pub fn (mut repo ChatRepository) mark_as_read(chat_id int, user_id int, message_id int) ! {
now := u32(time.now().unix())
sql repo.db {
update ChatMemberORM set last_read_at = now, last_read_message_id = message_id
where chat_id == chat_id && user_id == user_id
update ChatMemberORM set last_read_at = now, last_read_message_id = message_id where
chat_id == chat_id && user_id == user_id
}!
}
@@ -566,47 +557,47 @@ pub fn (repo ChatRepository) get_unread_count(chat_id int, user_id int) !int {
member := sql repo.db {
select from ChatMemberORM where chat_id == chat_id && user_id == user_id
}!
if member.len == 0 {
return 0
}
last_read_id := member[0].last_read_message_id or { 0 }
// Count messages after last read
result := sql repo.db {
select count from MessageORM where chat_id == chat_id &&
id > last_read_id && deleted_at == 0 && system_message == false
select count from MessageORM where chat_id == chat_id && id > last_read_id && deleted_at == 0
&& system_message == false
}!
return result
}
// Delete all data from repository (removes all records from all tables)
pub fn (mut repo ChatRepository) delete_all()! {
pub fn (mut repo ChatRepository) delete_all() ! {
sql repo.db {
delete from MessageORM where id > 0
}!
sql repo.db {
delete from ChatMemberORM where id > 0
}!
sql repo.db {
delete from ChatORM where id > 0
}!
}
// Delete all data from PostgreSQL repository (removes all records from all tables)
pub fn (mut repo ChatRepositoryPG) delete_all()! {
pub fn (mut repo ChatRepositoryPG) delete_all() ! {
sql repo.db {
delete from MessageORM where id > 0
}!
sql repo.db {
delete from ChatMemberORM where id > 0
}!
sql repo.db {
delete from ChatORM where id > 0
}!
@@ -616,48 +607,48 @@ pub fn (mut repo ChatRepositoryPG) delete_all()! {
pub fn example_usage() ! {
// Initialize repository
mut repo := new_chat_repository('chat_example.db')!
// Create a new chat using the new parameter struct
mut chat := repo.create_chat(
name: 'Project Alpha Discussion'
chat_type: .project_chat
owner_id: 1
name: 'Project Alpha Discussion'
chat_type: .project_chat
owner_id: 1
created_by: 1
)!
println('Created chat: ${chat.name} with ID: ${chat.id}')
// Add members to chat using the new parameter struct
member1 := repo.add_chat_member(
chat_id: chat.id
user_id: 2
role: .member
chat_id: chat.id
user_id: 2
role: .member
invited_by: 1
)!
member2 := repo.add_chat_member(
chat_id: chat.id
user_id: 3
role: .moderator
chat_id: chat.id
user_id: 3
role: .moderator
invited_by: 1
)!
println('Added members: ${member1.user_id}, ${member2.user_id}')
// Send messages using the new parameter struct
msg1 := repo.send_message(
chat_id: chat.id
sender_id: 1
content: 'Welcome to the project chat!'
chat_id: chat.id
sender_id: 1
content: 'Welcome to the project chat!'
message_type: .text
created_by: 1
created_by: 1
)!
msg2 := repo.send_message(
chat_id: chat.id
sender_id: 2
content: 'Thanks for adding me!'
chat_id: chat.id
sender_id: 2
content: 'Thanks for adding me!'
message_type: .text
created_by: 2
created_by: 2
)!
println('Sent messages: ${msg1.id}, ${msg2.id}')
// Debug: Check what's in the database
all_messages := sql repo.db {
select from MessageORM
@@ -666,46 +657,46 @@ pub fn example_usage() ! {
for i, msg in all_messages {
println(' DB Message ${i + 1}: ID=${msg.id}, chat_id=${msg.chat_id}, content="${msg.content}", deleted_at=${msg.deleted_at}')
}
// Get messages
messages := repo.get_messages(chat.id, 10, 0)!
println('Retrieved ${messages.len} messages')
for i, msg in messages {
println(' Message ${i + 1}: "${msg.content}" from user ${msg.sender_id}')
}
// Mark as read
repo.mark_as_read(chat.id, 2, msg2.id)!
// Get unread count
unread := repo.get_unread_count(chat.id, 3)!
println('User 3 has ${unread} unread messages')
// Search chats using the new parameter struct
found_chats := repo.search_chats(
search_term: 'Alpha'
limit: 5
limit: 5
)!
println('Found ${found_chats.len} chats matching "Alpha"')
for i, found_chat in found_chats {
println(' Chat ${i + 1}: "${found_chat.name}" (ID: ${found_chat.id})')
}
// Pin a message
repo.pin_message(msg1.id, 1)!
// Get pinned messages
pinned := repo.get_pinned_messages(chat.id)!
println('Found ${pinned.len} pinned messages')
for i, pinned_msg in pinned {
println(' Pinned message ${i + 1}: "${pinned_msg.content}"')
}
// Test the delete_all method
// println('Testing delete_all method...')
// repo.delete_all()!
// println('All data deleted successfully!')
// println('Testing delete_all method...')
// repo.delete_all()!
// println('All data deleted successfully!')
}
// Run the example
example_usage() or { panic(err) }
example_usage() or { panic(err) }
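
The PostgreSQL-backed repository defined above exposes the same API; a minimal sketch, where the connection parameters are placeholders and a reachable Postgres instance is assumed:

// Hypothetical credentials; new_chat_repository_pg creates the same tables via the ORM
mut pg_repo := new_chat_repository_pg('localhost', 5432, 'postgres', 'postgres', 'chatdb')!
// Remove all chat data (messages, members, chats), mirroring delete_all on the SQLite repository
pg_repo.delete_all()!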

View File

@@ -6,46 +6,46 @@ import time
pub struct Agenda {
BaseModel
pub mut:
title string @[required]
description string
agenda_type AgendaType
status AgendaStatus
priority Priority
start_time time.Time
end_time time.Time
all_day bool
location string
virtual_link string
organizer_id int // User who organized the event
attendees []Attendee
required_attendees []int // User IDs who must attend
optional_attendees []int // User IDs who are optional
resources []Resource // Rooms, equipment, etc.
project_id int // Links to Project (optional)
task_id int // Links to Task (optional)
milestone_id int // Links to Milestone (optional)
sprint_id int // Links to Sprint (optional)
team_id int // Links to Team (optional)
customer_id int // Links to Customer (optional)
recurrence Recurrence
reminders []Reminder
agenda_items []AgendaItem
attachments []Attachment
notes string
meeting_notes string
action_items []ActionItem
decisions []Decision
recording_url string
transcript string
follow_up_tasks []int // Task IDs created from this meeting
time_zone string
visibility EventVisibility
booking_type BookingType
cost f64 // Cost of the meeting (room, catering, etc.)
capacity int // Maximum attendees
waiting_list []int // User IDs on waiting list
tags []string
custom_fields map[string]string
title string @[required]
description string
agenda_type AgendaType
status AgendaStatus
priority Priority
start_time time.Time
end_time time.Time
all_day bool
location string
virtual_link string
organizer_id int // User who organized the event
attendees []Attendee
required_attendees []int // User IDs who must attend
optional_attendees []int // User IDs who are optional
resources []Resource // Rooms, equipment, etc.
project_id int // Links to Project (optional)
task_id int // Links to Task (optional)
milestone_id int // Links to Milestone (optional)
sprint_id int // Links to Sprint (optional)
team_id int // Links to Team (optional)
customer_id int // Links to Customer (optional)
recurrence Recurrence
reminders []Reminder
agenda_items []AgendaItem
attachments []Attachment
notes string
meeting_notes string
action_items []ActionItem
decisions []Decision
recording_url string
transcript string
follow_up_tasks []int // Task IDs created from this meeting
time_zone string
visibility EventVisibility
booking_type BookingType
cost f64 // Cost of the meeting (room, catering, etc.)
capacity int // Maximum attendees
waiting_list []int // User IDs on waiting list
tags []string
custom_fields map[string]string
}
// AgendaType for categorizing events
@@ -106,19 +106,19 @@ pub enum BookingType {
// Attendee represents a meeting attendee
pub struct Attendee {
pub mut:
user_id int
agenda_id int
attendance_type AttendanceType
response_status ResponseStatus
response_time time.Time
response_note string
user_id int
agenda_id int
attendance_type AttendanceType
response_status ResponseStatus
response_time time.Time
response_note string
actual_attendance bool
check_in_time time.Time
check_out_time time.Time
role AttendeeRole
permissions []string
delegate_id int // User ID if someone else attends on their behalf
cost f64 // Cost for this attendee (travel, accommodation, etc.)
check_in_time time.Time
check_out_time time.Time
role AttendeeRole
permissions []string
delegate_id int // User ID if someone else attends on their behalf
cost f64 // Cost for this attendee (travel, accommodation, etc.)
}
// AttendanceType for attendee requirements
@@ -154,17 +154,17 @@ pub enum AttendeeRole {
// Resource represents a bookable resource
pub struct Resource {
pub mut:
id int
name string
resource_type ResourceType
location string
capacity int
cost_per_hour f64
booking_status ResourceStatus
equipment []string
requirements []string
contact_person string
booking_notes string
id int
name string
resource_type ResourceType
location string
capacity int
cost_per_hour f64
booking_status ResourceStatus
equipment []string
requirements []string
contact_person string
booking_notes string
}
// ResourceType for categorizing resources
@@ -191,17 +191,17 @@ pub enum ResourceStatus {
// Recurrence represents recurring event patterns
pub struct Recurrence {
pub mut:
pattern RecurrencePattern
interval int // Every N days/weeks/months
days_of_week []int // 0=Sunday, 1=Monday, etc.
day_of_month int // For monthly recurrence
week_of_month int // First, second, third, fourth, last week
months []int // For yearly recurrence
end_type RecurrenceEndType
end_date time.Time
pattern RecurrencePattern
interval int // Every N days/weeks/months
days_of_week []int // 0=Sunday, 1=Monday, etc.
day_of_month int // For monthly recurrence
week_of_month int // First, second, third, fourth, last week
months []int // For yearly recurrence
end_type RecurrenceEndType
end_date time.Time
occurrence_count int
exceptions []time.Time // Dates to skip
modifications []RecurrenceModification
exceptions []time.Time // Dates to skip
modifications []RecurrenceModification
}
// RecurrencePattern for different recurrence types
@@ -224,30 +224,30 @@ pub enum RecurrenceEndType {
// RecurrenceModification for modifying specific occurrences
pub struct RecurrenceModification {
pub mut:
original_date time.Time
new_start_time time.Time
new_end_time time.Time
cancelled bool
title_override string
original_date time.Time
new_start_time time.Time
new_end_time time.Time
cancelled bool
title_override string
location_override string
}
// AgendaItem represents an item on the meeting agenda
pub struct AgendaItem {
pub mut:
id int
agenda_id int
title string
description string
item_type AgendaItemType
presenter_id int
id int
agenda_id int
title string
description string
item_type AgendaItemType
presenter_id int
duration_minutes int
order_index int
status AgendaItemStatus
notes string
attachments []Attachment
action_items []ActionItem
decisions []Decision
order_index int
status AgendaItemStatus
notes string
attachments []Attachment
action_items []ActionItem
decisions []Decision
}
// AgendaItemType for categorizing agenda items
@@ -277,23 +277,23 @@ pub enum AgendaItemStatus {
// Decision represents a decision made during a meeting
pub struct Decision {
pub mut:
id int
agenda_id int
agenda_item_id int
title string
description string
decision_type DecisionType
decision_maker_id int
participants []int // User IDs involved in decision
rationale string
alternatives []string
impact string
id int
agenda_id int
agenda_item_id int
title string
description string
decision_type DecisionType
decision_maker_id int
participants []int // User IDs involved in decision
rationale string
alternatives []string
impact string
implementation_date time.Time
review_date time.Time
status DecisionStatus
follow_up_tasks []int // Task IDs for implementation
created_at time.Time
created_by int
review_date time.Time
status DecisionStatus
follow_up_tasks []int // Task IDs for implementation
created_at time.Time
created_by int
}
// DecisionType for categorizing decisions
@@ -371,7 +371,7 @@ pub fn (a Agenda) get_attendance_rate() f32 {
if a.attendees.len == 0 {
return 0
}
attended := a.attendees.filter(it.actual_attendance).len
return f32(attended) / f32(a.attendees.len) * 100
}
@@ -388,14 +388,14 @@ pub fn (mut a Agenda) add_attendee(user_id int, attendance_type AttendanceType,
return
}
}
// Add new attendee
a.attendees << Attendee{
user_id: user_id
agenda_id: a.id
user_id: user_id
agenda_id: a.id
attendance_type: attendance_type
response_status: .pending
role: role
role: role
}
a.update_timestamp(by_user_id)
}
@@ -456,15 +456,15 @@ pub fn (mut a Agenda) add_resource(resource Resource, by_user_id int) {
// add_agenda_item adds an item to the meeting agenda
pub fn (mut a Agenda) add_agenda_item(title string, description string, item_type AgendaItemType, presenter_id int, duration_minutes int, by_user_id int) {
a.agenda_items << AgendaItem{
id: a.agenda_items.len + 1
agenda_id: a.id
title: title
description: description
item_type: item_type
presenter_id: presenter_id
id: a.agenda_items.len + 1
agenda_id: a.id
title: title
description: description
item_type: item_type
presenter_id: presenter_id
duration_minutes: duration_minutes
order_index: a.agenda_items.len
status: .pending
order_index: a.agenda_items.len
status: .pending
}
a.update_timestamp(by_user_id)
}
@@ -484,17 +484,17 @@ pub fn (mut a Agenda) complete_agenda_item(item_id int, notes string, by_user_id
// add_decision records a decision made during the meeting
pub fn (mut a Agenda) add_decision(title string, description string, decision_type DecisionType, decision_maker_id int, participants []int, rationale string, by_user_id int) {
a.decisions << Decision{
id: a.decisions.len + 1
agenda_id: a.id
title: title
description: description
decision_type: decision_type
id: a.decisions.len + 1
agenda_id: a.id
title: title
description: description
decision_type: decision_type
decision_maker_id: decision_maker_id
participants: participants
rationale: rationale
status: .pending
created_at: time.now()
created_by: by_user_id
participants: participants
rationale: rationale
status: .pending
created_at: time.now()
created_by: by_user_id
}
a.update_timestamp(by_user_id)
}
@@ -529,11 +529,11 @@ pub fn (mut a Agenda) postpone_meeting(new_start_time time.Time, new_end_time ti
// add_reminder adds a reminder for the event
pub fn (mut a Agenda) add_reminder(reminder_type ReminderType, minutes_before int, by_user_id int) {
a.reminders << Reminder{
reminder_type: reminder_type
reminder_type: reminder_type
minutes_before: minutes_before
sent: false
created_at: time.now()
created_by: by_user_id
sent: false
created_at: time.now()
created_by: by_user_id
}
a.update_timestamp(by_user_id)
}
@@ -541,18 +541,18 @@ pub fn (mut a Agenda) add_reminder(reminder_type ReminderType, minutes_before in
// calculate_cost calculates the total cost of the meeting
pub fn (a Agenda) calculate_cost() f64 {
mut total_cost := a.cost
// Add attendee costs
for attendee in a.attendees {
total_cost += attendee.cost
}
// Add resource costs
duration_hours := f64(a.get_duration()) / 60.0
for resource in a.resources {
total_cost += resource.cost_per_hour * duration_hours
}
return total_cost
}
@@ -561,18 +561,24 @@ pub fn (a Agenda) get_next_occurrence() ?time.Time {
if a.recurrence.pattern == .none {
return none
}
// Simple implementation - in practice this would be more complex
match a.recurrence.pattern {
.daily {
return time.Time{unix: a.start_time.unix + (86400 * a.recurrence.interval)}
return time.Time{
unix: a.start_time.unix + (86400 * a.recurrence.interval)
}
}
.weekly {
return time.Time{unix: a.start_time.unix + (86400 * 7 * a.recurrence.interval)}
return time.Time{
unix: a.start_time.unix + (86400 * 7 * a.recurrence.interval)
}
}
.monthly {
// Simplified - would need proper month calculation
return time.Time{unix: a.start_time.unix + (86400 * 30 * a.recurrence.interval)}
return time.Time{
unix: a.start_time.unix + (86400 * 30 * a.recurrence.interval)
}
}
else {
return none
@@ -590,26 +596,26 @@ pub fn (a Agenda) get_effectiveness_score() f32 {
if a.status != .completed {
return 0
}
mut score := f32(1.0)
// Attendance rate (30% weight)
attendance_rate := a.get_attendance_rate()
score *= 0.3 + (0.7 * attendance_rate / 100)
// Agenda completion (40% weight)
if a.agenda_items.len > 0 {
completed_items := a.agenda_items.filter(it.status == .completed).len
completion_rate := f32(completed_items) / f32(a.agenda_items.len)
score *= 0.4 + (0.6 * completion_rate)
}
// Decision making (30% weight)
if a.decisions.len > 0 {
approved_decisions := a.decisions.filter(it.status == .approved).len
decision_rate := f32(approved_decisions) / f32(a.decisions.len)
score *= 0.3 + (0.7 * decision_rate)
}
return score
}
}
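
As a usage illustration of the recurrence handling above, a minimal sketch with hypothetical values, assumed to run inside or alongside the models module:

// Build a daily-recurring agenda entry and ask for its next occurrence
mut a := Agenda{
	title:      'Daily standup'
	start_time: time.now()
	end_time:   time.now()
	recurrence: Recurrence{
		pattern:  .daily
		interval: 1
	}
}
if next := a.get_next_occurrence() {
	println('next occurrence: ${next}')
}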

View File

@@ -5,15 +5,15 @@ import time
// BaseModel provides common fields and functionality for all root objects
pub struct BaseModel {
pub mut:
id int @[primary; sql: serial]
created_at time.Time @[sql_type: 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP']
updated_at time.Time @[sql_type: 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP']
created_by int // User ID who created this record
updated_by int // User ID who last updated this record
version int // For optimistic locking
tags []string // Flexible tagging system for categorization
metadata map[string]string // Extensible key-value data for custom fields
is_active bool = true // Soft delete flag
id int @[primary; sql: serial]
created_at time.Time @[sql_type: 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP']
updated_at time.Time @[sql_type: 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP']
created_by int // User ID who created this record
updated_by int // User ID who last updated this record
version int // For optimistic locking
tags []string // Flexible tagging system for categorization
metadata map[string]string // Extensible key-value data for custom fields
is_active bool = true // Soft delete flag
}
// update_timestamp updates the updated_at field and version for optimistic locking
@@ -60,4 +60,4 @@ pub fn (mut base BaseModel) soft_delete(user_id int) {
pub fn (mut base BaseModel) restore(user_id int) {
base.is_active = true
base.update_timestamp(user_id)
}
}

View File

@@ -11,20 +11,20 @@ pub mut:
chat_type ChatType
status ChatStatus
visibility ChatVisibility
owner_id int // User who owns/created the chat
owner_id int // User who owns/created the chat
members []ChatMember
messages []Message
project_id int // Links to Project (optional)
team_id int // Links to Team (optional)
customer_id int // Links to Customer (optional)
task_id int // Links to Task (optional)
issue_id int // Links to Issue (optional)
milestone_id int // Links to Milestone (optional)
sprint_id int // Links to Sprint (optional)
agenda_id int // Links to Agenda (optional)
project_id int // Links to Project (optional)
team_id int // Links to Team (optional)
customer_id int // Links to Customer (optional)
task_id int // Links to Task (optional)
issue_id int // Links to Issue (optional)
milestone_id int // Links to Milestone (optional)
sprint_id int // Links to Sprint (optional)
agenda_id int // Links to Agenda (optional)
settings ChatSettings
integrations []ChatIntegration
pinned_messages []int // Message IDs that are pinned
pinned_messages []int // Message IDs that are pinned
archived_at time.Time
last_activity time.Time
message_count int
@@ -67,19 +67,19 @@ pub enum ChatVisibility {
// ChatMember represents a member of a chat
pub struct ChatMember {
pub mut:
user_id int
chat_id int
role ChatRole
permissions []ChatPermission
joined_at time.Time
last_read_at time.Time
last_read_message_id int
user_id int
chat_id int
role ChatRole
permissions []ChatPermission
joined_at time.Time
last_read_at time.Time
last_read_message_id int
notification_settings NotificationSettings
status MemberStatus
invited_by int
muted bool
muted_until time.Time
custom_title string
status MemberStatus
invited_by int
muted bool
muted_until time.Time
custom_title string
}
// ChatRole for member roles in chat
@@ -116,9 +116,9 @@ pub mut:
sender_id int
content string
message_type MessageType
thread_id int // For threaded conversations
reply_to_id int // Message this is replying to
mentions []int // User IDs mentioned in message
thread_id int // For threaded conversations
reply_to_id int // Message this is replying to
mentions []int // User IDs mentioned in message
attachments []Attachment
reactions []Reaction
edited_at time.Time
@@ -128,7 +128,7 @@ pub mut:
pinned bool
pinned_at time.Time
pinned_by int
forwarded_from int // Original message ID if forwarded
forwarded_from int // Original message ID if forwarded
scheduled_at time.Time // For scheduled messages
delivery_status MessageDeliveryStatus
read_by []MessageRead
@@ -181,66 +181,66 @@ pub enum MessagePriority {
// MessageRead tracks who has read a message
pub struct MessageRead {
pub mut:
user_id int
message_id int
read_at time.Time
device string
user_id int
message_id int
read_at time.Time
device string
}
// Reaction represents an emoji reaction to a message
pub struct Reaction {
pub mut:
id int
message_id int
user_id int
emoji string
created_at time.Time
id int
message_id int
user_id int
emoji string
created_at time.Time
}
// RichContent for rich message formatting
pub struct RichContent {
pub mut:
formatted_text string // HTML or markdown
embeds []Embed
buttons []ActionButton
cards []Card
polls []Poll
formatted_text string // HTML or markdown
embeds []Embed
buttons []ActionButton
cards []Card
polls []Poll
}
// Embed for rich content embeds
pub struct Embed {
pub mut:
title string
description string
url string
thumbnail_url string
image_url string
video_url string
author_name string
author_url string
color string
fields []EmbedField
footer_text string
timestamp time.Time
title string
description string
url string
thumbnail_url string
image_url string
video_url string
author_name string
author_url string
color string
fields []EmbedField
footer_text string
timestamp time.Time
}
// EmbedField for structured embed data
pub struct EmbedField {
pub mut:
name string
value string
inline bool
name string
value string
inline bool
}
// ActionButton for interactive messages
pub struct ActionButton {
pub mut:
id string
label string
style ButtonStyle
action string
url string
confirmation string
id string
label string
style ButtonStyle
action string
url string
confirmation string
}
// ButtonStyle for button appearance
@@ -256,19 +256,19 @@ pub enum ButtonStyle {
// Card for rich card content
pub struct Card {
pub mut:
title string
subtitle string
text string
image_url string
actions []ActionButton
facts []CardFact
title string
subtitle string
text string
image_url string
actions []ActionButton
facts []CardFact
}
// CardFact for key-value pairs in cards
pub struct CardFact {
pub mut:
name string
value string
name string
value string
}
// Poll for interactive polls
@@ -287,90 +287,90 @@ pub mut:
// PollOption for poll choices
pub struct PollOption {
pub mut:
id int
text string
votes []PollVote
vote_count int
id int
text string
votes []PollVote
vote_count int
}
// PollVote for tracking poll votes
pub struct PollVote {
pub mut:
user_id int
option_id int
voted_at time.Time
user_id int
option_id int
voted_at time.Time
}
// ChatSettings for chat configuration
pub struct ChatSettings {
pub mut:
allow_guests bool
require_approval bool
allow_guests bool
require_approval bool
message_retention_days int
file_retention_days int
max_members int
slow_mode_seconds int
profanity_filter bool
link_preview bool
emoji_reactions bool
threading bool
message_editing bool
message_deletion bool
file_uploads bool
external_sharing bool
read_receipts bool
typing_indicators bool
welcome_message string
rules []string
auto_moderation AutoModerationSettings
file_retention_days int
max_members int
slow_mode_seconds int
profanity_filter bool
link_preview bool
emoji_reactions bool
threading bool
message_editing bool
message_deletion bool
file_uploads bool
external_sharing bool
read_receipts bool
typing_indicators bool
welcome_message string
rules []string
auto_moderation AutoModerationSettings
}
// AutoModerationSettings for automated moderation
pub struct AutoModerationSettings {
pub mut:
enabled bool
spam_detection bool
profanity_filter bool
link_filtering bool
caps_limit int
rate_limit_messages int
rate_limit_seconds int
enabled bool
spam_detection bool
profanity_filter bool
link_filtering bool
caps_limit int
rate_limit_messages int
rate_limit_seconds int
auto_timeout_duration int
escalation_threshold int
escalation_threshold int
}
// NotificationSettings for member notification preferences
pub struct NotificationSettings {
pub mut:
all_messages bool
mentions_only bool
direct_messages bool
keywords []string
mute_until time.Time
email_notifications bool
push_notifications bool
all_messages bool
mentions_only bool
direct_messages bool
keywords []string
mute_until time.Time
email_notifications bool
push_notifications bool
desktop_notifications bool
sound_enabled bool
vibration_enabled bool
sound_enabled bool
vibration_enabled bool
}
// ChatIntegration for external service integrations
pub struct ChatIntegration {
pub mut:
id int
chat_id int
id int
chat_id int
integration_type IntegrationType
name string
description string
webhook_url string
api_key string
settings map[string]string
enabled bool
created_by int
created_at time.Time
last_used time.Time
error_count int
last_error string
name string
description string
webhook_url string
api_key string
settings map[string]string
enabled bool
created_by int
created_at time.Time
last_used time.Time
error_count int
last_error string
}
// IntegrationType for different integrations
@@ -400,7 +400,7 @@ pub fn (c Chat) get_unread_count(user_id int) int {
break
}
}
// Count messages after last read
return c.messages.filter(it.id > last_read_id && !it.system_message).len
}
@@ -448,22 +448,22 @@ pub fn (mut c Chat) add_member(user_id int, role ChatRole, permissions []ChatPer
return
}
}
// Add new member
c.members << ChatMember{
user_id: user_id
chat_id: c.id
role: role
permissions: permissions
joined_at: time.now()
invited_by: invited_by
status: .active
user_id: user_id
chat_id: c.id
role: role
permissions: permissions
joined_at: time.now()
invited_by: invited_by
status: .active
notification_settings: NotificationSettings{
all_messages: true
mentions_only: false
direct_messages: true
all_messages: true
mentions_only: false
direct_messages: true
email_notifications: true
push_notifications: true
push_notifications: true
}
}
c.update_timestamp(by_user_id)
@@ -483,26 +483,26 @@ pub fn (mut c Chat) remove_member(user_id int, by_user_id int) {
// send_message sends a message to the chat
pub fn (mut c Chat) send_message(sender_id int, content string, message_type MessageType, thread_id int, reply_to_id int, mentions []int, attachments []Attachment, by_user_id int) int {
message := Message{
id: c.messages.len + 1
chat_id: c.id
sender_id: sender_id
content: content
message_type: message_type
thread_id: thread_id
reply_to_id: reply_to_id
mentions: mentions
attachments: attachments
id: c.messages.len + 1
chat_id: c.id
sender_id: sender_id
content: content
message_type: message_type
thread_id: thread_id
reply_to_id: reply_to_id
mentions: mentions
attachments: attachments
delivery_status: .sent
priority: .normal
created_at: time.now()
created_by: by_user_id
priority: .normal
created_at: time.now()
created_by: by_user_id
}
c.messages << message
c.message_count++
c.last_activity = time.now()
c.update_timestamp(by_user_id)
return message.id
}
@@ -557,15 +557,15 @@ pub fn (mut c Chat) add_reaction(message_id int, user_id int, emoji string, by_u
// Check if user already reacted with this emoji
for reaction in message.reactions {
if reaction.user_id == user_id && reaction.emoji == emoji {
return // Already reacted
return
}
}
c.messages[i].reactions << Reaction{
id: message.reactions.len + 1
id: message.reactions.len + 1
message_id: message_id
user_id: user_id
emoji: emoji
user_id: user_id
emoji: emoji
created_at: time.now()
}
c.update_timestamp(by_user_id)
@@ -584,7 +584,7 @@ pub fn (mut c Chat) mark_as_read(user_id int, message_id int, by_user_id int) {
break
}
}
// Add read receipt to message
for i, mut message in c.messages {
if message.id == message_id {
@@ -594,16 +594,16 @@ pub fn (mut c Chat) mark_as_read(user_id int, message_id int, by_user_id int) {
return
}
}
c.messages[i].read_by << MessageRead{
user_id: user_id
user_id: user_id
message_id: message_id
read_at: time.now()
read_at: time.now()
}
break
}
}
c.update_timestamp(by_user_id)
}
@@ -629,15 +629,15 @@ pub fn (mut c Chat) archive_chat(by_user_id int) {
// add_integration adds an external integration
pub fn (mut c Chat) add_integration(integration_type IntegrationType, name string, webhook_url string, settings map[string]string, by_user_id int) {
c.integrations << ChatIntegration{
id: c.integrations.len + 1
chat_id: c.id
id: c.integrations.len + 1
chat_id: c.id
integration_type: integration_type
name: name
webhook_url: webhook_url
settings: settings
enabled: true
created_by: by_user_id
created_at: time.now()
name: name
webhook_url: webhook_url
settings: settings
enabled: true
created_by: by_user_id
created_at: time.now()
}
c.update_timestamp(by_user_id)
}
@@ -647,11 +647,11 @@ pub fn (c Chat) get_activity_level() string {
if c.messages.len == 0 {
return 'Inactive'
}
// Messages in last 24 hours
day_ago := time.now().unix - 86400
recent_messages := c.messages.filter(it.created_at.unix > day_ago).len
if recent_messages > 50 {
return 'Very Active'
} else if recent_messages > 20 {
@@ -670,30 +670,34 @@ pub fn (c Chat) get_engagement_score() f32 {
if c.members.len == 0 || c.messages.len == 0 {
return 0
}
// Calculate unique participants in last 7 days
week_ago := time.now().unix - (86400 * 7)
recent_messages := c.messages.filter(it.created_at.unix > week_ago)
mut unique_senders := map[int]bool{}
for message in recent_messages {
unique_senders[message.sender_id] = true
}
participation_rate := f32(unique_senders.len) / f32(c.members.len)
// Calculate message frequency
messages_per_day := f32(recent_messages.len) / 7.0
frequency_score := if messages_per_day > 10 { 1.0 } else { messages_per_day / 10.0 }
// Calculate reaction engagement
mut total_reactions := 0
for message in recent_messages {
total_reactions += message.reactions.len
}
reaction_rate := if recent_messages.len > 0 { f32(total_reactions) / f32(recent_messages.len) } else { 0 }
reaction_rate := if recent_messages.len > 0 {
f32(total_reactions) / f32(recent_messages.len)
} else {
0
}
reaction_score := if reaction_rate > 2 { 1.0 } else { reaction_rate / 2.0 }
// Weighted average
return (participation_rate * 0.5) + (frequency_score * 0.3) + (reaction_score * 0.2)
}
}

View File

@@ -1,7 +1,7 @@
module models
// Task/Project Management System with Integrated CRM
//
//
// This module provides a comprehensive task and project management system
// with integrated CRM capabilities, built using V language best practices.
//
@@ -25,53 +25,11 @@ module models
// - Audit trail with created/updated timestamps and user tracking
// - Health scoring and analytics for projects, sprints, and teams
// Re-export all model types for easy importing
pub use base { BaseModel }
pub use enums {
Priority, ProjectStatus, TaskStatus, TaskType, IssueStatus,
IssueType, SprintStatus, MilestoneStatus, TeamStatus,
AgendaStatus, ChatStatus, UserRole, CustomerStatus,
SkillLevel, NotificationChannel, ReminderType
}
pub use subobjects {
Contact, Address, TimeEntry, Comment, Attachment,
Condition, Notification, Reaction, Reminder,
UserPreferences, ProjectRole, Label
}
pub use user { User }
pub use customer { Customer }
pub use project { Project, ProjectBillingType, RiskLevel, ProjectMethodology }
pub use task { Task, TaskDependency, DependencyType, Severity }
pub use sprint {
Sprint, SprintMember, SprintRetrospective, ActionItem,
BurndownPoint, DailyStandup, Impediment
}
pub use milestone {
Milestone, Condition as MilestoneCondition, Deliverable,
MilestoneDependency, SuccessMetric, Risk, Approval, Communication
}
pub use issue {
Issue, IssueLink, Workaround, TestCase, LogEntry,
IssueFrequency, WorkaroundComplexity, TestType, LogLevel
}
pub use team {
Team, TeamMember, TeamSkill, TeamCapacity, WorkingHours,
Holiday, TeamRitual, TeamGoal, TeamMetric, TeamTool
}
pub use agenda {
Agenda, Attendee, Resource, Recurrence, AgendaItem, Decision,
AttendanceType, ResponseStatus, ResourceType, RecurrencePattern
}
pub use chat {
Chat, ChatMember, Message, Reaction, RichContent, Poll,
ChatSettings, NotificationSettings, ChatIntegration
}
// System Overview:
//
// ROOT OBJECTS (stored as JSON with incremental IDs):
// 1. User - System users with roles, skills, and preferences
// 2. Customer - CRM entities with contacts and project relationships
// 2. Customer - CRM entities with contacts and project relationships
// 3. Project - Main project containers with budgets and timelines
// 4. Task - Work items with dependencies and time tracking
// 5. Sprint - Scrum sprints with velocity and burndown tracking
@@ -114,7 +72,7 @@ pub use chat {
// Database table names (matching struct names in lowercase)
pub const table_names = [
'user',
'customer',
'customer',
'project',
'task',
'sprint',
@@ -122,51 +80,50 @@ pub const table_names = [
'issue',
'team',
'agenda',
'chat'
'chat',
]
// Searchable fields that should be indexed
pub const indexed_fields = {
'user': ['email', 'username', 'status', 'role']
'customer': ['name', 'email', 'status', 'type']
'project': ['name', 'status', 'priority', 'customer_id', 'project_manager_id']
'task': ['title', 'status', 'priority', 'assignee_id', 'project_id', 'sprint_id']
'sprint': ['name', 'status', 'project_id', 'start_date', 'end_date']
'user': ['email', 'username', 'status', 'role']
'customer': ['name', 'email', 'status', 'type']
'project': ['name', 'status', 'priority', 'customer_id', 'project_manager_id']
'task': ['title', 'status', 'priority', 'assignee_id', 'project_id', 'sprint_id']
'sprint': ['name', 'status', 'project_id', 'start_date', 'end_date']
'milestone': ['name', 'status', 'priority', 'project_id', 'due_date']
'issue': ['title', 'status', 'priority', 'severity', 'assignee_id', 'project_id']
'team': ['name', 'status', 'team_type', 'manager_id']
'agenda': ['title', 'status', 'start_time', 'organizer_id', 'project_id']
'chat': ['name', 'chat_type', 'status', 'owner_id', 'project_id', 'team_id']
'issue': ['title', 'status', 'priority', 'severity', 'assignee_id', 'project_id']
'team': ['name', 'status', 'team_type', 'manager_id']
'agenda': ['title', 'status', 'start_time', 'organizer_id', 'project_id']
'chat': ['name', 'chat_type', 'status', 'owner_id', 'project_id', 'team_id']
}
// Common query patterns for efficient database access
pub const common_queries = {
'active_projects': 'status IN ("planning", "active")'
'overdue_tasks': 'due_date < NOW() AND status NOT IN ("done", "cancelled")'
'current_sprints': 'status = "active" AND start_date <= NOW() AND end_date >= NOW()'
'active_projects': 'status IN ("planning", "active")'
'overdue_tasks': 'due_date < NOW() AND status NOT IN ("done", "cancelled")'
'current_sprints': 'status = "active" AND start_date <= NOW() AND end_date >= NOW()'
'pending_milestones': 'status IN ("planning", "in_progress") AND due_date IS NOT NULL'
'open_issues': 'status NOT IN ("resolved", "closed", "cancelled")'
'active_teams': 'status = "performing"'
'upcoming_meetings': 'start_time > NOW() AND status = "scheduled"'
'active_chats': 'status = "active" AND last_activity > DATE_SUB(NOW(), INTERVAL 30 DAY)'
'open_issues': 'status NOT IN ("resolved", "closed", "cancelled")'
'active_teams': 'status = "performing"'
'upcoming_meetings': 'start_time > NOW() AND status = "scheduled"'
'active_chats': 'status = "active" AND last_activity > DATE_SUB(NOW(), INTERVAL 30 DAY)'
}
// System-wide constants
pub const (
max_file_size = 100 * 1024 * 1024 // 100MB
max_message_length = 10000
max_comment_length = 5000
max_description_length = 10000
default_page_size = 50
max_page_size = 1000
session_timeout_hours = 24
password_min_length = 8
username_min_length = 3
team_max_members = 100
project_max_tasks = 10000
sprint_max_duration_days = 30
chat_max_members = 1000
)
pub const max_file_size = 100 * 1024 * 1024 // 100MB
pub const max_message_length = 10000
pub const max_comment_length = 5000
pub const max_description_length = 10000
pub const default_page_size = 50
pub const max_page_size = 1000
pub const session_timeout_hours = 24
pub const password_min_length = 8
pub const username_min_length = 3
pub const team_max_members = 100
pub const project_max_tasks = 10000
pub const sprint_max_duration_days = 30
pub const chat_max_members = 1000
// Validation helpers
pub fn validate_email(email string) bool {
@@ -174,24 +131,24 @@ pub fn validate_email(email string) bool {
return email.contains('@') && email.contains('.')
}
pub fn validate_username(username string) bool {
return username.len >= username_min_length && username.is_alnum()
}
// pub fn validate_username(username string) bool {
// return username.len >= username_min_length && username.is_alnum()
// }
pub fn validate_password(password string) bool {
return password.len >= password_min_length
}
// Utility functions for common operations
pub fn generate_slug(text string) string {
return text.to_lower().replace(' ', '-').replace_each(['/', '\\', '?', '#'], '-')
}
// pub fn generate_slug(text string) string {
// return text.to_lower().replace(' ', '-').replace_each(['/', '\\', '?', '#'], '-')
// }
pub fn truncate_text(text string, max_length int) string {
if text.len <= max_length {
return text
}
return text[..max_length-3] + '...'
return text[..max_length - 3] + '...'
}
pub fn format_duration(minutes int) string {
@@ -206,86 +163,105 @@ pub fn format_duration(minutes int) string {
return '${hours}h ${remaining_minutes}m'
}
pub fn calculate_business_days(start_date time.Time, end_date time.Time) int {
mut days := 0
mut current := start_date
for current.unix <= end_date.unix {
weekday := current.weekday()
if weekday != 0 && weekday != 6 { // Not Sunday (0) or Saturday (6)
days++
}
current = time.Time{unix: current.unix + 86400} // Add one day
}
return days
}
// pub fn calculate_business_days(start_date time.Time, end_date time.Time) int {
// mut days := 0
// mut current := start_date
// for current.unix <= end_date.unix {
// weekday := current.weekday()
// if weekday != 0 && weekday != 6 { // Not Sunday (0) or Saturday (6)
// days++
// }
// current = time.Time{unix: current.unix + 86400} // Add one day
// }
// return days
// }
// Health scoring weights for different metrics
pub const health_weights = {
'project': {
'budget': 0.25
'schedule': 0.25
'project': {
'budget': 0.25
'schedule': 0.25
'progress': 0.25
'risk': 0.25
'risk': 0.25
}
'sprint': {
'completion': 0.4
'sprint': {
'completion': 0.4
'utilization': 0.3
'impediments': 0.2
'schedule': 0.1
'schedule': 0.1
}
'team': {
'team': {
'utilization': 0.25
'velocity': 0.25
'goals': 0.25
'stability': 0.25
'velocity': 0.25
'goals': 0.25
'stability': 0.25
}
'milestone': {
'progress': 0.3
'schedule': 0.25
'budget': 0.2
'progress': 0.3
'schedule': 0.25
'budget': 0.2
'conditions': 0.15
'approvals': 0.1
'approvals': 0.1
}
}
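A rough illustration of consuming the weight map as a weighted sum; the per-metric component scores below are invented, and the function is only a sketch of one plausible way to combine them, assumed to run inside the same module:

fn example_weighted_project_health() f64 {
	// Invented component scores, each already normalised to 0.0..1.0.
	components := {
		'budget':   0.9
		'schedule': 0.7
		'progress': 0.8
		'risk':     1.0
	}
	project_weights := health_weights['project'] or { map[string]f64{} }
	mut weighted := 0.0
	for metric, weight in project_weights {
		weighted += weight * (components[metric] or { 0.0 })
	}
	return weighted
}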
// Default notification settings
pub const default_notifications = {
'task_assigned': true
'task_due_soon': true
'task_overdue': true
'project_milestone': true
'sprint_started': true
'sprint_ended': true
'meeting_reminder': true
'chat_mention': true
'issue_assigned': true
'task_assigned': true
'task_due_soon': true
'task_overdue': true
'project_milestone': true
'sprint_started': true
'sprint_ended': true
'meeting_reminder': true
'chat_mention': true
'issue_assigned': true
'approval_requested': true
}
// System roles and their default permissions
pub const role_permissions = {
'admin': ['*'] // All permissions
'admin': ['*'] // All permissions
'manager': [
'create_project', 'edit_project', 'delete_project',
'create_team', 'edit_team', 'manage_team_members',
'create_milestone', 'edit_milestone',
'view_reports', 'export_data'
'create_project',
'edit_project',
'delete_project',
'create_team',
'edit_team',
'manage_team_members',
'create_milestone',
'edit_milestone',
'view_reports',
'export_data',
]
'lead': [
'create_task', 'edit_task', 'assign_task',
'create_sprint', 'edit_sprint',
'create_issue', 'edit_issue',
'schedule_meeting', 'create_chat'
'lead': [
'create_task',
'edit_task',
'assign_task',
'create_sprint',
'edit_sprint',
'create_issue',
'edit_issue',
'schedule_meeting',
'create_chat',
]
'member': [
'view_project', 'create_task', 'edit_own_task',
'create_issue', 'comment', 'upload_file',
'join_meeting', 'send_message'
'member': [
'view_project',
'create_task',
'edit_own_task',
'create_issue',
'comment',
'upload_file',
'join_meeting',
'send_message',
]
'viewer': [
'view_project', 'view_task', 'view_issue',
'view_meeting', 'read_message'
'viewer': [
'view_project',
'view_task',
'view_issue',
'view_meeting',
'read_message',
]
}
}
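A short usage sketch for the validation helpers and permission map above; the can_do helper and the sample inputs are illustrative only, and the code assumes it is called from within the same module:

// Checks a permission against the role map; '*' grants everything.
fn can_do(role string, permission string) bool {
	perms := role_permissions[role] or { return false }
	return '*' in perms || permission in perms
}

fn example_helpers() {
	email := 'jane@example.com'
	password := 's3cret-pass'
	long_text := 'A very long task description that will not fit in a list view'
	println('valid email: ${validate_email(email)}')
	println('valid password: ${validate_password(password)}')
	println(truncate_text(long_text, 32))
	member_can := can_do('member', 'create_task')
	admin_can := can_do('admin', 'export_data')
	println('member may create_task: ${member_can}')
	println('admin may export_data: ${admin_can}')
}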

View File

@@ -6,31 +6,31 @@ import time
pub struct Customer {
BaseModel
pub mut:
name string @[required]
type CustomerType
status CustomerStatus
industry string
website string
description string
contacts []Contact
addresses []Address
projects []int // Project IDs associated with this customer
total_value f64 // Total contract value
annual_value f64 // Annual recurring revenue
payment_terms string
tax_id string
name string @[required]
type CustomerType
status CustomerStatus
industry string
website string
description string
contacts []Contact
addresses []Address
projects []int // Project IDs associated with this customer
total_value f64 // Total contract value
annual_value f64 // Annual recurring revenue
payment_terms string
tax_id string
account_manager_id int // User ID of account manager
lead_source string
acquisition_date time.Time
last_contact_date time.Time
lead_source string
acquisition_date time.Time
last_contact_date time.Time
next_followup_date time.Time
credit_limit f64
payment_method string
billing_cycle string // monthly, quarterly, annually
notes string
logo_url string
social_media map[string]string // platform -> URL
custom_fields map[string]string // Flexible custom data
credit_limit f64
payment_method string
billing_cycle string // monthly, quarterly, annually
notes string
logo_url string
social_media map[string]string // platform -> URL
custom_fields map[string]string // Flexible custom data
}
// get_primary_contact returns the primary contact for this customer
@@ -220,4 +220,4 @@ pub fn (mut c Customer) set_custom_field(field string, value string) {
// get_custom_field gets a custom field value
pub fn (c Customer) get_custom_field(field string) ?string {
return c.custom_fields[field] or { none }
}
}
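A small, hypothetical round-trip through the custom-field helpers shown above; only the required name field is set, everything else keeps its zero value, and the values are placeholders:

fn example_customer_custom_fields() {
	mut customer := Customer{
		name: 'Acme Corp'
	}
	customer.set_custom_field('crm_id', 'ACME-001')
	if crm_id := customer.get_custom_field('crm_id') {
		println('CRM id for ${customer.name}: ${crm_id}')
	}
}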

View File

@@ -195,4 +195,4 @@ pub enum TimeEntryType {
support
training
other
}
}

View File

@@ -6,66 +6,66 @@ import time
pub struct Issue {
BaseModel
pub mut:
title string @[required]
description string
project_id int // Links to Project
task_id int // Links to Task (optional)
sprint_id int // Links to Sprint (optional)
reporter_id int // User who reported the issue
assignee_id int // User assigned to resolve the issue
status IssueStatus
priority Priority
severity Severity
issue_type IssueType
category IssueCategory
resolution IssueResolution
title string @[required]
description string
project_id int // Links to Project
task_id int // Links to Task (optional)
sprint_id int // Links to Sprint (optional)
reporter_id int // User who reported the issue
assignee_id int // User assigned to resolve the issue
status IssueStatus
priority Priority
severity Severity
issue_type IssueType
category IssueCategory
resolution IssueResolution
resolution_description string
environment string // Environment where issue occurred
version string // Version where issue was found
fixed_version string // Version where issue was fixed
component string // Component/module affected
labels []int // Label IDs
affects_versions []string
fix_versions []string
due_date time.Time
resolved_date time.Time
closed_date time.Time
estimated_hours f32
actual_hours f32
story_points int // For estimation
watchers []int // User IDs watching this issue
linked_issues []IssueLink
duplicates []int // Issue IDs that are duplicates of this
duplicated_by int // Issue ID that this duplicates
parent_issue_id int // For sub-issues
sub_issues []int // Sub-issue IDs
time_entries []TimeEntry
comments []Comment
attachments []Attachment
workarounds []Workaround
test_cases []TestCase
steps_to_reproduce []string
expected_behavior string
actual_behavior string
additional_info string
browser string
operating_system string
device_info string
network_info string
user_agent string
screen_resolution string
logs []LogEntry
stack_trace string
error_message string
frequency IssueFrequency
impact_users int // Number of users affected
business_impact string
technical_debt bool // Is this technical debt?
security_issue bool // Is this a security issue?
performance_issue bool // Is this a performance issue?
accessibility_issue bool // Is this an accessibility issue?
regression bool // Is this a regression?
custom_fields map[string]string
environment string // Environment where issue occurred
version string // Version where issue was found
fixed_version string // Version where issue was fixed
component string // Component/module affected
labels []int // Label IDs
affects_versions []string
fix_versions []string
due_date time.Time
resolved_date time.Time
closed_date time.Time
estimated_hours f32
actual_hours f32
story_points int // For estimation
watchers []int // User IDs watching this issue
linked_issues []IssueLink
duplicates []int // Issue IDs that are duplicates of this
duplicated_by int // Issue ID that this duplicates
parent_issue_id int // For sub-issues
sub_issues []int // Sub-issue IDs
time_entries []TimeEntry
comments []Comment
attachments []Attachment
workarounds []Workaround
test_cases []TestCase
steps_to_reproduce []string
expected_behavior string
actual_behavior string
additional_info string
browser string
operating_system string
device_info string
network_info string
user_agent string
screen_resolution string
logs []LogEntry
stack_trace string
error_message string
frequency IssueFrequency
impact_users int // Number of users affected
business_impact string
technical_debt bool // Is this technical debt?
security_issue bool // Is this a security issue?
performance_issue bool // Is this a performance issue?
accessibility_issue bool // Is this an accessibility issue?
regression bool // Is this a regression?
custom_fields map[string]string
}
// IssueType for categorizing issues
@@ -160,18 +160,18 @@ pub enum IssueLinkType {
// Workaround represents a temporary solution for an issue
pub struct Workaround {
pub mut:
id int
issue_id int
title string
description string
steps []string
effectiveness f32 // 0.0 to 1.0 scale
complexity WorkaroundComplexity
temporary bool // Is this a temporary workaround?
created_at time.Time
created_by int
tested_by []int // User IDs who tested this workaround
success_rate f32 // Success rate from testing
id int
issue_id int
title string
description string
steps []string
effectiveness f32 // 0.0 to 1.0 scale
complexity WorkaroundComplexity
temporary bool // Is this a temporary workaround?
created_at time.Time
created_by int
tested_by []int // User IDs who tested this workaround
success_rate f32 // Success rate from testing
}
// WorkaroundComplexity for rating workaround complexity
@@ -359,14 +359,14 @@ pub fn (mut i Issue) add_link(linked_issue_id int, link_type IssueLinkType, desc
return
}
}
i.linked_issues << IssueLink{
issue_id: i.id
issue_id: i.id
linked_issue_id: linked_issue_id
link_type: link_type
description: description
created_at: time.now()
created_by: by_user_id
link_type: link_type
description: description
created_at: time.now()
created_by: by_user_id
}
i.update_timestamp(by_user_id)
}
@@ -402,12 +402,12 @@ pub fn (mut i Issue) add_duplicate(duplicate_issue_id int, by_user_id int) {
// log_time adds a time entry to the issue
pub fn (mut i Issue) log_time(user_id int, hours f32, description string, date time.Time, by_user_id int) {
i.time_entries << TimeEntry{
user_id: user_id
hours: hours
user_id: user_id
hours: hours
description: description
date: date
created_at: time.now()
created_by: by_user_id
date: date
created_at: time.now()
created_by: by_user_id
}
i.actual_hours += hours
i.update_timestamp(by_user_id)
@@ -416,8 +416,8 @@ pub fn (mut i Issue) log_time(user_id int, hours f32, description string, date t
// add_comment adds a comment to the issue
pub fn (mut i Issue) add_comment(user_id int, content string, by_user_id int) {
i.comments << Comment{
user_id: user_id
content: content
user_id: user_id
content: content
created_at: time.now()
created_by: by_user_id
}
@@ -427,10 +427,10 @@ pub fn (mut i Issue) add_comment(user_id int, content string, by_user_id int) {
// add_attachment adds an attachment to the issue
pub fn (mut i Issue) add_attachment(filename string, file_path string, file_size int, mime_type string, by_user_id int) {
i.attachments << Attachment{
filename: filename
file_path: file_path
file_size: file_size
mime_type: mime_type
filename: filename
file_path: file_path
file_size: file_size
mime_type: mime_type
uploaded_at: time.now()
uploaded_by: by_user_id
}
@@ -440,16 +440,16 @@ pub fn (mut i Issue) add_attachment(filename string, file_path string, file_size
// add_workaround adds a workaround for the issue
pub fn (mut i Issue) add_workaround(title string, description string, steps []string, effectiveness f32, complexity WorkaroundComplexity, temporary bool, by_user_id int) {
i.workarounds << Workaround{
id: i.workarounds.len + 1
issue_id: i.id
title: title
description: description
steps: steps
id: i.workarounds.len + 1
issue_id: i.id
title: title
description: description
steps: steps
effectiveness: effectiveness
complexity: complexity
temporary: temporary
created_at: time.now()
created_by: by_user_id
complexity: complexity
temporary: temporary
created_at: time.now()
created_by: by_user_id
}
i.update_timestamp(by_user_id)
}
@@ -457,17 +457,17 @@ pub fn (mut i Issue) add_workaround(title string, description string, steps []st
// add_test_case adds a test case for the issue
pub fn (mut i Issue) add_test_case(title string, description string, preconditions []string, steps []string, expected_result string, test_type TestType, automated bool, by_user_id int) {
i.test_cases << TestCase{
id: i.test_cases.len + 1
issue_id: i.id
title: title
description: description
preconditions: preconditions
steps: steps
id: i.test_cases.len + 1
issue_id: i.id
title: title
description: description
preconditions: preconditions
steps: steps
expected_result: expected_result
test_type: test_type
automated: automated
created_at: time.now()
created_by: by_user_id
test_type: test_type
automated: automated
created_at: time.now()
created_by: by_user_id
}
i.update_timestamp(by_user_id)
}
@@ -475,14 +475,14 @@ pub fn (mut i Issue) add_test_case(title string, description string, preconditio
// add_log_entry adds a log entry to the issue
pub fn (mut i Issue) add_log_entry(timestamp time.Time, level LogLevel, message string, source string, thread string, user_id int, session_id string, request_id string, additional_data map[string]string) {
i.logs << LogEntry{
timestamp: timestamp
level: level
message: message
source: source
thread: thread
user_id: user_id
session_id: session_id
request_id: request_id
timestamp: timestamp
level: level
message: message
source: source
thread: thread
user_id: user_id
session_id: session_id
request_id: request_id
additional_data: additional_data
}
}
@@ -502,7 +502,7 @@ pub fn (mut i Issue) escalate(new_priority Priority, by_user_id int) {
// calculate_priority_score calculates a priority score based on various factors
pub fn (i Issue) calculate_priority_score() f32 {
mut score := f32(0)
// Base priority score
match i.priority {
.critical { score += 100 }
@@ -510,7 +510,7 @@ pub fn (i Issue) calculate_priority_score() f32 {
.medium { score += 50 }
.low { score += 25 }
}
// Severity modifier
match i.severity {
.blocker { score += 50 }
@@ -519,7 +519,7 @@ pub fn (i Issue) calculate_priority_score() f32 {
.minor { score += 10 }
.trivial { score += 0 }
}
// Age factor (older issues get higher priority)
age := i.get_age()
if age > 30 {
@@ -529,7 +529,7 @@ pub fn (i Issue) calculate_priority_score() f32 {
} else if age > 7 {
score += 5
}
// User impact factor
if i.impact_users > 1000 {
score += 30
@@ -538,7 +538,7 @@ pub fn (i Issue) calculate_priority_score() f32 {
} else if i.impact_users > 10 {
score += 10
}
// Special issue type modifiers
if i.security_issue {
score += 25
@@ -549,14 +549,14 @@ pub fn (i Issue) calculate_priority_score() f32 {
if i.regression {
score += 20
}
return score
}
// get_sla_status returns SLA compliance status
pub fn (i Issue) get_sla_status() string {
age := i.get_age()
// Define SLA based on priority
mut sla_days := 0
match i.priority {
@@ -565,7 +565,7 @@ pub fn (i Issue) get_sla_status() string {
.medium { sla_days = 7 }
.low { sla_days = 14 }
}
if i.status in [.resolved, .closed] {
resolution_days := int(i.get_resolution_time() / 24)
if resolution_days <= sla_days {
@@ -580,4 +580,4 @@ pub fn (i Issue) get_sla_status() string {
return 'Breached'
}
}
}
}
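To illustrate the scoring helpers above, a hedged sketch with made-up values; the enum variants used are the ones visible in the match arms here and in the Severity enum elsewhere in this commit:

fn example_issue_scoring() {
	issue := Issue{
		title: 'Checkout intermittently returns HTTP 500'
		project_id: 42
		priority: .critical
		severity: .major
		impact_users: 250
		regression: true
	}
	println('priority score: ${issue.calculate_priority_score()}')
	println('SLA status: ${issue.get_sla_status()}')
}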

View File

@@ -6,33 +6,33 @@ import time
pub struct Milestone {
BaseModel
pub mut:
name string @[required]
description string
project_id int // Links to Project
status MilestoneStatus
priority Priority
milestone_type MilestoneType
due_date time.Time
completed_date time.Time
progress f32 // 0.0 to 1.0
owner_id int // User responsible for this milestone
stakeholders []int // User IDs of stakeholders
conditions []Condition // Conditions that must be met
deliverables []Deliverable
dependencies []MilestoneDependency
tasks []int // Task IDs associated with this milestone
budget f64 // Budget allocated to this milestone
actual_cost f64 // Actual cost incurred
estimated_hours f32 // Estimated effort in hours
actual_hours f32 // Actual effort spent
name string @[required]
description string
project_id int // Links to Project
status MilestoneStatus
priority Priority
milestone_type MilestoneType
due_date time.Time
completed_date time.Time
progress f32 // 0.0 to 1.0
owner_id int // User responsible for this milestone
stakeholders []int // User IDs of stakeholders
conditions []Condition // Conditions that must be met
deliverables []Deliverable
dependencies []MilestoneDependency
tasks []int // Task IDs associated with this milestone
budget f64 // Budget allocated to this milestone
actual_cost f64 // Actual cost incurred
estimated_hours f32 // Estimated effort in hours
actual_hours f32 // Actual effort spent
acceptance_criteria []string
success_metrics []SuccessMetric
risks []Risk
approvals []Approval
communications []Communication
review_notes string
lessons_learned string
custom_fields map[string]string
success_metrics []SuccessMetric
risks []Risk
approvals []Approval
communications []Communication
review_notes string
lessons_learned string
custom_fields map[string]string
}
// MilestoneStatus for milestone lifecycle
@@ -60,22 +60,22 @@ pub enum MilestoneType {
// Condition represents a condition that must be met for milestone completion
pub struct Condition {
pub mut:
id int
milestone_id int
title string
description string
condition_type ConditionType
status ConditionStatus
required bool // Is this condition mandatory?
weight f32 // Weight in milestone completion (0.0 to 1.0)
assigned_to int // User responsible for this condition
due_date time.Time
completed_date time.Time
id int
milestone_id int
title string
description string
condition_type ConditionType
status ConditionStatus
required bool // Is this condition mandatory?
weight f32 // Weight in milestone completion (0.0 to 1.0)
assigned_to int // User responsible for this condition
due_date time.Time
completed_date time.Time
verification_method string
evidence []string // URLs, file paths, or descriptions of evidence
notes string
created_at time.Time
created_by int
evidence []string // URLs, file paths, or descriptions of evidence
notes string
created_at time.Time
created_by int
}
// ConditionType for categorizing conditions
@@ -105,26 +105,26 @@ pub enum ConditionStatus {
// Deliverable represents a specific deliverable for a milestone
pub struct Deliverable {
pub mut:
id int
milestone_id int
name string
description string
deliverable_type DeliverableType
status DeliverableStatus
assigned_to int
due_date time.Time
completed_date time.Time
file_path string
url string
size_estimate string
quality_criteria []string
id int
milestone_id int
name string
description string
deliverable_type DeliverableType
status DeliverableStatus
assigned_to int
due_date time.Time
completed_date time.Time
file_path string
url string
size_estimate string
quality_criteria []string
acceptance_criteria []string
review_status ReviewStatus
reviewer_id int
review_notes string
version string
created_at time.Time
created_by int
review_status ReviewStatus
reviewer_id int
review_notes string
version string
created_at time.Time
created_by int
}
// DeliverableType for categorizing deliverables
@@ -164,28 +164,28 @@ pub enum ReviewStatus {
// MilestoneDependency represents dependencies between milestones
pub struct MilestoneDependency {
pub mut:
milestone_id int
milestone_id int
depends_on_milestone_id int
dependency_type DependencyType
created_at time.Time
created_by int
dependency_type DependencyType
created_at time.Time
created_by int
}
// SuccessMetric for measuring milestone success
pub struct SuccessMetric {
pub mut:
id int
milestone_id int
name string
description string
metric_type MetricType
target_value f64
actual_value f64
unit string
id int
milestone_id int
name string
description string
metric_type MetricType
target_value f64
actual_value f64
unit string
measurement_method string
status MetricStatus
measured_at time.Time
measured_by int
status MetricStatus
measured_at time.Time
measured_by int
}
// MetricType for categorizing success metrics
@@ -212,22 +212,22 @@ pub enum MetricStatus {
// Risk represents a risk associated with a milestone
pub struct Risk {
pub mut:
id int
milestone_id int
title string
description string
risk_type RiskType
probability f32 // 0.0 to 1.0
impact f32 // 0.0 to 1.0
risk_score f32 // probability * impact
status RiskStatus
owner_id int
mitigation_plan string
id int
milestone_id int
title string
description string
risk_type RiskType
probability f32 // 0.0 to 1.0
impact f32 // 0.0 to 1.0
risk_score f32 // probability * impact
status RiskStatus
owner_id int
mitigation_plan string
contingency_plan string
identified_at time.Time
identified_by int
reviewed_at time.Time
reviewed_by int
identified_at time.Time
identified_by int
reviewed_at time.Time
reviewed_by int
}
// RiskType for categorizing risks
@@ -255,19 +255,19 @@ pub enum RiskStatus {
// Approval represents an approval required for milestone completion
pub struct Approval {
pub mut:
id int
milestone_id int
title string
description string
approver_id int
approval_type ApprovalType
status ApprovalStatus
requested_at time.Time
requested_by int
responded_at time.Time
comments string
conditions string
expires_at time.Time
id int
milestone_id int
title string
description string
approver_id int
approval_type ApprovalType
status ApprovalStatus
requested_at time.Time
requested_by int
responded_at time.Time
comments string
conditions string
expires_at time.Time
}
// ApprovalType for categorizing approvals
@@ -293,17 +293,17 @@ pub enum ApprovalStatus {
// Communication represents communication about the milestone
pub struct Communication {
pub mut:
id int
milestone_id int
title string
message string
id int
milestone_id int
title string
message string
communication_type CommunicationType
sender_id int
recipients []int
sent_at time.Time
channel string
priority Priority
read_by []int // User IDs who have read this communication
sender_id int
recipients []int
sent_at time.Time
channel string
priority Priority
read_by []int // User IDs who have read this communication
}
// CommunicationType for categorizing communications
@@ -329,25 +329,25 @@ pub fn (m Milestone) get_completion_percentage() f32 {
if m.conditions.len == 0 {
return m.progress * 100
}
mut total_weight := f32(0)
mut completed_weight := f32(0)
for condition in m.conditions {
weight := if condition.weight > 0 { condition.weight } else { 1.0 }
total_weight += weight
if condition.status == .completed {
completed_weight += weight
} else if condition.status == .waived {
completed_weight += weight * 0.5 // Waived conditions count as half
}
}
if total_weight == 0 {
return 0
}
return (completed_weight / total_weight) * 100
}
@@ -356,12 +356,12 @@ pub fn (m Milestone) get_days_until_due() int {
if m.due_date.unix == 0 {
return 0
}
now := time.now()
if now > m.due_date {
return 0
}
return int((m.due_date.unix - now.unix) / 86400)
}
@@ -378,18 +378,18 @@ pub fn (m Milestone) is_over_budget() bool {
// add_condition adds a condition to the milestone
pub fn (mut m Milestone) add_condition(title string, description string, condition_type ConditionType, required bool, weight f32, assigned_to int, due_date time.Time, by_user_id int) {
m.conditions << Condition{
id: m.conditions.len + 1
milestone_id: m.id
title: title
description: description
id: m.conditions.len + 1
milestone_id: m.id
title: title
description: description
condition_type: condition_type
status: .not_started
required: required
weight: weight
assigned_to: assigned_to
due_date: due_date
created_at: time.now()
created_by: by_user_id
status: .not_started
required: required
weight: weight
assigned_to: assigned_to
due_date: due_date
created_at: time.now()
created_by: by_user_id
}
m.update_timestamp(by_user_id)
}
@@ -403,7 +403,7 @@ pub fn (mut m Milestone) complete_condition(condition_id int, evidence []string,
m.conditions[i].evidence = evidence
m.conditions[i].notes = notes
m.update_timestamp(by_user_id)
// Update milestone progress
m.progress = m.get_completion_percentage() / 100
return true
@@ -415,16 +415,16 @@ pub fn (mut m Milestone) complete_condition(condition_id int, evidence []string,
// add_deliverable adds a deliverable to the milestone
pub fn (mut m Milestone) add_deliverable(name string, description string, deliverable_type DeliverableType, assigned_to int, due_date time.Time, by_user_id int) {
m.deliverables << Deliverable{
id: m.deliverables.len + 1
milestone_id: m.id
name: name
description: description
id: m.deliverables.len + 1
milestone_id: m.id
name: name
description: description
deliverable_type: deliverable_type
status: .not_started
assigned_to: assigned_to
due_date: due_date
created_at: time.now()
created_by: by_user_id
status: .not_started
assigned_to: assigned_to
due_date: due_date
created_at: time.now()
created_by: by_user_id
}
m.update_timestamp(by_user_id)
}
@@ -453,13 +453,13 @@ pub fn (mut m Milestone) add_dependency(depends_on_milestone_id int, dep_type De
return
}
}
m.dependencies << MilestoneDependency{
milestone_id: m.id
milestone_id: m.id
depends_on_milestone_id: depends_on_milestone_id
dependency_type: dep_type
created_at: time.now()
created_by: by_user_id
dependency_type: dep_type
created_at: time.now()
created_by: by_user_id
}
m.update_timestamp(by_user_id)
}
@@ -475,16 +475,16 @@ pub fn (mut m Milestone) add_stakeholder(user_id int, by_user_id int) {
// request_approval requests an approval for the milestone
pub fn (mut m Milestone) request_approval(title string, description string, approver_id int, approval_type ApprovalType, expires_at time.Time, by_user_id int) {
m.approvals << Approval{
id: m.approvals.len + 1
milestone_id: m.id
title: title
description: description
approver_id: approver_id
id: m.approvals.len + 1
milestone_id: m.id
title: title
description: description
approver_id: approver_id
approval_type: approval_type
status: .pending
requested_at: time.now()
requested_by: by_user_id
expires_at: expires_at
status: .pending
requested_at: time.now()
requested_by: by_user_id
expires_at: expires_at
}
m.update_timestamp(by_user_id)
}
@@ -521,12 +521,12 @@ pub fn (mut m Milestone) complete_milestone(by_user_id int) {
// calculate_health returns a health score for the milestone
pub fn (m Milestone) calculate_health() f32 {
mut score := f32(1.0)
// Progress health (30% weight)
if m.progress < 0.8 && m.status == .in_progress {
score -= 0.3 * (0.8 - m.progress)
}
// Schedule health (25% weight)
if m.is_overdue() {
score -= 0.25
@@ -536,29 +536,30 @@ pub fn (m Milestone) calculate_health() f32 {
score -= 0.125
}
}
// Budget health (20% weight)
if m.is_over_budget() {
variance_pct := (m.actual_cost - m.budget) / m.budget
score -= 0.2 * variance_pct
}
// Conditions health (15% weight)
overdue_conditions := m.conditions.filter(it.due_date.unix > 0 && time.now() > it.due_date && it.status !in [.completed, .waived]).len
overdue_conditions := m.conditions.filter(it.due_date.unix > 0 && time.now() > it.due_date
&& it.status !in [.completed, .waived]).len
if overdue_conditions > 0 {
score -= 0.15 * f32(overdue_conditions) / f32(m.conditions.len)
}
// Approvals health (10% weight)
pending_approvals := m.approvals.filter(it.status == .pending).len
if pending_approvals > 0 {
score -= 0.1 * f32(pending_approvals) / f32(m.approvals.len)
}
if score < 0 {
score = 0
}
return score
}
@@ -574,4 +575,4 @@ pub fn (m Milestone) get_health_status() string {
} else {
return 'Critical'
}
}
}
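A quick sketch exercising the progress and health helpers above; the dates and numbers are invented, and the snippet assumes in-module use where time is already imported:

fn example_milestone_health() {
	ms := Milestone{
		name: 'Public beta'
		project_id: 42
		due_date: time.now().add_days(30)
		progress: 0.4
	}
	completion := ms.get_completion_percentage()
	health := ms.calculate_health()
	println('days until due: ${ms.get_days_until_due()}')
	println('completion: ${completion:.1f}%')
	println('health: ${ms.get_health_status()} (${health:.2f})')
}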

View File

@@ -6,37 +6,37 @@ import time
pub struct Project {
BaseModel
pub mut:
name string @[required]
description string
customer_id int // Links to Customer
status ProjectStatus
priority Priority
start_date time.Time
end_date time.Time
actual_start_date time.Time
actual_end_date time.Time
budget f64
actual_cost f64
estimated_hours f32
actual_hours f32
progress f32 // 0.0 to 1.0
milestones []int // Milestone IDs
sprints []int // Sprint IDs
tasks []int // Task IDs
issues []int // Issue IDs
team_members []ProjectRole // Users and their roles in this project
project_manager_id int // User ID of project manager
client_contact_id int // Contact ID from customer
billing_type ProjectBillingType
hourly_rate f64 // Default hourly rate for this project
currency string = 'USD'
risk_level RiskLevel
methodology ProjectMethodology
repository_url string
documentation_url string
slack_channel string
custom_fields map[string]string
labels []int // Label IDs
name string @[required]
description string
customer_id int // Links to Customer
status ProjectStatus
priority Priority
start_date time.Time
end_date time.Time
actual_start_date time.Time
actual_end_date time.Time
budget f64
actual_cost f64
estimated_hours f32
actual_hours f32
progress f32 // 0.0 to 1.0
milestones []int // Milestone IDs
sprints []int // Sprint IDs
tasks []int // Task IDs
issues []int // Issue IDs
team_members []ProjectRole // Users and their roles in this project
project_manager_id int // User ID of project manager
client_contact_id int // Contact ID from customer
billing_type ProjectBillingType
hourly_rate f64 // Default hourly rate for this project
currency string = 'USD'
risk_level RiskLevel
methodology ProjectMethodology
repository_url string
documentation_url string
slack_channel string
custom_fields map[string]string
labels []int // Label IDs
}
// ProjectBillingType for different billing models
@@ -112,24 +112,24 @@ pub fn (p Project) get_schedule_variance() int {
if planned_duration == 0 {
return 0
}
if p.status == .completed {
actual_duration := p.get_actual_duration()
return planned_duration - actual_duration
}
// For ongoing projects, calculate based on current date
if p.start_date.unix == 0 {
return 0
}
days_elapsed := int((time.now().unix - p.start_date.unix) / 86400)
expected_progress := f32(days_elapsed) / f32(planned_duration)
if expected_progress == 0 {
return 0
}
schedule_performance := p.progress / expected_progress
return int(f32(planned_duration) * (schedule_performance - 1))
}
@@ -145,12 +145,12 @@ pub fn (mut p Project) add_team_member(user_id int, role string, permissions []s
return
}
}
// Add new member
p.team_members << ProjectRole{
user_id: user_id
project_id: p.id
role: role
user_id: user_id
project_id: p.id
role: role
permissions: permissions
assigned_at: time.now()
}
@@ -289,7 +289,7 @@ pub fn (mut p Project) add_hours(hours f32, by_user_id int) {
// calculate_health returns a project health score based on various factors
pub fn (p Project) calculate_health() f32 {
mut score := f32(1.0)
// Budget health (25% weight)
if p.budget > 0 {
budget_ratio := p.actual_cost / p.budget
@@ -299,7 +299,7 @@ pub fn (p Project) calculate_health() f32 {
score -= 0.125
}
}
// Schedule health (25% weight)
schedule_var := p.get_schedule_variance()
if schedule_var < -7 { // More than a week behind
@@ -307,7 +307,7 @@ pub fn (p Project) calculate_health() f32 {
} else if schedule_var < 0 {
score -= 0.125
}
// Progress health (25% weight)
if p.progress < 0.5 && p.status == .active {
days_elapsed := int((time.now().unix - p.start_date.unix) / 86400)
@@ -319,18 +319,18 @@ pub fn (p Project) calculate_health() f32 {
}
}
}
// Risk level (25% weight)
match p.risk_level {
.critical { score -= 0.25 }
.high { score -= 0.125 }
else {}
}
if score < 0 {
score = 0
}
return score
}
@@ -346,4 +346,4 @@ pub fn (p Project) get_health_status() string {
} else {
return 'Critical'
}
}
}
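A hedged example of the schedule and health calculations above; the figures are arbitrary, the snippet assumes in-module use, and the planned duration (computed outside the shown hunk) is assumed to derive from the start and end dates:

fn example_project_health() {
	p := Project{
		name: 'Website revamp'
		customer_id: 7
		budget: 50000.0
		actual_cost: 30000.0
		progress: 0.6
		start_date: time.now().add_days(-30)
		end_date: time.now().add_days(30)
	}
	health := p.calculate_health()
	println('schedule variance (days): ${p.get_schedule_variance()}')
	println('health: ${p.get_health_status()} (${health:.2f})')
}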

View File

@@ -6,27 +6,27 @@ import time
pub struct Sprint {
BaseModel
pub mut:
name string @[required]
description string
project_id int // Links to Project
sprint_number int // Sequential number within project
status SprintStatus
start_date time.Time
end_date time.Time
goal string // Sprint goal
capacity f32 // Team capacity in hours
commitment int // Story points committed
completed int // Story points completed
velocity f32 // Actual velocity (story points / sprint duration)
tasks []int // Task IDs in this sprint
team_members []SprintMember // Team members and their capacity
retrospective SprintRetrospective
review_notes string
demo_url string
burndown_data []BurndownPoint
daily_standups []DailyStandup
impediments []Impediment
custom_fields map[string]string
name string @[required]
description string
project_id int // Links to Project
sprint_number int // Sequential number within project
status SprintStatus
start_date time.Time
end_date time.Time
goal string // Sprint goal
capacity f32 // Team capacity in hours
commitment int // Story points committed
completed int // Story points completed
velocity f32 // Actual velocity (story points / sprint duration)
tasks []int // Task IDs in this sprint
team_members []SprintMember // Team members and their capacity
retrospective SprintRetrospective
review_notes string
demo_url string
burndown_data []BurndownPoint
daily_standups []DailyStandup
impediments []Impediment
custom_fields map[string]string
}
// SprintStatus for sprint lifecycle
@@ -42,10 +42,10 @@ pub struct SprintMember {
pub mut:
user_id int
sprint_id int
capacity_hours f32 // Available hours for this sprint
allocated_hours f32 // Hours allocated to tasks
actual_hours f32 // Hours actually worked
availability f32 // Percentage availability (0.0 to 1.0)
capacity_hours f32 // Available hours for this sprint
allocated_hours f32 // Hours allocated to tasks
actual_hours f32 // Hours actually worked
availability f32 // Percentage availability (0.0 to 1.0)
role string
joined_at time.Time
}
@@ -55,22 +55,22 @@ pub struct SprintRetrospective {
pub mut:
conducted_at time.Time
facilitator_id int
participants []int // User IDs
participants []int // User IDs
what_went_well []string
what_went_wrong []string
action_items []ActionItem
team_mood f32 // 1.0 to 5.0 scale
team_mood f32 // 1.0 to 5.0 scale
notes string
}
// ActionItem for retrospective action items
pub struct ActionItem {
pub mut:
description string
assignee_id int
due_date time.Time
status ActionItemStatus
created_at time.Time
description string
assignee_id int
due_date time.Time
status ActionItemStatus
created_at time.Time
}
// ActionItemStatus for action item tracking
@@ -84,50 +84,50 @@ pub enum ActionItemStatus {
// BurndownPoint for burndown chart data
pub struct BurndownPoint {
pub mut:
date time.Time
date time.Time
remaining_points int
remaining_hours f32
remaining_hours f32
completed_points int
added_points int // Points added during sprint
removed_points int // Points removed during sprint
added_points int // Points added during sprint
removed_points int // Points removed during sprint
}
// DailyStandup for daily standup meeting data
pub struct DailyStandup {
pub mut:
date time.Time
facilitator_id int
participants []int // User IDs
updates []StandupUpdate
impediments []int // Impediment IDs discussed
date time.Time
facilitator_id int
participants []int // User IDs
updates []StandupUpdate
impediments []int // Impediment IDs discussed
duration_minutes int
notes string
notes string
}
// StandupUpdate for individual team member updates
pub struct StandupUpdate {
pub mut:
user_id int
yesterday string // What did you do yesterday?
today string // What will you do today?
blockers string // Any blockers or impediments?
mood f32 // 1.0 to 5.0 scale
user_id int
yesterday string // What did you do yesterday?
today string // What will you do today?
blockers string // Any blockers or impediments?
mood f32 // 1.0 to 5.0 scale
}
// Impediment for tracking sprint impediments
pub struct Impediment {
pub mut:
id int
sprint_id int
title string
description string
reported_by int
assigned_to int
status ImpedimentStatus
severity Priority
reported_at time.Time
resolved_at time.Time
resolution string
id int
sprint_id int
title string
description string
reported_by int
assigned_to int
status ImpedimentStatus
severity Priority
reported_at time.Time
resolved_at time.Time
resolution string
}
// ImpedimentStatus for impediment tracking
@@ -151,12 +151,12 @@ pub fn (s Sprint) get_days_remaining() int {
if s.end_date.unix == 0 || s.status != .active {
return 0
}
now := time.now()
if now > s.end_date {
return 0
}
return int((s.end_date.unix - now.unix) / 86400)
}
@@ -165,12 +165,12 @@ pub fn (s Sprint) get_days_elapsed() int {
if s.start_date.unix == 0 {
return 0
}
now := time.now()
if now < s.start_date {
return 0
}
end_time := if s.status == .completed && s.end_date.unix > 0 { s.end_date } else { now }
return int((end_time.unix - s.start_date.unix) / 86400)
}
@@ -217,12 +217,12 @@ pub fn (s Sprint) get_team_utilization() f32 {
if capacity == 0 {
return 0
}
mut actual := f32(0)
for member in s.team_members {
actual += member.actual_hours
}
return (actual / capacity) * 100
}
@@ -253,15 +253,15 @@ pub fn (mut s Sprint) add_team_member(user_id int, capacity_hours f32, availabil
return
}
}
// Add new member
s.team_members << SprintMember{
user_id: user_id
sprint_id: s.id
user_id: user_id
sprint_id: s.id
capacity_hours: capacity_hours
availability: availability
role: role
joined_at: time.now()
availability: availability
role: role
joined_at: time.now()
}
s.update_timestamp(by_user_id)
}
@@ -314,9 +314,9 @@ pub fn (mut s Sprint) update_completed(points int, by_user_id int) {
// add_burndown_point adds a burndown chart data point
pub fn (mut s Sprint) add_burndown_point(remaining_points int, remaining_hours f32, completed_points int, by_user_id int) {
s.burndown_data << BurndownPoint{
date: time.now()
date: time.now()
remaining_points: remaining_points
remaining_hours: remaining_hours
remaining_hours: remaining_hours
completed_points: completed_points
}
s.update_timestamp(by_user_id)
@@ -325,12 +325,12 @@ pub fn (mut s Sprint) add_burndown_point(remaining_points int, remaining_hours f
// add_daily_standup adds a daily standup record
pub fn (mut s Sprint) add_daily_standup(facilitator_id int, participants []int, updates []StandupUpdate, duration_minutes int, notes string, by_user_id int) {
s.daily_standups << DailyStandup{
date: time.now()
facilitator_id: facilitator_id
participants: participants
updates: updates
date: time.now()
facilitator_id: facilitator_id
participants: participants
updates: updates
duration_minutes: duration_minutes
notes: notes
notes: notes
}
s.update_timestamp(by_user_id)
}
@@ -338,13 +338,13 @@ pub fn (mut s Sprint) add_daily_standup(facilitator_id int, participants []int,
// add_impediment adds an impediment to the sprint
pub fn (mut s Sprint) add_impediment(title string, description string, reported_by int, severity Priority, by_user_id int) {
s.impediments << Impediment{
id: s.impediments.len + 1
sprint_id: s.id
title: title
id: s.impediments.len + 1
sprint_id: s.id
title: title
description: description
reported_by: reported_by
status: .open
severity: severity
status: .open
severity: severity
reported_at: time.now()
}
s.update_timestamp(by_user_id)
@@ -366,14 +366,14 @@ pub fn (mut s Sprint) resolve_impediment(impediment_id int, resolution string, b
// conduct_retrospective conducts a sprint retrospective
pub fn (mut s Sprint) conduct_retrospective(facilitator_id int, participants []int, went_well []string, went_wrong []string, action_items []ActionItem, team_mood f32, notes string, by_user_id int) {
s.retrospective = SprintRetrospective{
conducted_at: time.now()
facilitator_id: facilitator_id
participants: participants
what_went_well: went_well
conducted_at: time.now()
facilitator_id: facilitator_id
participants: participants
what_went_well: went_well
what_went_wrong: went_wrong
action_items: action_items
team_mood: team_mood
notes: notes
action_items: action_items
team_mood: team_mood
notes: notes
}
s.update_timestamp(by_user_id)
}
@@ -381,13 +381,13 @@ pub fn (mut s Sprint) conduct_retrospective(facilitator_id int, participants []i
// get_health_score calculates a health score for the sprint
pub fn (s Sprint) get_health_score() f32 {
mut score := f32(1.0)
// Completion rate (40% weight)
completion := s.get_completion_percentage()
if completion < 70 {
score -= 0.4 * (70 - completion) / 70
}
// Team utilization (30% weight)
utilization := s.get_team_utilization()
if utilization < 80 || utilization > 120 {
@@ -397,22 +397,22 @@ pub fn (s Sprint) get_health_score() f32 {
score -= 0.3 * (utilization - 120) / 120
}
}
// Impediments (20% weight)
open_impediments := s.impediments.filter(it.status == .open).len
if open_impediments > 0 {
score -= 0.2 * f32(open_impediments) / 5 // Assume 5+ impediments is very bad
}
// Schedule adherence (10% weight)
if s.is_overdue() {
score -= 0.1
}
if score < 0 {
score = 0
}
return score
}
@@ -428,4 +428,4 @@ pub fn (s Sprint) get_health_status() string {
} else {
return 'Critical'
}
}
}
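A sketch of the impediment and burndown helpers above, using only signatures that appear in this file; the ids, points, and hours are invented:

fn example_sprint_tracking() {
	mut s := Sprint{
		name: 'Sprint 12'
		project_id: 42
		sprint_number: 12
		commitment: 30
	}
	s.add_impediment('CI runners down', 'Shared pipeline has been red since Monday', 5, .medium, 1)
	s.add_burndown_point(18, 72.0, 12, 1)
	open_impediments := s.impediments.filter(it.status == .open).len
	health := s.get_health_score()
	println('open impediments: ${open_impediments}')
	println('health: ${s.get_health_status()} (${health:.2f})')
}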

View File

@@ -5,18 +5,18 @@ import time
// Contact represents a person associated with a customer or organization
pub struct Contact {
pub mut:
id int
name string @[required]
email string
phone string
mobile string
role string
department string
type ContactType
is_primary bool
notes string
created_at time.Time
updated_at time.Time
id int
name string @[required]
email string
phone string
mobile string
role string
department string
type ContactType
is_primary bool
notes string
created_at time.Time
updated_at time.Time
}
// Address represents a physical address
@@ -58,7 +58,7 @@ pub mut:
pub struct Comment {
pub mut:
id int
author_id int @[required]
author_id int @[required]
content string @[required]
timestamp time.Time
is_internal bool // Internal comments not visible to clients
@@ -70,57 +70,57 @@ pub mut:
// Attachment represents a file attached to an entity
pub struct Attachment {
pub mut:
id int
filename string @[required]
id int
filename string @[required]
original_name string
file_path string @[required]
file_size i64
mime_type string
uploaded_by int // User ID
uploaded_at time.Time
description string
is_public bool // Whether clients can see this attachment
file_path string @[required]
file_size i64
mime_type string
uploaded_by int // User ID
uploaded_at time.Time
description string
is_public bool // Whether clients can see this attachment
}
// Condition represents a requirement that must be met for a milestone
pub struct Condition {
pub struct Condition1 {
pub mut:
id int
milestone_id int @[required]
description string @[required]
status ConditionStatus
verification string // How to verify this condition is met
responsible_id int // User ID responsible for this condition
due_date time.Time
completed_at time.Time
notes string
created_at time.Time
updated_at time.Time
id int
milestone_id int @[required]
description string @[required]
status ConditionStatus
verification string // How to verify this condition is met
responsible_id int // User ID responsible for this condition
due_date time.Time
completed_at time.Time
notes string
created_at time.Time
updated_at time.Time
}
// Message represents a chat message
pub struct Message {
pub struct Message1 {
pub mut:
id int
chat_id int @[required]
sender_id int @[required]
content string @[required]
timestamp time.Time
id int
chat_id int @[required]
sender_id int @[required]
content string @[required]
timestamp time.Time
message_type MessageType
attachments []Attachment
reactions []Reaction
thread_id int // For threaded conversations
is_edited bool
edited_at time.Time
mentions []int // User IDs mentioned in the message
attachments []Attachment
reactions []Reaction
thread_id int // For threaded conversations
is_edited bool
edited_at time.Time
mentions []int // User IDs mentioned in the message
}
// Reaction represents an emoji reaction to a message
pub struct Reaction {
pub struct Reaction1 {
pub mut:
id int
message_id int @[required]
user_id int @[required]
message_id int @[required]
user_id int @[required]
emoji string @[required]
timestamp time.Time
}
@@ -129,7 +129,7 @@ pub mut:
pub struct Notification {
pub mut:
id int
user_id int @[required]
user_id int @[required]
title string @[required]
message string @[required]
type NotificationType
@@ -157,22 +157,22 @@ pub enum NotificationType {
// Reminder for agenda items
pub struct Reminder {
pub mut:
id int
agenda_id int @[required]
user_id int @[required]
remind_at time.Time
message string
is_sent bool
sent_at time.Time
id int
agenda_id int @[required]
user_id int @[required]
remind_at time.Time
message string
is_sent bool
sent_at time.Time
}
// RecurrenceRule for recurring agenda items
pub struct RecurrenceRule {
pub mut:
frequency RecurrenceFrequency
interval int = 1 // Every N frequency units
end_date time.Time
count int // Number of occurrences
frequency RecurrenceFrequency
interval int = 1 // Every N frequency units
end_date time.Time
count int // Number of occurrences
days_of_week []int // 0=Sunday, 1=Monday, etc.
day_of_month int
}
@@ -194,16 +194,16 @@ pub mut:
time_format string = '24h'
language string = 'en'
theme string = 'light'
notifications_email bool = true
notifications_push bool = true
notifications_email bool = true
notifications_push bool = true
default_view string = 'kanban'
}
// ProjectRole represents a user's role in a specific project
pub struct ProjectRole {
pub mut:
user_id int @[required]
project_id int @[required]
user_id int @[required]
project_id int @[required]
role string @[required] // e.g., "lead", "developer", "tester"
permissions []string // Specific permissions for this project
assigned_at time.Time
@@ -221,10 +221,10 @@ pub mut:
// DependencyType for task dependencies
pub enum DependencyType {
finish_to_start // Most common: predecessor must finish before successor starts
start_to_start // Both tasks start at the same time
finish_to_start // Most common: predecessor must finish before successor starts
start_to_start // Both tasks start at the same time
finish_to_finish // Both tasks finish at the same time
start_to_finish // Successor can't finish until predecessor starts
start_to_finish // Successor can't finish until predecessor starts
}
// Label for flexible categorization
@@ -235,4 +235,4 @@ pub mut:
color string // Hex color code
description string
created_at time.Time
}
}
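A trivial construction example for the Contact struct above; the values are placeholders and the type field is left at its default:

fn example_primary_contact() Contact {
	return Contact{
		name: 'Jane Doe'
		email: 'jane@acme.example'
		role: 'CTO'
		is_primary: true
		created_at: time.now()
	}
}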

View File

@@ -2,363 +2,363 @@ module models
import time
// Task represents a work item in the system
pub struct Task {
BaseModel
pub mut:
title string @[required]
description string
project_id int // Links to Project
sprint_id int // Links to Sprint (optional)
milestone_id int // Links to Milestone (optional)
parent_task_id int // For subtasks
assignee_id int // User ID of assignee
reporter_id int // User ID who created the task
status TaskStatus
priority Priority
task_type TaskType
story_points int // For Scrum estimation
estimated_hours f32
actual_hours f32
remaining_hours f32
start_date time.Time
due_date time.Time
completed_date time.Time
dependencies []TaskDependency // Tasks this depends on
blocked_by []int // Task IDs that block this task
blocks []int // Task IDs that this task blocks
subtasks []int // Subtask IDs
watchers []int // User IDs watching this task
time_entries []TimeEntry
comments []Comment
attachments []Attachment
acceptance_criteria []string
definition_of_done []string
labels []int // Label IDs
epic_id int // Links to Epic (if applicable)
component string // Component/module this task relates to
version string // Target version/release
environment string // Environment (dev, staging, prod)
severity Severity // For bug tasks
reproducible bool // For bug tasks
steps_to_reproduce []string // For bug tasks
expected_result string // For bug tasks
actual_result string // For bug tasks
browser string // For web-related tasks
os string // Operating system
device string // Device type
custom_fields map[string]string
}
// // Task represents a work item in the system
// pub struct Task {
// BaseModel
// pub mut:
// title string @[required]
// description string
// project_id int // Links to Project
// sprint_id int // Links to Sprint (optional)
// milestone_id int // Links to Milestone (optional)
// parent_task_id int // For subtasks
// assignee_id int // User ID of assignee
// reporter_id int // User ID who created the task
// status TaskStatus
// priority Priority
// task_type TaskType
// story_points int // For Scrum estimation
// estimated_hours f32
// actual_hours f32
// remaining_hours f32
// start_date time.Time
// due_date time.Time
// completed_date time.Time
// dependencies []TaskDependency // Tasks this depends on
// blocked_by []int // Task IDs that block this task
// blocks []int // Task IDs that this task blocks
// subtasks []int // Subtask IDs
// watchers []int // User IDs watching this task
// time_entries []TimeEntry
// comments []Comment
// attachments []Attachment
// acceptance_criteria []string
// definition_of_done []string
// labels []int // Label IDs
// epic_id int // Links to Epic (if applicable)
// component string // Component/module this task relates to
// version string // Target version/release
// environment string // Environment (dev, staging, prod)
// severity Severity // For bug tasks
// reproducible bool // For bug tasks
// steps_to_reproduce []string // For bug tasks
// expected_result string // For bug tasks
// actual_result string // For bug tasks
// browser string // For web-related tasks
// os string // Operating system
// device string // Device type
// custom_fields map[string]string
// }
// TaskDependency represents a dependency relationship between tasks
pub struct TaskDependency {
pub mut:
task_id int
depends_on_task_id int
dependency_type DependencyType
created_at time.Time
created_by int
}
// // TaskDependency represents a dependency relationship between tasks
// pub struct TaskDependency1 {
// pub mut:
// task_id int
// depends_on_task_id int
// dependency_type DependencyType
// created_at time.Time
// created_by int
// }
// DependencyType for task dependencies
pub enum DependencyType {
finish_to_start // Task B cannot start until Task A finishes
start_to_start // Task B cannot start until Task A starts
finish_to_finish // Task B cannot finish until Task A finishes
start_to_finish // Task B cannot finish until Task A starts
}
// // DependencyType for task dependencies
// pub enum DependencyType1 {
// finish_to_start // Task B cannot start until Task A finishes
// start_to_start // Task B cannot start until Task A starts
// finish_to_finish // Task B cannot finish until Task A finishes
// start_to_finish // Task B cannot finish until Task A starts
// }
// Severity for bug tasks
pub enum Severity {
trivial
minor
major
critical
blocker
}
// // Severity for bug tasks
// pub enum Severity {
// trivial
// minor
// major
// critical
// blocker
// }
// is_overdue checks if the task is past its due date
pub fn (t Task) is_overdue() bool {
if t.due_date.unix == 0 || t.status in [.done, .cancelled] {
return false
}
return time.now() > t.due_date
}
// // is_overdue checks if the task is past its due date
// pub fn (t Task) is_overdue() bool {
// if t.due_date.unix == 0 || t.status in [.done, .cancelled] {
// return false
// }
// return time.now() > t.due_date
// }
// is_blocked checks if the task is blocked by other tasks
pub fn (t Task) is_blocked() bool {
return t.blocked_by.len > 0
}
// // is_blocked checks if the task is blocked by other tasks
// pub fn (t Task) is_blocked() bool {
// return t.blocked_by.len > 0
// }
// get_duration returns the planned duration in hours
pub fn (t Task) get_duration() f32 {
if t.start_date.unix == 0 || t.due_date.unix == 0 {
return t.estimated_hours
}
hours := f32((t.due_date.unix - t.start_date.unix) / 3600) // 3600 seconds in an hour
return hours
}
// // get_duration returns the planned duration in hours
// pub fn (t Task) get_duration() f32 {
// if t.start_date.unix == 0 || t.due_date.unix == 0 {
// return t.estimated_hours
// }
// hours := f32((t.due_date.unix - t.start_date.unix) / 3600) // 3600 seconds in an hour
// return hours
// }
// get_actual_duration returns the actual duration in hours
pub fn (t Task) get_actual_duration() f32 {
return t.actual_hours
}
// // get_actual_duration returns the actual duration in hours
// pub fn (t Task) get_actual_duration() f32 {
// return t.actual_hours
// }
// get_progress returns the task progress as a fraction between 0.0 and 1.0
pub fn (t Task) get_progress() f32 {
if t.estimated_hours == 0 {
match t.status {
.done { return 1.0 }
.in_progress { return 0.5 }
else { return 0.0 }
}
}
if t.actual_hours >= t.estimated_hours {
return 1.0
}
return t.actual_hours / t.estimated_hours
}
// // get_progress returns the task progress as a fraction between 0.0 and 1.0
// pub fn (t Task) get_progress() f32 {
// if t.estimated_hours == 0 {
// match t.status {
// .done { return 1.0 }
// .in_progress { return 0.5 }
// else { return 0.0 }
// }
// }
// get_remaining_work returns the estimated remaining work in hours
pub fn (t Task) get_remaining_work() f32 {
if t.remaining_hours > 0 {
return t.remaining_hours
}
if t.estimated_hours > t.actual_hours {
return t.estimated_hours - t.actual_hours
}
return 0
}
// if t.actual_hours >= t.estimated_hours {
// return 1.0
// }
// add_dependency adds a dependency to this task
pub fn (mut t Task) add_dependency(depends_on_task_id int, dep_type DependencyType, by_user_id int) {
// Check if dependency already exists
for dep in t.dependencies {
if dep.depends_on_task_id == depends_on_task_id {
return
}
}
t.dependencies << TaskDependency{
task_id: t.id
depends_on_task_id: depends_on_task_id
dependency_type: dep_type
created_at: time.now()
created_by: by_user_id
}
t.update_timestamp(by_user_id)
}
// return t.actual_hours / t.estimated_hours
// }
// remove_dependency removes a dependency from this task
pub fn (mut t Task) remove_dependency(depends_on_task_id int, by_user_id int) bool {
for i, dep in t.dependencies {
if dep.depends_on_task_id == depends_on_task_id {
t.dependencies.delete(i)
t.update_timestamp(by_user_id)
return true
}
}
return false
}
// // get_remaining_work returns the estimated remaining work in hours
// pub fn (t Task) get_remaining_work() f32 {
// if t.remaining_hours > 0 {
// return t.remaining_hours
// }
// add_blocker adds a task that blocks this task
pub fn (mut t Task) add_blocker(blocker_task_id int, by_user_id int) {
if blocker_task_id !in t.blocked_by {
t.blocked_by << blocker_task_id
t.update_timestamp(by_user_id)
}
}
// if t.estimated_hours > t.actual_hours {
// return t.estimated_hours - t.actual_hours
// }
// remove_blocker removes a blocking task
pub fn (mut t Task) remove_blocker(blocker_task_id int, by_user_id int) {
t.blocked_by = t.blocked_by.filter(it != blocker_task_id)
t.update_timestamp(by_user_id)
}
// return 0
// }
// add_subtask adds a subtask to this task
pub fn (mut t Task) add_subtask(subtask_id int, by_user_id int) {
if subtask_id !in t.subtasks {
t.subtasks << subtask_id
t.update_timestamp(by_user_id)
}
}
// // add_dependency adds a dependency to this task
// pub fn (mut t Task) add_dependency(depends_on_task_id int, dep_type DependencyType, by_user_id int) {
// // Check if dependency already exists
// for dep in t.dependencies {
// if dep.depends_on_task_id == depends_on_task_id {
// return
// }
// }
// remove_subtask removes a subtask from this task
pub fn (mut t Task) remove_subtask(subtask_id int, by_user_id int) {
t.subtasks = t.subtasks.filter(it != subtask_id)
t.update_timestamp(by_user_id)
}
// t.dependencies << TaskDependency{
// task_id: t.id
// depends_on_task_id: depends_on_task_id
// dependency_type: dep_type
// created_at: time.now()
// created_by: by_user_id
// }
// t.update_timestamp(by_user_id)
// }
// assign_to assigns the task to a user
pub fn (mut t Task) assign_to(user_id int, by_user_id int) {
t.assignee_id = user_id
t.update_timestamp(by_user_id)
}
// // remove_dependency removes a dependency from this task
// pub fn (mut t Task) remove_dependency(depends_on_task_id int, by_user_id int) bool {
// for i, dep in t.dependencies {
// if dep.depends_on_task_id == depends_on_task_id {
// t.dependencies.delete(i)
// t.update_timestamp(by_user_id)
// return true
// }
// }
// return false
// }
// unassign removes the assignee from the task
pub fn (mut t Task) unassign(by_user_id int) {
t.assignee_id = 0
t.update_timestamp(by_user_id)
}
// // add_blocker adds a task that blocks this task
// pub fn (mut t Task) add_blocker(blocker_task_id int, by_user_id int) {
// if blocker_task_id !in t.blocked_by {
// t.blocked_by << blocker_task_id
// t.update_timestamp(by_user_id)
// }
// }
// add_watcher adds a user to watch this task
pub fn (mut t Task) add_watcher(user_id int, by_user_id int) {
if user_id !in t.watchers {
t.watchers << user_id
t.update_timestamp(by_user_id)
}
}
// // remove_blocker removes a blocking task
// pub fn (mut t Task) remove_blocker(blocker_task_id int, by_user_id int) {
// t.blocked_by = t.blocked_by.filter(it != blocker_task_id)
// t.update_timestamp(by_user_id)
// }
// remove_watcher removes a user from watching this task
pub fn (mut t Task) remove_watcher(user_id int, by_user_id int) {
t.watchers = t.watchers.filter(it != user_id)
t.update_timestamp(by_user_id)
}
// // add_subtask adds a subtask to this task
// pub fn (mut t Task) add_subtask(subtask_id int, by_user_id int) {
// if subtask_id !in t.subtasks {
// t.subtasks << subtask_id
// t.update_timestamp(by_user_id)
// }
// }
// start_work starts work on the task
pub fn (mut t Task) start_work(by_user_id int) {
t.status = .in_progress
if t.start_date.unix == 0 {
t.start_date = time.now()
}
t.update_timestamp(by_user_id)
}
// // remove_subtask removes a subtask from this task
// pub fn (mut t Task) remove_subtask(subtask_id int, by_user_id int) {
// t.subtasks = t.subtasks.filter(it != subtask_id)
// t.update_timestamp(by_user_id)
// }
// complete_task marks the task as completed
pub fn (mut t Task) complete_task(by_user_id int) {
t.status = .done
t.completed_date = time.now()
t.remaining_hours = 0
t.update_timestamp(by_user_id)
}
// // assign_to assigns the task to a user
// pub fn (mut t Task) assign_to(user_id int, by_user_id int) {
// t.assignee_id = user_id
// t.update_timestamp(by_user_id)
// }
// reopen_task reopens a completed task
pub fn (mut t Task) reopen_task(by_user_id int) {
t.status = .todo
t.completed_date = time.Time{}
t.update_timestamp(by_user_id)
}
// // unassign removes the assignee from the task
// pub fn (mut t Task) unassign(by_user_id int) {
// t.assignee_id = 0
// t.update_timestamp(by_user_id)
// }
// cancel_task cancels the task
pub fn (mut t Task) cancel_task(by_user_id int) {
t.status = .cancelled
t.update_timestamp(by_user_id)
}
// // add_watcher adds a user to watch this task
// pub fn (mut t Task) add_watcher(user_id int, by_user_id int) {
// if user_id !in t.watchers {
// t.watchers << user_id
// t.update_timestamp(by_user_id)
// }
// }
// log_time adds a time entry to the task
pub fn (mut t Task) log_time(user_id int, hours f32, description string, date time.Time, by_user_id int) {
t.time_entries << TimeEntry{
user_id: user_id
hours: hours
description: description
date: date
created_at: time.now()
created_by: by_user_id
}
t.actual_hours += hours
t.update_timestamp(by_user_id)
}
// // remove_watcher removes a user from watching this task
// pub fn (mut t Task) remove_watcher(user_id int, by_user_id int) {
// t.watchers = t.watchers.filter(it != user_id)
// t.update_timestamp(by_user_id)
// }
// update_remaining_hours updates the remaining work estimate
pub fn (mut t Task) update_remaining_hours(hours f32, by_user_id int) {
t.remaining_hours = hours
t.update_timestamp(by_user_id)
}
// // start_work starts work on the task
// pub fn (mut t Task) start_work(by_user_id int) {
// t.status = .in_progress
// if t.start_date.unix == 0 {
// t.start_date = time.now()
// }
// t.update_timestamp(by_user_id)
// }
// add_comment adds a comment to the task
pub fn (mut t Task) add_comment(user_id int, content string, by_user_id int) {
t.comments << Comment{
user_id: user_id
content: content
created_at: time.now()
created_by: by_user_id
}
t.update_timestamp(by_user_id)
}
// // complete_task marks the task as completed
// pub fn (mut t Task) complete_task(by_user_id int) {
// t.status = .done
// t.completed_date = time.now()
// t.remaining_hours = 0
// t.update_timestamp(by_user_id)
// }
// add_attachment adds an attachment to the task
pub fn (mut t Task) add_attachment(filename string, file_path string, file_size int, mime_type string, by_user_id int) {
t.attachments << Attachment{
filename: filename
file_path: file_path
file_size: file_size
mime_type: mime_type
uploaded_at: time.now()
uploaded_by: by_user_id
}
t.update_timestamp(by_user_id)
}
// // reopen_task reopens a completed task
// pub fn (mut t Task) reopen_task(by_user_id int) {
// t.status = .todo
// t.completed_date = time.Time{}
// t.update_timestamp(by_user_id)
// }
// add_acceptance_criteria adds acceptance criteria to the task
pub fn (mut t Task) add_acceptance_criteria(criteria string, by_user_id int) {
t.acceptance_criteria << criteria
t.update_timestamp(by_user_id)
}
// // cancel_task cancels the task
// pub fn (mut t Task) cancel_task(by_user_id int) {
// t.status = .cancelled
// t.update_timestamp(by_user_id)
// }
// remove_acceptance_criteria removes acceptance criteria from the task
pub fn (mut t Task) remove_acceptance_criteria(index int, by_user_id int) {
if index >= 0 && index < t.acceptance_criteria.len {
t.acceptance_criteria.delete(index)
t.update_timestamp(by_user_id)
}
}
// // log_time adds a time entry to the task
// pub fn (mut t Task) log_time(user_id int, hours f32, description string, date time.Time, by_user_id int) {
// t.time_entries << TimeEntry{
// user_id: user_id
// hours: hours
// description: description
// date: date
// created_at: time.now()
// created_by: by_user_id
// }
// t.actual_hours += hours
// t.update_timestamp(by_user_id)
// }
// set_story_points sets the story points for the task
pub fn (mut t Task) set_story_points(points int, by_user_id int) {
t.story_points = points
t.update_timestamp(by_user_id)
}
// // update_remaining_hours updates the remaining work estimate
// pub fn (mut t Task) update_remaining_hours(hours f32, by_user_id int) {
// t.remaining_hours = hours
// t.update_timestamp(by_user_id)
// }
// set_due_date sets the due date for the task
pub fn (mut t Task) set_due_date(due_date time.Time, by_user_id int) {
t.due_date = due_date
t.update_timestamp(by_user_id)
}
// // add_comment adds a comment to the task
// pub fn (mut t Task) add_comment(user_id int, content string, by_user_id int) {
// t.comments << Comment{
// user_id: user_id
// content: content
// created_at: time.now()
// created_by: by_user_id
// }
// t.update_timestamp(by_user_id)
// }
// calculate_velocity returns the velocity (story points / actual hours)
pub fn (t Task) calculate_velocity() f32 {
if t.actual_hours == 0 || t.story_points == 0 {
return 0
}
return f32(t.story_points) / t.actual_hours
}
// // add_attachment adds an attachment to the task
// pub fn (mut t Task) add_attachment(filename string, file_path string, file_size int, mime_type string, by_user_id int) {
// t.attachments << Attachment{
// filename: filename
// file_path: file_path
// file_size: file_size
// mime_type: mime_type
// uploaded_at: time.now()
// uploaded_by: by_user_id
// }
// t.update_timestamp(by_user_id)
// }
// is_bug checks if the task is a bug
pub fn (t Task) is_bug() bool {
return t.task_type == .bug
}
// // add_acceptance_criteria adds acceptance criteria to the task
// pub fn (mut t Task) add_acceptance_criteria(criteria string, by_user_id int) {
// t.acceptance_criteria << criteria
// t.update_timestamp(by_user_id)
// }
// is_story checks if the task is a user story
pub fn (t Task) is_story() bool {
return t.task_type == .story
}
// // remove_acceptance_criteria removes acceptance criteria from the task
// pub fn (mut t Task) remove_acceptance_criteria(index int, by_user_id int) {
// if index >= 0 && index < t.acceptance_criteria.len {
// t.acceptance_criteria.delete(index)
// t.update_timestamp(by_user_id)
// }
// }
// is_epic checks if the task is an epic
pub fn (t Task) is_epic() bool {
return t.task_type == .epic
}
// // set_story_points sets the story points for the task
// pub fn (mut t Task) set_story_points(points int, by_user_id int) {
// t.story_points = points
// t.update_timestamp(by_user_id)
// }
// get_age returns the age of the task in days
pub fn (t Task) get_age() int {
return int((time.now().unix - t.created_at.unix) / 86400)
}
// // set_due_date sets the due date for the task
// pub fn (mut t Task) set_due_date(due_date time.Time, by_user_id int) {
// t.due_date = due_date
// t.update_timestamp(by_user_id)
// }
// get_cycle_time returns the cycle time (time from start to completion) in hours
pub fn (t Task) get_cycle_time() f32 {
if t.start_date.unix == 0 || t.completed_date.unix == 0 {
return 0
}
return f32((t.completed_date.unix - t.start_date.unix) / 3600)
}
// // calculate_velocity returns the velocity (story points / actual hours)
// pub fn (t Task) calculate_velocity() f32 {
// if t.actual_hours == 0 || t.story_points == 0 {
// return 0
// }
// return f32(t.story_points) / t.actual_hours
// }
// get_lead_time returns the lead time (time from creation to completion) in hours
pub fn (t Task) get_lead_time() f32 {
if t.completed_date.unix == 0 {
return 0
}
return f32((t.completed_date.unix - t.created_at.unix) / 3600)
}
// // is_bug checks if the task is a bug
// pub fn (t Task) is_bug() bool {
// return t.task_type == .bug
// }
// // is_story checks if the task is a user story
// pub fn (t Task) is_story() bool {
// return t.task_type == .story
// }
// // is_epic checks if the task is an epic
// pub fn (t Task) is_epic() bool {
// return t.task_type == .epic
// }
// // get_age returns the age of the task in days
// pub fn (t Task) get_age() int {
// return int((time.now().unix - t.created_at.unix) / 86400)
// }
// // get_cycle_time returns the cycle time (time from start to completion) in hours
// pub fn (t Task) get_cycle_time() f32 {
// if t.start_date.unix == 0 || t.completed_date.unix == 0 {
// return 0
// }
// return f32((t.completed_date.unix - t.start_date.unix) / 3600)
// }
// // get_lead_time returns the lead time (time from creation to completion) in hours
// pub fn (t Task) get_lead_time() f32 {
// if t.completed_date.unix == 0 {
// return 0
// }
// return f32((t.completed_date.unix - t.created_at.unix) / 3600)
// }
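
A minimal usage sketch for the task helpers above. It assumes the `Task` model can be initialized with just the field set below (any required fields not visible in this hunk are ignored), and `.finish_to_start` is an assumed `DependencyType` variant; all ids and hours are placeholders.

```v
import time

fn demo_task_flow() {
	// assumption: Task needs no fields beyond the one set here
	mut t := Task{
		estimated_hours: 8.0
	}
	t.add_dependency(2, .finish_to_start, 42) // .finish_to_start is an assumed variant
	t.assign_to(7, 42)
	t.set_story_points(5, 42)
	t.start_work(7)
	t.log_time(7, 3.5, 'initial implementation', time.now(), 7)
	t.complete_task(7)
	println('velocity: ${t.calculate_velocity()}')
	println('lead time (h): ${t.get_lead_time()}')
}
```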

View File

@@ -6,32 +6,32 @@ import time
pub struct Team {
BaseModel
pub mut:
name string @[required]
description string
team_type TeamType
status TeamStatus
manager_id int // Team manager/lead
members []TeamMember
projects []int // Project IDs this team works on
skills []TeamSkill // Skills available in this team
capacity TeamCapacity
location string
time_zone string
working_hours WorkingHours
holidays []Holiday
rituals []TeamRitual
goals []TeamGoal
metrics []TeamMetric
budget f64 // Team budget
cost_per_hour f64 // Average cost per hour
name string @[required]
description string
team_type TeamType
status TeamStatus
manager_id int // Team manager/lead
members []TeamMember
projects []int // Project IDs this team works on
skills []TeamSkill // Skills available in this team
capacity TeamCapacity
location string
time_zone string
working_hours WorkingHours
holidays []Holiday
rituals []TeamRitual
goals []TeamGoal
metrics []TeamMetric
budget f64 // Team budget
cost_per_hour f64 // Average cost per hour
utilization_target f32 // Target utilization percentage
velocity_target int // Target velocity (story points per sprint)
slack_channel string
email_list string
wiki_url string
repository_urls []string
tools []TeamTool
custom_fields map[string]string
velocity_target int // Target velocity (story points per sprint)
slack_channel string
email_list string
wiki_url string
repository_urls []string
tools []TeamTool
custom_fields map[string]string
}
// TeamType for categorizing teams
@@ -64,22 +64,22 @@ pub enum TeamStatus {
// TeamMember represents a user's membership in a team
pub struct TeamMember {
pub mut:
user_id int
team_id int
role string
permissions []string
capacity_hours f32 // Weekly capacity in hours
allocation f32 // Percentage allocation to this team (0.0 to 1.0)
hourly_rate f64 // Member's hourly rate
start_date time.Time
end_date time.Time // For temporary members
status MemberStatus
skills []int // Skill IDs
certifications []string
seniority_level SeniorityLevel
user_id int
team_id int
role string
permissions []string
capacity_hours f32 // Weekly capacity in hours
allocation f32 // Percentage allocation to this team (0.0 to 1.0)
hourly_rate f64 // Member's hourly rate
start_date time.Time
end_date time.Time // For temporary members
status MemberStatus
skills []int // Skill IDs
certifications []string
seniority_level SeniorityLevel
performance_rating f32 // 1.0 to 5.0 scale
last_review time.Time
notes string
last_review time.Time
notes string
}
// MemberStatus for team member status
@@ -106,43 +106,43 @@ pub enum SeniorityLevel {
// TeamSkill represents a skill available in the team
pub struct TeamSkill {
pub mut:
skill_id int
team_id int
skill_name string
category string
skill_id int
team_id int
skill_name string
category string
proficiency_levels map[int]SkillLevel // user_id -> proficiency level
demand f32 // How much this skill is needed (0.0 to 1.0)
supply f32 // How much this skill is available (0.0 to 1.0)
gap f32 // Skill gap (demand - supply)
training_plan string
demand f32 // How much this skill is needed (0.0 to 1.0)
supply f32 // How much this skill is available (0.0 to 1.0)
gap f32 // Skill gap (demand - supply)
training_plan string
}
// TeamCapacity represents team capacity planning
pub struct TeamCapacity {
pub mut:
total_hours_per_week f32
total_hours_per_week f32
available_hours_per_week f32
committed_hours_per_week f32
utilization_percentage f32
velocity_last_sprint int
velocity_average int
velocity_trend f32 // Positive = improving, negative = declining
capacity_by_skill map[string]f32 // skill -> available hours
capacity_forecast []CapacityForecast
utilization_percentage f32
velocity_last_sprint int
velocity_average int
velocity_trend f32 // Positive = improving, negative = declining
capacity_by_skill map[string]f32 // skill -> available hours
capacity_forecast []CapacityForecast
}
// CapacityForecast for future capacity planning
pub struct CapacityForecast {
pub mut:
period_start time.Time
period_end time.Time
forecast_type ForecastType
total_capacity f32
period_start time.Time
period_end time.Time
forecast_type ForecastType
total_capacity f32
available_capacity f32
planned_allocation f32
confidence_level f32 // 0.0 to 1.0
assumptions []string
risks []string
confidence_level f32 // 0.0 to 1.0
assumptions []string
risks []string
}
// ForecastType for capacity forecasting
@@ -156,25 +156,25 @@ pub enum ForecastType {
// WorkingHours represents team working schedule
pub struct WorkingHours {
pub mut:
monday_start string // "09:00"
monday_end string // "17:00"
tuesday_start string
tuesday_end string
wednesday_start string
wednesday_end string
thursday_start string
thursday_end string
friday_start string
friday_end string
saturday_start string
saturday_end string
sunday_start string
sunday_end string
break_duration int // Minutes
lunch_duration int // Minutes
flexible_hours bool
monday_start string // "09:00"
monday_end string // "17:00"
tuesday_start string
tuesday_end string
wednesday_start string
wednesday_end string
thursday_start string
thursday_end string
friday_start string
friday_end string
saturday_start string
saturday_end string
sunday_start string
sunday_end string
break_duration int // Minutes
lunch_duration int // Minutes
flexible_hours bool
core_hours_start string
core_hours_end string
core_hours_end string
}
// Holiday represents team holidays and time off
@@ -184,7 +184,7 @@ pub mut:
date time.Time
end_date time.Time // For multi-day holidays
holiday_type HolidayType
affects_members []int // User IDs affected (empty = all)
affects_members []int // User IDs affected (empty = all)
description string
}
@@ -203,22 +203,22 @@ pub enum HolidayType {
// TeamRitual represents recurring team activities
pub struct TeamRitual {
pub mut:
id int
team_id int
name string
description string
ritual_type RitualType
frequency RitualFrequency
id int
team_id int
name string
description string
ritual_type RitualType
frequency RitualFrequency
duration_minutes int
participants []int // User IDs
facilitator_id int
location string
virtual_link string
agenda string
outcomes []string
next_occurrence time.Time
last_occurrence time.Time
active bool
participants []int // User IDs
facilitator_id int
location string
virtual_link string
agenda string
outcomes []string
next_occurrence time.Time
last_occurrence time.Time
active bool
}
// RitualType for categorizing team rituals
@@ -248,20 +248,20 @@ pub enum RitualFrequency {
// TeamGoal represents team objectives
pub struct TeamGoal {
pub mut:
id int
team_id int
title string
description string
goal_type GoalType
target_value f64
current_value f64
unit string
start_date time.Time
target_date time.Time
status GoalStatus
owner_id int
progress f32 // 0.0 to 1.0
milestones []GoalMilestone
id int
team_id int
title string
description string
goal_type GoalType
target_value f64
current_value f64
unit string
start_date time.Time
target_date time.Time
status GoalStatus
owner_id int
progress f32 // 0.0 to 1.0
milestones []GoalMilestone
success_criteria []string
}
@@ -290,50 +290,50 @@ pub enum GoalStatus {
// GoalMilestone represents milestones within team goals
pub struct GoalMilestone {
pub mut:
title string
target_date time.Time
target_value f64
achieved bool
achieved_date time.Time
achieved_value f64
title string
target_date time.Time
target_value f64
achieved bool
achieved_date time.Time
achieved_value f64
}
// TeamMetric represents team performance metrics
pub struct TeamMetric {
pub mut:
id int
team_id int
name string
description string
metric_type MetricType
current_value f64
target_value f64
unit string
trend f32 // Positive = improving
last_updated time.Time
history []MetricDataPoint
benchmark f64 // Industry/company benchmark
id int
team_id int
name string
description string
metric_type MetricType
current_value f64
target_value f64
unit string
trend f32 // Positive = improving
last_updated time.Time
history []MetricDataPoint
benchmark f64 // Industry/company benchmark
}
// MetricDataPoint for metric history
pub struct MetricDataPoint {
pub mut:
timestamp time.Time
value f64
period string // "2024-Q1", "2024-01", etc.
timestamp time.Time
value f64
period string // "2024-Q1", "2024-01", etc.
}
// TeamTool represents tools used by the team
pub struct TeamTool {
pub mut:
name string
category ToolCategory
url string
description string
cost_per_month f64
licenses int
admin_contact string
renewal_date time.Time
name string
category ToolCategory
url string
description string
cost_per_month f64
licenses int
admin_contact string
renewal_date time.Time
satisfaction_rating f32 // 1.0 to 5.0
}
@@ -388,7 +388,7 @@ pub fn (t Team) get_average_seniority() f32 {
if active_members.len == 0 {
return 0
}
mut total := f32(0)
for member in active_members {
match member.seniority_level {
@@ -401,7 +401,7 @@ pub fn (t Team) get_average_seniority() f32 {
.architect { total += 7 }
}
}
return total / f32(active_members.len)
}
@@ -421,17 +421,17 @@ pub fn (mut t Team) add_member(user_id int, role string, capacity_hours f32, all
return
}
}
// Add new member
t.members << TeamMember{
user_id: user_id
team_id: t.id
role: role
capacity_hours: capacity_hours
allocation: allocation
hourly_rate: hourly_rate
start_date: time.now()
status: .active
user_id: user_id
team_id: t.id
role: role
capacity_hours: capacity_hours
allocation: allocation
hourly_rate: hourly_rate
start_date: time.now()
status: .active
seniority_level: seniority_level
}
t.update_timestamp(by_user_id)
@@ -471,13 +471,13 @@ pub fn (mut t Team) add_skill(skill_id int, skill_name string, category string,
return
}
}
t.skills << TeamSkill{
skill_id: skill_id
team_id: t.id
skill_name: skill_name
category: category
demand: demand
skill_id: skill_id
team_id: t.id
skill_name: skill_name
category: category
demand: demand
proficiency_levels: map[int]SkillLevel{}
}
t.update_timestamp(by_user_id)
@@ -497,17 +497,17 @@ pub fn (mut t Team) update_skill_proficiency(skill_id int, user_id int, level Sk
// add_goal adds a goal to the team
pub fn (mut t Team) add_goal(title string, description string, goal_type GoalType, target_value f64, unit string, target_date time.Time, owner_id int, by_user_id int) {
t.goals << TeamGoal{
id: t.goals.len + 1
team_id: t.id
title: title
description: description
goal_type: goal_type
id: t.goals.len + 1
team_id: t.id
title: title
description: description
goal_type: goal_type
target_value: target_value
unit: unit
start_date: time.now()
target_date: target_date
status: .active
owner_id: owner_id
unit: unit
start_date: time.now()
target_date: target_date
status: .active
owner_id: owner_id
}
t.update_timestamp(by_user_id)
}
@@ -532,15 +532,15 @@ pub fn (mut t Team) update_goal_progress(goal_id int, current_value f64, by_user
// add_ritual adds a recurring ritual to the team
pub fn (mut t Team) add_ritual(name string, description string, ritual_type RitualType, frequency RitualFrequency, duration_minutes int, facilitator_id int, by_user_id int) {
t.rituals << TeamRitual{
id: t.rituals.len + 1
team_id: t.id
name: name
description: description
ritual_type: ritual_type
frequency: frequency
id: t.rituals.len + 1
team_id: t.id
name: name
description: description
ritual_type: ritual_type
frequency: frequency
duration_minutes: duration_minutes
facilitator_id: facilitator_id
active: true
facilitator_id: facilitator_id
active: true
}
t.update_timestamp(by_user_id)
}
@@ -548,7 +548,7 @@ pub fn (mut t Team) add_ritual(name string, description string, ritual_type Ritu
// calculate_team_health returns a team health score
pub fn (t Team) calculate_team_health() f32 {
mut score := f32(1.0)
// Utilization health (25% weight)
utilization := t.get_utilization()
if utilization < 70 || utilization > 90 {
@@ -558,12 +558,12 @@ pub fn (t Team) calculate_team_health() f32 {
score -= 0.25 * (utilization - 90) / 90
}
}
// Velocity trend (25% weight)
if t.capacity.velocity_trend < -0.1 {
score -= 0.25 * (-t.capacity.velocity_trend)
}
// Goal achievement (25% weight)
active_goals := t.goals.filter(it.status == .active)
if active_goals.len > 0 {
@@ -576,17 +576,17 @@ pub fn (t Team) calculate_team_health() f32 {
score -= 0.25 * (0.7 - avg_progress)
}
}
// Team stability (25% weight)
active_members := t.members.filter(it.status == .active)
if active_members.len < 3 {
score -= 0.25 * (3 - f32(active_members.len)) / 3
}
if score < 0 {
score = 0
}
return score
}
@@ -619,18 +619,19 @@ pub fn (t Team) get_cost_per_week() f64 {
// forecast_capacity forecasts team capacity for a future period
pub fn (t Team) forecast_capacity(start_date time.Time, end_date time.Time, forecast_type ForecastType) CapacityForecast {
current_capacity := t.get_total_capacity()
// Simple forecast based on current capacity
// In a real implementation, this would consider planned hires, departures, etc.
return CapacityForecast{
period_start: start_date
period_end: end_date
forecast_type: forecast_type
total_capacity: current_capacity
period_start: start_date
period_end: end_date
forecast_type: forecast_type
total_capacity: current_capacity
available_capacity: t.get_available_capacity()
planned_allocation: t.capacity.committed_hours_per_week
confidence_level: 0.8
assumptions: ['Current team composition remains stable', 'No major holidays or time off']
risks: ['Team member departures', 'Increased project demands']
confidence_level: 0.8
assumptions: ['Current team composition remains stable',
'No major holidays or time off']
risks: ['Team member departures', 'Increased project demands']
}
}
}
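
A capacity sketch built only on the team methods shown above. It assumes `Team` can be initialized with just a name, and `.short_term` stands in for one of the `ForecastType` variants, which are not listed in this hunk.

```v
import time

fn demo_team_forecast() {
	// assumption: no required Team fields beyond name
	team := Team{
		name: 'platform'
	}
	println('health score: ${team.calculate_team_health():.2f}')
	// .short_term is an assumed ForecastType variant
	fc := team.forecast_capacity(time.now(), time.now().add_days(28), .short_term)
	println('available capacity: ${fc.available_capacity} h/week')
	assumptions := fc.assumptions.join(', ')
	println('assumptions: ${assumptions}')
}
```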

View File

@@ -6,28 +6,28 @@ import time
pub struct User {
BaseModel
pub mut:
username string @[required; unique]
email string @[required; unique]
first_name string @[required]
last_name string @[required]
display_name string
avatar_url string
role UserRole
status UserStatus
timezone string = 'UTC'
preferences UserPreferences
teams []int // Team IDs this user belongs to
skills []string
hourly_rate f64
hire_date time.Time
last_login time.Time
password_hash string // For authentication
phone string
mobile string
department string
job_title string
manager_id int // User ID of manager
reports []int // User IDs of direct reports
username string @[required; unique]
email string @[required; unique]
first_name string @[required]
last_name string @[required]
display_name string
avatar_url string
role UserRole
status UserStatus
timezone string = 'UTC'
preferences UserPreferences
teams []int // Team IDs this user belongs to
skills []string
hourly_rate f64
hire_date time.Time
last_login time.Time
password_hash string // For authentication
phone string
mobile string
department string
job_title string
manager_id int // User ID of manager
reports []int // User IDs of direct reports
}
// get_full_name returns the user's full name
@@ -159,9 +159,10 @@ pub fn (u User) calculate_total_hours(start_date time.Time, end_date time.Time,
pub fn (u User) calculate_billable_hours(start_date time.Time, end_date time.Time, time_entries []TimeEntry) f32 {
mut total := f32(0)
for entry in time_entries {
if entry.user_id == u.id && entry.billable && entry.start_time >= start_date && entry.end_time <= end_date {
if entry.user_id == u.id && entry.billable && entry.start_time >= start_date
&& entry.end_time <= end_date {
total += entry.duration
}
}
return total
}
}
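
A reporting sketch for the two hour calculations above. It assumes `calculate_total_hours` takes the same arguments as `calculate_billable_hours` (its signature is truncated in this hunk) and that the `TimeEntry` values carry the `billable`, `start_time`, `end_time` and `duration` fields referenced here.

```v
import time

fn report_hours(u User, entries []TimeEntry) {
	week_ago := time.now().add_days(-7)
	billable := u.calculate_billable_hours(week_ago, time.now(), entries)
	total := u.calculate_total_hours(week_ago, time.now(), entries)
	println('${u.get_full_name()}: ${billable}h billable of ${total}h logged this week')
}
```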

View File

@@ -15,4 +15,4 @@ pub mut:
// Error implements the error interface for ZinitError
pub fn (e ZinitError) msg() string {
return 'Zinit Error ${e.code}: ${e.message} - ${e.data}'
}
}

View File

@@ -8,14 +8,14 @@ mut:
rpc_client &jsonrpc.Client
}
@[params]
pub struct ClientParams {
path string = '/tmp/zinit.sock' // Path to the Zinit RPC socket
path string = '/tmp/zinit.sock' // Path to the Zinit RPC socket
}
// new_client creates a new Zinit RPC client with a custom socket path
pub fn new_client(args_ ClientParams) &Client {
mut args:=args_
mut args := args_
mut cl := jsonrpc.new_unix_socket_client(args.path)
return &Client{
rpc_client: cl

View File

@@ -1,6 +1,5 @@
module zinit
// ServiceCreateResponse represents the response from service_create
pub struct ServiceCreateResponse {
pub mut:
@@ -31,57 +30,44 @@ pub mut:
subscription_id string // ID of the log subscription
}
// Module version information
pub const (
version = '1.0.0'
author = 'Hero Code'
license = 'MIT'
)
pub const version = '1.0.0'
pub const author = 'Hero Code'
pub const license = 'MIT'
// Default socket path for zinit
pub const default_socket_path = '/tmp/zinit.sock'
// Common service states
pub const (
state_running = 'Running'
state_success = 'Success'
state_error = 'Error'
state_stopped = 'Stopped'
state_failed = 'Failed'
)
pub const state_running = 'Running'
pub const state_success = 'Success'
pub const state_error = 'Error'
pub const state_stopped = 'Stopped'
pub const state_failed = 'Failed'
// Common service targets
pub const (
target_up = 'Up'
target_down = 'Down'
)
pub const target_up = 'Up'
pub const target_down = 'Down'
// Common log types
pub const (
log_null = 'null'
log_ring = 'ring'
log_stdout = 'stdout'
)
pub const log_null = 'null'
pub const log_ring = 'ring'
pub const log_stdout = 'stdout'
// Common signals
pub const (
signal_term = 'SIGTERM'
signal_kill = 'SIGKILL'
signal_hup = 'SIGHUP'
signal_usr1 = 'SIGUSR1'
signal_usr2 = 'SIGUSR2'
)
pub const signal_term = 'SIGTERM'
pub const signal_kill = 'SIGKILL'
pub const signal_hup = 'SIGHUP'
pub const signal_usr1 = 'SIGUSR1'
pub const signal_usr2 = 'SIGUSR2'
// JSON-RPC error codes as defined in the OpenRPC specification
pub const (
error_service_not_found = -32000
error_service_already_monitored = -32001
error_service_is_up = -32002
error_service_is_down = -32003
error_invalid_signal = -32004
error_config_error = -32005
error_shutting_down = -32006
error_service_already_exists = -32007
error_service_file_error = -32008
)
pub const error_service_not_found = -32000
pub const error_service_already_monitored = -32001
pub const error_service_is_up = -32002
pub const error_service_is_down = -32003
pub const error_invalid_signal = -32004
pub const error_config_error = -32005
pub const error_shutting_down = -32006
pub const error_service_already_exists = -32007
pub const error_service_file_error = -32008
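
Two small helpers sketching how the flattened constants above might be consumed; they are assumed to live in the same `zinit` module.

```v
// treat a reported state as healthy only for the running/success constants above
fn is_healthy_state(state string) bool {
	return state in [state_running, state_success]
}

// rough human-readable label for a JSON-RPC error code
fn error_label(code int) string {
	if code == error_service_not_found {
		return 'service not found'
	}
	if code == error_invalid_signal {
		return 'invalid signal'
	}
	if code == error_service_already_exists {
		return 'service already exists'
	}
	return 'zinit error ${code}'
}
```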

View File

@@ -20,14 +20,12 @@ pub:
signal string // Signal to send (e.g., SIGTERM, SIGKILL)
}
// RpcDiscoverResponse represents the response from rpc.discover
pub struct RpcDiscoverResponse {
pub mut:
spec map[string]string // OpenRPC specification
}
// rpc_discover returns the OpenRPC specification for the API
pub fn (mut c Client) rpc_discover() !RpcDiscoverResponse {
request := jsonrpc.new_request_generic('rpc.discover', []string{})
@@ -37,8 +35,6 @@ pub fn (mut c Client) rpc_discover() !RpcDiscoverResponse {
}
}
// // Response Models for Zinit API
// //
// // This file contains all the response models used by the Zinit API.
@@ -54,7 +50,7 @@ pub fn (mut c Client) rpc_discover() !RpcDiscoverResponse {
// service_list lists all services managed by Zinit
// Returns a map of service names to their current states
pub fn (mut c Client) service_list() !map[string]string {
request := jsonrpc.new_request_generic('service_list',map[string]string )
request := jsonrpc.new_request_generic('service_list', map[string]string{})
services := c.rpc_client.send[map[string]string, map[string]string](request)!
// return ServiceListResponse{
// services: services
@@ -76,7 +72,7 @@ pub mut:
// name: the name of the service
pub fn (mut c Client) service_status(name string) !ServiceStatusResponse {
request := jsonrpc.new_request_generic('service_status', name)
// Use a direct struct mapping instead of manual conversion
return c.rpc_client.send[string, ServiceStatusResponse](request)!
}
@@ -121,39 +117,36 @@ pub fn (mut c Client) service_forget(name string) ! {
c.rpc_client.send[string, string](request)!
}
//TODO: make sure the signal is a valid signal and enumerator do as @[params] so its optional
// TODO: make sure the signal is valid; model it as an enum and expose it via @[params] so it is optional
// service_kill sends a signal to a running service
// name: the name of the service to send the signal to
// signal: the signal to send (e.g., SIGTERM, SIGKILL)
pub fn (mut c Client) service_kill(name string, signal string) ! {
params := KillParams{
name: name
name: name
signal: signal
}
request := jsonrpc.new_request_generic('service_kill', params)
c.rpc_client.send[KillParams, string](request)!
}
// CreateServiceParams represents the parameters for the service_create method
struct CreateServiceParams {
name string // Name of the service to create
content ServiceConfig // Configuration for the service
}
// service_create creates a new service configuration file
// name: the name of the service to create
// config: the service configuration
pub fn (mut c Client) service_create(name string, config ServiceConfig) !ServiceCreateResponse {
params := CreateServiceParams{
name: name
name: name
content: config
}
request := jsonrpc.new_request_generic('service_create', params)
path := c.rpc_client.send[CreateServiceParams, string](request)!
return ServiceCreateResponse{
@@ -164,18 +157,19 @@ pub fn (mut c Client) service_create(name string, config ServiceConfig) !Service
// service_get gets a service configuration file
// name: the name of the service to get
pub fn (mut c Client) service_get(name string) !ServiceConfigResponse {
request := jsonrpc.new_request_generic('service_get', {
'name': name
})
request := jsonrpc.new_request_generic('service_get', {"name":name})
// We need to handle the conversion from ServiceConfig to ServiceConfigResponse
config := c.rpc_client.send[map[string]string, ServiceConfig](request)!
return ServiceConfigResponse{
exec: config.exec
oneshot: config.oneshot
after: config.after
log: config.log
env: config.env
exec: config.exec
oneshot: config.oneshot
after: config.after
log: config.log
env: config.env
shutdown_timeout: config.shutdown_timeout
}
}
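
A usage sketch for the client methods above, assumed to sit inside the same module so no import is needed; `my_service` is a placeholder name and `signal_term` comes from the module constants.

```v
fn demo_client() ! {
	mut cl := new_client(path: '/tmp/zinit.sock')
	services := cl.service_list()!
	for name, state in services {
		println('${name}: ${state}')
	}
	// sending a signal may fail if the placeholder service does not exist
	cl.service_kill('my_service', signal_term) or { println('kill failed: ${err}') }
}
```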

View File

@@ -1,6 +1,5 @@
module zinit
pub struct ServiceConfigResponse {
pub mut:
exec string // Command to run
@@ -11,14 +10,13 @@ pub mut:
shutdown_timeout int // Maximum time to wait for service to stop during shutdown
}
// Helper function to create a basic service configuration
pub fn new_service_config(exec string) ServiceConfig {
return ServiceConfig{
exec: exec
oneshot: false
log: log_stdout
env: map[string]string{}
exec: exec
oneshot: false
log: log_stdout
env: map[string]string{}
shutdown_timeout: 30
}
}
@@ -26,10 +24,10 @@ pub fn new_service_config(exec string) ServiceConfig {
// Helper function to create a oneshot service configuration
pub fn new_oneshot_service_config(exec string) ServiceConfig {
return ServiceConfig{
exec: exec
oneshot: true
log: log_stdout
env: map[string]string{}
exec: exec
oneshot: true
log: log_stdout
env: map[string]string{}
shutdown_timeout: 30
}
}
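
A sketch combining these helpers with `service_create`; assumed to run inside the same module, with a placeholder service name and command.

```v
fn demo_create(mut cl Client) ! {
	// long-running service using the defaults from new_service_config
	mut cfg := new_service_config('redis-server --port 7777')
	cfg.env['REDIS_MAXMEMORY'] = '256mb'
	resp := cl.service_create('redis_test', cfg)!
	println('created: ${resp}')
	// one-shot variant for a job that runs once and exits
	job := new_oneshot_service_config('echo hello')
	resp2 := cl.service_create('hello_once', job)!
	println('created: ${resp2}')
}
```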

View File

@@ -5,19 +5,19 @@ import freeflowuniverse.herolib.schemas.jsonrpc
// ServiceStatsResponse represents the response from service_stats
pub struct ServiceStatsResponse {
pub mut:
name string // Service name
pid int // Process ID of the service
memory_usage i64 // Memory usage in bytes
cpu_usage f64 // CPU usage as a percentage (0-100)
children []ChildStatsResponse // Stats for child processes
name string // Service name
pid int // Process ID of the service
memory_usage i64 // Memory usage in bytes
cpu_usage f64 // CPU usage as a percentage (0-100)
children []ChildStatsResponse // Stats for child processes
}
// ChildStatsResponse represents statistics for a child process
pub struct ChildStatsResponse {
pub mut:
pid int // Process ID of the child process
memory_usage i64 // Memory usage in bytes
cpu_usage f64 // CPU usage as a percentage (0-100)
pid int // Process ID of the child process
memory_usage i64 // Memory usage in bytes
cpu_usage f64 // CPU usage as a percentage (0-100)
}
// Serv
@@ -26,19 +26,19 @@ pub mut:
// name: the name of the service to get stats for
pub fn (mut c Client) service_stats(name string) !ServiceStatsResponse {
request := jsonrpc.new_request_generic('service_stats', name)
// We need to handle the conversion from the raw response to our model
raw_stats := c.rpc_client.send[string, map[string]string](request)!
// Parse the raw stats into our response model
mut children := []ChildStatsResponse{}
// In a real implementation, we would parse the children from the raw response
return ServiceStatsResponse{
name: raw_stats['name'] or { '' }
pid: raw_stats['pid'].int()
name: raw_stats['name'] or { '' }
pid: raw_stats['pid'].int()
memory_usage: raw_stats['memory_usage'].i64()
cpu_usage: raw_stats['cpu_usage'].f64()
children: children
cpu_usage: raw_stats['cpu_usage'].f64()
children: children
}
}

View File

@@ -2,7 +2,6 @@ module zinit
import freeflowuniverse.herolib.schemas.jsonrpc
// system_shutdown stops all services and powers off the system
pub fn (mut c Client) system_shutdown() ! {
request := jsonrpc.new_request_generic('system_shutdown', []string{})
@@ -15,7 +14,6 @@ pub fn (mut c Client) system_reboot() ! {
c.rpc_client.send[[]string, string](request)!
}
// system_start_http_server starts an HTTP/RPC server at the specified address
// address: the network address to bind the server to (e.g., '127.0.0.1:8080')
pub fn (mut c Client) system_start_http_server(address string) !SystemStartHttpServerResponse {
@@ -34,16 +32,18 @@ pub fn (mut c Client) system_stop_http_server() ! {
@[params]
pub struct LogParams {
name string
name string
}
// stream_current_logs gets current logs from zinit and monitored services
// name: optional service name filter. If provided, only logs from this service will be returned
pub fn (mut c Client) stream_current_logs(args LogParams) ![]string {
mut logs := []string{}
if args.name != '' {
request := jsonrpc.new_request_generic('stream_currentLogs', {"name":args.name})
request := jsonrpc.new_request_generic('stream_currentLogs', {
'name': args.name
})
logs = c.rpc_client.send[map[string]string, map[string]string](request)!
} else {
request := jsonrpc.new_request_generic('stream_currentLogs', map[string]string{})
@@ -56,7 +56,7 @@ pub fn (mut c Client) stream_current_logs(args LogParams) ![]string {
// name: optional service name filter. If provided, only logs from this service will be returned
pub fn (mut c Client) stream_subscribe_logs(name ?string) !StreamSubscribeLogsResponse {
mut subscription_id := ''
if service_name := name {
request := jsonrpc.new_request_generic('stream_subscribeLogs', service_name)
subscription_id = c.rpc_client.send[string, string](request)!
@@ -64,8 +64,8 @@ pub fn (mut c Client) stream_subscribe_logs(name ?string) !StreamSubscribeLogsRe
request := jsonrpc.new_request_generic('stream_subscribeLogs', []string{})
subscription_id = c.rpc_client.send[[]string, string](request)!
}
return StreamSubscribeLogsResponse{
subscription_id: subscription_id
}
}
}
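
A log-reading sketch for the stream methods above, assumed to run inside the same module; the service name is a placeholder.

```v
fn demo_logs(mut cl Client) ! {
	// all current logs, then only the logs of one service
	all_logs := cl.stream_current_logs()!
	println('got ${all_logs.len} log lines')
	svc_logs := cl.stream_current_logs(name: 'my_service')!
	for line in svc_logs {
		println(line)
	}
	// subscribing returns an id identifying the log subscription
	sub := cl.stream_subscribe_logs('my_service')!
	println('subscription id: ${sub.subscription_id}')
}
```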

View File

@@ -1,6 +1,5 @@
module zinit
// Helper function to format memory usage in human-readable format
pub fn format_memory_usage(bytes i64) string {
if bytes < 1024 {
@@ -17,4 +16,4 @@ pub fn format_memory_usage(bytes i64) string {
// Helper function to format CPU usage
pub fn format_cpu_usage(cpu_percent f64) string {
return '${cpu_percent:.1f}%'
}
}
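
A sketch tying `service_stats` (from the stats file above) to these formatting helpers; assumed to run inside the same module.

```v
fn print_stats(mut cl Client, name string) ! {
	stats := cl.service_stats(name)!
	println('${stats.name} (pid ${stats.pid})')
	println('memory: ${format_memory_usage(stats.memory_usage)}')
	println('cpu: ${format_cpu_usage(stats.cpu_usage)}')
}
```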

View File

@@ -124,3 +124,48 @@ Each Path object contains:
- `exist`: Existence status
This provides a safe and convenient API for all file system operations in V.

## 5. Sub-path Getters and Checkers

The `pathlib` module provides methods to get and check for the existence of sub-paths (files, directories, and links) within a given path.

```v
// Get a sub-path (file or directory) with various options
path.sub_get(name:"mysub_file.md", name_fix_find:true, name_fix:true)!
// Check if a sub-path exists
path.sub_exists(name:"my_sub_dir")!
// Check if a file exists
path.file_exists("my_file.txt")
// Check if a file exists (case-insensitive)
path.file_exists_ignorecase("My_File.txt")
// Get a file as a Path object
path.file_get("another_file.txt")!
// Get a file as a Path object (case-insensitive)
path.file_get_ignorecase("Another_File.txt")!
// Get a file, create if it doesn't exist
path.file_get_new("new_file.txt")!
// Check if a link exists
path.link_exists("my_link")
// Check if a link exists (case-insensitive)
path.link_exists_ignorecase("My_Link")
// Get a link as a Path object
path.link_get("some_link")!
// Check if a directory exists
path.dir_exists("my_directory")
// Get a directory as a Path object
path.dir_get("another_directory")!
// Get a directory, create if it doesn't exist
path.dir_get_new("new_directory")!
```
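
A short worked example tying a few of these calls together; the directory and file names are placeholders.

```v
import freeflowuniverse.herolib.core.pathlib

fn example() ! {
	// open (or create) a working directory
	mut workdir := pathlib.get_dir(path: '/tmp/pathlib_demo', create: true)!

	// create a config file inside it if it does not exist yet
	cfg := workdir.file_get_new('config.toml')!
	println('config path: ${cfg.path}')

	// look the file up again, ignoring case
	if workdir.file_exists_ignorecase('CONFIG.TOML') {
		println('found it (case-insensitive)')
	}

	// fetch a sub directory, creating it on the fly
	data_dir := workdir.dir_get_new('data')!
	println('data dir: ${data_dir.path}')
}
```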

View File

@@ -97,6 +97,12 @@ pub fn (mut plbook PlayBook) exists_once(args FindArgs) bool {
return res.len == 1
}
pub fn (mut plbook PlayBook) exists(args FindArgs) bool {
mut res := plbook.find(args) or { [] }
return res.len > 0
}
pub fn (mut plbook PlayBook) find_one(args FindArgs) !&Action {
mut res := plbook.find(args)!
if res.len == 0 {

View File

@@ -226,7 +226,7 @@ fn (mut self ReplaceInstructions) replace_in_dir_recursive(path1 string, extensi
mut pathnew := ''
mut count := 0
console.print_debug(" - replace in dir: ${path1}")
console.print_debug(' - replace in dir: ${path1}')
for item in items {
// println(item)
@@ -241,14 +241,14 @@ fn (mut self ReplaceInstructions) replace_in_dir_recursive(path1 string, extensi
self.replace_in_dir_recursive(pathnew, extensions, dryrun, mut done)!
} else {
tmpext:=os.file_ext(pathnew)[1..] or { ""}
tmpext := os.file_ext(pathnew)[1..] or { '' }
ext := tmpext.to_lower()
if extensions == [] || ext in extensions {
// means we match a file
txtold := os.read_file(pathnew)!
txtnew := self.replace(text: txtold, dedent: false)!
if txtnew.trim(' \n') == txtold.trim(' \n') {
//console.print_header(' nothing to do : ${pathnew}')
// console.print_header(' nothing to do : ${pathnew}')
} else {
console.print_header(' replace done : ${pathnew}')
count++

View File

@@ -67,8 +67,6 @@ pub fn (mut page Page) doc() !&Doc {
return page.doc
}
// return doc, reparse if needed
fn (page Page) doc_immute() !&Doc {
if page.changed {
@@ -112,8 +110,6 @@ pub fn (page Page) get_markdown() !string {
return result
}
pub fn (mut page Page) set_content(content string) ! {
page.reparse_doc(content)!
}

View File

@@ -22,7 +22,7 @@ pub mut:
// all names will be in name_fixed mode .
// all images in img/
pub fn (mut tree Tree) export(args_ TreeExportArgs) ! {
mut args:= args_
mut args := args_
if args.toreplace.len > 0 {
mut ri := regext.regex_instructions_new()
ri.add_from_text(args.toreplace)!

View File

@@ -3,59 +3,58 @@ module doctree
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
@[params]
pub struct PlayArgs {
pub mut:
heroscript string
heroscript string
heroscript_path string
plbook ?PlayBook
reset bool
plbook ?PlayBook
reset bool
}
pub fn play(args_ PlayArgs) ! {
mut args := args_
mut plbook := args.plbook or { playbook.new(text: args.heroscript,path:args.heroscript_path)! }
mut plbook := args.plbook or {
playbook.new(text: args.heroscript, path: args.heroscript_path)!
}
mut doctrees := map[string]&Tree{}
collection_actions := plbook.find(filter: 'doctree.collection')!
for action in collection_actions {
mut p := action.params
name := p.get_default('name',"main")!
mut doctree := doctrees[name] or {
mut newdtr:= doctree.new(name: name)!
mut p := action.params
name := p.get_default('name', 'main')!
mut doctree := doctrees[name] or {
mut newdtr := new(name: name)!
doctrees[name] = newdtr
newdtr
}
path:= p.get_default('path',"")!
git_url:= p.get_default('git_url',"")!
git_reset:= p.get_default_false('git_reset')
git_pull:= p.get_default_false('git_pull')
path := p.get_default('path', '')!
git_url := p.get_default('git_url', '')!
git_reset := p.get_default_false('git_reset')
git_pull := p.get_default_false('git_pull')
doctree.scan(path: path, git_url: git_url, git_reset: git_reset, git_pull: git_pull)!
}
}
export_actions := plbook.find(filter: 'doctree.export')!
if export_actions.len == 0 {
name0:="main"
mut doctree0 := doctrees[name0] or { panic("can't find doctree with name ${name0}") }
name0 := 'main'
mut doctree0 := doctrees[name0] or { panic("can't find doctree with name ${name0}") }
doctree0.export()!
}
if export_actions.len > 0 {
if collection_actions.len==0 {
if collection_actions.len == 0 {
println(plbook)
return error("No collections configured, use !!doctree.collection..., otherwise cannot export")
return error('No collections configured, use !!doctree.collection..., otherwise cannot export')
}
}
for action in export_actions {
mut p := action.params
name := p.get_default('name',"main")!
mut p := action.params
name := p.get_default('name', 'main')!
destination := p.get('destination')!
reset:= p.get_default_false('reset')
exclude_errors:= p.get_default_true('exclude_errors')
reset := p.get_default_false('reset')
exclude_errors := p.get_default_true('exclude_errors')
mut doctree := doctrees[name] or { return error("can't find doctree with name ${name}") }
doctree.export(
destination: destination
@@ -63,4 +62,4 @@ pub fn play(args_ PlayArgs) ! {
exclude_errors: exclude_errors
)!
}
}
}
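
For orientation, a sketch of driving the play function above from V. The heroscript parameter syntax (name:'...' pairs) and the paths are illustrative placeholders.

```v
import freeflowuniverse.herolib.data.doctree

fn demo_play() ! {
	script := "
!!doctree.collection name:'docs' path:'/tmp/mydocs'

!!doctree.export name:'docs' destination:'/tmp/export'
"
	doctree.play(heroscript: script)!
}
```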

View File

@@ -30,7 +30,13 @@ pub mut:
// git_pull bool
// ```
pub fn (mut tree Tree) scan(args TreeScannerArgs) ! {
mut path := gittools.path(path: args.path, git_url: args.git_url, git_reset: args.git_reset, git_root: args.git_root, git_pull: args.git_pull)!
mut path := gittools.path(
path: args.path
git_url: args.git_url
git_reset: args.git_reset
git_root: args.git_root
git_pull: args.git_pull
)!
if !path.is_dir() {
return error('path is not a directory')
}

View File

@@ -64,6 +64,16 @@ pub fn tree_get(name string) !&Tree {
return error("cann't doctree:'${name}'")
}
pub fn tree_exist(name string) bool {
rlock doctrees {
if name in doctrees {
return true
}
}
return false
}
// tree_set stores tree in global map
pub fn tree_set(tree Tree) {
lock doctrees {

View File

@@ -1,11 +1,11 @@
module markdownparser
import freeflowuniverse.herolib.data.markdownparser { new }
import freeflowuniverse.herolib.data.markdownparser.elements { Doc, Frontmatter2 }
import freeflowuniverse.herolib.data.markdownparser.elements { Frontmatter2 }
import os
fn test_get_content_without_frontmatter() {
markdown_with_frontmatter := '
markdown_with_frontmatter := '
---
title: My Document
author: Roo
@@ -14,27 +14,27 @@ author: Roo
This is some content.
'
expected_content := '# Hello World
expected_content := '# Hello World
This is some content.
'
mut doc := new(content: markdown_with_frontmatter)!
mut result := ''
for element in doc.children {
if element is Frontmatter2 {
continue
}
result += element.markdown()!
}
assert result.trim_space() == expected_content.trim_space()
mut doc := new(content: markdown_with_frontmatter)!
mut result := ''
for element in doc.children {
if element is Frontmatter2 {
continue
}
result += element.markdown()!
}
assert result.trim_space() == expected_content.trim_space()
mut doc_no_fm := new(content: expected_content)!
mut result_no_fm := ''
for element in doc_no_fm.children {
if element is Frontmatter2 {
continue
}
result_no_fm += element.markdown()!
}
assert result_no_fm.trim_space() == expected_content.trim_space()
}
mut doc_no_fm := new(content: expected_content)!
mut result_no_fm := ''
for element in doc_no_fm.children {
if element is Frontmatter2 {
continue
}
result_no_fm += element.markdown()!
}
assert result_no_fm.trim_space() == expected_content.trim_space()
}

View File

@@ -105,20 +105,18 @@ pub fn get(args_ GitStructureArgGet) !&GitStructure {
return gsinstances[rediskey_] or { panic('bug') }
}
@[params]
pub struct GitPathGetArgs {
pub mut:
path string
git_url string
git_reset bool
git_root string
git_pull bool
currentdir bool //can use currentdir
path string
git_url string
git_reset bool
git_root string
git_pull bool
currentdir bool // can use currentdir
}
//return pathlib Path based on, will pull...
// return a pathlib.Path based on the args below; pulls the repo if needed
// params:
// path string
// git_url string
@@ -126,13 +124,13 @@ pub mut:
// git_root string
// git_pull bool
pub fn path(args_ GitPathGetArgs) !pathlib.Path {
mut args:= args_
if args.path.trim_space() == '' && args.currentdir{
mut args := args_
if args.path.trim_space() == '' && args.currentdir {
args.path = os.getwd()
}
if args.git_url.len > 0 {
mut gs := gittools.get(coderoot: args.git_root)!
mut gs := get(coderoot: args.git_root)!
mut repo := gs.get_repo(
url: args.git_url
pull: args.git_pull

View File

@@ -1,7 +1,6 @@
module gittools
import os
import freeflowuniverse.herolib.core.pathlib
fn test_gitlocation_from_url() {

View File

@@ -113,7 +113,7 @@ pub fn (mut gitstructure GitStructure) get_repo(args_ ReposGetArgs) !&GitRepo {
return error('Found more than one repository for \n${args}\n${repos}')
}
//the pull & reset was not used, now re-inserted
// the pull & reset was not used, now re-inserted
mut repo := repositories[0] or { panic('bug get_repo') }
if args.pull {

View File

@@ -6,7 +6,7 @@ import os
@[params]
pub struct GitCloneArgs {
pub mut:
//only url needed because is a clone
// only url needed because is a clone
url string
sshkey string
}
@@ -18,21 +18,19 @@ pub fn (mut gitstructure GitStructure) clone(args GitCloneArgs) !&GitRepo {
}
console.print_header('Git clone from the URL: ${args.url}.')
//gitlocatin comes just from the url, not from fs of whats already there
// the git location comes only from the url, not from the filesystem of what is already there
git_location := gitstructure.gitlocation_from_url(args.url)!
mut repo := gitstructure.repo_new_from_gitlocation(git_location)!
//TODO: this seems to be wrong, we should not set the url here
// TODO: this seems to be wrong, we should not set the url here
// repo.status_wanted.url = args.url
// repo.status_wanted.branch = git_location.branch_or_tag
mut repopath := repo.patho()!
if repopath.exists(){
if repopath.exists() {
return error("can't clone on existing path, came from url, path found is ${repopath.path}.\n")
}
if args.sshkey.len > 0 {
repo.set_sshkey(args.sshkey)!
}
@@ -41,16 +39,14 @@ pub fn (mut gitstructure GitStructure) clone(args GitCloneArgs) !&GitRepo {
cfg := gitstructure.config()!
mut extra := ''
if cfg.light {
extra = '--depth 1 --no-single-branch '
}
//the url needs to be http if no agent, otherwise its ssh, the following code will do this
// the url needs to be http if no agent, otherwise its ssh, the following code will do this
mut cmd := 'cd ${parent_dir} && git clone ${extra} ${repo.get_repo_url_for_clone()!} ${repo.name}'
mut sshkey_include := ''
if cfg.ssh_key_path.len > 0 {
sshkey_include = "GIT_SSH_COMMAND=\"ssh -i ${cfg.ssh_key_path}\" "

View File

@@ -87,7 +87,7 @@ pub fn (mut repo GitRepo) get_parent_dir(args GetParentDir) !string {
return parent_dir
}
//DONT THINK ITS GOOD TO GIVE THE BRANCH
// DONT THINK ITS GOOD TO GIVE THE BRANCH
// @[params]
// pub struct GetRepoUrlArgs {
// pub mut:
@@ -96,8 +96,7 @@ pub fn (mut repo GitRepo) get_parent_dir(args GetParentDir) !string {
// url_get returns the URL of a git address
fn (self GitRepo) get_repo_url_for_clone() !string {
//WHY do we do following, now uncommented, the following code dispisses the ssh url part
// WHY do we do the following (now commented out)? The code below discards the ssh part of the url
// url := self.status_wanted.url
// if true{panic(url)}
// if url.len != 0 {
@@ -112,7 +111,6 @@ fn (self GitRepo) get_repo_url_for_clone() !string {
} else {
return self.get_http_url()!
}
}
fn (self GitRepo) get_ssh_url() !string {

View File

@@ -17,11 +17,11 @@ fn startupcmd() ![]zinit.ZProcessNewArgs {
mut installer := get()!
mut res := []zinit.ZProcessNewArgs{}
mut cfg := get()!
res << zinit.ZProcessNewArgs{
name: 'docker'
cmd: 'dockerd'
startuptype: .systemd
}
res << zinit.ZProcessNewArgs{
name: 'docker'
cmd: 'dockerd'
startuptype: .systemd
}
cmd := "
echo 'zinit starting dify'
@@ -32,9 +32,9 @@ fn startupcmd() ![]zinit.ZProcessNewArgs {
docker compose --env-file ${cfg.path}/docker/.env up
"
res << zinit.ZProcessNewArgs{
name: 'dify'
cmd: cmd
startuptype: .systemd
name: 'dify'
cmd: cmd
startuptype: .systemd
}
return res
}
@@ -42,10 +42,10 @@ fn startupcmd() ![]zinit.ZProcessNewArgs {
fn running() !bool {
mut installer := get()!
cfg := get()!
cmd := "
cmd := '
sleep 120
docker ps | grep dify-web
"
'
res := os.execute(cmd)
return res.exit_code == 0
}
@@ -70,7 +70,7 @@ fn installed() !bool {
mut docker := docker_installer.get()!
docker.install()!
cmd := "docker ps | grep dify-web"
cmd := 'docker ps | grep dify-web'
result := os.execute(cmd)
if result.exit_code != 0 {
return false
@@ -146,7 +146,7 @@ fn build() ! {
fn destroy() ! {
mut cfg := get()!
cmd := "systemctl stop dify"
cmd := 'systemctl stop dify'
result := os.execute(cmd)
if result.exit_code != 0 {
return error("dify isn't running: ${result.output}")

View File

@@ -29,7 +29,7 @@ fn test_service_list() {
assert false, 'Failed to list services: ${err}'
return
}
// Just verify we got a map, even if it's empty
assert typeof(services).name == 'map[string]string'
println('Found ${services.len} services')
@@ -46,7 +46,7 @@ fn test_discover() {
assert false, 'Failed to get OpenRPC spec: ${err}'
return
}
// Verify we got a non-empty string
assert spec.len > 0
assert spec.contains('"openrpc"')
@@ -63,7 +63,7 @@ fn test_stateless_client() {
temp_dir := os.temp_dir()
path := os.join_path(temp_dir, 'zinit_test')
pathcmds := os.join_path(temp_dir, 'zinit_test_cmds')
// Create the directories
os.mkdir_all(path) or {
assert false, 'Failed to create test directory: ${err}'
@@ -73,7 +73,7 @@ fn test_stateless_client() {
assert false, 'Failed to create test commands directory: ${err}'
return
}
// Clean up after the test
defer {
os.rmdir_all(path) or {}
@@ -82,19 +82,19 @@ fn test_stateless_client() {
mut zinit_client := new_stateless(
socket_path: '/tmp/zinit.sock'
path: path
pathcmds: pathcmds
path: path
pathcmds: pathcmds
) or {
assert false, 'Failed to create stateless client: ${err}'
return
}
// Test the names method which uses the client
names := zinit_client.names() or {
assert false, 'Failed to get service names: ${err}'
return
}
assert typeof(names).name == '[]string'
}
@@ -162,4 +162,4 @@ fn test_service_lifecycle() {
println('Warning: Failed to delete service: ${err}')
}
}
*/
*/

View File

@@ -38,7 +38,7 @@ pub fn new_request(method string, params string) Request {
jsonrpc: jsonrpc_version
method: method
params: params
id: rand.int_in_range(1, 1000000) or {panic("Failed to generate unique ID")}
id: rand.int_in_range(1, 1000000) or { panic('Failed to generate unique ID') }
}
}
@@ -108,7 +108,7 @@ pub fn new_request_generic[T](method string, params T) RequestGeneric[T] {
jsonrpc: jsonrpc_version
method: method
params: params
id: rand.int_in_range(1, 1000000000) or { panic("Failed to generate unique ID") }
id: rand.int_in_range(1, 1000000000) or { panic('Failed to generate unique ID') }
}
}
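
A sketch of building a typed request with the generic constructor above; it is assumed to live inside the `jsonrpc` module itself, and `PingParams` is a hypothetical parameter struct.

```v
struct PingParams {
	message string
}

fn demo_generic_request() {
	req := new_request_generic('ping', PingParams{ message: 'hello' })
	// the constructor assigns a random id in [1, 1000000000)
	println('method=${req.method} id=${req.id}')
}
```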

View File

@@ -31,23 +31,23 @@ pub fn (mut t UnixSocketTransport) send(request string, params SendParams) !stri
socket.close() or {}
// console.print_debug('Socket closed')
}
// Set timeout if specified
if params.timeout > 0 {
socket.set_read_timeout(params.timeout * time.second)
socket.set_write_timeout(params.timeout * time.second)
// console.print_debug('Set socket timeout to ${params.timeout} seconds')
}
net.set_blocking(socket.sock.handle,false) !
net.set_blocking(socket.sock.handle, false)!
// Send the request
// console.print_debug('Sending request: $request')
socket.write_string(request + '\n')!
// println(18)
// Read the response in a single call with a larger buffer
mut res_total := []u8
mut res_total := []u8{}
for {
// console.print_debug('Reading response from socket...')
// Read up to 64000 bytes
@@ -60,16 +60,15 @@ pub fn (mut t UnixSocketTransport) send(request string, params SendParams) !stri
}
// Append the newly read data to the total response
res_total << res[..n]
if n < 8192{
if n < 8192 {
// console.print_debug('No more data to read, breaking loop after ${n} bytes')
break
}
}
}
// println(res_total.bytestr().trim_space())
// println(19)
// Convert response to string and trim whitespace
mut response := res_total.bytestr().trim_space()
// console.print_debug('Received ${response.len} bytes')
@@ -78,7 +77,7 @@ pub fn (mut t UnixSocketTransport) send(request string, params SendParams) !stri
if response.len == 0 {
return error('Empty response received from server')
}
// console.print_debug('Response: $response')
return response
}

View File

@@ -1,11 +1,14 @@
module openrpc
import json
import x.json2 { Any }
import freeflowuniverse.herolib.schemas.jsonschema { Reference, decode_schemaref }
pub fn decode(data string) !OpenRPC {
// mut object := json.decode[OpenRPC](data) or { return error('Failed to decode json\n=======\n${data}\n===========\n${err}') }
mut object := json.decode(OpenRPC,data) or { return error('Failed to decode json\n=======\n${data}\n===========\n${err}') }
mut object := json.decode(OpenRPC, data) or {
return error('Failed to decode json\n=======\n${data}\n===========\n${err}')
}
data_map := json2.raw_decode(data)!.as_map()
if 'components' in data_map {
object.components = decode_components(data_map) or {
@@ -17,7 +20,7 @@ pub fn decode(data string) !OpenRPC {
for i, method in methods_any.arr() {
method_map := method.as_map()
//TODO: I had to disable this because it was not working, need to check why !!!!!
// TODO: I had to disable this because it was not working, need to check why !!!!!
// if result_any := method_map['result'] {
// object.methods[i].result = decode_content_descriptor_ref(result_any.as_map()) or {

View File

@@ -12,16 +12,16 @@ import time
@[heap]
pub struct DocSite {
pub mut:
name string
url string
path_src pathlib.Path
name string
url string
path_src pathlib.Path
// path_build pathlib.Path
path_publish pathlib.Path
args DSiteGetArgs
errors []SiteError
config Configuration
siteconfig siteconfig.SiteConfig
factory &DocusaurusFactory @[skip; str: skip] // Reference to the parent
args DSiteGetArgs
errors []SiteError
config Configuration
siteconfig siteconfig.SiteConfig
factory &DocusaurusFactory @[skip; str: skip] // Reference to the parent
}
pub fn (mut s DocSite) build() ! {
@@ -82,7 +82,6 @@ pub fn (mut s DocSite) dev(args DevArgs) ! {
s.open()!
}
pub fn (mut s DocSite) dev_watch(args DevArgs) ! {
s.generate()!
@@ -199,4 +198,3 @@ pub fn (mut site DocSite) error(args ErrorArgs) {
site.errors << e
console.print_stderr(args.msg)
}

View File

@@ -1,7 +1,6 @@
module docusaurus
//to avoid overwriting wrong locations
// to avoid overwriting wrong locations
fn check_item(item string) ! {
item2 := item.trim_space().trim('/').trim_space().all_after_last('/')
@@ -17,4 +16,4 @@ fn (mut site DocSite) check() ! {
for item in site.config.main.build_dest_dev {
check_item(item)!
}
}
}

View File

@@ -54,9 +54,9 @@ pub mut:
pub struct NavbarItem {
pub mut:
label string
href string @[omitempty]
href string @[omitempty]
position string
to string @[omitempty]
to string @[omitempty]
}
pub struct Footer {
@@ -74,12 +74,11 @@ pub mut:
pub struct FooterItem {
pub mut:
label string
href string @[omitempty]
to string @[omitempty]
href string @[omitempty]
to string @[omitempty]
}
fn config_load(path string) !Configuration {
fn config_load(path string) !Configuration {
// Use siteconfig.new from factory.v. This function handles PlayBook creation, playing, and Redis interaction.
site_cfg_ref := siteconfig.new(path)!
site_cfg_from_heroscript := *site_cfg_ref // Dereference to get the actual SiteConfig struct
@@ -145,7 +144,6 @@ fn config_load(path string) !Configuration {
}
}
return config_fix(cfg)!
}
fn config_fix(config Configuration) !Configuration {

View File

@@ -9,12 +9,11 @@ import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.core.texttools.regext
import freeflowuniverse.herolib.data.doctree
pub fn (mut site DocSite) generate() ! {
console.print_header(' site generate: ${site.name} on ${site.factory.path_build.path}')
console.print_header(' site source on ${site.path_src.path}')
//lets make sure we remove the cfg dir so we rebuild
// lets make sure we remove the cfg dir so we rebuild
cfg_path := os.join_path(site.factory.path_build.path, 'cfg')
osal.rm(cfg_path)!
@@ -26,13 +25,13 @@ pub fn (mut site DocSite) generate() ! {
url: 'https://github.com/freeflowuniverse/docusaurus_template/src/branch/main/template/'
)!
//we need to copy the template each time for these 2 items, otherwise there can be leftovers from other run
for item in ["src","static"]{
mut template_src_path:=pathlib.get_dir(path:"${template_path}/${item}",create:true)!
// we need to copy the template each time for these 2 items, otherwise there can be leftovers from other run
for item in ['src', 'static'] {
mut template_src_path := pathlib.get_dir(path: '${template_path}/${item}', create: true)!
template_src_path.copy(dest: '${site.factory.path_build.path}/${item}', delete: true)!
//now copy the info which can be overruled from source in relation to the template
if os.exists("${site.path_src.path}/${item}"){
mut src_path:=pathlib.get_dir(path:"${site.path_src.path}/${item}",create:false)!
// now copy the info which can be overruled from source in relation to the template
if os.exists('${site.path_src.path}/${item}') {
mut src_path := pathlib.get_dir(path: '${site.path_src.path}/${item}', create: false)!
src_path.copy(dest: '${site.factory.path_build.path}/${item}', delete: false)!
}
}
@@ -45,17 +44,15 @@ pub fn (mut site DocSite) generate() ! {
mut footer_file := pathlib.get_file(path: '${cfg_path}/footer.json', create: true)!
footer_file.write(json.encode_pretty(site.config.footer))!
mut aa := site.path_src.dir_get("docs")!
mut aa := site.path_src.dir_get('docs')!
aa.copy(dest: '${site.factory.path_build.path}/docs', delete: true)!
site.process_imports()!
}
pub fn (mut site DocSite) process_imports() ! {
//this means we need to do doctree version
// this means we need to do doctree version
mut tree := doctree.new(name: 'site_${site.name}')!
mut gs := gittools.new()!
@@ -70,16 +67,17 @@ pub fn (mut site DocSite) process_imports() ! {
mypatho.copy(dest: '${site.factory.path_build.path}/docs/${item.dest}', delete: true)!
// println(item)
//replace: {'NAME': 'MyName', 'URGENCY': 'red'}
// replace: {'NAME': 'MyName', 'URGENCY': 'red'}
mut ri := regext.regex_instructions_new()
for key,val in item.replace {
ri.add_item("\{${key}\}",val)!
for key, val in item.replace {
ri.add_item('\{${key}\}', val)!
}
mypatho.copy(dest: '${site.factory.path_build.path}/docs/${item.dest}', delete: true)!
ri.replace_in_dir(path:"${site.factory.path_build.path}/docs/${item.dest}",extensions:["md"])!
}
}
ri.replace_in_dir(
path: '${site.factory.path_build.path}/docs/${item.dest}'
extensions: [
'md',
]
)!
}
}

View File

@@ -12,16 +12,16 @@ import freeflowuniverse.herolib.data.doctree
@[params]
pub struct DSiteGetArgs {
pub mut:
name string
nameshort string
//gittools will use these params to find the right path
name string
nameshort string
// gittools will use these params to find the right path
path string
git_url string
git_reset bool
git_root string
git_pull bool
//more params
path_publish string //default empty
// more params
path_publish string // default empty
production bool
watch_changes bool = true
update bool
@@ -35,36 +35,43 @@ pub fn (mut f DocusaurusFactory) get(args_ DSiteGetArgs) !&DocSite {
console.print_header(' Docusaurus: ${args_.name}')
mut args := args_
mut path := gittools.path(path: args.path, git_url: args.git_url, git_reset: args.git_reset, git_root: args.git_root, git_pull: args.git_pull,currentdir:true)!
mut path := gittools.path(
path: args.path
git_url: args.git_url
git_reset: args.git_reset
git_root: args.git_root
git_pull: args.git_pull
currentdir: true
)!
args.path = path.path
if !path.is_dir() {
return error('path is not a directory')
}
configpath:='${args.path}/cfg'
if ! os.exists(configpath) {
configpath := '${args.path}/cfg'
if !os.exists(configpath) {
return error("can't find config file for docusaurus in ${configpath}")
}
osal.rm("${args.path}/cfg/main.json")!
osal.rm("${args.path}/cfg/footer.json")!
osal.rm("${args.path}/cfg/navbar.json")!
osal.rm("${args.path}/build.sh")!
osal.rm("${args.path}/develop.sh")!
osal.rm("${args.path}/sync.sh")!
osal.rm("${args.path}/.DS_Store")!
osal.rm('${args.path}/cfg/main.json')!
osal.rm('${args.path}/cfg/footer.json')!
osal.rm('${args.path}/cfg/navbar.json')!
osal.rm('${args.path}/build.sh')!
osal.rm('${args.path}/develop.sh')!
osal.rm('${args.path}/sync.sh')!
osal.rm('${args.path}/.DS_Store')!
if !os.exists('${args.path}/docs') {
if args.init {
// Create docs directory if it doesn't exist in template or site
os.mkdir_all('${args.path}/docs')!
panic("implement")
panic('implement')
} else {
return error("Can't find docs dir in chosen docusaurus location: ${args.path}")
}
}
mut myconfig:=config_load(configpath)!
mut myconfig := config_load(configpath)!
if myconfig.main.name.len == 0 {
myconfig.main.name = myconfig.main.base_url.trim_space().trim('/').trim_space()
@@ -79,26 +86,26 @@ pub fn (mut f DocusaurusFactory) get(args_ DSiteGetArgs) !&DocSite {
}
args.nameshort = texttools.name_fix(args.nameshort)
if args.path_publish == ""{
args.path_publish = "${f.path_publish}/${args.name}"
if args.path_publish == '' {
args.path_publish = '${f.path_publish}/${args.name}'
}
doctree.play(
heroscript_path: configpath
reset: args.update
reset: args.update
)!
mut mysiteconfig:=*siteconfig.new(configpath)!
mut mysiteconfig := *siteconfig.new(configpath)!
mut ds := DocSite{
name: args.name
name: args.name
// url: args.url
path_src: pathlib.get_dir(path: args.path, create: false)!
path_publish: pathlib.get_dir(path:args.path_publish)!
args: args
config: myconfig
siteconfig: mysiteconfig //comes from the heroconfig
factory: &f
path_src: pathlib.get_dir(path: args.path, create: false)!
path_publish: pathlib.get_dir(path: args.path_publish)!
args: args
config: myconfig
siteconfig: mysiteconfig // comes from the heroconfig
factory: &f
}
ds.check()!

View File

@@ -6,20 +6,20 @@ import freeflowuniverse.herolib.core.pathlib
@[heap]
pub struct DocusaurusFactory {
pub mut:
sites []&DocSite @[skip; str: skip]
path_build pathlib.Path
sites []&DocSite @[skip; str: skip]
path_build pathlib.Path
path_publish pathlib.Path
args DocusaurusArgs
config Configuration // Stores configuration from HeroScript if provided
args DocusaurusArgs
config Configuration // Stores configuration from HeroScript if provided
}
@[params]
pub struct DocusaurusArgs {
pub mut:
path_publish string
path_build string
production bool
update bool
path_publish string
path_build string
production bool
update bool
heroscript string
heroscript_path string
}
@@ -29,14 +29,14 @@ pub fn new(args_ DocusaurusArgs) !&DocusaurusFactory {
if args.path_build == '' {
args.path_build = '${os.home_dir()}/hero/var/docusaurus/build'
}
if args.path_publish == ""{
args.path_publish = "${os.home_dir()}/hero/var/docusaurus/publish"
if args.path_publish == '' {
args.path_publish = '${os.home_dir()}/hero/var/docusaurus/publish'
}
// Create the factory instance
mut f := &DocusaurusFactory{
args: args_
path_build: pathlib.get_dir(path: args.path_build, create: true)!
args: args_
path_build: pathlib.get_dir(path: args.path_build, create: true)!
path_publish: pathlib.get_dir(path: args_.path_publish, create: true)!
}

View File

@@ -6,7 +6,6 @@ import freeflowuniverse.herolib.installers.web.bun
import freeflowuniverse.herolib.core.pathlib
import os
@[params]
struct TemplateInstallArgs {
mut:
@@ -15,10 +14,10 @@ mut:
delete bool
}
//copy template in build location
// copy template in build location
fn (mut self DocusaurusFactory) template_install(args_ TemplateInstallArgs) ! {
mut gs := gittools.new()!
mut args:=args_
mut args := args_
template_path := gs.get_path(
pull: args.template_update
@@ -26,11 +25,11 @@ fn (mut self DocusaurusFactory) template_install(args_ TemplateInstallArgs) ! {
url: 'https://github.com/freeflowuniverse/docusaurus_template/src/branch/main/template'
)!
mut template_path0:=pathlib.get_dir(path:template_path,create:false)!
mut template_path0 := pathlib.get_dir(path: template_path, create: false)!
template_path0.copy(dest: '${self.path_build.path}', delete: args.delete)!
if ! os.exists("${self.path_build.path}/node_modules"){
if !os.exists('${self.path_build.path}/node_modules') {
args.install = true
}
@@ -39,7 +38,7 @@ fn (mut self DocusaurusFactory) template_install(args_ TemplateInstallArgs) ! {
mut installer := bun.get()!
installer.install()!
osal.exec(
//always stay in the context of the build directory
// always stay in the context of the build directory
cmd: '
${osal.profile_path_source_and()!}
export PATH=${self.path_build.path}/node_modules/.bin::${os.home_dir()}/.bun/bin/:\$PATH

View File

@@ -3,77 +3,67 @@ module docusaurus
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
@[params]
pub struct PlayArgs {
pub mut:
heroscript string
heroscript string
heroscript_path string
plbook ?PlayBook
reset bool
plbook ?PlayBook
reset bool
}
pub fn play(args_ PlayArgs) ! {
mut args := args_
mut plbook := args.plbook or { playbook.new(text: args.heroscript,path:args.heroscript_path)! }
mut ds := docusaurus.new()!
mut plbook := args.plbook or {
playbook.new(text: args.heroscript, path: args.heroscript_path)!
}
if plbook.exists_once(filter: 'docusaurus.define'){
mut ds := new()!
mut action:=plbook.action_get(actor:'docusaurus',name:'define')!
if plbook.exists_once(filter: 'docusaurus.define') {
mut action := plbook.action_get(actor: 'docusaurus', name: 'define')!
mut p := action.params
path_publish := p.get_default('path_publish',"")!
path_build := p.get_default('path_build',"")!
mut p := action.params
path_publish := p.get_default('path_publish', '')!
path_build := p.get_default('path_build', '')!
production := p.get_default_false('production')
update := p.get_default_false('update')
//don't do heroscript here because this could already be done before
ds = docusaurus.new(
// don't do heroscript here because this could already be done before
ds = new(
path_publish: path_publish
path_build: path_build
production: production
update: update
path_build: path_build
production: production
update: update
)!
}
actions := plbook.find(filter: 'docusaurus.add')!
for action in actions {
mut p := action.params
name := p.get_default('name',"main")!
path := p.get_default('path',"")!
git_url := p.get_default('git_url',"")!
git_reset:= p.get_default_false('git_reset')
git_pull:= p.get_default_false('git_pull')
mut p := action.params
name := p.get_default('name', 'main')!
path := p.get_default('path', '')!
git_url := p.get_default('git_url', '')!
git_reset := p.get_default_false('git_reset')
git_pull := p.get_default_false('git_pull')
mut site:=ds.get(
name: name
nameshort: p.get_default('nameshort', name)!
path: path
git_url: git_url
git_reset: git_reset
git_root: p.get_default('git_root', "")!
git_pull: git_pull
path_publish: p.get_default('path_publish', "")!
production: p.get_default_false('production')
mut site := ds.get(
name: name
nameshort: p.get_default('nameshort', name)!
path: path
git_url: git_url
git_reset: git_reset
git_root: p.get_default('git_root', '')!
git_pull: git_pull
path_publish: p.get_default('path_publish', '')!
production: p.get_default_false('production')
watch_changes: p.get_default_true('watch_changes')
update: p.get_default_false('update')
open: p.get_default_false('open')
init: p.get_default_false('init')
update: p.get_default_false('update')
open: p.get_default_false('open')
init: p.get_default_false('init')
)!
if plbook.exists_once(filter: 'docusaurus.dev'){
if plbook.exists_once(filter: 'docusaurus.dev') {
site.dev()!
}
}
}
}
}
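
For context, a minimal sketch of how this play() entrypoint might be driven from a script; the action and parameter names (docusaurus.define, docusaurus.add, docusaurus.dev, path_build, git_url, ...) mirror the parsing code above, while the module path, URLs and local paths are placeholders and the exact heroscript layout is assumed, not taken from this commit:

#!/usr/bin/env -S v -n -w -cg -gc none -d use_openssl -enable-globals run

import freeflowuniverse.herolib.web.docusaurus

// assumption: module path and all values below are placeholders;
// only the action/parameter names come from the play() code above
docusaurus.play(
	heroscript: '
		!!docusaurus.define
			path_build:"/tmp/docusaurus/build"
			path_publish:"/tmp/docusaurus/publish"

		!!docusaurus.add name:"docs"
			git_url:"https://git.example.com/org/docs_site"
			git_pull:1

		!!docusaurus.dev
	'
)!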

View File

@@ -3,17 +3,17 @@ module siteconfig
// Combined config structure
pub struct SiteConfig {
pub mut:
name string
title string = 'My Documentation Site' // General site title
description string // General site description, can be used for meta if meta_description not set
tagline string
favicon string = 'img/favicon.png'
image string = 'img/tf_graph.png' // General site image, can be used for meta if meta_image not set
copyright string = 'someone'
footer Footer
menu Menu
imports []ImportItem
pages []Page
name string
title string = 'My Documentation Site' // General site title
description string // General site description, can be used for meta if meta_description not set
tagline string
favicon string = 'img/favicon.png'
image string = 'img/tf_graph.png' // General site image, can be used for meta if meta_image not set
copyright string = 'someone'
footer Footer
menu Menu
imports []ImportItem
pages []Page
// New fields for Docusaurus compatibility
url string // The main URL of the site (from !!site.config url:)
@@ -27,8 +27,6 @@ pub mut:
build_dest_dev []BuildDest // Development build destinations (from !!site.build_dest_dev)
}
pub struct Page {
pub mut:
name string
@@ -88,7 +86,7 @@ pub mut:
pub struct ImportItem {
pub mut:
name string //will normally be empty
name string // will normally be empty
url string // http git url can be to specific path
path string
dest string // location in the docs folder of the place where we will build docusaurus

View File

@@ -114,7 +114,7 @@ fn play_import(mut plbook PlayBook, mut config SiteConfig) ! {
}
}
mut import_ := ImportItem{
name: p.get_default('name','')!
name: p.get_default('name', '')!
url: p.get('url')!
path: p.get_default('path', '')!
dest: p.get_default('dest', '')!

18
lib/web/sitegen/README.md Normal file
View File

@@ -0,0 +1,18 @@
# Site Generation Library
This library provides functionality for generating static websites. It includes tools for defining site structure, managing content, and rendering pages.
## Purpose
The `sitegen` library simplifies creating and maintaining static websites by offering a programmatic approach to site generation. Developers define their site's layout, content, and navigation in code, which is then rendered into a complete static website.
## Key Features
- **Site Structure Definition**: Define the overall structure of your website, including pages, sections, and navigation.
- **Content Management**: Integrate with various content sources to populate your website.
- **Template Rendering**: Render content into HTML pages using flexible templating mechanisms.
- **Asset Management**: Handle static assets like CSS, JavaScript, and images.
## Usage
A minimal usage sketch is shown below; further details will be added as the library evolves and more functionality is implemented.
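The sketch is based on the `SiteFactory`, `Site.page_add` and `play()` code added alongside this README; the module path, doctree name, site name and the `mycollection:intro` page key are placeholders:

```v
#!/usr/bin/env -S v -n -w -cg -gc none -d use_openssl -enable-globals run

import freeflowuniverse.herolib.data.doctree
import freeflowuniverse.herolib.web.sitegen

// open the doctree that holds the source markdown collections
// ('default' is also the fallback name used by sitegen.play)
mut tree := doctree.new(name: 'default')!

// the factory writes generated sites under ~/hero/var/sitegen by default
mut factory := sitegen.new(mut tree)!
mut site := factory.site_get('docs')!

// render one doctree page ('$collection:$name') into the site,
// prefixed with Docusaurus-style frontmatter
site.page_add(
	src: 'mycollection:intro'
	path: 'getting_started/intro'
	title: 'Introduction'
	position: 1
)!
```

The same calls can be driven from heroscript through `play()`, using `!!site.doctree` and `!!site.page` actions.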

44
lib/web/sitegen/factory.v Normal file
View File

@@ -0,0 +1,44 @@
module sitegen
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.data.doctree
import os
pub struct SiteFactory {
pub mut:
sites map[string]&Site
path pathlib.Path
tree &doctree.Tree
}
@[params]
pub struct SiteNewArgs {
pub mut:
path string
}
// new creates a SiteFactory for the given doctree; generated sites are written under args.path (default: ~/hero/var/sitegen)
pub fn new(mut tree &doctree.Tree, args SiteNewArgs) !SiteFactory {
mut path := args.path
if path == '' {
path = '${os.home_dir()}/hero/var/sitegen'
}
mut factory := SiteFactory{
path: pathlib.get_dir(path: path, create: true)!
tree: tree
}
return factory
}
// site_get returns the site with the given name, creating and caching it on first use
pub fn (mut f SiteFactory) site_get(name string) !&Site {
	s := f.sites[name] or {
		// mysite is already a reference, so return it directly instead of taking &mysite
		mysite := &Site{
			path: f.path.dir_get_new(name)!
			name: name
			tree: f.tree
		}
		// cache the new site so later calls return the same instance
		f.sites[name] = mysite
		mysite
	}
	return s
}

62
lib/web/sitegen/play.v Normal file
View File

@@ -0,0 +1,62 @@
module sitegen
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.data.doctree
@[params]
pub struct PlayArgs {
pub mut:
heroscript string
heroscript_path string
plbook ?PlayBook
}
pub fn play(args_ PlayArgs) ! {
mut args := args_
	mut plbook := args.plbook or { playbook.new(text: args.heroscript, path: args.heroscript_path)! }
	mut doctreename := 'default'
	if plbook.exists(filter: 'site.doctree') {
		if plbook.exists_once(filter: 'site.doctree') {
			mut action := plbook.action_get(actor: 'site', name: 'doctree')!
			mut p := action.params
			doctreename = p.get('name') or { return error('need to specify name in site.doctree') }
		} else {
			return error("can't have more than one site.doctree")
		}
	}
	mut tree := doctree.new(name: doctreename) or {
		return error("can't find doctree with name ${doctreename}")
	}
// !!site.page name:"atest" path:"crazy/sub" position:1
// src:"marketplace_specs:tft_tfp_marketplace"
// title:"Just a Page"
// description:"A description not filled in"
// draft:1 hide_title:1
	mut factory := new(mut tree)!
	page_actions := plbook.find(filter: 'site.page')!
	mut mypage := Page{src: '', path: ''}
	for action in page_actions {
		mut p := action.params
		sitename := p.get('sitename') or { return error('need to specify sitename in site.page') }
		mypage.path = p.get_default('path', '')!
		pagename := mypage.path.split('/').last()
		mypage.position = p.get_int_default('position', 0)!
		mypage.src = p.get('src') or { return error('need to specify src in site.page') }
		mypage.title = p.get_default('title', pagename)!
		mypage.description = p.get_default('description', '')!
		mypage.draft = p.get_default_false('draft')
		mypage.hide_title = p.get_default_false('hide_title')
		mut site := factory.site_get(sitename)!
		site.page_add(mypage)!
	}
}
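
For reference, a heroscript sketch for the two actions parsed above; the parameter names (sitename, src, path, position, ...) come from this file and its site.page comment example, while the doctree name, site name and collection key are placeholders:

#!/usr/bin/env -S v -n -w -cg -gc none -d use_openssl -enable-globals run

import freeflowuniverse.herolib.web.sitegen

// assumption: module path and all values are placeholders
sitegen.play(
	heroscript: '
		!!site.doctree name:"default"

		!!site.page sitename:"docs" path:"getting_started/intro" position:1
			src:"mycollection:intro"
			title:"Introduction"
			description:"How to get started"
	'
)!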

74
lib/web/sitegen/site.v Normal file
View File

@@ -0,0 +1,74 @@
module sitegen
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.data.doctree
pub struct Site {
pub mut:
name string
path pathlib.Path
tree &doctree.Tree
}
@[params]
pub struct Page {
pub mut:
title string
description string
draft bool
position int
hide_title bool
src string @[required]
path string @[required]
}
// page_add renders a doctree page (identified by args.src as '$collection:$name')
// into this site under args.path, prefixed with frontmatter built from the Page params.
pub fn (mut site Site) page_add(args_ Page) ! {
	mut args := args_
	mut content := ['---']
	if !args.path.ends_with('.md') {
		args.path += '.md'
	}
	pagename := args.path.split('/').last()
	if args.title.len == 0 {
		args.title = pagename
	}
	content << "title: '${args.title}'"
	if args.description.len > 0 {
		content << "description: '${args.description}'"
	}
	if args.hide_title {
		content << 'hide_title: ${args.hide_title}'
	}
	if args.draft {
		content << 'draft: ${args.draft}'
	}
	if args.position > 0 {
		content << 'sidebar_position: ${args.position}'
	}
	content << '---'
	mut c := content.join('\n')
	mut mypage := site.tree.page_get(args.src) or {
		return error("Couldn't find page '${args.src}' in site tree: '${site.tree.name}', needs to be in form \$collection:\$name")
	}
	c += '\n${mypage.get_markdown()!}\n'
	pagepath := '${site.path.path}/${args.path}'
	mut pagefile := pathlib.get_file(path: pagepath, create: true)!
	pagefile.write(c)!
}

View File

@@ -0,0 +1,70 @@
module sitegen
import os
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.data.doctree
fn test_page_add() ! {
// Setup a dummy doctree.Tree and pathlib.Path
mut test_dir := pathlib.get_dir(path: os.join_path(os.temp_dir(), 'sitegen_test_output'))!
test_dir.delete()! // Clean up previous test runs
test_dir.create()!
mut tree := doctree.new()!
// Add a dummy page to the tree for page_add to find
	dummy_page_content := '# My Dummy Page\n\nThis is some content for the dummy page.'
	dummy_page_path := 'collection1:dummy_page'
	tree.page_add(dummy_page_path, dummy_page_content)!
	mut site := Site{
		name: 'TestSite'
		path: test_dir
		tree: &tree
	}
// Test Case 1: Basic page addition
	page1 := Page{
		title: 'Test Page 1'
		description: 'A simple test page.'
		src: dummy_page_path
		path: 'pages/test_page_1.md'
	}
	site.page_add(page1)!
	expected_content_page1 := "---\ntitle: 'Test Page 1'\ndescription: 'A simple test page.'\n---\n# My Dummy Page\n\nThis is some content for the dummy page.\n"
	mut output_file_page1 := pathlib.get_file(path: os.join_path(test_dir.path, 'pages/test_page_1.md'))!
	assert output_file_page1.exists()
	assert output_file_page1.read()! == expected_content_page1
// Test Case 2: Page with draft, no description, hide_title, and position
	page2 := Page{
		title: 'Test Page 2'
		draft: true
		position: 5
		hide_title: true
		src: dummy_page_path
		path: 'articles/test_page_2.md'
	}
	site.page_add(page2)!
	expected_content_page2 := "---\ntitle: 'Test Page 2'\nhide_title: true\ndraft: true\nsidebar_position: 5\n---\n# My Dummy Page\n\nThis is some content for the dummy page.\n"
	mut output_file_page2 := pathlib.get_file(path: os.join_path(test_dir.path, 'articles/test_page_2.md'))!
	assert output_file_page2.exists()
	assert output_file_page2.read()! == expected_content_page2
// Test Case 3: Page with no title (should use filename)
	page3 := Page{
		src: dummy_page_path
		path: 'blog/my_blog_post.md'
	}
	site.page_add(page3)!
	// the default title falls back to the file name, including the .md extension
	expected_content_page3 := "---\ntitle: 'my_blog_post.md'\n---\n# My Dummy Page\n\nThis is some content for the dummy page.\n"
	mut output_file_page3 := pathlib.get_file(path: os.join_path(test_dir.path, 'blog/my_blog_post.md'))!
	assert output_file_page3.exists()
	assert output_file_page3.read()! == expected_content_page3
// Clean up
test_dir.delete()!
}