This commit is contained in:
2025-10-13 05:36:06 +04:00
parent 44c12281d3
commit 10d1dc943c
41 changed files with 0 additions and 0 deletions

View File

@@ -1,107 +0,0 @@
# DedupeStore
DedupeStore is a content-addressable key-value store with built-in deduplication. It uses blake2b-160 content hashing to identify and deduplicate data, making it ideal for storing files or data blocks where the same content might appear multiple times.
## Features
- Content-based deduplication using blake2b-160 hashing
- Efficient storage using RadixTree for hash lookups
- Persistent storage using OurDB
- Maximum value size limit of 1MB
- Fast retrieval of data using content hash
- Automatic deduplication of identical content
## Usage
```v
import incubaid.herolib.data.dedupestor
// Create a new dedupestore
mut ds := dedupestor.new(
path: 'path/to/store'
reset: false // Set to true to reset existing data
)!
// Store some data
data := 'Hello, World!'.bytes()
hash := ds.store(data)!
println('Stored data with hash: ${hash}')
// Retrieve data using hash
retrieved := ds.get(hash)!
println('Retrieved data: ${retrieved.bytestr()}')
// Check if data exists
exists := ds.exists(hash)
println('Data exists: ${exists}')
// Attempting to store the same data again returns the same hash
same_hash := ds.store(data)!
assert hash == same_hash // True, data was deduplicated
```
## Implementation Details
DedupeStore uses two main components for storage:
1. **RadixTree**: Stores mappings from content hashes to data location IDs
2. **OurDB**: Stores the actual data blocks
When storing data:
1. The data is hashed using blake2b-160
2. If the hash exists in the RadixTree, the existing data location is returned
3. If the hash is new:
- Data is stored in OurDB, getting a new location ID
- Hash -> ID mapping is stored in RadixTree
- The hash is returned
When retrieving data:
1. The RadixTree is queried with the hash to get the data location ID
2. The data is retrieved from OurDB using the ID
## Size Limits
- Maximum value size: 1MB
- Attempting to store larger values will result in an error
## The Reference field
In the dedupestor system, the Reference struct is defined with two fields:
```v
pub struct Reference {
pub:
owner u16
id u32
}
```
The purpose of the id field in this context is to serve as an identifier within a specific owner's domain. Here's what each field represents:
owner (u16): Identifies which entity or system component "owns" or is referencing the data. This could represent different applications, users, or subsystems that are using the dedupestor.
id (u32): A unique identifier within that owner's domain. This allows each owner to have their own independent numbering system for referencing stored data.
Together, the {owner: 1, id: 100} combination creates a unique reference that:
Tracks which entities are referencing a particular piece of data
Allows the system to know when data can be safely deleted (when no references remain)
Provides a way for different components to maintain their own ID systems without conflicts
The dedupestor uses these references to implement a reference counting mechanism. When data is stored, a reference is attached to it. When all references to a piece of data are removed (via the delete method), the actual data can be safely deleted from storage.
This design allows for efficient deduplication - if the same data is stored multiple times with different references, it's only physically stored once, but the system keeps track of all the references to it.
## Testing
The module includes comprehensive tests covering:
- Basic store/retrieve operations
- Deduplication functionality
- Size limit enforcement
- Edge cases
Run tests with:
```bash
v test lib/data/dedupestor/
```

View File

@@ -1,130 +0,0 @@
module dedupe_ourdb
import incubaid.herolib.data.radixtree
import incubaid.herolib.data.ourdb
import incubaid.herolib.data.dedupestor
// DedupeStore provides a key-value store with deduplication based on content hashing.
// Identical payloads are stored once; each stored block carries a list of
// References so it can be reference-counted and deleted safely.
pub struct DedupeStore {
mut:
	radix &radixtree.RadixTree // For storing hash -> id mappings
	data  &ourdb.OurDB // For storing the actual data
}

// NewArgs are the constructor parameters for new().
@[params]
pub struct NewArgs {
pub mut:
	path  string // Base path for the store
	reset bool // Whether to reset existing data
}
// new creates a new deduplication store rooted at args.path.
// Two sub-stores are created underneath:
//   - <path>/radixtree: content hash -> serialized Metadata (id + references)
//   - <path>/data: the actual data blocks, record size capped at max_value_size
// Pass reset: true to wipe any existing data.
pub fn new(args NewArgs) !&DedupeStore {
	// Create the radixtree for hash -> id mapping
	mut rt := radixtree.new(
		path:  '${args.path}/radixtree'
		reset: args.reset
	)!
	// Create the ourdb for actual data storage
	mut db := ourdb.new(
		path:             '${args.path}/data'
		record_size_max:  dedupestor.max_value_size
		incremental_mode: true // We want auto-incrementing IDs
		reset:            args.reset
	)!
	return &DedupeStore{
		radix: &rt
		data:  &db
	}
}
// store stores data with its reference and returns its id.
// If the data already exists (same content hash), the existing id is returned
// without storing the payload again; ref is appended to the entry's metadata
// so the block is only removed once every reference has been deleted.
// Errors when data exceeds max_value_size (1MB).
pub fn (mut ds DedupeStore) store(data []u8, ref dedupestor.Reference) !u32 {
	// Check size limit
	if data.len > dedupestor.max_value_size {
		return error('value size exceeds maximum allowed size of 1MB')
	}
	// Calculate blake2b-160 hash of the value
	hash := dedupestor.hash_data(data)
	// Check if this hash already exists
	if metadata_bytes := ds.radix.get(hash) {
		// Value already exists, add new ref & return the id
		// (add_reference is a no-op for an already-known (owner, id) pair)
		mut metadata_obj := dedupestor.bytes_to_metadata(metadata_bytes)
		metadata_obj = metadata_obj.add_reference(ref)!
		ds.radix.update(hash, metadata_obj.to_bytes())!
		return metadata_obj.id
	}
	// Store the actual data in ourdb
	id := ds.data.set(data: data)!
	metadata_obj := dedupestor.Metadata{
		id:         id
		references: [ref]
	}
	// Store the mapping of hash -> id in radixtree
	ds.radix.set(hash, metadata_obj.to_bytes())!
	return metadata_obj.id
}
// get retrieves a stored value by its id (as returned by store).
pub fn (mut ds DedupeStore) get(id u32) ![]u8 {
	return ds.data.get(id)!
}
// get_from_hash retrieves a value by its content hash (blake2b-160 hex string).
pub fn (mut ds DedupeStore) get_from_hash(hash string) ![]u8 {
	// Get the ID from radixtree
	metadata_bytes := ds.radix.get(hash)!
	// Convert bytes back to metadata
	metadata_obj := dedupestor.bytes_to_metadata(metadata_bytes)
	// Get the actual data from ourdb
	return ds.data.get(metadata_obj.id)!
}
// id_exists reports whether a data record with the given id is stored.
pub fn (mut ds DedupeStore) id_exists(id u32) bool {
	return if _ := ds.data.get(id) { true } else { false }
}
// hash_exists reports whether an entry for the given content hash exists.
pub fn (mut ds DedupeStore) hash_exists(hash string) bool {
	if _ := ds.radix.get(hash) {
		return true
	}
	return false
}
// delete removes ref from the entry owning the data block with the given id.
// If that was the last reference, both the hash entry and the data block are
// removed; otherwise only the metadata is rewritten.
pub fn (mut ds DedupeStore) delete(id u32, ref dedupestor.Reference) ! {
	// Re-derive the content hash (blake2b-160) from the stored payload,
	// since the radixtree is keyed by hash rather than id
	data := ds.data.get(id)!
	hash := dedupestor.hash_data(data)
	// Get the current entry from radixtree
	metadata_bytes := ds.radix.get(hash)!
	mut metadata_obj := dedupestor.bytes_to_metadata(metadata_bytes)
	metadata_obj = metadata_obj.remove_reference(ref)!
	if metadata_obj.references.len == 0 {
		// Delete from radixtree
		ds.radix.delete(hash)!
		// Delete from data db
		ds.data.delete(id)!
		return
	}
	// Update hash metadata
	ds.radix.update(hash, metadata_obj.to_bytes())!
}

View File

@@ -1,188 +0,0 @@
module dedupe_ourdb
import os
import incubaid.herolib.data.dedupestor
// testsuite_begin recreates every test directory so each run starts clean.
fn testsuite_begin() ! {
	for dir in [
		'/tmp/dedupestor_test',
		'/tmp/dedupestor_test_size',
		'/tmp/dedupestor_test_exists',
		'/tmp/dedupestor_test_multiple',
		'/tmp/dedupestor_test_refs',
	] {
		if os.exists(dir) {
			os.rmdir_all(dir) or {}
		}
		os.mkdir_all(dir) or {}
	}
}
// Covers the primary store/get round-trip, deduplication of identical
// payloads under different references, and distinct ids for distinct data.
fn test_basic_operations() ! {
	mut ds := new(
		path:  '/tmp/dedupestor_test'
		reset: true
	)!
	// Test storing and retrieving data
	value1 := 'test data 1'.bytes()
	ref1 := dedupestor.Reference{
		owner: 1
		id:    1
	}
	id1 := ds.store(value1, ref1)!
	retrieved1 := ds.get(id1)!
	assert retrieved1 == value1
	// Test deduplication with different reference
	ref2 := dedupestor.Reference{
		owner: 1
		id:    2
	}
	id2 := ds.store(value1, ref2)!
	assert id1 == id2 // Should return same id for same data
	// Test different data gets different id
	value2 := 'test data 2'.bytes()
	ref3 := dedupestor.Reference{
		owner: 1
		id:    3
	}
	id3 := ds.store(value2, ref3)!
	assert id1 != id3 // Should be different id for different data
	retrieved2 := ds.get(id3)!
	assert retrieved2 == value2
}
// Verifies the 1MB max_value_size limit: a 1KB payload stores fine,
// a 2MB payload must be rejected by store().
fn test_size_limit() ! {
	mut ds := new(
		path:  '/tmp/dedupestor_test_size'
		reset: true
	)!
	// Test data under size limit (1KB)
	small_data := []u8{len: 1024, init: u8(index)}
	ref := dedupestor.Reference{
		owner: 1
		id:    1
	}
	small_id := ds.store(small_data, ref)!
	retrieved := ds.get(small_id)!
	assert retrieved == small_data
	// Test data over size limit (2MB); reusing ref is fine since the call must fail
	large_data := []u8{len: 2 * 1024 * 1024, init: u8(index)}
	if _ := ds.store(large_data, ref) {
		assert false, 'Expected error for data exceeding size limit'
	}
}
// Checks id_exists for both a stored id and an id that was never written.
fn test_exists() ! {
	mut store := new(
		path:  '/tmp/dedupestor_test_exists'
		reset: true
	)!
	payload := 'test data'.bytes()
	owner_ref := dedupestor.Reference{
		owner: 1
		id:    1
	}
	stored_id := store.store(payload, owner_ref)!
	assert store.id_exists(stored_id) == true
	assert store.id_exists(u32(99)) == false
}
// Stores five distinct values, verifies retrieval, then re-stores the same
// payloads under a different owner to confirm ids are deduplicated.
fn test_multiple_operations() ! {
	mut ds := new(
		path:  '/tmp/dedupestor_test_multiple'
		reset: true
	)!
	// Store multiple values
	mut values := [][]u8{}
	mut ids := []u32{}
	for i in 0 .. 5 {
		value := 'test data ${i}'.bytes()
		values << value
		ref := dedupestor.Reference{
			owner: 1
			id:    u32(i)
		}
		id := ds.store(value, ref)!
		ids << id
	}
	// Verify all values can be retrieved
	for i, id in ids {
		retrieved := ds.get(id)!
		assert retrieved == values[i]
	}
	// Test deduplication by storing same values again (different owner)
	for i, value in values {
		ref := dedupestor.Reference{
			owner: 2
			id:    u32(i)
		}
		id := ds.store(value, ref)!
		assert id == ids[i] // Should get same id for same data
	}
}
// Stores one payload under three references, then removes them one by one;
// the data must survive until the last reference is deleted.
fn test_references() ! {
	mut ds := new(
		path:  '/tmp/dedupestor_test_refs'
		reset: true
	)!
	// Store same data with different references
	value := 'test data'.bytes()
	ref1 := dedupestor.Reference{
		owner: 1
		id:    1
	}
	ref2 := dedupestor.Reference{
		owner: 1
		id:    2
	}
	ref3 := dedupestor.Reference{
		owner: 2
		id:    1
	}
	// Store with first reference
	id := ds.store(value, ref1)!
	// Store same data with second reference
	id2 := ds.store(value, ref2)!
	assert id == id2 // Same id for same data
	// Store same data with third reference
	id3 := ds.store(value, ref3)!
	assert id == id3 // Same id for same data
	// Delete first reference - data should still exist
	ds.delete(id, ref1)!
	assert ds.id_exists(id) == true
	// Delete second reference - data should still exist
	ds.delete(id, ref2)!
	assert ds.id_exists(id) == true
	// Delete last reference - data should be gone
	ds.delete(id, ref3)!
	assert ds.id_exists(id) == false
	// Verify data is actually deleted by trying to get it
	if _ := ds.get(id) {
		assert false, 'Expected error getting deleted data'
	}
}

View File

@@ -1,10 +0,0 @@
module dedupestor
import crypto.blake2b
// Maximum size of a single stored value; larger payloads are rejected by store().
pub const max_value_size = 1024 * 1024 // 1MB
// hash_data returns the blake2b-160 digest of data as a 40-character hex
// string; this is the content address used for deduplication.
pub fn hash_data(data []u8) string {
	digest := blake2b.sum160(data)
	return digest.hex()
}

View File

@@ -1,109 +0,0 @@
module dedupestor
// Metadata represents a stored value with its ID and references.
// Serialized form (see to_bytes): 4 little-endian id bytes followed by
// one 6-byte record per reference.
pub struct Metadata {
pub:
	id u32 // id of the data block in the backing store
pub mut:
	references []Reference // owners currently referencing this block
}

// Reference represents a reference to stored data.
// owner identifies the referencing subsystem; id is unique within that owner.
pub struct Reference {
pub:
	owner u16
	id    u32
}
// to_bytes serializes Metadata for storage: 4 little-endian id bytes,
// then one 6-byte record per reference.
pub fn (m Metadata) to_bytes() []u8 {
	mut out := []u8{cap: 4 + m.references.len * 6}
	out << u32_to_bytes(m.id)
	for ref in m.references {
		out << ref.to_bytes()
	}
	return out
}
// bytes_to_metadata deserializes Metadata produced by to_bytes.
// Inputs shorter than 4 bytes yield the zero-value Metadata; a trailing
// partial reference record (fewer than 6 bytes) is ignored.
pub fn bytes_to_metadata(b []u8) Metadata {
	if b.len < 4 {
		return Metadata{
			id:         0
			references: []Reference{}
		}
	}
	mut refs := []Reference{}
	// Each reference occupies exactly 6 bytes after the 4-byte id.
	for offset := 4; offset + 6 <= b.len; offset += 6 {
		refs << bytes_to_reference(b[offset..offset + 6])
	}
	return Metadata{
		id:         bytes_to_u32(b[0..4])
		references: refs
	}
}
// add_reference appends ref unless an identical (owner, id) pair is already
// present; returns the updated Metadata.
pub fn (mut m Metadata) add_reference(ref Reference) !Metadata {
	already_known := m.references.any(it.owner == ref.owner && it.id == ref.id)
	if !already_known {
		m.references << ref
	}
	return m
}
// remove_reference drops every reference matching (owner, id); a ref that is
// not present leaves the list unchanged. Returns the updated Metadata.
pub fn (mut m Metadata) remove_reference(ref Reference) !Metadata {
	m.references = m.references.filter(it.owner != ref.owner || it.id != ref.id)
	return m
}
// to_bytes serializes a Reference as 6 little-endian bytes:
// 2 for owner followed by 4 for id.
pub fn (r Reference) to_bytes() []u8 {
	return [
		u8(r.owner),
		u8(r.owner >> 8),
		u8(r.id),
		u8(r.id >> 8),
		u8(r.id >> 16),
		u8(r.id >> 24),
	]
}
// bytes_to_reference rebuilds a Reference from the 6-byte little-endian
// layout written by Reference.to_bytes.
pub fn bytes_to_reference(b []u8) Reference {
	mut owner := u16(0)
	for i in 0 .. 2 {
		owner |= u16(b[i]) << (8 * i)
	}
	mut id := u32(0)
	for i in 0 .. 4 {
		id |= u32(b[i + 2]) << (8 * i)
	}
	return Reference{
		owner: owner
		id:    id
	}
}
// u32_to_bytes encodes n as 4 little-endian bytes.
fn u32_to_bytes(n u32) []u8 {
	mut out := []u8{len: 4}
	for i in 0 .. 4 {
		out[i] = u8(n >> (8 * i))
	}
	return out
}
// bytes_to_u32 decodes 4 little-endian bytes into a u32.
fn bytes_to_u32(b []u8) u32 {
	mut out := u32(0)
	for i in 0 .. 4 {
		out |= u32(b[i]) << (8 * i)
	}
	return out
}

View File

@@ -1,121 +0,0 @@
module dedupestor
// Round-trips a Reference through to_bytes/bytes_to_reference.
fn test_reference_bytes_conversion() {
	original := Reference{
		owner: 12345
		id:    67890
	}
	round_tripped := bytes_to_reference(original.to_bytes())
	assert original.owner == round_tripped.owner
	assert original.id == round_tripped.id
}
// Round-trips a Metadata carrying two references through
// to_bytes/bytes_to_metadata and compares every field.
fn test_metadata_bytes_conversion() {
	mut metadata := Metadata{
		id:         42
		references: []Reference{}
	}
	ref1 := Reference{
		owner: 1
		id:    100
	}
	ref2 := Reference{
		owner: 2
		id:    200
	}
	metadata = metadata.add_reference(ref1)!
	metadata = metadata.add_reference(ref2)!
	bytes := metadata.to_bytes()
	recovered := bytes_to_metadata(bytes)
	assert metadata.id == recovered.id
	assert metadata.references.len == recovered.references.len
	assert metadata.references[0].owner == recovered.references[0].owner
	assert metadata.references[0].id == recovered.references[0].id
	assert metadata.references[1].owner == recovered.references[1].owner
	assert metadata.references[1].id == recovered.references[1].id
}
// Verifies add_reference appends new references and ignores duplicates.
fn test_add_reference() {
	mut metadata := Metadata{
		id:         1
		references: []Reference{}
	}
	ref1 := Reference{
		owner: 1
		id:    100
	}
	ref2 := Reference{
		owner: 2
		id:    200
	}
	// Add first reference
	metadata = metadata.add_reference(ref1)!
	assert metadata.references.len == 1
	assert metadata.references[0].owner == ref1.owner
	assert metadata.references[0].id == ref1.id
	// Add second reference
	metadata = metadata.add_reference(ref2)!
	assert metadata.references.len == 2
	assert metadata.references[1].owner == ref2.owner
	assert metadata.references[1].id == ref2.id
	// Try adding duplicate reference
	metadata = metadata.add_reference(ref1)!
	assert metadata.references.len == 2 // Length shouldn't change
}
// Verifies remove_reference drops matching refs, ignores unknown refs,
// and can empty the list entirely.
fn test_remove_reference() {
	mut metadata := Metadata{
		id:         1
		references: []Reference{}
	}
	ref1 := Reference{
		owner: 1
		id:    100
	}
	ref2 := Reference{
		owner: 2
		id:    200
	}
	metadata = metadata.add_reference(ref1)!
	metadata = metadata.add_reference(ref2)!
	// Remove first reference
	metadata = metadata.remove_reference(ref1)!
	assert metadata.references.len == 1
	assert metadata.references[0].owner == ref2.owner
	assert metadata.references[0].id == ref2.id
	// Remove non-existent reference
	metadata = metadata.remove_reference(Reference{ owner: 999, id: 999 })!
	assert metadata.references.len == 1 // Length shouldn't change
	// Remove last reference
	metadata = metadata.remove_reference(ref2)!
	assert metadata.references.len == 0
}
// Decoding an empty buffer must fall back to the zero-value Metadata.
fn test_empty_metadata_bytes() {
	decoded := bytes_to_metadata([]u8{})
	assert decoded.id == 0
	assert decoded.references.len == 0
}
// u32 -> bytes -> u32 must be the identity.
fn test_u32_bytes_conversion() {
	original := u32(0x12345678)
	assert bytes_to_u32(u32_to_bytes(original)) == original
}

View File

@@ -1,349 +0,0 @@
module encoderherocomplex
// import time
// // i8 uses `Any` as a 16-bit integer.
// pub fn (f Any) i8() i8 {
// match f {
// i8 {
// return f
// }
// i16, i32, int, i64, u8, u16, u32, u64, f32, f64, bool {
// return i8(f)
// }
// string {
// return f.i8()
// }
// else {
// return 0
// }
// }
// }
// // i16 uses `Any` as a 16-bit integer.
// pub fn (f Any) i16() i16 {
// match f {
// i16 {
// return f
// }
// i8, i32, int, i64, u8, u16, u32, u64, f32, f64, bool {
// return i16(f)
// }
// string {
// return f.i16()
// }
// else {
// return 0
// }
// }
// }
// // int uses `Any` as an integer.
// pub fn (f Any) int() int {
// match f {
// int {
// return f
// }
// i8, i16, i32, i64, u8, u16, u32, u64, f32, f64, bool {
// return int(f)
// }
// string {
// return f.int()
// }
// else {
// return 0
// }
// }
// }
// // i32 uses `Any` as a 32-bit integer.
// pub fn (f Any) i32() i32 {
// match f {
// i32 {
// return f
// }
// i8, i16, int, i64, u8, u16, u32, u64, f32, f64, bool {
// return i32(f)
// }
// string {
// return f.i32()
// }
// else {
// return 0
// }
// }
// }
// // i64 uses `Any` as a 64-bit integer.
// pub fn (f Any) i64() i64 {
// match f {
// i64 {
// return f
// }
// i8, i16, i32, int, u8, u16, u32, u64, f32, f64, bool {
// return i64(f)
// }
// string {
// return f.i64()
// }
// else {
// return 0
// }
// }
// }
// // u64 uses `Any` as a 64-bit unsigned integer.
// pub fn (f Any) u64() u64 {
// match f {
// u64 {
// return f
// }
// u8, u16, u32, i8, i16, i32, int, i64, f32, f64, bool {
// return u64(f)
// }
// string {
// return f.u64()
// }
// else {
// return 0
// }
// }
// }
// // f32 uses `Any` as a 32-bit float.
// pub fn (f Any) f32() f32 {
// match f {
// f32 {
// return f
// }
// bool, i8, i16, i32, int, i64, u8, u16, u32, u64, f64 {
// return f32(f)
// }
// string {
// return f.f32()
// }
// else {
// return 0.0
// }
// }
// }
// // f64 uses `Any` as a 64-bit float.
// pub fn (f Any) f64() f64 {
// match f {
// f64 {
// return f
// }
// i8, i16, i32, int, i64, u8, u16, u32, u64, f32 {
// return f64(f)
// }
// string {
// return f.f64()
// }
// else {
// return 0.0
// }
// }
// }
// // bool uses `Any` as a bool.
// pub fn (f Any) bool() bool {
// match f {
// bool {
// return f
// }
// string {
// if f == 'false' {
// return false
// }
// if f == 'true' {
// return true
// }
// if f.len > 0 {
// return f != '0' && f != '0.0'
// } else {
// return false
// }
// }
// i8, i16, i32, int, i64 {
// return i64(f) != 0
// }
// u8, u16, u32, u64 {
// return u64(f) != 0
// }
// f32, f64 {
// return f64(f) != 0.0
// }
// else {
// return false
// }
// }
// }
// // arr uses `Any` as an array.
// pub fn (f Any) arr() []Any {
// if f is []Any {
// return f
// } else if f is map[string]Any {
// mut arr := []Any{}
// for _, v in f {
// arr << v
// }
// return arr
// }
// return [f]
// }
// // as_map uses `Any` as a map.
// pub fn (f Any) as_map() map[string]Any {
// if f is map[string]Any {
// return f
// } else if f is []Any {
// mut mp := map[string]Any{}
// for i, fi in f {
// mp['${i}'] = fi
// }
// return mp
// }
// return {
// '0': f
// }
// }
// // to_time uses `Any` as a time.Time.
// pub fn (f Any) to_time() !time.Time {
// match f {
// time.Time {
// return f
// }
// i64 {
// return time.unix(f)
// }
// string {
// is_iso8601 := f[4] == `-` && f[7] == `-`
// if is_iso8601 {
// return time.parse_iso8601(f)!
// }
// is_rfc3339 := f.len == 24 && f[23] == `Z` && f[10] == `T`
// if is_rfc3339 {
// return time.parse_rfc3339(f)!
// }
// mut is_unix_timestamp := true
// for c in f {
// if c == `-` || (c >= `0` && c <= `9`) {
// continue
// }
// is_unix_timestamp = false
// break
// }
// if is_unix_timestamp {
// return time.unix(f.i64())
// }
// // TODO: parse_iso8601
// // TODO: parse_rfc2822
// return time.parse(f)!
// }
// else {
// return error('not a time value: ${f} of type: ${f.type_name()}')
// }
// }
// }
// // map_from convert a struct to map of Any
// pub fn map_from[T](t T) map[string]Any {
// mut m := map[string]Any{}
// $if T is $struct {
// $for field in T.fields {
// value := t.$(field.name)
// $if field.is_array {
// mut arr := []Any{}
// for variable in value {
// arr << Any(variable)
// }
// m[field.name] = arr
// arr.clear()
// } $else $if field.is_struct {
// m[field.name] = map_from(value)
// } $else $if field.is_map {
// // TODO
// } $else $if field.is_alias {
// // TODO
// } $else $if field.is_option {
// // TODO
// } $else {
// // TODO: improve memory usage when convert
// $if field.typ is string {
// m[field.name] = value.str()
// } $else $if field.typ is bool {
// m[field.name] = t.$(field.name).str().bool()
// } $else $if field.typ is i8 {
// m[field.name] = t.$(field.name).str().i8()
// } $else $if field.typ is i16 {
// m[field.name] = t.$(field.name).str().i16()
// } $else $if field.typ is i32 {
// m[field.name] = t.$(field.name).str().i32()
// } $else $if field.typ is int {
// m[field.name] = t.$(field.name).str().int()
// } $else $if field.typ is i64 {
// m[field.name] = t.$(field.name).str().i64()
// } $else $if field.typ is f32 {
// m[field.name] = t.$(field.name).str().f32()
// } $else $if field.typ is f64 {
// m[field.name] = t.$(field.name).str().f64()
// } $else $if field.typ is u8 {
// m[field.name] = t.$(field.name).str().u8()
// } $else $if field.typ is u16 {
// m[field.name] = t.$(field.name).str().u16()
// } $else $if field.typ is u32 {
// m[field.name] = t.$(field.name).str().u32()
// } $else $if field.typ is u64 {
// m[field.name] = t.$(field.name).str().u64()
// } $else {
// // return error("The type of `${field.name}` can't be decoded. Please open an issue at https://github.com/vlang/v/issues/new/choose")
// }
// }
// }
// }
// return m
// }
// // str returns the JSON string representation of the `map[string]Any` type.
// pub fn (f map[string]Any) str() string {
// return Any(f).json_str()
// }
// // str returns the JSON string representation of the `[]Any` type.
// pub fn (f []Any) str() string {
// return Any(f).json_str()
// }
// // str returns the string representation of the `Any` type. Use the `json_str` method
// // if you want to use the escaped str() version of the `Any` type.
// pub fn (f Any) str() string {
// if f is string {
// return f
// } else {
// return f.json_str()
// }
// }
// // json_str returns the JSON string representation of the `Any` type.
// pub fn (f Any) json_str() string {
// return encode(f)
// }
// // prettify_json_str returns the pretty-formatted JSON string representation of the `Any` type.
// @[manualfree]
// pub fn (f Any) prettify_json_str() string {
// mut params := []u8{}
// defer {
// unsafe { params.free() }
// }
// mut enc := Encoder{
// newline: `\n`
// newline_spaces_count: 2
// }
// enc.encode_value(f, mut params) or {}
// return params.bytestr()
// }

View File

@@ -1,124 +0,0 @@
module encoderherocomplex
import incubaid.herolib.data.paramsparser
import time
// Decoder pairs a target object with the raw heroscript being decoded.
// NOTE(review): not referenced by decode/decode_struct in this file — confirm
// whether it is used elsewhere before removing.
pub struct Decoder[T] {
pub mut:
	object T
	data   string
}
// decode parses a heroscript string into a new instance of struct T.
// See decode_struct for the recognized action-name patterns.
pub fn decode[T](data string) !T {
	return decode_struct[T](T{}, data)
}
// decode_struct decodes a heroscript string into struct T.
// It accepts any of four action-name spellings for the object
// (<obj>.define, <obj>.configure, define.<obj>, configure.<obj>), fills the
// top-level params into T, then recursively decodes nested struct fields and
// arrays of structs from matching `!!<action>.<field>` actions.
fn decode_struct[T](_ T, data string) !T {
	mut typ := T{}
	$if T is $struct {
		obj_name := T.name.all_after_last('.').to_lower()
		// Probe the four supported action-name patterns in priority order.
		mut action_name := '${obj_name}.define'
		if !data.contains(action_name) {
			action_name = '${obj_name}.configure'
			if !data.contains(action_name) {
				action_name = 'define.${obj_name}'
				if !data.contains(action_name) {
					action_name = 'configure.${obj_name}'
					if !data.contains(action_name) {
						return error('Data does not contain action: ${obj_name}.define, ${obj_name}.configure, define.${obj_name}, or configure.${obj_name}')
					}
				}
			}
		}
		// Split by !! and filter for relevant actions
		actions_split := data.split('!!')
		actions := actions_split.filter(it.trim_space().len > 0)
		// Find and parse main action (excluding nested `.<obj>.` actions)
		main_actions := actions.filter(it.contains(action_name) && !it.contains('.${obj_name}.'))
		if main_actions.len > 0 {
			action_str := main_actions[0]
			params_str := action_str.all_after(action_name).trim_space()
			params := paramsparser.parse(params_str) or {
				return error('Could not parse params: ${params_str}\n${err}')
			}
			typ = params.decode[T](typ)!
		}
		// Process nested fields
		$for field in T.fields {
			mut should_skip := false
			for attr in field.attrs {
				if attr.contains('skip') || attr.contains('skipdecode') {
					should_skip = true
					break
				}
			}
			if !should_skip {
				field_name := field.name.to_lower()
				$if field.is_struct {
					$if field.typ !is time.Time {
						// Handle nested structs; a capitalized field name marks
						// an embedded struct, which is not decoded recursively
						if !field.name[0].is_capital() {
							nested_action := '${action_name}.${field_name}'
							nested_actions := actions.filter(it.contains(nested_action))
							if nested_actions.len > 0 {
								nested_data := '!!' + nested_actions.join('\n!!')
								typ.$(field.name) = decode_struct(typ.$(field.name), nested_data)!
							}
						}
					}
				} $else $if field.is_array {
					// Handle arrays of structs: every matching action becomes
					// one element of the array
					elem_type_name := field.typ.all_after(']').to_lower()
					array_action := '${action_name}.${elem_type_name}'
					array_actions := actions.filter(it.contains(array_action))
					if array_actions.len > 0 {
						mut arr_data := []string{}
						for action in array_actions {
							arr_data << '!!' + action
						}
						// Decode each array item
						decoded_arr := decode_array(typ.$(field.name), arr_data.join('\n'))!
						typ.$(field.name) = decoded_arr
					}
				}
			}
		}
	} $else {
		return error("The type `${T.name}` can't be decoded.")
	}
	return typ
}
// decode_array decodes a heroscript blob containing one `!!…` action per
// element into a []T; only struct element types are supported.
fn decode_array[T](_ []T, data string) ![]T {
	mut result := []T{}
	$if T is $struct {
		// Every non-empty `!!` segment describes one array element.
		for segment in data.split('!!').filter(it.trim_space().len > 0) {
			result << decode_struct(T{}, '!!' + segment)!
		}
	} $else {
		return error('Array decoding only supports structs')
	}
	return result
}

View File

@@ -1,146 +0,0 @@
module encoderherocomplex
import time
import incubaid.herolib.data.paramsparser
import incubaid.herolib.core.texttools
// Simple flat struct used by test_decode_simple.
struct TestStruct {
	id   int
	name string
}

// Scripts covering an empty action, a fully populated action, and an
// action name that does not match TestStruct.
const blank_script = '!!define.test_struct'
const full_script = '!!define.test_struct id: 42 name: testobject'
const invalid_script = '!!define.another_struct'
// Checks blank/full/invalid scripts against TestStruct decoding.
fn test_decode_simple() ! {
	mut object := decode[TestStruct](blank_script)!
	assert object == TestStruct{}
	object = decode[TestStruct](full_script)!
	assert object == TestStruct{
		id:   42
		name: 'testobject'
	}
	// Mismatched action name must error; fall back to the zero value.
	object = decode[TestStruct](invalid_script) or {
		assert true
		TestStruct{}
	}
}
// Structs and scripts for the nested-struct decoding test.
struct ChildStruct {
	text   string
	number int
}

struct ComplexStruct {
	id    int
	name  string
	child ChildStruct
}

const blank_complex = '!!define.complex_struct
!!define.child_struct'
const partial_complex = '!!define.complex_struct id: 42 name: testcomplex
!!define.child_struct'
const full_complex = '!!define.complex_struct id:42 name:testobject
!!define.complex_struct.child_struct text:child_text number:24'
// Checks nested-struct decoding for blank, partial and fully populated scripts.
fn test_decode_complex() ! {
	mut object := decode[ComplexStruct](blank_complex)!
	assert object == ComplexStruct{}
	object = decode[ComplexStruct](partial_complex)!
	assert object == ComplexStruct{
		id:   42
		name: 'testcomplex'
	}
	object = decode[ComplexStruct](full_complex)!
	assert object == ComplexStruct{
		id:   42
		name: 'testobject'
		child: ChildStruct{
			text:   'child_text'
			number: 24
		}
	}
}
// Fixture types for the full Person round-trip test below.
pub struct Base {
	id int
	// remarks []Remark TODO: add support
}

pub struct Remark {
	text string
}

pub struct Person {
	Base
mut:
	name     string
	age      int
	birthday time.Time
	deathday time.Time
	car      Car
	profiles []Profile
}

pub struct Car {
	name      string
	year      int
	insurance Insurance
}

pub struct Insurance {
	provider   string
	expiration time.Time
}

pub struct Profile {
	platform string
	url      string
}

// Heroscript source and the Person value it is expected to decode to.
const person_heroscript = "!!define.person id:1 name:Bob age:21 birthday:'2012-12-12 00:00:00'
!!define.person.car name:'Bob\\'s car' year:2014
!!define.person.car.insurance provider:insurer expiration:'0000-00-00 00:00:00'
!!define.person.profile platform:Github url:github.com/example"
const person = Person{
	id:   1
	name: 'Bob'
	age:  21
	birthday: time.new(
		day:   12
		month: 12
		year:  2012
	)
	car: Car{
		name: "Bob's car"
		year: 2014
	}
	profiles: [
		Profile{
			platform: 'Github'
			url:      'github.com/example'
		},
	]
}
// Decodes the person fixture and checks equality; an empty script must error.
fn test_decode() ! {
	// Test decoding with proper person data
	object := decode[Person](person_heroscript)!
	assert object == person
	// Test that empty string fails as expected
	decode[Person]('') or {
		assert true // This should fail, which is correct
		return
	}
	assert false // Should not reach here
}

View File

@@ -1,168 +0,0 @@
module encoderherocomplex
import incubaid.herolib.data.paramsparser
import time
import v.reflection
import incubaid.herolib.data.ourtime
// should_skip_field reports whether a field's attribute list contains a
// `skip` or `skipdecode` marker, alone or inside a `;`-separated list
// (whitespace is ignored).
fn should_skip_field(attrs []string) bool {
	for attr in attrs {
		cleaned := attr.to_lower().replace(' ', '').replace('\t', '')
		for marker in ['skip', 'skipdecode'] {
			if cleaned == marker || cleaned.starts_with('${marker};')
				|| cleaned.ends_with(';${marker}') || cleaned.contains(';${marker};') {
				return true
			}
		}
	}
	return false
}
// import incubaid.herolib.ui.console
// Encoder builds the HEROSCRIPT representation of a value.
// Each nested struct becomes a child Encoder; action_names accumulates the
// dotted path used in the emitted `!!define....` action name.
pub struct Encoder {
pub mut:
	escape_unicode bool = true
	action_name    string // NOTE(review): not read in this file; action_names below carries the path — confirm
	action_names   []string
	params         paramsparser.Params
	children       []Encoder
	parent         ?&Encoder @[skip; str: skip]
}
// encode is a generic function that encodes a struct (or array of structs)
// into a HEROSCRIPT string rooted at a `!!define...` action.
pub fn encode[T](val T) !string {
	mut e := Encoder{
		params: paramsparser.Params{}
	}
	$if T is $struct {
		e.encode_struct[T](val)!
	} $else $if T is $array {
		// TODO: need to make comma separated list only works if int,u8,u16,i8... or string if string put all elements in \''...\'',...
		// NOTE(review): result of add_child_list is not propagated with `!` — confirm intent
		e.add_child_list[T](val, 'TODO')
	} $else {
		return error('can only add elements for struct or array of structs. \n${val}')
	}
	return e.export()!
}
// export renders the encoder (and, recursively, its children) into heroscript.
// The action name is `!!define.` followed by the accumulated action_names path.
pub fn (e Encoder) export() !string {
	mut script := e.params.export(
		pre:        '!!define.${e.action_names.join('.')}'
		indent:     '\t'
		skip_empty: true
	)
	if e.children.len > 0 {
		// NOTE(review): `!` inside the map closure — confirm error propagation
		// behaves as intended here
		script += '\n' + e.children.map(it.export()!).join('\n')
	}
	return script
}
// add_child encodes val as a child action of e.
// parent is the key of the child within the parent action, e.g. define.customer:contact.
// Arrays are flattened: each element becomes its own child keyed parent:<index>.
// Only structs (or arrays of structs) can be added; anything else errors.
pub fn (mut e Encoder) add_child[T](val T, parent string) ! {
	$if T is $array {
		// Recurse per element, letting the compiler infer the element type.
		// Fixes: the call result was previously bound to an unused `mut e2`,
		// the explicit `[T]` forced the array type onto element values, and
		// counter was never incremented so every element was keyed ':0'.
		mut counter := 0
		for valitem in val {
			e.add_child(valitem, '${parent}:${counter}')!
			counter += 1
		}
		return
	}
	mut e2 := Encoder{
		params:       paramsparser.Params{}
		parent:       &e
		action_names: e.action_names.clone() // careful, if not cloned gets mutated later
	}
	$if T is $struct {
		e2.params.set('key', parent)
		e2.encode_struct[T](val)!
		e.children << e2
	} $else {
		return error('can only add elements for struct or array of structs. \n${val}')
	}
}
// add_child_list adds every struct element of val as a child of e,
// keyed parent:<index>; non-struct element types are ignored.
// Fix: counter was previously declared inside the loop, so it was reset to 0
// on every iteration and all children shared the ':0' suffix.
pub fn (mut e Encoder) add_child_list[U](val []U, parent string) ! {
	mut counter := 0
	for i in 0 .. val.len {
		$if U is $struct {
			e.add_child(val[i], '${parent}:${counter}')!
			counter += 1
		}
	}
}
// add encodes a single struct value as a child encoder of e.
// The child's `key` param is set to the value's string representation.
// time.Time values are rejected (they are handled during params encoding).
pub fn (mut e Encoder) add[T](val T) ! {
	// $if T is []$struct {
	// // panic("not implemented")
	// for valitem in val{
	// mut e2:=e.add[T](valitem)!
	// }
	// }
	mut e2 := Encoder{
		params:       paramsparser.Params{}
		parent:       &e
		action_names: e.action_names.clone() // careful, if not cloned gets mutated later
	}
	$if T is $struct && T !is time.Time {
		e2.params.set('key', '${val}')
		e2.encode_struct[T](val)!
		e.children << e2
	} $else {
		return error('can only add elements for struct or array of structs. \n${val}')
	}
}
// encode_array adds each struct element of val as a child encoder of e.
// Non-struct element types are silently ignored.
pub fn (mut e Encoder) encode_array[U](val []U) ! {
	for item in val {
		$if U is $struct {
			e.add(item)!
		}
	}
}
// encode_struct encodes struct t into e: top-level scalar fields go through
// paramsparser, while nested structs and arrays of structs become child
// encoders (emitted as separate `!!define...` actions).
pub fn (mut e Encoder) encode_struct[T](t T) ! {
	mut mytype := reflection.type_of[T](t)
	struct_attrs := attrs_get_reflection(mytype)
	// Action name defaults to the lowercased struct name, overridable via @[alias]
	mut action_name := T.name.all_after_last('.').to_lower()
	// println('action_name: ${action_name} ${T.name}')
	if 'alias' in struct_attrs {
		action_name = struct_attrs['alias'].to_lower()
	}
	e.action_names << action_name
	params := paramsparser.encode[T](t, recursive: false)!
	e.params = params
	// encode children structs and array of structs
	$for field in T.fields {
		if !should_skip_field(field.attrs) {
			val := t.$(field.name)
			// time is encoded in the above params encoding step so skip and dont treat as recursive struct
			$if val is time.Time || val is ourtime.OurTime {
			} $else $if val is $struct {
				if field.name[0].is_capital() {
					// Embedded struct: flatten its params into the parent action
					embedded_params := paramsparser.encode(val, recursive: false)!
					e.params.params << embedded_params.params
				} else {
					e.add(val)!
				}
			} $else $if val is $array {
				e.encode_array(val)!
			}
		}
	}
}

View File

@@ -1,42 +0,0 @@
module encoderherocomplex
import incubaid.herolib.data.paramsparser
import time
import v.reflection
// MyStruct exercises encoding of a struct with a field that must be skipped.
struct MyStruct {
	id int
	name string
	// skip attributes would be best way how to do the encoding but can't get it to work
	other ?&Remark @[skip; str: skip]
}
// is the one we should skip
// Remark is only reachable through the skipped `other` field above.
pub struct Remark {
	id int
}
// test_encode checks that the @[skip] field is excluded from the generated
// heroscript, and that decoding restores only the non-skipped fields.
fn test_encode() ! {
	mut o := MyStruct{
		id: 1
		name: 'test'
		other: &Remark{
			id: 123
		}
	}
	script := encode[MyStruct](o)!
	// `other` must not appear in the output
	assert script.trim_space() == '!!define.my_struct id:1 name:test'
	println(script)
	o2 := decode[MyStruct](script)!
	// the skipped pointer field is lost on roundtrip, by design
	assert o2 == MyStruct{
		id: 1
		name: 'test'
	}
	println(o2)
}

View File

@@ -1,121 +0,0 @@
module encoderherocomplex
import incubaid.herolib.data.paramsparser
import incubaid.herolib.data.ourtime
import time
import v.reflection
// Base is embedded into Person to exercise encoding of embedded structs.
struct Base {
	id int
	remarks []Remark
}
// Remark is a nested element type used via Base.remarks.
pub struct Remark {
	text string
}
// Company nests an array of Person to exercise deep/recursive encoding.
struct Company {
	name string
	founded ourtime.OurTime
	employees []Person
}
// company is the nested fixture; its expected encoding is company_script.
const company = Company{
	name: 'Tech Corp'
	founded: ourtime.new('2022-12-05 20:14')!
	employees: [
		person,
		Person{
			id: 2
			name: 'Alice'
			age: 30
			birthday: time.new(
				day: 20
				month: 6
				year: 1990
			)
			car: Car{
				name: "Alice's car"
				year: 2018
			}
			profiles: [
				Profile{
					platform: 'LinkedIn'
					url: 'linkedin.com/alice'
				},
			]
		},
	]
}
// Person embeds Base and combines scalar, optional, time, nested-struct
// and array-of-struct fields — the full surface the encoder must handle.
struct Person {
	Base
mut:
	name string
	age ?int = 20
	birthday time.Time
	deathday ?time.Time
	car Car
	profiles []Profile
}
// Car nests a further struct level (Insurance).
struct Car {
	name string
	year int
	insurance Insurance
}
// Insurance is the deepest nesting level in the fixture.
struct Insurance {
	provider string
	expiration time.Time
}
// Profile is the array-of-struct element type on Person.
struct Profile {
	platform string
	url string
}
// person_heroscript is the expected encoding of `person` below: one action
// per nesting level, with the path reflected in the action name.
const person_heroscript = "!!define.person id:1 name:Bob age:21 birthday:'2012-12-12 00:00:00'
!!define.person.car name:'Bob\\'s car' year:2014
!!define.person.car.insurance provider:insurer expiration:'0000-00-00 00:00:00'
!!define.person.profile platform:Github url:github.com/example"
// person is the flat fixture reused inside `company` as employee #1.
const person = Person{
	id: 1
	name: 'Bob'
	age: 21
	birthday: time.new(
		day: 12
		month: 12
		year: 2012
	)
	car: Car{
		name: "Bob's car"
		year: 2014
		insurance: Insurance{
			provider: 'insurer'
		}
	}
	profiles: [
		Profile{
			platform: 'Github'
			url: 'github.com/example'
		},
	]
}
// company_script is the expected encoding of `company`: the two employees
// appear as repeated `!!define.company.person` subtrees, in array order.
const company_script = "!!define.company name:'Tech Corp' founded:'2022-12-05 20:14'
!!define.company.person id:1 name:Bob age:21 birthday:'2012-12-12 00:00:00'
!!define.company.person.car name:'Bob\\'s car' year:2014
!!define.company.person.car.insurance provider:insurer expiration:'0000-00-00 00:00:00'
!!define.company.person.profile platform:Github url:github.com/example
!!define.company.person id:2 name:Alice age:30 birthday:'1990-06-20 00:00:00'
!!define.company.person.car name:'Alice\\'s car' year:2018
!!define.company.person.car.insurance provider:'' expiration:'0000-00-00 00:00:00'
!!define.company.person.profile platform:LinkedIn url:linkedin.com/alice"
// test_encode verifies both fixtures encode exactly to their expected scripts.
fn test_encode() ! {
	person_script := encode[Person](person)!
	assert person_script.trim_space() == person_heroscript.trim_space()
	assert encode[Company](company)!.trim_space() == company_script.trim_space()
}

View File

@@ -1,233 +0,0 @@
module encoderherocomplex
// PostgresqlClient holds connection settings; the field defaults describe
// a local default postgres instance and are what decode falls back to when
// a parameter is absent from the heroscript.
pub struct PostgresqlClient {
pub mut:
	name string = 'default'
	user string = 'root'
	port int = 5432
	host string = 'localhost'
	password string
	dbname string = 'postgres'
}
// no parameters at all: every field must come from the struct defaults
const postgres_client_blank = '!!postgresql_client.configure'
// every field explicitly set
const postgres_client_full = '!!postgresql_client.configure name:production user:app_user port:5433 host:db.example.com password:secret123 dbname:myapp'
// only some fields set; the rest must keep their defaults
const postgres_client_partial = '!!postgresql_client.configure name:dev host:localhost password:devpass'
// same as full, but with surrounding whitespace/newlines in the script
const postgres_client_complex = '
!!postgresql_client.configure name:staging user:stage_user port:5434 host:staging.db.com password:stagepass dbname:stagingdb
'
// Decoding an action with no parameters must yield a client where every
// field carries its declared default value.
fn test_postgres_client_decode_blank() ! {
	client := decode[PostgresqlClient](postgres_client_blank)!
	// all six fields at defaults == zero-initialized struct literal
	assert client == PostgresqlClient{}
}
// A fully-specified action must override every default.
fn test_postgres_client_decode_full() ! {
	client := decode[PostgresqlClient](postgres_client_full)!
	expected := PostgresqlClient{
		name: 'production'
		user: 'app_user'
		port: 5433
		host: 'db.example.com'
		password: 'secret123'
		dbname: 'myapp'
	}
	assert client == expected
}
// A partial action sets only the named fields; the others keep defaults.
fn test_postgres_client_decode_partial() ! {
	client := decode[PostgresqlClient](postgres_client_partial)!
	// user/port/dbname are not in the script, so the struct defaults
	// (root/5432/postgres) fill them in via the literal below
	assert client == PostgresqlClient{
		name: 'dev'
		host: 'localhost'
		password: 'devpass'
	}
}
// Surrounding whitespace/newlines in the script must not affect decoding.
fn test_postgres_client_decode_complex() ! {
	client := decode[PostgresqlClient](postgres_client_complex)!
	expected := PostgresqlClient{
		name: 'staging'
		user: 'stage_user'
		port: 5434
		host: 'staging.db.com'
		password: 'stagepass'
		dbname: 'stagingdb'
	}
	assert client == expected
}
// encode followed by decode must reproduce the original configuration.
fn test_postgres_client_encode_decode_roundtrip() ! {
	original := PostgresqlClient{
		name: 'testdb'
		user: 'testuser'
		port: 5435
		host: 'test.host.com'
		password: 'testpass123'
		dbname: 'testdb'
	}
	// roundtrip in one expression: heroscript is an intermediate only
	decoded := decode[PostgresqlClient](encode[PostgresqlClient](original)!)!
	assert decoded == original
}
// Roundtrip a set of representative configurations (minimal, fully remote,
// local dev) and require exact reproduction each time.
fn test_postgres_client_encode() ! {
	test_cases := [
		PostgresqlClient{
			name: 'minimal'
			user: 'root'
			port: 5432
			host: 'localhost'
			password: ''
			dbname: 'postgres'
		},
		PostgresqlClient{
			name: 'full_config'
			user: 'admin'
			port: 5433
			host: 'remote.server.com'
			password: 'securepass'
			dbname: 'production'
		},
		PostgresqlClient{
			name: 'localhost_dev'
			user: 'dev'
			port: 5432
			host: '127.0.0.1'
			password: 'devpassword'
			dbname: 'devdb'
		},
	]
	for client in test_cases {
		roundtripped := decode[PostgresqlClient](encode[PostgresqlClient](client)!)!
		assert roundtripped == client
	}
}
// Play script for interactive testing
// play_script mixes a full, a partial, and a default configuration;
// lines starting with '#' are comments and are filtered out by the tests.
const play_script = '
# PostgresqlClient Encode/Decode Play Script
# This script demonstrates encoding and decoding PostgresqlClient configurations
!!postgresql_client.configure name:playground user:play_user
    port:5432
    host:localhost
    password:playpass
    dbname:playdb
# You can also use partial configurations
!!postgresql_client.configure name:quick_test host:127.0.0.1
# Default configuration (all defaults)
!!postgresql_client.configure
'
// test_play_script decodes each action line of play_script independently
// and checks the three resulting configurations.
// NOTE(review): only lines starting with '!!postgresql_client.configure' are
// decoded, so the indented continuation lines (port:/host:/...) of the first
// action are dropped — the first client effectively only gets name and user.
fn test_play_script() ! {
	// keep non-empty, non-comment lines only
	lines := play_script.split_into_lines().filter(fn (line string) bool {
		return line.trim(' ') != '' && !line.starts_with('#')
	})
	mut clients := []PostgresqlClient{}
	for line in lines {
		if line.starts_with('!!postgresql_client.configure') {
			client := decode[PostgresqlClient](line)!
			clients << client
		}
	}
	assert clients.len == 3
	// First client: full configuration
	assert clients[0].name == 'playground'
	assert clients[0].user == 'play_user'
	assert clients[0].port == 5432
	// Second client: partial configuration
	assert clients[1].name == 'quick_test'
	assert clients[1].host == '127.0.0.1'
	assert clients[1].user == 'root' // default
	// Third client: defaults only
	assert clients[2].name == 'default'
	assert clients[2].host == 'localhost'
	assert clients[2].port == 5432
}
// Utility function for manual testing
// run_play_script exercises encode/decode interactively: a basic roundtrip,
// the play script, and an edge-case client with empty/zero fields.
// Intended to be called by hand, not by the test harness.
pub fn run_play_script() ! {
	println('=== PostgresqlClient Encode/Decode Play Script ===')
	println('Testing encoding and decoding of PostgresqlClient configurations...')
	// Test 1: Basic encoding
	println('\n1. Testing basic encoding...')
	client := PostgresqlClient{
		name: 'example'
		user: 'example_user'
		port: 5432
		host: 'example.com'
		password: 'example_pass'
		dbname: 'example_db'
	}
	encoded := encode[PostgresqlClient](client)!
	println('Encoded: ${encoded}')
	decoded := decode[PostgresqlClient](encoded)!
	println('Decoded name: ${decoded.name}')
	println('Decoded host: ${decoded.host}')
	// Test 2: Play script
	println('\n2. Testing play script...')
	test_play_script()!
	println('Play script test passed!')
	// Test 3: Edge cases — empty strings and port 0 must survive a roundtrip
	println('\n3. Testing edge cases...')
	edge_client := PostgresqlClient{
		name: 'edge'
		user: ''
		port: 0
		host: ''
		password: ''
		dbname: ''
	}
	edge_encoded := encode[PostgresqlClient](edge_client)!
	edge_decoded := decode[PostgresqlClient](edge_encoded)!
	assert edge_decoded.name == 'edge'
	assert edge_decoded.user == ''
	assert edge_decoded.port == 0
	println('Edge cases test passed!')
	println('\n=== All tests completed successfully! ===')
}

View File

@@ -1,138 +0,0 @@
# HeroEncoder - Struct Serialization to HeroScript
HeroEncoder provides bidirectional conversion between V structs and HeroScript format.
## HeroScript Format
HeroScript uses a structured action-based format:
```heroscript
!!define.typename param1:value1 param2:'value with spaces'
!!define.typename.nested_field field1:value
!!define.typename.array_item field1:value
!!define.typename.array_item field1:value2
```
## Basic Usage
### Simple Struct
```v
#!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import incubaid.herolib.data.encoderhero
import time
struct Person {
mut:
name string
age int = 20
birthday time.Time
}
mut person := Person{
name: 'Bob'
age: 25
birthday: time.now()
}
// Encode to heroscript
heroscript := encoderhero.encode[Person](person)!
println(heroscript)
// Output: !!define.person name:Bob age:25 birthday:'2024-01-15 10:30:00'
// Decode back
person2 := encoderhero.decode[Person](heroscript)!
println(person2)
```
### Nested Structs
```v
struct Car {
name string
year int
}
struct Person {
mut:
name string
car Car
}
person := Person{
name: 'Alice'
car: Car{
name: 'Tesla'
year: 2024
}
}
heroscript := encoderhero.encode[Person](person)!
// Output:
// !!define.person name:Alice
// !!define.person.car name:Tesla year:2024
```
### Arrays of Structs
```v
struct Profile {
platform string
url string
}
struct Person {
mut:
name string
profiles []Profile
}
person := Person{
name: 'Bob'
profiles: [
Profile{platform: 'GitHub', url: 'github.com/bob'},
Profile{platform: 'LinkedIn', url: 'linkedin.com/bob'}
]
}
heroscript := encoderhero.encode[Person](person)!
// Output:
// !!define.person name:Bob
// !!define.person.profile platform:GitHub url:github.com/bob
// !!define.person.profile platform:LinkedIn url:linkedin.com/bob
```
## Skip Attributes
Use `@[skip]` or `@[skipdecode]` to exclude fields from encoding:
```v
struct MyStruct {
id int
name string
other ?&Remark @[skip]
}
```
## Current Limitations
⚠️ **IMPORTANT**: The decoder currently has limited functionality:
- ✅ **Encoding**: Fully supports nested structs and arrays
- ⚠️ **Decoding**: Only supports flat structs (no nesting or arrays)
- 🔧 **In Progress**: Full decoder implementation for nested structures
For production use, only use simple flat structs for encoding/decoding roundtrips.
## Time Handling
`time.Time` fields are automatically converted to string format:
- Format: `YYYY-MM-DD HH:mm:ss`
- Example: `2024-01-15 14:30:00`
Use `incubaid.herolib.data.ourtime` for more flexible time handling.

View File

@@ -1,83 +0,0 @@
module encoderherocomplex
import time
// FullPerson combines scalars, time, a nested struct chain (car -> insurance)
// and an array of structs, for full encode/decode roundtrip coverage.
struct FullPerson {
	id int
	name string
	age int
	birthday time.Time
	car FullCar
	profiles []FullProfile
}
// FullCar nests a further struct level (FullInsurance).
struct FullCar {
	name string
	year int
	insurance FullInsurance
}
// FullInsurance is the deepest nesting level in this fixture.
struct FullInsurance {
	provider string
	expiration time.Time
}
// FullProfile is the array-of-struct element type on FullPerson.
struct FullProfile {
	platform string
	url string
}
// test_roundtrip_nested_struct encodes a deeply nested value and checks that
// decoding reproduces the scalar fields, the nested structs and the array.
fn test_roundtrip_nested_struct() ! {
	original := FullPerson{
		id: 1
		name: 'Alice'
		age: 30
		birthday: time.new(year: 1993, month: 6, day: 15)
		car: FullCar{
			name: 'Tesla'
			year: 2024
			insurance: FullInsurance{
				provider: 'StateFarm'
				expiration: time.new(year: 2025, month: 12, day: 31)
			}
		}
		profiles: [
			FullProfile{platform: 'GitHub', url: 'github.com/alice'},
			FullProfile{platform: 'LinkedIn', url: 'linkedin.com/alice'},
		]
	}
	// Encode
	encoded := encode[FullPerson](original)!
	println('Encoded:\n${encoded}\n')
	// Decode
	decoded := decode[FullPerson](encoded)!
	// Verify: spot-check each nesting level rather than whole-struct equality
	// (time fields are not compared here)
	assert decoded.id == original.id
	assert decoded.name == original.name
	assert decoded.age == original.age
	assert decoded.car.name == original.car.name
	assert decoded.car.year == original.car.year
	assert decoded.car.insurance.provider == original.car.insurance.provider
	assert decoded.profiles.len == original.profiles.len
	assert decoded.profiles[0].platform == original.profiles[0].platform
	assert decoded.profiles[1].url == original.profiles[1].url
}
// A flat struct (no nesting, no arrays) must roundtrip exactly.
fn test_roundtrip_flat_struct() ! {
	struct Simple {
		id int
		name string
		age int
	}
	original := Simple{
		id: 123
		name: 'Bob'
		age: 25
	}
	decoded := decode[Simple](encode[Simple](original)!)!
	assert decoded == original
}

View File

@@ -1,26 +0,0 @@
module encoderherocomplex
import v.reflection
// if at top of struct we have: @[name:"teststruct " ; params] .
// will return {'name': 'teststruct', 'params': ''}
// attrs_get_reflection extracts the struct-level attribute map from a
// reflected type; non-struct types yield an empty map.
fn attrs_get_reflection(mytype reflection.Type) map[string]string {
	info := mytype.sym.info
	if info is reflection.Struct {
		return attrs_get(info.attrs)
	}
	return map[string]string{}
}
// will return {'name': 'teststruct', 'params': ''}
//
// attrs_get parses raw attribute strings (as exposed by v.reflection) into a
// lowercased key/value map. Attributes without '=' map to an empty string.
fn attrs_get(attrs []string) map[string]string {
	mut out := map[string]string{}
	for attr in attrs {
		if attr.contains('=') {
			// split on the FIRST '=' only, so values that themselves contain
			// '=' (urls, expressions) keep their full text; plain split('=')
			// used to drop everything after the second '='
			kv := attr.split_nth('=', 2)
			out[kv[0].trim_space().to_lower()] = kv[1].trim_space().to_lower()
		} else {
			out[attr.trim_space().to_lower()] = ''
		}
	}
	return out
}

View File

@@ -1,91 +0,0 @@
module encoderherocomplex
// byte array versions of the most common tokens/chars to avoid reallocations
// literal token text reused during serialization
const null_in_bytes = 'null'
const true_in_string = 'true'
const false_in_string = 'false'
// pre-built byte sequences for common JSON-style tokens
const empty_array = [u8(`[`), `]`]!
const comma_rune = `,`
const colon_rune = `:`
const quote_rune = `"`
// two-byte escape sequences: \\ , \" , \/
const back_slash = [u8(`\\`), `\\`]!
const quote = [u8(`\\`), `"`]!
const slash = [u8(`\\`), `/`]!
const null_unicode = [u8(`\\`), `u`, `0`, `0`, `0`, `0`]!
// escape strings indexed by ASCII code (0x00..0x1f)
// NOTE(review): entries 1-3 read '\\t','\\n','\\r' where '\\u0001'..'\\u0003'
// would be expected (tab/newline/CR also appear at their correct slots 8-13);
// several of these constants look like json2 leftovers and appear unused in
// this module — confirm before relying on this table
const ascii_control_characters = ['\\u0000', '\\t', '\\n', '\\r', '\\u0004', '\\u0005', '\\u0006',
	'\\u0007', '\\b', '\\t', '\\n', '\\u000b', '\\f', '\\r', '\\u000e', '\\u000f', '\\u0010',
	'\\u0011', '\\u0012', '\\u0013', '\\u0014', '\\u0015', '\\u0016', '\\u0017', '\\u0018', '\\u0019',
	'\\u001a', '\\u001b', '\\u001c', '\\u001d', '\\u001e', '\\u001f']!
const curly_open_rune = `{`
const curly_close_rune = `}`
const ascii_especial_characters = [u8(`\\`), `"`, `/`]!
// // `Any` is a sum type that lists the possible types to be decoded and used.
// pub type Any = Null
// | []Any
// | bool
// | f32
// | f64
// | i16
// | i32
// | i64
// | i8
// | int
// | map[string]Any
// | string
// | time.Time
// | u16
// | u32
// | u64
// | u8
// // Decodable is an interface, that allows custom implementations for decoding structs from JSON encoded values
// pub interface Decodable {
// from_json(f Any)
// }
// Decodable is an interface, that allows custom implementations for encoding structs to their string based JSON representations
// Encodable lets a type provide its own heroscript representation instead
// of the reflection-based default.
pub interface Encodable {
	heroscript() string
}
// `Null` struct is a simple representation of the `null` value in JSON.
pub struct Null {
	is_null bool = true
}
// shared singleton null value
pub const null = Null{}
// ValueKind enumerates the kinds of possible values of the Any sumtype.
pub enum ValueKind {
	unknown // not (yet) classified
	array
	object
	string_ // trailing underscore: `string` is a reserved type name
	number
}
// str returns the human-readable name of this ValueKind
// (string_ is reported as plain 'string').
pub fn (k ValueKind) str() string {
	match k {
		.unknown { return 'unknown' }
		.array { return 'array' }
		.object { return 'object' }
		.string_ { return 'string' }
		.number { return 'number' }
	}
}