+${key}: ${value.string()}\n'
+	}
+	out += '${it}
+').join('\n')
+	} else {
+		self.content
+	}
+	out += self.DocBase.html()!
+
+	mut html := out
+
+	// NOTE: the <b>/<i> output tags below are an assumption; the original markup
+	// was stripped from this diff and is reconstructed from the comments and names
+	// Replace **text** with <b>text</b>
+	mut bold_re := regex.regex_opt(r'\*\*(.+)\*\*') or { panic(err) }
+	html = bold_re.replace_by_fn(html, fn (re regex.RE, in_txt string, start int, end int) string {
+		// Extract the matched group
+		matched_text := re.get_group_by_id(in_txt, 0)
+		return '<b>${matched_text}</b>'
+	})
+
+	// Replace __text__ with <b>text</b>
+	mut underline_bold_re := regex.regex_opt(r'__(.+)__') or { panic(err) }
+	html = underline_bold_re.replace_by_fn(html, fn (re regex.RE, in_txt string, start int, end int) string {
+		matched_text := re.get_group_by_id(in_txt, 0)
+		return '<b>${matched_text}</b>'
+	})
+
+	// Optionally handle *text* as <i>text</i>
+	mut italic_re := regex.regex_opt(r'\*(.+)\*') or { panic(err) }
+	html = italic_re.replace_by_fn(html, fn (re regex.RE, in_txt string, start int, end int) string {
+		matched_text := re.get_group_by_id(in_txt, 0)
+		return '<i>${matched_text}</i>'
+	})
+
+	// Optionally handle _text_ as <i>text</i>
+	mut underline_italic_re := regex.regex_opt(r'_(.+)_') or { panic(err) }
+	html = underline_italic_re.replace_by_fn(html, fn (re regex.RE, in_txt string, start int, end int) string {
+		matched_text := re.get_group_by_id(in_txt, 0)
+		return '<i>${matched_text}</i>'
+	})
+
+	return html
+}
diff --git a/lib/data/markdownparser/elements/interface_element.v b/lib/data/markdownparser/elements/interface_element.v
new file mode 100644
index 00000000..d45aa7f3
--- /dev/null
+++ b/lib/data/markdownparser/elements/interface_element.v
@@ -0,0 +1,37 @@
+module elements
+
+import freeflowuniverse.herolib.core.playbook
+// import freeflowuniverse.herolib.data.paramsparser
+
+@[heap]
+pub interface Element {
+	markdown() !string
+	html() !string
+	pug() !string
+	actions(ActionsGetArgs) []playbook.Action
+	treeview_(string, mut []string)
+	children_recursive() []Element
+	children_recursive_(mut []Element)
+mut:
+	id int
+	content string
+	processed bool
+	// params paramsparser.Params
+	type_name string
+	changed bool
+	// trailing_lf bool
+	children []Element
+	process() !int
+	content_set(int, string)
+	id_set(int) int
+	actionpointers(ActionsGetArgs) []&Action
+	defpointers() []&Def
+	header_name() !string
+}
+
+// @[heap]
+// pub struct ActionPointer {
+// pub mut:
+//	action playbook.Action
+//	element_id int
+// }
diff --git a/lib/data/markdownparser/elements/parser_char.v b/lib/data/markdownparser/elements/parser_char.v
new file mode 100644
index 00000000..fd19ec6c
--- /dev/null
+++ b/lib/data/markdownparser/elements/parser_char.v
@@ -0,0 +1,126 @@
+module elements
+
+import os
+// import freeflowuniverse.herolib.ui.console
+
+// a char based parser
+
+// error while parsing
+struct ParserCharError {
+mut:
+	error string
+	charnr int
+}
+
+struct ParserChar {
+mut:
+	charnr int
+	chars string
+	errors []ParserCharError
+}
+
+fn parser_char_new_path(path string) !ParserChar {
+	if !os.exists(path) {
+		return error("path: '${path}' does not exist, cannot parse.")
+	}
+	mut content := os.read_file(path) or { return error('Failed to load file ${path}') }
+	return ParserChar{
+		chars: content
+		charnr: 0
+	}
+}
+
+pub fn parser_char_new_text(text string) ParserChar {
+	return ParserChar{
+		chars: text
+		charnr: 0
+	}
+}
+
+// record a parse error at the current position
+fn (mut parser ParserChar) error_add(msg string) {
+	parser.errors << ParserCharError{
+		error: msg
+		charnr: parser.charnr
+	}
+}
+
+// return a specific char
+fn (mut parser ParserChar) char(nr int) !string {
+	if nr < 0 {
+		return error('before file')
+	}
+	if nr > parser.chars.len - 1 {
+		// nr can point past the end (e.g. char_next on the last char)
+		return ''
+	}
+	mut c := parser.chars.substr(nr, nr + 1)
+	if c == '“' {
+		// TODO: doesn't work: unicode chars like '“' are encoded as more than one byte,
+		// so a one-byte substr can never match them
+		c = '"'
+	}
+	// console.print_debug(" +++ '${c}' ${c[0]}")
+	return c
+}
+
+// get current char, returns '' when out of scope
+fn (mut parser ParserChar) char_current() string {
+	return parser.char(parser.charnr) or { '' }
+}
+
+fn (mut parser ParserChar) forward(nr int) {
+	parser.charnr += nr
+}
+
+// get next char, if end of file will return empty char
+fn (mut parser ParserChar) char_next() string {
+	if parser.eof() {
+		return ''
+	}
+	return parser.char(parser.charnr + 1) or { panic(err) }
+}
+
+// if at start will return an empty char
+fn (mut parser ParserChar) char_prev() string {
+	if parser.charnr - 1 < 0 {
+		return ''
+	}
+	return parser.char(parser.charnr - 1) or { panic(err) }
+}
+
+// check whether the text at the current position (plus offset) equals tofind
+fn (mut parser ParserChar) text_next_is(tofind string, offset int) bool {
+	startpos := parser.charnr + offset
+	if startpos + tofind.len > parser.chars.len {
+		return false
+	}
+	text := parser.chars.substr(startpos, startpos + tofind.len).replace('\n', '\\n')
+	didfind := (text == tofind)
+	// console.print_debug(" -NT${offset}($tofind):'$text':$didfind .. ")
+	return didfind
+}
+
+// check if the previous text was tofind; the current position does not count,
+// offset can be used to include the current char (1 means current is last)
+// fn (mut parser ParserChar) text_previous_is(tofind string, offset int) bool {
+//	startpos := parser.charnr - tofind.len + offset
+//	if startpos < 0 {
+//		return false
+//	}
+//	text := parser.chars.substr(startpos, startpos + tofind.len).replace("\n","\\n")
+//	console.print_debug(" -PT${offset}($tofind):'$text'")
+//	return text == tofind
+// }
+// FOR NOW NOT USED, IS BETTER TO FORCE EVERYONE TO USE text_next_is
+
+// move further
+fn (mut parser ParserChar) next() {
+	parser.charnr += 1
+}
+
+// return true if end of file
+fn (mut parser ParserChar) eof() bool {
+	if parser.charnr > parser.chars.len - 1 {
+		return true
+	}
+	return false
+}
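For orientation, this is roughly how the char parser above is driven. A minimal sketch that uses only the functions defined in `parser_char.v`; the walk loop itself is illustrative and not part of this diff (it would have to live inside the `elements` module, since most of these functions are module-private):

```v
module elements

// illustrative only: walk a text with the char parser above
// and count markdown comment openers
fn scan_example() int {
	mut p := parser_char_new_text('hello <!-- hidden --> world')
	mut count := 0
	for !p.eof() {
		// text_next_is compares at the current position (offset 0)
		if p.text_next_is('<!--', 0) {
			count += 1
		}
		p.next()
	}
	return count
}
```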
diff --git a/lib/data/markdownparser/elements/parser_paragraph.v b/lib/data/markdownparser/elements/parser_paragraph.v
new file mode 100644
index 00000000..66d91591
--- /dev/null
+++ b/lib/data/markdownparser/elements/parser_paragraph.v
@@ -0,0 +1,184 @@
+module elements
+
+import freeflowuniverse.herolib.core.texttools
+// import freeflowuniverse.herolib.ui.console
+
+// DO NOT CHANGE HOW THIS WORKS, THIS HAS BEEN DONE AS A STATEFUL PARSER BY DESIGN
+// THIS ALLOWS FOR EASY ADAPTATION TO DIFFERENT REALITIES
+// adds the found links, text, comments to the paragraph
+fn (mut paragraph Paragraph) paragraph_parse() ! {
+	mut parser := parser_char_new_text(paragraph.content)
+
+	// mut d := para.doc or { panic('no doc') }
+	paragraph.text_new(mut paragraph.parent_doc(), '') // the initial one
+
+	mut potential_link := false
+	mut link_in_link := false
+
+	for {
+		mut llast := paragraph.children.last()
+		mut char_ := parser.char_current()
+
+		// console.print_debug("[[[${char_}]]]")
+
+		// char == '' means end of file
+		if mut llast is Def {
+			if (char_ == '' || char_ == ' ' || char_ == '\n') && parser.char_prev() != '*' {
+				if llast.content.len < 3 {
+					paragraph.children.pop()
+					mut llast2 := paragraph.children.last()
+					if mut llast2 is Text {
+						llast2.content += llast.content + char_
+					} else {
+						paragraph.text_new(mut paragraph.parent_doc(), llast.content + char_)
+					}
+					parser.next()
+					char_ = ''
+					continue
+				} else {
+					// means we did find a def, we can stop
+					// console.print_debug(" -- end def")
+					paragraph.text_new(mut paragraph.parent_doc(), char_)
+					parser.next()
+					char_ = ''
+					continue
+				}
+			} else if !(texttools.is_upper_text(char_) || char_ == '_') {
+				// this means it wasn't a def, we need to add text
+				// console.print_debug(' -- no def: ${char_}')
+				paragraph.children.pop()
+				// console.print_debug(' -- no def: ${paragraph.children.last()}')
+				mut llast2 := paragraph.children.last()
+				if mut llast2 is Text {
+					llast2_content := llast2.content
+					llast2.content = llast2_content + llast.content + char_
+					// llast2.content += llast.content + char_
+				} else {
+					paragraph.text_new(mut paragraph.parent_doc(), llast.content + char_)
+				}
+				parser.next()
+				char_ = ''
+				continue
+			}
+			// console.print_debug(" -- def: ${char_}")
+		}
+
+		if parser.eof() {
+			assert char_ == ''
+			break
+		}
+
+		// check for comment end
+		if mut llast is Comment {
+			if char_ == '\n' {
+				if llast.singleline {
+					// means we are at the end of the line of a single line comment
+					paragraph.text_new(mut paragraph.parent_doc(), '\n')
+					parser.next()
+					char_ = ''
+					continue
+				} else {
+					// now we know for sure the comment is not single line
+					llast.singleline = false
+				}
+			}
+			if parser.text_next_is('-->', 1) {
+				// means it is the end of the comment
+				llast.content += char_ // need to add current content
+				// need to move forward so we don't process the next 3 chars
+				parser.forward(3)
+				paragraph.text_new(mut paragraph.parent_doc(), '')
+				parser.next()
+				char_ = ''
+				continue
+			}
+		}
+
+		if mut llast is Link {
+			// an image inside the link description is allowed
+			if parser.text_next_is('![', 0) && llast.content == '[' {
+				link_in_link = true
+			}
+			if char_ == ']' {
+				if !parser.text_next_is('(', 1) {
+					// means it is not a link, need to convert the link to normal text
+					if link_in_link {
+						link_in_link = false
+						continue
+					}
+
+					mut c := llast.content
+					paragraph.children.delete_last() // remove the link
+					paragraph.text_new(mut paragraph.parent_doc(), '')
+					llast = paragraph.children.last() // fetch last again
+					llast_content := llast.content
+					llast.content = llast_content + c + char_ // need to add current content
+					parser.next()
+
+					char_ = ''
+					continue
+				}
+				potential_link = true
+			}
+			if char_ == ')' && potential_link {
+				// end of link
+				if link_in_link {
+					// the parsed content was actually the child links in the description
+					llast.link_new(mut paragraph.parent_doc(), '${llast.content.trim_string_left('[')})')
+					link_in_link = false
+					potential_link = false
+					continue
+				} else {
+					llast.content += char_ // need to add current content
+					paragraph.text_new(mut paragraph.parent_doc(), '')
+					parser.next()
+					char_ = ''
+					potential_link = false
+				}
+				continue
+			}
+		}
+
+		if mut llast is Text {
+			if char_ != '' {
+				if char_ == '*' {
+					paragraph.def_new(mut paragraph.parent_doc(), '*')
+					parser.next()
+					char_ = ''
+					continue
+				}
+				// check for comments start
+				for totry in ['') {
+			mut comment := trimmed_line.all_after_first('')
+			doc.comment_new(mut &doc, comment)
+			parser.next_start_lf()!
+			continue
+		}
+
+		llast.content += line
+	}
+
+		match mut llast {
+			elements.Paragraph {
+				if !parser.next_is_eof() {
+					// console.print_debug("LFLFLFLFLF")
+					llast.content += '\n'
+				}
+			}
+			else {
+				panic('parser error, means we got an element which is not supported')
+			}
+		}
+
+		parser.next()
+	}
+	mut llast2 := parser.lastitem()!
+	// console.print_debug("parser lf end:${parser.endlf}")
+	if parser.endlf {
+		llast2.content += '\n'
+	}
+	// match mut llast2 {
+	//	elements.List {}
+	//	elements.Paragraph {
+	//		if parser.endlf {
+	//			llast2.content += '\n'
+	//		}
+	//	}
+	//	else {
+	//		console.print_debug(llast2)
+	//		panic('parser error for last of last, means we got an element which is not supported')
+	//	}
+	// }
+
+	doc.process() or { return error('Failed to process doc\n${err}') }
+}
diff --git a/lib/data/markdownparser/parsers/parser_line.v b/lib/data/markdownparser/parsers/parser_line.v
new file mode 100644
index 00000000..6c2a658a
--- /dev/null
+++ b/lib/data/markdownparser/parsers/parser_line.v
@@ -0,0 +1,159 @@
+module parsers
+
+import freeflowuniverse.herolib.data.markdownparser.elements
+
+// a line parser, useful to quickly parse a file in any format as long as it is line based
+
+// error while parsing
+struct ParserError {
+mut:
+	error string
+	linenr int
+	line string
+}
+
+struct Parser {
+mut:
+	doc &elements.Doc
+	linenr int
+	lines []string
+	errors []ParserError
+	endlf bool // whether there is a linefeed ('\n') at the end
+}
+
+fn parser_line_new(mut doc elements.Doc) !Parser {
+	mut parser := Parser{
+		doc: doc
+	}
+
+	// Parse frontmatter if present
+	if doc.content.starts_with('+++') {
+		mut frontmatter_content := ''
+		mut lines := doc.content.split_into_lines()
+		lines = lines[1..] // Skip the opening '+++'
+
+		for i, line in lines {
+			if line.trim_space() == '+++' {
+				// End of frontmatter: keep only the content after the closing '+++'
+				doc.content = lines[i + 1..].join('\n')
+				break
+			}
+			frontmatter_content += '${line}\n'
+		}
+
+		// Create and process the Frontmatter element
+		mut frontmatter := doc.frontmatter_new(mut &doc, frontmatter_content)
+		frontmatter.process() or { return error('Failed to parse frontmatter: ${err.msg()}') }
+	}
+
+	doc.paragraph_new(mut parser.doc, '')
+	parser.lines = doc.content.split_into_lines()
+	if doc.content.ends_with('\n') {
+		parser.endlf = true
+	}
+	parser.lines = parser.lines.map(it.replace('\t', ' ')) // replace tabs with spaces
+	parser.linenr = 0
+	return parser
+}
+
+fn (mut parser Parser) lastitem() !elements.Element {
+	return parser.doc.last()!
+}
+
+// register a parsing error for the current line
+fn (mut parser Parser) error_add(msg string) {
+	parser.errors << ParserError{
+		error: msg
+		linenr: parser.linenr
+		line: parser.line_current()
+	}
+}
+
+// return a specific line
+fn (mut parser Parser) line(nr int) !string {
+	if nr < 0 {
+		return error('before file')
+	}
+	if parser.eof() {
+		return error('end of file')
+	}
+	if nr >= parser.lines.len {
+		return error('accessing line out of range')
+	}
+	return parser.lines[nr]
+}
+
+// get current line, will panic if out of scope
+fn (mut parser Parser) line_current() string {
+	return parser.line(parser.linenr) or { panic(err) }
+}
+
+// get the type name of the last element
+fn (mut parser Parser) elementname() !string {
+	if parser.doc.children.len == 0 {
+		return 'start'
+	}
+	return parser.doc.last()!.type_name().all_after_last('.').to_lower()
+}
+
+// get next line, if end of file will return **EOF**
+fn (mut parser Parser) line_next() string {
+	if parser.eof() {
+		return '**EOF**'
+	}
+	return parser.line(parser.linenr + 1) or { panic(err) }
+}
+
+// if at start will return **EOF**
+fn (mut parser Parser) line_prev() string {
+	if parser.linenr - 1 < 0 {
+		return '**EOF**'
+	}
+	return parser.line(parser.linenr - 1) or { panic(err) }
+}
+
+// move further
+fn (mut parser Parser) next() {
+	parser.linenr += 1
+}
+
+// move further and reset the state
+fn (mut parser Parser) next_start() ! {
+	// means we need to add a paragraph because we don't know what comes next
+	if parser.doc.last()! !is elements.Paragraph {
+		parser.doc.paragraph_new(mut parser.doc, '')
+	}
+	parser.next()
+}
+
+fn (mut parser Parser) next_start_lf() ! {
+	if parser.doc.last()! !is elements.Paragraph {
+		parser.doc.paragraph_new(mut parser.doc, '\n')
+	}
+	parser.next()
+}
+
+fn (mut parser Parser) ensure_last_is_paragraph() ! {
+	if parser.doc.last()! !is elements.Paragraph {
+		parser.doc.paragraph_new(mut parser.doc, '')
+	}
+}
+
+// fn (mut parser Parser) append_paragraph() {
+//	elements.paragraph_new(parent:parser.doc)
+// }
+
+// return true if end of file
+fn (mut parser Parser) eof() bool {
+	if parser.linenr > (parser.lines.len - 1) {
+		return true
+	}
+	return false
+}
+
+fn (mut parser Parser) next_is_eof() bool {
+	if parser.linenr > (parser.lines.len - 2) {
+		return true
+	}
+	return false
+}
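A usage note, not part of the diff: `parser_line_new` strips TOML-style frontmatter before the line loop starts, so a document such as the one below ends up with a Frontmatter element holding the key/value block and only the body left in `doc.content`. The sketch assumes `new(content: ...)` as the public entry point (the shape used by the tests later in this diff):

```v
import freeflowuniverse.herolib.data.markdownparser

fn frontmatter_example() ! {
	content := '+++
title = "my page"
+++
# body starts here'
	doc := markdownparser.new(content: content)!
	// the '+++ ... +++' block becomes a Frontmatter element;
	// doc.content keeps only '# body starts here'
	println(doc.children.len)
}
```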
diff --git a/lib/data/markdownparser/parsers/specs.md b/lib/data/markdownparser/parsers/specs.md
new file mode 100644
index 00000000..aebe3fe3
--- /dev/null
+++ b/lib/data/markdownparser/parsers/specs.md
@@ -0,0 +1,48 @@
+# Markdown parser specs
+
+This document specifies the requirements for elements (how the parser sees them) and how the parser works.
+
+## Element Specs
+
+### Action element
+
+- Starts with `!`
+- Content is everything after `!`, up to EOF or the first line that does not begin with a space (that line is excluded).
+
+### Codeblock element
+
+- Starts with \```.
+- Content is considered from the next line after \```, and until a line that starts with \```.
+- Text after the beginning or ending backticks is ignored.
+
+### HTML element
+
+- Starts with `<html>`
+- Content is considered from the next line after `<html>`, and until a line that starts with `</html>`
+- Text after the beginning or ending tags is ignored.
+
+### Header element
+
+- Starts with `#`
+- Content is considered only from the same line, excluding any `#` characters
+- Depth, or the number of `#`, must be positive and <= 6
+
+### Table element
+
+- Starts and ends with a `|`
+- Content is considered for each line that starts and ends with `|`, until a line is reached that does not meet this requirement.
+
+### Comment element
+
+- Starts with `<!--` and ends with `-->`
+- Content is considered from after the start marker and before the end marker.
+
+### Paragraph element
+
+- This is the default element: any content that does not belong to another element belongs to a paragraph.
+
+## How it works
+
+The markdown parser is very simple. It reads content line by line, with the default element being a paragraph.
+If the last element is a paragraph, the parser searches for the beginning of a new element; on a match the new element is appended to the list of parsed elements, otherwise the content is added to the last paragraph.
+If the last element is not a paragraph, the parser searches for the last element's ending; on a match a new paragraph is appended to the list of parsed elements, otherwise the content is added to the last element.
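A condensed sketch of that dispatch loop, reusing the helpers from `parser_line.v` above. This is illustrative only; the actual element matchers live elsewhere in the parsers module:

```v
// sketch of the loop described above (not the actual implementation)
fn parse_sketch(mut doc elements.Doc) ! {
	mut parser := parser_line_new(mut doc)!
	for !parser.eof() {
		line := parser.line_current()
		if parser.elementname()! == 'paragraph' {
			// search for the start of a new element (header, codeblock, table, ...);
			// on a match append the new element, otherwise add `line` to the paragraph
		} else {
			// search for the current element's ending; on a match start a fresh
			// paragraph (next_start), otherwise add `line` to the element
		}
		_ = line
		parser.next()
	}
}
```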
diff --git a/lib/data/markdownparser/readme.md b/lib/data/markdownparser/readme.md
new file mode 100644
index 00000000..133a359d
--- /dev/null
+++ b/lib/data/markdownparser/readme.md
@@ -0,0 +1,59 @@
+
+## the lifecycle of a markdown doc in this parser
+
+- parse (as done by parse_doc()) and create a doc with only the following elements
+  - Action
+  - Codeblock
+  - Comment
+  - Header
+  - Html
+  - Paragraph
+  - Table
+- then the process function is called on each of the found elements
+  - this process function can find child elements, which can be
+    - Action -> will parse the action and empty .content
+    - Codeblock -> can create Actions as children; other text found stays as content
+    - Comment, Header, Html -> not much to do
+    - Paragraph is the big one and will use the char parser
+      - see below; it makes children as specified there
+      - these children are just used to detect the relevant items in the Paragraph, e.g. for html or pug output
+- once each element has been processed, its .processed flag is set
+- now the doctree or another processor can find the elements it is interested in, e.g. actions, links, ...
+  - if needed the doctree will set .content so that markdown() gives another outcome
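A hedged sketch of that flow from the caller's side; only `new(content: ...)`, `.children` and `markdown()` are taken from this diff, the rest is illustrative:

```v
import freeflowuniverse.herolib.data.markdownparser

fn lifecycle_example() ! {
	doc := markdownparser.new(content: '# title\n\nsome **bold** text')!
	// new() parses and runs process() on every element,
	// so each child already has .processed set
	for child in doc.children {
		println(child.markdown()!)
	}
}
```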
+
+## The doctree will do the following process
+
+- process the includes first
+- then process the definitions
+
+## The Elements
+
+- Action
+  - cannot have children
+  - once the action is processed, .content has been set
+- Codeblock
+  - children: actions
+  - content is the text which is not actions
+  - markdown() -> gets the actions reformatted + the other text after them (.content)
+- Comment
+  - cannot have children
+  - markdown() -> returns the original
+- Header
+  - cannot have children
+  - markdown() -> returns the original
+- Html
+  - cannot have children
+  - markdown() -> returns the original
+- Paragraph
+  - has children which were parsed with the char parser
+  - children
+    - Comment
+    - Def
+    - Link
+    - List / ListItem
+    - Table
+  - markdown() -> returns the original (reformatted based on children)
diff --git a/lib/data/markdownparser/table_test.v b/lib/data/markdownparser/table_test.v
new file mode 100644
index 00000000..c11ef821
--- /dev/null
+++ b/lib/data/markdownparser/table_test.v
@@ -0,0 +1,225 @@
+module markdownparser
+
+import freeflowuniverse.herolib.data.markdownparser.elements { Table }
+
+fn test_table_no_rows_invalid() {
+	mut content := '
+| Column1 | Column2 | Column3 |
+|-------------|--------------|------------|
+'
+	if _ := new(content: content) {
+		assert true
+	} else {
+		assert false
+	}
+
+	content = '
+| Column1 | Column2 | Column3 |
+'
+	if doc := new(content: content) {
+		// this shouldn't be parsed as a table since it is only a single row
+		assert doc.children.len == 1
+		elem := doc.children[0]
+		assert elem !is Table
+	}
+}
+
+fn test_table_one_row() {
+	mut docs := new(
+		content: '
+| Column1 | Column2 | Column3 |
+|-------------|--------------|------------|
+| Row1Col1 | Row1Col2 | Row1Col3 |
+'
+	)!
+	assert docs.children.len == 2
+	table := docs.children[1]
+	if table is Table {
+		assert table.num_columns == 3
+		assert table.header.len == 3
+		assert table.header_markdown()! == '| Column1 | Column2 | Column3 |
+'
+		assert table.alignments == [.left, .left, .left]
+		assert table.rows.len == 1
+
+		// assert table.rows == [
+		//	Row{
+		//		cells: ['Row1Col1', 'Row1Col2', 'Row1Col3']
+		//	},
+		// ]
+		assert table.markdown()! == '| Column1 | Column2 | Column3 |
+| :-- | :-- | :-- |
+| Row1Col1 | Row1Col2 | Row1Col3 |
+'
+	}
+}
+
+fn test_table_two_rows() {
+	mut docs := new(
+		content: '
+| Column1 | Column2 | Column3 |
+|-------------|--------------|------------|
+| Row1Col1 | Row1Col2 | Row1Col3 |
+| Row2Col1 | Row2Col2 | Row2Col3 |
+'
+	)!
+	assert docs.children.len == 2
+	table := docs.children[1]
+	if table is Table {
+		assert table.num_columns == 3
+		assert table.header.len == 3
+		assert table.header_markdown()! == '| Column1 | Column2 | Column3 |
+'
+		assert table.alignments == [.left, .left, .left]
+		assert table.rows.len == 2
+		// assert table.rows == [
+		//	Row{
+		//		cells: ['Row1Col1', 'Row1Col2', 'Row1Col3']
+		//	},
+		//	Row{
+		//		cells: ['Row2Col1', 'Row2Col2', 'Row2Col3']
+		//	},
+		// ]
+		assert table.markdown()!
== '| Column1 | Column2 | Column3 | +| :-- | :-- | :-- | +| Row1Col1 | Row1Col2 | Row1Col3 | +| Row2Col1 | Row2Col2 | Row2Col3 | +' + } +} + +fn test_table_two_rows_one_is_half_filled_invalid() { + content := ' +| Column1 | Column2 | Column3 | +|-------------|--------------|------------| +| Row1Col1 | +| Row2Col1 | Row2Col2 | Row2Col3 | +' + if _ := new(content: content) { + assert false, 'should return an error: wrongly formatted row' + } +} + +fn test_table_two_rows_one_is_filled_too_much() { + content := ' +| Column1 | Column2 | Column3 | +|-------------|--------------|------------| +| Row1Col1 | Row1Col2 | Row1Col3 | Row1Col4 | +| Row2Col1 | Row2Col2 | Row2Col3 | +' + if _ := new(content: content) { + assert false, 'should return an error: wrongly formatted row' + } +} + +// fn test_table_two_rows_weird_format_yet_valid() { +// mut docs := new( +// content: ' +// | Column1 | Column2 | Column3 | +// -|--------------------------|-- +// | Row1Col1 | Row1Col2 | Row1Col3 +// Row2Col1 | Row2Col2 | Row2Col3 +// ' +// )! +// assert docs.children.len == 1 +// table := docs.children[0] +// if table is Table { +// assert table.num_columns == 3 +// assert table.header == ['Column1', 'Column2', 'Column3'] +// assert table.alignments == [.left, .left, .left] +// assert table.rows == [ +// Row{ +// cells: ['Row1Col1', 'Row1Col2', 'Row1Col3'] +// }, +// Row{ +// cells: ['Row2Col1', 'Row2Col2', 'Row2Col3'] +// }, +// ] +// assert table.markdown()! == '| Column1 | Column2 | Column3 | +// | :-- | :-- | :-- | +// | Row1Col1 | Row1Col2 | Row1Col3 | +// | Row2Col1 | Row2Col2 | Row2Col3 | + +// ' +// } +// } + +fn test_table_one_row_alignment_left() { + mut docs := new( + content: ' +| Column1 | Column2 | Column3 | +|:------------|:-------------|:-----------| +| Row1Col1 | Row1Col2 | Row1Col3 | +' + )! + assert docs.children.len == 2 + table := docs.children[1] + if table is Table { + assert table.num_columns == 3 + assert table.alignments == [.left, .left, .left] + assert table.markdown()! == '| Column1 | Column2 | Column3 | +| :-- | :-- | :-- | +| Row1Col1 | Row1Col2 | Row1Col3 | +' + } +} + +fn test_table_one_row_alignment_right() { + mut docs := new( + content: ' +| Column1 | Column2 | Column3 | +|------------:|-------------:|-----------:| +| Row1Col1 | Row1Col2 | Row1Col3 | +' + )! + assert docs.children.len == 2 + table := docs.children[1] + if table is Table { + assert table.num_columns == 3 + assert table.alignments == [.right, .right, .right] + assert table.markdown()! == '| Column1 | Column2 | Column3 | +| --: | --: | --: | +| Row1Col1 | Row1Col2 | Row1Col3 | +' + } +} + +fn test_table_one_row_alignment_center() { + mut docs := new( + content: ' +| Column1 | Column2 | Column3 | +|:-----------:|:------------:|:----------:| +| Row1Col1 | Row1Col2 | Row1Col3 | +' + )! + assert docs.children.len == 2 + table := docs.children[1] + if table is Table { + assert table.num_columns == 3 + assert table.alignments == [.center, .center, .center] + assert table.markdown()! == '| Column1 | Column2 | Column3 | +| :-: | :-: | :-: | +| Row1Col1 | Row1Col2 | Row1Col3 | +' + } +} + +fn test_table_one_row_alignment_mixed() { + mut docs := new( + content: ' +| Column1 | Column2 | Column3 | Column4 | +|:------------|:------------:|-----------:|-----------| +| Row1Col1 | Row1Col2 | Row1Col3 | Row1Col4 | +' + )! + assert docs.children.len == 2 + table := docs.children[1] + if table is Table { + assert table.num_columns == 4 + assert table.alignments == [.left, .center, .right, .left] + assert table.markdown()! 
== '| Column1 | Column2 | Column3 | Column4 |
+| :-- | :-: | --: | :-- |
+| Row1Col1 | Row1Col2 | Row1Col3 | Row1Col4 |
+'
+	}
+}
diff --git a/lib/data/markdownparser/testdata/test.md b/lib/data/markdownparser/testdata/test.md
new file mode 100644
index 00000000..721244e3
--- /dev/null
+++ b/lib/data/markdownparser/testdata/test.md
@@ -0,0 +1,25 @@
+# this is a test
+
+- [this is link](something.md)
+- 
+
+!!hello.world k:v k2:v2
+
+## ts
+
+!!domain.actor
+    key1: val1
+    key2:val2
+
+!!domain2.actor2
+    key:val
+
+```
+""" this is a doc string"""
+def name(x){
+
+y = """ this is a y"""
+print(x)
+```
+
+}
diff --git a/lib/data/markdownparser/tools/markdown_tools.v b/lib/data/markdownparser/tools/markdown_tools.v
new file mode 100644
index 00000000..943b4a85
--- /dev/null
+++ b/lib/data/markdownparser/tools/markdown_tools.v
@@ -0,0 +1,36 @@
+module markdownparser
+
+import freeflowuniverse.herolib.ui.console
+
+// lift all headers so that the smallest header depth found becomes minlevel
+pub fn markdown_min_header(text string, minlevel int) string {
+	mut nrhash := 100
+	mut out := []string{}
+
+	for line in text.split('\n') {
+		if line.starts_with('#') {
+			splitted := line.split_nth(' ', 2)
+			nrhash2 := splitted[0].count('#')
+			if nrhash2 < nrhash {
+				nrhash = nrhash2
+			}
+		}
+	}
+
+	// nrhash now holds the min header level found
+	// console.print_debug('minlevel: $minlevel nrhash:$nrhash')
+
+	if minlevel <= nrhash {
+		// nothing to lift, return the text unchanged
+		return text
+	}
+
+	mut addhashstring := ''
+	for _ in 0 .. (minlevel - nrhash) {
+		addhashstring += '#'
+	}
+	for mut line2 in text.split('\n') {
+		if line2.starts_with('#') {
+			line2 = addhashstring + '${line2}'
+		}
+		out << line2
+	}
+
+	return out.join('\n')
+}
diff --git a/lib/data/mnemonic/README.md b/lib/data/mnemonic/README.md
new file mode 100644
index 00000000..d0e416c5
--- /dev/null
+++ b/lib/data/mnemonic/README.md
@@ -0,0 +1,40 @@
+# BIP39 (mnemonic)
+
+This is a pure V implementation of BIP39.
+
+**WARNING:** seed generation is not supported yet, only mnemonic to entropy and vice versa.
+
+# Wordlists
+
+Only `English` is supported right now; it is embedded at compile time.
+The wordlist comes from the official `bitcoin/bip-0039/english.txt`.
+
+# Mnemonic
+
+The module needs to be instantiated with:
+
+```
+m := mnemonic.new()!
+```
+
+The only possible error is a wordlist parsing issue.
+
+# Get entropy from mnemonic
+
+```
+words := "winter hire open opinion frown turtle this bulb mouse spell endorse thumb regret useless expand nerve improve impulse"
+entropy := m.to_entropy(words)
+```
+
+# Get mnemonic from entropy
+
+```
+entropy := m.generate_entropy(224)! // or your own entropy buffer
+words := m.to_mnemonic(entropy)!
+```
+
+# Tests
+
+To validate the implementation, the test vectors from python-mnemonic (`vectors.json`) are included.
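The round trip those vectors exercise looks like this. A sketch using only functions defined in `mnemonic.v`; the import path is an assumption based on this repo's layout:

```v
import freeflowuniverse.herolib.data.mnemonic

fn round_trip() ! {
	m := mnemonic.new()!
	// 128 bits of entropy -> 12 words -> back to the same entropy
	entropy := m.generate_entropy(128)!
	words := m.to_mnemonic(entropy)!
	back := m.to_entropy(words.join(' '))!
	assert back == entropy
}
```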
+ +This is implemented via `mnemonic_test.v` diff --git a/lib/data/mnemonic/english.txt b/lib/data/mnemonic/english.txt new file mode 100644 index 00000000..63285b2a --- /dev/null +++ b/lib/data/mnemonic/english.txt @@ -0,0 +1,2048 @@ +abandon +ability +able +about +above +absent +absorb +abstract +absurd +abuse +access +accident +account +accuse +achieve +acid +acoustic +acquire +across +act +action +actor +actress +actual +adapt +add +addict +address +adjust +admit +adult +advance +advice +aerobic +affair +afford +afraid +again +age +agent +agree +ahead +aim +air +airport +aisle +alarm +album +alcohol +alert +alien +all +alley +allow +almost +alone +alpha +already +also +alter +always +amateur +amazing +among +amount +amused +analyst +anchor +ancient +anger +angle +angry +animal +ankle +announce +annual +another +answer +antenna +antique +anxiety +any +apart +apology +appear +apple +approve +april +arch +arctic +area +arena +argue +arm +armed +armor +army +around +arrange +arrest +arrive +arrow +art +artefact +artist +artwork +ask +aspect +assault +asset +assist +assume +asthma +athlete +atom +attack +attend +attitude +attract +auction +audit +august +aunt +author +auto +autumn +average +avocado +avoid +awake +aware +away +awesome +awful +awkward +axis +baby +bachelor +bacon +badge +bag +balance +balcony +ball +bamboo +banana +banner +bar +barely +bargain +barrel +base +basic +basket +battle +beach +bean +beauty +because +become +beef +before +begin +behave +behind +believe +below +belt +bench +benefit +best +betray +better +between +beyond +bicycle +bid +bike +bind +biology +bird +birth +bitter +black +blade +blame +blanket +blast +bleak +bless +blind +blood +blossom +blouse +blue +blur +blush +board +boat +body +boil +bomb +bone +bonus +book +boost +border +boring +borrow +boss +bottom +bounce +box +boy +bracket +brain +brand +brass +brave +bread +breeze +brick +bridge +brief +bright +bring +brisk +broccoli +broken +bronze +broom +brother +brown +brush +bubble +buddy +budget +buffalo +build +bulb +bulk +bullet +bundle +bunker +burden +burger +burst +bus +business +busy +butter +buyer +buzz +cabbage +cabin +cable +cactus +cage +cake +call +calm +camera +camp +can +canal +cancel +candy +cannon +canoe +canvas +canyon +capable +capital +captain +car +carbon +card +cargo +carpet +carry +cart +case +cash +casino +castle +casual +cat +catalog +catch +category +cattle +caught +cause +caution +cave +ceiling +celery +cement +census +century +cereal +certain +chair +chalk +champion +change +chaos +chapter +charge +chase +chat +cheap +check +cheese +chef +cherry +chest +chicken +chief +child +chimney +choice +choose +chronic +chuckle +chunk +churn +cigar +cinnamon +circle +citizen +city +civil +claim +clap +clarify +claw +clay +clean +clerk +clever +click +client +cliff +climb +clinic +clip +clock +clog +close +cloth +cloud +clown +club +clump +cluster +clutch +coach +coast +coconut +code +coffee +coil +coin +collect +color +column +combine +come +comfort +comic +common +company +concert +conduct +confirm +congress +connect +consider +control +convince +cook +cool +copper +copy +coral +core +corn +correct +cost +cotton +couch +country +couple +course +cousin +cover +coyote +crack +cradle +craft +cram +crane +crash +crater +crawl +crazy +cream +credit +creek +crew +cricket +crime +crisp +critic +crop +cross +crouch +crowd +crucial +cruel +cruise +crumble +crunch +crush +cry +hero +cube +culture +cup +cupboard +curious +current +curtain +curve +cushion +custom +cute +cycle +dad +damage +damp +dance 
+danger +daring +dash +daughter +dawn +day +deal +debate +debris +decade +december +decide +decline +decorate +decrease +deer +defense +define +defy +degree +delay +deliver +demand +demise +denial +dentist +deny +depart +depend +deposit +depth +deputy +derive +describe +desert +design +desk +despair +destroy +detail +detect +develop +device +devote +diagram +dial +diamond +diary +dice +diesel +diet +differ +digital +dignity +dilemma +dinner +dinosaur +direct +dirt +disagree +discover +disease +dish +dismiss +disorder +display +distance +divert +divide +divorce +dizzy +doctor +document +dog +doll +dolphin +domain +donate +donkey +donor +door +dose +double +dove +draft +dragon +drama +drastic +draw +dream +dress +drift +drill +drink +drip +drive +drop +drum +dry +duck +dumb +dune +during +dust +dutch +duty +dwarf +dynamic +eager +eagle +early +earn +earth +easily +east +easy +echo +ecology +economy +edge +edit +educate +effort +egg +eight +either +elbow +elder +electric +elegant +element +elephant +elevator +elite +else +embark +embody +embrace +emerge +emotion +employ +empower +empty +enable +enact +end +endless +endorse +enemy +energy +enforce +engage +engine +enhance +enjoy +enlist +enough +enrich +enroll +ensure +enter +entire +entry +envelope +episode +equal +equip +era +erase +erode +erosion +error +erupt +escape +essay +essence +estate +eternal +ethics +evidence +evil +evoke +evolve +exact +example +excess +exchange +excite +exclude +excuse +execute +exercise +exhaust +exhibit +exile +exist +exit +exotic +expand +expect +expire +explain +expose +express +extend +extra +eye +eyebrow +fabric +face +faculty +fade +faint +faith +fall +false +fame +family +famous +fan +fancy +fantasy +farm +fashion +fat +fatal +father +fatigue +fault +favorite +feature +february +federal +fee +feed +feel +female +fence +festival +fetch +fever +few +fiber +fiction +field +figure +file +film +filter +final +find +fine +finger +finish +fire +firm +first +fiscal +fish +fit +fitness +fix +flag +flame +flash +flat +flavor +flee +flight +flip +float +flock +floor +flower +fluid +flush +fly +foam +focus +fog +foil +fold +follow +food +foot +force +forest +forget +fork +fortune +forum +forward +fossil +foster +found +fox +fragile +frame +frequent +fresh +friend +fringe +frog +front +frost +frown +frozen +fruit +fuel +fun +funny +furnace +fury +future +gadget +gain +galaxy +gallery +game +gap +garage +garbage +garden +garlic +garment +gas +gasp +gate +gather +gauge +gaze +general +genius +genre +gentle +genuine +gesture +ghost +giant +gift +giggle +ginger +giraffe +girl +give +glad +glance +glare +glass +glide +glimpse +globe +gloom +glory +glove +glow +glue +goat +goddess +gold +good +goose +gorilla +gospel +gossip +govern +gown +grab +grace +grain +grant +grape +grass +gravity +great +green +grid +grief +grit +grocery +group +grow +grunt +guard +guess +guide +guilt +guitar +gun +gym +habit +hair +half +hammer +hamster +hand +happy +harbor +hard +harsh +harvest +hat +have +hawk +hazard +head +health +heart +heavy +hedgehog +height +hello +helmet +help +hen +hero +hidden +high +hill +hint +hip +hire +history +hobby +hockey +hold +hole +holiday +hollow +home +honey +hood +hope +horn +horror +horse +hospital +host +hotel +hour +hover +hub +huge +human +humble +humor +hundred +hungry +hunt +hurdle +hurry +hurt +husband +hybrid +ice +icon +idea +identify +idle +ignore +ill +illegal +illness +image +imitate +immense +immune +impact +impose +improve +impulse +inch +include +income +increase +index +indicate +indoor 
+industry +infant +inflict +inform +inhale +inherit +initial +inject +injury +inmate +inner +innocent +input +inquiry +insane +insect +inside +inspire +install +intact +interest +into +invest +invite +involve +iron +island +isolate +issue +item +ivory +jacket +jaguar +jar +jazz +jealous +jeans +jelly +jewel +job +join +joke +journey +joy +judge +juice +jump +jungle +junior +junk +just +kangaroo +keen +keep +ketchup +key +kick +kid +kidney +kind +kingdom +kiss +kit +kitchen +kite +kitten +kiwi +knee +knife +knock +know +lab +label +labor +ladder +lady +lake +lamp +language +laptop +large +later +latin +laugh +laundry +lava +law +lawn +lawsuit +layer +lazy +leader +leaf +learn +leave +lecture +left +leg +legal +legend +leisure +lemon +lend +length +lens +leopard +lesson +letter +level +liar +liberty +library +license +life +lift +light +like +limb +limit +link +lion +liquid +list +little +live +lizard +load +loan +lobster +local +lock +logic +lonely +long +loop +lottery +loud +lounge +love +loyal +lucky +luggage +lumber +lunar +lunch +luxury +lyrics +machine +mad +magic +magnet +maid +mail +main +major +make +mammal +man +manage +mandate +mango +mansion +manual +maple +marble +march +margin +marine +market +marriage +mask +mass +master +match +material +math +matrix +matter +maximum +maze +meadow +mean +measure +meat +mechanic +medal +media +melody +melt +member +memory +mention +menu +mercy +merge +merit +merry +mesh +message +metal +method +middle +midnight +milk +million +mimic +mind +minimum +minor +minute +miracle +mirror +misery +miss +mistake +mix +mixed +mixture +mobile +model +modify +mom +moment +monitor +monkey +monster +month +moon +moral +more +morning +mosquito +mother +motion +motor +mountain +mouse +move +movie +much +muffin +mule +multiply +muscle +museum +mushroom +music +must +mutual +myself +mystery +myth +naive +name +napkin +narrow +nasty +nation +nature +near +neck +need +negative +neglect +neither +nephew +nerve +nest +net +network +neutral +never +news +next +nice +night +noble +noise +nominee +noodle +normal +north +nose +notable +note +nothing +notice +novel +now +nuclear +number +nurse +nut +oak +obey +object +oblige +obscure +observe +obtain +obvious +occur +ocean +october +odor +off +offer +office +often +oil +okay +old +olive +olympic +omit +once +one +onion +online +only +open +opera +opinion +oppose +option +orange +orbit +orchard +order +ordinary +organ +orient +original +orphan +ostrich +other +outdoor +outer +output +outside +oval +oven +over +own +owner +oxygen +oyster +ozone +pact +paddle +page +pair +palace +palm +panda +panel +panic +panther +paper +parade +parent +park +parrot +party +pass +patch +path +patient +patrol +pattern +pause +pave +payment +peace +peanut +pear +peasant +pelican +pen +penalty +pencil +people +pepper +perfect +permit +person +pet +phone +photo +phrase +physical +piano +picnic +picture +piece +pig +pigeon +pill +pilot +pink +pioneer +pipe +pistol +pitch +pizza +place +planet +plastic +plate +play +please +pledge +pluck +plug +plunge +poem +poet +point +polar +pole +police +pond +pony +pool +popular +portion +position +possible +post +potato +pottery +poverty +powder +power +practice +praise +predict +prefer +prepare +present +pretty +prevent +price +pride +primary +print +priority +prison +private +prize +problem +process +produce +profit +program +project +promote +proof +property +prosper +protect +proud +provide +public +pudding +pull +pulp +pulse +pumpkin +punch +pupil +puppy +purchase +purity +purpose +purse +push +put 
+puzzle +pyramid +quality +quantum +quarter +question +quick +quit +quiz +quote +rabbit +raccoon +race +rack +radar +radio +rail +rain +raise +rally +ramp +ranch +random +range +rapid +rare +rate +rather +raven +raw +razor +ready +real +reason +rebel +rebuild +recall +receive +recipe +record +recycle +reduce +reflect +reform +refuse +region +regret +regular +reject +relax +release +relief +rely +remain +remember +remind +remove +render +renew +rent +reopen +repair +repeat +replace +report +require +rescue +resemble +resist +resource +response +result +retire +retreat +return +reunion +reveal +review +reward +rhythm +rib +ribbon +rice +rich +ride +ridge +rifle +right +rigid +ring +riot +ripple +risk +ritual +rival +river +road +roast +robot +robust +rocket +romance +roof +rookie +room +rose +rotate +rough +round +route +royal +rubber +rude +rug +rule +run +runway +rural +sad +saddle +sadness +safe +sail +salad +salmon +salon +salt +salute +same +sample +sand +satisfy +satoshi +sauce +sausage +save +say +scale +scan +scare +scatter +scene +scheme +school +science +scissors +scorpion +scout +scrap +screen +script +scrub +sea +search +season +seat +second +secret +section +security +seed +seek +segment +select +sell +seminar +senior +sense +sentence +series +service +session +settle +setup +seven +shadow +shaft +shallow +share +shed +shell +sheriff +shield +shift +shine +ship +shiver +shock +shoe +shoot +shop +short +shoulder +shove +shrimp +shrug +shuffle +shy +sibling +sick +side +siege +sight +sign +silent +silk +silly +silver +similar +simple +since +sing +siren +sister +situate +six +size +skate +sketch +ski +skill +skin +skirt +skull +slab +slam +sleep +slender +slice +slide +slight +slim +slogan +slot +slow +slush +small +smart +smile +smoke +smooth +snack +snake +snap +sniff +snow +soap +soccer +social +sock +soda +soft +solar +soldier +solid +solution +solve +someone +song +soon +sorry +sort +soul +sound +soup +source +south +space +spare +spatial +spawn +speak +special +speed +spell +spend +sphere +spice +spider +spike +spin +spirit +split +spoil +sponsor +spoon +sport +spot +spray +spread +spring +spy +square +squeeze +squirrel +stable +stadium +staff +stage +stairs +stamp +stand +start +state +stay +steak +steel +stem +step +stereo +stick +still +sting +stock +stomach +stone +stool +story +stove +strategy +street +strike +strong +struggle +student +stuff +stumble +style +subject +submit +subway +success +such +sudden +suffer +sugar +suggest +suit +summer +sun +sunny +sunset +super +supply +supreme +sure +surface +surge +surprise +surround +survey +suspect +sustain +swallow +swamp +swap +swarm +swear +sweet +swift +swim +swing +switch +sword +symbol +symptom +syrup +system +table +tackle +tag +tail +talent +talk +tank +tape +target +task +taste +tattoo +taxi +teach +team +tell +ten +tenant +tennis +tent +term +test +text +thank +that +theme +then +theory +there +they +thing +this +thought +three +thrive +throw +thumb +thunder +ticket +tide +tiger +tilt +timber +time +tiny +tip +tired +tissue +title +toast +tobacco +today +toddler +toe +together +toilet +token +tomato +tomorrow +tone +tongue +tonight +tool +tooth +top +topic +topple +torch +tornado +tortoise +toss +total +tourist +toward +tower +town +toy +track +trade +traffic +tragic +train +transfer +trap +trash +travel +tray +treat +tree +trend +trial +tribe +trick +trigger +trim +trip +trophy +trouble +truck +true +truly +trumpet +trust +truth +try +tube +tuition +tumble +tuna +tunnel +turkey +turn +turtle +twelve +twenty +twice 
+twin +twist +two +type +typical +ugly +umbrella +unable +unaware +uncle +uncover +under +undo +unfair +unfold +unhappy +uniform +unique +unit +universe +unknown +unlock +until +unusual +unveil +update +upgrade +uphold +upon +upper +upset +urban +urge +usage +use +used +useful +useless +usual +utility +vacant +vacuum +vague +valid +valley +valve +van +vanish +vapor +various +vast +vault +vehicle +velvet +vendor +venture +venue +verb +verify +version +very +vessel +veteran +viable +vibrant +vicious +victory +video +view +village +vintage +violin +virtual +virus +visa +visit +visual +vital +vivid +vocal +voice +void +volcano +volume +vote +voyage +wage +wagon +wait +walk +wall +walnut +want +warfare +warm +warrior +wash +wasp +waste +water +wave +way +wealth +weapon +wear +weasel +weather +web +wedding +weekend +weird +welcome +west +wet +whale +what +wheat +wheel +when +where +whip +whisper +wide +width +wife +wild +will +win +window +wine +wing +wink +winner +winter +wire +wisdom +wise +wish +witness +wolf +woman +wonder +wood +wool +word +work +world +worry +worth +wrap +wreck +wrestle +wrist +write +wrong +yard +year +yellow +you +young +youth +zebra +zero +zone +zoo diff --git a/lib/data/mnemonic/mnemonic.v b/lib/data/mnemonic/mnemonic.v new file mode 100644 index 00000000..73bafa85 --- /dev/null +++ b/lib/data/mnemonic/mnemonic.v @@ -0,0 +1,245 @@ +module mnemonic + +import crypto.rand +import crypto.sha256 +import math.big +import strings +import strconv + +// pure v implementation of BIP39 following specification from +// https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki + +pub struct BIP39 { +mut: + wordlist string + words []string +} + +pub fn new() !BIP39 { + mut b := BIP39{ + // only english supported for now + wordlist: 'english' + } + + embedded_english := $embed_file('english.txt', .zlib) + b.words = embedded_english.to_string().split('\n') + + // remove last empty line + if b.words.len == 2049 { + b.words.pop() + } + + // ensure our wordlist is sane + if b.words.len != 2048 { + return error('could not load wordlist') + } + + return b +} + +// +// parse mnemonic +// + +pub fn (b BIP39) to_entropy(mnemonics string) ![]u8 { + words := mnemonics.split(' ') + mut ms := words.len + + // only supports 12, 15, 18, 21 and 24 words. 
+ if (ms in [12, 15, 18, 21, 24]) == false { + return error('mnemonics out of range, only [12, 15, 18, 21 or 24] words supported') + } + + /* table from specification, to match entropy, checksum and words + + ENT = entropy length in bits + CS = checksum length in bits + MS = number of words + + | ENT | CS | ENT+CS | MS | + +-------+----+--------+------+ + | 128 | 4 | 132 | 12 | + | 160 | 5 | 165 | 15 | + | 192 | 6 | 198 | 18 | + | 224 | 7 | 231 | 21 | + | 256 | 8 | 264 | 24 | + */ + + // extract entropy and checksum size, starting at 12 words + // (we can't go lower) + // mut cs := 4 + mut ent := 128 + mut i := ms + + // go further til we reach the minimum (nothing to do for 12 words) + for i > 12 { + // cs += 1 + ent += 32 + i -= 3 + } + + // starting from an empty string + mut buffer := '' + + for word in words { + index := b.words.index(word) + if index < 0 { + return error("unexpected word '${word}' found") + } + + // building binary representation of the index value + // in order to always have 11 bits, we use bit_len to know + // how many bits will be returned with bin_str() and add missing + // zeros as prefix + inter := big.integer_from_int(index) // convert to Integer + repr := inter.bit_len() + + // pad with missing zero to reach 11 bits + buffer += strings.repeat_string('0', 11 - repr) + + // append the binary representation + buffer += inter.bin_str() + } + + // buffer is now a string representing binary value of the mnemonic (entropy + checksum) + mut final := []u8{} + + // let's convert this binary string to bytes now + for a in 0 .. (ent / 8) { + start := a * 8 + final << u8(strconv.parse_uint(buffer[start..start + 8], 2, 8)!) + } + + // remaining bits are checksum (we don't use them here) + // TODO: verify checksum + + // entropy restored + return final +} + +// +// create mnemonic +// + +pub fn (b BIP39) generate_entropy(size int) ![]u8 { + if size < 128 || size > 256 { + return error('size out of range (min 128, max 256)') + } + + if size % 32 != 0 { + return error('size must be a multiple of 32 bits') + } + + // size can only be 128, 160, 192, 224, 256 + entropy := rand.bytes(size / 8)! + + return entropy +} + +pub fn (b BIP39) compute_checksum(entropy []u8) string { + // we assume entropy length is valid + + // computing checksum length + cs := (entropy.len * 8) / 32 + + // computing sha256 of the entropy + entro256 := sha256.sum(entropy) + + // convert the first byte to Integer + first := big.integer_from_u32(entro256[0]) + + // adding leading zero if missing then adding + // binary bits + repr := first.bit_len() + + mut source := strings.repeat_string('0', 8 - repr) + source += first.bin_str() + + // source is always 8 bits now + // truncate to expected length + + return source[0..cs] +} + +pub fn (b BIP39) generate_binary_from_entropy(entropy []u8) !string { + checksum := b.compute_checksum(entropy) + + // println(entropy.hex()) + // println(checksum) + + mut buffer := '' + + for i in 0 .. 
entropy.len { + segment := big.integer_from_u32(entropy[i]) + + // converting each byte to it's binary representation + // into the main buffer + repr := segment.bit_len() + buffer += strings.repeat_string('0', 8 - repr) + + // if byte is zero, padding already filled all the zeros + if entropy[i] > 0 { + buffer += segment.bin_str() + } + } + + // adding the checksum to the buffer + // buffer will now be (entropy + checksum) in the table (see to_entropy) + buffer += checksum + + return buffer +} + +pub fn (b BIP39) to_mnemonic(source []u8) ![]string { + binary := b.generate_binary_from_entropy(source)! + mut words := []string{} + + for i in 0 .. (binary.len / 11) { + start := i * 11 + + // converting the next 11 bits to it's integer value + // and mapping the corresponding word in wordlist + value := binary[start..start + 11] + index := strconv.parse_int(value, 2, 16)! + + words << b.words[index] + } + + return words +} + +pub fn (b BIP39) generate(size int) ![]string { + entropy := b.generate_entropy(size)! + return b.to_mnemonic(entropy) +} + +// +// seed +// + +pub fn (b BIP39) to_seed(words string, passphrase string) ![]u8 { + // derivated := pbkdf2(.sha512, words, "mnemonic" + passphrase, 2048, 512) + // return derivated + + return error('not implemented') +} + +// +// small debug and example +// + +pub fn debug() !bool { + m := new()! + + words := m.generate(128)! + println(words) + + native := m.generate_entropy(224)! + wordsx := m.to_mnemonic(native)! + println(wordsx) + + back := m.to_entropy(words.join(' '))! + println(back.hex()) + + return true +} diff --git a/lib/data/mnemonic/mnemonic_test.v b/lib/data/mnemonic/mnemonic_test.v new file mode 100644 index 00000000..5386fa1e --- /dev/null +++ b/lib/data/mnemonic/mnemonic_test.v @@ -0,0 +1,58 @@ +import mnemonic +import strconv +import json + +struct Vectors { + english [][]string +} + +fn entropy_from_hexstring(input string) ![]u8 { + mut final := []u8{} + + for i in 0 .. (input.len / 2) { + start := i * 2 + b := strconv.parse_uint(input[start..start + 2], 16, 8)! + final << u8(b) + } + + return final +} + +fn test_python_vectors() { + vectors := $embed_file('vectors.json') + data := vectors.to_string() + + root := json.decode(Vectors, data) or { + eprintln('Failed to decode json, error: ${err}') + assert 'json parsing failed' == '${err}' + return + } + + m := mnemonic.new()! + + for vector in root.english { + expected_entropy := vector[0] + expected_words := vector[1] + + // load entropy from words + entropy := m.to_entropy(expected_words)! + assert entropy.hex() == expected_entropy + + // create words from entropy + parsed := entropy_from_hexstring(expected_entropy)! + words := m.to_mnemonic(parsed)! + assert words.join(' ') == expected_words + + // TODO: implement seed + } +} + +fn test_loop() { + m := mnemonic.new()! + + entropy := m.generate_entropy(224)! + words := m.to_mnemonic(entropy)! + wayback := m.to_entropy(words.join(' '))! 
+ + assert wayback == entropy +} diff --git a/lib/data/mnemonic/vectors.json b/lib/data/mnemonic/vectors.json new file mode 100644 index 00000000..bfe88aee --- /dev/null +++ b/lib/data/mnemonic/vectors.json @@ -0,0 +1,1462 @@ +{ + "english": [ + [ + "00000000000000000000000000000000", + "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about", + "c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04", + "xprv9s21ZrQH143K3h3fDYiay8mocZ3afhfULfb5GX8kCBdno77K4HiA15Tg23wpbeF1pLfs1c5SPmYHrEpTuuRhxMwvKDwqdKiGJS9XFKzUsAF" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "legal winner thank year wave sausage worth useful legal winner thank yellow", + "2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607", + "xprv9s21ZrQH143K2gA81bYFHqU68xz1cX2APaSq5tt6MFSLeXnCKV1RVUJt9FWNTbrrryem4ZckN8k4Ls1H6nwdvDTvnV7zEXs2HgPezuVccsq" + ], + [ + "80808080808080808080808080808080", + "letter advice cage absurd amount doctor acoustic avoid letter advice cage above", + "d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8", + "xprv9s21ZrQH143K2shfP28KM3nr5Ap1SXjz8gc2rAqqMEynmjt6o1qboCDpxckqXavCwdnYds6yBHZGKHv7ef2eTXy461PXUjBFQg6PrwY4Gzq" + ], + [ + "ffffffffffffffffffffffffffffffff", + "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong", + "ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069", + "xprv9s21ZrQH143K2V4oox4M8Zmhi2Fjx5XK4Lf7GKRvPSgydU3mjZuKGCTg7UPiBUD7ydVPvSLtg9hjp7MQTYsW67rZHAXeccqYqrsx8LcXnyd" + ], + [ + "000000000000000000000000000000000000000000000000", + "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent", + "035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa", + "xprv9s21ZrQH143K3mEDrypcZ2usWqFgzKB6jBBx9B6GfC7fu26X6hPRzVjzkqkPvDqp6g5eypdk6cyhGnBngbjeHTe4LsuLG1cCmKJka5SMkmU" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will", + "f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd", + "xprv9s21ZrQH143K3Lv9MZLj16np5GzLe7tDKQfVusBni7toqJGcnKRtHSxUwbKUyUWiwpK55g1DUSsw76TF1T93VT4gz4wt5RM23pkaQLnvBh7" + ], + [ + "808080808080808080808080808080808080808080808080", + "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always", + "107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65", + "xprv9s21ZrQH143K3VPCbxbUtpkh9pRG371UCLDz3BjceqP1jz7XZsQ5EnNkYAEkfeZp62cDNj13ZTEVG1TEro9sZ9grfRmcYWLBhCocViKEJae" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when", + "0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528", + "xprv9s21ZrQH143K36Ao5jHRVhFGDbLP6FCx8BEEmpru77ef3bmA928BxsqvVM27WnvvyfWywiFN8K6yToqMaGYfzS6Db1EHAXT5TuyCLBXUfdm" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + 
"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art", + "bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8", + "xprv9s21ZrQH143K32qBagUJAMU2LsHg3ka7jqMcV98Y7gVeVyNStwYS3U7yVVoDZ4btbRNf4h6ibWpY22iRmXq35qgLs79f312g2kj5539ebPM" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title", + "bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87", + "xprv9s21ZrQH143K3Y1sd2XVu9wtqxJRvybCfAetjUrMMco6r3v9qZTBeXiBZkS8JxWbcGJZyio8TrZtm6pkbzG8SYt1sxwNLh3Wx7to5pgiVFU" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless", + "c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f", + "xprv9s21ZrQH143K3CSnQNYC3MqAAqHwxeTLhDbhF43A4ss4ciWNmCY9zQGvAKUSqVUf2vPHBTSE1rB2pg4avopqSiLVzXEU8KziNnVPauTqLRo" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote", + "dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad", + "xprv9s21ZrQH143K2WFF16X85T2QCpndrGwx6GueB72Zf3AHwHJaknRXNF37ZmDrtHrrLSHvbuRejXcnYxoZKvRquTPyp2JiNG3XcjQyzSEgqCB" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic", + "274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028", + "xprv9s21ZrQH143K2oZ9stBYpoaZ2ktHj7jLz7iMqpgg1En8kKFTXJHsjxry1JbKH19YrDTicVwKPehFKTbmaxgVEc5TpHdS1aYhB2s9aFJBeJH" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog", + "628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac", + "xprv9s21ZrQH143K3uT8eQowUjsxrmsA9YUuQQK1RLqFufzybxD6DH6gPY7NjJ5G3EPHjsWDrs9iivSbmvjc9DQJbJGatfa9pv4MZ3wjr8qWPAK" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length", + "64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440", + "xprv9s21ZrQH143K2XTAhys3pMNcGn261Fi5Ta2Pw8PwaVPhg3D8DWkzWQwjTJfskj8ofb81i9NP2cUNKxwjueJHHMQAnxtivTA75uUFqPFeWzk" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "scheme spot photo card baby mountain device kick cradle pact join borrow", + "ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612", + "xprv9s21ZrQH143K3FperxDp8vFsFycKCRcJGAFmcV7umQmcnMZaLtZRt13QJDsoS5F6oYT6BB4sS6zmTmyQAEkJKxJ7yByDNtRe5asP2jFGhT6" + ], + [ + 
"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave", + "fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d", + "xprv9s21ZrQH143K3R1SfVZZLtVbXEB9ryVxmVtVMsMwmEyEvgXN6Q84LKkLRmf4ST6QrLeBm3jQsb9gx1uo23TS7vo3vAkZGZz71uuLCcywUkt" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside", + "72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d", + "xprv9s21ZrQH143K2WNnKmssvZYM96VAr47iHUQUTUyUXH3sAGNjhJANddnhw3i3y3pBbRAVk5M5qUGFr4rHbEWwXgX4qrvrceifCYQJbbFDems" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "cat swing flag economy stadium alone churn speed unique patch report train", + "deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5", + "xprv9s21ZrQH143K4G28omGMogEoYgDQuigBo8AFHAGDaJdqQ99QKMQ5J6fYTMfANTJy6xBmhvsNZ1CJzRZ64PWbnTFUn6CDV2FxoMDLXdk95DQ" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access", + "4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02", + "xprv9s21ZrQH143K3wtsvY8L2aZyxkiWULZH4vyQE5XkHTXkmx8gHo6RUEfH3Jyr6NwkJhvano7Xb2o6UqFKWHVo5scE31SGDCAUsgVhiUuUDyh" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform", + "26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d", + "xprv9s21ZrQH143K3rEfqSM4QZRVmiMuSWY9wugscmaCjYja3SbUD3KPEB1a7QXJoajyR2T1SiXU7rFVRXMV9XdYVSZe7JoUXdP4SRHTxsT1nzm" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "vessel ladder alter error federal sibling chat ability sun glass valve picture", + "2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f", + "xprv9s21ZrQH143K2QWV9Wn8Vvs6jbqfF1YbTCdURQW9dLFKDovpKaKrqS3SEWsXCu6ZNky9PSAENg6c9AQYHcg4PjopRGGKmdD313ZHszymnps" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump", + "7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88", + "xprv9s21ZrQH143K4aERa2bq7559eMCCEs2QmmqVjUuzfy5eAeDX4mqZffkYwpzGQRE2YEEeLVRoH4CSHxianrFaVnMN2RYaPUZJhJx8S5j6puX" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold", + "01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998", + "xprv9s21ZrQH143K39rnQJknpH1WEPFJrzmAqqasiDcVrNuk926oizzJDDQkdiTvNPr2FYDYzWgiMiC63YmfPAa2oPyNB23r2g7d1yiK6WpqaQS" + ] + ], + "chinese_simplified": [ + [ + 
"00000000000000000000000000000000", + "的 的 的 的 的 的 的 的 的 的 的 在", + "7f7c7f91ef81f0fb6a3b95b346c50e6472c1d554f8ba90637bad8afce4a4de87c322c1acafa2f6f5e9a8f9b2d2c40e9d389efdc2adbe4445c21a0939fb39e91f", + "xprv9s21ZrQH143K2LTAgxJMxVMKie6n9HQHMUohP6x2cx1TVBr6dxnL3mnSLRiXjiCM7g2ZF3BHzpdbFuhdeh7ZRrzv2EEjg5Tv7kgKZrqbVLc" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "枪 疫 霉 尝 俩 闹 饿 贤 枪 疫 霉 卿", + "816a69d6866891b246b4d33f54d6d2be624470141754396205d039bdd8003949fec4340253dde4c8e11437a181ad992f56d5b976eb9fbe48f4c5e5fec60a27e1", + "xprv9s21ZrQH143K2t2fMBqtVAVWU3JSpmEbbddwouLX8NoBbcTykD1Tm4s9api6K9zvoKSESUsA7aVxbZRunM5yrjZRNZnZckve98hxUorv2Uv" + ], + [ + "80808080808080808080808080808080", + "壤 对 据 人 三 谈 我 表 壤 对 据 不", + "07b6eada2601141ef9748bdf5af296a134f0f9215a946813b84338dcfba93c8247b0c3429a91e0a1b85a93bd9f1275a9524acecadc9b516c3cf4c8990f44052c", + "xprv9s21ZrQH143K2cgeQUKgCSmaRVXFjEGThqrnNFmH71qG8z3bWqYcbX9zakkRxmDp583tqf3cQzmxtn4C2XqinMNb2HkhXBDYhekCB8AwZWV" + ], + [ + "ffffffffffffffffffffffffffffffff", + "歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 逻", + "08ac5d9bed9441013b32bc317aaddeb8310011f219b48239faa4adeeb8b79cb0a3e4d1cb460d2dd37888c0a19bef6edd90ced0fd613d48899eab9ee649d77fcd", + "xprv9s21ZrQH143K3Zkkh1w8EbXYQWAS5ekbitA2WVrswJY9uEJzig2BtairT72n98ySwQUAhYBsLW9EBjJ1XUinrSb69Ty4mttMLnaUooJwsJ3" + ], + [ + "000000000000000000000000000000000000000000000000", + "的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 动", + "b8fb8047e84951d846dbfbbce3edd0c9e316dc40f35b39f03a837db85f5587ac209088e883b5d924a0a43ad154a636fb65df28fdae821226f0f014a49e773356", + "xprv9s21ZrQH143K36LufUjTLqXnTiY6ach28pUYMkJ63swx4FhjPkzqD9YRqDZ452whYcNzKpPC8yfBm1eomL2z3VLC4zcwU71oKvMQ5NnD4h1" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "枪 疫 霉 尝 俩 闹 饿 贤 枪 疫 霉 尝 俩 闹 饿 贤 枪 殿", + "74187bbdce2dba25eed3b9aebdc65dcb7c61e74c58591451d47f9c7b7b17545a527880640bfb9cab36989eba1edddf57bfce7340697926de7f0b9ec1e0345c38", + "xprv9s21ZrQH143K47jLpKzSgdpMLSKP2ZMLXHHUqYYGKbXNE4k3TA2czvJCx5JAJFNkWKZf2B1AbYoUBpc96YoiwM7yfxyr8gvfNNgL7sFNum6" + ], + [ + "808080808080808080808080808080808080808080808080", + "壤 对 据 人 三 谈 我 表 壤 对 据 人 三 谈 我 表 壤 民", + "e3629a601f4b87101c4bb36496e3dbd146063351f5e47c048211faddab78efdb91910f0eea5c8e53cfb851aa3e156b0bb5c501b83baaf5f5d4a1679a5bb7d885", + "xprv9s21ZrQH143K4RfeWihCeh1FJL9SobvinRW4z76RL2X1TB6xreXgaMvJGggUgagkaNr7zX47YHEDYdzJmig8SG3Scuet8smspn7HicCtwHa" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 裕", + "013c8d6868537176fac7bfa966e6219830008f03b650b0f18a12fd67d9ebf871c400c5f980aa073ddd1b23d60846e357aee193ce7644b574bf65e04cf913e39c", + "xprv9s21ZrQH143K2YiskWzQq8kpFFCoFKKU4L8D6Y593dS2sExuVQ4GjnS57RhibwTWjnD7NTCE7ye4cQbCK6Bw674SHb3xWaQYH3NBLFCGYJb" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 性", + "1981c3e3ddfd80f6e9ee1c5ef27ba2697df3d1468496f1d56ae3d8e0b3f0677bbbdfca954e48eb86fe6a36fc0f597bf18ea00248757a01e82182badff94abbbd", + "xprv9s21ZrQH143K25ttGBGbx6h9VBpa9ELbpw35XQqDR8deXRyVP2AbtgJ79Nq2cW8KaDizbwuoHUYR1o4tLPhYvSCTNMft4ZEfiDztmjXKPCj" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "枪 疫 霉 尝 俩 闹 饿 贤 枪 疫 霉 尝 俩 闹 饿 贤 枪 疫 霉 尝 俩 闹 饿 搭", + "b1eb831927f1c488e233725f9c409dd9bdb9342324393fa56d958e8842623d222510c322f5ba2899428ae08ece8bd87788748c67bdfa73588669ab816c5f3555", + "xprv9s21ZrQH143K4VNNDqCgnETDkPiihzHpxC9wGE6TBGXaeEd9VkQRHQPotwheeaNFGHGKaPWv5zTqfknzgdWiKFC6DqaqBFKjNSmxas968Vz" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + 
"壤 对 据 人 三 谈 我 表 壤 对 据 人 三 谈 我 表 壤 对 据 人 三 谈 我 五", + "470e61f7e976fa18c7d559e842ba7f39849b2f72ef15428f4276c5160002f36416cd22c2a86bb686d69f6b91818538aa57ae1aab27b3181b92132c59be2b329b", + "xprv9s21ZrQH143K3bUoGmLq8aXRKzvUhseqrXw1t7XYCirduP5XLJtVxCos3rYLDvW8V7pK3voZ4EWSdeXiKdbWNjxmiPRDfet23Av9VRaR3ej" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 佳", + "8e6607a07fa664d6e4ead23fcc08caf72216d6f078c3b2e5be94e4b6e8d64c784d36bf9b70144fa05840e9a49899128111be5093a2b552b6ab76c0906e9b0e65", + "xprv9s21ZrQH143K2ghKxX47TRr4GnQh3diJFN5rJfybjxuwr3xP6pafXrBhwXJsw4HwoiPZ1f6fFPR964eoXybV2su498Ant3kYuYKE3CszLsU" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "蒙 台 脱 纪 构 硫 浆 霉 感 仅 鱼 汤", + "decd71d2824a1bbadf8c3942f43504a648a8db5f1cac0ae1d0f787728353002a12644b1a6b725147c91682e7f33aec13493b9a779a7dd8ee15a5d10ab21d49e5", + "xprv9s21ZrQH143K44Xrktko35a7gPGVo91Va79LNer1MzVokcrYKFP6GMLAcuJP3fBSdbRuG2DauFC48H6LmyZLfkUyjpm1R2AxYVbnT2P5tur" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "父 泥 炼 胁 鞋 控 载 政 惨 逐 整 碗 环 惯 案 棒 订 移", + "ff66373b70b72b34842f936bf3bb44d661fdafaee7740d574fed6aa2ef07783cb6111f2862cbd3fc5528e322dfe054557a74a568a1b46c020cb88938e2293ca0", + "xprv9s21ZrQH143K3JLu5XeRDQA5RwHh6gUXaQfRf7ihtVguJvd6EFoAzsoyotvNTDZfC6cciies7fhcqaMRTEsZUSbRYqBdaviWRHatRMoHX6s" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "宁 照 违 材 交 养 违 野 悉 偷 梅 设 贵 帝 鲜 仰 圈 首 荷 钩 隙 抓 养 熟", + "6ba622f907c61e29e44833b08441b7afa84889a48ca90ebf90f585e257662b2c1b0c35ad54088e745c73689921209fdd4b5b8ace5d850e366d7c2042a076e660", + "xprv9s21ZrQH143K3dE2RQqFbapci6WBj47vZCkLVf4r14QsRZ6Ny2ck6s8tzZQzQaM55Tt4d2tRS9AuFrEJ4yBXsmP8yKeVGXT8E97iRVkDzCj" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "伐 旱 泡 口 线 揭 县 杨 断 芳 额 件", + "7346996be5f2b02c67ec465c677197375b589b6e8871c842505b139c2d47feca75a2a941623d6486aff6b21c95193a8177960d123cf610f03f3224a9fa7d0eed", + "xprv9s21ZrQH143K3A2p7cttKM5L39rutYgY4jqZ16z7hpAEdyiT8fr5eKdaHGLMM2ZmsUFNvGcSfyGMp1sz9nVyg3ZXop1hesUbgAvVBzx4rix" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "福 惜 怀 叔 筋 酵 货 科 牙 冒 辈 罩 悬 耕 浇 呵 连 级", + "09098e00fcc1bfa7d5b9f0c12dfe1993bbd5a0915200a53fb40b2d6d487b969a18463565c1e035569796a7d8b99f82a4c4b17002b0c582037da95bacfeb422b3", + "xprv9s21ZrQH143K4LdjFgCrosa76XBrNQ4imGB7XNc5MhfkASrmAHVNWYLNGY6kNj8foXBLS9aUD8RDubyj3NKWarmXxRQ28AeWVPbLD3a1FEq" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "仪 未 九 茶 队 梯 妇 孤 托 病 泉 贺 产 绘 吹 测 局 碳 征 墨 晶 帮 息 延", + "6d55f2dd8d42f1cc5e0b4ef6e8a95200580ff4e29d2a3dfa7f9ddb1af0aa2e93780d84d952d39776a379ddc017847ea01aa01b85dc208e7f69891d5b7cbf2eb0", + "xprv9s21ZrQH143K2tkCCjLXj2L9Ds9FttWwH6Eqt3uvihwUvpSpTm3RmRtSkapxASEUW5HXE6Qx2H1viBxXbLZVKxyfQ7fyFn6NcDspgJfdPaT" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "济 扶 块 言 穗 定 万 绘 姻 逃 颗 焰", + "bf8dcc2fb4dc8fd2311943b527864feabfebd5fffb6641555519da3606265e895bab5aa1647f6e5afb0cb6ea4d0b27e8a9f2f49251b68ad6bf898937581351cb", + "xprv9s21ZrQH143K2i53x2geCJi6A2QJpaHpxeGK8oeMxYs1tUrUwU3ddu9jHVucF3ePQ1koQ1TVFHnfP21ScWzp8xKnMinNUSW5itm7PicXWYW" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "虑 铺 目 祸 英 钩 尤 添 醇 嘛 触 独 起 赋 连 剪 邦 中", + "07e1a2dc2eea79bb12be53d6fb662edf87796cad60e8d10a655ba39a95c5a68eb21f865a1b2f37d780286adbbddeccba3f7844c8a2b1a82029e6a855c713aecf", + "xprv9s21ZrQH143K4V2oh58ZSb1CAbYkwB6ThJmTpRC51uKha9hy51R6RVzzbcEog3n6h4u7FtotS9arBvLofX7A3Apvcaedmeg1jkt7Vkt9TtA" + ], + [ + 
"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "而 怕 夏 客 盖 古 松 面 解 谓 鲜 唯 障 烯 共 吴 永 丁 赤 副 醒 分 猛 埔", + "0402ae511062cfacbd5e33637a95e57e2e14fde0c5dd471fe66fc1154b6373802aa8641a78b91658052bff0a5c5bd075f01fc74b0d73e95a890430ff6f0e728e", + "xprv9s21ZrQH143K4LSoFcKBWtmUHjzuH56srzrkpPfGx3i36UXpVUEtGvCuZ3egRTyDkaqRN5Ec1HXvXGSGTXLyAKzCRnTspi1D9NdmQmWqM5x" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "昏 途 所 够 请 乃 风 一 雕 缺 垫 阀", + "aa7e38f64810007db63e31c479b9848cd5ffda839546749669bf53476dd036a33fd77d0a13d4418fb536ea78b028fc19533db4bc9e0e12a14a9432cb9fd112a2", + "xprv9s21ZrQH143K31afj91bWiGw2aC2xHJwrWsMs9MuvEWZETkpWr15ZNF6LFjkHh53iD2bwJYEawvCCvDRaqDsr37fhapgGDkA7UATtwBsZu3" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "瓶 顾 床 圈 倡 励 炭 柄 且 招 价 紧 折 将 乎 硬 且 空", + "2a6181cf2b069ba30a87228d54770ed5abf61e8151abdb0b27646a87a6100d4b7b496c3ca26f027d0b06724c6c5a469f43a7f1ffb7782e5afb01d143ca65973d", + "xprv9s21ZrQH143K3ji6aQ9QQi4WkFhAqnrzRANJkYkRWkMqbr51mAV6JYUt85oR8Jt3D7DreQdsb9292ips6VKwpitAhLR7rfLdhYz7zgYei2F" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "柄 需 固 姆 色 斥 霍 握 宾 琴 况 团 抵 经 摸 郭 沙 鸣 拖 妙 阳 辈 掉 迁", + "4dccb0a3578716975b840c51e279c2af728567ff42e98dd09b9e61742b41d9f30d411a501172cce9b7d5706a480dd4d4e7fb26021a36a74381156b09d251d65a", + "xprv9s21ZrQH143K2hLNNA7KnimwonwFXCiGVM3K29DgZiTcVUJ8t8jkc2mXUzFZmoFgXWh9UhJFbyKM44Qm2KeGsrEevajZZfKyoLyFmyoDpUx" + ] + ], + "chinese_traditional": [ + [ + "00000000000000000000000000000000", + "的 的 的 的 的 的 的 的 的 的 的 在", + "7f7c7f91ef81f0fb6a3b95b346c50e6472c1d554f8ba90637bad8afce4a4de87c322c1acafa2f6f5e9a8f9b2d2c40e9d389efdc2adbe4445c21a0939fb39e91f", + "xprv9s21ZrQH143K2LTAgxJMxVMKie6n9HQHMUohP6x2cx1TVBr6dxnL3mnSLRiXjiCM7g2ZF3BHzpdbFuhdeh7ZRrzv2EEjg5Tv7kgKZrqbVLc" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "槍 疫 黴 嘗 倆 鬧 餓 賢 槍 疫 黴 卿", + "f38af46f6bc3222b0f5aa14dd5b8b506e51131510f2450ec9fb52c28617cfa59d436055fe542e25dfa01415639d2171e41796f169f8bbc18516941dfdee8fb72", + "xprv9s21ZrQH143K2HYJ8dR81cQGust8Gm4MeyfC6off5BvCfffAxE33WhiYxB4aLV6meXP6QmoKZkLX8UJgrZPcA4A2EKU4iaPWenb6Wg9kxzd" + ], + [ + "80808080808080808080808080808080", + "壤 對 據 人 三 談 我 表 壤 對 據 不", + "33f373da1a6b4300dad5cc70d2329ed614512e3c8a423673c294110521326ca66753b9663bdd7c844f17d81609a410a61809dd5113823009f729e2f2f940cab9", + "xprv9s21ZrQH143K4SV7MzxYhSQtxP5gWGCD75QiUsp7z1s7Kuc8b4V4F6wRqKvhdczy3qi2uNeN9Vw4PnKYJtoaFdHFX4qbxweuRDmQqgMjKRJ" + ], + [ + "ffffffffffffffffffffffffffffffff", + "歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 邏", + "cfd5f4fa6f2a422811951739b1dad9f5291f9cbc977a14ae9dd35dc8ab17aeec9ee6f1455b20f881838f4f945850765dd002a9abcdbe7be002ffcdaf6f63fdaa", + "xprv9s21ZrQH143K2BptXPaTm7CCWCs4v3tfG5jGAa9zLqSTKNL1ah8veMWc53hio5grKVriWhKjNKbCA2w6svkL4NC6pkiRwVkbwazVXtjdgEv" + ], + [ + "000000000000000000000000000000000000000000000000", + "的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 動", + "717f4f70c7550da57e42c6b49ac47b5bad3249605ed2f869900596c2de7653a8528380e5c31709ed9c2d19b868bc530158712e97276886b4863d036177bcab33", + "xprv9s21ZrQH143K39TvGFp5nfiw3zXib4v4waTbxFYoDNoD4pp4DMBneg9MgSqfK4xL7hK1YjDEa5BWMXKVgTxMiNXNSdT3Wv59pD4PB3LDDKP" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "槍 疫 黴 嘗 倆 鬧 餓 賢 槍 疫 黴 嘗 倆 鬧 餓 賢 槍 殿", + "2b219a8be0a8e27a6b50d0a74eb42175bd23e22cf4081518c9a74cbfe2cbace46f0adad8d390f8a2ac30feb26226db14fbc545d18ba0e56a853cbf103c92539e", + "xprv9s21ZrQH143K2wC9BUJ9F5CsmV7a6PJQkxc9TT8gUrkoTXurPztKKosq5REGzdEzEuKn221vm2A5KnjrDBC1KcLo4VeYGSkkwxXWYrqSrXW" + ], + [ + "808080808080808080808080808080808080808080808080", + "壤 對 據 人 三 談 
我 表 壤 對 據 人 三 談 我 表 壤 民", + "d29225f73231521784d98820ebf0ae4d827c5a9e0c0f8845fd63866cdc70b3a40a2281f3f6c6181c5a53e440528dbf83947a4b2056749cb9cc9c83dcd5c91b0f", + "xprv9s21ZrQH143K2MQFKxX1ReLYY2rNunsfLpU8F5WRVWeLfauPhD5huvXEzxoPvmLhD3QtSj1Z5jnM51q9NrRjYBaHG5XJzfVsjXWYQop9pXz" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 裕", + "013c8d6868537176fac7bfa966e6219830008f03b650b0f18a12fd67d9ebf871c400c5f980aa073ddd1b23d60846e357aee193ce7644b574bf65e04cf913e39c", + "xprv9s21ZrQH143K2YiskWzQq8kpFFCoFKKU4L8D6Y593dS2sExuVQ4GjnS57RhibwTWjnD7NTCE7ye4cQbCK6Bw674SHb3xWaQYH3NBLFCGYJb" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 的 性", + "1981c3e3ddfd80f6e9ee1c5ef27ba2697df3d1468496f1d56ae3d8e0b3f0677bbbdfca954e48eb86fe6a36fc0f597bf18ea00248757a01e82182badff94abbbd", + "xprv9s21ZrQH143K25ttGBGbx6h9VBpa9ELbpw35XQqDR8deXRyVP2AbtgJ79Nq2cW8KaDizbwuoHUYR1o4tLPhYvSCTNMft4ZEfiDztmjXKPCj" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "槍 疫 黴 嘗 倆 鬧 餓 賢 槍 疫 黴 嘗 倆 鬧 餓 賢 槍 疫 黴 嘗 倆 鬧 餓 搭", + "fd50ad67903b2046356e67e55d67309b6f0ccd7c23bfefd049a5b8a40d56c507d73a5517e2d2785f024a7794854594aaad845dd0fbd0432c25a96f2a7181a2cc", + "xprv9s21ZrQH143K4TP9sQD1LnxuSy6WUe1hF7JMPo4qN6TMX5udfEcJh9x4PqbetYoC9c1hpx7RxP6VcgzdPxPCJ91De4R1TgGNVC9AFhxMwkX" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "壤 對 據 人 三 談 我 表 壤 對 據 人 三 談 我 表 壤 對 據 人 三 談 我 五", + "d029fc9737b801cb4f9aadf5feed02a117b76ead7058e055cc39cb44864023eb492e6a15c68569d6a03a5b11bf15a456c64e1781a553589b47ab569801239a00", + "xprv9s21ZrQH143K4bYXrWpQYgiSHDM1iVkNVnyqbpjyZksLS2fmxKCbwQjz3sBFTD1aFY4xWrYdHTyeFYjnYWfKGLc5WCkpokdAZyP9XEGJCsa" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 歇 佳", + "8e6607a07fa664d6e4ead23fcc08caf72216d6f078c3b2e5be94e4b6e8d64c784d36bf9b70144fa05840e9a49899128111be5093a2b552b6ab76c0906e9b0e65", + "xprv9s21ZrQH143K2ghKxX47TRr4GnQh3diJFN5rJfybjxuwr3xP6pafXrBhwXJsw4HwoiPZ1f6fFPR964eoXybV2su498Ant3kYuYKE3CszLsU" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "蒙 台 脫 紀 構 硫 漿 黴 感 僅 魚 湯", + "27ca577f0318b6c6067acce7aefacd12bc9fbbc8e365fdc16bfc0ffd76379b0768dc56877f19eee4c1222dfb5a94a5516c5707e6a6ad070af9a0fe7f7799ac5e", + "xprv9s21ZrQH143K3FtWQPZHP7Gpf5qgbvXqPNo5iCSfkGhWnATeqM5FuQ3YTx4sSciJx1MjVnbM3XQ16N83x5gNwcsVG7PTf1cRDhvyGZ45EY5" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "父 泥 煉 脅 鞋 控 載 政 慘 逐 整 碗 環 慣 案 棒 訂 移", + "fcac6cdda6c67e46ea46e66d00df3cfb1e437aa05f1b280f5427c0ce521a94b5a01ab016d235b7944f36d76ba0a297968ae0d882fde95c96cae34e35f2433c82", + "xprv9s21ZrQH143K4YANCnajhJxfsFsDEZGitTmP934osTHvUmTEtfSaYk8rmvj914uGUYaJH5ALjDgVyNYW5gRGPdQBFaqoSbGkHXhfdVkNVY2" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "寧 照 違 材 交 養 違 野 悉 偷 梅 設 貴 帝 鮮 仰 圈 首 荷 鉤 隙 抓 養 熟", + "969aaf00b9af97a1c3fd0b7b35480aebf51577658067df966caaf5cace472d2ecdaa2978470be83463262340527c0564d8c57f86764d48e9bebd1ce594955a6e", + "xprv9s21ZrQH143K3aULbWnFhpGS2h19R5JdizbLLLQTDsGmKP5A1sqQop1Ff8Q7NEetRHRNFR8AAsPm1kr5hGU5VLYKURZeiVe9k5hqVmhZ5zF" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "伐 旱 泡 口 線 揭 縣 楊 斷 芳 額 件", + "09c172005e7dd81fcd55b87d13f114207ce7726376ea74a1b9085a799b2afbd5ac5526059e722987a65f858e5301edd5f4c91deaf9d7b4f9bcc38919e5ec3725", + "xprv9s21ZrQH143K2sMUzpWRrPMF1Xx29Yos3Kah6H4E9YaJqDm2yEMXypoKdX1ugkn1Vx3k9Pr2LsKNQnHqoDFP5Jepm2PCkUS1GrQZps1wxkD" + ], + [ 
+ "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "福 惜 懷 叔 筋 酵 貨 科 牙 冒 輩 罩 懸 耕 澆 呵 連 級", + "3e09d89450ae45cc1a07ab308649f291ad5c1452da509d7269daef52ddd04db8bbb6bcb8a71322c4d25ed4686d910e84156fccfbac2838ba482bdd1e4b2ea693", + "xprv9s21ZrQH143K4bLxh3ir6ziqU7t6URBK7uA8Md4tzAd3TAuBNyZREqEoZBevwu8Uw6tfMujFRWrZvCy47nbnKgDhsFaC2hvFJwRdkhnoM1v" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "儀 未 九 茶 隊 梯 婦 孤 托 病 泉 賀 產 繪 吹 測 局 碳 徵 墨 晶 幫 息 延", + "d687bb89cb435fe1de166e953b41500f3717a497ca35c78322f66cd63e675fe0c8aba92463544631cdd6a985db03bdfcbfd839002ec609879e8768a3ffdb5fea", + "xprv9s21ZrQH143K2LB4bPuHTErkGLt8DV5BbRsS82ySFPdQr57fzBY5D7VadhJHruxFzVcYdEbDqK8QWPSqj6LCiyQgFFurfnWWm1v6N2nGovN" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "濟 扶 塊 言 穗 定 萬 繪 姻 逃 顆 焰", + "806655cee21d12c952d6a11c12e742809c4452b6e07458c6ddc2cc2a8920e308476f3c6ba7fbbdab3de3a7bcecd4de5dd82dee7a217d0cd071eaa2313ca390da", + "xprv9s21ZrQH143K3Kcz7sz3UyDYJhSjUYujBdxFwDHCbqmKWqN3hSf1pjnqoxuAKsQXbnvrxzRBLqPN9BKxHUiYgn1DjhwaStTfZQTCsaTEWxZ" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "慮 鋪 目 禍 英 鉤 尤 添 醇 嘛 觸 獨 起 賦 連 剪 邦 中", + "b609a4e17fa8c3c0b4da704b1699631f0d85f5b7bcc7d1488270551670b5393a0dfcb4d8eba9860c2c211324bbf3b587763ad1ac6a9e61a4e2e015bb6cc6a58a", + "xprv9s21ZrQH143K2Gmewuzc56mbdKTqPezqWqk7ih9AdnLc9A2865TYtywU9sZ547mKYFKnbVatAuUUGtqgAz1ENkv9FU85jARHkCmtAGLmExn" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "而 怕 夏 客 蓋 古 松 面 解 謂 鮮 唯 障 烯 共 吳 永 丁 赤 副 醒 分 猛 埔", + "8ce6b92bf95337a49bfd3d80774c9a73d05046eb2cb41789092a3bfbe7005ca668c427a42f1a93982d9076511330817b6d0bd49ba4f5a39e5756472b162f7ba0", + "xprv9s21ZrQH143K41g7SsRpGA4g25xDiqjkhq8DNbJD7jgSksvt9kakaTzukN4SfVrUM2A9yGVPggP9eFAmuriRSpPLahm4ngaxvNHyQp82cee" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "昏 途 所 夠 請 乃 風 一 雕 缺 墊 閥", + "e62457aa7f30c24fa46b90aeba2cbb9e77c28fcafa0c10dab01f5323eb1cef22f23c0e52cb5dffa2b2911a29992213c2cb20564af268eed03ea11292fff1a737", + "xprv9s21ZrQH143K3JPEGRnwbdy5xdTvVdXEDGJX8gc385oC3H4veWiCVDrdeeFPWrU7Pz3PNMHcZrbutLY1UGs5dE83U925xKXLUrXXpz2ihKw" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "瓶 顧 床 圈 倡 勵 炭 柄 且 招 價 緊 折 將 乎 硬 且 空", + "6b5591c758a069d3425bf93399398e8ef3e1c32c27f46e0a5284976dcacf25895f5d7747b84f38596247557debd133576932d394ad24c7a00aa24555fa668c5b", + "xprv9s21ZrQH143K2hh1em7CqtG7uL1bTNG5hfP35X6Y8uwnTDiS9Cm87WNcZvmFHLf3JwoP2W82VzaRejpqr7oPnFLguhVSr5pWnG9YSV4SdeH" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "柄 需 固 姆 色 斥 霍 握 賓 琴 況 團 抵 經 摸 郭 沙 鳴 拖 妙 陽 輩 掉 遷", + "17ec1a79121f3541e2d78ece35c8cfe7f5763b39d93fa90492c4beca26ee69d3aa7f4b1e6a2ac5e8225e08dded19357ee44b852dca425792842ec8eae09ae43f", + "xprv9s21ZrQH143K4RoVseL4UENdN7Ag1WmcK7Q6Pk329krQW4RifHJ5sNizkG1PiyRXAouyL7KDFJSQAD1VarGTPftD1yZZAni3QczW8V5gNVG" + ] + ], + "czech": [ + [ + "00000000000000000000000000000000", + "abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace agrese", + "872501bed75c98fbf943a67907bf394995f337e9adfa23687282d1135c262421715a0bcccfe2d3f5f8b72c8e2fa12a7a7267f8047b744557f4a9d49d11ccc75f", + "xprv9s21ZrQH143K3rnjkVvSaFkwgg1J2tnUAeqv8SCEWTWdLVZiJsjM6Z5ieeUnrR1Ws6sDb8Guqp43CXVRmPooUs2cjwefSXh3EyXx2vmzZ96" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "obrazec znak uznat zubovina zeman skupina zrcadlo vzchopit obrazec znak uznat zubr", + "68e1bd31ed5f20c9ab108c03b524e85209b0b27af80cb5d48fa71d03dbb528b73c2349bb8576f9b68825272984061594f520e54605a4898ba61c433d06bf5de7", + 
"xprv9s21ZrQH143K4JXKqKovjKKSb7zgQcgbuKNNjaWyYBjjdny1SyXiPbjzSNUR6uLmNmPJ1NcWzeHDLqmopeTTbZ3DbPobsm4pbDrNrqhEDbK" + ], + [ + "80808080808080808080808080808080", + "obvinit bageta doma amputace bidlo jedle arogance butik obvinit bageta doma akce", + "067089f8edbbb8bc8ab6d0f3e29f250d136955745797a20b63fd4372627c51c4576ebd5fb6c6d4825d21f448cc24b342ce3b0117fedf41369cb5a6be77494aa7", + "xprv9s21ZrQH143K4YKjBNHoBtUM2qhAvjVvXsBZJGAhogbqhBbgTvjRjPSgC5ShPUeGRCRTdRMYLZ7wWxRWwcvgQm9xX7b5udk9p5yfiekNMQK" + ], + [ + "ffffffffffffffffffffffffffffffff", + "zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zticha", + "04d0a733d43c640a4492b670a9549c60a358a681891cc2337a01a3c8288cd2941b7e057dbcf2dffd1e614cf5fcc9d38d9228fbd3ea5ceb508b8aacac5f35ccd9", + "xprv9s21ZrQH143K3pcHonm1bGBQvbLf6Ty3c7HKCyzFr4C4DnvLPaT1JUUJ4MxkWyNhPpCjVsLMJEndqUocNxhqZ3UfFwxAsn7zajkE7n3yXYq" + ], + [ + "000000000000000000000000000000000000000000000000", + "abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace balonek", + "b5eb0b74cb5f2c616e7136182597ab61dd94594d22f15ce6c94e04eb7336a56d3e445ec1279c1f04b861de5f7c6b2fc95227db53be4996de3ba87d6d76b09098", + "xprv9s21ZrQH143K2Edeaippdsom8CKbdVxcorYqTexQoxhdci7pVuv6K1yFYnGunPP3d6oKyE3ax354UgfNUu6Kes1hvjvJcoyHfyjxwbjhFqM" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "obrazec znak uznat zubovina zeman skupina zrcadlo vzchopit obrazec znak uznat zubovina zeman skupina zrcadlo vzchopit obrazec zmije", + "93ff13dee31715a6568609df3f7ea295d58728a65611ea03620d2105a0efbbaa39d8b6541b3b5a57a25dbdfd5006f0c58779a7ed196e25a1a97d1442e3f080fa", + "xprv9s21ZrQH143K3SW9C3X4jc47HaXuDHjRvpCDxoo7PgTpMG5EZoFkDCkCzhs2yMvaUQidKKXBDChe2ns524iFEbrJFqNCPFvR1bjyRmq5rxj" + ], + [ + "808080808080808080808080808080808080808080808080", + "obvinit bageta doma amputace bidlo jedle arogance butik obvinit bageta doma amputace bidlo jedle arogance butik obvinit bezinka", + "1843be39a115dad287e10d256d2e9bb81244cefda2b7ead8a762f53033512abc7b6db26e2ebe8053fb82e313c24bcf62ae84ba4aa2900ca0fcdcb1affc38887a", + "xprv9s21ZrQH143K3uYc2wzd6UfPLXqfQKsYJVyWL2HeCZ1YKhqd3jt5y4w9bfLTf4bAqvD3NbJ8ifa6vbTvHL2t87wsgi22RXd8c8Moknbzdcc" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zlehka", + "43b7d9b1b25d046f8a89fb57ba10bed11b5273574bb820eb01cc0733a421f1c98ceb2db42d299e7e96aa2c58435e916821bc9d505525b3b5448ecd4c97babe0b", + "xprv9s21ZrQH143K3LM3GwVcGcoMnK9UmQKxTaq1GiX1HsG5u76FmfUi5cTcH97WSXxVNLgURJCgL8RT1qDNsiQ9Sw1DDVAhiu4h4xMz6mtFDzL" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace abdikace branka", + "dd2a9f662649585707dadc6e8b2df2c0e0e2691d53bacea2212aff4063ab4fdc79b703a7ce6744da31cd2ee12e56b9ee0f430a238b892fa660ed0ce879f2c472", + "xprv9s21ZrQH143K3RHkXA6CEXUxT6nXGMM1zKuxgGQBD6k9gErraXywHoiqx4syweAftp5SCU7XSoFz5T3RbPs3HNZtZFa5JcR9ZD5TUmV15tB" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "obrazec znak uznat zubovina zeman skupina zrcadlo vzchopit obrazec znak uznat zubovina zeman skupina zrcadlo vzchopit obrazec znak uznat zubovina zeman skupina zrcadlo veskrze", + 
"f4e4e2d8817cbb3925d6a0e8a2a466dbe1353a5885ec203030722607b8b5f229c71066c18681fda4291d0e323e4f6ba099b5b7efff442adfa14124fd07147fa8", + "xprv9s21ZrQH143K4bgmXSbkNXn4ntJoQWUwRscM7yiZjzHbBFc5nMmgMeVcVJS6ZqXcWaTp6Phy8PAji336TDSNmiSWK7cmuzCSxkobg11RTN2" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "obvinit bageta doma amputace bidlo jedle arogance butik obvinit bageta doma amputace bidlo jedle arogance butik obvinit bageta doma amputace bidlo jedle arogance cihla", + "bb8b82baced0db7764e102d1d1f68035269e84ec6c1ed0e09b2a31094330967aff9e1a490a407fef736fb8719c60bfb8cb0be9b27fce97c3b619409c195e2f1d", + "xprv9s21ZrQH143K2aufrtmAWe6Ppto9BA4jSHu3XmKbMemyWEJ37Zfwxie44JJi8pXDxfjkSv8LxdKKs9rmV1R9SxCfAtcttf6g2JK3mRwPyzM" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zvyk zavolat", + "3991e1ccc78af78d58cae577b786f7c950e1f23311d0c5f6d51b884d6142a6b4fc91a227c895313bf3d35731682678653101f51546717d60438d54dead32f834", + "xprv9s21ZrQH143K26BAmMqZUKy3rpA3bYvGiip1tjQ2gnLwKrm9zucVU6n5PSJ9FLWeW1JjQJyZJ52QAV3HtevGNQ3BnTh91mAWgaacd6UenQ7" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "pokoj jogurt malovat kroupa holub malvice rachot uznat hnout kasa karamel potupa", + "f3922b8086d559436ba2d04bc2aae4174e6504d7d4d451f7282d0b41a1b8cc958b45a896985e0b9316ad09c62f7d62dac85bc3d3e2e2423bcad3336412fd33f8", + "xprv9s21ZrQH143K3gt3UuAosyvKpAmRpGfWuM19LVhrpWshTnv8o9Z2SPx7PxfgPQSZtm7U34cWo2NUrUtWCtXLhKDFZxg9pjM9KRde9dLwsZC" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "masakr odtok pijavice tajga upravit krmelec krvinka buditel zavinit lakomec flirt traktor kresba plevel kapitola tlupa paseka ladnost", + "8c7c2b7767caa63139099cc5faf955cc582abc43494fc0f94b1f490ddfb7c221df55b663711755602c1354862c1da7a1241d17888f1be6f4ba7f58634758ea0b", + "xprv9s21ZrQH143K4TFHpb3sbWvTgFjmH2b2r6TAWi1L9UxyL2hBXQeDwVgQmR7CG3agahDGRPWJZDWZjcAYwEGjRzycaRzTmbmxjaJxujKScF1" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "migrace iluze pukavec kaktus drogerie hrobka pukavec onehdy suchar veterina rorejs cukr mihule koza makovice uvozovka oklika inzerce odhadce zprudka spousta namluvit hrobka obsluha", + "4b315b6c57139dfd19187b6029ad8b2fc6165dd97a43e59c4606a11deb192b25df5ad4e8fc3b4c2da9b9e80eae48946d769bc7bc95786f93b934bbc842cb8002", + "xprv9s21ZrQH143K2ro2PraMEHNWQX4w969ZUPVBZFY8pViiRNPhsRjFbj2exS12cg2YCa6kkYUCSJBhuHCicMAGqj1ZtSvKYXLjaErhCBve7XT" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "sledovat ticho potkan dotaz carevna pahorek ihned nikterak hematom pokrok neochota cvik", + "ce64d48ded9f32127bc7ef66c829e32b576927ffa1f323f0020f58c3256fdeb5ee2ec7bc257bb492e4fa1ae1e7f41b8affd9f68a2143e2d54e443e54d866e6e0", + "xprv9s21ZrQH143K24d38NZ3eGjiZ5cuqmVJjsC2piMq6ZPSsEfgPf6F9XZJvfsExo6M2P5NCgdaUYgeLchyBw8sAWkH8joq4yMeNF1i12zXWWN" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "most uvolnit novota usmrtit terapie tehdy litovat filozof radon svrab surovina zdivo stanice pejsek ukrojit vymizet helma decibel", + "8efd8d7625a41d02e53b8f363678acb389136ee9b19512381417e7f3295cc5bea28a7feeadf29ed8c2bd617e67feee3c736bed06f29ed3538777d58187458955", + "xprv9s21ZrQH143K3Vd6GfvEYmC1ut3Z9uKS5weiaKT77zoz4uc4MaJMnQWsXYFLK2wp3DiARFbUyEv8yLqAirLmWMDmsXFhPioprwBiqQbTPb7" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "poloha koruna dobytek makak domluvit svatba paluba utahovat orlice jakost 
sypat podvod barva technika pastelka kurt ikona obvod mokro recept napnout kabel lord nasadit", + "e69afd8c8b83713ce327570ca2dd9d588dd1f266fde95d7848059fdcec016e8f9f587a8fcbdbd061ecd9a5e1e90f51f9453af914df7d8e9b5758f91a1963a413", + "xprv9s21ZrQH143K2zXve6cknQXePKGSMLedfyGDpgQZ5oNB7YKgyJqjXoZeQ4qj1vd2HZpnF77g1nqihMeAi7wnYd3c3hrgczNAadCKdetb2tf" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "drak uniforma kuna kapka tmel bedna evoluce technika vypustit popadat rychlost vodstvo", + "cb409aa4d44356187fc1f0777afc3f0057bad31090e589e1a8a13911a0604e9ebe976bbbd1633b3320049e58ae4939591150f1b84fc552975d4c7b5774efd20f", + "xprv9s21ZrQH143K3S5abSGNCkH4QWovuVGtqjrufboCgkqDvPGUtnUgPFNqXwVyWMD45MmwRFDYUe7Ny89sqvCqxPMWtq2CDa14t1CYxLjefgT" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "ochladit silnice exkurze zrnitost jiskra zprudka pstruh tlukot vytasit valoun najisto kralovat bobek uklidnit helma ubytovna pysk andulka", + "a295037a0335fc58e639e1eb4ad8c6679386cc0b6696c6c0947b9d4420083740c02a3c7e429c698b3650c5d370b87427ff094e17a18778d917cf5fa274c89b23", + "xprv9s21ZrQH143K3se5sFVgT6vXcaaKoaHse8ATQJAgDHVBfdwAT2BDgm5YoEPJKR7NN7o5ukvzfKvjZ16NsLG6qngxkRfQRKuSsqyFmcyjpf9" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "bavlna mozaika ofsajd kukla obliba kormidlo monarcha batoh chmura mdloba makovice obilnice popel pohnutka duchovno panika neuron okupant rukavice kouzlo stehno badatel sklenice rozchod", + "a3314b5d32a47a746f1605029ef41e446e589ec3879b8509a93779bdb5df018c102dc93d3925b1bc04badc7b7e78ed3c79f05485a289d3a8f4731282f4b65f2e", + "xprv9s21ZrQH143K2NiewxC1w9akTDfjsfRQ7yyv42HTukKMZ2qqx9zRrq4YYoHUnEeSR9kVa8hY8vHzCYhPxzK8VF4vKJhQNAjGpn94j9r7nEi" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "zajet nutrie beton kobyla kriket sranda elektron abeceda uboze lump vzorek potvora", + "a5fe5fd9fc7a02f2753029978da50e8c1af2a773977ecfda7147c184374376fc1780d4c2516d23eb1559e9cf46cc6f60d30418ba05fc789295adc483f26641ea", + "xprv9s21ZrQH143K3Pg1Wcq1fLNYrSSbfioYAiYMqkY4bEN9JrUFnJNLsK2Z7b7stHtJae53w7GD7rrDjJ56CCMNYJzwpdC54pSFkCGV3VYwGbY" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "slezina navzdory odjinud oklika ucho ropucha rohovka zavalit graf panenka invalida katedra odebrat deska metoda ohryzek graf flotila", + "73716db54d10471971852df2bda795452cfe9b38aaef918da7a2216a3b4249d76b4627b603a03537e5dc2152e54c709acba85d361f4978733af4b7366b13a61c", + "xprv9s21ZrQH143K2YLLxKi6GZjifUEoqww1zAAUd26JY5T54Z6HgEip3hkFKUfAAeVqEmZrdoHoahhm71MvBwkMTZALYrJtZfe4eLqMyZE1op1" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "zavalit genetika kapusta tvrdost dopad ujmout zdobit mistr splav ptactvo fosfor hoch pocit beztak slon poplach mazivo tancovat pravda uvalit konkurs surovina nazvat vypadat", + "dba03d22ab6f6963abff6d9f9433a6aa733490dab58b39e3eef635c9f4fd6ad8242c3eecc4db0f26a447af06a089d935b3225e36615a07babdf2177ea3fd1670", + "xprv9s21ZrQH143K29UvdRPWan6npJHgHs7bmiduFR6qWzf5z7gFuqqcR7XoHpSBamvgTn1Qzy3JqMSuyxJEnFN5s2s9mbMna7WAiKheLFAA4x4" + ] + ], + "french": [ + [ + "00000000000000000000000000000000", + "abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abeille", + "3bf3366c40256d7e2fca716fddf8673425c7c7e444af290ee1edf1bbf095e6e78a7190253f3e46f1e2069345d4b05ac17b242faa225c0a3e4d268976744e0698", + "xprv9s21ZrQH143K4ZsEXSdGmcpsqn4YxjPgHqa4DFvMRmD4oTkiFuYia3srKewVsU75LN4jP6PeXPYFWYpgP8B74tQjwG1GoQ9T8eJxGDjzibF" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "implorer visage sonnette 
voyage véloce pourpre volaille tribunal implorer visage sonnette voyelle", + "ab9180b7dfdde74e5cf8781e5692e2c0b55afa8bc1987fa8e14e3fb83c88b195c53e9f939f8febc33d2958f5fcd8add57843cb318d8886130ef9c9879c826357", + "xprv9s21ZrQH143K3MCzhsD85FFZ2d8vDN2QdRTN63gbU3qHjZxn7utU1kwYGR5bxQkssS1Tji4Tuw8vpTzDWogXxhbWYDKuZveeRBe5wiNsuFJ" + ], + [ + "80808080808080808080808080808080", + "indexer acompte bolide abrasif agréable dédale abusif appuyer indexer acompte bolide abolir", + "0c1ece83a464688d74744723d609e30e191d05ab8c082cf34bb2405bc4363dbcf6a9f83707b577d230728b3943920f876ec844e86dd0d117152c23802d25be3f", + "xprv9s21ZrQH143K36u4b8J8pxUwCCnciUPZF3JjZsq5mPCDEpNWA1j4DnYDJ53NV7b6cmNTM6Je6DG11hJw8Fq1HXKEasrpxDC8WqaEsD5BPGq" + ], + [ + "ffffffffffffffffffffffffffffffff", + "zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie voter", + "7d2f168ce71ba3e40e74baf47a072a94e49973c0dbdb33a62b3a285ab167c704a85d6ce0d15cc6a4dd3bf1311334ee0d290ae7d20115863d5f5633b8dfacf2d4", + "xprv9s21ZrQH143K2AsqW9AAdu5C4zUaV45MgyzBQAYbEKtKusR83UzLwCZdqDwqJ59ebNryoNuVA5pEiY1eBYqr64UGkwZGezwaceCqxPWia7M" + ], + [ + "000000000000000000000000000000000000000000000000", + "abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser adéquat", + "93d81d146eccb7c624cc25daa4cd52736d64bdc0fe020940157e73c108a87ee34d94d7e9554e02ea0f9a7ea5574426220bae7c4959c197a6c9e2318cb252683c", + "xprv9s21ZrQH143K2iCVwUFixFL9fh5f6XrcBfaLgRzozbBgzj3dTo7fBTn4EsQ8ERgq9hTPxFf2hgE57vJDfEGKwsegQFoo2mEM6HEgFavJkQU" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "implorer visage sonnette voyage véloce pourpre volaille tribunal implorer visage sonnette voyage véloce pourpre volaille tribunal implorer vinaigre", + "dcf42783150cdb92672c9ea7d13f145401661f10b89bfb012a803ca7713e97181ee28ac327a982060a7f8aaa6e8c649ca2c5b83c24458393fe41739ced31d987", + "xprv9s21ZrQH143K2Lq1fbvL5nedyALbSBmmm4EbcVduaPFArtunM6azzNy96pcKbLvVBQcFFDa3Agp54eMhCBH5Ehtbxitv525n2SrhYvALPiD" + ], + [ + "808080808080808080808080808080808080808080808080", + "indexer acompte bolide abrasif agréable dédale abusif appuyer indexer acompte bolide abrasif agréable dédale abusif appuyer indexer agencer", + "b039606212ccadb0d05c7a0c08605c5137028d0253d26b9ad6ee113f9595700d9834b2eec8b224975a6d9585d7ad39e962036edcf07d5b125b0fc225d519982f", + "xprv9s21ZrQH143K3m5hJrQxzUdU4bDDvLSisrf1Cv5HQNCEzbUokwaVa8X1huTY2JaTPJug14EbopY3gtdfWjmVoo4CHLzuwjf5Jhdjgaqsh9n" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie viande", + "e12d20a535ef5e9e2f87e05b5261bdb51451e052fe484feb87543f5cb7a8822c4aa0152492be1259fba00a28c1e95518a90f0645bdd0eb822516d37ac881f7e0", + "xprv9s21ZrQH143K2woSZYBmSoRYygWNGUUrkLiy2i4FmReSqyc568kb1siagkBpHFj6MzLbfuh8TaWKDckfnfphrYyKTAG2rk5y4sGv4fjn5gD" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser abaisser anaphore", + "0f3eec3279b55f3cacdbf1aef705a086078d7eb8048e402202572e7038e9487e39104b4794e88a42192af030a176b034fa36ca6641fb8128fd23c30806b96c23", + 
"xprv9s21ZrQH143K4UeephsshHgatV7uj2D2DotKpGrWx6ShwxPdRxXtyQWxnadSvjT8Df7UDxmRuiFJa92evePgqwgJSG5pY8VPJ8hNiyGHr1C" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "implorer visage sonnette voyage véloce pourpre volaille tribunal implorer visage sonnette voyage véloce pourpre volaille tribunal implorer visage sonnette voyage véloce pourpre volaille studieux", + "8f12b35fe92a7586dfbdab9721a91300d0dbe3185d0943021667e62fd5a643e0cf2443e544738c5234009aa50faac0dbb123ac847c31dc25d875c56fe39c6186", + "xprv9s21ZrQH143K43qySSryByKtU1Df5foSz6kDEHUsR524HZJzxnKrrU3wTdeSvHVvpitC8nAb2kdeVWGREu22KaiaMF3YjEZQdb7htpY6CXP" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "indexer acompte bolide abrasif agréable dédale abusif appuyer indexer acompte bolide abrasif agréable dédale abusif appuyer indexer acompte bolide abrasif agréable dédale abusif axiome", + "53ab1d10dc8de3a80171b5f00495a3b49e2c5afd486f8111b1afd0ad24f43eb0aab4acab1d4c51126beea32405947924c237157b29dca69fcf64eb635708895f", + "xprv9s21ZrQH143K31JiBbFJEViuVwAuWSD5X8PKzS4d2PoSCxHpBXpicCEMKndL8TURUZ2zeoA4vDGkECeJS9wMhrRcmW6ATmPmmwC3ziuBRdP" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie zoologie valable", + "b5e96f552ba44ec827c1bc5ef362e8cea68dd6f36f2c8640aeb171cf9b66198fbdf155fdbcf7dc505431068f972a92442f33cda0065afc1e9a7f5f7097ea6c6a", + "xprv9s21ZrQH143K2umcmDhRrUZ8wsZ7ACj6rvFCEuDK1coHWpo1RwYPvy3dpWmvPjstSMm9fm3igm9gjsAesjqAU6Tnejizy919FSmyofhRTyS" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "monument dépenser féroce entasser comédie ferveur optique sonnette codifier discuter dioxyde nerveux", + "d322acd69a849cce8719674eeb7cd76520de01ea35210012a44a5dcc19faf285202c3fb3c749a46d338ad54ddd398029ee308ee352a89f65180dbd3ff750dd50", + "xprv9s21ZrQH143K4RswQfcyMEjx6Kc5tEBR43MNp7XJH5mWushqBryZTc97VNPNVfRop62sGwu7Tg3vsd559P1G8h6M2N9j1Zj1vVn1919uvmb" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "fiasco ivoire mardi révulsif signal enlever envahir anormal vaisseau essayer céleste sagesse engager mener différer ruisseau lutter esprit", + "3c0c90b30e1a8bd7aafda95f92fb09bae64988e2431d6c3896c8502f76203652f0db1d4640417d8d3f00ea4de59f1719513f1c01145eb8ee4b0fd73d4c4f706a", + "xprv9s21ZrQH143K3eSdUPkhpFZamFuu35iaZqRwFWijYkE54fvUKoSNBHkXetNx56yvcJSFoeHT9y4AFuWTcFFzezPuk6D6LqnNscjFnvdGYDt" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "flatteur cultiver oisillon destrier brusque crainte oisillon labourer remède substrat parfumer banquier flèche enclave fémur sombre jongler damier insigne voguer rasage gomme crainte incendie", + "7363c9fd3127cb683ad39697f3a7282a06f1fd1ab1ceae8e2e0d7ab2766f3b8fb29162bf46e0d6a4917a0085b763f7f6f36adfdde742b6aa4ff1973149b5d239", + "xprv9s21ZrQH143K3YEnEhMfv1Mwoc91DekvEqXxL85fFtRKtfFk3Jsqu72bUd35jHsTA7aBShvTmhhBrHhqGZ1uVg7RTwzTpnarDUoGU8ZXQ9D" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "prélude routine négation brasier arlequin logique cuivre hiberner cirque moqueur halte barque", + "c46b545d5e7398d0b5344ecbcc20769fb0fbf674848eef1591725a1113f5bed0edf6d78925798cf87994157f43bd9d0eb5e6f3de7959e2e88f6a586e7499b79a", + "xprv9s21ZrQH143K4U7WubDzyDun3RrpuVsFECM9u9tNxjhJhZFCJL5DJvGhKkUAbmmhqpwUV7UvgPPG2QFoDq7jYY6Neb6MnQmQYTo3RF57ZXS" + ], + [ + 
"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "froid soluble horde sinistre rouge rocheux exiler causer orbite résineux renfort vaste récolter maison serrure tonique cirer bélier", + "93856e02d3ab2e6738958350f2a96a18183c0c02aa7cf50e4e6877b1d4f9eb4be1806b034e4a4a271390b7b6ba6b4209f5e293840e93a41a2ecb16ad47936c03", + "xprv9s21ZrQH143K2wjyyXXC3Cra6k8WJ3CPEWc1EgwEyUNjLAxGVoegrni9eMik71k4L6wWxLtqcwgD9TzKLLiaSddwLFp4F6hJ4LuDBWqVJcq" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "mouche embryon bison femme bondir renvoi louer social largeur déborder rétablir miracle adresse rivière machine époque culminer indice frégate ouvrage gourmand déposer exulter grappin", + "08cd47b905df56e3bfbca6d1ddb7ee7ae75d45f6e5928d337bacf34754d392c7225c611136148e130dc516cdc7ade8e8a95ba62ccfdac01a107875ce3e2cefd2", + "xprv9s21ZrQH143K3BFxuNuyFSUgQQ3f4tv6HLktHoVxvMfBKVBow3i9GqeTF9oBbG87rZBX79QTCQqgPbn3ywWc8G4ZiUdhdY3xDAciXLtE9aJ" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "brochure sextuple épisode digérer ruser affecter cantine rivière torse muscle permuter talisman", + "8e4635efc7352a6fa18723aff498fa297c1ed1997c0f3e77a11e65155b25934cf90e74ac66d207175507887068a5c85d24b825d06a31ce75651fc9893e509869", + "xprv9s21ZrQH143K2CpRxCahnStujg79zUscDimXKHemoY4A1Kep85cPai2e8wJ4Y5R4uhrG76merXXjSuukqjVrzJMBPsGC8apQqHymDsBPf9e" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "informer poivre capable volcan dénicher voguer offenser ruiner tragique sortir glace enduire allouer serein cirer semaine opportun abriter", + "ed4ed89acf10eb53fd67c9f81f4e8cbe39dafe42c27e942e67559a825c6083d3373a3e98215c37318f0f28c13546895e76a080521222f6d70a9528a582dcdcef", + "xprv9s21ZrQH143K3N7dvQS9PoT9BemrTHWfp4qzUPT7hbGdZ5uMn6RgMwKerbmrrAqjrMGUskpGJaaorTXnbgtidgEbboJVjH18qDMkLFPf36V" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "adverbe fuite jaune épaule imbiber éluder frémir adulte attentif filou fémur idylle muséum mobile bureau loyal hélium jugement péplum encadrer rédiger acier posséder pavillon", + "81ecca7ce712963df79d6611d2510e9fa31d307557a5eeea9513a9a940c2531472fec2c6988b70f649b8a3416f8f90f5c9c8f0ac4897f4a5a1304c651226f330", + "xprv9s21ZrQH143K2ZnKtKjoyhEcsuEzLEzq5oC8sGegz59C3vf4T8VjvRN2UQQsHw2ndFj1UMNo1uuVtxKzjmNLvVAd2SURTgkVXKykFc1S8jG" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "ultrason hublot agacer éclore englober ravin caféine abandon séduire farfelu tropical nettoyer", + "2efa119637c044ba28eb610178d7de49dabed93fc16f5af675aa661b731567ed3ad7aeb36a04adfbfb694bbd065f6f840ab80369ec3c253ca122deb208ef9f7d", + "xprv9s21ZrQH143K4NWbtgfxvJK1Us782vFXDdb2bTK839KWEUZFnQTXXt85hAHXML9xkKyuiaiKgD1n6ytUZdgT8hirdn4SQ4DR4T4MPiNQz3K" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "prétexte grogner instinct jongler sembler paresse papier vaillant chenille louve cynique dissiper inoculer besogne flairer jeunesse chenille cellule", + "887a87c38340befd47d650b73849907b5892a0db26e17ab55601e4e789ae1d0dd4bc3e7fcae0fae25c3e0d3315456fe8a5d84944d2b799cb63fb9544fbd0e568", + "xprv9s21ZrQH143K4G36f6wnN8JWRYHdbvANd8fF6c1pjb6T11LKxw3Xch5iacc9vyGQDcf51GrF7kknKumq5csscAByQiC1gaY7vFygMghGtJ4" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "vaillant chance dimanche sécable bonus séparer vecteur forcer raideur officier censurer cohésion meuble agiter prison mutation filière rincer novice solitude élargir renfort gronder tornade", + 
"e59bf24814adb55cfc2399e03d94e81df4a906ca5e75f36f2e297623ffc418b8202e9b1444e0e97234e2d55e194d45f89491dc9533a1c799fbb86c5838cc3454", + "xprv9s21ZrQH143K2gBerhfhuxyfCEXGiLW3sxY1scEJUtvroAtxPqqatzqTzDmkgxDsL7C7MMbBk1ZKBXHpYiJziBM2VFbTbA3G1qjDkTt8PU5" + ] + ], + "italian": [ + [ + "00000000000000000000000000000000", + "abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abete", + "d2ae4bbd4efc4aba345b66dc2bfa4ea280d85810945ba4e100707694d5731c5a42ac0d0308ba9ad176966879328f1aa014fbcbeb46d671d9475c38254bf1eeb7", + "xprv9s21ZrQH143K3ZxfinfrsmnuKNwdvRtypJ1TEs8JuE6MEmAMDwsSZApCyBFopme4iR7RnRt9XKFprfLKs9vooFuFK6h2a2hzHuXTmE9md1a" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "mimosa vita sussurro zinco vero saltare zattera ulisse mimosa vita sussurro zircone", + "f8c609647319a50116e9b7d1a0ec5535c6d08d6c958911fd2c8b2dfd55a61e63e9c6c60c22b5c3aec725acb41980e63cb3ed75fb80648092dee1bbbeab476a6d", + "xprv9s21ZrQH143K3yxi91AAWZvT8797G7kL34h3453QNFSiQybwss1gmx2zYCmbg4tiZdYSEsd7arPWYd5MQh28VBvcEqvXSBRu1zYajyJnzFD" + ], + [ + "80808080808080808080808080808080", + "misurare afoso bravura accadere alogeno dottore acrilico arazzo misurare afoso bravura abisso", + "4025269bc4f7550bbc3c61592944946b0d4ac855a5e4582bf86069cc0c9429455cc40d84ba215ed1cec28e27ffc88460c38b9c4e8c486ae878d7c85e95b222bf", + "xprv9s21ZrQH143K4Hh5BqryXtMu7QLbJC7yDh9kscJ4h3PuxA382w6YyjMMkiFVyfmdYFwfP8sVWR1eLygHmczccbzH7pTGXbeqAy54fNVA13M" + ], + [ + "ffffffffffffffffffffffffffffffff", + "zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zerbino", + "24182cf43f956410b5def9df90e3db0d6f3199c2ebd26e7ddef888ee3bece9101d132e449bb9e1c23dd9ccc6131d2f649c021ee591e88cef8d17cb434ef69efb", + "xprv9s21ZrQH143K2hiPzq8SzzER9TQDFYpnYbfg1hE8wUYwcb7JNRyM4aDB14WfoUghRrBKFGffUx5YsTZvPbdCbuVMJG7egUZVsRTJCviSror" + ], + [ + "000000000000000000000000000000000000000000000000", + "abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco agitare", + "2161a4b869f98778b6321714e2502adb11ea120c12163b46fa34e36442ad1981b911a2f9ec82b497e7cd206fa7af2f21a94bb6e4a90159965854784e1558658b", + "xprv9s21ZrQH143K48ZSAvHY5BjAyPXxL3pREwQPZ9DxPagzRuK8f5TcKnDr3z2MSjB58uC871CjjjsNTaUa9BxEzEwraQutT42co4mfFGusE2B" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "mimosa vita sussurro zinco vero saltare zattera ulisse mimosa vita sussurro zinco vero saltare zattera ulisse mimosa virulento", + "d9a6205a985fde8c2337f6cc6acf77a93d6ec7dc792551c01400f5d9aaa86aa943416c99fe60be141ca27ab333d9f96648b40b266d6b2d6a6e5b07c8939568be", + "xprv9s21ZrQH143K2wPR9TuAQcWLNSM8X9oRjUCC3GN6aGevV1zfm8cwi2JtrzS21GzAaqd2MJDszoka7xiduQrDc666Wk27qYhUTagEfr4E218" + ], + [ + "808080808080808080808080808080808080808080808080", + "misurare afoso bravura accadere alogeno dottore acrilico arazzo misurare afoso bravura accadere alogeno dottore acrilico arazzo misurare allievo", + "cfb1f800cd5a0f7a8cffb12231fc61739f5f87c963ead5e205dd48221c3417eb1173d3209d9a8ffc4f00ab291bc22c1480b4a0a4fdeef9a1f3916d0ccbed5591", + "xprv9s21ZrQH143K2EQGrM2arQhKSEhd4DwraP86UrUKsVcLvM2NujY9rPJK78rGUCD4Z3AG8tz8brxcSaDQYDw18jZz8FuT1H6ZdwFyUQVERus" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa vile", + "05a43b9c258f6e83f4073fe4a66d6309e94610fe12dd5d598f4725e4e85ff1fde5ff5b1e61b40e09a481a98953f9dc818342172a460e5e6d17d9ab14874447e2", + 
"xprv9s21ZrQH143K39Y9dY1q4sf3Bv7dkFfDasdSbWUKNx7GCsMp1JTo8KFDRq5TNkGr1wQYm3QZaALpFhcHWhPV67oVZQFMRFu51yVxeL9KDMF" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco abaco angelo", + "84055239f41c182bbfe6ede6db2e8bc4a97cf86746643b7ea6910c71d67bb2a678a97ecd378cfbf59e30db720b1cfde0faaee73afd3c5deef2188e307d04442c", + "xprv9s21ZrQH143K2b5TRb8ReAEzasjVej3ttSzyy5YRu91SdQKP4XUZtgeUipuJx6YoArxkiRSBU5eP2wu6dmgLhgBQJ8Bx5UXmwFudc423DdN" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "mimosa vita sussurro zinco vero saltare zattera ulisse mimosa vita sussurro zinco vero saltare zattera ulisse mimosa vita sussurro zinco vero saltare zattera tarpare", + "f0e226efcd929216020a9e8f879f06b146d28fecd2856bd401a62ecc0ece8bc6ea717e3f9df523a6a00bd4ca8965e0498d63e779e3156dbf174ebac74ad7be31", + "xprv9s21ZrQH143K2dys2Z5k2tZoxJuBENLaf3cTEpb7JUeXDp9i63vbPMqVNqHqx5bGJ5tdNug3JFWJCC4amzYLoFRi3YS55MiHnnUN6w1V57h" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "misurare afoso bravura accadere alogeno dottore acrilico arazzo misurare afoso bravura accadere alogeno dottore acrilico arazzo misurare afoso bravura accadere alogeno dottore acrilico baco", + "ef549c1e44a7b183031b41f9f692795406de605e43ecc628911a38d7c92f392660c48313a08cf1a055a420d4a8c6b12bef7ff354c903303bc3a5dc12948ff5be", + "xprv9s21ZrQH143K2mWU418GviWbPrYTveuukEHsYJRn8aUxV5PrE4XacaDX74amiRkH3m3Qmnz7YPRRXkF33z5472Trb39RksWRmcJitkT5uJx" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa zuppa vedetta", + "5089f33aee7852d86a01e8afbfdc8a0ad5af51538e62e3f007d098fa4fc9817ddc990fa87b7235273798e2df52228b62738df923bc2d711fed9cc0558b3ebfec", + "xprv9s21ZrQH143K2VqjZNjnyCMUrTw6XhLt2BsaPVpHD8MKpKq5mq9S5SLyMSv58y7nCofNGZSgQQpky6aujCURjVuAM9jCcN6xjzbeRQYYACi" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "pesista educare imballo formica curvo imbevuto raddoppio sussurro croce eppure epilogo poligono", + "4ffd8b7879c0c6d7eee14682a26465d6429b8b921d6ea3299fb8a448d84d19b47ead5b23fd14449539cbd358abd19a23560dbd8c4bf6c153d98ea0fce7f474de", + "xprv9s21ZrQH143K2bsiFmYu4jzf5fjcHrrEarDgY3NYhmUEU5bxz3eS3NwS924WYnioF6rf6Fij6XQdmCNjcCJwqdFJ1AKKLMxa4obLnbdFgA6" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "immolato mummia oviparo sigla stirpe fonetico fosso appetito vasca galoppo cigno solubile foderato pargolo enduro sociale ormeggio galateo", + "188305ae9b45e400f6a3ad839061265f36e6050118283b85a3ea842aae1cca29c808978b3b0e297dbd794b74916fc43da57172e90c9fdab930638863c3472522", + "xprv9s21ZrQH143K4RNJpMYxxAuHtA77jzNEAjh3MznXe6FejAGGUPHTDUFL3z6LB1FioxHnMirHNcHyV4QVsXs8rZACW8YSJcNo3Wqjg7dNoUC" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "infatti dire pudica elica camola deposito pudica nobile servire taverna restauro baritono inflitto flacone ilare suonare nastrare dito montato vulcano scrutinio lisca deposito mirtillo", + "093ef04fe24f1c45148f3d4d9a54fb033638011507418cd7cbd91a8fa12157e1cbd9d095b2a660db26e8d674cbf6033a384954fdeadcd7c20cbbd3da46d90f1a", + "xprv9s21ZrQH143K2ZBg22DZJyspAmZcVMa1efjpsz89Hmcsu7dPZ39sH5RCCNr2kBRNFQUUmcwsNUTyZ6UGk7zjZKgCdXrmxg8gJ7FBCEeE5Au" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "sarto smottato podismo burlone 
aria omissione dipolo marmo coricato peso malto basso", + "d9e2a2e18ca8173859b0030186941149f630483cc9fcf3b189e5752d4f8b7dce2b285008f52ff1301dd2e2a673a4c76f8ffec9f8617fd577173b90c6af95631f", + "xprv9s21ZrQH143K2FR5gEkuFMhmYGVyUKiSrZspfn52HonhhbvwWrn5fL7aST1yxtAfd8cQSyWVGH9TamV4SPvyKRZbSV2U7ZTurX1Zoe86ngE" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "italia sultano meccanico strappo smeraldo sipario gommone chimera raffica sforzato sfamato vendemmia segnalato oscurare staffa trio cordata benda", + "c40130a2db00d82c2dfb127c768724c522cbf7f47b464061198c65e9bf4e3879262dd112cb7a526bf4450785e9f7f7e7511f05985d9104d9e75e1baf038c91e6", + "xprv9s21ZrQH143K3QTUdUYC375Cvtyea83NqatsgA5z3BMp95Fbd7QfQRvzqgjuCMU9zhSgiVtAYuDoR1GK9gQRP8juxnFR2Hd4v3mNgfMySGq" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "piacere feudo bisonte ignorato brevetto sfida onorevole stufo nulla docente sfuso perbene albo sinusoide orologio fulmine diradare mitezza iride rata londra egoismo gravoso luce", + "e384b6486328949618978c6d2607df3e7a9db9acc94ab24183aa4e7c1af0107ecbcee2dcead27d7f20acaa427d3d6eeac620ff24ae4ac2ba3b6ef01585418f25", + "xprv9s21ZrQH143K32TYshJvJnEHVnwukKyvaQauzBPx42JYyGT1FfyUvjT5iEfYuaETkYiAUJE434hs5Qkh7FRSqK7kwy5GwjsfS4S5txFKwbL" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "calmo statuto fucsia energia sodale aliante cedibile sinusoide trovare pila rinnovo tiro", + "5d5faba1d0db08a9f0cdb602e571a9b73565707429d2482e4fcde5a9bac1728b053c65853199fbdba73716bcb8da0616820fc817a309c99607dc56dddb34c344", + "xprv9s21ZrQH143K2h5d8kmFvaQT4xHzXrAVwmwAMyF1p7jBg2QoE8RGGFWe1NyqcNPhy18CJ6UKcvHTXkijihhLE18yrpec1DxggywYDVWGiUo" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "modulo rubizzo cefalo zavorra economia vulcano prudente soccorso tuta svedese limitare fluente amico srotolato cordata sportivo querela accusato", + "7f7bd54b8bf5c99a949d3ddc1d4baeec78e503f14ddd20500e307be89e940e5ead97530c014c33053a9b0c942094ea1bad649b2d23d6288dea8fcfe2e3a83c52", + "xprv9s21ZrQH143K4AwqRHE1Swki4WW3ok7NF5cCs57wYAEhcaDhw9oVh1xwSbCeofP9GMZkTmZo6kSjj5Xh36tuuPebZhDBQRcgRnzLaSgrpL7" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "alcolico lacrima muto frigo michele fessura irrigato alce ateismo incendio ilare metallo pilifero pergamena canotto opposto manovra nemmeno rimorchio fisico selettivo aforisma sabotato riciclato", + "197457046ab546a171b247c54bb8392aa2ee2d40f07831019776745f17aee46fe9f1611f86f9d7f0cbcacc03ce696082fc13529ba0cab0d57f76934383be0f3c", + "xprv9s21ZrQH143K416VC54NFDGXsQeXLSAf2JaUD8jQR4SDqAFq2S3BdcSRW66avfcoRK5FsPPCuBKaShrskgoGchxdAeBzVMPFHrJkCwAWgiE" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "utopia melodia allegro evoluto folata scuola carisma abbaglio spillato guanto unificato pollice", + "53c4c5de8a16381908e397fcb8ce5dcd8c90911d9b538afe83862468816889768d94d040bd249f4eb25d915b05b31addfa0b06d89fe15f521fbf3c8545bbb434", + "xprv9s21ZrQH143K3u8cCFUJiaxUCKdGZ9Z2u76hUbKNMoCy51gq982aahsMbN5ArmhR1VHFJSMKLsFLkTsSX2gwjrEfj31QsDqCh3nE6FmcV4e" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "satira lusinga mordere nastrare sposo responso replica varcato colza opinione distanza erario monetario bici india narice colza cilindro", + "5c8c80b1e440dad220a295b282fad7e8a44bfee5210d853fd52d26e8a006787ac7bf4b0a4f81d029e2ae9cdf71814f193bbb23e4b3e149d2f99b03e2417b39a0", + "xprv9s21ZrQH143K2gMABxA98Be2P18eaj1zxU5qV6sCiMT42cuvLQ3dM2VBnEAvpFwWEDtDcbSkteNAF8nUuCyBETqpAeKmqV6niTPFEECGcLx" + ], + [ + 
"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "varcato codice enzima spessore brillante squillo vento insieme scoprire prugna circa cruciale peccato allusivo savio pilota inarcare simulato precluso sugo fegato sfamato lusso trono", + "e89b83bd1a5fa859922e0045acc84cd04edeb4bf6b5352d197fbed50af0938b17bca7ab9beb8c882d0e0a67597d9e14e88c10e63b824e9206d2848fbb8a55b64", + "xprv9s21ZrQH143K42yTQvEavGhekzrzpxHY1nspnh5qcgThCuNVAyJPWRMeznMV9FnChg5f7k51DBMawNZKWTFCLy2az1CnyGgCJ2Jk5PGb6Qv" + ] + ], + "japanese": [ + [ + "00000000000000000000000000000000", + "あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あおぞら", + "5a6c23b5abdd5c3e1f7d77ad25ecd715647bdafb44dab324c730a76a45d7421daccee1a4ff0739715a2c56a8a9f1e527a5e3496224d91293bfcd9b5393bfff83", + "xprv9s21ZrQH143K2TDo8AAss7eUkUqLFzBnypFpqjQUMVUrSMvrrgLiRxQPrYnhfoS9NPp3rex725rcuN8pkDL6pwqWfdPtiqa9ib1B37vZwfy" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかめ", + "9d269b22155b3c915b09abfefd4e1104573c528f6977cde89c6a68152c3c714dc6c7e0e62f221c322f3f76e4d0bcca66c06e3d2f6a8d70d612c87dd6dee63976", + "xprv9s21ZrQH143K3kavBMu7K49k18vjQHhNL1ciMgn7S9kDMKdyK1vEpF46UWyoXCvdBLEp8U2bhissPkC6iwXjMgRXyQ6SHbyYYGcnFqNXTW1" + ], + [ + "80808080808080808080808080808080", + "そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あかちゃん", + "17914bd3fe4b9e1224c968ec6b967fc6144a5795adbb2636a17f77da9b6b118200ad788672fd06096ca62683940523f5178f6ce3845c967cbd4ad2b3643cc660", + "xprv9s21ZrQH143K2NAPUK7UVbLB4Dd7Hvb7fqysvFyKES5iujX4BfrwUmy1wvWJb3kBc1Zs2jxTTBxBPHuHziB1ZWWUxELrn8g8VLrbjaWDmRe" + ], + [ + "ffffffffffffffffffffffffffffffff", + "われる われる われる われる われる われる われる われる われる われる われる ろんぶん", + "4bd21b75de4f262b0771a97d6fc877ee19329236ced6e974c4c81a094a5f896758033f7eae270216d727539eee3bc9ba5cad21132a1c6e41a50820e0ac928e83", + "xprv9s21ZrQH143K43f5tXeZRJ2RMiS6nLhxcqvopg9mc84xTiX1UjLjVQDBHrwmypY45SPscFqT7zJet5b3riay95fJMM4dfZXtoBTcKKkKEao" + ], + [ + "000000000000000000000000000000000000000000000000", + "あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あらいぐま", + "a59401a14bb821cce86ec32add8f273a3e07e9c8b1ed430d5d1a06dbf3c083ff2ffb4bb26a384b8faecb58f6cb4c07cfbf2c91108385f6773f2fefd1581926b5", + "xprv9s21ZrQH143K3VMwP9nGq47t86uaVENmykbCRuUDKFDSfgDFjJUFuF88JfPPMzwgwFPoaJaAD3YeqrCQBFY5S2jaCXXuc5Vg7u9jb6iB6XF" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れいぎ", + "809861f80877e3adc842b0204e401d5aeac1d16d24072f387107f9cf95b639d0a76141ab25d3dc90752472787307a7d8b1a534bea237c2bb348faac973e17488", + "xprv9s21ZrQH143K29NaQCx2DxPkszvvbYj5FmZ7RMMAoCE932mUkfzaAYFf18JiaFC4bJujA7XAsm7TFddxdkXfn6U34te59Bp27CWp71mw55c" + ], + [ + "808080808080808080808080808080808080808080808080", + "そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら いきなり", + "01187da93480d0369fff3fc5331284ad6a60cd3ce1f60dbec60899191afa2a2b807cd030038a93ddaf14d4f75d6de4a0e049ee58c92197eb9ca995770b558486", + "xprv9s21ZrQH143K312KY5MRB74dWa9BDHG7rV4oK7VAfA1eVKPDddPEUTXRZ5ShDzVQJp7d8q8xQLPYVcHYVuxb7sUo6EQpJUytZRHxyytSYbV" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる りんご", + "a1385ef66f20a905bbfc70f8be6ecfec341ff76d208e89e1a400ccea34313c99e93f4fba9c6f0729397b9002972af93179dc9dd8af7704fa3d28e656248274dc", + 
"xprv9s21ZrQH143K2LmZMWVM3JxKTHZEDZg1ZEUZH6hx9yJhBWRSgGGYD8TAaPa6MYto1t1bBXgPYFMLx1Gidw8fJADdFNzvqrAcXiadUPfTTVh" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん あいこくしん いってい", + "c91afc204a8b098524c5e2134bf4955b9a9ddd5d4bb78c2184bb4378a306e851b60f3e4032fc910ecb48acfb9e441dd3ceaaab9e14700b11396b94e27e8ac2da", + "xprv9s21ZrQH143K2WD9BSegbAkGzg4XbqeY7gzCYGWaWfmRifMJJtmDo1pjXmyuEwPnKcLwTZ1uqosvTRBAcPdUUvdHxY6rKj6R28vWVWGLKuu" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん やちん そつう れきだい ほんやく わかす りくつ ばいか ろせん まんきつ", + "79aff5bc7868b9054f6c35bb3fa286c72a6931d5999c6c45a029ad31da550b71c8db72e594875e1d61788371b31a03b70fe1d9484840d403e56a1a2783bf9d7e", + "xprv9s21ZrQH143K3ZPQiQ24sxzu5PsqazSLn1W48saAFWiDhugDPp7dKB3v5JLfMqzTbqjdwE8P2UxFSKwnFc5CpgMaH2dSmoWDRuaAbrZbJF2" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる いよく そとづら あまど おおう あこがれる いくぶん けいけん あたえる うめる", + "0f46c02350b3f1227c3566dea2ff0f2caf716495a95725b320a31a3058d5d62596fdb816be75909d2c5f7094beb171dc504ea8ea60f5e2e40bd8aa0d9339aab0", + "xprv9s21ZrQH143K2TSJ2oumYNzqQGKmvehh1NKNzAjpu6Ue5yPvtzFvX8aCvBk2eTg8TJfwjiGLfA2KCiajZ1VXBvtQXqk7Wryxgps4BYyNmZt" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる われる らいう", + "a0705c2feebefb61509dcc49c57586c35379c1981c688fc1d452da44443d9a651a374f1ad2ee3d7847b50655cf9241d7e607be436c0df7c8bac42f2a82985a79", + "xprv9s21ZrQH143K2k4V9TkTiFB2LviDq1oSHbRha8AmnPvBtRRVAnf9WJERojPPdki6sbiuNbxv91VhhdreJnaZh29Ay892Mj6KB2aZnysqfvR" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "ておくれ げざん しねま こりる きぼう しねん ななおし ほんやく きない けむり けまり てんない", + "b80f83f27ec3a6cbe804be0661e9bcc30583484dbbd37f689d4952bdf4ad29d9b9f5774fc4c87b733169416418b81f272a3eab37feb22f5c8f6deea6bb08f8c1", + "xprv9s21ZrQH143K3VLugmPmzLDwfFeo8cgoZ2ajCQwWhGKfXut8C1XhdSNuAnXjz5W2bbbNPmr87yMHAaPMaUhE6MNnfGc21WRc5Lj94Tbbnnh" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "しはつ たいちょう ちめいど ひりつ ほくろ こやく こんかい いひん よろしい さくら がはく ふっかつ こまる つごう けぬき ふすま ちから さくし", + "926ce4647a8f91552ae00efa8880ed7e43b6f8e9cf51c38851b0e242569ea96d77a19c777d28dc33d8912c3e3bc6c59f7a82b6daa25add2c39a492fdebae79da", + "xprv9s21ZrQH143K3eVmFcDa4FcZCGeNryYBv99mdgidpSeFBt6ppQQjE6MXegs3xmT4arCDXvtg6dGpr7vFY8A9bzzeJ7E9jyzWLPDSrixwqub" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "しやくしょ くちこみ どんぶり けつじょ おとしもの くうぐん どんぶり たずさわる ひたむき みうち にほん うわさ しゃけん このよ じどう ほめる たいよう くふう そんちょう ろくが はんこ せあぶら くうぐん そっこう", + "94308a93dc1bc12f8e917b2445581240d83cd82ad3c52f9ba4125aa6ce5490a3624fb3dfd7e22923ef7ff3b778157e8bec76392b122bf465fcc56ab4f5a73401", + "xprv9s21ZrQH143K3ResRww4txupLREzrNjYYi6X5FKpKvFUoCwhJS1HtSn21gDkXcBYxQBd9RngafHfWqktMmLgPrVJTisEQbRn4strGawHGpQ" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "はいち ふかい てんすう おさない いろえんぴつ だんち くださる せんちょう きさらぎ てきとう せもたれ うんどう", + "5fae92448afda85f10a50236144cee3068ce21e20a34447a9a4b6e9d5b000a4347a151d7024c2068c4d3c29c46a6f541a94b98624cc25b2c8ff42cafe6a3087b", + "xprv9s21ZrQH143K3PD5fc3CCepnLDsenZAqyKpcbquY3BR9sfosi7rbjXMo5nL9qUb91FwYUFdN9wJt9v95YoKJJFm9V7a2Xb1sZgstQkZ54cL" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + 
"すいえい ほとんど せんやく ほしい ふうふ ひんそう ざんしょ がちょう なにわ ひはん ひつじゅひん られつ はんぼうき ちそう ほいく めだつ きさま えがお", + "a49ff09e55b62b8fd3d4b88b4fae2c9062e18c06105c796505fde3cc7655cfb9c922c02817d18dd40832ecd19d80a71fd62d915c34cd8d95fc55591d12b6f677", + "xprv9s21ZrQH143K4AKun3sek391JX1qnWpxriP3eyJviCUq6ouiH3RDzJ1f7Gm6c7dgAkxn18EcaZ6CBNvVkmLxVvB1fdBdLiGeHXf69j4eCR1" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "てそう こつこつ えんちょう じてん おおや ぴっちり だんねつ ほそく たなばた くらべる ひまん ていき あんい ひんしゅ ちきん ざいげん くたびれる そなえる しんか にいがた せきむ けしょう しあさって せたい", + "55d101db3cb8872853a3e84ec97fdeac63fdab33d92def4dc4694beff0f504da29f953bb463d9cbaf0c4d442672d40c5a58d6aed35d5fdbb2768dcc482b59bc0", + "xprv9s21ZrQH143K2dmFtHdM2tu7mDdKwsbeU7ekYU2dsFDBt7dNbXcsLTnN2MNi3hQssGdcPgEfiXNJcgnoo76t1nAHaoyVBXZ1z6hP8L2UGTs" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "おたく ほうりつ さいかい げねつ ふせい いいだす かいてん ひんしゅ もえる てのひら ねいき むいか", + "1b0fca340ccf977258987a793b3e61b0e20291f8c27645e74621e87c01d0e881cd601bbb0ed98388d22dea341498ed74aea32975a56bd3bdc6027196b7a21640", + "xprv9s21ZrQH143K3Z6vsGR17YLtiGF2gTkCm9Z7XnJm1ffrLQ9ah7AVye6NyACmsWLZBoREcdCRS7tq4Hrx1NDWqHj3MctxWixcH7XBuigtJri" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "そむく のぞく かいふく ろてん げきやく ろくが ともだち ふじみ やおや まかせる すらすら こぼれる いぜん へんたい きさま へきが なたでここ あさひ", + "d7919c8a5458fa4ff55431021938b15bf39b7542df902f371625d9f064a37b1d06a34cba920a9c8b8d6bf34514f542f5ee91b9eab81a063f9acfad932b43c62b", + "xprv9s21ZrQH143K462YnNY39Q7Pq8ZXzCXs9k9GexnGbyjVvu3QhrNLCkPxNUtaKZBzqffnmiuU4ZiA5UQKtwVqr1WmXyM3QjVaVg8Cc93xMDQ" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "あんぜん すうじつ たいふう こんぽん そこそこ こたつ しんせいじ あんこ うしなう しまる じどう そうり てはい ていし おめでとう たんまつ せんげん たおる ぬめり このまま ひいき あまい のらねこ にんそう", + "1003a30a516cfa1c30de2a53fe6c5936dcb8ae893f944f459ea4e1f2202716320350dc2ee5d92289dae3c5b1771fec863fbbc40146fed04d0855c6af70b0c7aa", + "xprv9s21ZrQH143K24jAnV3a4K7xAoD486pMXSvAj41fkB61DopHqcyznUX1zmkFgEGBpNkXi6dckNcpcwXB65i71eBwP25t24QZHVRGJYURZ2Y" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "ようきゅう そあく いきおい こうつう こもじ はんだん おんしゃ あいさつ へいたく しすう ゆうびんきょく てんぷら", + "17593d396c66d776bc15c06e5348bdfa38927daf92402c335041dc500d7c8ea9eccf4f4d1c187785b8c9128d06bd7048a1706006fab82abece74185448caa811", + "xprv9s21ZrQH143K3P24yRmaXsA2pntRyM375Kb6cF4P73bMQNKyXUPPR86X5o83UmTi4iyQWaCL48EoJUVqC7KxsPXbrAC4nKxjMh7FZJcucRE" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "はえる せっさたくま そんみん たいよう へこむ になう にっさん よゆう きあつ だんぼう くねくね けらい そんけい えほうまき しゃうん たいむ きあつ かぶか", + "2567b4dc469b5d1d7c4aece40e642dea3d5cebd80a577b5a4d72fc2b60da6ca657d3a01270c47530ac71f812e648bac01aaadc62c444749fddec8982430fce7c", + "xprv9s21ZrQH143K2UfZfzss5CkDCtkRJvLN7DmM4sWjUaYMF4rfeWDERACvg7jGSSkGeFLrcHdpXUFty7wJ5yzyzcbRDqv7b9H6usZ16PnKy2c" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "よゆう かんけい けぶかい へいこう おかず べんごし りえき じゆう はんい ともる かほご きぬごし つみき いきる はかる てふだ しほう ひろう とくてん ほったん こさめ ひつじゅひん せつぞく めんどう", + "909c8c992019adde332a11f0ebd1b0c0fbc9dd96e4d3d30ca4ecb0d06f743841cd25380f87b3a538f46dfa3fb3a5ab330487f99d128b1c6bcdbe476d3bbe2af2", + "xprv9s21ZrQH143K3a5iyuaeKiPGQbhQLUaBhzfd7inUA5ndrmcYEc7zZzTLGM37Du2M11nXChrzXyZ8ZYtH2dG1CkWE39R749XhcKUcVs9avTR" + ] + ], + "korean": [ + [ + "00000000000000000000000000000000", + "가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가능", + "a253d07f616223e337b6fa257632a2cc37e1ba36ff0bc7cf5a943366fa1b9ef02d6aa0333da51c17902951634b8aa81b6692a194b07f4f8c542335d73c96aad3", + "xprv9s21ZrQH143K2EgqD4YCzE4rJ8rojXAMzqftKGS7694ApSkSoLgheNMawZYv2sArwHhLq4AaJfV6WWtGLjFWdFBT47wsjnq9v6iMXojWZ1V" + ], + [ 
+ "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "실장 활동 큰절 흔적 형제 제대로 훈련 한글 실장 활동 큰절 흔히", + "e6995bf885f5c64932ca28bbb00bc100a6b89cb6edc987bb05f05f99ae7caf78329029c189834c1cca938000bcf08423da011558a60cf3d90c9035eaaf241b9e", + "xprv9s21ZrQH143K3CgBqqMtQxnP4VvSCjaTs2GfqHadwo4ddmKkvzasAHRwAsSDqjvJQu8jmAdEgBrLkVYhHdyZKWkZbyfuwPQG84PrWjEuCLQ" + ], + [ + "80808080808080808080808080808080", + "실현 감소 기법 가상 걱정 무슨 가족 공간 실현 감소 기법 가득", + "1bb52039a6cc288cf806740836002abce493724edac3d3b9458e3581427df76414b422171ef115d823a01c6b39fa68bd0fed20bf5e64dec008fcb22e4b7f26bb", + "xprv9s21ZrQH143K3v6uaDUdZX7Cr8uC3hCE6gJ1MiGsnmRojRtUN6Y8nBiWVfMUc1skLwuUb3NshkJSdCuFew5G2y6YTDCRsSWNxYsJgZCpRxL" + ], + [ + "ffffffffffffffffffffffffffffffff", + "힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 흑백", + "b6eb986d6aaf7d0cd0eae2a667ff8bde68c8780fb5a728cf500e29119ce99c9b079a4217836879c1e73b8a85422a85b564d819699a4310a1d007b5be24c24b6d", + "xprv9s21ZrQH143K2P5vpGbzebpVw9pZzmHHyuoTFDAFCR5UTVwQPdkuKPJB5fP4a6YgAT6seAuv9uGsMcnoyVFkVxoKZxQgB1SXjy89nHvvSHx" + ], + [ + "000000000000000000000000000000000000000000000000", + "가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 강도", + "f40a8db48df9a7fdd73a7b3ceb45f668e4eff098f275a0a5cd739d31572c90aa92bc08b9043d0adf059a945e47e2fdbc26c89dcc15b3893a2a705e4539523ae3", + "xprv9s21ZrQH143K4FQh3Y65u7oFeQhimhg8fjRV12vp7kYxB1EnjiBVFY1B7AdjQgLcfzK8qLjaZKFq4gfPC5pQtFtDnHi6Y988Skm2oRr758w" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "실장 활동 큰절 흔적 형제 제대로 훈련 한글 실장 활동 큰절 흔적 형제 제대로 훈련 한글 실장 환갑", + "3162bc17e0f2f01ee571022444d2c5fbddf6a68dedfe734c319fb574592e9c0328f6526116b3b0b025b23391781d0bef8f43bc8ddc2b054b9f52e1fd6a88e3d2", + "xprv9s21ZrQH143K2cE8CZ9tqfnmvNZcEJpwnyRRcsqDPokupVqx1hecEjQF2zrM6amGPiDiEKGZdNWrps6DdEcP83UABBjV7yJSkAFUWcP1FDF" + ], + [ + "808080808080808080808080808080808080808080808080", + "실현 감소 기법 가상 걱정 무슨 가족 공간 실현 감소 기법 가상 걱정 무슨 가족 공간 실현 거액", + "9fa92e4524e0f7412935b2deea23593c0955f9679d3285e3b955f5cdd2a659ee005ee99bd385f63d82cbdb54a3849229fc9a700e198b65a1452b511884b543eb", + "xprv9s21ZrQH143K2XL77upQ6FjJr8a9oDGHakG1HQozxUzHJ1aVbv6JLPuX2nXdbopUnxBoKgVBHiZphPg4Krqm1xxUm6YPcZg9NeAiqT1MAcK" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 화살", + "2543a88c8a31570dc9ee868a7b153f7f2e42700778bae7a3aba7017357e708b5cea97e0d9753c9226abc90b83c76ae369d74515ac64102c51a5fd0f809cf8b92", + "xprv9s21ZrQH143K3AK2RE2fUZyLssNUUJPyAYjXGDGE7Ysq8hTx7MG43NG7KDALxx6TaVjXCmxFwB2wZnNtSBM2asDYC4YcDMEc9sbCyPPYjdo" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 가격 계단", + "edb71011bc0c227103ba8a769cc36ba609e5407a771727fc0c8cba1b5a44d21ab9163d9deaa37427ccc579864e21f08d0fdd3a53a6be258d3c73b898a01ce2b2", + "xprv9s21ZrQH143K2vZcddzTch42FzN5kcvsdtyMNR6QfEgZy1JEor4wzwFKj4JGq8kJEyC4zMQHhN7Q9mhRZJdYceTALfwxmpUAD6AtSzU3n7L" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "실장 활동 큰절 흔적 형제 제대로 훈련 한글 실장 활동 큰절 흔적 형제 제대로 훈련 한글 실장 활동 큰절 흔적 형제 제대로 훈련 통로", + "dbd640cc9d3e99939bb0fc4473738571e314c29468f01fa85f57e296cf6e8e269d6e32434e46aaa63384930cae83728623195a932a48ccb71a9ea247720d9371", + "xprv9s21ZrQH143K38C9hPGqNHuxsvKmNVwY2gRiRjjbzuBNme4PgfyLzkhYEt4fGm73xs6c9QDh2GxxkntMiFyeDsMDPkLDa9TkubnWZFCsJP9" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "실현 감소 기법 가상 걱정 무슨 가족 공간 실현 감소 기법 가상 걱정 무슨 가족 공간 실현 감소 기법 가상 걱정 무슨 가족 구속", + 
"9a0ec04a48287ae628d61428f921de5f40fc1035f21883798e05c36f9705b2525a00ebd6bb89fcae9b8af8e9861d0083de331199d6b85b24cff598609a49b305", + "xprv9s21ZrQH143K3PRHhckbBdnb1HMFJWXd2oPh4sXWWP8iMrH5fkxKnDucKqdWFv895Z7BRWKmbHj5gRvB6QddHeMUrk7wzSRzf7ZqkkVCipS" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 힘껏 허용", + "340bd57209e54e8bde6ca750147933f7e44995047da87b61f64f70f26f289a377e25a65f5efb11f9e651917ec9866d54846516ae0fba956f5f536422bb47d91c", + "xprv9s21ZrQH143K4Cr2Z2NiQPe5Xz6W92ZCE6SpW5NVmfvjtJgJUAG6WwkQ4iZ1AyfbUDb3yoiQsdoGHSSXNEMtAX3xPdtD38n5JeD8Lx7XNR9" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "원고 물질 생일 부산 마요네즈 생활 일찍 큰절 동화책 반성 반드시 의식", + "8d148c7f8ed529d7a88fe2bc8bff574b56406f9928ab5426df793f4d3a5121c7c6974c856ad20f66ecf04fbecd3bc025912b3e41d500f1e5be896505e01d08d6", + "xprv9s21ZrQH143K3t9XU9hQ1sEMc3LTT3MZ4Ar51RchXDvo5sjxDF7bFNaLfz7DttnvUqKAThXzv99QKccqpLQB6DdgR3NJV58AixcCdbw6ZTu" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "서비스 알코올 오로지 착각 카운터 부근 부정 고양이 허락 비디오 단맛 체온 본인 완전 바람 철학 영하 비닐", + "3b67b06a2386240f75abe8f7905fd0fdb4cc2baa88c090eb9bca3cf144e6e33bbf3dd9085addfa52cd0ff9f2f9cd63ca69e7e77ce903ace942ec7f5b451148a2", + "xprv9s21ZrQH143K315xG5w3u3mrnhSMVsNVdKmA9VZVXeV3fAcLX94k4N9oDEgSAxsMi3RhACEKFfgWkFex6T1uHSjSeHPxnnVF7muEAVNycgE" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "설렁탕 모범 일곱 민간 낙엽 매스컴 일곱 언어 지진 통장 잠시 국제 설문 복도 생방송 큰딸 약수 목소리 아직 횡단보도 중독 수필 매스컴 실컷", + "06b321dd10cd2d0dec17212163c5d31f5ebda67027c0159380348d31ec5c5e7914ec75a44d4e225bbe5ce3db967e2f1ae2c9d463a638951b3e16d75ecb92cb17", + "xprv9s21ZrQH143K4TY4YjtQLxBPerqUshALn696njp6gNVkBTYWWZ5jrQPZRuqxiMoJ6RjQpVgVZFqKBRsWnGcci6UNgPdHbRBuVn93aKdCD1n" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "제주도 처음 의견 김밥 공부 연출 모델 시장 대합실 원래 시금치 군인", + "5e68ec0b343b62e221ede6dd5d6f33dcf8b5b4f4925ce6a30f49b17182ed0a40f7c7f3248463843f1999dd671a2e9c2abf4e5443a4e88f2bbf10b79524cab827", + "xprv9s21ZrQH143K4B4gzTqVD1szP9igPch2u3xqQZ2gBsYe4u9kunYh5smSB3Md8EXktwjWafLwMasGuLPiSGXPCwNTgdcEPBsgtq6MiGMscje" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "소음 큰길 식당 커튼 책임 창구 사흘 다양성 일행 질병 진급 혈액 증가 예산 치약 하룻밤 대한민국 그날", + "c8a07b4a163c3cf4ef400a96bdb7edc012dacb957326de185e66f7804e912c02329ab07520ef05dba38b2b3f6ded8a8691e1b17a38658aaddaed7ca95ff1588b", + "xprv9s21ZrQH143K2h4eVKk7zbdHmjz5U5poyBbddPTS38xb7rjA9NMdUjYC3Dy2X7top1kVw51cFuJ2UHmFU3WGAzb5Hz4imvuz3ZKWy2R7gAP" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "월급 보너스 금고 생물 기분 진리 열매 콘서트 에어컨 몸무게 집중 우체국 강제 창고 영혼 분필 모든 심리 소나기 자랑 순서 미디어 삼십 술집", + "683d1f6324fa54a4c4efa9b0573fae573ebc1c8b373890eb9b1e6f760f586126af2a3a39e0494f653ce6dbb954353023c304dd42d80aa939eb5a31acaaa3a60b", + "xprv9s21ZrQH143K48a2XvjEnkvM3ry54SZdbERB51GvtK6S5hbSwV5SbBkqa2KNW7b3gwY7hC2JMgiNMD45aMhABCDvXUoZTvrq6typLaUAz6k" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "나들이 침대 분야 바이러스 첫날 개선 논문 창고 하필 윗사람 저고리 팩스", + "f767f63c4febb5c832890f6129d0c3721555de40c28ac11093d23447f507b98f134cfef190cf0f12f1e41278fae5334f460c24c69cadc9aacc5d98efb3903f06", + "xprv9s21ZrQH143K3K9kkX6aq8Tmqp5RjoDwbrEbqStVfNmSWBofcYA3wYA2MQsDsTZjkfkeu7gz9yJB4foKXE1mveXDf48vdA7L5mPZfC8aYdW" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "씨름 정도 놀이 훨씬 물결 횡단보도 인천 철저히 학비 킬로 수염 본격적 결심 취업 대한민국 출연 일정 가운데", + "739584c55ab1c8053a44ca3fb50237e066590c92043cf3f45748768df65778bb79175d511543d96112f0a0e7960df081f74e6e477b953a1681cb5331de8abc3e", + 
"xprv9s21ZrQH143K3XQJtWHu2YWz62N1SFBLpbweVNpCUoeFEWdukPL2rXvQ7HoDBwPWCnBc5otLQnHomemkmZ19m49YjQTqZY6uvjbDQaWpS5B" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "개구리 속담 액수 북한 실내 병아리 소망 같이 관찰 선물 생방송 신용 유난히 운반 남대문 열차 시설 양주 재판 보편적 증세 감기 정오 장미", + "068f3943d3b3ba61b74e7900d936fcf4d73fc74852bc011e7405213edebed9f1d6b9a25db10c3ad5552b779225321a36304c757d0479e8b591655d0188961120", + "xprv9s21ZrQH143K4ameDhGSneS8UaxoYBfQ547c6eDEBvDJJ2cqzFHCCfqcoybLTVreVRjQo6SUDKzoHeNtR9bCQY2qF3iWCJTheeDYTUSGdTs" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "해결 식초 거실 백성 볼펜 중세 냄새 가끔 출근 상인 한번 의심", + "5f7125457857a8870d1ace1eb0f87479385d08ab8827998f57cb0cab5289d31a360310cdffaf4e8d1202a13fd8bba2ed9bc240a59b6d486d418647c55c7bca44", + "xprv9s21ZrQH143K2fhpmAA6AkCVJWY7je61aLnS882ga5H3cMuoSp4G4SCzg47MRUjVFQYVytFdSu3M1hYyhW3xkuGqxnUXx1K6jFTg4C5gmst" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "제한 스위치 아프리카 약수 출입 잠수함 잔디 향상 당장 열정 목록 반장 아시아 그토록 선풍기 약간 당장 단순", + "8c6f94c633c8752381e7bb207083025d7cef6c448695393fc21553e1ac269991a3ace1a2562a6129bdc34494c7a6c01d19f600da9af985eb001d71d2fb9e1480", + "xprv9s21ZrQH143K2b5dBGtut1we5Wz9S2L2XAXfj6xftmT2omNHimaPwbTWszDvwSLfpjpQCjXKfqW827u8a4ADhCQEnTiiFj4dtbc2DZJRucn" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "향상 담배 박수 추측 기술 충분히 협력 성적 줄무늬 인체 단위 딸아이 왼손 거짓 조깅 유명 석사 참석 이야기 크림 변동 진급 스케이트 하지만", + "0ecef71bd6f0948d9186c2786086a00f7140a00d37c836d01567077aac0dbc69f62189c02a9138dcc79a74dbb676b74aad4959fdbbf1d06a7798385f8eec97b0", + "xprv9s21ZrQH143K3zjSpKnVTgqBrJfMK5iZXLrGb5Z4zcWKo9URusFq4n7YjnLR3RCqD8Mytzk4qjHnk52PfdQUHCCJUnKnZxqhMXGGVVe1NXZ" + ] + ], + "portuguese": [ + [ + "00000000000000000000000000000000", + "abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abater", + "ab9742b024a1e8bd241b76f8b3a157e9d442da60277bc8f36b8b23afe163de79414fb49fd1a8dd26f4ea7f0dc965c760b3b80727557bdca61e1f0b0f069952f2", + "xprv9s21ZrQH143K4ayH97er9xirrbGL9hEywmMfh1LdQjwtrsnrvENN7c2yKs82HypXo4GAMt4wpnna9doa1FDFYXQTSFReWDY4XAf4imN5m6s" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "imitador vinheta sogro xerife veleiro pomar volumoso tratador imitador vinheta sogro xingar", + "298d1614ff06ae803709f5be5331135cb74e6cc77fa09e07a3e887c2e370401f9a73a409dadf58b5a5197b27ffb3fa5dd528aad9a1a8750d7669ce950ee60c2c", + "xprv9s21ZrQH143K3H4uYufypQuRqf28rLoZpLTwjpeCiPBpcZtJpupYd2oS39oGnrHd7fPAgU5d8bfTpYd97MShp4f9fBGj5K1F9NqgZFpmbVj" + ], + [ + "80808080808080808080808080808080", + "inalador acirrar barulho abotoar afivelar coruja abutre amostra inalador acirrar barulho abduzir", + "800fd4e7691fbc3ceed246c211a38949c3607fe269a35829e40ca9d3e26515a4ebd64d8bfe9b66b49543fe9dab78bde7cb7102968ce669f55293bcc02e26ba0e", + "xprv9s21ZrQH143K2qEuVaHnXDY1HbcQ9nMyNoi5tgGyFxfYXVxHipQ9hcmS2QitJxsNFatCZk4EYoyyNdWamAjJiMLhuimpjiWHPvJCw28EMSr" + ], + [ + "ffffffffffffffffffffffffffffffff", + "zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido xeque", + "7fb404372815ea28ef97a64249acd71a293ea0437b3dac8f7e193a10f3584e2055753cc8d6f025229f65e61318fc4e10d4017bd3cc3496f535eca3247d26acd6", + "xprv9s21ZrQH143K3tfxkHq74PSrq4HqTjs5sHHxBpjoP26GnJ5Q8w7tEn5kmPESuqN5TQb9Lu4LysqMAcXHWNsr3DKSnvuu3vx6sK7V6Hhv2iD" + ], + [ + "000000000000000000000000000000000000000000000000", + "abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate acumular", + 
"81c66b6789e8b91c169335be4436fd9736ca9c06425acd09b0525e1d6836383130f7f7d31378aaef8b7109503972f40d42f6c6b9f99765827bea762515d3404d", + "xprv9s21ZrQH143K4LsdmZ3NinpZHGbQ6n7aQr5uV8Rv7Fq8yNPdAzsyrezon8NZ3ZCzode5jS2PXzN1C68amwDmp2bgAEF4jDG7NUbdRYCjR8r" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "imitador vinheta sogro xerife veleiro pomar volumoso tratador imitador vinheta sogro xerife veleiro pomar volumoso tratador imitador viga", + "17040704dd985478b7d0666c7078201e3cd7d1fd1aca0d7d47c98a91ec7845500c611d987339a1d4c12bc506feb7c486eef0aa8ce679b1d184db5ca40fe8ef67", + "xprv9s21ZrQH143K3XAGBJxF36qFzxkFaSEsNjqKvCtT3s7WfzgMBRcv8G9K2CRGXXhLYPWKSipBrd9dyLSRuTzQWbAJkhghki7Nn13QfHqCSBq" + ], + [ + "808080808080808080808080808080808080808080808080", + "inalador acirrar barulho abotoar afivelar coruja abutre amostra inalador acirrar barulho abotoar afivelar coruja abutre amostra inalador afastar", + "0f637bf3a487c26fb73f6a464f62ef1f6ca73a6ae083e220374c82881bf4ed2dafd874956ce368c4441e6269759c5864197e87421fbcdb7f6d63df17b4f7df81", + "xprv9s21ZrQH143K25VJsZdwtDCmzZ5C2LkJyNDKrHiFfHHBEmtvn77g7qrBRGHTum98TqU7sWCGFZ8vjtcPVAfgGcV51vi2BrPR66KC7wNxb3z" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido viaduto", + "042e40dacea1df76335445c37171e5c0fde334236abe1b5d69378548875d157968dfff5889641f16690dca9baa4d9e5fbf56e3aaf0765144ba96b819f37fd0ae", + "xprv9s21ZrQH143K3xAXH3i9VReyK3HSyzSmN11DXikYFTA3senE3iWDCdYTFXL9JkBM7CeCS31NBBHNu2tH5T1iDWCBEu1TeWLfELH4CaGAiRR" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate abacate alinhar", + "8fe67c9f53a30f75513830e18f6bd0950354297a4977393fae3577363393e679cc13452bcfc9460b28a913ab8de9efc55f5901d1ba77e5eec791afd967768607", + "xprv9s21ZrQH143K3UVmtSkznKJCHUGjkjoHkUd6dxdfo91K8sdpEVmzjyRMB3Sf2WC7WKzmZLw2cfJ43LAEENq2v1gASK4TmMUTkbWVoJaZLge" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "imitador vinheta sogro xerife veleiro pomar volumoso tratador imitador vinheta sogro xerife veleiro pomar volumoso tratador imitador vinheta sogro xerife veleiro pomar volumoso sucata", + "feac9ad4e1a4a4399a7d57fe47bf64b404a7588eca1025abfa299365f7a75639317e2c89a94812db33405aa0213846bfd6d53dfd02743e2cf3b6984eb9fcf19f", + "xprv9s21ZrQH143K2Syz6ZuTcKmPtPee4Xn5HMEnuZcNZ1s7hJdgjRLFJ8f4d8DnoBBTUchCBjBnjubsz3wum7zbhdp5h9hm4g6DVDo2LaHGJ2a" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "inalador acirrar barulho abotoar afivelar coruja abutre amostra inalador acirrar barulho abotoar afivelar coruja abutre amostra inalador acirrar barulho abotoar afivelar coruja abutre asilado", + "32c8feae6a0bee33166468a770cb28459727e10f4f5ffef64977d5ef52a68ec51d832751a10c025058612ab256052cdfa9d8c5c87560de0453efe5a7d4597771", + "xprv9s21ZrQH143K4XMzpdgrkDGxnU31ezynvtw3iBEfRB8sriHpEwyu2Gb65ao9YLcTTWTyxNh2g2AdtYmZ4wpTaAxLpJBzWY7jAvG1HyotoEt" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido zumbido validade", + 
"739a6edd208d09e28fa97f8e8709aaf185e173125b1b427c04d1539173c88b78e81610a759e14f97a2038dcdc2c466a072788e3d7c88cc9bf36b96cb29510e77", + "xprv9s21ZrQH143K45qUnDL5tCHKGpht6amXS4d278iTXWRJXthNTufwnbnvr6rU8kd4Nb5s9aBWdYFtfcdGkg4GdL91jRhGRtP8mZbjtTvpGsy" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "mexicano crosta farpa empolgar chatice fartura olaria sogro centeio defesa dedal multar", + "1f0397e6d2aaf8d6867d648e9bc27b12a4ee1b61a47fb63c6676c153c472d708f02344ac56fd1a8e135e18cce4eef711e7e88529bd6c54b90715e9b3d9fb8467", + "xprv9s21ZrQH143K2ynvNymKuhxopGpKrkH5skB2FqBYXP3N4goobBbXfYS8Vqhr6QAZQqxxr5caCjvSC3P3itpqqbntU9jTmmEdoM1E8Z5f1GU" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "fazenda invasor magreza repolho separado emenda enchente amassar valente enxerto cachorro roteiro eliminar marinho deboche rodovia locutor envolver", + "1335d5a679638f63a494f1b71f6c6c448ccb0384167f3ef3ad07456f0b70f13b8f6097c1315186543a09325ca3581f329cdb66674d0f97c4474950e3aef3b9e9", + "xprv9s21ZrQH143K4MHxCTSXTFPDppSnKcW22kjYEg7NMCcHEobe5G337nzsfnuud8asiYwnyMQMG9WkAwPW78E1otspYE4T5t2AU2cArUhhQGg" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "filho comentar obrigado cunhado biologia clone obrigado julgador rebolar sufixo pacote atingir filtrar edital fardado siri janta conhecer inibido vogal quiosque genoma clone impor", + "ca0462775b11a28cb36c497a36da8e46c1ab618facf7640b42fd4974085e1d682a6ca660560a3b72dc602ff56e4027b53a8ffb691a96e4c827ef4e6d665565bf", + "xprv9s21ZrQH143K4B1eibfqLNQbrVycyNgNCgQvDFtBLDYbEPtVXRBn44GPmw8fasfeCrV5tc8BFhzSnbg3pHdafof5rrXHm4XUK6EBrpoZni5" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "pote ringue muda benzer andaime levitar comando guloso carreira micro grilo atracar", + "556b77fc49cd57f7c9c92fafedac1c8341598666721b874db50da261c7cded491c22ecd3235e508822507212698f645bb198f0bb1aecd50d22339e77619a765e", + "xprv9s21ZrQH143K2NhKv3Pg9xyGsPMDwukoomCFE74Fk4w4K6pe8hetKjNrSWEmzPSr482zkiZ73aE5wTYY8jvLN4Wruv83p6mmxnxkF7qkUBM" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "frieza sirene hibernar setembro rigidez retalho exagero cabana oliveira refugiar recrutar vazar ramal lousa segmento tocha carpete autoria", + "3059ded790eac1a5bb614c2a88989a4b5d7d4db20acc95f2a9eee1a5889012fa2b7ad2c0b0c5b81abe94d3e4c6c58265058fb257bb051552fe5f82fc45389132", + "xprv9s21ZrQH143K2EN6vqxgJ2XMXNZx6GWcdcFXajpxqDfWvsrYFETHVWUEZr34sP49GinmReyfUgpakD6xT99vcV7yiAvPnf8BbEJ2PsWf8EH" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "mimado drible baioneta fantoche bastante redonda ligeiro simpatia lamber copeiro reitor mensagem adeus resumir lombo enraizar combinar inapto fosco orfanato gincana cubano exibir global", + "f26d817eb6474c6ac3358737afd31a4b4be74ba268cb7dd0bab44f959c9b4be5ef7d65245bd0da8f21694bf380e3e4a9fe7f82777b4d59a8c738fc460b0c3210", + "xprv9s21ZrQH143K4QArHuoZ7djwjgsdKRekuqKqFEBZbYpAQJU2a1mDMm4z8iG28oBs2kAEaGQDPjLu3kiNz3BV5Lio26Zt1z5KzEBxtSJGtCR" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "bifocal semanal enlatar debulhar roedor adquirir branco resumir tora modular patrono tangente", + "fccd6ccf9cad7af20eaaba21e9f4fbb149907c41b5e000ce398e8956c15a5436e04091402d4851c4cc8cccf9e8a90dc8219f3104698d1022064538c6303dcdef", + "xprv9s21ZrQH143K25Rf7o8uUBtDjSZkJ4S2Cspm3JN9USxBAXmmUYvw9n7W3qsxbkkptFhejjufMUtKx5QhCMG8Z6RjT58F9kVrRGKMQhatmtb" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "indeciso pires braveza vontade criada vogal nutrir rodeio toxina soletrar gelo elaborar ajudar sediado carpete 
sarjeta oficina abreviar", + "c22c6e716250a3b98b0f342f77d4f1fe9a4eab81304fbcc9eb9f9852db769b4164daf2aecf72e6d9879713768628780d0b9900ece13c75aa1f433d48ea5c9839", + "xprv9s21ZrQH143K4UsyxJK2nxQFmMtrqR5QUuHaK1KiZkp1LZKMqPuUCCe3xqVWDycrKjDSctxm2sPQ72MBM3yZx6tX7Jvemccncy4nw3HEjFb" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "adjetivo fugir irritado engenho iluminar dourado fralda aditivo aprovar ferrugem fardado ignorado moeda mesada bisneto linda guache jejum pastel ecologia rapel acionado pneu palmada", + "6fc110cbcad37d6a43aadf52643ca9172f51c6fac45bf0a17e60e0fc24b5d1cffc73dd0606c427ef789f9e96c1d05af88a806005a21a4f2f93a5e41ab19fd7d6", + "xprv9s21ZrQH143K2VBbZdEtqReQrWkiSUvySRGbPbyP9sm8vHAWDwXc1GwsBHc8LQU82gydYVQ2YCDGZKKzKPX8SaWCf2jrQRC7hVAgb2GYZuD" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "turbo hoje aeronave diagrama embargo rachar bochecha abaixo sanidade extinto tridente mundial", + "d666e6f21c3f8934aa38e45db96ee64eb490156655c2be5d4da4359fc9b11cf9ffda5802ef0eedcc154fb790c41f50ec0cb40b4236972538d8a6e27e54115706", + "xprv9s21ZrQH143K4CtpNWJhUqGJ7sMBw5MfYzKSNUmFABS3NwoqGtdAXUMyci5F11H9Vbx3kbiEqjDPMvTj84nMVZM9PF7u8ouNEruefbPFc9G" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "povoar gorjeta injetar janta saturar paciente ovelha vaidoso cancelar limpador confuso degelo inflamar avisar ficheiro italiano cancelar cacique", + "cb72a39ca9d39c4283398a9995754bdff1785ccc0d96f842d748f4fe904f9dc9a963b7e89ca8070154edbc7d1efc90e1554ac4dc34646c38d6950ffaabeee350", + "xprv9s21ZrQH143K2GgN5EwWK5f6MXbTAvTvN4uiu5pZSgb4MxBAy8VTabQfb7Dc3kp1VmEuKTWJbUtM2Hu8KpFUKWPm48ubQ5JHUHg5PeLwz2m" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "vaidoso calota decote sambar batida seda vazio flora queda nuvem cadeado certeiro matinal afetivo praxe moinho feno resgatar nervoso sintonia dobrador recrutar gorro tonel", + "8c6d156ba11fbc606a92071e7230fda2446333510ef5f9bed4712b2d737ab43d2e06c4fb3929dfd072ccc8b9003c6bfa62d5b8fcf04396508c54215357f6f8cf", + "xprv9s21ZrQH143K3fnoXd9pUHy7b6hH79o9bMW45Zc9486jaN1KKJVZrm1XosHw3sduFw58ADzqLZR1TvaB7enqXZoB8hyptgcijczutBxqKNB" + ] + ], + "spanish": [ + [ + "00000000000000000000000000000000", + "ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco abierto", + "29a2ee16de47d07025de37e7d9c596869439f9bcd26a702d2bae64db2bf0f68383841c5444b5b3bd39dd720d2ebe59969e110e5955c8e6d32c6c3294fd87439b", + "xprv9s21ZrQH143K49iYfUTNyLe6mVRHvYSg58nfiLkcSREsu5QefrsvQ9KWsMtX7SXsXwvs6J1esWdna2weySpUFZNN6qiXuHcobEMWGLfyaHG" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "ligero vista talar yogur venta queso yacer trozo ligero vista talar zafiro", + "1580aa5d5d67057b3a0a12253c283b93921851555529d0bbe9634349d641029216f791ddce3527819d44d833a0df3500b15fd8ba4cae7ca24e1464b9167de633", + "xprv9s21ZrQH143K27g7EMkgY2F1fuyqSEKq6n1iJCHuiUX5F3oGESmJSS6DcKW5JZ6qWWJ7x8wS1FCrd1NhRS4xCWDn9Bb1HzBuNpitD7FeYGv" + ], + [ + "80808080808080808080808080808080", + "lino admitir bolero abrir álbum dejar acelga aprender lino admitir bolero abogado", + "a89366f7f9c4bd98afca8edf1242507506562b8eb8a3a60468cafcb6f3037aba1e4d9a7497f6d49fa94aca87c95703873741441a719325af371f8eda9b59dc83", + "xprv9s21ZrQH143K2dG5ptughpsWSbdXdRVg6ZZtF8RehVMaDNM2TJ6NZAtorTDm2EVpt5nwpsgtjgYW3GD6oP8Nk3ZhibRQFRJ4f5JoDrunY1m" + ], + [ + "ffffffffffffffffffffffffffffffff", + "zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo yodo", + 
"a9d1f751178872cc53fc5433e9b2a97526448adc4b824cedeadd8a127c2416481345dfbef2bfc78275f3498e40b4e8e2e00560100e543aba3f324e752f032bc9", + "xprv9s21ZrQH143K4TU3oETVCyPLTqmC8C7zqqSR7L8JpMiR68YNhyvfEmXpRh6pP8gPghpFbvNSQCQppPDf55iNnZhT9iza6HRpTvKeLSDNFCg" + ], + [ + "000000000000000000000000000000000000000000000000", + "ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco afición", + "6c9f21d46c56f723cd734e308f10ebf44b5b92a2e0d80fd66a2952b8d37af5219e0b93c59e1d8e63b47ac657ec2c524e5fb951d87cac824f84a3ac6264b7aaac", + "xprv9s21ZrQH143K3AjqVTqs8gbpkEoSkCVEM6dAGvRm2TJgxTLih4TuPPSp6gheAkXpPRhG63Fb9Cr5MEto2j6hGJ4ZVPa3C1GHXUyPPwMRXCV" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "ligero vista talar yogur venta queso yacer trozo ligero vista talar yogur venta queso yacer trozo ligero violín", + "f73b28d7e180e0a92c57276a29489c10a992c8a465ab61be0ade4708543436a682b2a3c22de57c48736ae6f29bebf3e506779c74bc1a835ad6b9f4e174126ca8", + "xprv9s21ZrQH143K4PEMCi1dMq3ZwveC5um6cXR3tp4Z6LUGLhz4pmkaDU44UoSTiMQHv5icYPjH5EooZNorbDB7fLMDa531HHrKKnEEqCT5Tfs" + ], + [ + "808080808080808080808080808080808080808080808080", + "lino admitir bolero abrir álbum dejar acelga aprender lino admitir bolero abrir álbum dejar acelga aprender lino alacrán", + "f799e5c2782b50d0eb1d25b5f94984c5b4037ade236c6aa3b48b3df01b703d8ede5f94555f4e78f87a642a9676ba052865418c469c5739b3e93acc528fad30b7", + "xprv9s21ZrQH143K2UeG5FGYFmkW7oTy35ZFgQ7qdR5tzBdKxGLCEfVJPPA98e2wUpfD3Eg6GS4833dFVVKafB3RNGMSZtxoWc56Yxo7PaBgYds" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffff", + "zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo viejo", + "2fd3964ac77c52232dc0eb2ab237fea2de9b7509005214101ecbbaeb40f34bce7735e848fca6339f76f289904c6db959fa573fc0aa607d969ac256693b4fb7af", + "xprv9s21ZrQH143K3zwjASrAazc9EGeoVcQXA3unTmgxG9ZS7nc75inZw19oktj1y3n2Y7yetBatSN3v2UpS9ms3PvmrgQEMC4jox4ZV1ZrW6Qz" + ], + [ + "0000000000000000000000000000000000000000000000000000000000000000", + "ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ábaco ancla", + "f600536eca941ed937318828e9ebab24b3b571558250e7a8342fc3cf16c458b2d7b36c36155a86cc308f7bef6d87b05d5dbe347f1a83c3dfbabd89e9c45b7883", + "xprv9s21ZrQH143K2J842VWEPWH3ssnSvZYhRCzPoEcifAN4UgETeYkTdvnMUgnuftLyeGttvdvec8F1YJCmQz5mS956jsb2m8yDXZtnxRgiYgX" + ], + [ + "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", + "ligero vista talar yogur venta queso yacer trozo ligero vista talar yogur venta queso yacer trozo ligero vista talar yogur venta queso yacer teatro", + "3d2a3aec779195f2628e800879d600cfaf2d7fcfa998657068db53906a00608fcc94fc78ceab8c97d6191389c4e468815ea0d11ffa4280c34c3cf17721a27c73", + "xprv9s21ZrQH143K2WBRPum95TFxfz8niK5sbiDpQjyr915SjEJc99BrYoRhPuYvfzFhPwqUNAFtEdw4khqQMK4ge8EnTZZASbv7oy8t6SMzVSM" + ], + [ + "8080808080808080808080808080808080808080808080808080808080808080", + "lino admitir bolero abrir álbum dejar acelga aprender lino admitir bolero abrir álbum dejar acelga aprender lino admitir bolero abrir álbum dejar acelga aumento", + "dd095dddb50de059f5cb6932d529ad37dd32d40f72da3d0c7671ffc6bd967b4392fe233e5e9a4d9e5e60413160ae215e34375db85e95ccbab4fd4712f32216ab", + "xprv9s21ZrQH143K3SQcqhHXxwZ793d4RXPpUQJQm8Bpf799AZPzicBxB87hu4Sm2DvWBqFvmxEjN3bY6EVzUxhJbHKdtV2k6u4c3ZNRFGhZDma" + ], + [ + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "zurdo zurdo zurdo zurdo zurdo zurdo zurdo 
zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo zurdo varón", + "deea21c6902df5ef4a8efab8e14de53004c68817ea3de421cdd184f4159a6e9947376ed794c3ce67534f37f80b46674e85335555b5c53f44fdfef27991fedc0e", + "xprv9s21ZrQH143K4TyBobPSoDLEze5gKjiTZXzaJaND1QHqmrnx6kULMJhGvQkraSHgUsjmisepryPQqTfWyM3ETLjusTsW35KumQ3w3RyusSM" + ], + [ + "9e885d952ad362caeb4efe34a8e91bd2", + "obra diadema gorila farmacia colgar gorra pausa talar cocina duda dragón optar", + "fcf6ebfc7d9eebab56ca868cbd2d5d05a6f2142ba903c52855dad4ab8c0c2cf6b4e047a2dd97cf382ae717dc18d155a45fc798e6f0a0b89971a4224e2a285701", + "xprv9s21ZrQH143K3pxsjjkbjzEu1f9qaeGb9wTLZ39rUF4CoP125zApXELXDspSNV1Cn8vjvXKNQymm8iwSRpJr9AZyKvHTVVbg1Tq6kY48iTV" + ], + [ + "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", + "gráfico madera muro rutina suelo falso favor añadir variar firma casco semana fácil neón don sección morder fingir", + "5b48222ce814960e3b2f507ba58e96b4fa655f76060943b47c7a1396d431c570849e6f1595add9474934a72110bd3da06824428650be819f8d093e0023fccee6", + "xprv9s21ZrQH143K3JpnkDoEPYdCo6QEBW4eUfKe1CCweKfHF5DkonVNPQRkDcYEva6pBBfGLeRt56UB6oSnhemWzbSBXCpdefk3hBDWqUAPb2E" + ], + [ + "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", + "guion culebra parcela diluir buitre crecer parcela marzo roce tejado picar azafrán guitarra exilio goloso tabla mando curar loco voto reparto insecto crecer lince", + "5dd9ecc2a8f504413ee001e4f27f25ad14533a35b3986b4ad505a9774740d0c0a6bbac6953a1ee47104357f4a5bc4acbc0f71813f9532fa667f3d3b6f2d6dd6d", + "xprv9s21ZrQH143K4XB8BVsQ9TsrC12DWTkizmcY3X6kVqycT1pAVLhx3EHfWow3H27QV4YH8kMLum7nzjSexrAxh8tKZzHRYPPexmAaaM1KEYQ" + ], + [ + "c0ba5a8e914111210f2bd131f3d5e08d", + "ración sapo opción brinco árbol mismo cueva lamer cigarro obrero júpiter azufre", + "49b0de91db6c84527afe1bccb2525b93dbdae0306bd3ea8a1f629ea1704195d450a0a3211894c417f586fde217f024b4159a4f6ac7f5d18bb8b7bbf72c4f4d20", + "xprv9s21ZrQH143K3W9fk6GQj1oNT2gYszJhNBhZjPmkTsR2jmoE89oRHHknYt8WLWREtopcnPs1ZWoJjV3A5QxrW5crBA5s6bWgp2DCfLUDA5Y" + ], + [ + "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", + "honor tabique lata sultán sanidad salón gafas carga payaso rostro rojizo vena retrato móvil soplar trabajo cifra balde", + "484af722d01c9cdc9ac50f3fdfeec010c7f713fb90dbfe84dae21d8215b683e660ddeec44d685faf3e653f396ef8ce0d341097c50bffcf67ea094ebb44294df4", + "xprv9s21ZrQH143K2ouVUYDQXJpeurwoTTmfvUThuo1vRhuSXrVrKgsxAtaet1U9M5WZDpqaiLAPz17LLSuHmQG4eMMAy48Zt4AMzUwgNnEXY1Q" + ], + [ + "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", + "océano eterno bestia golfo bomba ron moda sur médula danza rueda núcleo agrio salmón morir ficha cuidar linterna higiene pensar iris diario ganso jamón", + "8cc9507c9ccafaf341a243e5b82c348e374b24c8c594131add8684cfc1e61ab51e5476a4006d4d780bd2b82e9d9581ae1af67c8845e40246d5b1110814a88088", + "xprv9s21ZrQH143K3rpBRE8Wy6o6fZ6P6SX9ZrPCATgfgMHBHK4y9vbkj3VW19wRGR6tAeT7td2iB5bvwRXqXZ2FkBCmeCsJD4Uxf363BEuBwtx" + ], + [ + "23db8160a31d3e0dca3688ed941adbf3", + "bucle sótano fibra donar seco aire campo salmón trato odio poco tierra", + "55b603a9cd15a9769e21fd22a384d12de9afe0b9c0af0f07aee688cddd792b2863064767a6df9e8aebb4bf10d4482de07ffe6d7f7440df73f04fc544236fee06", + "xprv9s21ZrQH143K31JznDuVnKRYfF538PE1GSe5W75rLGwpSuDFWaQrW48bSuzvkD7cPWqLHcjxi1AjQYFjzCEVkwpU8sq4XScgZigY4TQrSEk" + ], + [ + "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", + "llaga pudor candil yate detalle voto papá saxofón tribu talla infiel exponer altivo sonoro cifra solapa pata abuso", + 
"b63a7651d8655add895fd8a45f0fd4c0c71bd8863a8e0fd72782b2f36d43ef2fa8830ab46647afc8c437e701aed41b0bc6b2df9f11887c44457aefe2c11d413d", + "xprv9s21ZrQH143K3CSbDbXN4ckMcyDHyLwkrQqvXL4xBRATernArEtGvZG8eFhxLbbRBZqaBnRc3ZyrN349dRxUeAozveLohZ5AWRKsDuk6XT1" + ], + [ + "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", + "águila hoyo maldad fértil libertad estilo historia agudo asilo grosor goloso leopardo odisea nueve butaca molde lacio mañana plomo exento rey adicto puño piña", + "e4df51858246fe7a1f5b7e0045704ba76ff9d2b099707ea1d8b731dc3216c3de4edc63bad0911179d818b20e2c2a4e8da9e62dac242f6369221802e25abd0ceb", + "xprv9s21ZrQH143K381uk3B2rYMvPGDjn8oqzxK2tGqoNmB9eN2puxmZtB3BximsnfUd6u3eKP58aVTrUmKW4xA4ckETJwd6YPVa7G8nLkdBknC" + ], + [ + "f30f8c1da665478f49b001d94c5fc452", + "urbe lección ajuste enero faena reptil caimán abdomen sobre genio túnel óptica", + "f5e417f1f68c479cd3058e836ce47aaa52629ac4cb93e99e8025ab38e76a6fab56f6b5a6c1f20637bf29e108f41bca76a1a061d8f8ea40f7c0e5a15552c23ae2", + "xprv9s21ZrQH143K3sNRU3fx6SQaMRXAAJPHtmBFpXF5MhZ6AEURc2zE2gmVexqAA6EFyTBUBfxv982cwPLR2fM81SivaJXcN2FdbmgzrFbEdqs" + ], + [ + "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", + "rama jeringa logro mando soldado pezuña pésimo vampiro cerrar mojar cupón dueño llover barro guerra mambo cerrar casero", + "0ae0e69a6ab7c290e1319018a36a7481b6969f73745db1fe56ed4b928b17458bd86e580b6925ec6b64558e4a1431b4761d0928928b689c37efad8122edd7762c", + "xprv9s21ZrQH143K2VwsYAPzaKfUBfB8WtQXQux4e5R2cnLhipQ7zQfwWqg2Hozhrh44r1ZtMDzxVDA2PyUqEBtu9o2BidJjQ2psh8QspU8siVK" + ], + [ + "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", + "vampiro célula dos simio bono sondeo vencer haz remar papel castor codo nivel alarma rapaz ofensa gripe sagaz otro tabaco esfuerzo rojizo jinete traje", + "c87970357a0faf4ebf604d9c486726e1af8d2874d40f3ba30e5774d615c6eb7ecc6cc04d85d6be4e3e36cf4771f8e15350152351f918bf4a555a33d57f90d61c", + "xprv9s21ZrQH143K4UfkWbEcDPVrme1ea7d9BcQR6XtJ1VJucg1haNWCKCxqkXBshF3QkTSq3HHX2V3qiZmob9bJTnnj6ny5SbvwMp26v6HwxTd" + ] + ] +} diff --git a/lib/data/mnemonic/vnemonic.v b/lib/data/mnemonic/vnemonic.v new file mode 100644 index 00000000..744a9966 --- /dev/null +++ b/lib/data/mnemonic/vnemonic.v @@ -0,0 +1,37 @@ +module mnemonic + +import freeflowuniverse.herolib.ui.console + +#include "@VMODROOT/vnemonic.h" + +#flag @VMODROOT/vnemonic.o + +fn C.mnemonic_from_bytes_en(bytes &u8, len usize) &char + +fn C.mnemonic_to_bytes_en(mnemonic &i8, bytes_out &u8, len usize, written &usize) int + +pub fn parse(mnemonic string) []u8 { + buffer := []u8{len: 128} + written := usize(0) + + C.mnemonic_to_bytes_en(mnemonic.str, buffer.data, 128, &written) + mut target := []u8{len: int(written)} + copy(mut target, buffer) + + return target +} + +pub fn dumps(buffer []u8) string { + str := C.mnemonic_from_bytes_en(buffer.data, buffer.len) + clean := unsafe { cstring_to_vstring(str) } + return clean +} + +fn test() { + source := 'turtle soda patrol vacuum turn fault bracket border angry rookie okay anger' + data := parse(source) + console.print_debug(data) + + value := dumps(data) + console.print_debug(value) +} diff --git a/lib/data/models/hr/company.v b/lib/data/models/hr/company.v new file mode 100644 index 00000000..1eacbd21 --- /dev/null +++ b/lib/data/models/hr/company.v @@ -0,0 +1,59 @@ +module hr + +import freeflowuniverse.herolib.core.base +import freeflowuniverse.herolib.core.playbook + +pub struct Company { +pub mut: + name string + country &Country + description string + purpose string + address string + 
shares      ?CompanyShares
+}
+
+pub struct CompanyShares {
+pub mut:
+	number_of_shares u64
+	share_value      string
+	valuation_date   string
+}
+
+fn (mut self HRData) add_companies(mut session base.Session) {
+	for mut action in session.plbook.find(filter: 'cos:company_define') or { [] } {
+		company := self.extract_company_information(mut action) or {
+			self.errors << 'invalid company information: ${err}'
+			continue
+		}
+
+		if _ := self.companies[company.name] {
+			self.errors << 'redefinition of company ${company.name}'
+			continue
+		}
+
+		self.companies[company.name] = company
+	}
+}
+
+fn (mut self HRData) extract_company_information(mut action playbook.Action) !Company {
+	mut p := action.params
+	name := p.get('name')!
+	purpose := p.get_default('purpose', '')!.trim_space()
+	description := p.get_default('description', '')!.trim_space()
+	country_str := p.get('country')!
+	country_id := country_id_from_str(country_str)!
+	mut country := self.countries[country_id] or {
+		return error('missing definition of country "${country_str}"')
+	}
+
+	address := p.get_default('address', '')!.trim_space()
+
+	// description, purpose and address are plain strings on Company, so empty string marks an absent value
+	return Company{
+		name: name
+		country: &country
+		description: description
+		purpose: purpose
+		address: address
+	}
+}
diff --git a/lib/data/models/hr/country.v b/lib/data/models/hr/country.v
new file mode 100644
index 00000000..d3af079f
--- /dev/null
+++ b/lib/data/models/hr/country.v
@@ -0,0 +1,55 @@
+module hr
+
+import freeflowuniverse.herolib.core.base
+import freeflowuniverse.herolib.core.playbook
+
+pub enum CountryID {
+	zanzibar
+	belgium
+	egypt
+}
+
+pub struct Country {
+pub mut:
+	name string
+	id   CountryID
+}
+
+fn country_id_from_str(str string) !CountryID {
+	country := match str.to_lower() {
+		'znz', 'zanzibar' { CountryID.zanzibar }
+		'egypt' { CountryID.egypt }
+		'belgium' { CountryID.belgium }
+		else { return error('invalid country name ${str}') }
+	}
+
+	return country
+}
+
+fn (mut self HRData) add_countries(mut session base.Session) {
+	for mut action in session.plbook.find(filter: 'cos:country_define') or { [] } {
+		country := self.extract_country_information(mut action) or {
+			self.errors << 'invalid country information: ${err}'
+			continue
+		}
+
+		if _ := self.countries[country.id] {
+			self.errors << 'redefinition of country ${country.name}'
+			continue
+		}
+
+		self.countries[country.id] = country
+	}
+}
+
+fn (mut self HRData) extract_country_information(mut action playbook.Action) !Country {
+	mut p := action.params
+	name := p.get('name')!
+
+	id := country_id_from_str(name)!
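+	// the enum id becomes the canonical key in HRData.countries; the free-form name is stored as supplied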
+
+	return Country{
+		name: name
+		id:   id
+	}
+}
diff --git a/lib/data/models/hr/hr.v b/lib/data/models/hr/hr.v
new file mode 100644
index 00000000..c2802931
--- /dev/null
+++ b/lib/data/models/hr/hr.v
@@ -0,0 +1,24 @@
+module hr
+
+import freeflowuniverse.herolib.core.base
+
+pub struct HRData {
+pub mut:
+	people        map[string]Person
+	companies     map[string]Company
+	share_holders []ShareHolder
+	countries     map[CountryID]Country
+	errors        []string
+}
+
+pub fn new_from_session(mut session base.Session) !HRData {
+	mut data := HRData{}
+
+	data.add_countries(mut session)
+
+	data.add_companies(mut session)
+
+	data.add_people(mut session)
+
+	return data
+}
diff --git a/lib/data/models/hr/person.v b/lib/data/models/hr/person.v
new file mode 100644
index 00000000..fefc6012
--- /dev/null
+++ b/lib/data/models/hr/person.v
@@ -0,0 +1,386 @@
+module hr
+
+import freeflowuniverse.herolib.core.base
+import freeflowuniverse.herolib.core.playbook
+
+pub struct Person {
+pub mut:
+	username             string
+	first_name           string
+	last_name            string
+	telephone_numbers    []string
+	emails               []string
+	linkedin             ?string
+	github               ?string
+	gitourworld          ?string
+	url                  ?string
+	description          ?string
+	country_of_residence CountryID
+
+	salary_outstanding ?SalaryOutstanding
+	shares             ?Shares
+	tokens             []Tokens
+	loan_outstanding   []LoanOutstanding
+	remuneration       ?Remuneration
+}
+
+pub struct SalaryOutstanding {
+pub mut:
+	company string
+	amount  string
+	comment ?string
+	url     ?string
+}
+
+pub struct Shares {
+pub mut:
+	company string
+	amount  u64
+	comment string
+}
+
+pub struct Tokens {
+pub mut:
+	amount          u64
+	wallet_address  string
+	wallet_type     string
+	wallet_currency string
+	comment         ?string
+}
+
+pub struct LoanOutstanding {
+pub mut:
+	amount                string
+	start_date            string
+	payback_period        string
+	payback_period_number u32
+	comment               ?string
+	url                   ?string
+}
+
+pub struct Remuneration {
+pub mut:
+	monthly_salary     string
+	reward_pool_points u32
+	car_per_month      string
+	lodging_per_month  string
+	travel_max         string
+	travel_average     string
+	expense_max        string
+	expense_average    string
+	payout_digital     map[string]DigitalPayout
+	comment            ?string
+	url                ?string
+}
+
+pub struct DigitalPayout {
+pub mut:
+	amount                  string
+	wallet_address          string
+	wallet_type             string
+	wallet_currency         string
+	period                  string = 'month'
+	number_of_months_behind u32
+}
+
+pub struct ShareHolder {
+pub mut:
+	person       &Person
+	company_name string
+	amount       u32
+	comment      ?string
+}
+
+fn (mut self HRData) add_people(mut session base.Session) {
+	self.add_person_definitions(mut session)
+	self.add_salary_outstandings(mut session)
+	self.add_tokens(mut session)
+	self.add_loan_outstandings(mut session)
+	self.add_remunerations(mut session)
+	self.add_share_holders(mut session)
+}
+
+fn (mut self HRData) add_share_holders(mut session base.Session) {
+	for mut action in session.plbook.find(filter: 'cos:shareholding_define') or { [] } {
+		share_holder := self.extract_share_holder_information(mut action) or {
+			self.errors << 'invalid share holder information: ${err}'
+			continue
+		}
+
+		self.share_holders << share_holder
+	}
+}
+
+fn (mut self HRData) extract_share_holder_information(mut action playbook.Action) !ShareHolder {
+	mut p := action.params
+	person_name := p.get('person_name')!
+	mut person := self.people[person_name] or {
+		return error('a person with the name ${person_name} is not defined')
+	}
+
+	company_name := p.get('company_name')!
+	amount := p.get_u32('amount')!
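+	// amount is the number of shares this person holds; per-share value sits on CompanyShares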
+	comment := p.get_default('comment', '')!.trim_space()
+
+	return ShareHolder{
+		person: &person
+		amount: amount
+		company_name: company_name
+		comment: if comment != '' { comment } else { none }
+	}
+}
+
+fn (mut self HRData) add_remunerations(mut session base.Session) {
+	mut digital_payout := self.get_digital_payout(mut session)
+	for mut action in session.plbook.find(filter: 'cos:remuneration') or { [] } {
+		username, remuneration := self.extract_remuneration_information(mut action, digital_payout) or {
+			self.errors << 'invalid remuneration information: ${err}'
+			continue
+		}
+
+		if mut person := self.people[username] {
+			person.remuneration = remuneration
+		} else {
+			self.errors << 'a person with the name ${username} is not defined'
+			continue
+		}
+	}
+}
+
+fn (mut self HRData) get_digital_payout(mut session base.Session) map[string]DigitalPayout {
+	mut digital_payout_map := map[string]DigitalPayout{}
+	for mut action in session.plbook.find(filter: 'cos:payout_digital') or { [] } {
+		name, digital_payout := self.extract_digital_payout_information(mut action) or {
+			self.errors << 'invalid digital payout information: ${err}'
+			continue
+		}
+
+		if _ := digital_payout_map[name] {
+			self.errors << 'a digital payout with the same name "${name}" is already defined'
+		} else {
+			digital_payout_map[name] = digital_payout
+		}
+	}
+
+	return digital_payout_map
+}
+
+fn (mut self HRData) extract_digital_payout_information(mut action playbook.Action) !(string, DigitalPayout) {
+	mut p := action.params
+	name := p.get('name')!
+	amount := p.get('amount')!
+	wallet_address := p.get('wallet_addr')!
+	wallet_type := p.get('wallet_type')!
+	wallet_currency := p.get('wallet_currency')!
+	period := p.get_default('period', 'month')!
+	months_behind := p.get_u32_default('nrmonths_behind', 0)!
+
+	return name, DigitalPayout{
+		amount: amount
+		number_of_months_behind: months_behind
+		period: period
+		wallet_address: wallet_address
+		wallet_currency: wallet_currency
+		wallet_type: wallet_type
+	}
+}
+
+fn (mut self HRData) extract_remuneration_information(mut action playbook.Action, digital_payout map[string]DigitalPayout) !(string, Remuneration) {
+	mut p := action.params
+	username := p.get('person_name')!
+	salary_month := p.get('salary_month')!
+	reward_pool_points := p.get_u32('reward_pool_points')!
+	car_month := p.get_default('car_month', '')!
+	lodging_month := p.get_default('lodgin_month', '')! // note: the heroscript key is spelled 'lodgin_month' in existing content
+	travel_max := p.get_default('travel_max', '')!
+	travel_avg := p.get_default('travel_avg', '')!
+	expense_max := p.get_default('expense_max', '')!
+	expense_avg := p.get_default('expense_avg', '')!
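+	// monetary fields stay raw strings here; parsing amounts/currencies is assumed to happen in consuming code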
+ comment := p.get_default('comment', '')!.trim_space() + url := p.get_default('url', '')!.trim_space() + payout_digital_str := p.get_default('payout_digital', '')!.split(',') + mut payout_digital := map[string]DigitalPayout{} + for str in payout_digital_str { + if v := digital_payout[str] { + payout_digital[str] = v + } else { + return error('missing digital payout definition "${str}"') + } + } + + remuneration := Remuneration{ + car_per_month: car_month + expense_average: expense_avg + expense_max: expense_max + lodging_per_month: lodging_month + monthly_salary: salary_month + reward_pool_points: reward_pool_points + travel_average: travel_avg + travel_max: travel_max + comment: if comment != '' { comment } else { none } + url: if url != '' { url } else { none } + payout_digital: payout_digital + } + + return username, remuneration +} + +fn (mut self HRData) add_loan_outstandings(mut session base.Session) { + for mut action in session.plbook.find(filter: 'cos:loan_outstanding') or { [] } { + username, loan := self.extract_loan_outstanding_information(mut action) or { + self.errors << 'invalid loan outstanding information: ${err}' + continue + } + + if mut person := self.people[username] { + person.loan_outstanding << loan + } else { + self.errors << 'a person with the name ${username} is not defined' + continue + } + } +} + +fn (mut self HRData) extract_loan_outstanding_information(mut action playbook.Action) !(string, LoanOutstanding) { + mut p := action.params + username := p.get('person_name')! + amount := p.get('amount')! + start_date := p.get('start_date')! + payback_period := p.get('payback_period')! + payback_period_number := p.get_u32('payback_period_nr')! + comment := p.get_default('comment', '')!.trim_space() + url := p.get_default('url', '')!.trim_space() + + loan := LoanOutstanding{ + amount: amount + start_date: start_date + payback_period: payback_period + payback_period_number: payback_period_number + comment: if comment != '' { comment } else { none } + url: if url != '' { url } else { none } + } + + return username, loan +} + +fn (mut self HRData) add_tokens(mut session base.Session) { + for mut action in session.plbook.find(filter: 'cos:tokens') or { [] } { + username, tokens := self.extract_tokens_information(mut action) or { + self.errors << 'invalid tokens information: ${err}' + continue + } + + if mut person := self.people[username] { + person.tokens << tokens + } else { + self.errors << 'a person with the name ${username} is not defined' + continue + } + } +} + +fn (mut self HRData) extract_tokens_information(mut action playbook.Action) !(string, Tokens) { + mut p := action.params + username := p.get('person_name')! + amount := p.get_u64('amount')! + wallet_address := p.get('wallet_address')! + wallet_type := p.get('wallet_type')! + wallet_currency := p.get('wallet_currency')! 
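+	// wallet address/type/currency identify where this token balance is held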
+ comment := p.get_default('comment', '')!.trim_space() + + tokens := Tokens{ + amount: amount + comment: if comment != '' { comment } else { none } + wallet_address: wallet_address + wallet_currency: wallet_currency + wallet_type: wallet_type + } + + return username, tokens +} + +fn (mut self HRData) add_salary_outstandings(mut session base.Session) { + for mut action in session.plbook.find(filter: 'cos:salary_outstanding') or { [] } { + username, salary := self.extract_salary_outstanding_information(mut action) or { + self.errors << 'invalid salary outstanding data: ${err}' + continue + } + + if mut person := self.people[username] { + person.salary_outstanding = salary + } else { + self.errors << 'a person with the name ${username} is not defined' + } + } +} + +fn (mut self HRData) extract_salary_outstanding_information(mut action playbook.Action) !(string, SalaryOutstanding) { + mut p := action.params + username := p.get('person_name')! + company := p.get('company')! + amount := p.get('amount')! + comment := p.get_default('comment', '')!.trim_space() + url := p.get_default('url', '')!.trim_space() + + salary := SalaryOutstanding{ + company: company + amount: amount + comment: if comment != '' { comment } else { none } + url: if url != '' { url } else { none } + } + + return username, salary +} + +fn (mut self HRData) add_person_definitions(mut session base.Session) { + for mut action in session.plbook.find(filter: 'cos:person_define') or { [] } { + person := self.extract_person_information(mut action) or { + self.errors << 'invalid person data: ${err}' + continue + } + + if _ := self.people[person.username] { + self.errors << 'a person with the same name "${person.username}" is already defined' + continue + } + + self.people[person.username] = person + } +} + +fn (mut self HRData) extract_person_information(mut action playbook.Action) !Person { + mut p := action.params + username := p.get('name')! + firstname := p.get_default('firstname', '')! + lastname := p.get_default('lastname', '')! + telephone_numbers_str := p.get_default('tel', '')! + emails_str := p.get_default('email', '')! + linkedin := p.get_default('linkedin', '')!.trim_space() + github := p.get_default('github', '')!.trim_space() + gitourworld := p.get_default('gitourworld', '')!.trim_space() + url := p.get_default('url', '')!.trim_space() + description := p.get_default('description', '')!.trim_space() + country_of_residence_str := p.get_default('country_residence', '')! + + telephone_numbers := telephone_numbers_str.split(',') + emails := emails_str.split(',') + country := country_id_from_str(country_of_residence_str)! 
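+	// tel and email take comma-separated lists in heroscript, e.g. email:'a@example.com,b@example.com' (illustrative values)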
+
+	person := Person{
+		username: username
+		first_name: firstname
+		last_name: lastname
+		telephone_numbers: telephone_numbers
+		emails: emails
+		linkedin: if linkedin != '' { linkedin } else { none }
+		github: if github != '' { github } else { none }
+		gitourworld: if gitourworld != '' { gitourworld } else { none }
+		url: if url != '' { url } else { none }
+		description: if description != '' { description } else { none }
+		country_of_residence: country
+	}
+
+	action.done = true
+
+	return person
+}
diff --git a/lib/data/models/hr/readme.md b/lib/data/models/hr/readme.md
new file mode 100644
index 00000000..41a2c7c3
--- /dev/null
+++ b/lib/data/models/hr/readme.md
@@ -0,0 +1,3 @@
+# HR Models
+
+> This module is experimental and not used yet; it provides a set of objects that can be used for modelling a company structure.
\ No newline at end of file
diff --git a/lib/data/ourdb/README.md b/lib/data/ourdb/README.md
new file mode 100644
index 00000000..308f0e34
--- /dev/null
+++ b/lib/data/ourdb/README.md
@@ -0,0 +1,115 @@
+# OurDB Module
+
+OurDB is a lightweight, efficient key-value database implementation in V that provides data persistence with history tracking capabilities. It's designed for scenarios where you need fast key-value storage with the ability to track changes over time.
+
+## Usage Example
+
+```v
+
+//record_nr_max u32 = 16777216 - 1 // max number of records
+//record_size_max u32 = 1024*4 // max record size (4KB default)
+//file_size u32 = 500 * (1 << 20) // file size (500MB default)
+//path string // storage directory
+
+import freeflowuniverse.herolib.data.ourdb
+
+mut db := ourdb.new(path:"/tmp/mydb", incremental_mode: false)! // key-value mode so we can choose our own ids
+
+// Store data (set() takes the value as []u8)
+db.set(id: 1, data: 'Hello World'.bytes())!
+
+// Retrieve data
+data := db.get(1)! // Returns []u8
+
+// Get history
+history := db.get_history(1, 5)! // Get last 5 versions
+
+// Delete data
+db.delete(1)!
+```
+
+
+## Features
+
+- Efficient key-value storage
+- History tracking for values
+- Data integrity verification using CRC32
+- Support for multiple backend files
+- Configurable record sizes and counts
+- Memory and disk-based lookup tables
+
+## Architecture
+
+OurDB consists of three main components working together in a layered architecture:
+
+### 1. Frontend (db.v)
+- Provides the public API for database operations
+- Handles high-level operations (set, get, delete, history)
+- Coordinates between lookup and backend components
+- Located in `db.v`
+
+### 2. Lookup Table (lookup.v)
+- Maps keys to physical locations in the backend storage
+- Supports both memory and disk-based lookup tables
+- Configurable key sizes for optimization
+- Handles sparse data efficiently
+- Located in `lookup.v`
+
+### 3. Backend Storage (backend.v)
+- Manages the actual data storage in files
+- Handles data integrity with CRC32 checksums
+- Supports multiple file backends for large datasets
+- Implements the low-level read/write operations
+- Located in `backend.v`
+
+## File Structure
+
+- `db.v`: Frontend interface providing the public API
+- `lookup.v`: Implementation of the lookup table system
+- `backend.v`: Low-level data storage implementation
+- `factory.v`: Database initialization and configuration
+- `db_test.v`: Test suite for verifying functionality
+
+## How It Works
+
+1. **Frontend Operations**
+   - When you call `set()`, the frontend:
+     1. Gets the storage location from the lookup table
+     2. Passes the data to the backend for storage
+     3. Updates the lookup table with any new location
+
+2. 
**Lookup Table** + - Maintains a mapping between keys and physical locations + - Optimizes key size based on maximum record count + - Can be memory-based for speed or disk-based for large datasets + - Supports sparse data storage for efficient space usage + +3. **Backend Storage** + - Stores data in one or multiple files + - Each record includes: + - Data size + - CRC32 checksum + - Previous record location (for history) + - Actual data + - Automatically handles file selection and management + +## Implementation Details + +### Record Format +Each record in the backend storage includes: +- 2 bytes: Data size +- 4 bytes: CRC32 checksum +- 6 bytes: Previous record location +- N bytes: Actual data + +### Lookup Table Optimization +The lookup table automatically optimizes its key size based on: +- Total number of records (affects address space) +- Record size and count (determines file splitting) +- Available memory (can switch to disk-based lookup) + +### File Management +- Supports splitting data across multiple files when needed +- Each file is limited to 500MB by default (configurable) +- Automatic file selection based on record location +- Files are created as needed with format: `${path}/${file_nr}.db` diff --git a/lib/data/ourdb/backend.v b/lib/data/ourdb/backend.v new file mode 100644 index 00000000..89168e0f --- /dev/null +++ b/lib/data/ourdb/backend.v @@ -0,0 +1,260 @@ +module ourdb + +import os +import hash.crc32 + +// this is the implementation of the lowlevel datastor, doesnt know about ACL or signature, it just stores a binary blob + +// the backend can be in 1 file or multiple files, if multiple files each file has a nr, we support max 65536 files in 1 dir + +// calculate_crc computes CRC32 for the data +fn calculate_crc(data []u8) u32 { + return crc32.sum(data) +} + +fn (mut db OurDB) db_file_select(file_nr u16) ! { + // open file for read/write + // file is in "${db.path}/${nr}.db" + // return the file for read/write + if file_nr > 65535 { + return error('file_nr needs to be < 65536') + } + + path := '${db.path}/${file_nr}.db' + + if db.file_nr == file_nr { + // make sure file is opened + if !db.file.is_opened { + if !os.exists(path) { + db.create_new_db_file(file_nr)! + } + + mut file := os.open_file(path, 'r+')! + db.file = file + } + return + } + + if db.file.is_opened { + db.file.close() + } + + if !os.exists(path) { + db.create_new_db_file(file_nr)! + } + mut file := os.open_file(path, 'r+')! + + db.file = file + db.file_nr = file_nr +} + +fn (mut db OurDB) create_new_db_file(file_nr u16) ! { + new_file_path := '${db.path}/${file_nr}.db' + mut f := os.create(new_file_path)! + f.write([u8(0)])! // to make all positions start from 1 + f.close() +} + +fn (mut db OurDB) get_file_nr() !u16 { + path := '${db.path}/${db.last_used_file_nr}.db' + if !os.exists(path) { + db.create_new_db_file(db.last_used_file_nr)! + return db.last_used_file_nr + } + + stat := os.stat(path)! + if stat.size >= db.file_size { + db.last_used_file_nr += 1 + db.create_new_db_file(db.last_used_file_nr)! + } + + return db.last_used_file_nr +} + +// set stores data at position x +pub fn (mut db OurDB) set_(x u32, old_location Location, data []u8) ! { + // Convert u64 to Location + file_nr := db.get_file_nr()! + + // TODO: can't file_nr change between two revisions? + db.db_file_select(file_nr)! + + // Get current file position for lookup + db.file.seek(0, .end)! + new_location := Location{ + file_nr: file_nr + position: u32(db.file.tell()!) 
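+		// the record is appended at EOF; its header links back to old_location so history can be walked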
+ } + + // Calculate CRC of data + crc := calculate_crc(data) + + // Write size as u16 (2 bytes) + size := u16(data.len) + mut header := []u8{len: header_size, init: 0} + + // Write size (2 bytes) + header[0] = u8(size & 0xFF) + header[1] = u8((size >> 8) & 0xFF) + + // Write CRC (4 bytes) + header[2] = u8(crc & 0xFF) + header[3] = u8((crc >> 8) & 0xFF) + header[4] = u8((crc >> 16) & 0xFF) + header[5] = u8((crc >> 24) & 0xFF) + + // Convert previous location to bytes and store in header + prev_bytes := old_location.to_bytes()! + for i := 0; i < 6; i++ { + header[6 + i] = prev_bytes[i] + } + + // Write header + db.file.write(header)! + + // Write actual data + db.file.write(data)! + db.file.flush() + + // Update lookup table with new position + db.lookup.set(x, new_location)! +} + +// get retrieves data at specified location +fn (mut db OurDB) get_(location Location) ![]u8 { + db.db_file_select(location.file_nr)! + + if location.position == 0 { + return error('Record not found') + } + + // Seek to position + db.file.seek(i64(location.position), .start)! + + // Read header + mut header := []u8{len: header_size} + header_read_bytes := db.file.read(mut header)! + if header_read_bytes != header_size { + return error('failed to read header') + } + + // Parse size (2 bytes) + size := u16(header[0]) | (u16(header[1]) << 8) + + // Parse CRC (4 bytes) + stored_crc := u32(header[2]) | (u32(header[3]) << 8) | (u32(header[4]) << 16) | (u32(header[5]) << 24) + + // Read data + mut data := []u8{len: int(size)} + data_read_bytes := db.file.read(mut data)! + if data_read_bytes != int(size) { + return error('failed to read data bytes') + } + + // Verify CRC + calculated_crc := calculate_crc(data) + if calculated_crc != stored_crc { + return error('CRC mismatch: data corruption detected') + } + + return data +} + +// get_prev_pos retrieves the previous position for a record +fn (mut db OurDB) get_prev_pos_(location Location) !Location { + if location.position == 0 { + return error('Record not found') + } + + // Skip size and CRC (6 bytes) + db.file.seek(i64(location.position + 6), .start)! + + // Read previous location (6 bytes) + mut prev_bytes := []u8{len: 6} + read_bytes := db.file.read(mut prev_bytes)! + if read_bytes != 6 { + return error('failed to read previous location bytes: read ${read_bytes} while expected to read 6 bytes') + } + + return db.lookup.location_new(prev_bytes)! +} + +// delete zeros out the record at specified location +fn (mut db OurDB) delete_(x u32, location Location) ! { + if location.position == 0 { + return error('Record not found') + } + + // Seek to position + db.file.seek(i64(location.position), .start)! + + // Read size first + size_bytes := db.file.read_bytes(2) + size := u16(size_bytes[0]) | (u16(size_bytes[1]) << 8) + + // Write zeros for the entire record (header + data) + zeros := []u8{len: int(size) + header_size, init: 0} + db.file.seek(i64(location.position), .start)! + db.file.write(zeros)! + + // Clear lookup entry + db.lookup.delete(x)! +} + +// condense removes empty records and updates positions +fn (mut db OurDB) condense() ! { + temp_path := db.path + '.temp' + mut temp_file := os.create(temp_path)! 
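+	// condense copies only live records into the temp file, fixes up lookup positions, then swaps the temp file in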
+ + // Track current position in temp file + mut new_pos := Location{ + file_nr: 0 + position: 0 + } + + // Iterate through lookup table + entry_size := int(db.lookup.keysize) + for i := 0; i < db.lookup.data.len / entry_size; i++ { + location := db.lookup.get(u32(i)) or { continue } + if location.position == 0 { + continue + } + + // Read record from original file + db.file.seek(i64(location.position), .start)! + header := db.file.read_bytes(header_size) + size := u16(header[0]) | (u16(header[1]) << 8) + + if size == 0 { + continue + } + + data := db.file.read_bytes(int(size)) + + // Write to temp file + temp_file.write(header)! + temp_file.write(data)! + + // Update lookup with new position + db.lookup.set(u32(i), new_pos)! + + // Update position counter + new_pos.position += u32(size) + header_size + } + + // Close both files + temp_file.close() + db.file.close() + + // Replace original with temp + os.rm(db.path)! + os.mv(temp_path, db.path)! + + // Reopen the file + db.file = os.open_file(db.path, 'c+')! +} + +// close closes the database file +fn (mut db OurDB) close_() { + db.file.close() +} diff --git a/lib/data/ourdb/db.v b/lib/data/ourdb/db.v new file mode 100644 index 00000000..721abc73 --- /dev/null +++ b/lib/data/ourdb/db.v @@ -0,0 +1,120 @@ +module ourdb + +import os + +// OurDB is a simple key-value database implementation that provides: +// - Efficient key-value storage with history tracking +// - Data integrity verification using CRC32 +// - Support for multiple backend files +// - Lookup table for fast data retrieval +// +// The database consists of three main components: +// 1. DB Interface (this file) - Provides the public API for database operations +// 2. Lookup Table - Maps keys to data locations for efficient retrieval +// 3. Backend Storage - Handles the actual data storage and file management + +// set stores data at the specified key position +// The data is stored with a CRC32 checksum for integrity verification +// and maintains a linked list of previous values for history tracking +// Returns the ID used (either x if specified, or auto-incremented if x=0) +@[params] +struct OurDBSetArgs { + id ?u32 + data []u8 @[required] +} + +pub fn (mut db OurDB) set(args OurDBSetArgs) !u32 { + if db.incremental_mode { + // if id points to an empty location, return an error + // else, overwrite data + if id := args.id { + // this is an update + location := db.lookup.get(id)! + if location.position == 0 { + return error('cannot set id for insertions when incremental mode is enabled') + } + + db.set_(id, location, args.data)! + db.lookup.set(id, location)! // TODO: maybe not needed + return id + } + + // this is an insert + id := db.lookup.get_next_id()! + db.set_(id, Location{}, args.data)! + return id + } + + // using key-value mode + id := args.id or { return error('id must be provided when incremental is disabled') } + location := db.lookup.get(id)! // Get location from lookup table if exists + db.set_(id, location, args.data)! + return id +} + +// get retrieves data stored at the specified key position +// Returns error if the key doesn't exist or data is corrupted +pub fn (mut db OurDB) get(x u32) ![]u8 { + location := db.lookup.get(x)! // Get location from lookup table + return db.get_(location)! 
+}
+
+// get_history retrieves a list of previous values for the specified key
+// depth parameter controls how many historical values to retrieve (max)
+// Returns error if key doesn't exist or if there's an issue accessing the data
+pub fn (mut db OurDB) get_history(x u32, depth u8) ![][]u8 {
+	mut result := [][]u8{}
+	mut current_location := db.lookup.get(x)! // Start with current location
+
+	// Traverse the history chain up to specified depth
+	for i := u8(0); i < depth; i++ {
+		// Get current value
+		data := db.get_(current_location)!
+		result << data
+
+		// Try to get previous location
+		current_location = db.get_prev_pos_(current_location) or { break }
+		if current_location.position == 0 {
+			break
+		}
+	}
+
+	return result
+}
+
+// delete removes the data at the specified key position
+// This operation zeros out the record but maintains the space in the file
+// Use condense() as a separate step afterwards to reclaim space from deleted records
+pub fn (mut db OurDB) delete(x u32) ! {
+	location := db.lookup.get(x)! // Get location from lookup table
+	db.delete_(x, location)!
+	db.lookup.delete(x)!
+}
+
+// lookup_dump_path returns the path where the lookup table is persisted
+fn (mut db OurDB) lookup_dump_path() string {
+	return '${db.path}/lookup_dump.db'
+}
+
+// load the lookup metadata if it exists on disk
+fn (mut db OurDB) load() ! {
+	if os.exists(db.lookup_dump_path()) {
+		db.lookup.import_sparse(db.lookup_dump_path())!
+	}
+}
+
+// make sure the lookup metadata is stored on disk
+fn (mut db OurDB) save() ! {
+	db.lookup.export_sparse(db.lookup_dump_path())!
+}
+
+// close saves the lookup table and closes the database file
+fn (mut db OurDB) close() ! {
+	db.save()!
+	db.close_()
+}
+
+fn (mut db OurDB) destroy() ! {
+	os.rmdir_all(db.path)!
+}
diff --git a/lib/data/ourdb/db_test.v b/lib/data/ourdb/db_test.v
new file mode 100644
index 00000000..1753eec5
--- /dev/null
+++ b/lib/data/ourdb/db_test.v
@@ -0,0 +1,171 @@
+module ourdb
+
+import os
+
+const test_dir = '/tmp/ourdb'
+
+fn test_basic_operations() {
+	mut db := new(
+		record_nr_max:   16777216 - 1 // max nr of records
+		record_size_max: 1024
+		path:            test_dir
+	)!
+
+	defer {
+		db.destroy() or { panic('failed to destroy db: ${err}') }
+	}
+
+	// Test set and get
+	test_data := 'Hello, World!'.bytes()
+	id := db.set(data: test_data)!
+
+	retrieved := db.get(id)!
+	assert retrieved == test_data
+
+	// Test a second insert (gets its own auto-incremented id)
+	new_data := 'Updated data'.bytes()
+	id2 := db.set(data: new_data)!
+	retrieved2 := db.get(id2)!
+	assert retrieved2 == new_data
+}
+
+fn test_auto_increment() {
+	mut db := new(
+		record_nr_max:   10 // max nr of records
+		record_size_max: 2
+		path:            test_dir
+	)!
+
+	defer {
+		db.destroy() or { panic('failed to destroy db: ${err}') }
+	}
+
+	// Create 5 objects with no ID specified (the db assigns them)
+	mut ids := []u32{}
+	for i in 0 .. 5 {
+		data := 'Object ${i + 1}'.bytes()
+		id := db.set(data: data)!
+		ids << id
+	}
+
+	// Verify IDs are incremental
+	assert ids.len == 5
+	for i in 0 .. 5 {
+		assert ids[i] == i
+		// Verify data can be retrieved
+		data := db.get(ids[i])!
+		assert data == 'Object ${i + 1}'.bytes()
+	}
+}
+
+fn test_history_tracking() {
+	mut db := new(
+		record_nr_max:    16777216 - 1 // max nr of records
+		record_size_max:  1024
+		path:             test_dir
+		incremental_mode: false
+	)!
+
+	defer {
+		db.destroy() or { panic('failed to destroy db: ${err}') }
+	}
+
+	// Create multiple versions of data
+	key := u32(1)
+	data1 := 'Version 1'.bytes()
+	data2 := 'Version 2'.bytes()
+	data3 := 'Version 3'.bytes()
+
+	db.set(id: key, data: data1)!
+	db.set(id: key, data: data2)!
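+	// each overwrite links the previous record's location into the new header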
+ db.set(id: key, data: data3)! + + // Get history with depth 3 + history := db.get_history(key, 3)! + assert history.len == 3 + assert history[0] == data3 // Most recent first + assert history[1] == data2 + assert history[2] == data1 +} + +fn test_delete_operation() { + mut db := new( + record_nr_max: 16777216 - 1 // max size of records + record_size_max: 1024 + path: test_dir + incremental_mode: false + )! + + defer { + db.destroy() or { panic('failed to destroy db: ${err}') } + } + + // Set and then delete data + test_data := 'Test data'.bytes() + key := u32(1) + db.set(id: key, data: test_data)! + + // Verify data exists + retrieved := db.get(key)! + assert retrieved == test_data + + // Delete data + db.delete(key)! + + // Verify data is deleted + retrieved_after_delete := db.get(key) or { + assert err.msg() == 'Record not found' + []u8{} + } + assert retrieved_after_delete.len == 0 +} + +fn test_error_handling() { + mut db := new( + record_nr_max: 16777216 - 1 // max size of records + record_size_max: 1024 + path: test_dir + )! + + defer { + db.destroy() or { panic('failed to destroy db: ${err}') } + } + + // Test getting non-existent key + result := db.get(999) or { + assert err.msg() == 'Record not found' + []u8{} + } + assert result.len == 0 + + // Test deleting non-existent key + db.delete(999) or { + assert err.msg() == 'Record not found' + return + } +} + +fn test_file_switching() { + mut db := new( + record_nr_max: 16777216 - 1 // max size of records + record_size_max: 1024 + path: test_dir + file_size: 10 + incremental_mode: false + )! + + defer { + db.destroy() or { panic('failed to destroy db: ${err}') } + } + + test_data1 := 'Test data'.bytes() + key := u32(1) + db.set(id: key, data: test_data1)! + stat := os.stat('${db.path}/${db.last_used_file_nr}.db')! + + test_data2 := 'Test data 2222'.bytes() + db.set(id: u32(2), data: test_data2)! + + location := db.lookup.get(u32(2))! + assert location.file_nr == 1 +} diff --git a/lib/data/ourdb/factory.v b/lib/data/ourdb/factory.v new file mode 100644 index 00000000..3f11b113 --- /dev/null +++ b/lib/data/ourdb/factory.v @@ -0,0 +1,70 @@ +module ourdb + +import os + +const mbyte_ = 1000000 + +// OurDB represents a binary database with variable-length records +@[heap] +pub struct OurDB { +mut: + lookup &LookupTable +pub: + path string // is the directory in which we will have the lookup db as well as all the backend + incremental_mode bool + file_size u32 = 500 * (1 << 20) // 500MB +pub mut: + file os.File + file_nr u16 // the file which is open + last_used_file_nr u16 +} + +const header_size = 12 + +@[params] +pub struct OurDBConfig { +pub: + record_nr_max u32 = 16777216 - 1 // max size of records + record_size_max u32 = 1024 * 4 // max size in bytes of a record, is 4 KB default + file_size u32 = 500 * (1 << 20) // 500MB + path string // directory where we will stor the DB + + incremental_mode bool = true +} + +// new_memdb creates a new memory database with the given path and lookup table +pub fn new(args OurDBConfig) !OurDB { + mut keysize := u8(4) + + if args.record_nr_max < 65536 { + keysize = 2 + } else if args.record_nr_max < 16777216 { + keysize = 3 + } else if args.record_nr_max < 4294967296 { + keysize = 4 + } else { + return error('max supported records is 4294967296 in OurDB') + } + + if f64(args.record_size_max * args.record_nr_max) / 2 > mbyte_ * 10 { + keysize = 6 // will use multiple files + } + + mut l := new_lookup( + size: args.record_nr_max + keysize: keysize + incremental_mode: args.incremental_mode + )! 
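+	// worked example of the keysize selection above: record_nr_max = 16777215
+	// fits in 3 bytes, so keysize = 3; and when record_size_max * record_nr_max / 2
+	// exceeds 10 MB, keysize is forced to 6 so a Location also carries a file_nr
+	// and the data can span multiple backend files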
+ + os.mkdir_all(args.path)! + mut db := OurDB{ + path: args.path + lookup: &l + file_size: args.file_size + incremental_mode: args.incremental_mode + } + + db.load()! + + return db +} diff --git a/lib/data/ourdb/lookup.v b/lib/data/ourdb/lookup.v new file mode 100644 index 00000000..fccdfdfd --- /dev/null +++ b/lib/data/ourdb/lookup.v @@ -0,0 +1,383 @@ +module ourdb + +import os + +// LOOKUP table is link between the id and the posititon in a file with the data + +const data_file_name = 'data' +const incremental_file_name = '.inc' + +@[params] +pub struct LookupConfig { +pub: + size u32 // size of the table + keysize u8 // size of each entry in bytes (2-6), 6 means we store data over multiple files + lookuppath string // if set, use disk-based lookup + + incremental_mode bool = true +} + +pub struct LookupTable { + keysize u8 + lookuppath string +mut: + data []u8 + incremental ?u32 // points to next empty slot in the lookup table if incremental mode is enabled +} + +// Method to create a new lookup table +fn new_lookup(config LookupConfig) !LookupTable { + // First verify keysize is valid + if config.keysize !in [2, 3, 4, 6] { + return error('keysize must be 2,3,4 or 6') + } + + if config.lookuppath.len > 0 { + if !os.exists(config.lookuppath) { + os.mkdir_all(config.lookuppath)! + } + + // For disk-based lookup, create empty file if it doesn't exist + if !os.exists(os.join_path(config.lookuppath, data_file_name)) { + data := []u8{len: int(config.size * config.keysize), init: 0} + os.write_file(os.join_path(config.lookuppath, data_file_name), data.bytestr())! + } + + return LookupTable{ + // size: config.size + data: []u8{} + keysize: config.keysize + lookuppath: config.lookuppath + incremental: get_incremental_info(config) + } + } + + return LookupTable{ + // size: config.size + data: []u8{len: int(config.size * config.keysize), init: 0} + keysize: config.keysize + lookuppath: '' + incremental: get_incremental_info(config) + } +} + +fn get_incremental_info(config LookupConfig) ?u32 { + if !config.incremental_mode { + return none + } + + if config.lookuppath.len > 0 { + if !os.exists(os.join_path(config.lookuppath, incremental_file_name)) { + // Create a separate file for storing the incremental value + os.write_file(os.join_path(config.lookuppath, incremental_file_name), '0') or { + panic('failed to write .inc file: ${err}') + } + } + + inc_str := os.read_file(os.join_path(config.lookuppath, incremental_file_name)) or { + panic('failed to read .inc file: ${err}') + } + + incremental := inc_str.u32() + return incremental + } + + return 0 +} + +// Method to get value from a specific position +fn (lut LookupTable) get(x u32) !Location { + entry_size := int(lut.keysize) + if lut.lookuppath.len > 0 { + // Check file size first + file_size := os.file_size(lut.get_data_file_path()!) + start_pos := x * entry_size + + if start_pos + entry_size > file_size { + return error('Invalid read for get in lut: ${lut.lookuppath}: ${start_pos + entry_size} would exceed file size ${file_size}') + } + + // Read directly from file for disk-based lookup + mut file := os.open(lut.get_data_file_path()!)! + defer { file.close() } + + mut data := []u8{len: entry_size} + bytes_read := file.read_from(u64(start_pos), mut data)! + if bytes_read < entry_size { + return error('Incomplete read: expected ${entry_size} bytes but got ${bytes_read}') + } + return lut.location_new(data)! 
+	}
+
+	if x * entry_size >= u32(lut.data.len) {
+		return error('Index out of bounds')
+	}
+
+	start := u32(x * entry_size)
+	return lut.location_new(lut.data[start..start + entry_size])!
+}
+
+fn (mut lut LookupTable) get_next_id() !u32 {
+	incremental := lut.incremental or { return error('lookup table not in incremental mode') }
+
+	table_size := if lut.lookuppath.len > 0 {
+		u32(os.file_size(lut.get_data_file_path()!))
+	} else {
+		u32(lut.data.len)
+	}
+
+	if incremental * lut.keysize >= table_size {
+		return error('lookup table is full')
+	}
+
+	return incremental
+}
+
+fn (mut lut LookupTable) increment_index() ! {
+	mut incremental := lut.incremental or { return error('lookup table not in incremental mode') }
+
+	incremental += 1
+	lut.incremental = incremental
+	if lut.lookuppath.len > 0 {
+		os.write_file(lut.get_inc_file_path()!, incremental.str())!
+	}
+}
+
+// Method to set a value at a specific position
+fn (mut lut LookupTable) set(x u32, location Location) ! {
+	entry_size := int(lut.keysize)
+
+	id := x
+	if incremental := lut.incremental {
+		if x == incremental {
+			lut.increment_index()!
+		}
+
+		if x > incremental {
+			return error('cannot set id for insertions when incremental mode is enabled')
+		}
+	}
+
+	// Always store at the ID position
+	if lut.lookuppath.len > 0 {
+		// Check against the size of the data file (not the lookup directory)
+		data_file_path := lut.get_data_file_path()!
+		file_size := os.file_size(data_file_path)
+		start_pos := id * entry_size
+		if start_pos + entry_size > file_size {
+			return error('Invalid write position: ${start_pos + entry_size} would exceed file size ${file_size}')
+		}
+
+		// Write directly to file for disk-based lookup
+		mut file := os.open_file(data_file_path, 'r+')!
+		defer {
+			file.flush()
+			file.close()
+		}
+
+		data := location.to_bytes()!
+		bytes_written := file.write_to(u64(start_pos), data[(6 - entry_size)..])! // Only write the required bytes based on keysize
+		if bytes_written < entry_size {
+			return error('Incomplete write: expected ${entry_size} bytes but wrote ${bytes_written}')
+		}
+
+		return
+	}
+
+	if id * u32(entry_size) >= u32(lut.data.len) {
+		return error('Index out of bounds')
+	}
+
+	start := int(id) * entry_size
+	bytes := location.to_bytes()!
+
+	for i in 0 .. entry_size {
+		lut.data[start + i] = bytes[6 - entry_size + i] // Only use the required bytes based on keysize
+	}
+}
+
+// Method to delete an entry (set bytes to 0)
+fn (mut lut LookupTable) delete(x u32) ! {
+	entry_size := int(lut.keysize)
+
+	if lut.lookuppath.len > 0 {
+		// Check file size first
+		file_size := os.file_size(lut.get_data_file_path()!)
+		start_pos := x * entry_size
+
+		if start_pos + entry_size > file_size {
+			return error('Invalid delete in lut: ${lut.lookuppath}: ${start_pos + entry_size} would exceed file size ${file_size}')
+		}
+
+		// Write zeros directly to file for disk-based lookup
+		mut file := os.open_file(lut.get_data_file_path()!, 'r+')!
+		defer {
+			file.flush()
+			file.close()
+		}
+
+		zeros := []u8{len: entry_size, init: 0}
+		bytes_written := file.write_to(u64(start_pos), zeros)!
+		if bytes_written < entry_size {
+			return error('Incomplete delete: expected ${entry_size} bytes but wrote ${bytes_written}')
+		}
+		return
+	}
+
+	if x * u32(entry_size) >= u32(lut.data.len) {
+		return error('Index out of bounds')
+	}
+
+	start := int(x) * entry_size
+	for i in 0 .. entry_size {
+		lut.data[start + i] = 0
+	}
+}
+
+// Method to export the lookup table to a file
+fn (lut LookupTable) export_data(path string) !
{ + if lut.lookuppath.len > 0 { + // For disk-based lookup, copy both the main file and incremental value + os.cp(lut.get_data_file_path()!, os.join_path(path, data_file_name))! + if _ := lut.incremental { + os.cp(lut.get_inc_file_path()!, os.join_path(path, incremental_file_name))! + } + return + } + + os.write_file(os.join_path(path, data_file_name), lut.data.bytestr())! + if incremental := lut.incremental { + os.write_file(os.join_path(path, incremental_file_name), incremental.str())! + } +} + +// Method to export the table in a sparse format +fn (lut LookupTable) export_sparse(path string) ! { + mut output := []u8{} + entry_size := int(lut.keysize) + + if lut.lookuppath.len > 0 { + // For disk-based lookup, read the file in chunks + mut file := os.open(lut.get_data_file_path()!)! + defer { file.close() } + + file_size := os.file_size(lut.get_data_file_path()!) + mut buffer := []u8{len: entry_size} + mut pos := u32(0) + + for { + if i64(pos) * i64(entry_size) >= file_size { + break + } + + bytes_read := file.read(mut buffer)! + if bytes_read == 0 { + break + } + if bytes_read < entry_size { + break + } + + location := lut.location_new(buffer)! + if location.position != 0 || location.file_nr != 0 { + // Write position (4 bytes) + output << u8(pos & 0xff) + output << u8((pos >> 8) & 0xff) + output << u8((pos >> 16) & 0xff) + output << u8((pos >> 24) & 0xff) + + // Write value + output << buffer + } + pos++ + } + } else { + for i := u32(0); i < u32(lut.data.len / entry_size); i++ { + location := lut.get(i) or { continue } + if location.position != 0 || location.file_nr != 0 { + // Write position (4 bytes) + output << u8(i & 0xff) + output << u8((i >> 8) & 0xff) + output << u8((i >> 16) & 0xff) + output << u8((i >> 24) & 0xff) + + // Write value + bytes := location.to_bytes()! + output << bytes[6 - entry_size..] // Only write the required bytes based on keysize + } + } + } + os.write_file(os.join_path(path, data_file_name), output.bytestr())! + // Also export the incremental value + if incremental := lut.incremental { + os.write_file(os.join_path(path, incremental_file_name), incremental.str())! + } +} + +// Method to import a lookup table from a file +fn (mut lut LookupTable) import_data(path string) ! { + if lut.lookuppath.len > 0 { + // For disk-based lookup, copy both files + os.cp(os.join_path(path, data_file_name), lut.get_data_file_path()!)! + + if _ := lut.incremental { + os.cp(os.join_path(path, incremental_file_name), os.join_path(lut.lookuppath, + incremental_file_name))! + // Update the incremental value in memory + inc_str := os.read_file(os.join_path(path, incremental_file_name))! + println('inc_str: ${inc_str}') + lut.incremental = inc_str.u32() + } + return + } + + lut.data = os.read_bytes(os.join_path(path, data_file_name))! + if _ := lut.incremental { + // Import the incremental value + inc_str := os.read_file(os.join_path(path, incremental_file_name))! + lut.incremental = inc_str.u32() + } +} + +// Method to import a sparse lookup table +fn (mut lut LookupTable) import_sparse(path string) ! { + sparse_data := os.read_bytes(os.join_path(path, data_file_name))! 
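+	// sparse format (as written by export_sparse): repeated chunks of
+	// [4-byte little-endian index][keysize-byte location]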
+ entry_size := int(lut.keysize) + chunk_size := 4 + entry_size // 4 bytes for position + entry_size for value + + if sparse_data.len % chunk_size != 0 { + return error('Invalid sparse data format: data length must be multiple of ${chunk_size}') + } + + for i := 0; i < sparse_data.len; i += chunk_size { + // Read position from first 4 bytes + position := u32(sparse_data[i]) | (u32(sparse_data[i + 1]) << 8) | (u32(sparse_data[i + 2]) << 16) | (u32(sparse_data[ + i + 3]) << 24) + + // Read value bytes + value_bytes := sparse_data[i + 4..i + 4 + entry_size] + location := lut.location_new(value_bytes)! + + lut.set(position, location)! + } + + if _ := lut.incremental { + // Import the incremental value + inc_str := os.read_file(os.join_path(path, incremental_file_name))! + lut.incremental = inc_str.u32() + } +} + +fn (lut LookupTable) get_data_file_path() !string { + if lut.lookuppath.len == 0 { + return error('lookup table is memory based') + } + + return os.join_path(lut.lookuppath, data_file_name) +} + +fn (lut LookupTable) get_inc_file_path() !string { + _ := lut.incremental or { return error('incremental mode is disabled') } + + return os.join_path(lut.lookuppath, incremental_file_name) +} diff --git a/lib/data/ourdb/lookup_id_test.v b/lib/data/ourdb/lookup_id_test.v new file mode 100644 index 00000000..b7d151a1 --- /dev/null +++ b/lib/data/ourdb/lookup_id_test.v @@ -0,0 +1,159 @@ +module ourdb + +fn test_location_new_keysize_2() { + lt := LookupTable{ + keysize: 2 + } + // Test valid cases + loc1 := lt.location_new([u8(0), 1])! + assert loc1.position == 1 + assert loc1.file_nr == 0 + + loc2 := lt.location_new([u8(255), 255])! + assert loc2.position == 65535 + assert loc2.file_nr == 0 + + // Test padding + loc3 := lt.location_new([u8(1)])! + assert loc3.position == 1 + + // Test errors + if _ := lt.location_new([u8(1), 1, 1]) { + assert false, 'should fail on too many bytes' + } +} + +fn test_location_new_keysize_3() { + lt := LookupTable{ + keysize: 3 + } + // Test valid cases + loc1 := lt.location_new([u8(0), 1, 1])! + assert loc1.position == 257 + assert loc1.file_nr == 0 + + loc2 := lt.location_new([u8(255), 255, 255])! + assert loc2.position == 0xFFFFFF + assert loc2.file_nr == 0 + + // Test padding + loc3 := lt.location_new([u8(0), 1])! + assert loc3.position == 1 + + // Test errors + if _ := lt.location_new([u8(1), 1, 1, 1]) { + assert false, 'should fail on too many bytes' + } +} + +fn test_location_new_keysize_4() { + lt := LookupTable{ + keysize: 4 + } + // Test valid cases + loc1 := lt.location_new([u8(0), 0, 1, 1])! + assert loc1.position == 257 + assert loc1.file_nr == 0 + + loc2 := lt.location_new([u8(255), 255, 255, 255])! + assert loc2.position == 0xFFFFFFFF + assert loc2.file_nr == 0 + + // Test padding + loc3 := lt.location_new([u8(0), 0, 1])! + assert loc3.position == 1 + + // Test errors + if _ := lt.location_new([u8(1), 1, 1, 1, 1]) { + assert false, 'should fail on too many bytes' + } +} + +fn test_location_new_keysize_6() { + lt := LookupTable{ + keysize: 6 + } + // Test valid cases + loc1 := lt.location_new([u8(0), 1, 0, 0, 1, 1])! + assert loc1.file_nr == 1 + assert loc1.position == 257 + + loc2 := lt.location_new([u8(255), 255, 255, 255, 255, 255])! + assert loc2.file_nr == 0xFFFF + assert loc2.position == 0xFFFFFFFF + + // Test padding + loc3 := lt.location_new([u8(0), 1, 0, 0, 1])! 
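+	// 5 input bytes are left-padded to [0, 0, 1, 0, 0, 1]:
+	// file_nr = 0, position = (1 << 24) + 1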
+ assert loc3.file_nr == 0 + assert loc3.position == (1 << 24) + 1 + + // Test errors + if _ := lt.location_new([u8(1), 1, 1, 1, 1, 1, 1]) { + assert false, 'should fail on too many bytes' + } +} + +fn test_invalid_keysize() { + // Test invalid keysizes + invalid_sizes := [u8(0), 1, 5, 7, 8] + for size in invalid_sizes { + lt := LookupTable{ + keysize: size + } + if _ := lt.location_new([u8(0)]) { + assert false, 'should fail on invalid keysize ${size}' + } + } +} + +fn test_to_bytes() { + // Test keysize 2 + lt2 := LookupTable{ + keysize: 2 + } + loc1 := lt2.location_new([u8(1), 1])! + bytes1 := loc1.to_bytes()! + assert bytes1 == [u8(0), 0, 0, 0, 1, 1] + + // Test keysize 3 + lt3 := LookupTable{ + keysize: 3 + } + loc2 := lt3.location_new([u8(1), 1, 1])! + bytes2 := loc2.to_bytes()! + assert bytes2 == [u8(0), 0, 0, 1, 1, 1] + + // Test keysize 4 + lt4 := LookupTable{ + keysize: 4 + } + loc3 := lt4.location_new([u8(1), 1, 1, 1])! + bytes3 := loc3.to_bytes()! + assert bytes3 == [u8(0), 0, 1, 1, 1, 1] + + // Test keysize 6 + lt6 := LookupTable{ + keysize: 6 + } + loc4 := lt6.location_new([u8(1), 1, 1, 1, 1, 1])! + bytes4 := loc4.to_bytes()! + assert bytes4 == [u8(1), 1, 1, 1, 1, 1] +} + +fn test_u64() { + // Test keysize 2 + lt2 := LookupTable{ + keysize: 2 + } + loc1 := lt2.location_new([u8(1), 1])! + val1 := loc1.u64()! + assert val1 == 257 + + // Test keysize 6 with file_nr + lt6 := LookupTable{ + keysize: 6 + } + loc2 := lt6.location_new([u8(0), 1, 0, 0, 1, 1])! + val2 := loc2.u64()! + assert val2 == (u64(1) << 32) | 257 +} diff --git a/lib/data/ourdb/lookup_location.v b/lib/data/ourdb/lookup_location.v new file mode 100644 index 00000000..cbc2915f --- /dev/null +++ b/lib/data/ourdb/lookup_location.v @@ -0,0 +1,116 @@ +module ourdb + +// in lookuptable.keysize we specify what nr of bytes are we use as id +// they are encoded big_endian +// if + 2^32 then we know we store the data in multiple files, the most significant parts define the filenr, the others the id + +pub struct Location { +pub mut: + file_nr u16 + position u32 +} + +// keysize = 2, means we have 2^16 nr of records in 1 file = position +// keysize = 3, means we have 2^24 nr of records in 1 file +// keysize = 4, means we have 2^32 nr of records in 1 file +// keysize = 6, means we have 2^32 nr of records per file, and there can be maximum 2^16 nr of files +// check validity, keysize needs to define max position +fn (lookuptable LookupTable) location_new(b_ []u8) !Location { + mut new_location := Location{ + file_nr: 0 + position: 0 + } + + // First verify keysize is valid + if lookuptable.keysize !in [2, 3, 4, 6] { + return error('keysize must be 2,3,4 or 6') + } + + // Create padded b + mut b := []u8{len: int(lookuptable.keysize), init: 0} + start_idx := int(lookuptable.keysize) - b_.len + if start_idx < 0 { + return error('input bytes exceed keysize') + } + + for i := 0; i < b_.len; i++ { + b[start_idx + i] = b_[i] + } + + match lookuptable.keysize { + 2 { + // Only position, 2 bytes big endian + new_location.position = u32(b[0]) << 8 | u32(b[1]) + new_location.file_nr = 0 + } + 3 { + // Only position, 3 bytes big endian + new_location.position = u32(b[0]) << 16 | u32(b[1]) << 8 | u32(b[2]) + new_location.file_nr = 0 + } + 4 { + // Only position, 4 bytes big endian + new_location.position = u32(b[0]) << 24 | u32(b[1]) << 16 | u32(b[2]) << 8 | u32(b[3]) + new_location.file_nr = 0 + } + 6 { + // 2 bytes file_nr + 4 bytes position, all big endian + new_location.file_nr = u16(b[0]) << 8 | u16(b[1]) + new_location.position = 
u32(b[2]) << 24 | u32(b[3]) << 16 | u32(b[4]) << 8 | u32(b[5]) + } + else {} + } + + // Verify limits based on keysize + match lookuptable.keysize { + 2 { + if new_location.position > 0xFFFF { + return error('position exceeds max value for keysize=2 (max 65535)') + } + if new_location.file_nr != 0 { + return error('file_nr must be 0 for keysize=2') + } + } + 3 { + if new_location.position > 0xFFFFFF { + return error('position exceeds max value for keysize=3 (max 16777215)') + } + if new_location.file_nr != 0 { + return error('file_nr must be 0 for keysize=3') + } + } + 4 { + if new_location.file_nr != 0 { + return error('file_nr must be 0 for keysize=4') + } + } + 6 { + // For keysize 6: both file_nr and position can use their full range + // No additional checks needed as u16 and u32 already enforce limits + } + else {} + } + + return new_location +} + +fn (self Location) to_bytes() ![]u8 { + mut bytes := []u8{len: 6} + + // Put file_nr first (2 bytes) + bytes[0] = u8(self.file_nr >> 8) + bytes[1] = u8(self.file_nr) + + // Put position next (4 bytes) + bytes[2] = u8(self.position >> 24) + bytes[3] = u8(self.position >> 16) + bytes[4] = u8(self.position >> 8) + bytes[5] = u8(self.position) + + return bytes +} + +// Convert Location to u64, with file_nr as most significant (big endian) +fn (self Location) u64() !u64 { + return (u64(self.file_nr) << 32) | u64(self.position) +} diff --git a/lib/data/ourdb/lookup_location_test.v b/lib/data/ourdb/lookup_location_test.v new file mode 100644 index 00000000..ce2f7b4f --- /dev/null +++ b/lib/data/ourdb/lookup_location_test.v @@ -0,0 +1,179 @@ +module ourdb + +fn test_location_new_keysize_2() { + mut lt := LookupTable{ + keysize: 2 + } + + // Test small number + loc1 := lt.location_new([u8(0), 5])! + assert loc1.file_nr == 0 + assert loc1.position == 5 + + // Test medium number + loc2 := lt.location_new([u8(1), 244])! // 500 + assert loc2.file_nr == 0 + assert loc2.position == 500 + + // Test max allowed (0xFFFF = 65535) + loc3 := lt.location_new([u8(255), 255])! + assert loc3.file_nr == 0 + assert loc3.position == 65535 + + // Test error cases + if _ := lt.location_new([u8(1), 0, 0]) { + assert false, 'should error on too many bytes' + } +} + +fn test_location_new_keysize_3() { + mut lt := LookupTable{ + keysize: 3 + } + + // Test small number + loc1 := lt.location_new([u8(0), 0, 5])! + assert loc1.file_nr == 0 + assert loc1.position == 5 + + // Test medium number + loc2 := lt.location_new([u8(0), 1, 244])! // 500 + assert loc2.file_nr == 0 + assert loc2.position == 500 + + // Test larger number + loc3 := lt.location_new([u8(1), 0, 0])! // 65536 + assert loc3.file_nr == 0 + assert loc3.position == 65536 + + // Test max allowed (0xFFFFFF = 16777215) + loc4 := lt.location_new([u8(255), 255, 255])! + assert loc4.file_nr == 0 + assert loc4.position == 16777215 +} + +fn test_location_new_keysize_4() { + mut lt := LookupTable{ + keysize: 4 + } + + // Test small number + loc1 := lt.location_new([u8(0), 0, 0, 5])! + assert loc1.file_nr == 0 + assert loc1.position == 5 + + // Test medium number + loc2 := lt.location_new([u8(0), 0, 1, 244])! // 500 + assert loc2.file_nr == 0 + assert loc2.position == 500 + + // Test larger number + loc3 := lt.location_new([u8(0), 1, 0, 0])! // 65536 + assert loc3.file_nr == 0 + assert loc3.position == 65536 + + // Test max value + loc4 := lt.location_new([u8(255), 255, 255, 255])! 
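+	// 0xFFFFFFFF is the largest position a 4-byte key can encode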
+ assert loc4.file_nr == 0 + assert loc4.position == 0xFFFFFFFF +} + +fn test_location_new_keysize_6() { + mut lt := LookupTable{ + keysize: 6 + } + + // Test small numbers for both file_nr and position + loc1 := lt.location_new([u8(0), 1, 0, 0, 0, 5])! + assert loc1.file_nr == 1 + assert loc1.position == 5 + + // Test medium numbers + loc2 := lt.location_new([u8(0), 100, 0, 0, 1, 244])! + assert loc2.file_nr == 100 + assert loc2.position == 500 + + // Test max values + loc3 := lt.location_new([u8(255), 255, 255, 255, 255, 255])! + assert loc3.file_nr == 0xFFFF + assert loc3.position == 0xFFFFFFFF +} + +fn test_to_bytes() ! { + // Test small numbers + loc1 := Location{ + file_nr: 1 + position: 5 + } + bytes1 := loc1.to_bytes()! + assert bytes1 == [u8(0), 1, 0, 0, 0, 5] + + // Test medium numbers + loc2 := Location{ + file_nr: 100 + position: 500 + } + bytes2 := loc2.to_bytes()! + assert bytes2 == [u8(0), 100, 0, 0, 1, 244] + + // Test max values + loc3 := Location{ + file_nr: 0xFFFF + position: 0xFFFFFFFF + } + bytes3 := loc3.to_bytes()! + assert bytes3 == [u8(255), 255, 255, 255, 255, 255] +} + +fn test_u64() ! { + // Test small numbers + loc1 := Location{ + file_nr: 1 + position: 5 + } + val1 := loc1.u64()! + assert val1 == (u64(1) << 32) | 5 + + // Test medium numbers + loc2 := Location{ + file_nr: 100 + position: 500 + } + val2 := loc2.u64()! + assert val2 == (u64(100) << 32) | 500 + + // Test max values + loc3 := Location{ + file_nr: 0xFFFF + position: 0xFFFFFFFF + } + val3 := loc3.u64()! + assert val3 == (u64(0xFFFF) << 32) | 0xFFFFFFFF +} + +fn test_roundtrip() ! { + mut lt := LookupTable{ + keysize: 6 + } + + // Test various combinations + locations := [ + Location{0, 5}, + Location{1, 500}, + Location{100, 65536}, + Location{0xFFFF, 0xFFFFFFFF}, + ] + + for original in locations { + // Test to_bytes() -> location_new() roundtrip + bytes := original.to_bytes()! + reconstructed := lt.location_new(bytes)! + assert reconstructed.file_nr == original.file_nr + assert reconstructed.position == original.position + + // Test u64() conversion roundtrip + u64_val := original.u64()! + assert (u64_val >> 32) == original.file_nr + assert (u64_val & 0xFFFFFFFF) == original.position + } +} diff --git a/lib/data/ourdb/lookup_test.v b/lib/data/ourdb/lookup_test.v new file mode 100644 index 00000000..cf1d2b96 --- /dev/null +++ b/lib/data/ourdb/lookup_test.v @@ -0,0 +1,410 @@ +module ourdb + +import os +import rand + +const test_dir = '/tmp/lookuptest' + +fn testsuite_begin() { + if os.exists(test_dir) { + os.rmdir_all(test_dir)! + } + os.mkdir_all(test_dir)! +} + +fn testsuite_end() { + if os.exists(test_dir) { + os.rmdir_all(test_dir)! + } +} + +fn test_incremental() { + config := LookupConfig{ + size: 100 + keysize: 2 + } + mut lut := new_lookup(config)! + + assert lut.get_next_id()! == 0 + + lut.set(0, Location{ position: 23, file_nr: 0 })! + assert lut.get_next_id()! == 1 + + lut.set(1, Location{ position: 2, file_nr: 3 })! + assert lut.get_next_id()! == 2 +} + +fn test_new_lookup() { + // Test memory-based lookup + config := LookupConfig{ + size: 100 + keysize: 2 + } + lut := new_lookup(config)! + assert lut.keysize == 2 + assert lut.data.len == 200 // size * keysize + assert lut.lookuppath == '' + + // Test disk-based lookup + disk_config := LookupConfig{ + size: 100 + keysize: 2 + lookuppath: os.join_path(test_dir, 'test.lut') + } + disk_lut := new_lookup(disk_config)! 
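+	// disk mode eagerly allocates the data file with size * keysize zero bytes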
+ assert disk_lut.keysize == 2 + assert disk_lut.lookuppath == os.join_path(test_dir, 'test.lut') + assert os.exists(os.join_path(test_dir, 'test.lut')) + + // Test invalid keysize + invalid_config := LookupConfig{ + size: 100 + keysize: 7 + } + if _ := new_lookup(invalid_config) { + assert false, 'should fail on invalid keysize' + } +} + +fn test_set_get() { + config := LookupConfig{ + size: 100 + keysize: 2 + incremental_mode: true + } + + mut lut := new_lookup(config)! + + // Test setting and getting values + loc1 := Location{ + position: 1234 + file_nr: 0 + } + + id := lut.get_next_id()! + lut.set(id, loc1)! + + result1 := lut.get(id)! + assert result1.position == 1234 + assert result1.file_nr == 0 + + // Test setting with specific ID + loc2 := Location{ + position: 5678 + file_nr: 0 + } + + id2 := lut.get_next_id()! + lut.set(id2, loc2)! + assert id2 == 1 // Should return the specified ID + result2 := lut.get(id2)! + assert result2.position == 5678 + assert result2.file_nr == 0 + + // Test out of bounds + if _ := lut.get(100) { + assert false, 'should fail on out of bounds' + } + if _ := lut.set(100, loc1) { + assert false, 'should fail on out of bounds' + } +} + +fn test_disk_set_get() { + config := LookupConfig{ + size: 100 + keysize: 2 + lookuppath: os.join_path(test_dir, rand.string(4)) + } + mut lut := new_lookup(config)! + + // Test setting and getting values on disk + loc1 := Location{ + position: 1234 + file_nr: 0 + } + + id := lut.get_next_id()! + lut.set(id, loc1)! + assert id == 0 // First auto-increment should be 1 + result1 := lut.get(id)! + assert result1.position == 1234 + assert result1.file_nr == 0 + + // Test persistence by creating new instance + mut lut2 := new_lookup(config)! + result2 := lut2.get(id)! + assert result2.position == 1234 + assert result2.file_nr == 0 + + // Test next auto-increment continues from previous value + loc2 := Location{ + position: 5678 + file_nr: 0 + } + + id2 := lut2.get_next_id()! + lut2.set(id2, loc2)! + assert id2 == 1 // Should increment from previous value +} + +fn test_delete() { + config := LookupConfig{ + size: 100 + keysize: 2 + } + mut lut := new_lookup(config)! + + // Set and then delete a value + loc1 := Location{ + position: 1234 + file_nr: 0 + } + + id := lut.get_next_id()! + lut.set(id, loc1)! + assert id == 0 + + lut.delete(id)! + result := lut.get(id)! + assert result.position == 0 + assert result.file_nr == 0 + + // Test out of bounds + if _ := lut.delete(100) { + assert false, 'should fail on out of bounds' + } +} + +fn test_export_import() { + config := LookupConfig{ + size: 100 + keysize: 2 + } + mut lut := new_lookup(config)! + + // Set some values + loc1 := Location{ + position: 1234 + file_nr: 0 + } + + id1 := lut.get_next_id()! + lut.set(id1, loc1)! + assert id1 == 0 + + loc2 := Location{ + position: 5678 + file_nr: 0 + } + id2 := lut.get_next_id()! + lut.set(id2, loc2)! + assert id2 == 1 + + // Export and then import to new table + export_path := os.join_path(test_dir, 'export.lut') + os.mkdir(export_path)! + + lut.export_data(export_path)! + mut lut2 := new_lookup(config)! + lut2.import_data(export_path)! + + // Verify values + result1 := lut2.get(id1)! + assert result1.position == 1234 + assert result1.file_nr == 0 + result2 := lut2.get(id2)! + assert result2.position == 5678 + assert result2.file_nr == 0 + + // Verify incremental was imported + assert lut2.incremental! 
== 2 +} + +fn test_export_import_sparse() { + config := LookupConfig{ + size: 100 + keysize: 2 + incremental_mode: false + } + + mut lut := new_lookup(config)! + + // Set some values with gaps + loc1 := Location{ + position: 1234 + file_nr: 0 + } + + id1 := u32(0) + lut.set(id1, loc1)! + + loc2 := Location{ + position: 5678 + file_nr: 0 + } + id2 := u32(50) + lut.set(id2, loc2)! // Create a gap + + // Export and import sparse + sparse_path := os.join_path(test_dir, 'sparse.lut') + os.mkdir(sparse_path)! + + lut.export_sparse(sparse_path)! + mut lut2 := new_lookup(config)! + lut2.import_sparse(sparse_path)! + + // Verify values + result1 := lut2.get(id1)! + assert result1.position == 1234 + assert result1.file_nr == 0 + result2 := lut2.get(id2)! + assert result2.position == 5678 + assert result2.file_nr == 0 +} + +fn test_incremental_memory() { + config := LookupConfig{ + size: 100 + keysize: 2 + } + mut lut := new_lookup(config)! + + // Initial value should be 0 + if incremental := lut.incremental { + assert incremental == 0 + } else { + assert false, 'incremental should have a value' + } + + // Set at x=0 should increment and return new ID + loc1 := Location{ + position: 1234 + file_nr: 0 + } + id1 := lut.get_next_id()! + lut.set(id1, loc1)! + assert id1 == 0 + assert lut.incremental! == 1 + + // Set at x=1 should not increment and return specified ID + loc2 := Location{ + position: 5678 + file_nr: 0 + } + id2 := lut.get_next_id()! + lut.set(id2, loc2)! + assert id2 == 1 + assert lut.incremental! == 2 + + // Another set at x=0 should increment and return new ID + loc3 := Location{ + position: 9012 + file_nr: 0 + } + + id3 := lut.get_next_id()! + lut.set(id3, loc3)! + assert id3 == 2 + assert lut.incremental! == 3 + + // Test persistence through export/import + export_path := os.join_path(test_dir, 'inc_export.lut') + os.mkdir(export_path)! + + lut.export_data(export_path)! + + mut lut2 := new_lookup(config)! + lut2.import_data(export_path)! + assert lut2.incremental! == 3 + + // Further operations should continue from last value + loc4 := Location{ + position: 3456 + file_nr: 0 + } + id4 := lut2.get_next_id()! + lut2.set(id4, loc4)! + assert id4 == 3 + assert lut2.incremental! == 4 +} + +fn test_incremental_disk() { + config := LookupConfig{ + size: 100 + keysize: 2 + lookuppath: os.join_path(test_dir, 'inc_test.lut') + } + mut lut := new_lookup(config)! + + // Initial value should be 0 + assert lut.incremental! == 0 + assert os.exists(lut.get_inc_file_path()!) + inc_content := os.read_file(lut.get_inc_file_path()!)! + assert inc_content == '0' + + // Set at x=0 should increment + loc1 := Location{ + position: 1234 + file_nr: 0 + } + id1 := lut.get_next_id()! + lut.set(id1, loc1)! + assert id1 == 0 + assert lut.incremental! == 1 + inc_content1 := os.read_file(lut.get_inc_file_path()!)! + assert inc_content1 == '1' + + // Set at x=1 should not increment + loc2 := Location{ + position: 5678 + file_nr: 0 + } + id2 := lut.get_next_id()! + lut.set(id2, loc2)! + assert id2 == 1 + assert lut.incremental! == 2 + inc_content2 := os.read_file(lut.get_inc_file_path()!)! + assert inc_content2 == '2' + + // Test persistence by creating new instance + mut lut2 := new_lookup(config)! + assert lut2.incremental! == 2 + + // Further operations at x=0 should continue from last value + loc3 := Location{ + position: 9012 + file_nr: 0 + } + id3 := lut2.get_next_id()! + lut2.set(id3, loc3)! + assert id3 == 2 + assert lut2.incremental! == 3 + inc_content3 := os.read_file(lut.get_inc_file_path()!)! 
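+	// the .inc file on disk mirrors the in-memory counter after every insert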
+ assert inc_content3 == '3' +} + +fn test_multiple_sets() { + config := LookupConfig{ + size: 100 + keysize: 2 + } + mut lut := new_lookup(config)! + + // Set at x=0 five times + mut ids := []u32{} + for i in 0 .. 5 { + loc := Location{ + position: 1000 * (i + 1) + file_nr: 0 + } + id := lut.get_next_id()! + lut.set(id, loc)! + assert id == i + ids << id + } + + // Verify incremental is 5 + assert lut.incremental! == 5 + assert ids == [u32(0), 1, 2, 3, 4] +} diff --git a/lib/data/ourtime/ourtime.v b/lib/data/ourtime/ourtime.v new file mode 100644 index 00000000..a4b4c846 --- /dev/null +++ b/lib/data/ourtime/ourtime.v @@ -0,0 +1,135 @@ +module ourtime + +import time +import freeflowuniverse.herolib.ui.console + +pub struct OurTime { +pub mut: + unixt i64 +} + +// Get Expiration object from time string input . +// input can be either relative or absolute . +// if input is empty then is now +// ``` +// ## Relative time +// #### time periods: +// - s : second +// - h : hour +// - d : day +// - w : week +// - M : month +// - Q : quarter +// - Y : year +// 0 means right now +// input string example: "+1w +2d -4h" +// +// ## Absolute time +// inputs must be of the form: "YYYY-MM-DD HH:mm:ss" or "YYYY-MM-DD" +// the time can be HH:mm:ss or HH:mm +// inputs also supported are: "DD-MM-YYYY" but then the YYYY needs to be 4 chars +// for date we also support / in stead of - +// input string examples: +// +//'2022-12-5 20:14:35' +//'2022-12-5' - sets hours, mins, seconds to 00 +// +// ``` +pub fn new(txt_ string) !OurTime { + if txt_.trim_space() == '' { + mut ot := OurTime{} + ot.now() + return ot + } + unixt := parse(txt_)! + return OurTime{ + unixt: unixt + } +} + +pub fn new_from_epoch(ut u64) OurTime { + mut ot := OurTime{ + unixt: ut + } + return ot +} + +pub fn now() OurTime { + mut ot := OurTime{} + ot.now() + return ot +} + +// will check if the time is empty, if yes then will fill it in +pub fn (mut ot OurTime) check() { + if ot.unixt == 0 { + ot.now() + } +} + +// print the wiki formatting for time +pub fn (ot OurTime) md() string { + return ot.time().format() +} + +// format as a usable key . +// "YYYY_MM_DD_HH_mm_ss" +pub fn (ot OurTime) key() string { + t := ot.time() + mut o := t.ymmdd() + '_' + t.hhmmss() + o = o.replace('-', '_') + o = o.replace(':', '_') + return o +} + +// returns a date-time string in "YYYY-MM-DD HH:mm" format (24h). +pub fn (ot OurTime) str() string { + return ot.time().format() +} + +// returns a date string in "YYYY-MM-DD" format +pub fn (ot OurTime) day() string { + return ot.time().ymmdd() +} + +// returns as epoch (seconds) +pub fn (ot OurTime) int() int { + return int(ot.time().unix()) +} + +// set ourtime to now +pub fn (mut t OurTime) now() { + t.unixt = i64(time.now().unix()) +} + +// get time from vlang +pub fn (t OurTime) time() time.Time { + return time.unix(i64(t.unixt)) +} + +// get time from vlang +pub fn (t OurTime) unix() i64 { + return t.unixt +} + +pub fn (t OurTime) empty() bool { + return t.unixt == 0 +} + +// move the time, e.g. +1h means we go 1 h further +// ## Relative time +// #### time periods: +// - s : second +// - h : hour +// - d : day +// - w : week +// - M : month +// - Q : quarter +// - Y : year +// input string example: "+1w +2d -4h" +// +pub fn (mut t OurTime) warp(warp string) ! { + r := relative_sec(warp)! + console.print_debug(r.str()) + t.unixt += relative_sec(warp)! 
+}
diff --git a/lib/data/ourtime/parser.v b/lib/data/ourtime/parser.v
new file mode 100644
index 00000000..122159c9
--- /dev/null
+++ b/lib/data/ourtime/parser.v
@@ -0,0 +1,162 @@
+module ourtime
+
+import time
+
+// const (
+// 	numbers = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0']
+// 	letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p',
+// 	'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
+
+// 	months = {
+// 		'january':   1
+// 		'february':  2
+// 		'march':     3
+// 		'april':     4
+// 		'may':       5
+// 		'june':      6
+// 		'july':      7
+// 		'august':    8
+// 		'september': 9
+// 		'october':   10
+// 		'november':  11
+// 		'december':  12
+// 	}
+// )
+
+fn parse(timestr string) !i64 {
+	trimmed := timestr.trim_space()
+	if trimmed == '' {
+		n := now()
+		return n.unix()
+	}
+	mut relative_bool := false
+	if trimmed.starts_with('+') || trimmed.starts_with('-') {
+		relative_bool = true
+	}
+
+	if relative_bool {
+		time_unix := get_unix_from_relative(trimmed) or {
+			return error('Failed to get unix from relative time: ${err}')
+		}
+		return time_unix
+	}
+	time_unix := get_unix_from_absolute(trimmed) or {
+		return error('Failed to get unix from absolute time: ${err}')
+	}
+	return time_unix
+}
+
+// relative_sec converts a relative expression like "+1w +2d -4h" into a signed offset in seconds
+fn relative_sec(timestr string) !i64 {
+	// removes all spaces from the string
+	mut full_exp := timestr.replace(' ', '')
+
+	// an empty input or a plain "0" means no offset
+	if full_exp == '' || full_exp.trim(' ') == '0' {
+		return 0
+	}
+
+	// put a marker before each sign so the string can be split into periods
+	full_exp = full_exp.replace('+', '£+')
+	full_exp = full_exp.replace('-', '£-')
+	// create an array of periods
+	mut exps := full_exp.split_any('£')
+	exps = exps.filter(it.len > 0)
+	mut total := 0
+
+	for mut exp in exps {
+		mut mult := 0
+		if exp.ends_with('s') {
+			mult = 1
+		} else if exp.ends_with('m') {
+			mult = 60
+		} else if exp.ends_with('h') {
+			mult = 60 * 60
+		} else if exp.ends_with('d') {
+			mult = 60 * 60 * 24
+		} else if exp.ends_with('w') {
+			mult = 60 * 60 * 24 * 7
+		} else if exp.ends_with('M') {
+			mult = 60 * 60 * 24 * 30
+		} else if exp.ends_with('Q') {
+			mult = 60 * 60 * 24 * 30 * 3
+		} else if exp.ends_with('Y') {
+			mult = 60 * 60 * 24 * 365
+		} else {
+			return error('could not parse time suffix for: ${exp}')
+		}
+		if exp.starts_with('-') {
+			mult *= -1
+		}
+		// strip the sign and the period suffix
+		exp = exp[1..(exp.len - 1)]
+		// multiplies the value by the multiplier
+		exp_int := exp.int() * mult
+		total += exp_int
+	}
+	return total
+}
+
+fn get_unix_from_relative(timestr string) !i64 {
+	r := relative_sec(timestr)!
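+	// r is a signed offset in seconds; add it to the current epoch to get the target time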
+ time_unix := i64(r) + time.now().unix() + return time_unix +} + +pub fn get_unix_from_absolute(timestr_ string) !i64 { + timestr := timestr_.trim_space() + split_time_hour := timestr.split(' ') + if split_time_hour.len > 2 { + return error('format of date/time not correct: ${timestr_}') + } + mut datepart := '' + mut timepart := '' + if split_time_hour.len == 2 { + // there is a date and time part + datepart = split_time_hour[0] + timepart = split_time_hour[1] + } else if split_time_hour.len == 1 { + datepart = split_time_hour[0] + } else { + return error("format of date/time not correct: '${timestr_}'") + } + datepart = datepart.replace('/', '-') + if timepart.contains('-') || timepart.contains('/') { + return error("format of date/time not correct, no - or / in time: '${timestr_}'") + } + + split := datepart.split('-') + if split.len != 3 { + return error("unrecognized date format, time must either be YYYY/MM/DD or DD/MM/YYYY, or : in stead of /. Input was:'${timestr_}'") + } + if split[2].len == 4 { + datepart = split.reverse().join('-') + } else if !(split[0].len == 4) { + return error("unrecognized time format, time must either be YYYY/MM/DD or DD/MM/YYYY, or : in stead of /. Input was:'${timestr_}'") + } + + timparts := timepart.split(':') + if timparts.len > 3 { + return error("format of date/time not correct, in time part: '${timepart}'") + } + + if timparts.len == 2 { + timepart = '${timepart}:00' + } else if timparts.len == 1 { + if timepart.len == 0 { + timepart = '00:00:00' + } else { + timepart = '${timepart}:00:00' + } + } + + full_string := '${datepart} ${timepart}' + + time_struct := time.parse(full_string) or { + return error("could not parse date/time string '${timestr_}': ${err}") + } + return time_struct.unix() +} diff --git a/lib/data/ourtime/readme.md b/lib/data/ourtime/readme.md new file mode 100644 index 00000000..f53a9347 --- /dev/null +++ b/lib/data/ourtime/readme.md @@ -0,0 +1,186 @@ +# OurTime Module + +The OurTime module provides a flexible and user-friendly way to work with time in V. It supports both relative and absolute time formats, making it easy to handle various time-related operations. + +## Features + +- Create time objects from various string formats +- Support for relative time expressions +- Support for absolute time formats +- Unix timestamp handling +- Time formatting utilities +- Time warping capabilities + +## Usage + +### Basic Usage + +```v +import freeflowuniverse.herolib.data.ourtime + +// Create time object for current time +mut t := ourtime.now() + +// Create from string +t2 := ourtime.new('2022-12-05 20:14:35')! + +// Get formatted string +println(t2.str()) // Output: 2022-12-05 20:14 + +// Get unix timestamp +println(t2.unix()) // Output: 1670271275 +``` + +### Time Formats + +#### Relative Time Format + +Relative time expressions use the following period indicators: +- `s`: seconds +- `h`: hours +- `d`: days +- `w`: weeks +- `M`: months +- `Q`: quarters +- `Y`: years + +Examples: +```v +// Create time object with relative time +mut t := ourtime.new('+1w +2d -4h')! // 1 week forward, 2 days forward, 4 hours back + +// Warp existing time object +mut t2 := ourtime.now() +t2.warp('+1h')! // Move 1 hour forward +``` + +#### Absolute Time Format + +Supported date formats: +- `YYYY-MM-DD HH:mm:ss` +- `YYYY-MM-DD HH:mm` +- `YYYY-MM-DD` +- `DD-MM-YYYY` (YYYY must be 4 digits) +- Also supports '/' instead of '-' for dates + +Examples: +```v +// Various absolute time formats +t1 := ourtime.new('2022-12-05 20:14:35')! 
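+println(t1.str()) // 2022-12-05 20:14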
+t2 := ourtime.new('2022-12-05')! // Sets time to 00:00:00 +t3 := ourtime.new('05-12-2022')! // DD-MM-YYYY format +``` + +### Methods + +#### Creation Methods + +```v +// Create for current time +now := ourtime.now() + +// Create from string +t := ourtime.new('2022-12-05 20:14:35')! + +// Create from unix timestamp +t2 := ourtime.new_from_epoch(1670271275) +``` + +#### Formatting Methods + +```v +mut t := ourtime.now() + +// Get as YYYY-MM-DD HH:mm format +println(t.str()) + +// Get as YYYY-MM-DD format +println(t.day()) + +// Get as formatted key (YYYY_MM_DD_HH_mm_ss) +println(t.key()) + +// Get as markdown formatted string +println(t.md()) +``` + +#### Time Operations + +```v +mut t := ourtime.now() + +// Move time forward or backward +t.warp('+1h')! // 1 hour forward +t.warp('-30m')! // 30 minutes backward +t.warp('+1w +2d -4h')! // Complex time warp + +// Get unix timestamp +unix := t.unix() + +// Get as integer +i := t.int() + +// Check if time is empty (zero) +is_empty := t.empty() +``` + +## Examples + +### Working with Relative Time + +```v +mut t := ourtime.now() + +// Add time periods +t.warp('+1w')! // Add 1 week +t.warp('+2d')! // Add 2 days +t.warp('-4h')! // Subtract 4 hours + +// Complex time warping +t.warp('+1Y -2Q +2M +4h -60s')! // Add 1 year, subtract 2 quarters, add 2 months, add 4 hours, subtract 60 seconds +``` + +### Working with Absolute Time + +```v +// Create time from absolute format +t1 := ourtime.new('2022-12-05 20:14:35')! +println(t1.str()) // 2022-12-05 20:14 + +// Create time from date only +t2 := ourtime.new('2022-12-05')! +println(t2.str()) // 2022-12-05 00:00 + +// Get different formats +println(t1.day()) // 2022-12-05 +println(t1.key()) // 2022_12_05_20_14_35 +``` + +### Time Comparisons and Checks + +```v +mut t := ourtime.now() + +// Check if time is empty +if t.empty() { + t.now() // Set to current time if empty +} + +// Get unix timestamp for comparisons +unix := t.unix() +``` + +## Error Handling + +All time parsing operations that can fail return a Result type, so they should be called with `!` or handled with `or` blocks: + +```v +// Using ! operator (panics on error) +t1 := ourtime.new('2022-12-05')! 
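+println(t1.day()) // 2022-12-05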
+ +// Using or block for error handling +t2 := ourtime.new('invalid-date') or { + println('Error parsing date: ${err}') + ourtime.now() // fallback to current time +} +``` diff --git a/lib/data/ourtime/timetools_test.v b/lib/data/ourtime/timetools_test.v new file mode 100644 index 00000000..26564f9b --- /dev/null +++ b/lib/data/ourtime/timetools_test.v @@ -0,0 +1,153 @@ +module ourtime + +import freeflowuniverse.herolib.data.ourtime +import time + +// TODO: need to update the tests + +fn check_input(input_string string, seconds int) { + nnow := time.now().unix() + thetime := parse(input_string) or { panic('cannot get expiration for ${input_string}') } + assert thetime == (nnow + seconds), 'expiration was incorrect for ${input_string}' +} + +// check every period +fn test_every_period() { + input_strings := { + '+5s': 5 + '+3m': 180 + '+2h': 7200 + '+1d': 86_400 + '+1w': 604_800 + '+1M': 2_592_000 + '+1Q': 7_776_000 + '+1Y': 31_536_000 + } + + for key, value in input_strings { + check_input(key, value) + } +} + +// check multiple periods input +fn test_combined_periods() { + input_strings := { + '+5s +1h +1d': 90_005 + '+1h +2s +1Y': 31_539_602 + } + + for key, value in input_strings { + check_input(key, value) + } +} + +// check negative inputs +fn test_negative_periods() { + input_strings := { + '-15s': -15 + '-5m': -300 + '-2h': -7200 + '-1d': -86_400 + '-1w': -604_800 + '-1M': -2_592_000 + '-1Q': -7_776_000 + '-1Y': -31_536_000 + } + + for key, value in input_strings { + check_input(key, value) + } +} + +// check positive and negative combinations +fn test_combined_signs() { + input_strings := { + '+1h -10s': 3_590 + '+1d -2h': 79_200 + '+1Y -2Q +2M +4h -60s': 21_182_340 + } + + for key, value in input_strings { + check_input(key, value) + } +} + +// check varied input styles +fn test_input_variations() { + input_strings := { + ' +1s ': 1 + ' - 1 s ': -1 + '+ 1s-1h ': -3599 + '- 1s+ 1 h': 3599 + } + + for key, value in input_strings { + check_input(key, value) + } +} + +// check that standard formats can be inputted +fn test_absolute_time() { + input_strings := { + '2022-12-5 20:14:35': 1670271275 + '2022-12-5': 1670198400 + // '2022-12': 1669842000 // Should be the beginning of december + // '2022': 1640984400 // should be beginning of 2022 + } + for key, value in input_strings { + thetime := new(key) or { panic('cannot get expiration for ${key}') } + assert thetime.unix() == value, 'expiration was incorrect for ${key}' + } +} + +fn test_from_epoch() { + mut t := new_from_epoch(1670271275) + assert t.str() == '2022-12-05 20:14' +} + +// NEXT: need some better test to see if time parsing works, more full tests + +fn test_parse_date() { + input_strings := { + '12/01/1999': 916099200 + '2000/09/09': 968457600 + } + + for key, value in input_strings { + test_value := new(key) or { panic('parse_date failed for ${key}, with error ${err}') } + + assert test_value.unix() == value + } +} + +// fn test_parse_time() { +// input_strings := { +// '12:20': { +// 'hour': 12 +// 'minute': 20 +// } +// '15;30': { +// 'hour': 15 +// 'minute': 30 +// } +// '12:30pm': { +// 'hour': 12 +// 'minute': 30 +// } +// '3pm': { +// 'hour': 15 +// 'minute': 0 +// } +// '8.40 pm': { +// 'hour': 20 +// 'minute': 40 +// } +// } + +// for key, value in input_strings { +// test_value := parse(key) or { +// panic('parse_time failed for ${key}, with error ${err}') +// } +// // assert test_value == value, 'hour, minute was incorrect for ${key}' +// } +// } diff --git a/lib/data/paramsparser/get_params.v 
b/lib/data/paramsparser/get_params.v new file mode 100644 index 00000000..91523646 --- /dev/null +++ b/lib/data/paramsparser/get_params.v @@ -0,0 +1,8 @@ +module paramsparser + +// Looks for a string of params in the parameters. If it doesn't exist this function will return an error. +// Furthermore an error will be returned if the params is not properly formatted +pub fn (params &Params) get_params(key string) !Params { + mut valuestr := params.get(key)! + return parse(valuestr)! +} diff --git a/lib/data/paramsparser/jsonschema.v b/lib/data/paramsparser/jsonschema.v new file mode 100644 index 00000000..83cfd2da --- /dev/null +++ b/lib/data/paramsparser/jsonschema.v @@ -0,0 +1,25 @@ +module paramsparser + +// import freeflowuniverse.herolib.data.jsonschema + +// pub struct ParamsPub { +// pub mut: +// params []Param +// args []string //are commands without key/val, best not to use +// } + +// pub struct ParamPub { +// pub: +// key string +// value string +// } + +// pub fn (mut params Params) json_schema() jsonschema.Schema { +// return jsonschema.Schema { + +// } +// } + +// pub fn json_import(data string) !Params { +// return json.decode(Params, data) +// } diff --git a/lib/data/paramsparser/params.v b/lib/data/paramsparser/params.v new file mode 100644 index 00000000..e7e33fbe --- /dev/null +++ b/lib/data/paramsparser/params.v @@ -0,0 +1,144 @@ +module paramsparser + +@[heap] +pub struct Params { +pub mut: + params []Param + args []string + comments []string +} + +pub struct Param { +pub mut: + key string + value string + comment string +} + +// get params from txt, same as parse() +pub fn new(txt string) !Params { + return parse(txt)! +} + +pub fn new_from_dict(kwargs map[string]string) !Params { + mut p := Params{} + for key, val in kwargs { + p.set(key, val) + } + return p +} + +pub fn new_params() Params { + return Params{} +} + +pub fn (mut params Params) delete(key string) { + key2 := key.to_lower().trim_space() + if params.exists(key) { + mut params_out := []Param{} + for p in params.params { + if p.key != key2 { + params_out << p + } + } + params.params = params_out + } +} + +pub fn (mut params Params) set(key string, value string) { + key2 := key.to_lower().trim_space().trim_left('/') + params.delete(key2) + params.params << Param{ + key: key2 + value: str_normalize(value) + } +} + +pub fn (mut params Params) set_with_comment(key string, value string, comment string) { + key2 := key.to_lower().trim_space().trim_left('/') + params.delete(key2) + params.params << Param{ + key: key2 + value: str_normalize(value) + comment: comment_normalize(comment) + } +} + +pub fn (mut params Params) delete_arg(key string) { + key2 := key.to_lower().trim_space() + if params.exists_arg(key2) { + params.args.delete(params.args.index(key2)) + } +} + +pub fn (mut params Params) set_arg(value string) { + mut value2 := value.trim(" '").trim_left('/') + value2 = value2.replace('<