This commit is contained in:
2024-12-25 09:23:31 +01:00
parent 01ca5897db
commit 4e030b794d
306 changed files with 35071 additions and 22 deletions

View File

@@ -5,12 +5,11 @@ a smaller version of crystallib with only the items we need for hero
## automated install
```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/main/install_v.sh' > /tmp/install_v.sh
bash /tmp/install_v.sh --analyzer --herolib
```
## to install v
### details
```bash
@@ -18,7 +17,7 @@ a smaller version of crystallib with only the items we need for hero
V & HeroLib Installer Script
Usage: /Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh [options]
Usage: ~/code/github/freeflowuniverse/herolib/install_v.sh [options]
Options:
-h, --help Show this help message
@@ -28,12 +27,12 @@ Options:
--herolib Install our herolib
Examples:
/Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh
/Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh --reset
/Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh --remove
/Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh --analyzer
/Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh --herolib
/Users/despiegk1/code/github/freeflowuniverse/herolib/install_v.sh --reset --analyzer # Fresh install of both
~/code/github/freeflowuniverse/herolib/install_v.sh
~/code/github/freeflowuniverse/herolib/install_v.sh --reset
~/code/github/freeflowuniverse/herolib/install_v.sh --remove
~/code/github/freeflowuniverse/herolib/install_v.sh --analyzer
~/code/github/freeflowuniverse/herolib/install_v.sh --herolib
~/code/github/freeflowuniverse/herolib/install_v.sh --reset --analyzer # Fresh install of both
```

View File

@@ -3,6 +3,34 @@
import os
import flag
// addtoscript ensures `toadd` is present exactly once in the user's shell
// rc file (~/.zshrc preferred, falling back to ~/.bashrc).
// Any existing line containing `tofind` is removed first, so repeated runs
// do not accumulate duplicate aliases.
fn addtoscript(tofind string, toadd string) ! {
	home_dir := os.home_dir()
	mut rc_file := '${home_dir}/.zshrc'
	if !os.exists(rc_file) {
		rc_file = '${home_dir}/.bashrc'
		if !os.exists(rc_file) {
			return error('No .zshrc or .bashrc found in home directory')
		}
	}
	// read once; no `mut` needed since the content is never reassigned
	content := os.read_file(rc_file)!
	// drop any previous line matching tofind, then append the new entry
	mut new_lines := content.split('\n').filter(!it.contains(tofind))
	new_lines << toadd
	os.write_file(rc_file, new_lines.join('\n'))!
}
vroot := @VROOT
abs_dir_of_script := dir(@FILE)
@@ -18,8 +46,15 @@ os.mkdir_all('${os.home_dir()}/.vmodules/freeflowuniverse') or {
}
// Create new symlinks
os.symlink('${abs_dir_of_script}/herolib', '${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {
os.symlink('${abs_dir_of_script}/lib', '${os.home_dir()}/.vmodules/freeflowuniverse/herolib') or {
panic('Failed to create herolib symlink: ${err}')
}
println('Herolib installation completed successfully!')
// Add vtest alias
addtoscript('vtest ', 'alias vtest \'v -stats -enable-globals -n -w -gc none -no-retry-compilation -cc tcc test\' %') or {
eprintln('Failed to add vtest alias: ${err}')
}
println('Added vtest alias to shell configuration')

View File

@@ -0,0 +1,8 @@
module httpconnection
import encoding.base64
// basic_auth adds an HTTP Basic Authorization header to the connection's
// default headers, built as base64("username:password").
pub fn (mut conn HTTPConnection) basic_auth(username string, password string) {
	token := base64.encode_str('${username}:${password}')
	conn.default_header.add(.authorization, 'Basic ${token}')
}

View File

@@ -0,0 +1,100 @@
module httpconnection
import crypto.md5
import json
import net.http { Method }
// https://cassiomolin.com/2016/09/09/which-http-status-codes-are-cacheable/
const default_cacheable_codes = [200, 203, 204, 206, 300, 404, 405, 410, 414, 501]
const unsafe_http_methods = [Method.put, .patch, .post, .delete]
// CacheConfig controls how HTTP responses are cached in redis.
pub struct CacheConfig {
pub mut:
	key               string // as used to identity in redis
	allowable_methods []Method = [.get, .head] // only these methods may be cached
	allowable_codes   []int    = default_cacheable_codes // only these response codes may be cached
	disable           bool     = true // default cache is not working
	expire_after      int      = 3600 // default expire_after is 1h
	match_headers     bool // cache the request header to be matched later
}
// Result is the outcome of an HTTP request: status code plus raw body.
// A code of -1 is used internally to signal a cache miss.
pub struct Result {
pub mut:
	code int
	data string
}
// calculate the key for the cache starting from data and url.
// Key layout: http:<cache.key>:<method>:<md5(url-without-params)>[:<md5(payload)>]
fn (mut h HTTPConnection) cache_key(req Request) string {
	// url() appends query params after '?', so split on '?'
	// (was '!', which never matched: every param variant collapsed
	// into the same key and params were never hashed)
	url := h.url(req).split('?')
	encoded_url := md5.hexhash(url[0]) // without params
	mut key := 'http:${h.cache.key}:${req.method}:${encoded_url}'
	mut req_data := req.data
	if h.cache.match_headers {
		req_data += json.encode(h.header())
	}
	req_data += if url.len > 1 { url[1] } else { '' } // add url param if exist
	key += if req_data.len > 0 { ':${md5.hexhash(req_data)}' } else { '' }
	return key
}
// Get request result from cache, return -1 if missed.
fn (mut h HTTPConnection) cache_get(req Request) !Result {
	key := h.cache_key(req)
	mut data := h.redis.get(key) or {
		// NOTE(review): this assert assumes redis.get only fails with a
		// 'none' (key missing) error; any other redis failure would panic
		// here instead of being reported — confirm this is intended.
		assert '${err}' == 'none'
		// cache miss: signalled to the caller via code -1
		return Result{
			code: -1
		}
	}
	if data == '' {
		// empty payload in redis is also treated as a miss
		return Result{
			code: -1
		}
	}
	result := json.decode(Result, data) or {
		return error('failed to decode result with error: ${err}.\ndata:\n${data}')
	}
	return result
}
// Store a response result in redis under the request's cache key,
// applying the configured expiry.
fn (mut h HTTPConnection) cache_set(req Request, res Result) ! {
	key := h.cache_key(req)
	h.redis.set(key, json.encode(res))!
	h.redis.expire(key, h.cache.expire_after)!
}
// Invalidate cache for a specific url: deletes every cached entry whose key
// matches the url hash (and, for id-requests, the collection url without id).
fn (mut h HTTPConnection) cache_invalidate(req Request) ! {
	// url() appends query params after '?' — split on '?' (was '!',
	// which never matched, so keys for urls with params were computed
	// from the full url and invalidation patterns missed them)
	url := h.url(req).split('?')
	encoded_url := md5.hexhash(url[0])
	mut to_drop := []string{}
	to_drop << 'http:${h.cache.key}:*:${encoded_url}*'
	if req.id.len > 0 {
		// also drop the collection-level entries (url without the trailing /id)
		url_no_id := url[0].trim_string_right('/${req.id}')
		encoded_url_no_id := md5.hexhash(url_no_id)
		to_drop << 'http:${h.cache.key}:*:${encoded_url_no_id}*'
	}
	for pattern in to_drop {
		all_keys := h.redis.keys(pattern)!
		for key in all_keys {
			h.redis.del(key)!
		}
	}
}
// drop full cache for specific cache_key: removes every redis entry
// belonging to this connection's cache namespace.
pub fn (mut h HTTPConnection) cache_drop() ! {
	pattern := 'http:${h.cache.key}*'
	for key in h.redis.keys(pattern)! {
		h.redis.del(key)!
	}
}

View File

@@ -0,0 +1,22 @@
module httpconnection
import net.http { Header }
import freeflowuniverse.herolib.clients.redisclient { Redis }
// HTTPConnection is a reusable HTTP client bound to one base url,
// with optional redis-backed response caching and retries.
@[heap]
pub struct HTTPConnection {
pub mut:
	redis          Redis @[str: skip] // backing store for the response cache
	base_url       string // the base url
	default_header Header // headers sent with every request (joined with per-request headers)
	cache          CacheConfig
	retry          int = 5 // attempts per request before giving up
}
// Join headers from httpconnection and Request.
// Returns the connection's default header when the request carries none,
// otherwise the defaults joined with the request's header.
fn (mut h HTTPConnection) header(req Request) Header {
	mut header := req.header or { return h.default_header }
	return h.default_header.join(header)
}

View File

@@ -0,0 +1,212 @@
// /*
// METHODS NOTES
// * Our target to wrap the default http methods used in V to be cached using redis
// * By default cache enabled in all Request, if you need to disable cache, set req.cache_disable true
// *
// * Flow will be:
// * 1 - Check cache if enabled try to get result from cache
// * 2 - Check result
// * 3 - Do request, if needed
// * 4 - Set in cache if enabled or invalidate cache
// * 5 - Return result
// Suggestion: Send function now enough to do what we want, no need to any post*, get* additional functions
// */
module httpconnection
import x.json2
import net.http
import freeflowuniverse.herolib.core.herojson
import freeflowuniverse.herolib.ui.console
// Build url from Request and httpconnection:
// base_url + '/' + prefix [+ '/' + id] [+ '?' + urlencoded params].
// Params are skipped for POST — send() moves them into the body instead.
fn (mut h HTTPConnection) url(req Request) string {
	mut u := '${h.base_url}/${req.prefix.trim('/')}'
	if req.id.len > 0 {
		u += '/${req.id}'
	}
	if req.params.len > 0 && req.method != .post {
		u += '?${http.url_encode_form_data(req.params)}'
	}
	return u
}
// Return if request is cacheable, depending on connection cache config
// and request arguments.
fn (h HTTPConnection) is_cacheable(req Request) bool {
	if h.cache.disable || req.cache_disable {
		return false
	}
	return req.method in h.cache.allowable_methods
}
// Return true if the cache must be invalidated after an unsafe (mutating)
// method that succeeded (2xx/3xx) and is itself not cacheable.
fn (h HTTPConnection) needs_invalidate(req Request, result_code int) bool {
	if h.cache.disable || req.cache_disable {
		return false
	}
	if req.method !in unsafe_http_methods || req.method in h.cache.allowable_methods {
		return false
	}
	return result_code >= 200 && result_code <= 399
}
// Core function used by all the convenience wrappers (get/post/delete/...).
//
// Flow:
//  1 - Check cache: if enabled, try to get result from cache
//  2 - Check result
//  3 - Do the HTTP request if needed (with up to h.retry attempts)
//  4 - Set in cache if enabled, or invalidate cache after unsafe methods
//  5 - Return result
pub fn (mut h HTTPConnection) send(req_ Request) !Result {
	mut result := Result{}
	mut response := http.Response{}
	mut err_message := ''
	mut from_cache := false // used to know if result came from cache
	mut req := req_
	is_cacheable := h.is_cacheable(req)
	// 1 - Check cache if enabled try to get result from cache
	if is_cacheable {
		result = h.cache_get(req)!
		if result.code != -1 {
			from_cache = true
		}
	}
	// 2 - Check result
	if result.code in [0, -1] {
		// 3 - Do request, if needed
		if req.method == .post {
			// for urlencoded POSTs, params travel in the body (url() skips them)
			if req.dataformat == .urlencoded && req.data == '' && req.params.len > 0 {
				req.data = http.url_encode_form_data(req.params)
			}
		}
		url := h.url(req)
		mut new_req := http.new_request(req.method, url, req.data)
		// joining the header from the HTTPConnection with the one from Request
		new_req.header = h.header()
		if new_req.header.contains(http.CommonHeader.content_type) {
			panic('bug: content_type should not be set as part of default header')
		}
		// content type is derived from the request's dataformat
		match req.dataformat {
			.json {
				new_req.header.set(http.CommonHeader.content_type, 'application/json')
			}
			.urlencoded {
				new_req.header.set(http.CommonHeader.content_type, 'application/x-www-form-urlencoded')
			}
			.multipart_form {
				new_req.header.set(http.CommonHeader.content_type, 'multipart/form-data')
			}
		}
		// removed stray `println(new_req)` which leaked every request
		// (including auth headers) to stdout regardless of req.debug
		if req.debug {
			console.print_debug('http request:\n${new_req.str()}')
		}
		for _ in 0 .. h.retry {
			response = new_req.do() or {
				err_message = 'Cannot send request:${req}\nerror:${err}'
				continue
			}
			break
		}
		if req.debug {
			console.print_debug(response.str())
		}
		// status_code 0 means every retry failed without producing a response
		if response.status_code == 0 {
			return error(err_message)
		}
		result.code = response.status_code
		result.data = response.body
	}
	// 4 - Set in cache if enabled
	if !from_cache && is_cacheable && result.code in h.cache.allowable_codes {
		h.cache_set(req, result)!
	}
	if h.needs_invalidate(req, result.code) {
		h.cache_invalidate(req)!
	}
	// 5 - Return result
	return result
}
// is_ok reports whether the result carries a success or redirect
// status code (2xx or 3xx).
pub fn (r Result) is_ok() bool {
	return match r.code {
		200...399 { true }
		else { false }
	}
}
// dict_key string //if the return is a dict, then will take the element out of the dict with the key and process further
// post_json_str performs a POST and returns the raw json body as string
// (optionally extracting req.dict_key from a dict response).
// Errors when the response is not 2xx/3xx.
pub fn (mut h HTTPConnection) post_json_str(req_ Request) !string {
	mut req := req_
	req.method = .post
	result := h.send(req)!
	if result.is_ok() {
		mut data_ := result.data
		if req.dict_key.len > 0 {
			data_ = herojson.json_dict_get_string(data_, false, req.dict_key)!
		}
		return data_
	}
	// was '\result:' — '\r' is a carriage-return escape, mangling the message
	return error('Could not post ${req}\nresult:\n${result}')
}
// do a request with certain prefix on the already specified url
// and parse the response body as a json dict
pub fn (mut h HTTPConnection) get_json_dict(req Request) !map[string]json2.Any {
	raw := h.get(req)!
	return herojson.json_dict_filter_any(raw, false, [], [])!
}
// dict_key string //if the return is a dict, then will take the element out of the dict with the key and process further
// list_dict_key string //if the output is a list of dicts, then will process each element of the list to take the val with key out of that dict
// e.g. the input is a list of dicts e.g. [{"key":{"name":"kristof@incubaid.com",...},{"key":...}]
// get_json_list performs a GET and returns the response as a list of raw
// json strings, after optional dict_key / list_dict_key extraction.
pub fn (mut h HTTPConnection) get_json_list(req Request) ![]string {
	mut data_ := h.get(req)!
	if req.dict_key.len > 0 {
		data_ = herojson.json_dict_get_string(data_, false, req.dict_key)!
	}
	if req.list_dict_key.len > 0 {
		return herojson.json_list_dict_get_string(data_, false, req.list_dict_key)!
	}
	data := herojson.json_list(data_, false)
	return data
}
// dict_key string //if the return is a dict, then will take the element out of the dict with the key and process further
// get_json performs a GET and returns the raw json body as string
// (optionally extracting req.dict_key from a dict response).
pub fn (mut h HTTPConnection) get_json(req Request) !string {
	// removed: h.default_header.add(.content_language, 'Content-Type: application/json')
	// That line set a bogus Content-Language header (with a full header line as
	// its value) and mutated the shared default_header on every call. The real
	// content type is already set by send() from req.dataformat.
	mut data_ := h.get(req)!
	if req.dict_key.len > 0 {
		data_ = herojson.json_dict_get_string(data_, false, req.dict_key)!
	}
	return data_
}
// Get Request with json data and return response as string.
pub fn (mut h HTTPConnection) get(req_ Request) !string {
	mut req := req_
	// removed leftover `req.debug = true` which forced verbose debug
	// output for every GET; callers opt in via Request{debug: true}
	req.method = .get
	result := h.send(req)!
	return result.data
}
// Delete Request and return the response body as string.
pub fn (mut h HTTPConnection) delete(req_ Request) !string {
	mut req := req_
	req.method = .delete
	return h.send(req)!.data
}
// performs a multi part form data request
// Bypasses send() (and thus the cache/retry machinery) and posts the form
// directly via net.http; the connection's joined headers are applied first.
pub fn (mut h HTTPConnection) post_multi_part(req Request, form http.PostMultipartFormConfig) !http.Response {
	mut req_form := form
	mut header := h.header()
	header.set(http.CommonHeader.content_type, 'multipart/form-data')
	req_form.header = header
	url := h.url(req)
	return http.post_multipart_form(url, req_form)!
}

View File

@@ -0,0 +1,22 @@
module httpconnection
import json
// get_json_generic performs a GET and decodes the json body into T.
pub fn (mut h HTTPConnection) get_json_generic[T](req Request) !T {
	payload := h.get_json(req)!
	decoded := json.decode(T, payload) or {
		return error("couldn't decode json for ${req} for ${payload}")
	}
	return decoded
}
// post_json_generic performs a POST and decodes the json body into T.
pub fn (mut h HTTPConnection) post_json_generic[T](req Request) !T {
	payload := h.post_json_str(req)!
	decoded := json.decode(T, payload) or {
		return error("couldn't decode json for ${req} for ${payload}")
	}
	return decoded
}
// get_json_list_generic performs a GET returning a list and decodes each
// element of the json list into T.
pub fn (mut h HTTPConnection) get_json_list_generic[T](req Request) ![]T {
	items := h.get_json_list(req)!
	mut r := []T{cap: items.len}
	for item in items {
		r << json.decode(T, item) or { return error("couldn't decode json for ${req} for ${item}") }
	}
	return r
}

View File

@@ -0,0 +1,39 @@
module httpconnection
import net.http
import freeflowuniverse.herolib.clients.redisclient { RedisURL }
// HTTPConnectionArgs are the arguments for new():
// name is used as the redis cache namespace, url is the base url.
@[params]
pub struct HTTPConnectionArgs {
pub:
	name  string @[required] // cache namespace / instance name
	url   string @[required] // base url of the remote API
	cache bool // enable response caching (off by default)
	retry int = 1
}
// new creates an HTTPConnection from the given args.
// Returns an error (instead of panicking) when the url is empty or
// the redis client cannot be obtained.
pub fn new(args HTTPConnectionArgs) !&HTTPConnection {
	if args.url.replace(' ', '') == '' {
		return error("URL is empty, can't create http connection with empty url")
	}
	// Init connection
	mut conn := HTTPConnection{
		redis: redisclient.core_get(RedisURL{})!
		default_header: http.new_header()
		cache: CacheConfig{
			disable: !args.cache
			key: args.name
		}
		retry: args.retry
		base_url: args.url.trim('/')
	}
	return &conn
}

View File

@@ -0,0 +1,171 @@
# HTTPConnection Module
The HTTPConnection module provides a robust HTTP client implementation with support for JSON handling, custom headers, retries, and caching.
## Features
- Generic JSON methods for type-safe requests
- Custom header support
- Built-in retry mechanism
- Cache configuration
- URL encoding support
## Basic Usage
```v
import freeflowuniverse.herolib.clients.httpconnection
// Create a new HTTP connection
mut conn := HTTPConnection{
base_url: 'https://api.example.com'
retry: 5 // number of retries for failed requests
}
```
## Examples
### GET Request with JSON Response
```v
// Define your data structure
struct User {
id int
name string
email string
}
// Make a GET request and decode JSON response
user := conn.get_json_generic[User](
method: .get
prefix: 'users/1'
dataformat: .urlencoded
)!
```
### GET Request for List of Items
```v
// Get a list of items and decode each one
users := conn.get_json_list_generic[User](
method: .get
prefix: 'users'
list_dict_key: 'users' // if response is wrapped in a key
dataformat: .urlencoded
)!
```
### POST Request with JSON Data
```v
// Create new resource with POST
new_user := conn.post_json_generic[User](
method: .post
prefix: 'users'
dataformat: .urlencoded
params: {
'name': 'John Doe'
'email': 'john@example.com'
}
)!
```
### Real-World Example: SSH Key Management
Here's a practical example inspired by SSH key management in a cloud API:
```v
// Define the SSH key structure
struct SSHKey {
pub mut:
name string
fingerprint string
type_ string @[json: 'type']
size int
created_at string
data string
}
// Get all SSH keys
fn get_ssh_keys(mut conn HTTPConnection) ![]SSHKey {
return conn.get_json_list_generic[SSHKey](
method: .get
prefix: 'key'
list_dict_key: 'key'
dataformat: .urlencoded
)!
}
// Create a new SSH key
fn create_ssh_key(mut conn HTTPConnection, name string, key_data string) !SSHKey {
return conn.post_json_generic[SSHKey](
method: .post
prefix: 'key'
dataformat: .urlencoded
params: {
'name': name
'data': key_data
}
)!
}
// Delete an SSH key
fn delete_ssh_key(mut conn HTTPConnection, fingerprint string) ! {
conn.delete(
method: .delete
prefix: 'key/${fingerprint}'
dataformat: .urlencoded
)!
}
```
## Custom Headers
You can set default headers for all requests or specify headers for individual requests:
```v
import net.http { Header }
// Set default headers for all requests
conn.default_header = http.new_header(
key: .authorization
value: 'Bearer your-token-here'
)
// Add custom headers for specific request
response := conn.get_json(
method: .get
prefix: 'protected/resource'
header: http.new_header(
key: .content_type
value: 'application/json'
)
)!
```
## Error Handling
The module uses V's built-in error handling. All methods that can fail return a Result type:
```v
// Handle potential errors
user := conn.get_json_generic[User](
method: .get
prefix: 'users/1'
) or {
println('Error: ${err}')
return
}
```
## Cache Configuration
The module supports caching of responses. Configure caching behavior through the `CacheConfig` struct:
```v
mut conn := HTTPConnection{
base_url: 'https://api.example.com'
cache: CacheConfig{
disable: false // note: the field is `disable` — there is no `enabled` field on CacheConfig
// Add other cache configuration as needed
}
}
```

View File

@@ -0,0 +1,25 @@
module httpconnection
import net.http { Header, Method }
// DataFormat selects the Content-Type that send() will set on the request.
pub enum DataFormat {
	json // application/json
	urlencoded // application/x-www-form-urlencoded
	multipart_form // multipart/form-data
}
// Request describes one HTTP call made through HTTPConnection.send()
// and its convenience wrappers.
@[params]
pub struct Request {
pub mut:
	method        Method // overwritten by the wrapper methods (get/post/delete/...)
	prefix        string // path appended to the connection's base_url
	id            string // optional resource id appended after the prefix
	params        map[string]string // query params (body params for urlencoded POST)
	data          string // raw request body
	cache_disable bool // do not put this default on true, this is set on the connection, this is here to be overruled in specific cases
	header        ?Header // extra headers, joined with the connection defaults
	dict_key      string // if the return is a dict, then will take the element out of the dict with the key and process further
	list_dict_key string // if the output is a list of dicts, then will process each element of the list to take the val with key out of that dict
	debug         bool // print request/response for this call
	dataformat    DataFormat
}

View File

@@ -0,0 +1,7 @@
!!hero_code.generate_client
name:'mailclient'
classname:'MailClient'
singleton:0
default:1
reset:0

View File

@@ -0,0 +1,71 @@
module mailclient
import freeflowuniverse.herolib.core.texttools
import net.smtp
import time
// SendArgs are the arguments for MailClient.send().
@[params]
pub struct SendArgs {
pub mut:
	markdown  bool // NOTE(review): appears unused by send(); body_type drives rendering — confirm
	from      string
	to        string // comma-separated list of recipients
	cc        string
	bcc       string
	date      time.Time = time.now()
	subject   string
	body_type BodyType
	body      string // dedented before sending
}
// BodyType selects how the mail body is declared to the SMTP client.
pub enum BodyType {
	text
	html
	markdown // sent with html body type (see send())
}
// ```
// cl.send(markdown:true,subject:'this is a test',to:'kds@something.com,kds2@else.com',body:'
// this is my email content
// ')!
// args:
// markdown bool
// from string
// to string
// cc string
// bcc string
// date time.Time = time.now()
// subject string
// body_type BodyType (.html, .text, .markdown)
// body string
// ```
// send delivers a mail through the client's configured SMTP server.
// The body is dedented first; .html and .markdown bodies are declared
// as html to SMTP.
pub fn (mut cl MailClient) send(args_ SendArgs) ! {
	mut args := args_
	args.body = texttools.dedent(args.body)
	mut body_type := smtp.BodyType.text
	// NOTE(review): .markdown is sent as html but the body is NOT converted
	// from markdown to html here — raw markdown goes out. Confirm whether a
	// conversion step is missing.
	if args.body_type == .html || args.body_type == .markdown {
		body_type = smtp.BodyType.html
	}
	mut m := smtp.Mail{
		from: args.from
		to: args.to
		cc: args.cc
		bcc: args.bcc
		date: args.date
		subject: args.subject
		body: args.body
		body_type: body_type
	}
	// a fresh SMTP client is created per send; cl.tls maps to STARTTLS,
	// cl.ssl to implicit TLS
	mut smtp_client := smtp.new_client(
		server: cl.mail_server
		port: cl.mail_port
		username: cl.mail_username
		password: cl.mail_password
		from: cl.mail_from
		ssl: cl.ssl
		starttls: cl.tls
	)!
	return smtp_client.send(m)
}

View File

@@ -0,0 +1,107 @@
module mailclient
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
__global (
mailclient_global map[string]&MailClient
mailclient_default string
)
/////////FACTORY
// ArgsGet selects which named mailclient instance to operate on.
@[params]
pub struct ArgsGet {
pub mut:
	name string = 'default'
}
// args_get normalizes the instance name: an empty name falls back to the
// module's current default instance, and finally to 'default'.
fn args_get(args_ ArgsGet) ArgsGet {
	mut args := args_
	for candidate in [args.name, mailclient_default, 'default'] {
		if candidate != '' {
			args.name = candidate
			break
		}
	}
	return args
}
// get returns the MailClient instance for args.name, loading (and, when the
// module const `default` is set, first auto-saving) its config on demand.
pub fn get(args_ ArgsGet) !&MailClient {
	mut args := args_get(args_)
	if args.name !in mailclient_global {
		if !config_exists() {
			// NOTE(review): config_exists()/config_load() are called without
			// args, so they check/load the 'default' instance even when a
			// different name was requested — confirm this is intended.
			if default {
				config_save()!
			}
		}
		config_load()!
	}
	return mailclient_global[args.name] or { panic('bug') }
}
// switch instance to be used for mailclient
// Only changes the in-memory default; nothing is persisted.
pub fn switch(name string) {
	mailclient_default = name
}
// config_exists reports whether a hero config for this instance name exists
// in the current context.
fn config_exists(args_ ArgsGet) bool {
	mut args := args_get(args_)
	mut context := base.context() or { panic('bug') }
	return context.hero_config_exists('mailclient', args.name)
}
// config_load reads the stored heroscript config for this instance
// and replays it (which populates mailclient_global via play/cfg_play).
fn config_load(args_ ArgsGet) ! {
	mut args := args_get(args_)
	mut context := base.context()!
	mut heroscript := context.hero_config_get('mailclient', args.name)!
	play(heroscript: heroscript)!
}
// config_save persists the default heroscript config under this instance name.
fn config_save(args_ ArgsGet) ! {
	mut args := args_get(args_)
	mut context := base.context()!
	context.hero_config_set('mailclient', args.name, heroscript_default())!
}
// set registers a configured MailClient under its own name.
// Previously the instance was always stored under 'default', which broke
// named instances later looked up by get(args.name).
fn set(o MailClient) ! {
	mut o2 := obj_init(o)!
	mailclient_global[o2.name] = &o2
}
// InstallPlayArgs are the arguments for play(): either a ready PlayBook or a
// heroscript string (falling back to heroscript_default when both are empty).
@[params]
pub struct InstallPlayArgs {
pub mut:
	name       string = 'default'
	heroscript string // if filled in then plbook will be made out of it
	plbook     ?playbook.PlayBook
	reset      bool
	start      bool
	stop       bool
	restart    bool
	delete     bool
	configure  bool // make sure there is at least one installed
}
// play executes all `mailclient.configure` actions found in the playbook
// (built from args.plbook, args.heroscript, or heroscript_default()).
pub fn play(args_ InstallPlayArgs) ! {
	mut args := args_
	// removed leftover debug prints ('debguzo1/2/3')
	mut plbook := args.plbook or {
		heroscript := if args.heroscript == '' {
			heroscript_default()
		} else {
			args.heroscript
		}
		playbook.new(text: heroscript)!
	}
	mut install_actions := plbook.find(filter: 'mailclient.configure')!
	if install_actions.len > 0 {
		for install_action in install_actions {
			mut p := install_action.params
			cfg_play(p)!
		}
	}
}

View File

@@ -0,0 +1,70 @@
module mailclient
import freeflowuniverse.herolib.data.paramsparser
import os
pub const version = '1.0.0'
const singleton = false
const default = true
// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
// heroscript_default builds the default mailclient heroscript config,
// taking values from the MAIL_* environment variables when set.
// Env vars are only read here, i.e. at configure/save time.
pub fn heroscript_default() string {
	mail_from := os.getenv_opt('MAIL_FROM') or { 'info@example.com' }
	mail_password := os.getenv_opt('MAIL_PASSWORD') or { 'secretpassword' }
	mail_port := (os.getenv_opt('MAIL_PORT') or { '465' }).int()
	mail_server := os.getenv_opt('MAIL_SERVER') or { 'smtp-relay.brevo.com' }
	mail_username := os.getenv_opt('MAIL_USERNAME') or { 'kristof@incubaid.com' }
	heroscript := "
    !!mailclient.configure name:'default'
        mail_from: '${mail_from}'
        mail_password: '${mail_password}'
        mail_port: ${mail_port}
        mail_server: '${mail_server}'
        mail_username: '${mail_username}'
        "
	return heroscript
}
// MailClient holds SMTP connection settings for one named mail account.
pub struct MailClient {
pub mut:
	name          string = 'default'
	mail_from     string
	mail_password string @[secret]
	mail_port     int = 465
	mail_server   string
	mail_username string
	ssl           bool = true // implicit TLS (port 465 style)
	tls           bool // STARTTLS (mapped to smtp starttls in send())
}
// cfg_play builds a MailClient from heroscript params and registers it.
// mail_from, mail_password, mail_server and mail_username are required.
fn cfg_play(p paramsparser.Params) ! {
	mut mycfg := MailClient{
		name: p.get_default('name', 'default')!
		mail_from: p.get('mail_from')!
		mail_password: p.get('mail_password')!
		mail_port: p.get_int_default('mail_port', 465)!
		mail_server: p.get('mail_server')!
		mail_username: p.get('mail_username')!
	}
	set(mycfg)!
}
// obj_init post-processes a MailClient before it is registered.
// Currently a pass-through hook.
fn obj_init(obj_ MailClient) !MailClient {
	// never call get here, only thing we can do here is work on object itself
	mut obj := obj_
	return obj
}
// user needs to use switch to make sure we get the right object
// configure registers the given config as an instance and returns it.
pub fn configure(config MailClient) !MailClient {
	client := MailClient{
		...config
	}
	set(client)!
	return client
	// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED
	// implement if steps need to be done for configuration
}

View File

@@ -0,0 +1,50 @@
# mailclient
To get started
```vlang
import freeflowuniverse.herolib.clients.mailclient
mut client:= mailclient.get()!
client.send(subject:'this is a test',to:'kds@something.com,kds2@else.com',body:'
this is my email content
')!
```
## example heroscript
```hero
!!mailclient.configure
secret: '...'
host: 'localhost'
port: 8888
```
## use of env variables
if you have a secrets file you could import as
```bash
//e.g. source ~/code/git.ourworld.tf/despiegk/hero_secrets/mysecrets.sh
```
following env variables are supported
- MAIL_FROM=
- MAIL_PASSWORD=
- MAIL_PORT=465
- MAIL_SERVER=smtp-relay.brevo.com
- MAIL_USERNAME=kristof@incubaid.com
these variables will only be set at configure time
## brevo remark
- use ssl
- use port: 465

View File

@@ -0,0 +1,7 @@
!!hero_code.generate_client
name:'meilisearch'
classname:'MeilisearchClient'
singleton:0
default:1
reset:0

View File

@@ -0,0 +1,457 @@
module meilisearch
import freeflowuniverse.herolib.clients.httpconnection
import x.json2
import json
// health checks if the server is healthy (GET /health).
pub fn (mut client MeilisearchClient) health() !Health {
	req := httpconnection.Request{
		prefix: 'health'
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	return json2.decode[Health](response)
}
// version gets the version of the Meilisearch server (GET /version).
pub fn (mut client MeilisearchClient) version() !Version {
	request := httpconnection.Request{
		prefix: 'version'
	}
	mut conn := client.httpclient()!
	raw := conn.get_json(request)!
	return json2.decode[Version](raw)
}
// create_index creates a new index with the given UID (POST /indexes).
pub fn (mut client MeilisearchClient) create_index(args CreateIndexArgs) !CreateIndexResponse {
	req := httpconnection.Request{
		prefix: 'indexes'
		method: .post
		data: json2.encode(args)
	}
	mut http := client.httpclient()!
	response := http.post_json_str(req)!
	return json2.decode[CreateIndexResponse](response)
}
// get_index retrieves information about an index (GET /indexes/{uid}).
pub fn (mut client MeilisearchClient) get_index(uid string) !GetIndexResponse {
	request := httpconnection.Request{
		prefix: 'indexes/${uid}'
	}
	mut conn := client.httpclient()!
	raw := conn.get_json(request)!
	return json2.decode[GetIndexResponse](raw)
}
// list_indexes retrieves all indexes, paginated by args.limit/args.offset.
pub fn (mut client MeilisearchClient) list_indexes(args ListIndexArgs) ![]GetIndexResponse {
	req := httpconnection.Request{
		prefix: 'indexes?limit=${args.limit}&offset=${args.offset}'
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	// the server wraps the list in {results: [...]}
	list_response := json.decode(ListResponse[GetIndexResponse], response)!
	return list_response.results
}
// delete_index deletes an index (DELETE /indexes/{uid}).
pub fn (mut client MeilisearchClient) delete_index(uid string) !DeleteIndexResponse {
	request := httpconnection.Request{
		prefix: 'indexes/${uid}'
	}
	mut conn := client.httpclient()!
	raw := conn.delete(request)!
	return json2.decode[DeleteIndexResponse](raw)
}
// get_settings retrieves all settings of an index (GET /indexes/{uid}/settings)
// and maps the camelCase response keys onto IndexSettings; absent keys leave
// the corresponding field at its zero value.
pub fn (mut client MeilisearchClient) get_settings(uid string) !IndexSettings {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	mut settings := IndexSettings{}
	if ranking_rules := response['rankingRules'] {
		settings.ranking_rules = ranking_rules.arr().map(it.str())
	}
	if distinct_attribute := response['distinctAttribute'] {
		settings.distinct_attribute = distinct_attribute.str()
	}
	if searchable_attributes := response['searchableAttributes'] {
		settings.searchable_attributes = searchable_attributes.arr().map(it.str())
	}
	if displayed_attributes := response['displayedAttributes'] {
		settings.displayed_attributes = displayed_attributes.arr().map(it.str())
	}
	if stop_words := response['stopWords'] {
		settings.stop_words = stop_words.arr().map(it.str())
	}
	if filterable_attributes := response['filterableAttributes'] {
		settings.filterable_attributes = filterable_attributes.arr().map(it.str())
	}
	if sortable_attributes := response['sortableAttributes'] {
		settings.sortable_attributes = sortable_attributes.arr().map(it.str())
	}
	return settings
}
// update_settings updates all settings of an index (PATCH /indexes/{uid}/settings).
// Uses send() directly so the PATCH method is preserved — post_json_str()
// unconditionally overwrites the method to POST.
pub fn (mut client MeilisearchClient) update_settings(uid string, settings IndexSettings) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings'
		method: .patch
		data: json2.encode(settings)
	}
	mut http := client.httpclient()!
	result := http.send(req)!
	if result.is_ok() {
		return result.data
	}
	return error('Could not update settings for index ${uid}\nresult:\n${result}')
}
// reset_settings resets all settings of an index to default values
// (DELETE /indexes/{uid}/settings). delete() forces the method to DELETE,
// consistent with the explicit `.delete` here.
pub fn (mut client MeilisearchClient) reset_settings(uid string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings'
		method: .delete
	}
	mut http := client.httpclient()!
	return http.delete(req)
}
// get_ranking_rules retrieves ranking rules of an index.
// The `!` map lookup errors when 'rankingRules' is absent from the response.
pub fn (mut client MeilisearchClient) get_ranking_rules(uid string) ![]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/ranking-rules'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	return response['rankingRules']!.arr().map(it.str())
}
// update_ranking_rules updates ranking rules of an index
// (PUT /indexes/{uid}/settings/ranking-rules).
// Uses send() directly so the PUT method is preserved — post_json_str()
// unconditionally overwrites the method to POST.
pub fn (mut client MeilisearchClient) update_ranking_rules(uid string, rules []string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/ranking-rules'
		method: .put
		data: json2.encode({
			'rankingRules': rules
		})
	}
	mut http := client.httpclient()!
	result := http.send(req)!
	if result.is_ok() {
		return result.data
	}
	return error('Could not update ranking rules for index ${uid}\nresult:\n${result}')
}
// reset_ranking_rules resets ranking rules of an index to default values
// (DELETE /indexes/{uid}/settings/ranking-rules).
pub fn (mut client MeilisearchClient) reset_ranking_rules(uid string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/ranking-rules'
		method: .delete
	}
	mut http := client.httpclient()!
	return http.delete(req)
}
// get_distinct_attribute retrieves distinct attribute of an index.
// The `!` map lookup errors when 'distinctAttribute' is absent.
pub fn (mut client MeilisearchClient) get_distinct_attribute(uid string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/distinct-attribute'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	return response['distinctAttribute']!.str()
}
// update_distinct_attribute updates distinct attribute of an index
// (PUT /indexes/{uid}/settings/distinct-attribute).
// Uses send() directly so the PUT method is preserved — post_json_str()
// unconditionally overwrites the method to POST.
pub fn (mut client MeilisearchClient) update_distinct_attribute(uid string, attribute string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/distinct-attribute'
		method: .put
		data: json2.encode({
			'distinctAttribute': attribute
		})
	}
	mut http := client.httpclient()!
	result := http.send(req)!
	if result.is_ok() {
		return result.data
	}
	return error('Could not update distinct attribute for index ${uid}\nresult:\n${result}')
}
// reset_distinct_attribute resets distinct attribute of an index
// (DELETE /indexes/{uid}/settings/distinct-attribute).
pub fn (mut client MeilisearchClient) reset_distinct_attribute(uid string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/distinct-attribute'
		method: .delete
	}
	mut http := client.httpclient()!
	return http.delete(req)
}
// get_searchable_attributes retrieves searchable attributes of an index.
// The `!` map lookup errors when 'searchableAttributes' is absent.
pub fn (mut client MeilisearchClient) get_searchable_attributes(uid string) ![]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/searchable-attributes'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	return response['searchableAttributes']!.arr().map(it.str())
}
// update_searchable_attributes updates searchable attributes of an index
// (PUT /indexes/{uid}/settings/searchable-attributes).
// Uses send() directly so the PUT method is preserved — post_json_str()
// unconditionally overwrites the method to POST.
pub fn (mut client MeilisearchClient) update_searchable_attributes(uid string, attributes []string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/searchable-attributes'
		method: .put
		data: json2.encode({
			'searchableAttributes': attributes
		})
	}
	mut http := client.httpclient()!
	result := http.send(req)!
	if result.is_ok() {
		return result.data
	}
	return error('Could not update searchable attributes for index ${uid}\nresult:\n${result}')
}
// reset_searchable_attributes resets searchable attributes of an index
// (DELETE /indexes/{uid}/settings/searchable-attributes).
pub fn (mut client MeilisearchClient) reset_searchable_attributes(uid string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/searchable-attributes'
		method: .delete
	}
	mut http := client.httpclient()!
	return http.delete(req)
}
// get_displayed_attributes retrieves displayed attributes of an index.
// The `!` map lookup errors when 'displayedAttributes' is absent.
pub fn (mut client MeilisearchClient) get_displayed_attributes(uid string) ![]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/displayed-attributes'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	return response['displayedAttributes']!.arr().map(it.str())
}
// update_displayed_attributes updates displayed attributes of an index.
// Fix: the settings sub-route expects the raw JSON array as the request body,
// not a {"displayedAttributes": ...} wrapper — consistent with
// update_filterable_attributes below.
pub fn (mut client MeilisearchClient) update_displayed_attributes(uid string, attributes []string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/displayed-attributes'
		method: .put
		data: json.encode(attributes)
	}
	mut http := client.httpclient()!
	return http.post_json_str(req)
}
// reset_displayed_attributes resets displayed attributes of an index to the default
pub fn (mut client MeilisearchClient) reset_displayed_attributes(uid string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/displayed-attributes'
		method: .delete
	}
	return conn.delete(request)
}
// get_stop_words retrieves stop words of an index.
// Fix: GET /indexes/{uid}/settings/stop-words returns a bare JSON array,
// not an object with a "stopWords" key, so decode the raw body directly.
pub fn (mut client MeilisearchClient) get_stop_words(uid string) ![]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/stop-words'
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	return json2.raw_decode(response)!.arr().map(it.str())
}
// update_stop_words updates stop words of an index.
// Fix: the settings sub-route expects the raw JSON array as the request body
// (e.g. ["the","of"]), not a {"stopWords": ...} wrapper.
pub fn (mut client MeilisearchClient) update_stop_words(uid string, words []string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/stop-words'
		method: .put
		data: json.encode(words)
	}
	mut http := client.httpclient()!
	return http.post_json_str(req)
}
// reset_stop_words resets stop words of an index to the default (empty) list
pub fn (mut client MeilisearchClient) reset_stop_words(uid string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/stop-words'
		method: .delete
	}
	return conn.delete(request)
}
// get_synonyms retrieves synonyms of an index.
// Fix: GET /indexes/{uid}/settings/synonyms returns the synonyms map directly
// (e.g. {"wolverine": ["logan"]}), not an object wrapping it under a
// "synonyms" key, so iterate the decoded dict itself.
pub fn (mut client MeilisearchClient) get_synonyms(uid string) !map[string][]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/synonyms'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	mut synonyms := map[string][]string{}
	for key, value in response {
		synonyms[key] = value.arr().map(it.str())
	}
	return synonyms
}
// update_synonyms updates synonyms of an index.
// Fix: the settings sub-route expects the synonyms map itself as the request
// body (e.g. {"wolverine": ["logan"]}), not a {"synonyms": ...} wrapper.
pub fn (mut client MeilisearchClient) update_synonyms(uid string, synonyms map[string][]string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/synonyms'
		method: .put
		data: json.encode(synonyms)
	}
	mut http := client.httpclient()!
	return http.post_json_str(req)
}
// reset_synonyms resets synonyms of an index to the default (empty) map
pub fn (mut client MeilisearchClient) reset_synonyms(uid string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/synonyms'
		method: .delete
	}
	return conn.delete(request)
}
// get_filterable_attributes retrieves filterable attributes of an index.
// Fix: GET /indexes/{uid}/settings/filterable-attributes returns a bare JSON
// array, not an object with a "filterableAttributes" key, so decode it directly.
pub fn (mut client MeilisearchClient) get_filterable_attributes(uid string) ![]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/filterable-attributes'
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	return json2.raw_decode(response)!.arr().map(it.str())
}
// update_filterable_attributes updates filterable attributes of an index.
// The settings sub-route takes the raw JSON array as request body.
pub fn (mut client MeilisearchClient) update_filterable_attributes(uid string, attributes []string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/filterable-attributes'
		method: .put
		data: json.encode(attributes)
	}
	result := conn.send(request)!
	return result.data
}
// reset_filterable_attributes resets filterable attributes of an index to the default
pub fn (mut client MeilisearchClient) reset_filterable_attributes(uid string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/filterable-attributes'
		method: .delete
	}
	return conn.delete(request)
}
// get_sortable_attributes retrieves sortable attributes of an index.
// Fix: GET /indexes/{uid}/settings/sortable-attributes returns a bare JSON
// array, not an object with a "sortableAttributes" key, so decode it directly.
pub fn (mut client MeilisearchClient) get_sortable_attributes(uid string) ![]string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/sortable-attributes'
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	return json2.raw_decode(response)!.arr().map(it.str())
}
// update_sortable_attributes updates sortable attributes of an index.
// Fix: the settings sub-route expects the raw JSON array as the request body,
// not a {"sortableAttributes": ...} wrapper — consistent with
// update_filterable_attributes.
pub fn (mut client MeilisearchClient) update_sortable_attributes(uid string, attributes []string) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/sortable-attributes'
		method: .put
		data: json.encode(attributes)
	}
	mut http := client.httpclient()!
	return http.post_json_str(req)
}
// reset_sortable_attributes resets sortable attributes of an index to the default
pub fn (mut client MeilisearchClient) reset_sortable_attributes(uid string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/sortable-attributes'
		method: .delete
	}
	return conn.delete(request)
}
// get_typo_tolerance retrieves typo tolerance settings of an index.
// GET /indexes/{uid}/settings/typo-tolerance returns the settings object
// itself, so fields are read straight off the decoded dict.
pub fn (mut client MeilisearchClient) get_typo_tolerance(uid string) !TypoTolerance {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/typo-tolerance'
	}
	mut http := client.httpclient()!
	response := http.get_json_dict(req)!
	// Decode the nested minWordSizeForTypos object from its raw JSON string.
	min_word_size_for_typos := json2.decode[MinWordSizeForTypos](response['minWordSizeForTypos']!.json_str())!
	mut typo_tolerance := TypoTolerance{
		enabled: response['enabled']!.bool()
		min_word_size_for_typos: min_word_size_for_typos
	}
	// These two keys may be absent in the response, so read them optionally.
	if disable_on_words := response['disableOnWords'] {
		typo_tolerance.disable_on_words = disable_on_words.arr().map(it.str())
	}
	if disable_on_attributes := response['disableOnAttributes'] {
		typo_tolerance.disable_on_attributes = disable_on_attributes.arr().map(it.str())
	}
	return typo_tolerance
}
// update_typo_tolerance updates typo tolerance settings of an index
// (HTTP PATCH, partial update).
// NOTE(review): post_json_str is assumed to honor Request.method (.patch
// here, .put elsewhere in this file) — confirm in httpconnection.
pub fn (mut client MeilisearchClient) update_typo_tolerance(uid string, typo_tolerance TypoTolerance) !string {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/typo-tolerance'
		method: .patch
		data: json2.encode(typo_tolerance)
	}
	mut http := client.httpclient()!
	return http.post_json_str(req)
}
// reset_typo_tolerance resets typo tolerance settings of an index to the defaults
pub fn (mut client MeilisearchClient) reset_typo_tolerance(uid string) !string {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/settings/typo-tolerance'
		method: .delete
	}
	return conn.delete(request)
}
// EperimentalFeaturesArgs toggles Meilisearch experimental features
// (PATCH /experimental-features).
// NOTE(review): "Eperimental" is a typo for "Experimental"; renaming would
// break existing callers, so the name is kept as-is.
@[params]
pub struct EperimentalFeaturesArgs {
pub mut:
	vector_store bool @[json: 'vectorStore'] // enable vector store / vector search
	metrics bool @[json: 'metrics'] // expose the /metrics endpoint
	logs_route bool @[json: 'logsRoute'] // expose the /logs route
	contains_filter bool @[json: 'containsFilter'] // allow the CONTAINS filter operator
	edit_documents_by_function bool @[json: 'editDocumentsByFunction'] // allow function-based document edits
}
// enable_eperimental_feature toggles experimental features on the server
// (PATCH /experimental-features) and returns the resulting feature flags.
// NOTE(review): name keeps the existing "eperimental" typo to avoid breaking callers.
pub fn (mut client MeilisearchClient) enable_eperimental_feature(args EperimentalFeaturesArgs) !EperimentalFeaturesArgs {
	req := httpconnection.Request{
		prefix: 'experimental-features'
		method: .patch
		data: json.encode(args)
	}
	mut http := client.httpclient()!
	response := http.send(req)!
	return json.decode(EperimentalFeaturesArgs, response.data)
}

View File

@@ -0,0 +1,287 @@
module meilisearch
import rand
import time
// MeiliDocument is the minimal document shape used by these tests.
struct MeiliDocument {
pub mut:
	id int // primary key used by the test index
	title string
	content string
}
// setup_client returns the shared test client from the module factory.
fn setup_client() !&MeilisearchClient {
	return get()!
}
// Verifies that adding a document enqueues a documentAdditionOrUpdate task
// against a fresh, randomly named index.
fn test_add_document() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			content: 'Shazam is a 2019 American superhero film based on the DC Comics character of the same name.'
			title: 'Shazam'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
}
// Verifies that a document can be fetched by id with a restricted field list.
fn test_get_document() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'Shazam'
			content: 'Shazam is a 2019 American superhero film based on the DC Comics character of the same name.'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Document addition is asynchronous; give the server time to index it.
	time.sleep(500 * time.millisecond)
	doc_ := client.get_document[MeiliDocument](
		uid: index_name
		document_id: 1
		fields: ['id', 'title']
	)!
	assert doc_.title == 'Shazam'
	assert doc_.id == 1
}
// Verifies that multiple documents can be listed with a restricted field set,
// and that insertion order is preserved in the listing.
fn test_get_documents() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'The Kit kat'
			content: 'The kit kat is an Egypton film that was released in 2019.'
		},
		MeiliDocument{
			id: 2
			title: 'Elli Bali Balak'
			content: 'Elli Bali Balak is an Egyptian film that was released in 2019.'
		},
	]
	q := DocumentsQuery{
		fields: ['title', 'id']
	}
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Wait for the asynchronous indexing task to finish.
	time.sleep(500 * time.millisecond)
	mut docs := client.get_documents[MeiliDocument](index_name, q)!
	assert docs.len > 0
	assert docs[0].title == 'The Kit kat'
	assert docs[0].id == 1
	assert docs[1].title == 'Elli Bali Balak'
	assert docs[1].id == 2
}
// Verifies that deleting a single document enqueues a documentDeletion task.
fn test_delete_document() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'Shazam'
			content: 'Shazam is a 2019 American superhero film based on the DC Comics character of the same name.'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Wait for the asynchronous indexing task to finish.
	time.sleep(500 * time.millisecond)
	mut doc_ := client.delete_document(
		uid: index_name
		document_id: 1
	)!
	assert doc_.index_uid == index_name
	assert doc_.type_ == 'documentDeletion'
}
// Verifies that delete_all_documents removes every document from an index.
fn test_delete_documents() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'Shazam'
			content: 'Shazam is a 2019 American superhero film based on the DC Comics character of the same name.'
		},
		MeiliDocument{
			id: 2
			title: 'Shazam2'
			content: 'Shazam2 is a 2019 American superhero film based on the DC Comics character of the same name.'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Wait for the asynchronous indexing task to finish.
	time.sleep(500 * time.millisecond)
	mut doc_ := client.delete_all_documents(index_name)!
	assert doc_.index_uid == index_name
	assert doc_.type_ == 'documentDeletion'
	// Deletion is asynchronous too; wait before verifying emptiness.
	time.sleep(500 * time.millisecond)
	q := DocumentsQuery{
		fields: ['title', 'id']
	}
	mut docs := client.get_documents[MeiliDocument](index_name, q)!
	assert docs.len == 0
}
// Verifies that a full-text search returns the expected best hit.
fn test_search() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'Power of rich people'
			content: 'Power of rich people is an American film.'
		},
		MeiliDocument{
			id: 2
			title: 'Capten America'
			content: 'Capten America is an American film.'
		},
		MeiliDocument{
			id: 3
			title: 'Coldplay'
			content: 'Coldplay is a british rock band.'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Wait for the asynchronous indexing task to finish.
	time.sleep(500 * time.millisecond)
	mut doc_ := client.search[MeiliDocument](index_name, q: 'Coldplay')!
	assert doc_.hits[0].id == 3
}
// Verifies facet search: makes 'title' filterable, then counts documents
// sharing the facet value 'life'.
fn test_facet_search() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'Life'
			content: 'Two men in 1930s Mississippi become friends after being sentenced to life in prison together for a crime they did not commit.'
		},
		MeiliDocument{
			id: 2
			title: 'Life'
			content: 'In 1955, young photographer Dennis Stock develops a close bond with actor James Dean while shooting pictures of the rising Hollywood star.'
		},
		MeiliDocument{
			id: 3
			title: 'Coldplay'
			content: 'Coldplay is a british rock band.'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Wait for the asynchronous indexing task to finish.
	time.sleep(500 * time.millisecond)
	res := client.update_filterable_attributes(index_name, ['title'])!
	// Settings updates are asynchronous as well.
	time.sleep(500 * time.millisecond)
	settings := client.get_settings(index_name)!
	assert ['title'] == settings.filterable_attributes
	mut doc_ := client.facet_search(index_name,
		facet_name: 'title'
		filter: 'title = life'
	)!
	assert doc_.facet_hits[0].count == 2
}
// Exercises the similar-documents endpoint; the response shape is still under
// investigation (see TODO), so only a smoke check is performed.
fn test_similar_documents() {
	mut client := setup_client()!
	index_name := rand.string(5)
	documents := [
		MeiliDocument{
			id: 1
			title: 'Life'
			content: 'Two men in 1930s Mississippi become friends after being sentenced to life in prison together for a crime they did not commit.'
		},
		MeiliDocument{
			id: 2
			title: 'Life'
			content: 'In 1955, young photographer Dennis Stock develops a close bond with actor James Dean while shooting pictures of the rising Hollywood star.'
		},
		MeiliDocument{
			id: 3
			title: 'Coldplay'
			content: 'Coldplay is a british rock band.'
		},
	]
	mut doc := client.add_documents(index_name, documents)!
	assert doc.index_uid == index_name
	assert doc.type_ == 'documentAdditionOrUpdate'
	// Wait for the asynchronous indexing task to finish.
	time.sleep(500 * time.millisecond)
	mut doc_ := client.similar_documents(index_name,
		id: 1
	)!
	// TODO: Check the meilisearch.SimilarDocumentsResponse error
	println('doc_: ${doc_}')
	// assert doc_.facet_hits[0].count == 2
}
// Delete all created indexes and verify the server ends up with none.
// Runs last as cleanup for the other tests in this file.
fn test_delete_index() {
	mut client := setup_client()!
	mut index_list := client.list_indexes(limit: 100)!
	for index in index_list {
		client.delete_index(index.uid)!
		// Index deletion is asynchronous; pace the requests.
		time.sleep(500 * time.millisecond)
	}
	index_list = client.list_indexes(limit: 100)!
	assert index_list.len == 0
}

View File

@@ -0,0 +1,236 @@
module meilisearch
import freeflowuniverse.herolib.clients.httpconnection
import x.json2
import json
// add_documents adds (or replaces) documents in an index and returns the
// queued task information.
pub fn (mut client MeilisearchClient) add_documents[T](uid string, documents []T) !AddDocumentResponse {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/documents'
		method: .post
		data: json2.encode(documents)
	}
	raw := conn.post_json_str(request)!
	return json2.decode[AddDocumentResponse](raw)!
}
// GetDocumentArgs are the parameters for get_document.
@[params]
struct GetDocumentArgs {
pub mut:
	uid string @[required] // index uid
	document_id int @[required] // primary key of the document
	fields []string // optional subset of fields to return
	retrieve_vectors bool @[json: 'retrieveVectors'] // include vector data in the response
}
// get_document retrieves one document by its id, optionally restricting the
// returned fields via GetDocumentArgs.fields.
pub fn (mut client MeilisearchClient) get_document[T](args GetDocumentArgs) !T {
	mut params := map[string]string{}
	if args.fields.len > 0 {
		// comma-separated list of fields to return
		params['fields'] = args.fields.join(',')
	}
	params['retrieveVectors'] = args.retrieve_vectors.str()
	req := httpconnection.Request{
		prefix: 'indexes/${args.uid}/documents/${args.document_id}'
		params: params
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	return json.decode(T, response)
}
// get_documents retrieves a page of documents from an index, applying the
// limit/offset/fields/filter/sort options from DocumentsQuery.
pub fn (mut client MeilisearchClient) get_documents[T](uid string, query DocumentsQuery) ![]T {
	mut params := map[string]string{}
	params['limit'] = query.limit.str()
	params['offset'] = query.offset.str()
	if query.fields.len > 0 {
		params['fields'] = query.fields.join(',')
	}
	if query.filter.len > 0 {
		params['filter'] = query.filter
	}
	if query.sort.len > 0 {
		params['sort'] = query.sort.join(',')
	}
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/documents'
		params: params
	}
	mut http := client.httpclient()!
	response := http.get_json(req)!
	// The endpoint wraps results in a {results, total, offset, limit} envelope.
	decoded := json.decode(ListResponse[T], response)!
	return decoded.results
}
// DeleteDocumentArgs are the parameters for delete_document.
@[params]
struct DeleteDocumentArgs {
pub mut:
	uid string @[required] // index uid
	document_id int @[required] // primary key of the document to delete
}
// delete_document deletes one document by its id and returns the queued task info.
pub fn (mut client MeilisearchClient) delete_document(args DeleteDocumentArgs) !DeleteDocumentResponse {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${args.uid}/documents/${args.document_id}'
		method: .delete
	}
	raw := conn.delete(request)!
	return json2.decode[DeleteDocumentResponse](raw)!
}
// delete_all_documents deletes every document in an index and returns the
// queued task info.
pub fn (mut client MeilisearchClient) delete_all_documents(uid string) !DeleteDocumentResponse {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/documents'
		method: .delete
	}
	raw := conn.delete(request)!
	return json2.decode[DeleteDocumentResponse](raw)!
}
// update_documents updates documents in an index.
// `documents` is the pre-serialized JSON array payload (PUT /indexes/{uid}/documents).
pub fn (mut client MeilisearchClient) update_documents(uid string, documents string) !TaskInfo {
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/documents'
		method: .put
		data: documents
	}
	mut http := client.httpclient()!
	response := http.post_json_str(req)!
	return json2.decode[TaskInfo](response)!
}
// SearchArgs are the options accepted by POST /indexes/{uid}/search.
// Defaults mirror the Meilisearch API defaults.
@[params]
struct SearchArgs {
pub mut:
	q string @[json: 'q'; required] // query string
	offset int @[json: 'offset'] // number of documents to skip
	limit int = 20 @[json: 'limit'] // maximum number of documents returned
	hits_per_page int = 1 @[json: 'hitsPerPage'] // page size when paginating
	page int = 1 @[json: 'page'] // requested page number
	filter ?string // filter expression on filterable attributes
	facets ?[]string // facets to compute counts for
	attributes_to_retrieve []string = ['*'] @[json: 'attributesToRetrieve']
	attributes_to_crop ?[]string @[json: 'attributesToCrop']
	crop_length int = 10 @[json: 'cropLength']
	crop_marker string = '...' @[json: 'cropMarker']
	attributes_to_highlight ?[]string @[json: 'attributesToHighlight']
	highlight_pre_tag string = '<em>' @[json: 'highlightPreTag']
	highlight_post_tag string = '</em>' @[json: 'highlightPostTag']
	show_matches_position bool @[json: 'showMatchesPosition']
	sort ?[]string // sort expressions on sortable attributes
	matching_strategy string = 'last' @[json: 'matchingStrategy']
	show_ranking_score bool @[json: 'showRankingScore']
	show_ranking_score_details bool @[json: 'showRankingScoreDetails']
	ranking_score_threshold ?f64 @[json: 'rankingScoreThreshold']
	attributes_to_search_on []string = ['*'] @[json: 'attributesToSearchOn']
	hybrid ?map[string]string // hybrid (keyword + vector) search options
	vector ?[]f64 // query vector for vector search
	retrieve_vectors bool @[json: 'retrieveVectors']
	locales ?[]string // languages of the query
}
// search performs a search query on an index (POST /indexes/{uid}/search).
pub fn (mut client MeilisearchClient) search[T](uid string, args SearchArgs) !SearchResponse[T] {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/search'
		method: .post
		data: json.encode(args)
	}
	response := conn.post_json_str(request)!
	return json.decode(SearchResponse[T], response)
}
// FacetSearchArgs are the options accepted by POST /indexes/{uid}/facet-search.
@[params]
struct FacetSearchArgs {
	facet_name ?string @[json: 'facetName'] // Facet name to search values on
	facet_query ?string @[json: 'facetQuery'] // Search query for a given facet value. Defaults to placeholder search if not specified.
	q string // Query string
	filter ?string // Filter queries by an attribute's value
	matching_strategy string = 'last' @[json: 'matchingStrategy'] // Strategy used to match query terms within documents
	attributes_to_search_on ?[]string @[json: 'attributesToSearchOn'] // Restrict search to the specified attributes
}
// FacetSearchHitsResponse is a single facet hit in a facet-search response.
// NOTE(review): @[params] on a response struct looks copy-pasted; it is only
// meaningful for trailing-struct function arguments — confirm and remove.
@[params]
struct FacetSearchHitsResponse {
	value string @[json: 'value'] // Facet value matching the facetQuery
	count int @[json: 'count'] // Number of documents with a facet value matching value
}
// FacetSearchResponse is the body returned by POST /indexes/{uid}/facet-search.
// NOTE(review): @[params] on a response struct looks copy-pasted — confirm and remove.
@[params]
struct FacetSearchResponse {
	facet_hits []FacetSearchHitsResponse @[json: 'facetHits'] // Facet values matching the facetQuery
	facet_query string @[json: 'facetQuery'] // The original facetQuery
	processing_time_ms int @[json: 'processingTimeMs'] // Processing time of the query
}
// facet_search searches facet values of an index (POST /indexes/{uid}/facet-search).
pub fn (mut client MeilisearchClient) facet_search(uid string, args FacetSearchArgs) !FacetSearchResponse {
	mut conn := client.httpclient()!
	request := httpconnection.Request{
		prefix: 'indexes/${uid}/facet-search'
		method: .post
		data: json.encode(args)
	}
	response := conn.post_json_str(request)!
	return json.decode(FacetSearchResponse, response)
}
// SimilarDocumentsArgs are the options accepted by POST /indexes/{uid}/similar.
@[params]
struct SimilarDocumentsArgs {
	id SimilarDocumentsID @[json: 'id'] // Identifier of the target document (mandatory)
	embedder string = 'default' @[json: 'embedder'] // Embedder to use when computing recommendations
	attributes_to_retrieve []string = ['*'] @[json: 'attributesToRetrieve'] // Attributes to display in the returned documents
	offset int @[json: 'offset'] // Number of documents to skip
	limit int = 20 @[json: 'limit'] // Maximum number of documents returned
	filter ?string @[json: 'filter'] // Filter queries by an attribute's value
	show_ranking_score bool @[json: 'showRankingScore'] // Display the global ranking score of a document
	show_ranking_score_details bool @[json: 'showRankingScoreDetails'] // Display detailed ranking score information
	ranking_score_threshold ?f64 @[json: 'rankingScoreThreshold'] // Exclude results with low ranking scores
	retrieve_vectors bool @[json: 'retrieveVectors'] // Return document vector data
}
type SimilarDocumentsID = string | int
// SimilarDocumentsResponse is the body returned by POST /indexes/{uid}/similar.
// NOTE(review): @[params] on a response struct looks copy-pasted — confirm and remove.
@[params]
struct SimilarDocumentsResponse {
	hits []SimilarDocumentsHit @[json: 'hits'] // List of hit items
	id string @[json: 'id'] // Identifier of the response
	processing_time_ms int @[json: 'processingTimeMs'] // Processing time in milliseconds
	limit int = 20 @[json: 'limit'] // Maximum number of documents returned
	offset int @[json: 'offset'] // Number of documents to skip
	estimated_total_hits int @[json: 'estimatedTotalHits'] // Estimated total number of hits
}
// SimilarDocumentsHit is a single hit returned by the similar-documents endpoint.
// NOTE(review): assumes every document exposes a 'title' field — the real hit
// shape is index-specific; confirm against the indexed schema.
struct SimilarDocumentsHit {
	id SimilarDocumentsID @[json: 'id'] // Identifier of the hit item
	title string @[json: 'title'] // Title of the hit item
}
// similar_documents returns documents similar to a target document
// (POST /indexes/{uid}/similar). The vectorStore experimental feature is
// enabled first, since the endpoint requires it.
// Fixes: the previous version bound the feature-toggle result to an unused
// variable `res` (a compile error in V) and leaked a debug println.
pub fn (mut client MeilisearchClient) similar_documents(uid string, args SimilarDocumentsArgs) !SimilarDocumentsResponse {
	// Enable the experimental feature first; its returned flags are not needed.
	client.enable_eperimental_feature(vector_store: true)!
	req := httpconnection.Request{
		prefix: 'indexes/${uid}/similar'
		method: .post
		data: json.encode(args)
	}
	mut http := client.httpclient()!
	response := http.post_json_str(req)!
	return json.decode(SimilarDocumentsResponse, response)
}

View File

@@ -0,0 +1,86 @@
module meilisearch
import rand
import time
// Tracks the names of indexes created by these tests so the final cleanup
// test can delete and verify them.
__global (
	created_indices []string
)
// setup_client returns the shared test client from the module factory.
fn setup_client() !&MeilisearchClient {
	return get()!
}
// Tests the health endpoint for server status ('available' when up).
fn test_health() {
	mut client := setup_client()!
	health := client.health()!
	assert health.status == 'available'
}
// Tests the version endpoint to ensure all version fields are present.
fn test_version() {
	mut client := setup_client()!
	version := client.version()!
	assert version.pkg_version.len > 0
	assert version.commit_sha.len > 0
	assert version.commit_date.len > 0
}
// Tests index creation and verifies the queued task references the new index.
fn test_create_index() {
	index_name := 'test_' + rand.string(4)
	mut client := setup_client()!
	index := client.create_index(uid: index_name)!
	// remember for cleanup in test_delete_index
	created_indices << index_name
	assert index.index_uid == index_name
	assert index.type_ == 'indexCreation'
}
// Tests index retrieval and verifies if the retrieved index UID matches
fn test_get_index() {
	index_name := 'test_' + rand.string(4)
	primary_key := 'id'
	mut client := setup_client()!
	created := client.create_index(uid: index_name, primary_key: primary_key)!
	created_indices << index_name
	assert created.index_uid == index_name
	assert created.type_ == 'indexCreation'
	time.sleep(1 * time.second) // Wait for the async index creation to finish.
	fetched := client.get_index(index_name)!
	assert fetched.uid == index_name
	assert fetched.primary_key == primary_key
}
// Tests listing all indexes to ensure at least one index is present.
// Fix: removed the `index_name` local that was declared but never used —
// V rejects unused variables as a compile error.
fn test_list_indexes() {
	mut client := setup_client()!
	mut index_list := client.list_indexes()!
	assert index_list.len > 0
}
// Tests deletion of every index and confirms none remain; also clears the
// cleanup bookkeeping list.
fn test_delete_index() {
	mut client := setup_client()!
	mut index_list := client.list_indexes(limit: 100)!
	for index in index_list {
		client.delete_index(index.uid)!
		// Index deletion is asynchronous; pace the requests.
		time.sleep(500 * time.millisecond)
	}
	index_list = client.list_indexes(limit: 100)!
	assert index_list.len == 0
	created_indices.clear()
	assert created_indices.len == 0
}

View File

@@ -0,0 +1,11 @@
{
"folders": [
{
"path": "."
},
{
"path": "../../../herolib/clients/httpconnection"
}
],
"settings": {}
}

View File

@@ -0,0 +1,104 @@
module meilisearch
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
// Factory state: configured client instances keyed by name, plus the name of
// the instance used when no name is given.
__global (
	meilisearch_global map[string]&MeilisearchClient
	meilisearch_default string
)
/////////FACTORY
// ArgsGet selects a named client instance from the factory.
@[params]
pub struct ArgsGet {
pub mut:
	name string = 'default' // instance name; empty falls back to meilisearch_default
}
// args_get normalizes the requested instance name: an empty name falls back
// to the module-wide default, and finally to 'default'.
fn args_get(args_ ArgsGet) ArgsGet {
	mut args := args_
	if args.name.len == 0 {
		args.name = meilisearch_default
	}
	if args.name.len == 0 {
		args.name = 'default'
	}
	return args
}
// get returns a MeilisearchClient instance by name, loading (and, for the
// default setup, auto-creating) its configuration on first use.
// Fix: config_exists/config_save/config_load were called without arguments,
// so they always operated on 'default' even when a named instance was
// requested; the resolved args are now passed through.
pub fn get(args_ ArgsGet) !&MeilisearchClient {
	mut args := args_get(args_)
	if args.name !in meilisearch_global {
		if !config_exists(args) {
			// `default` is a module const; only auto-create config when enabled
			if default {
				config_save(args)!
			}
		}
		config_load(args)!
	}
	return meilisearch_global[args.name] or {
		// NOTE(review): reaching this means play()/set() never registered the
		// requested name; consider returning an error instead of panicking.
		println(meilisearch_global)
		panic('bug in get from factory: ')
	}
}
// config_exists reports whether a heroscript config exists for the named instance.
fn config_exists(args_ ArgsGet) bool {
	args := args_get(args_)
	mut context := base.context() or { panic('bug') }
	return context.hero_config_exists('meilisearch', args.name)
}
// config_load reads the stored heroscript config for the named instance and
// replays it, which registers the client via set().
fn config_load(args_ ArgsGet) ! {
	mut args := args_get(args_)
	mut context := base.context()!
	mut heroscript := context.hero_config_get('meilisearch', args.name)!
	play(heroscript: heroscript)!
}
// config_save persists the default heroscript config under the named instance.
fn config_save(args_ ArgsGet) ! {
	mut args := args_get(args_)
	mut context := base.context()!
	context.hero_config_set('meilisearch', args.name, heroscript_default()!)!
}
// set stores a configured client instance in the global factory map.
// Fix: the instance was stored under the hard-coded key 'default', which made
// any non-default instance unreachable via get(); key by the instance name.
fn set(o MeilisearchClient) ! {
	mut o2 := obj_init(o)!
	meilisearch_global[o2.name] = &o2
}
// PlayArgs drive the heroscript play() entry point.
@[params]
pub struct PlayArgs {
pub mut:
	name string = 'default' // instance name the script applies to
	heroscript string // if filled in then plbook will be made out of it
	plbook ?playbook.PlayBook // pre-parsed playbook, used instead of heroscript when set
	reset bool
	start bool
	stop bool
	restart bool
	delete bool
	configure bool // make sure there is at least one installed
}
// play executes 'meilisearch.configure' actions from a heroscript (or a
// pre-parsed playbook), registering each configured client via cfg_play/set.
pub fn play(args_ PlayArgs) ! {
	mut args := args_
	if args.heroscript == '' {
		// fall back to the built-in default config when no script is supplied
		args.heroscript = heroscript_default()!
	}
	mut plbook := args.plbook or { playbook.new(text: args.heroscript)! }
	mut install_actions := plbook.find(filter: 'meilisearch.configure')!
	if install_actions.len > 0 {
		for install_action in install_actions {
			mut p := install_action.params
			cfg_play(p)!
		}
	}
}
// switch sets the default instance name subsequently used by get() when no
// explicit name is passed.
pub fn switch(name string) {
	meilisearch_default = name
}

View File

@@ -0,0 +1,59 @@
module meilisearch
import freeflowuniverse.herolib.data.paramsparser
import freeflowuniverse.herolib.clients.httpconnection
import os
pub const version = '1.0.0' // client module version
const singleton = false // more than one named instance may exist
const default = true // auto-create the default instance config when missing
// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE TO STRUCT BELOW, IS STRUCTURED AS HEROSCRIPT
// heroscript_default returns the default configuration script saved on first use.
// SECURITY NOTE(review): the api_key below is a hardcoded example credential
// committed to source; replace it with a placeholder or env lookup.
pub fn heroscript_default() !string {
	heroscript := "
	!!meilisearch.configure
		name:'default'
		host:'http://localhost:7700'
		api_key:'be61fdce-c5d4-44bc-886b-3a484ff6c531'
		"
	return heroscript
}
// MeilisearchClient is the configuration object for one Meilisearch server
// connection; instances are created via the factory (get/play).
pub struct MeilisearchClient {
pub mut:
	name string = 'default' // instance name in the factory
	api_key string @[secret] // master or API key; sent as a Bearer token
	host string // base URL of the server, e.g. http://localhost:7700
}
// cfg_play builds a MeilisearchClient from parsed heroscript parameters and
// registers it in the factory. host and api_key are required parameters.
fn cfg_play(p paramsparser.Params) ! {
	// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
	mut mycfg := MeilisearchClient{
		name: p.get_default('name', 'default')!
		host: p.get('host')!
		api_key: p.get('api_key')!
	}
	set(mycfg)!
}
// obj_init prepares a freshly configured client object.
// Never call get() here; only the object itself may be touched.
fn obj_init(obj_ MeilisearchClient) !MeilisearchClient {
	// No per-instance initialization is needed yet.
	return obj_
}
// httpclient builds an HTTP connection to the configured Meilisearch host,
// attaching a Bearer authorization header when an API key is configured.
fn (mut self MeilisearchClient) httpclient() !&httpconnection.HTTPConnection {
	mut http_conn := httpconnection.new(
		name: 'meilisearch'
		url: self.host
	)!
	// Add authentication header if API key is provided
	if self.api_key.len > 0 {
		http_conn.default_header.add(.authorization, 'Bearer ${self.api_key}')
	}
	return http_conn
}

View File

@@ -0,0 +1,166 @@
module meilisearch
// ClientConfig holds configuration for MeilisearchClient.
// NOTE(review): timeout/max_retry are not referenced by the visible client
// code — confirm they are wired into the HTTP layer.
pub struct ClientConfig {
pub:
	host string // Base URL of Meilisearch server (e.g., "http://localhost:7700")
	api_key string // Master key or API key for authentication
	timeout int = 30 // Request timeout in seconds
	max_retry int = 3 // Maximum number of retries for failed requests
}
// Health represents the health status of the Meilisearch server (GET /health).
pub struct Health {
pub:
	status string @[json: 'status'] // 'available' when the server is up
}
// Version represents version information of the Meilisearch server (GET /version).
pub struct Version {
pub:
	pkg_version string @[json: 'pkgVersion'] // semantic version of the server
	commit_sha string @[json: 'commitSha'] // git commit the server was built from
	commit_date string @[json: 'commitDate'] // date of that commit
}
// IndexSettings represents all configurable settings for an index
// (GET/PATCH /indexes/{uid}/settings).
pub struct IndexSettings {
pub mut:
	ranking_rules []string @[json: 'rankingRules']
	distinct_attribute string @[json: 'distinctAttribute']
	searchable_attributes []string @[json: 'searchableAttributes']
	displayed_attributes []string @[json: 'displayedAttributes']
	stop_words []string @[json: 'stopWords']
	synonyms map[string][]string @[json: 'synonyms']
	filterable_attributes []string @[json: 'filterableAttributes']
	sortable_attributes []string @[json: 'sortableAttributes']
	typo_tolerance TypoTolerance @[json: 'typoTolerance']
}
// TypoTolerance settings for controlling typo behavior during search.
pub struct TypoTolerance {
pub mut:
	enabled bool = true @[json: 'enabled'] // whether typo tolerance is active
	min_word_size_for_typos MinWordSizeForTypos @[json: 'minWordSizeForTypos']
	disable_on_words []string @[json: 'disableOnWords'] // words never typo-matched
	disable_on_attributes []string @[json: 'disableOnAttributes'] // attributes never typo-matched
}
// MinWordSizeForTypos controls minimum word sizes for one/two typos.
pub struct MinWordSizeForTypos {
pub mut:
	one_typo int = 5 @[json: 'oneTypo'] // min word length to allow one typo
	two_typos int = 9 @[json: 'twoTypos'] // min word length to allow two typos
}
// DocumentsQuery represents query parameters for listing documents.
pub struct DocumentsQuery {
pub mut:
	limit int = 20 // maximum number of documents returned
	offset int // number of documents to skip
	fields []string // subset of fields to return
	filter string // filter expression on filterable attributes
	sort []string // sort expressions on sortable attributes
}
// TaskInfo represents information about an asynchronous task (GET /tasks/{uid}).
pub struct TaskInfo {
pub:
	uid int @[json: 'taskUid']
	index_uid string @[json: 'indexUid']
	status string @[json: 'status'] // enqueued / processing / succeeded / failed
	task_type string @[json: 'type']
	details map[string]string @[json: 'details']
	error string @[json: 'error']
	duration string @[json: 'duration']
	enqueued_at string @[json: 'enqueuedAt']
	started_at string @[json: 'startedAt']
	finished_at string @[json: 'finishedAt']
}
// CreateIndexArgs represents the arguments for creating an index.
@[params]
pub struct CreateIndexArgs {
pub mut:
	uid string // unique identifier of the new index
	primary_key string @[json: 'primaryKey'] // optional primary-key attribute name
}
// CreateIndexResponse is the queued-task summary returned by index creation.
pub struct CreateIndexResponse {
pub mut:
	uid int @[json: 'taskUid']
	index_uid string @[json: 'indexUid']
	status string @[json: 'status']
	type_ string @[json: 'type'] // 'indexCreation'
	enqueued_at string @[json: 'enqueuedAt']
}
// GetIndexResponse represents a retrieved index (GET /indexes/{uid}).
pub struct GetIndexResponse {
pub mut:
	uid string @[json: 'uid']
	created_at string @[json: 'createdAt']
	updated_at string @[json: 'updatedAt']
	primary_key string @[json: 'primaryKey']
}
// ListResponse is the generic {results, total, offset, limit} envelope used
// by Meilisearch list endpoints.
pub struct ListResponse[T] {
pub mut:
	results []T
	total int
	offset int
	limit int
}
// ListIndexArgs represents the arguments for listing indexes.
@[params]
pub struct ListIndexArgs {
pub mut:
	limit int = 20 // maximum number of indexes returned
	offset int // number of indexes to skip
}
// DeleteIndexResponse is the queued-task summary returned by index deletion.
pub struct DeleteIndexResponse {
pub mut:
	uid int @[json: 'taskUid']
	index_uid string @[json: 'indexUid']
	status string @[json: 'status']
	type_ string @[json: 'type'] // 'indexDeletion'
	enqueued_at string @[json: 'enqueuedAt']
}
// AddDocumentResponse is the queued-task summary returned by document addition.
struct AddDocumentResponse {
pub mut:
	task_uid int @[json: 'taskUid']
	index_uid string @[json: 'indexUid']
	status string
	type_ string @[json: 'type'] // 'documentAdditionOrUpdate'
	enqueued_at string @[json: 'enqueuedAt']
}
// DeleteDocumentResponse is the queued-task summary returned by document deletion.
struct DeleteDocumentResponse {
pub mut:
	task_uid int @[json: 'taskUid']
	index_uid string @[json: 'indexUid']
	status string
	type_ string @[json: 'type'] // 'documentDeletion'
	enqueued_at string @[json: 'enqueuedAt']
}
// SearchResponse is the body returned by POST /indexes/{uid}/search.
// Pagination fields differ depending on whether page/hitsPerPage were used.
struct SearchResponse[T] {
pub mut:
	hits []T @[json: 'hits']
	offset int @[json: 'offset']
	limit int @[json: 'limit']
	estimated_total_hits int @[json: 'estimatedTotalHits']
	total_hits int @[json: 'totalHits']
	total_pages int @[json: 'totalPages']
	hits_per_page int @[json: 'hitsPerPage']
	page int @[json: 'page']
	facet_stats map[string]map[string]f64 @[json: 'facetStats']
	processing_time_ms int @[json: 'processingTimeMs']
	query string @[json: 'query']
}

View File

@@ -0,0 +1,59 @@
## Meilisearch V Client
This is a simple V client for interacting with a [self-hosted Meilisearch instance](https://www.meilisearch.com/docs/learn/self_hosted/getting_started_with_self_hosted_meilisearch?utm_campaign=oss&utm_medium=home-page&utm_source=docs#setup-and-installation), enabling you to perform operations such as adding, retrieving, deleting, and searching documents within indexes.
### Getting Started with Self-Hosted Meilisearch
To use this V client, ensure you have a **self-hosted Meilisearch instance installed and running**.
This quick start will walk you through installing Meilisearch, adding documents, and performing your first search.
#### Requirements
To follow this setup, you will need `curl` installed
### Setup and Installation
To install Meilisearch locally, run the following command:
```bash
# Install Meilisearch
curl -L https://install.meilisearch.com | sh
```
### Running Meilisearch
Start Meilisearch with the following command, replacing `"aSampleMasterKey"` with your preferred master key:
```bash
# Launch Meilisearch
meilisearch --master-key="aSampleMasterKey"
```
---
### Running the V Client Tests
This client includes various test cases that demonstrate common operations in Meilisearch, such as creating indexes, adding documents, retrieving documents, deleting documents, and performing searches. To run the tests, you can use the following commands:
```bash
# Run document-related tests
v -enable-globals -stats herolib/clients/meilisearch/document_test.v
# Run index-related tests
v -enable-globals -stats herolib/clients/meilisearch/index_test.v
```
### Example: Getting Meilisearch Server Version
Here is a quick example of how to retrieve the Meilisearch server version using this V client:
```v
import freeflowuniverse.herolib.clients.meilisearch
mut client := meilisearch.get() or { panic(err) }
version := client.version() or { panic(err) }
println('Meilisearch version: ${version}')
```
This example connects to your local Meilisearch instance and prints the server version to verify your setup is correct.

View File

@@ -0,0 +1,109 @@
module mycelium
import net.http
import json
// Base URL of the local mycelium node's message API.
const server_url = 'http://localhost:8989/api/v1/messages'

// MessageDestination selects the receiver of a message by public key.
pub struct MessageDestination {
pub:
	pk string // public key of the destination node — TODO confirm encoding (hex?)
}

// PushMessageBody is the JSON body POSTed to send a message.
pub struct PushMessageBody {
pub:
	dst     MessageDestination
	payload string // presumably base64-encoded by the server contract — confirm
}

// InboundMessage is a message received from the local mycelium node.
pub struct InboundMessage {
pub:
	id      string
	src_ip  string @[json: 'srcIP'] // note the capitalised 'IP' — differs from 'dstIp'; matches server JSON
	src_pk  string @[json: 'srcPk']
	dst_ip  string @[json: 'dstIp']
	dst_pk  string @[json: 'dstPk']
	payload string
}

// MessageStatusResponse reports the delivery state of a previously sent message.
pub struct MessageStatusResponse {
pub:
	id       string
	dst      string
	state    string
	created  string
	deadline string
	msg_len  string @[json: 'msgLen'] // NOTE(review): server appears to send this as a string — confirm
}
// send_msg pushes a message to the node identified by public key `pk`.
// When `wait` is true the request asks the server to hold for a reply
// (reply_timeout=120s) and raises the client read timeout accordingly.
pub fn send_msg(pk string, payload string, wait bool) !InboundMessage {
	url := if wait { '${server_url}?reply_timeout=120' } else { server_url }
	body := json.encode(PushMessageBody{
		dst: MessageDestination{
			pk: pk
		}
		payload: payload
	})
	mut req := http.new_request(http.Method.post, url, body)
	req.add_custom_header('content-type', 'application/json')!
	if wait {
		// nanoseconds: 1200s, comfortably above the server-side reply timeout
		req.read_timeout = 1200000000000
	}
	resp := req.do()!
	return json.decode(InboundMessage, resp.body)!
}
// receive_msg pops the next inbound message from the local node.
// When `wait` is true the server long-polls for up to 60s and the client
// read timeout is raised to 600s.
pub fn receive_msg(wait bool) !InboundMessage {
	url := if wait { '${server_url}?timeout=60' } else { server_url }
	mut req := http.new_request(http.Method.get, url, '')
	if wait {
		req.read_timeout = 600000000000 // nanoseconds: 600s
	}
	resp := req.do()!
	return json.decode(InboundMessage, resp.body)!
}
// receive_msg_opt pops the next inbound message, returning `none` when the
// server answers 204 No Content (no message pending).
// When `wait` is true the server long-polls for up to 60s.
// NOTE: network/decoding failures panic because the `?` (option) return type
// cannot carry an error. Fixed: the original called `panic(error)` — `error`
// is the builtin error-constructor function, not the captured error value;
// the value available inside an `or` block is `err`.
pub fn receive_msg_opt(wait bool) ?InboundMessage {
	mut url := server_url
	if wait {
		url = '${url}?timeout=60'
	}
	mut req := http.new_request(http.Method.get, url, '')
	if wait {
		req.read_timeout = 600000000000
	}
	res := req.do() or { panic(err) }
	if res.status_code == 204 {
		return none
	}
	msg := json.decode(InboundMessage, res.body) or { panic(err) }
	return msg
}
// get_msg_status fetches the delivery status of the message with the given id.
pub fn get_msg_status(id string) !MessageStatusResponse {
	res := http.get('${server_url}/status/${id}')!
	return json.decode(MessageStatusResponse, res.body)!
}
// reply_msg answers the inbound message `id`, addressing the reply to `pk`.
// Returns the HTTP status reported by the node.
pub fn reply_msg(id string, pk string, payload string) !http.Status {
	body := PushMessageBody{
		dst: MessageDestination{
			pk: pk
		}
		payload: payload
	}
	res := http.post_json('${server_url}/reply/${id}', json.encode(body))!
	return res.status()
}

View File

@@ -0,0 +1,23 @@
module openai
// run heroscript starting from path, text or giturl
//```
// !!OpenAIclient.define
// name:'default'
// openaikey: ''
// description:'...'
//```
// heroplay processes every `!!openaiclient.define` action in the playbook and
// persists the resulting OpenAI client configuration.
// Params read per action: instance (default 'default'), openaikey (required),
// description (optional).
pub fn heroplay(mut plbook playbook.PlayBook) ! {
	for mut action in plbook.find(filter: 'openaiclient.define')! {
		mut p := action.params
		instance := p.get_default('instance', 'default')!
		// cfg.keyname = p.get('keyname')!
		mut cl := get(instance,
			openaikey: p.get('openaikey')!
			description: p.get_default('description', '')!
		)!
		cl.config_save()! // persist so later runs can get() without re-supplying the key
	}
}
//>TODO: this needs to be extended to chats, ...

110
lib/clients/openai/audio.v Normal file
View File

@@ -0,0 +1,110 @@
module openai
import json
import freeflowuniverse.herolib.clients.httpconnection
import os
import net.http
// AudioRespType enumerates the response formats accepted by the OpenAI audio
// endpoints (transcription / translation); see audio_resp_type_str for the
// wire-format strings.
pub enum AudioRespType {
	json
	text
	srt
	verbose_json
	vtt
}

// Model used for both transcription and translation requests.
const audio_model = 'whisper-1'

// Maps a file extension to the MIME type used in the multipart upload.
// Doubles as the whitelist of accepted extensions in create_audio_request.
const audio_mime_types = {
	'.mp3':  'audio/mpeg'
	'.mp4':  'audio/mp4'
	'.mpeg': 'audio/mpeg'
	'.mpga': 'audio/mp4'
	'.m4a':  'audio/mp4'
	'.wav':  'audio/vnd.wav'
	'.webm': 'application/octet-stream'
}
// audio_resp_type_str maps an AudioRespType variant to the string the API
// expects in the `response_format` form field.
fn audio_resp_type_str(i AudioRespType) string {
	return match i {
		.json { 'json' }
		.text { 'text' }
		.srt { 'srt' }
		.verbose_json { 'verbose_json' }
		.vtt { 'vtt' }
	}
}
// AudioArgs are the parameters shared by transcription and translation.
pub struct AudioArgs {
pub mut:
	filepath        string // local audio file; extension must be a key of audio_mime_types
	prompt          string // optional text guiding the model's style/continuation
	response_format AudioRespType
	temperature     int // NOTE(review): the API accepts fractional 0..1; int restricts this — confirm intent
	language        string // input language hint — assumption: ISO-639-1; confirm
}

// AudioResponse is the decoded JSON answer of the audio endpoints.
pub struct AudioResponse {
pub mut:
	text string
}
// create_transcription transcribes an audio file (in its original language).
// supported audio formats are mp3, mp4, mpeg, mpga, m4a, wav, or webm
pub fn (mut f OpenAIClient[Config]) create_transcription(args AudioArgs) !AudioResponse {
	return f.create_audio_request(args, 'audio/transcriptions')
}

// create_translation translates an audio file to English.
// supported audio formats are mp3, mp4, mpeg, mpga, m4a, wav, or webm
pub fn (mut f OpenAIClient[Config]) create_translation(args AudioArgs) !AudioResponse {
	return f.create_audio_request(args, 'audio/translations')
}

// create_tranlation is the original (misspelled) public name, kept so existing
// callers keep compiling; prefer create_translation.
@[deprecated: 'use create_translation instead']
pub fn (mut f OpenAIClient[Config]) create_tranlation(args AudioArgs) !AudioResponse {
	return f.create_audio_request(args, 'audio/translations')
}
// create_audio_request uploads the audio file at args.filepath as a multipart
// form to the given endpoint (transcriptions or translations) and decodes the
// JSON response. Errors when the extension is unsupported or the server does
// not answer 200.
fn (mut f OpenAIClient[Config]) create_audio_request(args AudioArgs, endpoint string) !AudioResponse {
	file_content := os.read_file(args.filepath)!
	ext := os.file_ext(args.filepath)
	// the MIME map doubles as the whitelist of supported formats
	// (fixed typo in the original error message: "extenion")
	file_mime_type := audio_mime_types[ext] or { return error('file extension not supported') }
	file_data := http.FileData{
		filename:     os.base(args.filepath)
		content_type: file_mime_type
		data:         file_content
	}
	form := http.PostMultipartFormConfig{
		files: {
			'file': [file_data]
		}
		form:  {
			'model':           audio_model
			'prompt':          args.prompt
			'response_format': audio_resp_type_str(args.response_format)
			'temperature':     args.temperature.str()
			'language':        args.language
		}
	}
	req := httpconnection.Request{
		prefix: endpoint
	}
	r := f.connection.post_multi_part(req, form)!
	if r.status_code != 200 {
		return error('got error from server: ${r.body}')
	}
	return json.decode(AudioResponse, r.body)!
}

View File

@@ -0,0 +1,70 @@
module openai
import json
// ChatCompletion is the decoded response of POST chat/completions.
pub struct ChatCompletion {
pub mut:
	id      string
	object  string
	created u32 // unix timestamp
	choices []Choice
	usage   Usage
}

// Choice is one generated completion alternative.
pub struct Choice {
pub mut:
	index         int
	message       MessageRaw
	finish_reason string // e.g. 'stop' — TODO confirm other values handled by callers
}

// Message is a chat message with a typed role (see RoleType).
pub struct Message {
pub mut:
	role    RoleType
	content string
}

// Usage reports token accounting for a completion.
pub struct Usage {
pub mut:
	prompt_tokens     int
	completion_tokens int
	total_tokens      int
}

// Messages wraps the ordered conversation passed to chat_completion.
pub struct Messages {
pub mut:
	messages []Message
}

// MessageRaw is the wire form of Message: the role as its API string.
pub struct MessageRaw {
pub mut:
	role    string
	content string
}

// ChatMessagesRaw is the JSON request body for chat/completions.
struct ChatMessagesRaw {
mut:
	model    string
	messages []MessageRaw
}
// chat_completion sends the conversation in `msgs` to chat/completions using
// the given model and returns the decoded completion.
// Roles and the model id are converted to their wire-format strings first.
pub fn (mut f OpenAIClient[Config]) chat_completion(model_type ModelType, msgs Messages) !ChatCompletion {
	payload := ChatMessagesRaw{
		model:    modelname_str(model_type)
		messages: msgs.messages.map(MessageRaw{
			role:    roletype_str(it.role)
			content: it.content
		})
	}
	r := f.connection.post_json_str(prefix: 'chat/completions', data: json.encode(payload))!
	return json.decode(ChatCompletion, r)!
}

View File

@@ -0,0 +1,54 @@
module openai
import json
// EmbeddingModel enumerates the supported embedding models.
pub enum EmbeddingModel {
	text_embedding_ada
}

// embedding_model_str maps the enum to the API model identifier.
fn embedding_model_str(e EmbeddingModel) string {
	return match e {
		.text_embedding_ada {
			'text-embedding-ada-002'
		}
	}
}

// EmbeddingCreateArgs are the caller-facing arguments for create_embeddings.
@[params]
pub struct EmbeddingCreateArgs {
	input []string       @[required] // texts to embed; one result vector per entry
	model EmbeddingModel @[required]
	user  string // optional end-user id — assumption: for abuse monitoring; confirm
}

// EmbeddingCreateRequest is the JSON body sent to the embeddings endpoint.
pub struct EmbeddingCreateRequest {
	input []string
	model string
	user  string
}

// Embedding is one result vector.
pub struct Embedding {
pub mut:
	object    string
	embedding []f32
	index     int // position of the corresponding input string
}

// EmbeddingResponse is the decoded embeddings endpoint response.
pub struct EmbeddingResponse {
pub mut:
	object string
	data   []Embedding
	model  string
	usage  Usage
}
// create_embeddings requests vector embeddings for args.input and decodes the
// response; the model enum is translated to its API identifier first.
pub fn (mut f OpenAIClient[Config]) create_embeddings(args EmbeddingCreateArgs) !EmbeddingResponse {
	payload := json.encode(EmbeddingCreateRequest{
		input: args.input
		model: embedding_model_str(args.model)
		user:  args.user
	})
	r := f.connection.post_json_str(prefix: 'embeddings', data: payload)!
	return json.decode(EmbeddingResponse, r)!
}

View File

@@ -0,0 +1,64 @@
module openai
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui as gui
import freeflowuniverse.herolib.clients.httpconnection
// import freeflowuniverse.herolib.ui.console
// OpenAIClient is a configured OpenAI API client; T carries the config type.
pub struct OpenAIClient[T] {
	base.BaseConfig[T]
pub mut:
	connection &httpconnection.HTTPConnection
}

// Config holds per-instance client settings.
@[params]
pub struct Config {
pub mut:
	openaikey   string @[secret] // API key; stored via BaseConfig when provided
	description string
}

// get returns the client for `instance`.
// When cfg.openaikey is set the config is (re)saved (.set action); otherwise
// the previously stored config is loaded (.get). The HTTP connection targets
// https://api.openai.com/v1/ with a bearer-token Authorization header.
pub fn get(instance string, cfg Config) !OpenAIClient[Config] {
	mut self := OpenAIClient[Config]{
		connection: &httpconnection.HTTPConnection{}
	}
	if cfg.openaikey.len > 0 {
		// first the type of the instance, then name of instance, then action
		self.init('openaiclient', instance, .set, cfg)!
	} else {
		self.init('openaiclient', instance, .get)!
	}
	mut conn := httpconnection.new(
		name: 'openai'
		url: 'https://api.openai.com/v1/'
	)!
	conn.default_header.add(.authorization, 'Bearer ${self.config()!.openaikey}')
	// req.add_custom_header('x-disable-pagination', 'True') !
	self.connection = conn
	return self
}
// get a new OpenAI client, will create if it doesn't exist or ask for new configuration
// configure interactively creates/updates an OpenAI client instance,
// prompting for the instance name (when not given) and the API key, then
// persists the configuration via get().
pub fn configure(instance_ string) ! {
	mut cfg := Config{}
	mut ui := gui.new()!
	mut instance := instance_
	if instance == '' {
		instance = ui.ask_question(
			// fixed copy/paste from the dagu module ('name for Dagu client');
			// also give a usable default instead of the empty string
			question: 'name for OpenAI client'
			default: 'default'
		)!
	}
	cfg.openaikey = ui.ask_question(
		question: '\nPlease specify your openai secret (instance:${instance}).'
	)!
	get(instance, cfg)!
}

View File

@@ -0,0 +1,90 @@
module openai
import json
import freeflowuniverse.herolib.clients.httpconnection
import os
import net.http
// MIME type for uploaded fine-tune training files (JSON Lines).
const jsonl_mime_type = 'text/jsonl'

// FileUploadArgs parameterizes upload_file.
@[params]
pub struct FileUploadArgs {
pub:
	filepath string // local path of the file to upload
	purpose  string // e.g. 'fine-tune' — TODO confirm accepted values
}

// File is OpenAI's file metadata object.
pub struct File {
pub mut:
	id         string
	object     string
	bytes      int
	created_at int // unix timestamp
	filename   string
	purpose    string
}

// Files wraps the list endpoint's `data` array.
pub struct Files {
pub mut:
	data []File
}

// DeleteResp acknowledges a file deletion.
pub struct DeleteResp {
pub mut:
	id      string
	object  string
	deleted bool
}
// upload_file uploads a local file to the client org as a multipart form,
// usually for fine tuning (content is sent with the jsonl MIME type).
// Errors when the file cannot be read or the server does not answer 200.
pub fn (mut f OpenAIClient[Config]) upload_file(args FileUploadArgs) !File {
	file_content := os.read_file(args.filepath)!
	file_data := http.FileData{
		filename: os.base(args.filepath)
		data: file_content
		content_type: jsonl_mime_type
	}
	form := http.PostMultipartFormConfig{
		files: {
			'file': [file_data]
		}
		form: {
			'purpose': args.purpose
		}
	}
	req := httpconnection.Request{
		prefix: 'files'
	}
	r := f.connection.post_multi_part(req, form)!
	if r.status_code != 200 {
		return error('got error from server: ${r.body}')
	}
	return json.decode(File, r.body)!
}
// list_files returns metadata for every file in the client org.
pub fn (mut f OpenAIClient[Config]) list_files() !Files {
	return json.decode(Files, f.connection.get(prefix: 'files')!)!
}

// delete_file removes the file with the given id.
pub fn (mut f OpenAIClient[Config]) delete_file(file_id string) !DeleteResp {
	return json.decode(DeleteResp, f.connection.delete(prefix: 'files/${file_id}')!)!
}

// get_file returns the metadata of a single file.
pub fn (mut f OpenAIClient[Config]) get_file(file_id string) !File {
	return json.decode(File, f.connection.get(prefix: 'files/${file_id}')!)!
}

// get_file_content returns the raw content of a file.
pub fn (mut f OpenAIClient[Config]) get_file_content(file_id string) !string {
	return f.connection.get(prefix: 'files/${file_id}/content')!
}

View File

@@ -0,0 +1,93 @@
module openai
import json
// FineTune is a fine-tune job object as returned by the API.
pub struct FineTune {
pub:
	id               string
	object           string
	model            string
	created_at       int // unix timestamp
	events           []FineTuneEvent
	fine_tuned_model string
	hyperparams      FineTuneHyperParams
	organization_id  string
	result_files     []File
	status           string
	validation_files []File
	training_files   []File
	updated_at       int
}

// FineTuneEvent is one log entry in a fine-tune job's history.
pub struct FineTuneEvent {
pub:
	object     string
	created_at int
	level      string
	message    string
}

// FineTuneHyperParams echoes the hyper-parameters used for a job.
pub struct FineTuneHyperParams {
pub:
	batch_size               int
	learning_rate_multiplier f64
	n_epochs                 int
	prompt_loss_weight       f64
}

// FineTuneList wraps the list endpoint result.
pub struct FineTuneList {
pub:
	object string
	data   []FineTune
}

// FineTuneEventList wraps the events endpoint result.
pub struct FineTuneEventList {
pub:
	object string
	data   []FineTuneEvent
}

// FineTuneCreateArgs are the creation parameters; this struct is JSON-encoded
// directly as the request body, so field names must match the API's snake_case.
@[params]
pub struct FineTuneCreateArgs {
pub mut:
	training_file                  string @[required] // id of a previously uploaded file
	model                          string
	n_epochs                       int = 4
	batch_size                     int
	learning_rate_multiplier       f32
	prompt_loss_weight             f64
	compute_classification_metrics bool
	suffix                         string
}
// create_fine_tune starts a fine-tune job from an already uploaded file.
pub fn (mut f OpenAIClient[Config]) create_fine_tune(args FineTuneCreateArgs) !FineTune {
	r := f.connection.post_json_str(prefix: 'fine-tunes', data: json.encode(args))!
	return json.decode(FineTune, r)!
}

// list_fine_tunes returns every fine-tune job of this account.
pub fn (mut f OpenAIClient[Config]) list_fine_tunes() !FineTuneList {
	return json.decode(FineTuneList, f.connection.get(prefix: 'fine-tunes')!)!
}

// get_fine_tune fetches a single fine-tune job by id.
pub fn (mut f OpenAIClient[Config]) get_fine_tune(fine_tune string) !FineTune {
	return json.decode(FineTune, f.connection.get(prefix: 'fine-tunes/${fine_tune}')!)!
}

// cancel_fine_tune cancels a job that has not finished yet.
pub fn (mut f OpenAIClient[Config]) cancel_fine_tune(fine_tune string) !FineTune {
	r := f.connection.post_json_str(prefix: 'fine-tunes/${fine_tune}/cancel')!
	return json.decode(FineTune, r)!
}

// list_fine_tune_events returns all events of a fine-tune job.
pub fn (mut f OpenAIClient[Config]) list_fine_tune_events(fine_tune string) !FineTuneEventList {
	return json.decode(FineTuneEventList, f.connection.get(prefix: 'fine-tunes/${fine_tune}/events')!)!
}

189
lib/clients/openai/images.v Normal file
View File

@@ -0,0 +1,189 @@
module openai
import json
import net.http
import os
import freeflowuniverse.herolib.clients.httpconnection
// PNG is the only format the image endpoints accept/emit.
// NOTE(review): the name has a typo ('mine' vs 'mime'); kept as-is because
// other functions in this file reference it.
const image_mine_type = 'image/png'

// ImageSize enumerates the square output resolutions supported by DALL-E.
pub enum ImageSize {
	size_256_256
	size_512_512
	size_1024_1024
}

// image_size_str renders an ImageSize as the API's 'WxH' string.
fn image_size_str(i ImageSize) string {
	return match i {
		.size_256_256 { '256x256' }
		.size_512_512 { '512x512' }
		.size_1024_1024 { '1024x1024' }
	}
}

// ImageRespType selects how generated images are returned: by URL or inline
// as base64 JSON.
pub enum ImageRespType {
	url
	b64_json
}

// image_resp_type_str renders an ImageRespType as the API's response_format value.
fn image_resp_type_str(i ImageRespType) string {
	return match i {
		.url { 'url' }
		.b64_json { 'b64_json' }
	}
}
// ImageCreateArgs: parameters for create_image (text prompt → image).
pub struct ImageCreateArgs {
pub mut:
	prompt     string
	num_images int // how many images to generate
	size       ImageSize
	format     ImageRespType // url or inline b64_json
	user       string
}

// ImageEditArgs: parameters for create_edit_image. mask_path may be '' when
// the source PNG itself has transparency marking the editable region.
pub struct ImageEditArgs {
pub mut:
	image_path string
	mask_path  string
	prompt     string
	num_images int
	size       ImageSize
	format     ImageRespType
	user       string
}

// ImageVariationArgs: parameters for create_variation_image.
pub struct ImageVariationArgs {
pub mut:
	image_path string
	num_images int
	size       ImageSize
	format     ImageRespType
	user       string
}

// ImageRequest is the JSON body for images/generations.
pub struct ImageRequest {
pub mut:
	prompt          string
	n               int
	size            string
	response_format string
	user            string
}

// ImageResponse holds one generated image; only the field matching the
// requested response_format is populated.
pub struct ImageResponse {
pub mut:
	url      string
	b64_json string
}

// Images is the decoded response of the image endpoints.
pub struct Images {
pub mut:
	created int // unix timestamp
	data    []ImageResponse
}
// create_image generates `num_images` images from a text prompt and returns
// them in the requested format (url or b64_json).
pub fn (mut f OpenAIClient[Config]) create_image(args ImageCreateArgs) !Images {
	payload := json.encode(ImageRequest{
		prompt:          args.prompt
		n:               args.num_images
		size:            image_size_str(args.size)
		response_format: image_resp_type_str(args.format)
		user:            args.user
	})
	r := f.connection.post_json_str(prefix: 'images/generations', data: payload)!
	return json.decode(Images, r)!
}
// create_edit_image edits an existing image according to a prompt.
// The image needs to be PNG and either transparent (transparency marks the
// editable region) or accompanied by a same-size mask; the amount of images
// returned is specified by `num_images`.
// Errors when a file cannot be read or the server does not answer 200.
pub fn (mut f OpenAIClient[Config]) create_edit_image(args ImageEditArgs) !Images {
	image_content := os.read_file(args.image_path)!
	image_file := http.FileData{
		filename: os.base(args.image_path)
		content_type: image_mine_type
		data: image_content
	}
	// mask is optional: attached only when a path was provided
	mut mask_file := []http.FileData{}
	if args.mask_path != '' {
		mask_content := os.read_file(args.mask_path)!
		mask_file << http.FileData{
			filename: os.base(args.mask_path)
			content_type: image_mine_type
			data: mask_content
		}
	}
	form := http.PostMultipartFormConfig{
		files: {
			'image': [image_file]
			'mask':  mask_file // empty list when no mask — assumes empty part is tolerated; confirm
		}
		form: {
			'prompt':          args.prompt
			'n':               args.num_images.str()
			'response_format': image_resp_type_str(args.format)
			'size':            image_size_str(args.size)
			'user':            args.user
		}
	}
	req := httpconnection.Request{
		prefix: 'images/edits'
	}
	r := f.connection.post_multi_part(req, form)!
	if r.status_code != 200 {
		return error('got error from server: ${r.body}')
	}
	return json.decode(Images, r.body)!
}
// create_variation_image creates variations of the given PNG image; the
// amount of images returned is specified by `num_images`.
// Errors when the file cannot be read or the server does not answer 200.
pub fn (mut f OpenAIClient[Config]) create_variation_image(args ImageVariationArgs) !Images {
	image_content := os.read_file(args.image_path)!
	image_file := http.FileData{
		filename: os.base(args.image_path)
		content_type: image_mine_type
		data: image_content
	}
	form := http.PostMultipartFormConfig{
		files: {
			'image': [image_file]
		}
		form: {
			'n':               args.num_images.str()
			'response_format': image_resp_type_str(args.format)
			'size':            image_size_str(args.size)
			'user':            args.user
		}
	}
	req := httpconnection.Request{
		prefix: 'images/variations'
	}
	r := f.connection.post_multi_part(req, form)!
	if r.status_code != 200 {
		return error('got error from server: ${r.body}')
	}
	return json.decode(Images, r.body)!
}

View File

@@ -0,0 +1,75 @@
module openai
// ModelType enumerates the chat/audio model ids this client can request;
// see modelname_str for the corresponding API identifiers.
pub enum ModelType {
	gpt_3_5_turbo
	gpt_4
	gpt_4_0613
	gpt_4_32k
	gpt_4_32k_0613
	gpt_3_5_turbo_0613
	gpt_3_5_turbo_16k
	gpt_3_5_turbo_16k_0613
	whisper_1
}
// modelname_str maps a ModelType variant to the model id string the API
// expects. The original prefixed the match with two `if` checks for .gpt_4
// and .gpt_3_5_turbo that duplicated the corresponding match arms (which then
// became dead code); the redundant checks are removed — behavior unchanged.
fn modelname_str(e ModelType) string {
	return match e {
		.gpt_4 { 'gpt-4' }
		.gpt_3_5_turbo { 'gpt-3.5-turbo' }
		.gpt_4_0613 { 'gpt-4-0613' }
		.gpt_4_32k { 'gpt-4-32k' }
		.gpt_4_32k_0613 { 'gpt-4-32k-0613' }
		.gpt_3_5_turbo_0613 { 'gpt-3.5-turbo-0613' }
		.gpt_3_5_turbo_16k { 'gpt-3.5-turbo-16k' }
		.gpt_3_5_turbo_16k_0613 { 'gpt-3.5-turbo-16k-0613' }
		.whisper_1 { 'whisper-1' }
	}
}
// RoleType enumerates the author roles of a chat message;
// see roletype_str for the wire-format strings.
pub enum RoleType {
	system
	user
	assistant
	function
}
// roletype_str renders a RoleType as the chat API's role string.
fn roletype_str(x RoleType) string {
	return match x {
		.system { 'system' }
		.user { 'user' }
		.assistant { 'assistant' }
		.function { 'function' }
	}
}

View File

@@ -0,0 +1,46 @@
module openai
import json
// Model describes one model available to the account.
pub struct Model {
pub mut:
	id         string
	created    int // unix timestamp
	object     string
	owned_by   string
	root       string
	parent     string
	permission []ModelPermission
}

// ModelPermission is one entry of a model's permission list.
pub struct ModelPermission {
pub mut:
	id                   string
	created              int
	object               string
	allow_create_engine  bool
	allow_sampling       bool
	allow_logprobs       bool
	allow_search_indices bool
	allow_view           bool
	allow_fine_tuning    bool
	organization         string
	is_blocking          bool
}

// Models wraps the list endpoint's `data` array.
pub struct Models {
pub mut:
	data []Model
}
// list_models returns the models currently available to this account.
pub fn (mut f OpenAIClient[Config]) list_models() !Models {
	return json.decode(Models, f.connection.get(prefix: 'models')!)!
}

// get_model returns the details of a single model by id.
pub fn (mut f OpenAIClient[Config]) get_model(model string) !Model {
	return json.decode(Model, f.connection.get(prefix: 'models/${model}')!)!
}

View File

@@ -0,0 +1,80 @@
module openai
import json
// ModerationModel selects which moderation classifier version to use.
pub enum ModerationModel {
	text_moderation_latest
	text_moderation_stable
}

// moderation_model_str maps the enum to the API model identifier.
fn moderation_model_str(m ModerationModel) string {
	return match m {
		.text_moderation_latest {
			'text-moderation-latest'
		}
		.text_moderation_stable {
			'text-moderation-stable'
		}
	}
}
// ModerationRequest is the JSON body sent to the moderations endpoint.
@[params]
pub struct ModerationRequest {
mut:
	input string
	model string
}

// ModerationResult is the classification verdict for one input.
pub struct ModerationResult {
pub mut:
	categories      ModerationResultCategories
	category_scores ModerationResultCategoryScores
	flagged         bool // true when any category violates the policy
}

// ModerationResultCategories holds the per-category boolean verdicts; the
// json aliases map V-safe field names to the API's slash/hyphen names.
pub struct ModerationResultCategories {
pub mut:
	sexual                 bool
	hate                   bool
	harassment             bool
	selfharm               bool @[json: 'self-harm']
	sexual_minors          bool @[json: 'sexual/minors']
	hate_threatening       bool @[json: 'hate/threatening']
	violence_graphic       bool @[json: 'violence/graphic']
	selfharm_intent        bool @[json: 'self-harm/intent']
	selfharm_instructions  bool @[json: 'self-harm/instructions']
	harassment_threatening bool @[json: 'harassment/threatening']
	violence               bool
}

// ModerationResultCategoryScores holds the per-category confidence scores.
pub struct ModerationResultCategoryScores {
pub mut:
	sexual                 f32
	hate                   f32
	harassment             f32
	selfharm               f32 @[json: 'self-harm']
	sexual_minors          f32 @[json: 'sexual/minors']
	hate_threatening       f32 @[json: 'hate/threatening']
	violence_graphic       f32 @[json: 'violence/graphic']
	selfharm_intent        f32 @[json: 'self-harm/intent']
	selfharm_instructions  f32 @[json: 'self-harm/instructions']
	harassment_threatening f32 @[json: 'harassment/threatening']
	violence               f32
}

// ModerationResponse is the decoded moderations endpoint response.
pub struct ModerationResponse {
pub mut:
	id      string
	model   string
	results []ModerationResult
}
// create_moderation classifies `input` against OpenAI's content policy using
// the chosen moderation model.
pub fn (mut f OpenAIClient[Config]) create_moderation(input string, model ModerationModel) !ModerationResponse {
	payload := json.encode(ModerationRequest{
		input: input
		model: moderation_model_str(model)
	})
	r := f.connection.post_json_str(prefix: 'moderations', data: payload)!
	return json.decode(ModerationResponse, r)!
}

View File

@@ -0,0 +1,50 @@
# OpenAI
An implementation of an OpenAI client using Vlang.
## Supported methods
- List available models
- Chat Completion
- Translate Audio
- Transcribe Audio
- Create image based on prompt
- Edit an existing image
- Create variation of an image
## Usage
To use the client you need an OpenAI key, which can be generated [here](https://platform.openai.com/account/api-keys).
The key should be exposed in an environment variable as following:
```bash
export OPENAI_API_KEY=<your-api-key>
```
To get a new instance of the client:
```v
import freeflowuniverse.herolib.clients.openai
ai_cli := openai.new()!
```
Then it is possible to perform all the listed operations:
```v
// listing models
models := ai_cli.list_models()!
// creating a new chat completion
mut msg := []openai.Message{}
msg << openai.Message{
	role: openai.RoleType.user
	content: 'Say this is a test!'
}
mut msgs := openai.Messages{
	messages: msg
}
res := ai_cli.chat_completion(openai.ModelType.gpt_3_5_turbo, msgs)!
```

View File

@@ -0,0 +1,56 @@
module postgres
import freeflowuniverse.herolib.core.base
import db.pg
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.ui.console
// pub struct PostgresClient {
// base.BaseConfig
// pub mut:
// config Config
// db pg.DB
// }
// @[params]
// pub struct ClientArgs {
// pub mut:
// instance string @[required]
// // playargs ?play.PlayArgs
// }
// pub fn get(clientargs ClientArgs) !PostgresClient {
// // mut plargs := clientargs.playargs or {
// // // play.PlayArgs
// // // {
// // // }
// // }
// // mut cfg := configurator(clientargs.instance, plargs)!
// // mut args := cfg.get()!
// args.instance = texttools.name_fix(args.instance)
// if args.instance == '' {
// args.instance = 'default'
// }
// // console.print_debug(args)
// mut db := pg.connect(
// host: args.host
// user: args.user
// port: args.port
// password: args.password
// dbname: args.dbname
// )!
// // console.print_debug(postgres_client)
// return PostgresClient{
// instance: args.instance
// db: db
// config: args
// }
// }
// LocalConfig describes a locally managed postgres instance.
// NOTE(review): currently unreferenced in this file — confirm callers before removal.
struct LocalConfig {
	name   string
	path   string // data directory — assumption; confirm
	passwd string
}

107
lib/clients/postgres/cmds.v Normal file
View File

@@ -0,0 +1,107 @@
module postgres
import db.pg
import freeflowuniverse.herolib.core.texttools
import freeflowuniverse.herolib.osal
import os
import freeflowuniverse.herolib.ui.console
// check verifies the connection is alive by running `SELECT version();`.
// Fix: the original error string used `can\t` — `\t` is a tab escape inside
// a V single-quoted string, a typo for "can't".
pub fn (mut self PostgresClient[Config]) check() ! {
	mut db := self.db
	db.exec('SELECT version();') or { return error("can't select version from database.\n${self}") }
}
// exec runs the given SQL, appending a terminating ';' when missing, and
// returns the result rows. Errors include host/dbname context.
// Fix: `can\t` (tab escape) → "can't" in the error message.
pub fn (mut self PostgresClient[Config]) exec(c_ string) ![]pg.Row {
	mut db := self.db
	mut c := c_
	if !(c.trim_space().ends_with(';')) {
		c += ';'
	}
	config := self.config()!
	return db.exec(c) or {
		return error("can't execute query on ${config.host}:${config.dbname}.\n${c}\n${err}")
	}
}
// db_exists reports whether a database named `name_` exists.
// Errors when the catalog unexpectedly returns more than one row.
pub fn (mut self PostgresClient[Config]) db_exists(name_ string) !bool {
	mut db := self.db
	rows := db.exec("SELECT datname FROM pg_database WHERE datname='${name_}';")!
	if rows.len > 1 {
		return error('should not have more than 1 db with name ${name_}')
	}
	return rows.len == 1
}
// db_create creates database `name_` (normalized via name_fix) when absent.
// Fix: existence checks now use the normalized name actually passed to
// CREATE DATABASE; the original checked the raw `name_`, so any name changed
// by name_fix would be re-created on every call and the post-check could fail.
pub fn (mut self PostgresClient[Config]) db_create(name_ string) ! {
	name := texttools.name_fix(name_)
	mut db := self.db
	if !self.db_exists(name)! {
		console.print_header(' db create: ${name}')
		db.exec('CREATE DATABASE ${name};')!
	}
	if !self.db_exists(name)! {
		return error('Could not create db: ${name}, could not find in DB.')
	}
}
// db_delete drops database `name_` (normalized via name_fix) when present.
// Fix: existence checks now use the normalized name actually passed to
// DROP DATABASE (original checked the raw `name_`); the failure message also
// said "could not find in DB" when the database was in fact still present.
pub fn (mut self PostgresClient[Config]) db_delete(name_ string) ! {
	mut db := self.db
	name := texttools.name_fix(name_)
	self.check()!
	if self.db_exists(name)! {
		console.print_header(' db delete: ${name}')
		db.exec('DROP DATABASE ${name};')!
	}
	if self.db_exists(name)! {
		return error('Could not delete db: ${name}, still found in DB.')
	}
}
// db_names lists user databases, excluding templates and the built-in
// postgres/root databases.
pub fn (mut self PostgresClient[Config]) db_names() ![]string {
	sqlstr := "SELECT datname FROM pg_database WHERE datistemplate = false and datname != 'postgres' and datname != 'root';"
	rows := self.exec(sqlstr)!
	mut names := []string{cap: rows.len}
	for row in rows {
		// vals[0] or{} guards the index; the second or{} unwraps the optional cell
		first := row.vals[0] or { '' }
		names << first or { '' }
	}
	return names
}
// BackupParams configures backup(); an empty dbname means "all user databases".
@[params]
pub struct BackupParams {
pub mut:
	dbname string // database to dump; '' = iterate over db_names()
	dest   string // destination directory (created when missing); required
}

// backup dumps one database (or, when dbname is '', every user database)
// to '<dest>/<dbname>.bak' using pg_dump custom format (--format=c).
// The password is passed to the spawned shell via PGPASSWORD.
pub fn (mut self PostgresClient[Config]) backup(args BackupParams) ! {
	if args.dest == '' {
		return error('specify the destination please')
	}
	if !os.exists(args.dest) {
		os.mkdir_all(args.dest)!
	}
	if args.dbname == '' {
		// recurse once per database
		for dbname in self.db_names()! {
			self.backup(dbname: dbname, dest: args.dest)!
		}
	} else {
		config := self.config()!
		cmd := '
		export PGPASSWORD=\'${config.password}\'
		pg_dump -h ${config.host} -p ${config.port} -U ${config.user} --dbname=${args.dbname} --format=c > "${args.dest}/${args.dbname}.bak"
		' // console.print_debug(cmd)
		osal.exec(cmd: cmd, stdout: true)!
	}
}

View File

@@ -0,0 +1,91 @@
module postgres
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.ui
import freeflowuniverse.herolib.ui.console
// Config are the connection settings for a postgres client instance.
@[params]
pub struct Config {
pub mut:
	instance   string = 'default' // name under which this config is stored
	user       string = 'root'
	port       int    = 5432
	host       string = 'localhost'
	password   string
	dbname     string = 'postgres'
	heroscript string // optional heroscript source — NOTE(review): not wired up yet; confirm
	reset      bool
}

// configure stores `cfg_` under `instance` and returns a connected client.
pub fn configure(instance string, cfg_ Config) !PostgresClient[Config] {
	mut config := cfg_
	mut server := PostgresClient[Config]{}
	server.init('postgres', instance, .set, config)!
	return get(instance)!
}
// configure_interactive prompts on the console for every connection setting
// (instance name, user, password, dbname, host, port) and stores the result.
// NOTE(review): the `session` parameter is currently unused in the body — confirm.
pub fn configure_interactive(args_ Config, mut session base.Session) ! {
	mut args := args_
	mut myui := ui.new()!
	console.clear()
	console.print_debug('\n## Configure Postgres Client')
	console.print_debug('============================\n\n')
	instance := myui.ask_question(
		question: 'name for postgres client'
		default: args.instance
	)!
	args.user = myui.ask_question(
		question: 'user'
		minlen: 3
		default: args.user
	)!
	args.password = myui.ask_question(
		question: 'password'
		minlen: 3
		default: args.password
	)!
	args.dbname = myui.ask_question(
		question: 'dbname'
		minlen: 3
		default: args.dbname
	)!
	args.host = myui.ask_question(
		question: 'host'
		minlen: 3
		default: args.host
	)!
	// port is asked as text, then converted
	mut port := myui.ask_question(
		question: 'port'
		default: '${args.port}'
	)!
	args.port = port.int()
	mut client := PostgresClient[Config]{}
	client.init('postgres', instance, .set, args)!
}
// pub fn play_session(mut session base.Session) ! {
// for mut action in session.plbook.find(filter: 'postgresclient.define')! {
// mut p := action.params
// mut args := config()
// panic('implement')
// // args.instance = p.get_default('name','')!
// // if args.instance == ""{
// // args.instance = p.get_default('instance', 'default')!
// // }
// // args.mail_from = p.get('mail_from')!
// // args.smtp_addr = p.get('smtp_addr')!
// // args.smtp_login = p.get('smtp_login')!
// // args.smtp_passwd = p.get('smtp_passwd')!
// // args.smpt_port = p.get_int('smpt_port')!
// // mut c:=configurator(args.instance,session:session)!
// // c.set(args)!
// }
// }

View File

@@ -0,0 +1,29 @@
module postgres
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.ui as gui
import freeflowuniverse.herolib.ui.console
import db.pg
// PostgresClient bundles the stored config (BaseConfig) with a live pg.DB handle.
pub struct PostgresClient[T] {
	base.BaseConfig[T]
pub mut:
	db pg.DB
}

// get loads the stored config for `instance` and opens a database connection.
pub fn get(instance string) !PostgresClient[Config] {
	mut self := PostgresClient[Config]{}
	self.init('postgres', instance, .get)!
	config := self.config()!
	mut db := pg.connect(
		host: config.host
		user: config.user
		port: config.port
		password: config.password
		dbname: config.dbname
	)!
	self.db = db
	return self
}

View File

@@ -0,0 +1,76 @@
# postgres client
## use hero to work with postgres
```bash
Usage: hero postgres [flags] [commands]
manage postgresql
Flags:
-help Prints help information.
-man Prints the auto-generated manpage.
Commands:
exec execute a query
check check the postgresql connection
configure configure a postgresl connection.
backup backup
print print configure info.
list list databases
```
## configure
the postgres configuration is stored on the filesystem for further use, can be configured as follows
```v
import freeflowuniverse.herolib.clients.postgres
postgres.configure(name:'default',
user :'root'
port : 5432
host : 'localhost'
password : 'ssss'
dbname :'postgres')!
mut db:=postgres.get(name:'default')!
```
## configure through heroscript
```v
import freeflowuniverse.herolib.clients.postgres
heroscript:="
!!postgresclient.define name:'default'
//TO IMPLEMENT
"
postgres.configure(heroscript:heroscript)!
//can also be done through get directly
mut cl:=postgres.get(reset:true,name:'default',heroscript:heroscript)
```
## some postgresql cmds
```v
import freeflowuniverse.herolib.clients.postgres
mut cl:=postgres.get()! //will default get postgres client with name 'default'
cl.db_exists("mydb")!
```
## use the good module of v
- [https://modules.vlang.io/db.pg.html#DB.exec](https://modules.vlang.io/db.pg.html#DB.exec)

View File

@@ -0,0 +1,58 @@
module redisclient
// original code see https://github.com/patrickpissurno/vredis/blob/master/vredis_test.v
// credits see there as well (-:
import net
// import sync
// import strconv
// Global connection pool: new() returns an existing connection for the same
// address instead of dialing again.
__global (
	redis_connections []Redis
)

// Reads block indefinitely by default (blocking commands may legitimately wait).
const default_read_timeout = net.infinite_timeout

// Redis is a minimal RESP client over a single TCP (or unix) socket.
// NOTE(review): pool/socket access is not synchronized — the lock code is
// commented out below; confirm single-threaded usage.
@[heap]
pub struct Redis {
pub:
	addr string // e.g. 'localhost:6379' or '/tmp/redis-default.sock'
mut:
	socket net.TcpConn
}
// https://redis.io/topics/protocol
// examples:
// localhost:6379
// /tmp/redis-default.sock
// new returns a Redis client for `addr`, reusing a pooled connection when one
// for the same address already exists; otherwise it connects and adds it to
// the pool.
// NOTE(review): pool access is unsynchronized (lock code commented out) —
// concurrent first use from multiple threads could race; confirm.
pub fn new(addr string) !Redis {
	// lock redis_connections {
	for mut conn in redis_connections {
		if conn.addr == addr {
			return conn
		}
	}
	// means there is no connection yet
	mut r := Redis{
		addr: addr
	}
	r.socket_connect()!
	redis_connections << r
	return r
	//}
	// panic("bug")
}
// reset disconnects every pooled connection and empties the pool.
pub fn reset() ! {
	// lock redis_connections {
	for mut conn in redis_connections {
		conn.disconnect()
	}
	redis_connections = []Redis{}
	//}
}

// checkempty asserts the pool is empty (intended for tests).
pub fn checkempty() {
	// lock redis_connections {
	assert redis_connections.len == 0
	//}
}

View File

@@ -0,0 +1,19 @@
# Redisclient
## basic example to connect to local redis on 127.0.0.1:6379
```v
import freeflowuniverse.herolib.clients.redisclient
mut redis := redisclient.core_get()!
redis.set('test', 'some data') or { panic('set failed: ' + err.str()) }
r := redis.get('test')!
if r != 'some data' {
	panic('get returned a different result')
}
```
> redis commands can be found on https://redis.io/commands/

View File

@@ -0,0 +1,57 @@
module redisclient
import freeflowuniverse.herolib.ui.console
pub struct RedisCache {
mut:
redis &Redis @[str: skip]
namespace string
enabled bool = true
}
// return a cache object starting from a redis connection
pub fn (mut r Redis) cache(namespace string) RedisCache {
return RedisCache{
redis: &r
namespace: namespace
}
}
// get returns the cached value for key within this namespace,
// or none on a cache miss / when the cache is disabled.
pub fn (mut h RedisCache) get(key string) ?string {
	if !h.enabled {
		return none
	}
	scoped := h.namespace + ':' + key
	hit := h.redis.get('cache:${scoped}') or {
		console.print_debug('[-] cache: cache miss, ${scoped}')
		return none
	}
	console.print_debug('[+] cache: cache hit: ${scoped}')
	return hit
}
pub fn (mut h RedisCache) set(key string, val string, expire int) ! {
if !h.enabled {
return
}
key2 := h.namespace + ':' + key
h.redis.set_ex('cache:${key2}', val, expire.str())!
}
pub fn (mut h RedisCache) exists(key string) bool {
h.get(key) or { return false }
return true
}
// reset removes every cached entry belonging to this namespace.
pub fn (mut h RedisCache) reset() ! {
	// set() stores entries under 'cache:<namespace>:<key>', so KEYS needs
	// a wildcard pattern here; without the trailing ':*' the pattern only
	// matches the literal key 'cache:<namespace>' and nothing is deleted.
	key_check := 'cache:' + h.namespace + ':*'
	keys := h.redis.keys(key_check)!
	for key in keys {
		h.redis.del(key)!
	}
}

View File

@@ -0,0 +1,305 @@
module redisclient
import freeflowuniverse.herolib.data.resp
import time
pub fn (mut r Redis) ping() !string {
return r.send_expect_strnil(['PING'])
}
pub fn (mut r Redis) set(key string, value string) ! {
return r.send_expect_ok(['SET', key, value])
}
pub fn (mut r Redis) set_ex(key string, value string, ex string) ! {
return r.send_expect_ok(['SET', key, value, 'EX', ex])
}
// set_opts runs SET with optional EX/PX expiry, NX/XX existence guards
// and KEEPTTL, returning true when redis replied OK.
// -4 is the sentinel meaning "expiry option not supplied" (see SetOpts).
// NOTE(review): this sends an inline command (not a RESP array) and does
// not escape '"' in key/value — confirm inputs never contain quotes.
pub fn (mut r Redis) set_opts(key string, value string, opts SetOpts) !bool {
	ex := if opts.ex == -4 && opts.px == -4 {
		''
	} else if opts.ex != -4 {
		' EX ${opts.ex}'
	} else {
		' PX ${opts.px}'
	}
	nx := if opts.nx == false && opts.xx == false {
		''
	} else if opts.nx == true {
		' NX'
	} else {
		' XX'
	}
	keep_ttl := if opts.keep_ttl == false { '' } else { ' KEEPTTL' }
	message := 'SET "${key}" "${value}"${ex}${nx}${keep_ttl}\r\n'
	r.write(message.bytes()) or { return false }
	time.sleep(1 * time.millisecond)
	res := r.read_line()!
	// read_line() strips the trailing '\r\n', so a successful reply arrives
	// here as '+OK' — the previous '+OK\r\n' match arm could never succeed
	// and the function always reported false.
	match res {
		'+OK' {
			return true
		}
		else {
			return false
		}
	}
}
pub fn (mut r Redis) get(key string) !string {
// mut key2 := key.trim("\"'")
return r.send_expect_strnil(['GET', key])
}
pub fn (mut r Redis) exists(key string) !bool {
r2 := r.send_expect_int(['EXISTS', key])!
return r2 == 1
}
pub fn (mut r Redis) del(key string) !int {
return r.send_expect_int(['DEL', key])
}
pub fn (mut r Redis) hset(key string, skey string, value string) ! {
r.send_expect_int(['HSET', key, skey, value])!
}
pub fn (mut r Redis) hget(key string, skey string) !string {
// mut key2 := key.trim("\"'")
return r.send_expect_strnil(['HGET', key, skey])
}
// hgetall returns every field/value pair of the hash stored at key.
pub fn (mut r Redis) hgetall(key string) !map[string]string {
	// HGETALL replies with a flat list: field1, value1, field2, value2, ...
	flat := r.send_expect_list_str(['HGETALL', key])!
	mut result := map[string]string{}
	for idx := 0; idx + 1 < flat.len; idx += 2 {
		result[flat[idx]] = flat[idx + 1]
	}
	return result
}
pub fn (mut r Redis) hexists(key string, skey string) !bool {
return r.send_expect_bool(['HEXISTS', key, skey])
}
pub fn (mut r Redis) hdel(key string, skey string) !int {
return r.send_expect_int(['HDEL', key, skey])
}
pub fn (mut r Redis) incrby(key string, increment int) !int {
return r.send_expect_int(['INCRBY', key, increment.str()])
}
pub fn (mut r Redis) incr(key string) !int {
return r.incrby(key, 1)
}
pub fn (mut r Redis) decr(key string) !int {
return r.incrby(key, -1)
}
pub fn (mut r Redis) decrby(key string, decrement int) !int {
return r.incrby(key, -decrement)
}
pub fn (mut r Redis) incrbyfloat(key string, increment f64) !f64 {
res := r.send_expect_str(['INCRBYFLOAT', key, increment.str()])!
count := res.f64()
return count
}
pub fn (mut r Redis) append(key string, value string) !int {
return r.send_expect_int(['APPEND', key, value])
}
// setrange overwrites part of the string stored at key, starting at the
// given offset, and returns the length of the string after the write.
pub fn (mut r Redis) setrange(key string, offset int, value string) !int {
	// value is already a string; the redundant .str() call was dropped
	return r.send_expect_int(['SETRANGE', key, offset.str(), value])
}
pub fn (mut r Redis) lpush(key string, element string) !int {
return r.send_expect_int(['LPUSH', key, element])
}
pub fn (mut r Redis) rpush(key string, element string) !int {
return r.send_expect_int(['RPUSH', key, element])
}
pub fn (mut r Redis) lrange(key string, start int, end int) ![]resp.RValue {
return r.send_expect_list(['LRANGE', key, start.str(), end.str()])
}
pub fn (mut r Redis) expire(key string, seconds int) !int {
return r.send_expect_int(['EXPIRE', key, seconds.str()])
}
pub fn (mut r Redis) pexpire(key string, millis int) !int {
return r.send_expect_int(['PEXPIRE', key, millis.str()])
}
pub fn (mut r Redis) expireat(key string, timestamp int) !int {
return r.send_expect_int(['EXPIREAT', key, timestamp.str()])
}
pub fn (mut r Redis) pexpireat(key string, millistimestamp i64) !int {
return r.send_expect_int(['PEXPIREAT', key, millistimestamp.str()])
}
pub fn (mut r Redis) persist(key string) !int {
return r.send_expect_int(['PERSIST', key])
}
pub fn (mut r Redis) getset(key string, value string) !string {
return r.send_expect_strnil(['GETSET', key, value])
}
pub fn (mut r Redis) getrange(key string, start int, end int) !string {
return r.send_expect_str(['GETRANGE', key, start.str(), end.str()])
}
pub fn (mut r Redis) keys(pattern string) ![]string {
response := r.send_expect_list(['KEYS', pattern])!
mut result := []string{}
for item in response {
result << resp.get_redis_value(item)
}
return result
}
pub fn (mut r Redis) hkeys(key string) ![]string {
response := r.send_expect_list(['HKEYS', key])!
mut result := []string{}
for item in response {
result << resp.get_redis_value(item)
}
return result
}
pub fn (mut r Redis) randomkey() !string {
return r.send_expect_strnil(['RANDOMKEY'])
}
pub fn (mut r Redis) strlen(key string) !int {
return r.send_expect_int(['STRLEN', key])
}
pub fn (mut r Redis) lpop(key string) !string {
return r.send_expect_strnil(['LPOP', key])
}
// blpop pops the first available element from the given keys, blocking
// server-side for up to `timeout` seconds (fractional values allowed).
// Returns a [key, value] pair on success, or an error on timeout.
pub fn (mut r Redis) blpop(keys []string, timeout f64) ![]string {
	mut request := ['BLPOP']
	request << keys
	request << '${timeout}'
	res := r.send_expect_list_str(request)!
	// a nil reply (server-side timeout) decodes as a short/empty list here;
	// NOTE(review): brpop() only checks res.len != 2 — confirm whether the
	// extra res[1] == '' guard (which also rejects a legitimately empty
	// value) is intentional
	if res.len != 2 || res[1] == '' {
		return error('timeout on blpop')
	}
	return res
}
pub fn (mut r Redis) brpop(keys []string, timeout f64) ![]string {
mut request := ['BRPOP']
request << keys
request << '${timeout}'
res := r.send_expect_list_str(request)!
if res.len != 2 {
return error('timeout on brpop')
}
return res
}
pub fn (mut r Redis) rpop(key string) !string {
return r.send_expect_strnil(['RPOP', key])
}
pub fn (mut r Redis) llen(key string) !int {
return r.send_expect_int(['LLEN', key])
}
pub fn (mut r Redis) ttl(key string) !int {
return r.send_expect_int(['TTL', key])
}
pub fn (mut r Redis) pttl(key string) !int {
return r.send_expect_int(['PTTL', key])
}
pub fn (mut r Redis) rename(key string, newkey string) ! {
return r.send_expect_ok(['RENAME', key, newkey])
}
pub fn (mut r Redis) renamenx(key string, newkey string) !int {
return r.send_expect_int(['RENAMENX', key, newkey])
}
pub fn (mut r Redis) setex(key string, second i64, value string) ! {
return r.send_expect_ok(['SETEX', key, second.str(), value])
}
pub fn (mut r Redis) psetex(key string, millisecond i64, value string) ! {
return r.send_expect_ok(['PSETEX', key, millisecond.str(), value])
}
pub fn (mut r Redis) setnx(key string, value string) !int {
return r.send_expect_int(['SETNX', key, value])
}
pub fn (mut r Redis) type_of(key string) !string {
return r.send_expect_strnil(['TYPE', key])
}
pub fn (mut r Redis) flushall() ! {
return r.send_expect_ok(['FLUSHALL'])
}
pub fn (mut r Redis) flushdb() ! {
return r.send_expect_ok(['FLUSHDB'])
}
// select is reserved
pub fn (mut r Redis) selectdb(database int) ! {
return r.send_expect_ok(['SELECT', database.str()])
}
pub fn (mut r Redis) scan(cursor int) !(string, []string) {
res := r.send_expect_list(['SCAN', cursor.str()])!
if res[0] !is resp.RBString {
return error('Redis SCAN wrong response type (cursor)')
}
if res[1] !is resp.RArray {
return error('Redis SCAN wrong response type (list content)')
}
mut values := []string{}
for i in 0 .. resp.get_redis_array_len(res[1]) {
values << resp.get_redis_value_by_index(res[1], i)
}
return resp.get_redis_value(res[0]), values
}
// Add the specified members to the set stored at key. Specified members that are already a member
// of this set are ignored. If key does not exist, a new set is created before adding the specified members.
// An error is returned when the value stored at key is not a set.
pub fn (mut r Redis) sadd(key string, members []string) !int {
mut tosend := ['SADD', key]
for k in members {
tosend << k
}
return r.send_expect_int(tosend)
}
// Returns if member is a member of the set stored at key.
pub fn (mut r Redis) smismember(key string, members []string) ![]int {
// mut key2 := key.trim("\"'")
mut tosend := ['SMISMEMBER', key]
for k in members {
tosend << k
}
res := r.send_expect_list_int(tosend)!
return res
}

View File

@@ -0,0 +1,24 @@
module redisclient
@[params]
pub struct RedisURL {
address string = '127.0.0.1'
port int = 6379
// db int
}
// get_redis_url parses a 'host:port' string into a RedisURL.
// Returns an error when no ':' separator is present.
pub fn get_redis_url(url string) !RedisURL {
	if !url.contains(':') {
		return error('url doesnt contain port')
	}
	return RedisURL{
		address: url.all_before_last(':')
		// RedisURL.port is declared as int, so parse with .int();
		// the previous .u16() produced a u16 that mismatches the field type
		port: url.all_after_last(':').int()
	}
}
pub fn core_get(url RedisURL) !Redis {
mut r := new('${url.address}:${url.port}')!
return r
}

View File

@@ -0,0 +1,173 @@
module redisclient
import freeflowuniverse.herolib.data.resp
pub fn (mut r Redis) get_response() !resp.RValue {
line := r.read_line()!
if line.starts_with('-') {
return resp.RError{
value: line[1..]
}
}
if line.starts_with(':') {
return resp.RInt{
value: line[1..].int()
}
}
if line.starts_with('+') {
return resp.RString{
value: line[1..]
}
}
if line.starts_with('$') {
mut bulkstring_size := line[1..].int()
if bulkstring_size == -1 {
return resp.RNil{}
}
if bulkstring_size == 0 {
// extract final \r\n and not reading
// any payload
r.read_line()!
return resp.RString{
value: ''
}
}
// read payload
buffer := r.read(bulkstring_size) or { panic(err) }
// extract final \r\n
r.read_line()!
// console.print_debug("readline result:'$buffer.bytestr()'")
return resp.RBString{
value: buffer
} // TODO: won't support binary (afaik), need to fix? WHY not (despiegk)?
}
if line.starts_with('*') {
mut arr := resp.RArray{
values: []resp.RValue{}
}
items := line[1..].int()
// proceed each entries, they can be of any types
for _ in 0 .. items {
value := r.get_response()!
arr.values << value
}
return arr
}
return error('unsupported response type')
}
// TODO: needs to use the resp library
pub fn (mut r Redis) get_int() !int {
line := r.read_line()!
if line.starts_with(':') {
return line[1..].int()
} else {
return error("Did not find int, did find:'${line}'")
}
}
pub fn (mut r Redis) get_list_int() ![]int {
line := r.read_line()!
mut res := []int{}
if line.starts_with('*') {
items := line[1..].int()
// proceed each entries, they can be of any types
for _ in 0 .. items {
value := r.get_int()!
res << value
}
return res
} else {
return error("Did not find int, did find:'${line}'")
}
}
pub fn (mut r Redis) get_list_str() ![]string {
line := r.read_line()!
mut res := []string{}
if line.starts_with('*') {
items := line[1..].int()
// proceed each entries, they can be of any types
for _ in 0 .. items {
value := r.get_string()!
res << value
}
return res
} else {
return error("Did not find int, did find:'${line}'")
}
}
pub fn (mut r Redis) get_string() !string {
line := r.read_line()!
if line.starts_with('+') {
// console.print_debug("getstring:'${line[1..]}'")
return line[1..]
}
if line.starts_with('$') {
r2 := r.get_bytes_from_line(line)!
return r2.bytestr()
} else {
return error("Did not find string, did find:'${line}'")
}
}
pub fn (mut r Redis) get_string_nil() !string {
r2 := r.get_bytes_nil()!
return r2.bytestr()
}
pub fn (mut r Redis) get_bytes_nil() ![]u8 {
line := r.read_line()!
if line.starts_with('+') {
return line[1..].bytes()
}
if line.starts_with('$-1') {
return []u8{}
}
if line.starts_with('$') {
return r.get_bytes_from_line(line)
} else {
return error("Did not find string or nil, did find:'${line}'")
}
}
pub fn (mut r Redis) get_bool() !bool {
i := r.get_int()!
return i == 1
}
pub fn (mut r Redis) get_bytes() ![]u8 {
line := r.read_line()!
if line.starts_with('$') {
return r.get_bytes_from_line(line)
} else {
return error("Did not find bulkstring, did find:'${line}'")
}
}
fn (mut r Redis) get_bytes_from_line(line string) ![]u8 {
mut bulkstring_size := line[1..].int()
if bulkstring_size == -1 {
// return none
return error('bulkstring_size is -1')
}
if bulkstring_size == 0 {
// extract final \r\n, there is no payload
r.read_line()!
return []
}
// read payload
buffer := r.read(bulkstring_size) or { panic('Could not read payload: ${err}') }
// extract final \r\n
r.read_line()!
return buffer
}

View File

@@ -0,0 +1,121 @@
module redisclient
import os
import net
import freeflowuniverse.herolib.data.resp
import time
import net.unix
pub struct SetOpts {
ex int = -4
px int = -4
nx bool
xx bool
keep_ttl bool
}
pub enum KeyType {
t_none
t_string
t_list
t_set
t_zset
t_hash
t_stream
t_unknown
}
// socket_connect (re)establishes the connection to redis.
// Addresses containing ':' are dialed as host:port tcp endpoints;
// anything else is treated as a unix-socket path ('~' is expanded first).
fn (mut r Redis) socket_connect() ! {
	// print_backtrace()
	addr := os.expand_tilde_to_home(r.addr)
	// console.print_debug(' - REDIS CONNECT: ${addr}')
	if !addr.contains(':') {
		// unix socket: open the stream, then wrap its raw file descriptor
		// in a net.TcpConn so the rest of the client only deals with one
		// socket type
		unix_socket := unix.connect_stream(addr)!
		tcp_socket := net.tcp_socket_from_handle_raw(unix_socket.sock.Socket.handle)
		tcp_conn := net.TcpConn{
			sock: tcp_socket
			handle: unix_socket.sock.Socket.handle
		}
		r.socket = tcp_conn
	} else {
		r.socket = net.dial_tcp(addr)!
	}
	// blocking mode with a 1-second read timeout for all subsequent reads
	r.socket.set_blocking(true)!
	r.socket.set_read_timeout(1 * time.second)
	// console.print_debug("---OK")
}
fn (mut r Redis) socket_check() ! {
r.socket.peer_addr() or {
// console.print_debug(' - re-connect socket for redis')
r.socket_connect()!
}
}
pub fn (mut r Redis) read_line() !string {
return r.socket.read_line().trim_right('\r\n')
}
// write *all the data* into the socket
// This function loops, till *everything is written*
// (some of the socket write ops could be partial)
fn (mut r Redis) write(data []u8) ! {
r.socket_check()!
mut remaining := data.len
for remaining > 0 {
// zdbdata[data.len - remaining..].bytestr())
written_bytes := r.socket.write(data[data.len - remaining..])!
remaining -= written_bytes
}
}
fn (mut r Redis) read(size int) ![]u8 {
r.socket_check() or {}
mut buf := []u8{len: size}
mut remaining := size
for remaining > 0 {
read_bytes := r.socket.read(mut buf[buf.len - remaining..])!
remaining -= read_bytes
}
return buf
}
pub fn (mut r Redis) disconnect() {
r.socket.close() or {}
}
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
// TODO: need to implement a way how to use multiple connections at once
const cr_lf_bytes = [u8(`\r`), `\n`]
fn (mut r Redis) write_line(data []u8) ! {
r.write(data)!
r.write(cr_lf_bytes)!
}
// write resp value to the redis channel
pub fn (mut r Redis) write_rval(val resp.RValue) ! {
r.write(val.encode())!
}
// write list of strings to redis challen
fn (mut r Redis) write_cmd(item string) ! {
a := resp.r_bytestring(item.bytes())
r.write_rval(a)!
}
// write list of strings to redis challen
fn (mut r Redis) write_cmds(items []string) ! {
// if items.len==1{
// a := resp.r_bytestring(items[0].bytes())
// r.write_rval(a)!
// }{
a := resp.r_list_bstring(items)
r.write_rval(a)!
// }
}

View File

@@ -0,0 +1,41 @@
module redisclient
import time
pub struct RedisQueue {
pub mut:
key string
redis &Redis
}
pub fn (mut r Redis) queue_get(key string) RedisQueue {
return RedisQueue{
key: key
redis: r
}
}
pub fn (mut q RedisQueue) add(val string) ! {
q.redis.lpush(q.key, val)!
}
// timeout in msec
// get polls the queue with RPOP until a value arrives or `timeout`
// milliseconds have elapsed, then returns a timeout error.
// NOTE(review): busy-polls with only a 1-microsecond sleep between RPOPs,
// which hammers redis while the queue is empty — BLPOP would block
// server-side instead; confirm intent. Also an entry whose value is the
// empty string is indistinguishable from an empty queue here.
pub fn (mut q RedisQueue) get(timeout u64) !string {
	start := u64(time.now().unix_milli())
	for {
		// non-blocking pop; '' signals the queue was empty
		r := q.redis.rpop(q.key) or { '' }
		if r != '' {
			return r
		}
		if u64(time.now().unix_milli()) > (start + timeout) {
			break
		}
		time.sleep(time.microsecond)
	}
	return error('timeout on ${q.key}')
}
// get without timeout, returns none if nil
pub fn (mut q RedisQueue) pop() !string {
return q.redis.rpop(q.key)!
}

View File

@@ -0,0 +1,136 @@
module redisclient
import rand
import time
import json
pub struct RedisRpc {
pub mut:
key string // queue name as used by this rpc
redis &Redis
}
// return a rpc mechanism
pub fn (mut r Redis) rpc_get(key string) RedisRpc {
return RedisRpc{
key: key
redis: r
}
}
pub struct RPCArgs {
pub:
cmd string @[required]
data string @[required]
timeout u64 = 60000 // 60 sec
wait bool = true
}
pub struct Message {
pub:
ret_queue string
now i64
cmd string
data string
}
pub struct Response {
pub:
result string
error string
}
// send data to a queue and wait till return comes back
// timeout in milliseconds
// params
// cmd string @[required]
// data string @[required]
// timeout u64=60000 //60 sec
// wait bool=true
pub fn (mut q RedisRpc) call(args RPCArgs) !string {
retqueue := rand.uuid_v4()
now := time.now().unix()
message := Message{
ret_queue: retqueue
now: now
cmd: args.cmd
data: args.data
}
encoded := json.encode(message)
q.redis.lpush(q.key, encoded)!
if args.wait {
return q.result(args.timeout, retqueue)!
}
return ''
}
// get return once result processed
pub fn (mut q RedisRpc) result(timeout u64, retqueue string) !string {
start := u64(time.now().unix_milli())
for {
r := q.redis.rpop(retqueue) or { '' }
if r != '' {
res := json.decode(Response, r)!
if res.error != '' {
return res.error
}
return res.result
}
if u64(time.now().unix_milli()) > (start + timeout) {
break
}
time.sleep(time.millisecond)
}
return error('timeout on returnqueue: ${retqueue}')
}
@[params]
pub struct ProcessParams {
pub:
timeout u64
}
// to be used by processor, to get request and execute, this is the server side of a RPC mechanism
// 2nd argument is a function which needs to execute the job: fn (string,string) !string
pub fn (mut q RedisRpc) process(op fn (string, string) !string, params ProcessParams) !string {
start := u64(time.now().unix_milli())
for {
r := q.redis.rpop(q.key) or { '' }
if r != '' {
msg := json.decode(Message, r)!
returnqueue := msg.ret_queue
// epochtime:=parts[1].u64() //we don't do anything with it now
cmd := msg.cmd
data := msg.data
// if true{panic("sd")}
datareturn := op(cmd, data) or {
response := Response{
result: ''
error: err.str()
}
encoded := json.encode(response)
q.redis.lpush(returnqueue, encoded)!
return ''
}
response := Response{
result: datareturn
error: ''
}
encoded := json.encode(response)
q.redis.lpush(returnqueue, encoded)!
return returnqueue
}
if params.timeout != 0 && u64(time.now().unix_milli()) > (start + params.timeout) {
break
}
time.sleep(time.millisecond)
}
return error('timeout for waiting for cmd on ${q.key}')
}
// get without timeout, returns none if nil
pub fn (mut q RedisRpc) delete() ! {
q.redis.del(q.key)!
}

View File

@@ -0,0 +1,25 @@
import freeflowuniverse.herolib.clients.redisclient
fn setup() !&redisclient.Redis {
mut redis := redisclient.core_get()!
redis.selectdb(10) or { panic(err) }
return &redis
}
fn cleanup(mut redis redisclient.Redis) ! {
redis.flushall()!
// redis.disconnect()
}
fn test_sadd() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.sadd('mysadd', ['a', 'b', 'c']) or { panic(err) }
r := redis.smismember('mysadd', ['a', 'b', 'c']) or { panic(err) }
assert r == [1, 1, 1]
r2 := redis.smismember('mysadd', ['a', 'd', 'c']) or { panic(err) }
assert r2 == [1, 0, 1]
}

View File

@@ -0,0 +1,6 @@
module redisclient
// load a script and return the hash
// script_load loads a Lua script into the redis script cache and returns
// its SHA1 digest (for later execution via EVALSHA).
pub fn (mut r Redis) script_load(script string) !string {
	// 'SCRIPT' and 'LOAD' must be sent as two separate RESP arguments;
	// a single 'SCRIPT LOAD' element is rejected as an unknown command.
	return r.send_expect_str(['SCRIPT', 'LOAD', script])!
}

View File

@@ -0,0 +1,55 @@
module redisclient
import freeflowuniverse.herolib.data.resp
import freeflowuniverse.herolib.ui.console
// send a command (list of arguments) and expect a simple 'OK' reply;
// any other reply is logged and turned into an error.
pub fn (mut r Redis) send_expect_ok(items []string) ! {
	r.write_cmds(items)!
	reply := r.get_string()!
	if reply == 'OK' {
		return
	}
	console.print_debug("'${reply}'")
	return error('did not get ok back')
}
// send list of strings, expect int back
pub fn (mut r Redis) send_expect_int(items []string) !int {
r.write_cmds(items)!
return r.get_int()
}
pub fn (mut r Redis) send_expect_bool(items []string) !bool {
r.write_cmds(items)!
return r.get_bool()
}
// send list of strings, expect string back
pub fn (mut r Redis) send_expect_str(items []string) !string {
r.write_cmds(items)!
return r.get_string()
}
// send list of strings, expect string or nil back
pub fn (mut r Redis) send_expect_strnil(items []string) !string {
r.write_cmds(items)!
d := r.get_string_nil()!
return d
}
// send list of strings, expect list of strings back
pub fn (mut r Redis) send_expect_list_str(items []string) ![]string {
r.write_cmds(items)!
return r.get_list_str()
}
pub fn (mut r Redis) send_expect_list_int(items []string) ![]int {
r.write_cmds(items)!
return r.get_list_int()
}
pub fn (mut r Redis) send_expect_list(items []string) ![]resp.RValue {
r.write_cmds(items)!
res := r.get_response()!
return resp.get_redis_array(res)
}

View File

@@ -0,0 +1,864 @@
import freeflowuniverse.herolib.clients.redisclient
import time
import freeflowuniverse.herolib.ui.console
// original code see https://github.com/patrickpissurno/vredis/blob/master/vredis_test.v
// credits see there as well (-:
fn setup() !&redisclient.Redis {
mut redis := redisclient.core_get()!
// Select db 10 to be away from default one '0'
redis.selectdb(10) or { panic(err) }
return &redis
}
fn cleanup(mut redis redisclient.Redis) ! {
redis.flushall()!
// redis.disconnect()
}
fn test_set() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
// console.print_debug('start')
// for _ in 0 .. 10000 {
// redis.set('test0', '123')!
// }
console.print_debug('stop')
redis.set('test0', '456')!
res := redis.get('test0')!
assert res == '456'
redis.hset('x', 'a', '222')!
redis.hset('x', 'b', '333')!
mut res3 := redis.hget('x', 'b')!
assert res3 == '333'
redis.hdel('x', 'b')!
res3 = redis.hget('x', 'b')!
assert res3 == ''
e := redis.hexists('x', 'a')!
assert e
}
fn test_large_value() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
rr := 'SSS' + 'a'.repeat(40000) + 'EEE'
mut rr2 := ''
for i in 0 .. 50 {
redis.set('test_large_value0', rr)!
rr2 = redis.get('test_large_value0')!
assert rr.len == rr2.len
assert rr == rr2
}
for i3 in 0 .. 100 {
redis.set('test_large_value${i3}', rr)!
}
for i4 in 0 .. 100 {
rr4 := redis.get('test_large_value${i4}')!
assert rr.len == rr4.len
redis.del('test_large_value${i4}')!
}
}
fn test_queue() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
mut q := redis.queue_get('kds:q')
q.add('test1')!
q.add('test2')!
mut res := q.get(1)!
assert res == 'test1'
res = q.get(1)!
assert res == 'test2'
console.print_debug('start')
res = q.get(100) or { '' }
console.print_debug('stop')
assert res == ''
console.print_debug(res)
}
fn test_scan() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
console.print_debug('stop')
redis.set('test3', '12')!
redis.set('test4', '34')!
redis.set('test5', '56')!
redis.set('test6', '78')!
redis.set('test7', '9')!
cursor, data := redis.scan(0)!
console.print_debug(data)
assert cursor == '0'
}
// fn test_set_opts() {
// mut redis := setup()!
// defer {
// cleanup(mut redis) or { panic(err) }
// }
// assert redis.set_opts('test8', '123', redisclient.SetOpts{
// ex: 2
// }) or {false}== true
// assert redis.set_opts('test8', '456', redisclient.SetOpts{
// px: 2000
// xx: true
// }) or {false} == true
// assert redis.set_opts('test8', '789', redisclient.SetOpts{
// px: 1000
// nx: true
// }) or {false}== false
// // Works with redis version > 6
// assert redis.set_opts('test8', '012', redisclient.SetOpts{ keep_ttl: true }) or {false}== true
// }
fn test_setex() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.setex('test9', 2, '123')!
mut r := redis.get('test9')!
assert r == '123'
time.sleep(2100 * time.millisecond)
r = redis.get('test9')!
assert r == ''
}
fn test_psetex() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.psetex('test10', 200, '123')!
mut r := redis.get('test10') or {
assert false
return
}
assert r == '123'
time.sleep(220 * time.millisecond)
r = redis.get('test10')!
assert r == ''
}
fn test_setnx() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
mut r1 := redis.setnx('test11', '123')!
assert r1 == 1
r1 = redis.setnx('test11', '456')!
assert r1 == 0
val := redis.get('test11') or {
assert false
return
}
assert val == '123'
}
fn test_incrby() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test12', '100')!
r1 := redis.incrby('test12', 4) or {
assert false
return
}
assert r1 == 104
r2 := redis.incrby('test13', 2) or {
assert false
return
}
assert r2 == 2
redis.set('test14', 'nan')!
redis.incrby('test14', 1) or {
assert true
return
}
assert false
}
fn test_incr() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test15', '100')!
r1 := redis.incr('test15') or {
assert false
return
}
assert r1 == 101
r2 := redis.incr('test16') or {
assert false
return
}
assert r2 == 1
redis.set('test17', 'nan')!
redis.incr('test17') or {
assert true
return
}
assert false
}
fn test_decr() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test18', '100')!
r1 := redis.decr('test18') or {
assert false
return
}
assert r1 == 99
r2 := redis.decr('test19') or {
assert false
return
}
assert r2 == -1
redis.set('test20', 'nan')!
redis.decr('test20') or {
assert true
return
}
assert false
}
fn test_decrby() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test21', '100')!
r1 := redis.decrby('test21', 4) or {
assert false
return
}
assert r1 == 96
r2 := redis.decrby('test22', 2) or {
assert false
return
}
assert r2 == -2
redis.set('test23', 'nan')!
redis.decrby('test23', 1) or {
assert true
return
}
assert false
}
fn test_incrbyfloat() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test24', '3.1415')!
r1 := redis.incrbyfloat('test24', 3.1415) or {
assert false
return
}
assert r1 == 6.283
r2 := redis.incrbyfloat('test25', 3.14) or {
assert false
return
}
assert r2 == 3.14
r3 := redis.incrbyfloat('test25', -3.14) or {
assert false
return
}
assert r3 == 0
redis.set('test26', 'nan')!
redis.incrbyfloat('test26', 1.5) or {
assert true
return
}
assert false
}
fn test_append() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test27', 'bac')!
r1 := redis.append('test27', 'on') or {
assert false
return
}
assert r1 == 5
r2 := redis.get('test27') or {
assert false
return
}
assert r2 == 'bacon'
}
fn test_lpush() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r := redis.lpush('test28', 'item 1') or {
assert false
return
}
assert r == 1
}
fn test_rpush() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r := redis.rpush('test29', 'item 1') or {
assert false
return
}
assert r == 1
}
fn test_setrange() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r1 := redis.setrange('test30', 0, 'bac') or {
assert false
return
}
assert r1 == 3
r2 := redis.setrange('test30', 3, 'on') or {
assert false
return
}
assert r2 == 5
}
fn test_expire() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r1 := redis.expire('test31', 2) or {
assert false
return
}
assert r1 == 0
redis.set('test31', '123')!
r2 := redis.expire('test31', 2) or {
assert false
return
}
assert r2 == 1
}
fn test_pexpire() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r1 := redis.pexpire('test32', 200) or {
assert false
return
}
assert r1 == 0
redis.set('test32', '123')!
r2 := redis.pexpire('test32', 200) or {
assert false
return
}
assert r2 == 1
}
fn test_expireat() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r1 := redis.expireat('test33', 1293840000) or {
assert false
return
}
assert r1 == 0
redis.set('test33', '123')!
r2 := redis.expireat('test33', 1293840000) or {
assert false
return
}
assert r2 == 1
}
fn test_pexpireat() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r1 := redis.pexpireat('test34', 1555555555005) or {
assert false
return
}
assert r1 == 0
redis.set('test34', '123')!
r2 := redis.pexpireat('test34', 1555555555005) or {
assert false
return
}
assert r2 == 1
}
fn test_persist() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
r1 := redis.persist('test35') or {
assert false
return
}
assert r1 == 0
redis.setex('test35', 2, '123')!
r2 := redis.persist('test35') or {
assert false
return
}
assert r2 == 1
}
fn test_get() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test36', '123')!
mut r := redis.get('test36')!
assert r == '123'
assert helper_get_key_not_found(mut redis, 'test37') == true
}
fn test_getset() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
mut r1 := redis.getset('test38', '10') or { '' }
assert r1 == ''
r2 := redis.getset('test38', '15') or {
assert false
return
}
assert r2 == '10'
r3 := redis.get('test38') or {
assert false
return
}
assert r3 == '15'
}
fn test_getrange() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test39', 'community')!
r1 := redis.getrange('test39', 4, -1) or {
assert false
return
}
assert r1 == 'unity'
r2 := redis.getrange('test40', 0, -1) or {
assert false
return
}
assert r2 == ''
}
fn test_randomkey() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
assert helper_randomkey_database_empty(mut redis) == true
redis.set('test41', '123')!
r2 := redis.randomkey() or {
assert false
return
}
assert r2 == 'test41'
assert helper_get_key_not_found(mut redis, 'test42') == true
}
fn test_strlen() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.set('test43', 'bacon')!
r1 := redis.strlen('test43') or {
assert false
return
}
assert r1 == 5
r2 := redis.strlen('test44') or {
assert false
return
}
assert r2 == 0
}
fn test_lpop() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.lpush('test45', '123') or {
assert false
return
}
r1 := redis.lpop('test45') or {
assert false
return
}
assert r1 == '123'
assert helper_lpop_key_not_found(mut redis, 'test46') == true
}
fn test_rpop() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.lpush('test47', '123') or {
assert false
return
}
r1 := redis.rpop('test47') or {
assert false
return
}
assert r1 == '123'
assert helper_rpop_key_not_found(mut redis, 'test48') == true
}
fn test_brpop() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.lpush('test47', '123')!
redis.lpush('test48', 'balbal')!
r1 := redis.brpop(['test47', 'test48'], 1)!
assert r1[0] == 'test47'
assert r1[1] == '123'
r2 := redis.brpop(['test47', 'test48'], 1)!
assert r2[0] == 'test48'
assert r2[1] == 'balbal'
r3 := redis.brpop(['test47'], 1) or { return }
assert false, 'brpop should timeout'
}
fn test_lrpop() {
mut redis := setup()!
defer {
cleanup(mut redis) or { panic(err) }
}
redis.lpush('test47', '123')!
redis.lpush('test48', 'balbal')!
r1 := redis.blpop(['test47', 'test48'], 1)!
assert r1[0] == 'test47'
assert r1[1] == '123'
r2 := redis.blpop(['test47', 'test48'], 1)!
assert r2[0] == 'test48'
assert r2[1] == 'balbal'
r3 := redis.blpop(['test47'], 1) or { return }
assert false, 'blpop should timeout'
}
// test_llen: LLEN matches the push count, returns 0 for a missing key,
// and errors (WRONGTYPE) when the key holds a non-list value.
fn test_llen() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	r1 := redis.lpush('test49', '123') or {
		assert false
		return
	}
	r2 := redis.llen('test49') or {
		assert false
		return
	}
	// LPUSH returns the new list length, which LLEN must agree with
	assert r2 == r1
	r3 := redis.llen('test50') or {
		assert false
		return
	}
	assert r3 == 0
	redis.set('test51', 'not a list')!
	// LLEN on a string key must fail; success would fall through to assert false
	redis.llen('test51') or {
		assert true
		return
	}
	assert false
}
// test_ttl: TTL returns the remaining seconds for a SETEX key,
// -1 for a key without expiry, and -2 for a missing key (Redis contract).
fn test_ttl() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	redis.setex('test52', 15, '123')!
	r1 := redis.ttl('test52') or {
		assert false
		return
	}
	assert r1 == 15
	redis.set('test53', '123')!
	r2 := redis.ttl('test53') or {
		assert false
		return
	}
	// -1: key exists but has no associated expiry
	assert r2 == -1
	r3 := redis.ttl('test54') or {
		assert false
		return
	}
	// -2: key does not exist
	assert r3 == -2
}

// test_pttl: same contract as TTL but in milliseconds; a small window is
// allowed because time elapses between PSETEX and PTTL.
fn test_pttl() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	redis.psetex('test55', 1500, '123')!
	r1 := redis.pttl('test55') or {
		assert false
		return
	}
	assert r1 >= 1490 && r1 <= 1500
	redis.set('test56', '123')!
	r2 := redis.pttl('test56') or {
		assert false
		return
	}
	assert r2 == -1
	r3 := redis.pttl('test57') or {
		assert false
		return
	}
	assert r3 == -2
}
// test_exists: EXISTS is false for a missing key and true after SET.
fn test_exists() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	r1 := redis.exists('test58') or {
		assert false
		return
	}
	assert r1 == false
	redis.set('test59', '123')!
	r2 := redis.exists('test59') or {
		assert false
		return
	}
	assert r2 == true
}

// test_type_of: TYPE errors for a missing key, and reports 'string' / 'list'
// for keys created via SET / LPUSH respectively.
fn test_type_of() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	// missing key: type_of must error, which is the expected path here
	_ := redis.type_of('test60') or {
		assert true
		return
	}
	redis.set('test61', '123')!
	mut r := redis.type_of('test61') or {
		assert false
		return
	}
	assert r == 'string'
	_ := redis.lpush('test62', '123')!
	r = redis.type_of('test62') or {
		assert false
		return
	}
	assert r == 'list'
}
// test_del: DEL returns the number of removed keys and the key is gone after.
fn test_del() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	redis.set('test63', '123')!
	c := redis.del('test63') or {
		assert false
		return
	}
	assert c == 1
	assert helper_get_key_not_found(mut redis, 'test63') == true
}

// test_rename: RENAME errors on a missing source key and moves the value
// to the new key when the source exists.
fn test_rename() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	// source does not exist yet: error is expected and only logged
	redis.rename('test64', 'test65') or { console.print_debug('key not found') }
	redis.set('test64', 'will be 65')!
	redis.rename('test64', 'test65')!
	r := redis.get('test65') or {
		assert false
		return
	}
	assert r == 'will be 65'
}
// test_renamenx: RENAMENX errors on a missing source, returns 1 when the
// destination is free, and 0 when the destination already exists.
fn test_renamenx() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	assert helper_renamenx_err_helper(mut redis, 'test66', 'test67') == 'no such key'
	redis.set('test68', '123')!
	redis.set('test66', 'will be 67')!
	r1 := redis.renamenx('test66', 'test67') or {
		assert false
		return
	}
	// 1: rename performed (destination was free)
	assert r1 == 1
	r2 := redis.get('test67') or {
		assert false
		return
	}
	assert r2 == 'will be 67'
	r3 := redis.renamenx('test67', 'test68') or {
		assert false
		return
	}
	// 0: rename refused (destination test68 already exists)
	assert r3 == 0
}

// test_flushall: FLUSHALL removes every key.
fn test_flushall() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	redis.set('test69', '123')!
	redis.flushall()!
	assert helper_get_key_not_found(mut redis, 'test69') == true
}
// test_keys: KEYS with a glob pattern matches exactly the keys sharing the prefix.
fn test_keys() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	redis.set('test70:1', '1')!
	redis.set('test70:2', '2')!
	r1 := redis.keys('test70:*') or {
		assert false
		return
	}
	assert r1.len == 2
}
// helper_get_key_not_found reports whether GET on `key` fails as "key not found".
// Pattern used by all helpers below: the `or` block returns the boolean verdict
// directly out of the helper; the trailing `== ''` is only evaluated when the
// call unexpectedly SUCCEEDS, in which case the helper reports true only for
// an empty value.
fn helper_get_key_not_found(mut redis redisclient.Redis, key string) bool {
	return redis.get(key) or {
		if err.msg() == 'key not found' || err.msg() == '' {
			return true
		} else {
			return false
		}
	} == ''
}

// helper_randomkey_database_empty reports whether RANDOMKEY fails because
// the database is empty (same or-block trick as above).
fn helper_randomkey_database_empty(mut redis redisclient.Redis) bool {
	return redis.randomkey() or {
		if err.msg() == 'database is empty' || err.msg() == '' {
			return true
		} else {
			return false
		}
	} == ''
}

// helper_renamenx_err_helper returns 'no such key' when RENAMENX fails
// (missing source), and '' when it succeeds.
fn helper_renamenx_err_helper(mut redis redisclient.Redis, key string, newkey string) string {
	redis.renamenx(key, newkey) or { return 'no such key' }
	return ''
}

// helper_lpop_key_not_found reports whether LPOP on `key` fails as "key not found".
fn helper_lpop_key_not_found(mut redis redisclient.Redis, key string) bool {
	return redis.lpop(key) or {
		if err.msg() == 'key not found' || err.msg() == '' {
			return true
		} else {
			return false
		}
	} == ''
}

// helper_rpop_key_not_found reports whether RPOP on `key` fails as "key not found".
fn helper_rpop_key_not_found(mut redis redisclient.Redis, key string) bool {
	return redis.rpop(key) or {
		if err.msg() == 'key not found' || err.msg() == '' {
			return true
		} else {
			return false
		}
	} == ''
}

View File

@@ -0,0 +1,33 @@
import freeflowuniverse.herolib.clients.redisclient
import freeflowuniverse.herolib.ui.console
// setup connects to the core Redis instance and switches to db 10 so the
// tests never touch data in the default db 0. Returns a reference to the
// client; NOTE(review): this returns the address of a local — presumably
// redisclient.Redis escapes to the heap here, confirm it is @[heap].
fn setup() !&redisclient.Redis {
	mut redis := redisclient.core_get()!
	// Select db 10 to be away from default one '0'
	redis.selectdb(10) or { panic(err) }
	return &redis
}

// cleanup wipes the test database after each test; the connection is kept
// open on purpose (see commented disconnect).
fn cleanup(mut redis redisclient.Redis) ! {
	redis.flushall()!
	// redis.disconnect()
}

// process_test is the RPC handler used by test_rpc: it echoes the command
// and payload in a recognizable format (trailing newlines are stripped by
// the assertion in the test).
fn process_test(cmd string, data string) !string {
	return '${cmd}+++++${data}\n\n\n\n'
}
// test_rpc: round-trips one call through the Redis-backed RPC mechanism:
// enqueue a call (wait: false), process it with process_test, then fetch
// the result from the return queue and compare against the echoed payload.
fn test_rpc() {
	mut redis := setup()!
	defer {
		cleanup(mut redis) or { panic(err) }
	}
	mut r := redis.rpc_get('testrpc')
	r.call(cmd: 'test.cmd', data: 'this is my data, normally json', wait: false)!
	// 10000 = timeout in ms for both processing and result retrieval
	returnqueue := r.process(10000, process_test)!
	mut res := r.result(10000, returnqueue)!
	console.print_debug(res)
	assert res.str().trim_space() == 'test.cmd+++++this is my data, normally json'
}

View File

@@ -0,0 +1,53 @@
# SendGrid Client
The SendGrid module allows you to use SendGrid services.
## About SendGrid
SendGrid is a cloud-based email delivery and communication platform that empowers businesses and developers to send transactional and marketing emails to their customers or users. It offers tools and APIs to manage email campaigns, monitor delivery, and gather analytics on recipient engagement.
## Requirements
To utilize this module, you will need:
- A SendGrid API key: Create a SendGrid account and acquire your API key [here](https://sendgrid.com/).
## Usage
To send an email using the SendGrid module, follow these steps:
### 1. Set Up a new email
In your V code, set up the email as shown below:
```v
email := sendgrid.new_email(
['target_email@example.com', 'target_email2@example.com'],
'source_email@example.com',
'Email Title', 'Email content; can include HTML')
```
### 2. Execute the program
You can execute the program using the following command:
```shell
v run sendgrid/example/main.v -t "YOUR_API_TOKEN"
```
You can provide the API key using the -t command-line argument, or you can export the API key using the following command:
```shell
export SENDGRID_AUTH_TOKEN="YOUR_API_TOKEN"
```
Additionally, you can enable debug mode by passing the -d flag:
```shell
v run sendgrid/example/main.v -d -t "YOUR_API_TOKEN"
```
## Advanced
We provide some useful structs and methods in [email](./email.v) and [personalizations](./personalizations.v) that you can leverage to tailor the emails according to your specific requirements.
You can check the SendGrid API reference [here](https://docs.sendgrid.com/api-reference/how-to-use-the-sendgrid-v3-api/)

View File

@@ -0,0 +1,41 @@
module sendgrid
import net.http
import json
// Client is a minimal SendGrid v3 API client; token is the SendGrid API key.
pub struct Client {
pub:
	token string
}

// SendGrid v3 "mail send" endpoint; all sends POST to this URL.
const send_api_endpoint = 'https://api.sendgrid.com/v3/mail/send'

// new_client returns a Client for the given API token.
// Errors when the token is empty so misconfiguration fails early.
pub fn new_client(token string) !Client {
	if token.len == 0 {
		return error('empty token')
	}
	return Client{
		token: token
	}
}
// get_headers builds the bearer-auth and JSON content-type headers used by
// every SendGrid API request.
fn (c Client) get_headers() !http.Header {
	return http.new_custom_header_from_map({
		'Authorization': 'Bearer ${c.token}'
		'Content-Type':  'application/json'
	})!
}
// send POSTs the email as JSON to the SendGrid mail-send endpoint.
// SendGrid answers 202 Accepted on success; any other status is surfaced
// as an error carrying the raw response body for diagnostics.
pub fn (c Client) send(email Email) ! {
	mut request := http.new_request(http.Method.post, send_api_endpoint, json.encode(email))
	request.header = c.get_headers()!
	res := request.do()!
	if res.status_code != int(http.Status.accepted) {
		return error(res.body)
	}
}

View File

@@ -0,0 +1,152 @@
module sendgrid
// Content is one MIME part of the message body; defaults to HTML.
pub struct Content {
	type_ string = 'text/html' @[json: 'type']
	value string
}

// Recipient is a single email address with an optional display name.
struct Recipient {
	email string @[required]
	name  ?string
}

// Attachment is a base64-encoded file attached to the message.
struct Attachment {
	content     string @[required]
	type_       ?string @[json: 'type']
	filename    string @[required]
	disposition ?string
	content_id  ?string
}

// UnsubscribeGroups maps to SendGrid's "asm" unsubscribe-group settings.
struct UnsubscribeGroups {
	group_id         i64 @[required]
	group_to_display []i64
}

// The Bypass* structs toggle SendGrid suppression-list bypass behaviors.
struct BypassListManagement {
	enable ?bool
}

struct BypassBounceManagement {
	enable ?bool
}

struct BypassUnsubscribeManagement {
	enable ?bool
}

// Footer appends a default footer (text and/or html) to every message.
struct Footer {
	enable ?bool
	text   ?string
	html   ?string
}

// SandboxMode validates the message without actually delivering it.
struct SandboxMode {
	enable ?bool
}

// MailSettings groups the per-message delivery overrides above.
struct MailSettings {
	bypass_list_management        ?BypassListManagement
	bypass_bounce_management      ?BypassBounceManagement
	bypass_unsubscribe_management ?BypassUnsubscribeManagement
	footer                        ?Footer
	sandbox_mode                  ?SandboxMode
}

// Tracking settings: click / open / subscription / Google Analytics.
struct ClickTrackingSettings {
	enable      ?bool
	enable_text ?bool
}

struct OpenTrackingSettings {
	enable           ?bool
	substitution_tag ?string
}

struct SubscriptionTrackingSettings {
	enable           ?bool
	text             ?string
	html             ?string
	substitution_tag ?string
}

struct GoogleAnalyticsSettings {
	enable       ?bool
	utm_source   ?string
	utm_medium   ?string
	utm_term     ?string
	utm_content  ?string
	utm_campaign ?string
}

struct TrackingSettings {
	click_tracking        ?ClickTrackingSettings
	open_tracking         ?OpenTrackingSettings
	subscription_tracking ?SubscriptionTrackingSettings
	ganalytics            ?GoogleAnalyticsSettings
}

// Email mirrors the SendGrid v3 mail-send request body; optional fields
// are omitted from the JSON payload when unset.
pub struct Email {
pub mut:
	personalizations []Personalizations @[required]
	from             Recipient @[required]
	subject          string @[required]
	content          []Content @[required]
	reply_to         ?Recipient
	reply_to_list    ?[]Recipient
	attachments      ?[]Attachment
	template_id      ?string
	headers          ?map[string]string
	categories       ?[]string
	custom_args      ?string
	send_at          ?i64
	batch_id         ?string
	asm_             ?UnsubscribeGroups @[json: 'asm']
	ip_pool_name     ?string
	mail_settings    ?MailSettings
	tracking_settings ?TrackingSettings
}
// add_personalization appends personalization entries to the email.
pub fn (mut e Email) add_personalization(personalizations []Personalizations) {
	e.personalizations << personalizations
}

// add_content appends extra MIME parts to the message body.
pub fn (mut e Email) add_content(content []Content) {
	e.content << content
}

// add_headers merges headers into the email; existing keys are overwritten.
// The `or` block handles the unset-option case: it initializes the map from
// the input and returns, otherwise the loop below merges key by key.
pub fn (mut e Email) add_headers(headers map[string]string) {
	e.headers or {
		e.headers = headers.clone()
		return
	}
	for k, v in headers {
		e.headers[k] = v
	}
}
// new_email builds an Email with a single personalization covering every
// address in `to`, sent from `from`, with the given subject and (HTML-capable)
// content string.
pub fn new_email(to []string, from string, subject string, content string) Email {
	recipients := to.map(Recipient{
		email: it
	})
	return Email{
		personalizations: [
			Personalizations{
				to: recipients
			},
		]
		from: Recipient{
			email: from
		}
		subject: subject
		content: [
			Content{
				value: content
			},
		]
	}
}

View File

@@ -0,0 +1,102 @@
module sendgrid
// Personalizations mirrors one entry of SendGrid's "personalizations" array:
// per-recipient-group overrides for addressing, subject, headers and
// template data. Optional fields are omitted from JSON when unset.
@[params]
pub struct Personalizations {
pub mut:
	to                    []Recipient @[required]
	from                  ?Recipient
	cc                    ?[]Recipient
	bcc                   ?[]Recipient
	subject               ?string
	headers               ?map[string]string
	substitutions         ?map[string]string
	dynamic_template_data ?map[string]string
	custom_args           ?map[string]string
	send_at               ?i64
}
// add_to adds a list of recipients to which this email should be sent.
fn (mut p Personalizations) add_to(r []Recipient) {
	p.to << r
}

// set_from assigns the from field in the email.
fn (mut p Personalizations) set_from(r Recipient) {
	p.from = r
}

// add_cc adds an array of recipients who will receive a copy of your email.
// The `or` block initializes the unset option field; otherwise items are appended.
fn (mut p Personalizations) add_cc(r []Recipient) {
	p.cc or {
		p.cc = r
		return
	}
	for item in r {
		p.cc << item
	}
}

// set_subject assigns the subject of the email.
fn (mut p Personalizations) set_subject(s string) {
	p.subject = s
}

// add_headers merges a map of key/value pairs specifying handling instructions
// for your email. If some of the new headers already existed, their values are
// overwritten.
fn (mut p Personalizations) add_headers(new_headers map[string]string) {
	p.headers or {
		p.headers = new_headers.clone()
		return
	}
	for k, v in new_headers {
		p.headers[k] = v
	}
}

// add_substitution merges a map of key/value pairs allowing you to insert data
// without using Dynamic Transactional Templates.
// If some of the keys already existed, their values are overwritten.
fn (mut p Personalizations) add_substitution(new_subs map[string]string) {
	p.substitutions or {
		p.substitutions = new_subs.clone()
		return
	}
	for k, v in new_subs {
		p.substitutions[k] = v
	}
}

// add_dynamic_template_data merges a map of key/value pairs into the dynamic
// template data. Dynamic template data is available using Handlebars syntax in
// Dynamic Transactional Templates.
// If some of the keys already existed, their values are overwritten.
fn (mut p Personalizations) add_dynamic_template_data(new_dynamic_template_data map[string]string) {
	p.dynamic_template_data or {
		p.dynamic_template_data = new_dynamic_template_data.clone()
		return
	}
	for k, v in new_dynamic_template_data {
		p.dynamic_template_data[k] = v
	}
}

// add_custom_args merges a map of key/value pairs into custom_args.
// Custom args are values specific to this personalization that will be carried
// along with the email and its activity data.
// If some of the keys already existed, their values are overwritten.
fn (mut p Personalizations) add_custom_args(new_custom_args map[string]string) {
	p.custom_args or {
		p.custom_args = new_custom_args.clone()
		return
	}
	for k, v in new_custom_args {
		p.custom_args[k] = v
	}
}

// set_send_at specifies when your email should be delivered. Scheduling
// delivery more than 72 hours in advance is forbidden by SendGrid.
fn (mut p Personalizations) set_send_at(send_at i64) {
	p.send_at = send_at
}

26
lib/clients/zdb/readme.md Normal file
View File

@@ -0,0 +1,26 @@
## Vlang ZDB Client
to use:
- build zero db from source: https://github.com/threefoldtech/0-db
- run zero-db from the root of the 0-db folder; see `./zdbd/zdb --help` for more info
## to use test
```bash
#must set unix domain with --socket argument when running zdb
#run zdb as following:
mkdir -p ~/.zdb
zdb --socket ~/.zdb/socket --admin 1234
redis-cli -s ~/.zdb/socket
#or easier:
redis-cli -s ~/.zdb/socket --raw nsinfo default
```
then in the redis-cli can do e.g.
```
nsinfo default
```

229
lib/clients/zdb/zdb.v Normal file
View File

@@ -0,0 +1,229 @@
module zdb
import freeflowuniverse.herolib.clients.redisclient
import freeflowuniverse.herolib.ui.console
// ZDB wraps a redisclient connection speaking the 0-db (zdb) RESP dialect.
pub struct ZDB {
pub mut:
	redis redisclient.Redis
}

// https://redis.io/topics/protocol
// examples:
// localhost:6379
// /tmp/redis-default.sock
// get connects to a zdb instance on addr, optionally authenticates with
// auth, and ensures the namespace exists (creating it when missing).
pub fn get(addr string, auth string, namespace string) !ZDB {
	console.print_header(' ZDB get: addr:${addr} namespace:${namespace}')
	mut redis := redisclient.get(addr)!
	mut zdb := ZDB{
		redis: redis
	}
	if auth != '' {
		zdb.redis.send_expect_ok(['AUTH', auth])!
	}
	if namespace != '' {
		mut namespaces := zdb.redis.send_expect_list_str(['NSLIST'])!
		// BUGFIX: array.map returns a NEW array and does not mutate in place;
		// the result was previously discarded, so the membership check below
		// compared a lowercased name against the original casing.
		namespaces = namespaces.map(it.to_lower())
		if namespace.to_lower() !in namespaces {
			zdb.redis.send_expect_ok(['NSNEW', namespace])!
		}
	}
	return zdb
}
// ping checks liveness of the zdb server.
pub fn (mut zdb ZDB) ping() !string {
	return zdb.redis.send_expect_str(['PING'])!
}

// set stores val under key; in zdb, if key is not specified the server
// assigns an incremental key (the returned string).
pub fn (mut zdb ZDB) set(key string, val string) !string {
	return zdb.redis.send_expect_str(['SET', key, val])!
}

// get returns the value stored under key.
pub fn (mut zdb ZDB) get(key string) !string {
	return zdb.redis.send_expect_str(['GET', key])!
}

// NOTE(review): mget currently sends a plain GET for a single key, making it
// identical to get(); a real MGET would take multiple keys — confirm intent.
pub fn (mut zdb ZDB) mget(key string) !string {
	return zdb.redis.send_expect_str(['GET', key])!
}

// del removes key from the current namespace.
pub fn (mut zdb ZDB) del(key string) !string {
	return zdb.redis.send_expect_str(['DEL', key])!
}

// used only for debugging, to check memory leaks
pub fn (mut zdb ZDB) stop() !string {
	return zdb.redis.send_expect_str(['STOP'])!
}

// exists reports key existence (zdb returns it as a string reply).
pub fn (mut zdb ZDB) exists(key string) !string {
	return zdb.redis.send_expect_str(['EXISTS', key])!
}

// check verifies the integrity of the data stored under key.
pub fn (mut zdb ZDB) check(key string) !string {
	return zdb.redis.send_expect_str(['CHECK', key])!
}

// keycur returns the cursor associated with key (for SCAN-style iteration).
pub fn (mut zdb ZDB) keycur(key string) !string {
	return zdb.redis.send_expect_str(['KEYCUR', key])!
}

// info returns the raw multi-line INFO text from the server.
pub fn (mut zdb ZDB) info() !string {
	i := zdb.redis.send_expect_str(['INFO'])!
	return i
}
// nsnew creates a new namespace on the server.
pub fn (mut zdb ZDB) nsnew(namespace string) !string {
	i := zdb.redis.send_expect_str(['NSNEW', namespace])!
	return i
}

// nsdel deletes a namespace and all data in it.
pub fn (mut zdb ZDB) nsdel(namespace string) !string {
	i := zdb.redis.send_expect_str(['NSDEL', namespace])!
	return i
}
// nsinfo returns the NSINFO reply parsed into a key/value map.
// Comment lines (starting with '#') and lines without ':' are skipped.
pub fn (mut zdb ZDB) nsinfo(namespace string) !map[string]string {
	i := zdb.redis.send_expect_str(['NSINFO', namespace])!
	mut res := map[string]string{}
	for line in i.split_into_lines() {
		if line.starts_with('#') {
			continue
		}
		if !(line.contains(':')) {
			continue
		}
		// BUGFIX: split on the FIRST ':' only (split_nth limits to 2 parts),
		// so values that themselves contain ':' (timestamps, socket paths)
		// are no longer truncated at the first colon.
		splitted := line.split_nth(':', 2)
		key := splitted[0]
		val := splitted[1]
		res[key.trim_space()] = val.trim_space()
	}
	return res
}
// nslist returns the names of all namespaces on the server.
pub fn (mut zdb ZDB) nslist() ![]string {
	i := zdb.redis.send_expect_list_str(['NSLIST'])!
	return i
}

// nssset sets property prop to val on namespace ns (NSSET command).
pub fn (mut zdb ZDB) nssset(ns string, prop string, val string) !string {
	i := zdb.redis.send_expect_str(['NSSET', ns, prop, val])!
	return i
}
// SelectArgs carries the target namespace and its optional password.
struct SelectArgs {
	namespace string
	password  string
}

// select_ns switches the connection to the given namespace; when a password
// is supplied the SECURE variant of SELECT is used.
pub fn (mut zdb ZDB) select_ns(args SelectArgs) !string {
	mut redis_args := ['SELECT', args.namespace]
	if args.password != '' {
		redis_args << 'SECURE'
		redis_args << args.password
	}
	i := zdb.redis.send_expect_str(redis_args)!
	return i
}
// dbsize returns the number of keys in the current namespace.
pub fn (mut zdb ZDB) dbsize() !string {
	i := zdb.redis.send_expect_str(['DBSIZE'])!
	return i
}

// time returns the current server time.
pub fn (mut zdb ZDB) time() !string {
	i := zdb.redis.send_expect_str(['TIME'])!
	return i
}

// auth authenticates the connection with the admin password.
pub fn (mut zdb ZDB) auth(password string) !string {
	i := zdb.redis.send_expect_str(['AUTH', password])!
	return i
}

// auth_secure starts the challenge-based secure authentication handshake.
pub fn (mut zdb ZDB) auth_secure() !string {
	i := zdb.redis.send_expect_str(['AUTH', 'SECURE'])!
	return i
}
// ScanArgs: optional cursor to resume a previous scan; empty starts fresh.
pub struct ScanArgs {
	cursor string
}

// scan walks keys forward from the optional cursor.
pub fn (mut zdb ZDB) scan(args ScanArgs) !string {
	mut redis_args := ['SCAN']
	if args.cursor != '' {
		redis_args << args.cursor
	}
	i := zdb.redis.send_expect_str(redis_args)!
	return i
}

// this is just an alias for SCAN
pub fn (mut zdb ZDB) scanx(args ScanArgs) !string {
	mut redis_args := ['SCANX']
	if args.cursor != '' {
		redis_args << args.cursor
	}
	i := zdb.redis.send_expect_str(redis_args)!
	return i
}

// rscan walks keys in reverse order from the optional cursor.
pub fn (mut zdb ZDB) rscan(args ScanArgs) !string {
	mut redis_args := ['RSCAN']
	if args.cursor != '' {
		redis_args << args.cursor
	}
	i := zdb.redis.send_expect_str(redis_args)!
	return i
}
// WaitArgs: command name to wait for, and timeout (seconds, as string,
// default '5' — presumably seconds, confirm against zdb docs).
struct WaitArgs {
	cmd     string
	timeout string = '5'
}

// wait blocks until another client executes args.cmd, or the timeout expires.
pub fn (mut zdb ZDB) wait(args WaitArgs) !string {
	i := zdb.redis.send_expect_str(['WAIT', args.cmd, args.timeout])!
	return i
}
// HistoryArgs: key whose version history is requested; bin_data is passed
// through as an extra argument when non-empty.
struct HistoryArgs {
	key      string
	bin_data string
}

// history returns the list of previous versions recorded for args.key.
pub fn (mut zdb ZDB) history(args HistoryArgs) ![]string {
	mut redis_args := ['HISTORY', args.key]
	if args.bin_data != '' {
		redis_args << args.bin_data
	}
	i := zdb.redis.send_expect_list_str(redis_args)!
	return i
}
// flush wipes all data in the current namespace.
pub fn (mut zdb ZDB) flush() !string {
	i := zdb.redis.send_expect_str(['FLUSH'])!
	return i
}

// hooks lists the hooks configured on the server.
pub fn (mut zdb ZDB) hooks() ![]string {
	i := zdb.redis.send_expect_list_str(['HOOKS'])!
	return i
}

// index_dirty lists dirty index entries.
// NOTE(review): 'INDEX DIRTY' is sent as ONE bulk string; if zdb parses it
// as two words it should be ['INDEX', 'DIRTY'] — confirm against the server.
pub fn (mut zdb ZDB) index_dirty() ![]string {
	i := zdb.redis.send_expect_list_str(['INDEX DIRTY'])!
	return i
}

// index_dirty_reset clears the dirty-index flags (same single-argument
// caveat as index_dirty above).
pub fn (mut zdb ZDB) index_dirty_reset() !string {
	i := zdb.redis.send_expect_str(['INDEX DIRTY RESET'])!
	return i
}

View File

@@ -0,0 +1,19 @@
module zdb
// TODO: enable this test when we have running zdb in ci also implement missing tests
// test_get is a placeholder: the whole body is disabled until CI runs a zdb
// instance (see module-level TODO). Kept so the test file compiles.
fn test_get() {
	// // must set unix domain with --socket argument when running zdb
	// // run zdb as following:
	// // mkdir -p ~/.zdb/ && zdb --socket ~/.zdb/socket --admin 1234
	// mut zdb := get('~/.zdb/socket', '1234', 'test')!
	// // check info returns info about zdb
	// info := zdb.info()!
	// assert info.contains('server_name: 0-db')
	// nslist := zdb.nslist()!
	// assert nslist == ['default', 'test']
	// nsinfo := zdb.nsinfo('default')!
	// assert 'name: default' in nsinfo
}

View File

@@ -42,7 +42,7 @@ pub fn install(args_ InstallArgs) ! {
if osal.is_linux() {
osal.package_install('redis-server')!
} else {
osal.package_install('redis')!/Users/despiegk1/code/github/freeflowuniverse/crystallib/crystallib/installers/db/redis/template
osal.package_install('redis')!/Users/despiegk1/code/github/freeflowuniverse/herolib/herolib/installers/db/redis/template
}
}
osal.execute_silent('mkdir -p ${args.datadir}')!

136
lib/core/pathlib/factory.v Normal file
View File

@@ -0,0 +1,136 @@
module pathlib
import os
// gets Path object, will check if it exists, is dir_file, ...
// get returns a Path for path_ and immediately checks it (existence, category).
pub fn get(path_ string) Path {
	mut result := get_no_check(path_)
	result.check()
	return result
}

// get_no_check builds a Path without touching the filesystem for the check:
// '~' is expanded to the home dir, a 'file://' prefix is stripped, and paths
// containing '..' are normalized to their absolute form.
pub fn get_no_check(path_ string) Path {
	mut normalized := path_
	if normalized.contains('~') {
		normalized = normalized.replace('~', os.home_dir())
	}
	if normalized.contains('file://') {
		normalized = normalized.trim_string_left('file://')
	}
	mut result := Path{
		path: normalized
	}
	if result.path.contains('..') {
		result.path = result.absolute()
	}
	return result
}
// GetArgs configures the get_dir/get_file/get_link factories.
@[params]
pub struct GetArgs {
pub mut:
	path   string
	create bool // create the dir/file on disk when missing
	check  bool = true // means will check the dir, link or file exists
	empty  bool // will empty the dir or the file
	delete bool // delete the path after resolving it
}
// get a directory, or needs to be created
// if the dir doesn't exist and is not created, then there will be an error
// get_dir resolves (and optionally creates/empties/deletes) a directory Path.
// empty implies create, create implies check. If the dir doesn't exist and
// create is not set, an error is returned by the is_dir check below.
// NOTE(review): delete/empty are silently ignored when check == false.
pub fn get_dir(args_ GetArgs) !Path {
	mut args := args_
	if args.empty {
		args.create = true
	}
	if args.create {
		args.check = true
	}
	mut p2 := get_no_check(args.path)
	if args.check {
		p2.check()
		// NOTE(review): this absolute() call discards its result — looks like
		// dead code; p2.path itself is not rewritten here.
		p2.absolute()
		if p2.exist == .no {
			if args.create {
				os.mkdir_all(p2.absolute()) or { return error('cannot create path ${p2}, ${err}') } // Make sure that all the needed paths created
				p2.check()
			}
			return p2
		}
		if !p2.is_dir() {
			return error('Path ${args.path} is not a dir.')
		}
		if args.empty {
			p2.empty()!
		}
		if args.delete {
			p2.delete()!
		}
	}
	return p2
}
// get_file resolves (and optionally creates/empties/deletes) a file Path.
// empty implies create, create implies check. Creating also makes any
// missing parent directories; empty truncates the file to zero bytes.
pub fn get_file(args_ GetArgs) !Path {
	mut args := args_
	if args.empty {
		args.create = true
	}
	if args.create {
		args.check = true
	}
	mut p2 := get_no_check(args.path)
	if args.check {
		p2.check()
		if args.create {
			mut parent_ := p2.parent()!
			parent_.check()
			if parent_.exist == .no {
				os.mkdir_all(parent_.path) or { return error('cannot create path:${args.path}') }
			}
			// writing '' both creates a missing file and truncates when empty is set
			if p2.exist == .no || args.empty {
				os.write_file(args.path, '') or {
					return error('cannot create empty file:${args.path} ${err}')
				}
				p2.check()
			}
		}
		if p2.exists() && !p2.is_file() {
			return error('Path ${args.path} is not a file.')
		}
		if args.delete {
			p2.delete()!
		}
	}
	return p2
}
// get_link resolves a symlink Path; links cannot be created here (create
// errors out). A non-existing path is returned tagged as a linkfile.
// empty empties the TARGET of the link, not the link itself.
pub fn get_link(args_ GetArgs) !Path {
	mut args := args_
	if args.create {
		return error("can't create link out of nothing")
	}
	mut p2 := get_no_check(args.path)
	if args.check {
		p2.check()
		if !p2.exists() {
			p2.cat = Category.linkfile
			return p2
		}
		if !p2.is_link() {
			return error('Path ${args.path} is not a link.')
		}
		if args.delete {
			p2.delete()!
		}
		if args.empty {
			mut p3 := p2.getlink()!
			p3.empty()!
		}
	}
	return p2
}
// get_wd returns the current working directory as a checked dir Path.
// os.getwd() always names an existing dir, so the error branch is unreachable.
pub fn get_wd() Path {
	return get_dir(path: os.getwd()) or { panic('This should never happen') }
}

133
lib/core/pathlib/path.v Normal file
View File

@@ -0,0 +1,133 @@
module pathlib
import freeflowuniverse.herolib.core.texttools
import os
// Path is a filesystem path plus cached knowledge about what it points to.
// cat/exist start as unknown and are filled in by check().
@[heap]
pub struct Path {
pub mut:
	path  string // the raw (possibly relative) path string
	cat   Category // file / dir / link kind, once checked
	exist UYN // tri-state existence cache
}

// Category classifies what a Path points at on disk.
pub enum Category {
	unknown
	file
	dir
	linkdir
	linkfile
}

// UYN is a tri-state: unknown (not yet checked), yes, no.
pub enum UYN {
	unknown
	yes
	no
}
// return absolute path .
// careful symlinks will not be resolved
pub fn (path Path) absolute() string {
	mut p := path.path.replace('~', os.home_dir())
	return os.abs_path(p)
}

// return absolute path .
// careful the symlinks will be followed !!!
pub fn (path Path) realpath() string {
	mut p := path.path.replace('~', os.home_dir())
	mut p2 := os.real_path(p)
	p2 = os.abs_path(p2)
	return p2
}

// shortpath returns the resolved path with the home dir abbreviated to '~'.
pub fn (path Path) shortpath() string {
	return path.realpath().replace(os.home_dir(), '~')
}
// check the inside of pathobject, is like an init function:
// fills in exist and cat from the filesystem. Links are classified by what
// they point at (linkfile vs linkdir). Panics only for paths that exist but
// are neither file nor dir (e.g. special files) — treated as a bug.
pub fn (mut path Path) check() {
	if os.exists(path.path) {
		path.exist = .yes
		if os.is_file(path.path) {
			if os.is_link(path.path) {
				path.cat = Category.linkfile
			} else {
				path.cat = Category.file
			}
		} else if os.is_dir(path.path) {
			if os.is_link(path.path) {
				path.cat = Category.linkdir
			} else {
				path.cat = Category.dir
			}
		} else {
			panic('cannot define type: ${path.path}, is bug')
		}
	} else {
		path.exist = .no
	}
}
// check_exists errors when the path does not exist on disk.
fn (mut path Path) check_exists() ! {
	if !path.exists() {
		return error('Path ${path} needs to exist, error')
	}
}

// returns name with extension
pub fn (path Path) name() string {
	return os.base(path.path)
}
// return name with all lowercase_special chars done and also no extension
pub fn (mut path Path) name_fix_no_underscore_no_ext() string {
	return texttools.name_fix_no_underscore_no_ext(path.name_no_ext())
}

// return name with all lowercase_special chars done but keep extension
pub fn (mut path Path) name_fix_keepext() string {
	return texttools.name_fix_keepext(path.name())
}

// name_fix_no_ext normalizes the name (texttools rules) and drops the extension.
pub fn (mut path Path) name_fix_no_ext() string {
	return texttools.name_fix_no_ext(path.name())
}
// full path of dir
pub fn (mut path Path) path_dir() string {
	return os.dir(path.path)
}

// QUESTION: should this mutate path's name, probably not?
// name_no_ext returns the base name without its last extension.
// Dotfiles like '.gitignore' strip to '', so the full name is returned instead.
pub fn (mut path Path) name_no_ext() string {
	mut name := path.name()
	if name.contains('.') {
		name = name.all_before_last('.')
	}
	if name == '' {
		return path.name()
	}
	return name
}
// path_no_ext returns the full path with the file extension removed.
pub fn (mut path Path) path_no_ext() string {
	return path.path_dir() + '/' + path.name_no_ext()
}

// name_ends_with_underscore reports whether the extension-less name ends in '_'.
pub fn (mut path Path) name_ends_with_underscore() bool {
	return path.name_no_ext().ends_with('_')
}

// return a path which has name ending with _
// (the extension is preserved; 'a/b.v' becomes 'a/b_.v')
pub fn (mut path Path) path_get_name_with_underscore() string {
	if path.name_ends_with_underscore() {
		return path.path
	} else {
		return path.path.all_before_last('.') + '_.' + path.extension()
	}
}
// pub fn (mut p Path) str() string {
// return 'path: $p.path'
// }

View File

@@ -0,0 +1,169 @@
module pathlib
import os
import freeflowuniverse.herolib.ui.console
// import time
// BackupArgs configures backup/restore; see backup_path() for semantics.
@[params]
pub struct BackupArgs {
pub mut:
	root      string // start of the dir tree being processed (not implemented yet)
	dest      string // backup destination dir; '' means <filedir>/.backup
	overwrite bool // reuse/overwrite the latest backup instead of creating a new one
	restore   bool // if we want to find the latest one, if we can't find one then its error
}
// start from existing name and look for name.$nr.$ext, nr need to be unique, ideal for backups
// if dest "" then will use the directory of the fileitself + "/.backup"
// e.g. /code/myaccount/despiegk/somedir/test.v if
// would be backed up to /code/myaccount/despiegk/somedir/.backup/test.1.v
// root is the start of the dir we process
// e.g. /code/myaccount/despiegk/somedir/test.v if
// if source = /code/myaccount/despiegk and dest = /backup then the file will be backed up to /backup/somedir/test.1.v
//
// struct BackupArgs{
// root string
// dest string
// overwrite bool
// restore bool //if we want to find the latest one, if we can't find one then its error
// }
// if overwrite this means will overwrite the last one in the directory
// backup_path finds the Path a backup of this file should live at:
// <dest>/<name>.<ext>.<i> with i the first free (or, for restore/overwrite,
// the latest used) index. dest defaults to <filedir>/.backup.
// Returns an error when restoring and no backup exists.
pub fn (mut path Path) backup_path(args BackupArgs) !Path {
	if !path.exists() && args.restore == false {
		// BUGFIX: error() only constructs the error value; without `return`
		// the check was a no-op and execution fell through.
		return error('cannot find path, so cannot create backup for ${path}')
	}
	mut dest := ''
	mut rel := ''
	if args.dest == '' {
		dest = path.path_dir() + '/.backup'
	}
	if !os.exists(dest) {
		os.mkdir_all(dest)!
	}
	if args.dest != '' || args.root != '' {
		panic('not implemented')
	}
	for i in 0 .. 1000 {
		console.print_debug(i.str())
		path_str := '${dest}/${rel}${path.name_no_ext()}.${path.extension()}.${i}'
		path_str_next := '${dest}/${rel}${path.name_no_ext()}.${path.extension()}.${i + 1}'
		mut path_found := Path{
			path: path_str
			cat: .file
		}
		mut path_found_next := Path{
			path: path_str_next
			cat: .file
		}
		if !path_found.exists() {
			// first free slot: nothing to restore from, new backup goes here
			if args.restore {
				return error('could not find a backup file in ${path_found.path} for restore')
			}
			return path_found
		}
		size := path_found.size()!
		if size > 0 {
			// only the latest backup (the one with no successor) is a candidate
			if !path_found_next.exists() {
				if args.restore || args.overwrite {
					return path_found
				}
				// NOTE(review): this compares the same file's size to itself,
				// so it always matches; presumably it was meant to compare
				// against the source file's size — confirm intent.
				size2 := path_found.size()!
				if size2 == size {
					// latest backup considered identical: reuse it
					path_found.exist = .yes
					return path_found
				}
			}
		}
	}
	return error('cannot find path for backup')
}
// create a backup, will maintain the extension
// if backup_path resolves to an already-existing identical backup, no copy is made
pub fn (mut path Path) backup(args BackupArgs) !Path {
	// console.print_debug(path.path)
	mut pbackup := path.backup_path(args)!
	if !pbackup.exists() {
		os.cp(path.path, pbackup.path)!
	}
	return pbackup
}
// restore copies the latest backup of this file back to path.path.
// Refuses to clobber an existing target unless args.overwrite is set.
pub fn (mut path Path) restore(args BackupArgs) ! {
	mut args2 := args
	args2.restore = true
	mut prestore := path.backup_path(args2)!
	// BUGFIX: the original guard was `args.overwrite || path.exists()`, which
	// allowed the copy exactly when the target already existed while the error
	// branch complained the target existed — inverted. Restoring is safe when
	// the target is ABSENT, or when overwrite was explicitly requested.
	if args.overwrite || !path.exists() {
		os.cp(prestore.path, path.path)!
	} else {
		return error('Cannot restore, because to be restored file exists: ${path.path}\n${args}')
	}
}
// backups_remove deletes every '.backup' directory found by a recursive
// listing under this path (args is currently unused here).
pub fn (mut path Path) backups_remove(args BackupArgs) ! {
	mut pl := path.list(recursive: true)!
	for mut p in pl.paths {
		if p.is_dir() {
			if p.name() == '.backup' {
				p.delete()!
			}
		}
	}
	// TODO: is not good enough, can be other path
}
// //represents one directory in which backup was done
// struct BackupDir{
// pub mut:
// items []BackupItem
// path Path //path where the backed up items are in
// }
// pub struct BackupItem{
// pub:
// name string //only the base name of the file
// hash string
// time time.Time
// backupdir &BackupDir
// }
// get the pathobject
// pub fn (bi BackupItem) path_get() Path {
// return get("${bi.backupdir.path.path}/${bi.name}")
// }
// //save the metadata for the backups
// pub fn (mut backupdir BackupDir) metadate_save() ! {
// mut out :=[]string{}
// // for item in backupdir.items{
// // out << item.metadata()
// // }
// }

View File

@@ -0,0 +1,64 @@
module pathlib
import os
// CopyArgs configures Path.copy; rsync is the default transport.
@[params]
pub struct CopyArgs {
pub mut:
	dest           string // path
	delete         bool // if true will remove files which are on dest which are not on source
	rsync          bool = true // we use rsync as default
	ssh_target     string // e.g. root@195.192.213.2:999
	ignore         []string // arguments to ignore e.g. ['*.pyc','*.bak']
	ignore_default bool = true // if set will ignore a common set
}
// copy file,dir is always recursive
// if ssh_target used then will copy over ssh e.g. .
// dest needs to be a directory or file .
// return Path of the destination file or dir .
// ignore patterns or an ssh target force the rsync transport; the non-rsync
// branch uses os.cp_all and always overwrites.
pub fn (mut path Path) copy(args_ CopyArgs) ! {
	mut args := args_
	if args.ignore.len > 0 || args.ssh_target.len > 0 {
		args.rsync = true
	}
	path.check()
	if !path.exists() {
		return error("can't find path for copy operation on ${path.path}")
	}
	if args.rsync == true {
		rsync(
			source: path.path
			dest: args.dest
			delete: args.delete
			ipaddr_dst: args.ssh_target
			ignore: args.ignore
			ignore_default: args.ignore_default
		)!
	} else {
		mut dest := get(args.dest)
		if dest.exists() {
			// both ends must be plain files or dirs; links/unknown are rejected
			if !(path.cat in [.file, .dir] && dest.cat in [.file, .dir]) {
				return error('Source or Destination path is not file or directory.\n\n${path.path} cat:${path.cat}---${dest.path} cat:${dest.cat}')
			}
			if path.cat == .dir && dest.cat == .file {
				return error("Can't copy directory to file")
			}
		}
		if path.cat == .file && dest.cat == .dir {
			// In case src is a file and dest is dir, we need to join the file name to the dest file
			file_name := os.base(path.path)
			dest.path = os.join_path(dest.path, file_name)
		}
		if !os.exists(dest.path_dir()) {
			os.mkdir_all(dest.path_dir())!
		}
		// $if debug {
		// 	console.print_debug(' copy: ${path.path} ${dest.path}')
		// }
		os.cp_all(path.path, dest.path, true)! // Always overwite if needed
		dest.check()
	}
}

View File

@@ -0,0 +1,9 @@
module pathlib
import crypto.sha256
// return sha256 hash of a file
// reads the whole file into memory, then hex-encodes the digest
pub fn (mut path Path) sha256() !string {
	c := path.read()!
	return sha256.hexhash(c)
}

View File

@@ -0,0 +1,95 @@
module pathlib
// join parts to a path and return path, returns a new path, create if needed
// The receiver must be an existing directory; '~' is rejected in parts.
pub fn (mut p Path) extend_dir_create(parts ...string) !Path {
	mut out := p.path
	if !p.is_dir() {
		// BUGFIX: message was the garbled "Cannot only extend a dir."
		return error('Can only extend a dir.')
	}
	if p.exists() == false {
		return error("Cannot extend a dir if it doesn't exist")
	}
	for part in parts {
		if part.contains('~') {
			return error('cannot extend part ${part} if ~ in')
		}
		part2 := part.trim(' ')
		out += '/' + part2.trim('/')
	}
	out = out.replace('//', '/')
	mut p2 := get_dir(path: out, create: true)!
	return p2
}
// only works for a dir: returns a Path for file `name` inside dir p
pub fn (mut p Path) extend_file(name string) !Path {
	if !p.is_dir() {
		return error('Cannot only extend a dir.')
	}
	if p.exists() == false {
		return error("Cannot extend a dir if it doesn't exist")
	}
	if name.contains('~') {
		return error('cannot extend dir if ~ in name: ${name}')
	}
	// join and collapse accidental double slashes
	full := (p.path + '/' + name.trim('/')).replace('//', '/')
	return get_file(path: full)!
}
// extend the path, path stays same, no return
// if dir, needs to stay dir
// anything else fails
// mutates path.path in place by appending each part
pub fn (mut path Path) extend(parts ...string) ! {
	if !path.is_dir() {
		return error('can only extend dir, ${path}')
	}
	for part in parts {
		if part.contains('~') {
			return error('cannot extend part to ${part} if ~ in')
		}
		part2 := part.trim(' ')
		path.path += '/' + part2
	}
	// if the extended path already exists it must still be a dir
	if path.exists() {
		if !path.is_dir() {
			return error('can only extend dir if is dir again.')
		}
	}
	// collapse double slashes and re-detect category/existence
	path.path = path.path.replace('//', '/')
	path.check()
}
// pub fn (path Path) extend_dir(relpath string) ! {
// relpath2 = relpath2.replace("\\","/")
// if path.cat != Category.dir{
// return error("cannot only extend a dir, not a file or a link. $path")
// }
// return dir_new("$path/relpath2")
// }
// pub fn (path Path) extend_file_exists(relpath string) !Path {
// mut relpath2 := relpath.trim(" ")
// relpath2 = relpath2.replace("\\","/")
// if path.cat != Category.dir{
// return error("cannot only extend a dir, not a file or a link. $path")
// }
// return file_new_exists("$path/relpath2")
// }
// pub fn (path Path) extend_exists(relpath string) !Path {
// p2 := path.extend(relpath)!
// if ! p2.exists(){
// return error("cannot extend $path with $relpath, directory does not exist")
// }
// return p2
// }

View File

@@ -0,0 +1,59 @@
module pathlib
const image_exts = ['jpg', 'jpeg', 'png', 'gif', 'svg']
const image_exts_basic = ['jpg', 'jpeg', 'png']
// true when the path is a dir or a link pointing to a dir (lazy check on first use)
pub fn (mut path Path) is_dir() bool {
	if path.cat == Category.unknown {
		// category not determined yet: inspect the filesystem now
		path.check()
	}
	return path.cat in [Category.dir, Category.linkdir]
}
// check is dir and a link
pub fn (mut path Path) is_dir_link() bool {
	if path.cat == .unknown {
		// lazily resolve the category
		path.check()
	}
	return path.cat == .linkdir
}
// is a file but no link
pub fn (mut path Path) is_file() bool {
	if path.cat == .unknown {
		// lazily resolve the category
		path.check()
	}
	return path.cat == .file
}
// true when the path string has an image extension (jpg/jpeg/png/gif/svg), case-insensitive
pub fn is_image(path string) bool {
	if !path.contains('.') {
		// no extension at all: cannot be an image
		return false
	}
	ext := path.all_after_last('.').to_lower()
	return ext in image_exts
}
// true when this path's extension is one of the known image extensions
pub fn (path Path) is_image() bool {
	return path.extension().to_lower() in image_exts
}
// true when this path's extension is jpg, jpeg or png (the 'basic' image set)
pub fn (path Path) is_image_jpg_png() bool {
	return path.extension().to_lower() in image_exts_basic
}
// true when the path is a link to a file or a link to a dir .
// NOTE(review): unlike is_dir/is_file this receiver is not `mut`, so it cannot
// lazily call check() and panics on an unchecked path instead — confirm intent.
pub fn (path Path) is_link() bool {
	if path.cat == .unknown {
		// console.print_debug(path)
		panic('did not check path yet.')
	}
	return path.cat == Category.linkfile || path.cat == Category.linkdir
}

View File

@@ -0,0 +1,117 @@
module pathlib
import os
// import freeflowuniverse.herolib.ui.console
// path needs to be existing
// linkpath is where the link will be (the symlink who points to path)
// if delete_exists is set, an existing (or broken) destination is removed first .
// the symlink target is stored relative to the link's dir, so the tree stays relocatable .
pub fn (mut path Path) link(linkpath string, delete_exists bool) !Path {
	if !path.exists() {
		return error('cannot link because source ${path.path} does not exist')
	}
	if !(path.cat == .file || path.cat == .dir) {
		return error('cannot link because source ${path.path} can only be dir or file')
	}
	if path_equal(path.path, linkpath) {
		return error('try to link to myself. Link dest & source same. ${linkpath}')
	}
	// TODO: add test to confirm existing faulty link also are removed
	// os.exists for faulty links returns false so also checks if path is link
	if os.exists(linkpath) || os.is_link(linkpath) {
		if delete_exists {
			mut linkpath_obj := get(linkpath)
			linkpath_obj.delete()!
		} else {
			return error('cannot link ${path.path} to ${linkpath}, because dest exists.')
		}
	}
	dest_dir := os.dir(linkpath)
	if !os.exists(dest_dir) {
		os.mkdir_all(dest_dir)!
	}
	// same relative-path computation for dir and file sources
	// (was a dead if/else with two identical branches)
	origin_path := path_relative(dest_dir, path.path)!
	// console.print_debug("${dest_dir} ::: ${origin_path} ::: ${linkpath}")
	msg := 'link to origin (source): ${path.path} \nthe link:${linkpath} \nlink rel: ${origin_path}'
	// TODO: figure out why os.symlink doesn't work for linking file into dir
	os.symlink(origin_path, linkpath) or { return error('cant symlink ${msg}\n${err}') }
	return get(linkpath)
}
// will make sure that the link goes from file with largest path to smallest
// good to make sure we have links always done in same way
// no-op when the path is not a link or is already oriented correctly
pub fn (mut path Path) relink() ! {
	if !path.is_link() {
		return
	}
	link_abs_path := path.absolute() // symlink not followed
	link_real_path := path.realpath() // this is with the symlink resolved
	// lexicographic compare decides the canonical direction of the link
	if compare_strings(link_abs_path, link_real_path) >= 0 {
		// means the shortest path is the target (or if same size its sorted and the first)
		return
	}
	// need to switch link with the real content
	path.unlink()! // make sure both are files now (the link is the file)
	path.link(link_real_path, true)! // re-link
	path.check()
	// TODO: in test script
}
// resolve link to the real content
// copy the target of the link to the link
// after this call the path is a regular file with the target's content
pub fn (mut path Path) unlink() ! {
	if !path.is_link() {
		// nothing to do because not link, will not giver error
		return
	}
	if path.is_dir() {
		// only file links are supported (is_dir is also true for linkdir)
		return error('Cannot unlink a directory: ${path.path}')
	}
	link_abs_path := path.absolute()
	link_real_path := path.realpath() // this is with the symlink resolved
	mut link_path := get(link_real_path)
	// $if debug {
	// console.print_header(' copy source file:'$link_real_path' of link to link loc:'$link_abs_path'")
	// }
	// copy target next to the link first so we never lose data if a step fails
	mut destpath := get(link_abs_path + '.temp') // lets first copy to the .temp location
	link_path.copy(dest: destpath.path)! // copy to the temp location
	path.delete()! // remove the file or dir which is link
	destpath.rename(path.name())! // rename to the new path
	path.path = destpath.path // put path back
	path.check()
	// TODO: in test script
}
// return the target of the symlink as a string (as stored, possibly relative) .
// errors when the path is not a link or the readlink command fails .
pub fn (mut path Path) readlink() !string {
	if !path.is_link() {
		return error('can only read link info when the path is a filelink or dirlink. ${path}')
	}
	cmd := 'readlink ${path.path}'
	res := os.execute(cmd)
	if res.exit_code > 0 {
		// was '${error}': interpolated the builtin error function instead of any
		// useful info; report the command output instead
		return error('cannot define result for link of ${path} \n${res.output}')
	}
	return res.output.trim_space()
}
// return path object which is the result of the link (path link points too)
pub fn (mut path Path) getlink() !Path {
	// guard clause: only link paths can be dereferenced
	if !path.is_link() {
		return error('can only get link when the path is a filelink or dirlink. ${path}')
	}
	return get(path.realpath())
}

View File

@@ -0,0 +1,147 @@
import freeflowuniverse.herolib.core.pathlib { Path }
import freeflowuniverse.herolib.ui.console
import os
const testpath = os.dir(@FILE) + '/examples/test_path'
// build a fresh fixture tree: testfile1.md at root, testfile2/3.md under test_parent
fn testsuite_begin() {
	console.print_debug('create files for link test')
	// start from a clean slate; ignore error when the dir is already gone
	os.rmdir_all(os.dir(@FILE) + '/examples') or {}
	assert !os.is_dir(testpath)
	os.mkdir_all(testpath) or { panic(err) }
	os.mkdir_all('${testpath}/test_parent') or { panic(err) }
	os.create('${testpath}/testfile1.md') or { panic(err) }
	os.create('${testpath}/test_parent/testfile2.md') or { panic(err) }
	os.create('${testpath}/test_parent/testfile3.md') or { panic(err) }
}
// remove the whole fixture tree; best effort (ignore errors)
fn testsuite_end() {
	examples_dir := os.dir(@FILE) + '/examples'
	os.rmdir_all(examples_dir) or {}
}
// exercises Path.link: linking upward to a parent dir, delete_exists on a fresh
// dest, overwriting an existing link, and refusing to overwrite when delete_exists=false
fn test_link() {
	testsuite_begin()
	console.print_stdout('************ TEST_link ************')
	mut source1 := pathlib.get('${testpath}/test_parent/testfile2.md')
	mut source2 := pathlib.get('${testpath}/test_parent/testfile3.md')
	mut source3 := pathlib.get('${testpath}/testfile1.md')
	assert source1.exists()
	assert source2.exists()
	assert source3.exists()
	// link to a parent
	mut link11 := source3.link('${testpath}/test_parent/uplink', true) or {
		panic('no uplink: ${err}')
	}
	mut link11_link := pathlib.get('${testpath}/test_parent/uplink')
	path11 := link11_link.readlink() or { panic(err) }
	// target is stored relative to the link's dir
	assert path11 == '../testfile1.md'
	// test delete exists with nonexistent dest
	mut dest := pathlib.get('${testpath}/test_link.md')
	assert !dest.exists()
	mut link1 := source1.link(dest.path, true) or { panic('no link: ${err}') }
	assert link1.path == '${testpath}/test_link.md'
	dest = pathlib.get('${testpath}/test_link.md')
	assert dest.exists()
	// test delete exists with existing dest
	assert dest.realpath() == source1.path
	mut link2 := source2.link(dest.path, true) or { panic('no link ${err}') }
	assert link2.path == '${testpath}/test_link.md'
	assert link2.realpath() != source1.path
	assert link2.realpath() == source2.path
	// test delete_exists false with existing dest
	dest = pathlib.get('${testpath}/test_link.md')
	assert dest.realpath() == source2.path
	mut link3 := source1.link(dest.path, false) or { Path{} }
	assert link3.path == '' // link should error so check empty path obj
	dest = pathlib.get('${testpath}/test_link.md')
	assert dest.realpath() == source2.path // dest remains unchanged
	dest.delete() or {}
	console.print_stdout('Link function working correctly')
}
// exercises Path.readlink: errors on a non-link, returns the stored relative target on a link
fn test_readlink() {
	testsuite_begin()
	console.print_stdout('************ TEST_readlink ************')
	// test with none link path
	mut source := pathlib.get('${testpath}/test_parent/testfile2.md')
	mut dest_ := '${testpath}/test_readlink.md'
	path := source.readlink() or { '' }
	assert path == '' // is not a link so cannot read
	// test with filelink path
	mut link := source.link(dest_, true) or { panic('error: ${err}') }
	mut dest := pathlib.get(dest_)
	assert dest.cat == .linkfile
	assert dest.path == dest_
	link_source := dest.readlink() or { panic(err) }
	// link lives at testpath root, so the relative target includes test_parent
	assert link_source == 'test_parent/testfile2.md'
	dest.delete() or {}
	console.print_stdout('Readlink function working correctly')
}
// fn test_unlink() {
// console.print_stdout('************ TEST_unlink ************')
// // test with filelink path
// mut source := pathlib.get('${testpath}/test_parent/testfile3.md')
// mut dest_ := '${testpath}/test_unlink.md'
// mut link := source.link(dest_, true) or { panic('error: ${err}') }
// mut dest := pathlib.get(dest_)
// // TODO: check if content is from source
// assert dest.cat == .linkfile
// dest.unlink() or { panic('Failed to unlink: ${err}') }
// assert dest.exists()
// assert dest.cat == .file
// dest.delete()!
// // TODO: maybe more edge cases?
// console.print_stdout('Unlink function working correctly')
// }
// exercises Path.relink: a correctly oriented link stays as-is, a reversed
// link gets its direction swapped (link becomes file, file becomes link)
fn test_relink() {
	testsuite_begin()
	console.print_stdout('************ TEST_relink ************')
	mut source := pathlib.get('${testpath}/test_parent/testfile2.md')
	mut dest_ := '${testpath}/test_relink.md'
	mut link := source.link(dest_, true) or { panic('error: ${err}') }
	mut dest := pathlib.get(dest_)
	// linked correctly so doesn't change
	assert source.cat == .file
	assert dest.cat == .linkfile
	dest.relink() or { panic('Failed to relink: ${err}') }
	source_new := pathlib.get(source.path)
	assert source_new.cat == .file
	assert dest.cat == .linkfile
	// switching source and destination
	mut source2 := pathlib.get(dest_)
	source2.unlink() or { panic('Failed to unlink: ${err}') }
	mut dest2_ := source.path
	// linked incorrectly so should relink
	mut link2 := source2.link(dest2_, true) or { panic('error: ${err}') }
	mut dest2 := pathlib.get(dest2_)
	assert source2.cat == .file
	assert dest2.cat == .linkfile
	dest2.relink() or { panic('Failed to relink: ${err}') }
	// after relink the roles are swapped
	source2_new := pathlib.get(source2.path)
	assert source2_new.cat == .linkfile
	assert dest2.cat == .file
	dest.delete()!
}

View File

@@ -0,0 +1,174 @@
module pathlib
import os
import regex
// import freeflowuniverse.herolib.core.smartid
import freeflowuniverse.herolib.ui.console
@[params]
pub struct ListArgs {
pub mut:
	regex []string // a name is included when it matches one of these regexes (empty = all)
	recursive bool = true
	ignoredefault bool = true // ignore files starting with . and _
	include_links bool // whether to include links in list
	dirs_only bool // only return directories
	files_only bool // only return files
}
// the result of pathlist
pub struct PathList {
pub mut:
	// is the root under which all paths are, think about it like a changeroot environment
	root string
	paths []Path // the matched paths, sorted per directory level
}
// list all files & dirs under this path (follows symlinks when include_links is set) .
// items per directory are sorted; returns a PathList rooted at this path .
// .
// params: .
// ```
// regex []string
// recursive bool // std off, means we recursive not over dirs by default
// ignoredefault bool = true // ignore files starting with . and _
// dirs_only bool
//
// example see https://github.com/freeflowuniverse/herolib/blob/development/examples/core/pathlib/examples/list/path_list.v
//
// e.g. p.list(regex:[r'.*\.v$'])! //notice the r in front of string, this is regex for all files ending with .v
//
// ```
// please note links are ignored for walking over dirstructure (for files and dirs)
pub fn (mut path Path) list(args_ ListArgs) !PathList {
	// compile every regex up front so a bad pattern fails fast
	mut compiled := []regex.RE{}
	for pattern in args_.regex {
		compiled << regex.regex_opt(pattern) or { return error("cannot create regex for:'${pattern}'") }
	}
	internal_args := ListArgsInternal{
		regex: compiled
		recursive: args_.recursive
		ignoredefault: args_.ignoredefault
		dirs_only: args_.dirs_only
		files_only: args_.files_only
		include_links: args_.include_links
	}
	found := path.list_internal(internal_args)!
	return PathList{
		root: path.path
		paths: found
	}
}
// internal variant of ListArgs with the regexes already compiled
@[params]
pub struct ListArgsInternal {
mut:
	regex []regex.RE // only put files in which follow one of the regexes
	recursive bool = true
	ignoredefault bool = true // ignore files starting with . and _
	dirs_only bool
	files_only bool
	include_links bool
}
// walk one directory level; recurses when args.recursive .
// broken links are skipped; returns the flat list of accepted paths .
fn (mut path Path) list_internal(args ListArgsInternal) ![]Path {
	debug := false
	path.check()
	if !path.is_dir() && (!path.is_dir_link() || !args.include_links) {
		// not a dir (or a dirlink we don't follow): nothing to list
		return []Path{}
	}
	if debug {
		console.print_header(' ${path.path}')
	}
	mut ls_result := os.ls(path.path) or { []string{} }
	ls_result.sort()
	mut all_list := []Path{}
	for item in ls_result {
		if debug {
			console.print_stdout(' - ${item}')
		}
		p := os.join_path(path.path, item)
		mut new_path := get(p)
		// Check for dir and linkdir
		if !new_path.exists() {
			// to deal with broken link
			continue
		}
		if new_path.is_link() && !args.include_links {
			continue
		}
		if args.ignoredefault {
			if item.starts_with('_') || item.starts_with('.') {
				continue
			}
		}
		if new_path.is_dir() || (new_path.is_dir_link() && args.include_links) {
			// If recursive
			if args.recursive {
				mut rec_list := new_path.list_internal(args)!
				all_list << rec_list
			} else {
				if !args.files_only {
					all_list << new_path
				}
				continue
			}
		}
		// a name is accepted when there are no regexes, or when ANY regex matches
		// (was: ALL regexes had to match, contradicting the documented 'one of the regexes')
		mut addthefile := args.regex.len == 0
		for r in args.regex {
			if r.matches_string(item) {
				addthefile = true
				break
			}
		}
		if addthefile && !args.dirs_only {
			if !args.files_only || new_path.is_file() {
				all_list << new_path
			}
		}
	}
	return all_list
}
// copy every path in the list to dest
pub fn (mut pathlist PathList) copy(dest string) ! {
	for mut item in pathlist.paths {
		item.copy(dest: dest)!
	}
}
// delete every path in the list
pub fn (mut pathlist PathList) delete() ! {
	for mut item in pathlist.paths {
		item.delete()!
	}
}
// sids_acknowledge .
// pub fn (mut pathlist PathList) sids_acknowledge(cid smartid.CID) ! {
// for mut path in pathlist.paths {
// path.sids_acknowledge(cid)!
// }
// }
// // sids_replace .
// // find parts of text in form sid:*** till sid:****** .
// // replace all occurrences with new sid's which are unique .
// // cid = is the circle id for which we find the id's .
// // sids will be replaced in the files if they are different
// pub fn (mut pathlist PathList) sids_replace(cid smartid.CID) ! {
// for mut path in pathlist.paths {
// path.sids_replace(cid)!
// }
// }

View File

@@ -0,0 +1,51 @@
import freeflowuniverse.herolib.core.pathlib
import freeflowuniverse.herolib.ui.console
import os
const testpath = os.dir(@FILE) + '/testdata'
// fixture: testfile.txt, test_parent/subfile.txt, test_parent/test_child/subsubfile.txt
fn testsuite_begin() {
	// start from a clean slate
	os.rmdir_all(testpath) or {}
	assert !os.is_dir(testpath)
	os.mkdir_all(testpath) or { panic(err) }
	os.mkdir_all('${testpath}/test_parent') or { panic(err) }
	// create some files for testing
	os.create('${testpath}/testfile.txt')!
	os.create('${testpath}/test_parent/subfile.txt')!
	os.mkdir('${testpath}/test_parent/test_child')!
	os.create('${testpath}/test_parent/test_child/subsubfile.txt')!
}
// best-effort cleanup of the fixture tree
fn testsuite_end() {
	os.rmdir_all(testpath) or {}
}
// smoke test: recursive list over the fixture tree must not error
fn test_list() {
	console.print_stdout('************ TEST_list ************')
	mut test_path_dir := pathlib.get('${testpath}')
	listed := test_path_dir.list(recursive: true) or { panic(err) }
	console.print_debug('${listed}')
}
// verify dirs_only filtering (was a verbatim copy of test_list that never set dirs_only)
fn test_list_dirs() {
	console.print_stdout('************ TEST_list_dir ************')
	mut test_path_dir := pathlib.get('${testpath}')
	result := test_path_dir.list(recursive: true, dirs_only: true) or { panic(err) }
	// fixture contains exactly two dirs: test_parent and test_parent/test_child
	assert result.paths.len == 2
	console.print_debug('${result}')
}
// recursive list of the fixture tree returns all entries
// NOTE(review): counts dirs too (3 files + 2 dirs = 5); does not set files_only — confirm intent
fn test_list_files() {
	console.print_stdout('************ TEST_list_files ************')
	mut test_path_dir := pathlib.get('${testpath}')
	mut fl := test_path_dir.list() or { panic(err) }
	result := fl.paths
	assert result.len == 5
}
// smoke test with default ListArgs
// NOTE(review): the fixture creates no links, so link handling is not actually exercised here
fn test_list_links() {
	console.print_stdout('************ TEST_list_link ************')
	mut test_path_dir := pathlib.get('${testpath}')
	result := test_path_dir.list(pathlib.ListArgs{}) or { panic(err) }
	console.print_debug('${result}')
}

View File

@@ -0,0 +1,56 @@
module pathlib
import crypto.md5
import os
import io
import encoding.hex
// return the md5 digest in hex format
pub fn (mut path Path) md5hex() !string {
	digest := path.md5()!
	return hex.encode(digest)
}
// calculate md5 in reproducable way for directory as well as large file
// files are streamed in chunks; for dirs the per-file digests are sorted
// before aggregation so the result is order-independent
pub fn (mut path Path) md5() ![]u8 {
	path.check_exists()!
	// console.print_header(' md5: $path.path")
	if path.cat == .file {
		mut d := md5.new()
		mut ff := os.open(path.path)!
		defer {
			ff.close()
		}
		mut buffered_reader := io.new_buffered_reader(reader: ff)
		chunk_size := 128 * 1024 // 128KB chunks, adjust as needed
		mut buffer := []u8{len: chunk_size}
		for {
			bytes_read := buffered_reader.read(mut buffer) or {
				// EOF ends the loop; any other read error propagates
				if err.type_name() == 'io.Eof' {
					break
				} else {
					return err
				}
			}
			d.write(buffer[0..bytes_read])!
		}
		md5bytes := d.sum([]u8{})
		return md5bytes
	} else {
		// dir: hash every contained path recursively
		mut pl := path.list(recursive: true)!
		mut out := []string{}
		for mut p in pl.paths {
			md5bytes := p.md5()!
			out << hex.encode(md5bytes)
		}
		// now we need to sort out, to make sure we always aggregate in same way
		out.sort()
		mut d := md5.new()
		for o in out {
			md5bytes2 := hex.decode(o)!
			d.write(md5bytes2)!
		}
		md5bytes2 := d.sum([]u8{})
		return md5bytes2
	}
}

View File

@@ -0,0 +1,18 @@
module pathlib
// get all text for path and underneith (works for dir & file)
pub fn (mut path Path) recursive_text() ![]string {
	if path.cat == .file {
		// single file: just split its content into lines
		content := path.read()!
		return content.split_into_lines()
	}
	// dir: concatenate the lines of every contained file
	mut res := []string{}
	mut pl := path.list(recursive: true)!
	for mut p in pl.paths {
		res << p.recursive_text()!
	}
	return res
}

View File

@@ -0,0 +1,151 @@
module pathlib
import os
import freeflowuniverse.herolib.ui.console
// arguments for rsync / rsync_cmd_options
@[params]
pub struct RsyncArgs {
pub mut:
	source string
	dest string
	ipaddr_src string // e.g. root@192.168.5.5:33 (can be without root@ or :port)
	ipaddr_dst string // remote destination; only one of src/dst may be remote
	delete bool // do we want to delete the destination
	ignore []string // arguments to ignore e.g. ['*.pyc','*.bak']
	ignore_default bool = true // if set will ignore a common set
	debug bool = true
	fast_rsync bool // when set rsync compares sizes only (--size-only)
	sshkey string // path to ssh private key for remote transfers
}
// flexible tool to sync files from to, does even support ssh .
// args: .
// ```
// source string
// dest string
// delete bool //do we want to delete the destination
// ipaddr_src string //e.g. root@192.168.5.5:33 (can be without root@ or :port)
// ipaddr_dst string //can only use src or dst, not both
// ignore []string //arguments to ignore
// ignore_default bool = true //if set will ignore a common set
// stdout bool = true
// ```
// .
pub fn rsync(args_ RsyncArgs) ! {
	mut args := args_
	// check the binary exists before doing any work
	r := os.execute('which rsync')
	if r.exit_code > 0 {
		return error('Could not find the rsync command, please install.')
	}
	if args.ipaddr_src.len == 0 {
		// local source: resolve/normalize it
		get(args.source)
	}
	cmdoptions := rsync_cmd_options(args)!
	$if debug {
		console.print_debug(' rsync command:\nrsync ${cmdoptions}')
	}
	cmd := 'rsync ${cmdoptions}'
	res := os.execute(cmd)
	if res.exit_code > 0 {
		// include rsync's own output so failures are diagnosable
		return error('could not execute rsync:\n${cmd}\n${res.output}')
	}
}
// return the cmd with all rsync arguments .
// see rsync for usage of args
pub fn rsync_cmd_options(args_ RsyncArgs) !string {
	mut args := args_
	mut cmd := ''
	// normalize both endpoints
	args.source = os.norm_path(args.source)
	args.dest = os.norm_path(args.dest)
	mut delete := ''
	if args.delete {
		delete = '--delete'
	}
	mut options := '-rvz --no-perms'
	if args.fast_rsync {
		// compare sizes only: much faster, less exact
		options += ' --size-only'
	}
	mut sshpart := ''
	mut addrpart := ''
	mut exclude := ''
	if args.ignore_default {
		defaultset := ['*.pyc', '*.bak', '*dSYM']
		for item in defaultset {
			if item !in args.ignore {
				args.ignore << item
			}
		}
	}
	for excl in args.ignore {
		exclude += " --exclude='${excl}'"
	}
	args.source = args.source.trim_right('/ ')
	args.dest = args.dest.trim_right('/ ')
	// a dir source gets a trailing slash on both ends (rsync copies contents, not
	// the dir itself); a file source must not (merged two identical if statements)
	mut src_path := get(args.source)
	if !src_path.is_file() {
		args.source = args.source + '/'
		args.dest = args.dest + '/'
	}
	if args.ipaddr_src.len > 0 && args.ipaddr_dst.len == 0 {
		sshpart, addrpart = rsync_ipaddr_format(ipaddr: args.ipaddr_src, sshkey: args.sshkey)!
		cmd = '${options} ${delete} ${exclude} ${sshpart} ${addrpart}:${args.source} ${args.dest}'
	} else if args.ipaddr_dst.len > 0 && args.ipaddr_src.len == 0 {
		sshpart, addrpart = rsync_ipaddr_format(ipaddr: args.ipaddr_dst, sshkey: args.sshkey)!
		cmd = '${options} ${delete} ${exclude} ${sshpart} ${args.source} ${addrpart}:${args.dest}'
	} else if args.ipaddr_dst.len > 0 && args.ipaddr_src.len > 0 {
		return error('cannot have source and dest as ssh')
	} else {
		cmd = '${options} ${delete} ${exclude} ${args.source} ${args.dest}'
	}
	return cmd
}
// arguments for rsync_ipaddr_format; user/port are defaults overridden
// by what is embedded in the ipaddr string (user@host:port)
@[params]
struct RsyncFormatArgs {
mut:
	ipaddr string
	user string = 'root'
	port int = 22
	sshkey string
}
// split an address spec like 'root@192.168.5.5:33' into the rsync -e ssh option
// string and the 'user@host' part; defaults: user=root, port=22 .
fn rsync_ipaddr_format(args_ RsyncFormatArgs) !(string, string) {
	mut args := args_
	if args.ipaddr.contains('@') {
		// guarded by contains, split_once cannot fail here
		args.user, args.ipaddr = args.ipaddr.split_once('@') or { panic('bug') }
	}
	if args.ipaddr.contains(':') {
		mut port := ''
		// rsplit so an explicit port wins even with ':' earlier in the string
		args.ipaddr, port = args.ipaddr.rsplit_once(':') or { panic('bug') }
		args.port = port.int()
	}
	args.user = args.user.trim_space()
	args.ipaddr = args.ipaddr.trim_space()
	if args.ipaddr.len == 0 {
		// bad input is recoverable: return an error instead of panicking
		return error('ip addr cannot be empty')
	}
	mut sshkey := ''
	if args.sshkey.len > 0 {
		if !os.exists(args.sshkey) {
			return error("can't find sshkey on path: ${args.sshkey}")
		}
		sshkey = '-i ${args.sshkey}'
	}
	return '-e \'ssh -o StrictHostKeyChecking=no ${sshkey} -p ${args.port}\'', '${args.user}@${args.ipaddr}'
}

View File

@@ -0,0 +1,79 @@
module pathlib
import freeflowuniverse.herolib.data.paramsparser
type Filter0 = fn (mut Path, mut paramsparser.Params) !bool
type Executor0 = fn (mut Path, mut paramsparser.Params) !paramsparser.Params
// the filters are function which needs to return true if to process with alle executors .
// see https://github.com/freeflowuniverse/herolib/blob/development/examples/core/pathlib/examples/scanner/path_scanner.v .
// if any of the filters returns false then we don't continue .
// if we return True then it means the dir or file is processed .
// .
// type Filter0 = fn (mut Path, mut paramsparser.Params) bool
// type Executor0 = fn (mut Path, mut paramsparser.Params) !paramsparser.Params
//
pub fn (mut path Path) scan(mut parameters paramsparser.Params, filters []Filter0, executors []Executor0) !paramsparser.Params {
	// scanning only makes sense starting from a directory
	if path.is_dir() {
		return scan_recursive(mut path, mut parameters, filters, executors)
	}
	return error('can only scan on dir.\n${path}')
}
// depth-first walk: filters gate processing of a node (and its subtree),
// executors run on every accepted node; within a dir, files/links are
// processed before subdirs .
fn scan_recursive(mut path Path, mut parameters paramsparser.Params, filters []Filter0, executors []Executor0) !paramsparser.Params {
	// walk over filters; if any of them returns false, skip this subtree
	for f in filters {
		needs_to_be_true := f(mut path, mut parameters) or {
			// was '${error}' which interpolated the builtin function; use err
			return error('Cannot filter for ${path.path}\n${err}')
		}
		if !needs_to_be_true {
			return parameters
		}
	}
	if path.is_dir() {
		for e in executors {
			parameters = e(mut path, mut parameters) or {
				return error('Cannot process execution on dir ${path.path}\n${err}')
			}
		}
		mut pl := path.list(recursive: false) or {
			return error('cannot list: ${path.path} \n${err}')
		}
		// first process the files and links
		for mut p_in in pl.paths {
			if !p_in.is_dir() {
				scan_recursive(mut p_in, mut parameters, filters, executors) or {
					return error('Cannot process recursive on ${p_in.path}\n${err}')
				}
			}
		}
		// now process the dirs
		for mut p_in in pl.paths {
			if p_in.is_dir() {
				scan_recursive(mut p_in, mut parameters, filters, executors) or {
					return error('Cannot process recursive on ${p_in.path}\n${err}')
				}
			}
		}
	} else {
		for e in executors {
			parameters = e(mut path, mut parameters) or {
				return error('Cannot process execution on file ${path.path}\n${err}')
			}
		}
	}
	return parameters
}

View File

@@ -0,0 +1,24 @@
module pathlib
// import freeflowuniverse.herolib.core.smartid
// // sids_acknowledge .
// // means our redis server knows about the sid's found, so we know which ones to generate new
// pub fn (mut path Path) sids_acknowledge(cid smartid.CID) ! {
// t := path.read()!
// cid.sids_acknowledge(t)!
// }
// // sids_replace .
// // find parts of text in form sid:*** till sid:****** .
// // replace all occurrences with new sid's which are unique .
// // cid = is the circle id for which we find the id's .
// // sids will be replaced in the files if they are different
// pub fn (mut path Path) sids_replace(cid smartid.CID) ! {
// t := path.read()!
// t2 := cid.sids_replace(t)!
// if t2 != t {
// // means we have change and we need to write it
// path.write(t2)!
// }
// }

View File

@@ -0,0 +1,23 @@
module pathlib
import os
// size of file or dir in kilobytes (1000 bytes per KB)
pub fn (mut path Path) size_kb() !int {
	return int(path.size()! / 1000)
}
// size in bytes; for a dir: the recursive sum of all contained paths
pub fn (mut path Path) size() !f64 {
	path.check_exists()!
	if path.cat == .file {
		return os.file_size(path.path)
	}
	// dir: sum sizes of everything underneath
	mut pl := path.list(recursive: true)!
	mut total := 0.0
	for mut p in pl.paths {
		total += p.size()!
	}
	return total
}

View File

@@ -0,0 +1,314 @@
module pathlib
import freeflowuniverse.herolib.core.texttools
import os
// parameters for sub_get / sub_exists
@[params]
pub struct SubGetParams {
pub mut:
	name string
	name_fix_find bool // means we will also find if name is same as the name_fix
	name_fix bool // if file found and name fix was different than file on filesystem, will rename
	dir_ensure bool // if dir_ensure on will fail if its not a dir
	file_ensure bool // if file_ensure on will fail if its not a file
}
// An internal struct for representing failed jobs.
// carries the failing path plus a typed reason; code() maps error_type to an int
pub struct SubGetError {
	Error
pub mut:
	msg string
	path string
	error_type JobErrorType
}
// reason codes for SubGetError; int value (enum index) is exposed via code()
pub enum JobErrorType {
	error // generic failure (code 0)
	nodir // parent path was not a dir (code 1)
	notfound // requested sub item does not exist (code 2)
	wrongtype // asked for dir or file, but found other type (code 3)
	islink // we found a link, which is not supported (code 4)
}
// human readable message for a SubGetError, built from its error_type
pub fn (err SubGetError) msg() string {
	msg := match err.error_type {
		.nodir { 'could not get sub of path, because was no dir' }
		.notfound { 'could not find' }
		.wrongtype { 'asked for a dir or a file, but this did not correspond on filesystem.' }
		.islink { 'we found a link, this is not supported for now.' }
		.error { '' }
	}
	return "Dir Get Error for path:'${err.path}' -- (${err.code()}) failed with error: ${msg}"
}
// numeric error code: the JobErrorType enum index (e.g. notfound == 2)
pub fn (err SubGetError) code() int {
	return int(err.error_type)
}
// will get dir or file underneith a dir .
// e.g. mypath.sub_get(name:"mysub_file.md",name_fix_find:true,name_fix:true)! .
// this will find Mysubfile.md as well as mysub_File.md and rename to mysub_file.md and open .
// params: .
// - name .
// - name_fix_find bool :means we will also find if name is same as the name_fix.
// - name_fix bool :if file found and name fix was different than file on filesystem, will rename .
// - dir_ensure bool :if dir_ensure on will fail if its not a dir .
// - file_ensure bool :if file_ensure on will fail if its not a dir .
// .
// will return SubGetError if error .
//
// returns a path
pub fn (mut path Path) sub_get(args_ SubGetParams) !Path {
	mut args := args_
	if path.cat != Category.dir {
		return SubGetError{
			error_type: .nodir
			path: path.path
		}
	}
	if args.name == '' {
		return error('name cannot be empty')
	}
	// renaming implies we must also match on the normalized name
	if args.name_fix {
		args.name_fix_find = true
	}
	if args.name_fix_find {
		args.name = texttools.name_fix(args.name)
	}
	items := os.ls(path.path) or { []string{} }
	for item in items {
		mut itemfix := item
		if args.name_fix_find {
			itemfix = texttools.name_fix(item)
		}
		if itemfix == args.name {
			// we found what we were looking for
			mut p := get(os.join_path(path.path, item)) // get the path
			if args.dir_ensure {
				if !p.is_dir() {
					return SubGetError{
						error_type: .wrongtype
						path: path.path
					}
				}
			}
			if args.file_ensure {
				if !p.is_file() {
					return SubGetError{
						error_type: .wrongtype
						path: path.path
					}
				}
			}
			if args.name_fix {
				// rename the on-disk item to its normalized name
				p.path_normalize() or {
					return SubGetError{
						msg: 'could not normalize path: ${err}'
						path: path.path
					}
				}
			}
			return p
		}
	}
	return SubGetError{
		error_type: .notfound
		path: path.path
	}
}
// will check if a sub dir/file exists under this dir .
// params: .
// - name
// - name_fix_find bool :means we will also find if name is same as the name_fix .
// - name_fix bool :if file found and name fix was different than file on filesystem, will rename .
// - dir_ensure bool :if dir_ensure on will fail if its not a dir .
// - file_ensure bool :if file_ensure on will fail if its not a file .
//
// returns false only for 'not found'; any other sub_get failure propagates .
pub fn (mut path Path) sub_exists(args_ SubGetParams) !bool {
	// forward the caller's params (was calling sub_get() with defaults, so
	// name was always empty and the args were silently ignored)
	_ := path.sub_get(args_) or {
		if err.code() == int(JobErrorType.notfound) {
			return false // means did not exist
		}
		return err
	}
	return true
	// TODO: need to write test for sub_get and sub_exists
}
//////////////FILE
// find file underneith dir path, if exists return True
pub fn (path Path) file_exists(tofind string) bool {
	if path.cat != Category.dir {
		return false
	}
	target := '${path.path}/${tofind}'
	return os.exists(target) && os.is_file(target)
}
// is case insensitive
pub fn (mut path Path) file_exists_ignorecase(tofind string) bool {
	found := path.file_name_get_ignorecase(tofind)
	return found.len > 0
}
// case-insensitive lookup: returns the real on-disk file name, or '' when absent
fn (mut path Path) file_name_get_ignorecase(tofind string) string {
	if path.cat != Category.dir {
		return ''
	}
	wanted := tofind.to_lower()
	for item in os.ls(path.path) or { []string{} } {
		if item.to_lower() != wanted {
			continue
		}
		// only accept regular files
		if os.is_file(os.join_path(path.path, item)) {
			return item
		}
	}
	return ''
}
// find file underneith path, if exists return as Path, otherwise error .
pub fn (mut path Path) file_get(tofind string) !Path {
	if path.cat != Category.dir || !(path.exists()) {
		return error('File get for ${tofind} in ${path.path}: is not a dir or dir does not exist.')
	}
	if !path.file_exists(tofind) {
		return error("Could not find file '${tofind}' in ${path.path}.")
	}
	return Path{
		path: os.join_path(path.path, tofind)
		cat: Category.file
		exist: .yes
	}
}
// find file underneith path ignoring case, return as Path, otherwise error
pub fn (mut path Path) file_get_ignorecase(tofind string) !Path {
	if path.cat != Category.dir || !(path.exists()) {
		return error('File get ignore case for ${tofind} in ${path.path}: is not a dir or dir does not exist.')
	}
	filename := path.file_name_get_ignorecase(tofind)
	if filename.len == 0 {
		return error("Could not find file (igore case) '${tofind}' in ${path.path}.")
	}
	// use the real on-disk name, preserving its original casing
	return Path{
		path: os.join_path(path.path, filename)
		cat: Category.file
		exist: .yes
	}
}
// get file, if not exist make new one
// falls back to creating '${path.path}/${tofind}' when file_get fails
pub fn (mut path Path) file_get_new(tofind string) !Path {
    if path.cat != Category.dir || !(path.exists()) {
        return error('File get new for ${tofind} in ${path.path}: is not a dir or dir does not exist.')
    }
    mut p := path.file_get(tofind) or {
        return get_file(path: '${path.path}/${tofind}', create: true)!
    }
    return p
}
//////////////LINK
// report whether a symlink named `tofind` exists directly under this dir.
// Fixed: os.exists() follows the link target, so a dangling (broken) symlink
// was previously reported as missing; os.is_link() (lstat-based) is enough
// and also covers broken links.
// NOTE: this check is case sensitive (the old comment claiming case
// insensitivity was wrong); use link_exists_ignorecase for that.
pub fn (mut path Path) link_exists(tofind string) bool {
    if path.cat != Category.dir {
        return false
    }
    return os.is_link('${path.path}/${tofind}')
}
// report whether a symlink whose name matches `tofind` case-insensitively
// exists directly under this dir.
// Fixed: the old code lower-cased BOTH the directory path and the candidate
// name before calling os.is_link(), so on case-sensitive filesystems it
// probed a path that does not exist; we now test the real on-disk entry.
pub fn (mut path Path) link_exists_ignorecase(tofind string) bool {
    if path.cat != Category.dir {
        return false
    }
    wanted := tofind.to_lower()
    entries := os.ls(path.path) or { []string{} }
    for entry in entries {
        if entry.to_lower() == wanted {
            if os.is_link(os.join_path(path.path, entry)) {
                return true
            }
        }
    }
    return false
}
// find link underneath path, return as Path, can only be one
// tofind is part of link name
// NOTE(review): despite the comment above, the match is exact and case
// sensitive (link_exists probes '${path.path}/${tofind}' directly) — confirm.
pub fn (mut path Path) link_get(tofind string) !Path {
    if path.cat != Category.dir || !(path.exists()) {
        return error('Link get for ${tofind} in ${path.path}: is not a dir or dir does not exist.')
    }
    if path.link_exists(tofind) {
        file_path := os.join_path(path.path, tofind)
        return Path{
            path: file_path
            cat: Category.linkfile
            exist: .yes
        }
    }
    return error("Could not find link '${tofind}' in ${path.path}.")
}
///////// DIR
// report whether a directory named `tofind` exists directly under this path.
pub fn (mut path Path) dir_exists(tofind string) bool {
    if path.cat != Category.dir {
        return false
    }
    target := '${path.path}/${tofind}'
    return os.exists(target) && os.is_dir(target)
}
// find dir underneath path, return as Path; errors when the receiver is not
// an existing dir or `tofind` is not a subdirectory of it.
pub fn (mut path Path) dir_get(tofind string) !Path {
    if path.cat != Category.dir || !(path.exists()) {
        return error('is not a dir or dir does not exist: ${path.path}')
    }
    if path.dir_exists(tofind) {
        dir_path := os.join_path(path.path, tofind)
        return Path{
            path: dir_path
            cat: Category.dir
            exist: .yes
        }
    }
    return error('${tofind} is not in ${path.path}')
}
// get dir, if not exist make new one (comment said "file" — it is a dir)
pub fn (mut path Path) dir_get_new(tofind string) !Path {
    if path.cat != Category.dir || !(path.exists()) {
        return error('is not a dir or dir does not exist: ${path.path}')
    }
    mut p := path.dir_get(tofind) or {
        return get_dir(path: '${path.path}/${tofind}', create: true)!
    }
    return p
}

View File

@@ -0,0 +1,549 @@
module pathlib
import os
import freeflowuniverse.herolib.core.texttools
import time
import crypto.md5
import rand
import freeflowuniverse.herolib.ui.console
// check path exists
// re-stats the path on every call (check() refreshes the cached state),
// then reports whether it was found on disk.
pub fn (mut path Path) exists() bool {
    // if path.cat == .unknown || path.exist == .unknown {
    // 	path.check()
    // }
    path.check()
    return path.exist == .yes
}
// case insensitive comparison of two paths; '~' is expanded to the home dir
// and both sides are made absolute before comparing.
pub fn path_equal(a_ string, b_ string) bool {
    home := os.home_dir()
    left := os.abs_path(a_.replace('~', home)).to_lower()
    right := os.abs_path(b_.replace('~', home)).to_lower()
    return left == right
}
// rename the file or directory
// `name` must be a bare name (no '/'); the entry stays in its parent dir.
pub fn (mut path Path) rename(name string) ! {
    if name.contains('/') {
        return error("should only be a name no dir inside: '${name}'")
    }
    mut dest := ''
    if path.path.contains('/') {
        before := path.path.all_before_last('/')
        dest = before + '/' + name
    } else {
        dest = name
    }
    os.mv(path.path, dest)!
    path.path = dest
    path.check() // refresh cached existence/category state
}
// TODO: make part of pathlib of Path
// uncompress to specified directory .
// if copy then will keep the original
// supports .xz (returns a file Path) and .tar.gz/.tgz/.zip/.bz2 (dir Path);
// shells out to xz/tar/unzip/bunzip2, so those tools must be installed.
pub fn (mut path Path) expand(dest string) !Path {
    $if debug {
        console.print_header('expand ${path.path}')
    }
    if dest.len < 4 {
        return error("Path dest needs to be mentioned and +4 char. Now '${dest}'")
    }
    filext := os.file_ext(path.name()).to_lower()
    // the ones who return a filepath
    if filext == '.xz' {
        cmd := 'xz --decompress ${path.path} --stdout > ${dest}'
        if os.is_file(dest) {
            os.rm(dest)!
        }
        // ensure parent dirs of dest exist, then drop the leaf dir itself so
        // the shell redirection can create dest as a file
        os.mkdir_all(dest)!
        os.rmdir(dest)!
        res := os.execute(cmd)
        // console.print_debug(res)
        if res.exit_code > 0 {
            // console.print_debug(cmd)
            return error('Could not expand xz.\n${res}')
        }
        return get_file(path: dest, create: false)!
    }
    mut desto := get_dir(path: dest, create: true)!
    desto.empty()!
    if path.name().to_lower().ends_with('.tar.gz') || path.name().to_lower().ends_with('.tgz') {
        cmd := 'tar -xzvf ${path.path} -C ${desto.path}'
        console.print_debug(cmd)
        res := os.execute(cmd)
        if res.exit_code > 0 {
            return error('Could not expand.\n${res}')
        }
    } else if path.name().to_lower().ends_with('.zip') {
        cmd := 'unzip ${path.path} -d ${dest}'
        // console.print_debug(cmd)
        res := os.execute(cmd)
        // console.print_debug(res)
        if res.exit_code > 0 {
            return error('Could not expand zip.\n${res}')
        }
    } else if path.name().to_lower().ends_with('.bz2') {
        cmd := '
        bunzip2 -f -k ${path.path}
        ' // console.print_debug(cmd)
        res := os.execute(cmd)
        if res.exit_code > 0 {
            return error('Could not expand bz2.\n${res.output}')
        }
        // bunzip2 -k writes the result next to the source; move it to dest
        dest_tmp := path.path.all_before_last('.bz2')
        desto.delete()!
        mut desto2 := get_file(path: dest, create: false)!
        os.mv(dest_tmp, desto2.path)!
        return desto2
    } else {
        panic('expand not implemented yet for : ${path.path}')
    }
    return desto
}
// chown changes the owner and group attributes of path to owner and group.
// thin wrapper over os.chown; errors are propagated.
pub fn (mut path Path) chown(owner int, group int) ! {
    os.chown(path.path, owner, group)!
}
// chmod change file access attributes of path to mode.
// Octals like 0o600 can be used.
pub fn (mut path Path) chmod(mode int) ! {
    os.chmod(path.path, mode)!
}
// get relative path in relation to destpath .
// will not resolve symlinks
// delegates to the module-level path_relative with destpath as the source dir
pub fn (path Path) path_relative(destpath string) !string {
    // console.print_header(' path relative: '$path.path' '$destpath'")
    return path_relative(destpath, path.path)
}
// recursively finds the least common ancestor of array of paths .
// will always return the absolute path (relative gets changed to absolute).
// Fixed: comparison is now per path component instead of raw string prefix,
// so '/a/b' and '/a/bc' correctly resolve to '/a' (the old starts_with check
// treated '/a/b' as an ancestor of '/a/bc'). Leftover debug print removed.
pub fn find_common_ancestor(paths_ []string) string {
    for p in paths_ {
        if p.trim_space() == '' {
            panic('cannot find commone ancestors if any of items in paths is empty.\n${paths_}')
        }
    }
    // resolve symlinks and make absolute so all paths are comparable
    paths := paths_.map(os.abs_path(os.real_path(it)))
    mut common := paths[0].split('/')
    for p in paths[1..] {
        parts := p.split('/')
        mut i := 0
        for i < common.len && i < parts.len && common[i] == parts[i] {
            i++
        }
        common.trim(i) // keep only the shared leading components
    }
    if common.len <= 1 {
        return '/'
    }
    return common.join('/')
}
// same as above but will treat symlinks as if normal links
// allowing finding relative paths between links as well
// QUESTION: should we merge with above?
// Fixed: per-component comparison replaces the raw string-prefix check, so
// '/a/b' is no longer reported as an ancestor of '/a/bc'.
pub fn find_simple_common_ancestor(paths_ []string) string {
    for p in paths_ {
        if p.trim_space() == '' {
            panic('cannot find commone ancestors if any of items in paths is empty.\n${paths_}')
        }
    }
    paths := paths_.map(os.abs_path(it)) // absolute, but symlinks untouched
    mut common := paths[0].split('/')
    for p in paths[1..] {
        parts := p.split('/')
        mut i := 0
        for i < common.len && i < parts.len && common[i] == parts[i] {
            i++
        }
        common.trim(i) // keep only the shared leading components
    }
    if common.len <= 1 {
        return '/'
    }
    return common.join('/')
}
// find parent of path
// errors when the path has no parent ('.' or already at '/')
pub fn (path Path) parent() !Path {
    mut p := path.absolute()
    parent := os.dir(p) // get parent directory
    if parent == '.' || parent == '/' {
        return error('no parent for path ${path.path}')
    } else if parent == '' {
        return Path{
            path: '/'
            cat: Category.dir
            exist: .unknown
        }
    }
    return Path{
        path: parent
        cat: Category.dir
        exist: .unknown
    }
}
// arguments for Path.move
pub struct MoveArgs {
pub mut:
    dest string // path
    delete bool // if true will remove files which are on dest which are not on source
    chmod_execute bool // if true, chmod 0o770 on the destination after moving
}
// move to other location
// ```
// dest string // path
// delete bool // if true will remove files which are on dest which are not on source
// ```
// errors when dest exists and delete is false.
pub fn (mut path Path) move(args MoveArgs) ! {
    mut d := get(args.dest)
    if d.exists() {
        if args.delete {
            d.delete()!
        } else {
            return error("Found dest dir in move and can't delete. \n${args}")
        }
    }
    os.mv(path.path, d.path)!
    if args.chmod_execute {
        d.chmod(0o770)!
    }
}
// the path will move itself up 1 level .
// e.g. path is /tmp/rclone and there is /tmp/rclone/rclone-v1.64.2-linux-amd64 .
// that last dir needs to move 1 up
// errors unless the dir contains exactly one subdirectory.
pub fn (mut path Path) moveup_single_subdir() ! {
    mut plist := path.list(recursive: false, ignoredefault: true, dirs_only: true)!
    console.print_debug(plist.str())
    if plist.paths.len != 1 {
        return error('could not find one subdir in ${path.path} , so cannot move up')
    }
    mut pdest := plist.paths[0]
    pdest.moveup()!
}
// the path will move itself up 1 level .
// the e.g. /tmp/rclone/rclone-v1.64.2-linux-amd64/ -> /tmp/rclone
// implemented via a temp dir hop so the parent can be replaced atomically-ish.
pub fn (mut path Path) moveup() ! {
    console.print_stdout('move up: ${path}')
    pdest := path.parent()!
    tmpdir := '${os.temp_dir()}/${rand.u16()}'
    path.move(dest: tmpdir, delete: true)!
    mut tmpdirpath := get_dir(path: tmpdir)!
    tmpdirpath.move(dest: pdest.path, delete: true)!
    path.path = pdest.path
    path.check()
}
// returns the file extension without the leading dot, e.g. 'txt' for a.txt
pub fn (path Path) extension() string {
    ext := os.file_ext(path.path)
    return ext.trim('.')
}
// returns the extension without the leading dot, lower-cased
pub fn (path Path) extension_lower() string {
    ext := path.extension()
    return ext.to_lower()
}
// will rewrite the path to lower_case if not the case yet
// will also remove weird chars
// if changed will return true
// the file will be moved to the new location
pub fn (mut path Path) path_normalize() !bool {
    path_original := path.path + '' // make sure is copy, needed?
    // if path.cat == .file || path.cat == .dir || !path.exists() {
    // 	return error('path $path does not exist, cannot namefix (only support file and dir)')
    // }
    // normalize .jpeg to .jpg before the generic name fix
    if path.extension().to_lower() == 'jpeg' {
        path.path = path.path_no_ext() + '.jpg'
    }
    namenew := texttools.name_fix_keepext(path.name())
    if namenew != path.name() {
        path.path = os.join_path(os.dir(path.path), namenew)
    }
    if path.path != path_original {
        // physically move the file/dir to its normalized name
        os.mv(path_original, path.path)!
        path.check()
        return true
    }
    return false
}
// walk upwards starting from path untill dir or file tofind is found
// works recursive
// terminates with an error once parent() fails at the filesystem root.
pub fn (path Path) parent_find(tofind string) !Path {
    if os.exists(os.join_path(path.path, tofind)) {
        return path
    }
    path2 := path.parent()!
    return path2.parent_find(tofind)
}
// delete — alias for delete()
pub fn (mut path Path) rm() ! {
    return path.delete()
}
// delete the path: files and links via os.rm, dirs recursively.
// a trailing check handles the case where check() saw a dangling link.
pub fn (mut path Path) delete() ! {
    if path.exists() {
        // console.print_debug("exists: $path")
        match path.cat {
            .file, .linkfile, .linkdir {
                os.rm(path.path.replace('//', '/'))!
            }
            .dir {
                os.rmdir_all(path.path)!
            }
            .unknown {
                return error('Path cannot be unknown type')
            }
        }
        path.exist = .no
    }
    // broken symlinks report exists()==false but still need removal
    if os.is_link(path.path) {
        os.rm(path.path.replace('//', '/'))!
    }
}
// remove all content but if dir let the dir exist
// for a linkfile the linked target is emptied; for a plain file the content
// is truncated to ''.
pub fn (mut path Path) empty() ! {
    if path.cat == .dir {
        os.mkdir_all(path.path)!
        path.exist = .yes
        mut list := path.list()!
        for mut subpath in list.paths {
            subpath.delete()!
        }
    } else if path.cat == Category.linkfile {
        mut p2 := path.getlink()!
        p2.empty()!
    } else {
        path.write('')!
    }
}
// write content to the file, check is file
// if the path is a link to a file then will change the content of the file represented by the link
// parent directories are created as needed.
pub fn (mut path Path) write(content string) ! {
    if !os.exists(path.path_dir()) {
        os.mkdir_all(path.path_dir())!
    }
    if path.exists() && path.cat == Category.linkfile {
        mut pathlinked := path.getlink()!
        pathlinked.write(content)!
        // fixed: return here — previously execution fell through and wrote
        // the same content a second time through the symlink path
        return
    }
    if path.exists() && path.cat != Category.file && path.cat != Category.linkfile {
        return error('Path must be a file for ${path}')
    }
    os.write_file(path.path, content)!
}
// write bytes to file
// mirrors write(): links are followed, parent dirs created as needed.
pub fn (mut path Path) writeb(content []u8) ! {
    if !os.exists(path.path_dir()) {
        os.mkdir_all(path.path_dir())!
    }
    if path.exists() && path.cat == Category.linkfile {
        mut pathlinked := path.getlink()!
        pathlinked.writeb(content)!
        // fixed: return here — previously the same bytes were written a
        // second time through the symlink path
        return
    }
    if path.exists() && path.cat != Category.file && path.cat != Category.linkfile {
        return error('Path must be a file for ${path}')
    }
    os.write_file_array(path.path, content)!
}
// read content from file
// works for plain files and links to files; errors for any other category.
pub fn (mut path Path) read() !string {
    path.check()
    match path.cat {
        .file, .linkfile {
            p := path.absolute()
            if !os.exists(p) {
                // fixed wording ("is not exist") and aligned with readb()
                return error('File does not exist, ${p} is a wrong path')
            }
            return os.read_file(p)
        }
        else {
            return error('Path is not a file when reading. ${path.path}')
        }
    }
}
// read bytes from file
// works for plain files and links to files; errors for any other category.
pub fn (mut path Path) readb() ![]u8 {
    path.check()
    match path.cat {
        .file, .linkfile {
            p := path.absolute()
            if !os.exists(p) {
                return error('File does not exist, ${p} is a wrong path')
            }
            return os.read_bytes(p)
        }
        else {
            return error('Path is not a file when reading. ${path.path}')
        }
    }
}
// recalc path between target & source .
// we only support if source_ is an existing dir, links will not be supported .
// a0 := pathlib.path_relative('$testpath/a/b/c', '$testpath/a/d.txt') or { panic(err) } .
// assert a0 == '../../d.txt' .
// a2 := pathlib.path_relative('$testpath/a/b/c', '$testpath/d.txt') or { panic(err) } .
// assert a2 == '../../../d.txt' .
// a8 := pathlib.path_relative('$testpath/a/b/c', '$testpath/a/b/c/d/e/e.txt') or { panic(err) } .
// assert a8 == 'd/e/e.txt' .
// symlinks will not be resolved, as it leads to unexpected behaviour
pub fn path_relative(source_ string, linkpath_ string) !string {
    mut source := os.abs_path(source_)
    mut linkpath := os.abs_path(linkpath_)
    // now both start with /
    mut p := get(source_)
    // converts file source to dir source
    // heuristic: a last component containing '.' is treated as a file name
    if source.all_after_last('/').contains('.') {
        source = source.all_before_last('/')
        p = p.parent() or { return error("Parent of source ${source_} doesn't exist") }
    }
    p.check()
    if p.cat != .dir && p.cat != .linkdir {
        return error('Cannot do path_relative()! if source is not a dir Now:${source_} is ${p.cat}')
    } else if !p.exists() {
        return error('Cannot do path_relative()! if source doesnt exist. Now:${source_}')
    }
    common := find_simple_common_ancestor([source, linkpath])
    // if source is common, returns source
    if source.len <= common.len + 1 {
        // TODO: this should be safer
        path := linkpath_.trim_string_left(source)
        if path.starts_with('/') {
            return path[1..]
        } else {
            return path
        }
    }
    // strip the common prefix from both sides
    mut source_short := source[(common.len)..]
    mut linkpath_short := linkpath[(common.len)..]
    source_short = source_short.trim_string_left('/')
    linkpath_short = linkpath_short.trim_string_left('/')
    // console.print_stdout('source: ${source_short}')
    // console.print_stdout('link: ${linkpath_short}')
    source_count := source_short.count('/')
    // link_count := linkpath_short.count('/')
    // console.print_debug(" + source_short:$source_short ($source_count)")
    // console.print_debug(" + linkpath_short:$linkpath_short ($link_count)")
    mut dest := ''
    if source_short == '' { // source folder is common ancestor
        dest = linkpath_short
    } else {
        // one '../' per remaining source component
        go_up := ['../'].repeat(source_count + 1).join('')
        dest = '${go_up}${linkpath_short}'
    }
    dest = dest.replace('//', '/')
    return dest
}
// parameters for temp_write
@[params]
pub struct TMPWriteArgs {
pub mut:
    name string // optional name to remember it more easily
    tmpdir string // base temp dir; falls back to $TMPDIR or /tmp
    text string // text to put in file
    path string // to overrule the path where script will be stored
    ext string = 'sh'
}
// write temp file and return path
// a unique name is derived from the timestamp + md5 of the content; on
// collision a numeric suffix is tried. The file is created chmod 0o777 so it
// can be executed as a script.
pub fn temp_write(args_ TMPWriteArgs) !string {
    mut args := args_
    if args.path.len == 0 {
        if args.tmpdir.len == 0 {
            if 'TMPDIR' in os.environ() {
                args.tmpdir = os.environ()['TMPDIR'] or { '/tmp' }
            } else {
                args.tmpdir = '/tmp'
            }
        }
        mut t := time.now().format_ss_milli().replace(' ', '-').replace('.', ':')
        texthash := md5.hexhash(args.text)
        t += '_${texthash}'
        mut tmppath := '${args.tmpdir}/execscripts/${t}.${args.ext}'
        if args.name.len > 0 {
            tmppath = '${args.tmpdir}/execscripts/${args.name}_${t}.${args.ext}'
        }
        if !os.exists('${args.tmpdir}/execscripts/') {
            os.mkdir('${args.tmpdir}/execscripts') or {
                return error('Cannot create ${args.tmpdir}/execscripts,${err}')
            }
        }
        if os.exists(tmppath) {
            for i in 1 .. 200 {
                // fixed: the fallback name used to contain literal braces
                // because of a malformed interpolation '{${t}}'
                tmppath = '${args.tmpdir}/execscripts/${t}_${i}.${args.ext}'
                if !os.exists(tmppath) {
                    break
                }
                // TODO: would be better to remove older files, e.g. if older than 1 day, remove
                if i > 99 {
                    // os.rmdir_all('$tmpdir/execscripts')!
                    // return temp_write(text)
                    panic("should not get here, can't find temp file to write for process job.")
                }
            }
        }
        args.path = tmppath
    }
    os.write_file(args.path, args.text)!
    os.chmod(args.path, 0o777)!
    return args.path
}
// pub fn path_relative(source_ string, dest_ string) !string {
// mut source := source_.trim_right('/')
// mut dest := dest_.replace('//', '/').trim_right('/')
// // console.print_debug("path relative: '$source' '$dest' ")
// if source !="" {
// if source.starts_with('/') && !dest.starts_with('/') {
// return error('if source starts with / then dest needs to start with / as well.\n - $source\n - $dest')
// }
// if !source.starts_with('/') && dest.starts_with('/') {
// return error('if source starts with / then dest needs to start with / as well\n - $source\n - $dest')
// }
// }
// if dest.starts_with(source) {
// return dest[source.len..]
// } else {
// msg := "Destination path is not in source directory: $source_ $dest_"
// return error(msg)
// }
// }

View File

@@ -0,0 +1,223 @@
import freeflowuniverse.herolib.core.pathlib
import os
import freeflowuniverse.herolib.ui.console
const testpath = os.dir(@FILE) + '/examples/test_path'
// build a fresh fixture tree under testpath before the suite runs
fn testsuite_begin() {
    os.rmdir_all(testpath) or {}
    assert !os.is_dir(testpath)
    os.mkdir_all(testpath) or { panic(err) }
    os.mkdir_all('${testpath}/test_parent') or { panic(err) }
    os.mkdir_all('${testpath}/a/b/c') or { panic(err) }
    os.create('${testpath}/testfile1') or { panic(err) }
    os.create('${testpath}/test_parent/testfile2') or { panic(err) }
    os.create('${testpath}/test_parent/testfile3') or { panic(err) }
}
// remove the fixture tree after the suite
fn testsuite_end() {
    os.rmdir_all(testpath) or {}
}
// pathlib.get must categorize files and dirs correctly
fn test_get() {
    console.print_stdout('************ TEST_Get ************')
    console.print_debug(testpath)
    fp := pathlib.get('${testpath}/testfile1')
    assert fp.cat == pathlib.Category.file
    console.print_stdout('File Result: ${fp}')
    dp := pathlib.get('${testpath}')
    assert dp.cat == pathlib.Category.dir
    console.print_stdout('Dir Result: ${dp}')
}
// exists() for present, absent and freshly-created-then-deleted files
fn test_exists() {
    console.print_stdout('************ TEST_exists ************')
    mut p1 := pathlib.get_file(path: '${testpath}/testfile1') or { panic('${err}') }
    assert p1.exists()
    console.print_stdout('File found')
    mut p2 := pathlib.get_file(path: '${testpath}/NotARealFile') or { panic('${err}') }
    assert !p2.exists()
    console.print_stdout('File not found')
    mut p3 := pathlib.get_file(path: '${testpath}/NotARealFile2', create: true) or {
        panic('${err}')
    }
    assert p3.exists()
    console.print_stdout('File found')
    p3.delete() or { panic('${err}') }
    assert !p3.exists()
}
// parent() of a file must be its containing dir
fn test_parent() {
    console.print_stdout('************ TEST_test_parent ************')
    mut test_path_dir := pathlib.get('${testpath}')
    mut p := pathlib.get('${testpath}/testfile1')
    parent_dir := p.parent() or { panic(err) }
    assert parent_dir.path == test_path_dir.path
    console.print_stdout('Parent Function working correctly')
}
// parent_find() must walk up until it finds the dir containing testfile1
fn test_parent_find() {
    console.print_stdout('************ TEST_test_parent_find ************')
    // - testfile1 is located in test_path
    // - will start search from test_parent that is inside test_path
    // - Result must be test_path
    mut test_path_dir := pathlib.get('${testpath}')
    mut p := pathlib.get('${testpath}/test_parent')
    parent_dir := p.parent_find('testfile1') or { panic(err) }
    assert parent_dir.path == test_path_dir.path
    console.print_stdout('Find Parent Function working correctly')
}
// dir_exists() for a present and an absent subdirectory
fn test_dir_exists() {
    console.print_stdout('************ TEST_dir_exists ************')
    mut test_path_dir := pathlib.get('${testpath}')
    assert test_path_dir.dir_exists('test_parent')
    console.print_stdout('test_parent found in ${test_path_dir.path}')
    assert !test_path_dir.dir_exists('test_parent_2')
    console.print_stdout('test_paren_2 not found in ${test_path_dir.path}')
}
// dir_get() must succeed for an existing subdir and error for a missing one
fn test_dir_find() {
    console.print_stdout('************ TEST_dir_find ************')
    mut test_path_dir := pathlib.get('${testpath}')
    mut test_parent_dir := test_path_dir.dir_get('test_parent') or { panic(err) }
    console.print_stdout('Dir found: ${test_parent_dir}')
    // fixed: result is no longer bound to an unused `mut` variable
    test_path_dir.dir_get('test_parent_2') or { return }
    panic('should not get here')
}
// NOTE(review): name lacks the 'test_' prefix, so V's test runner never
// executes this function — rename to test_file1_exists to activate it.
fn testfile1_exists() {
    console.print_stdout('************ testfile1_exists ************')
    mut test_path_dir := pathlib.get('${testpath}')
    assert test_path_dir.file_exists('testfile1')
    console.print_stdout('testfile1 found in ${test_path_dir.path}')
    assert !test_path_dir.file_exists('newfile2')
    console.print_stdout('newfile2 not found in ${test_path_dir.path}')
}
// NOTE(review): name lacks the 'test_' prefix, so V's test runner never
// executes this function — rename to test_file1_find to activate it.
fn testfile1_find() {
    console.print_stdout('************ testfile1_find ************')
    mut test_path_dir := pathlib.get('${testpath}')
    mut file := test_path_dir.file_get('testfile1') or { panic(err) }
    console.print_stdout('file ${file} found')
    test_path_dir.file_get('newfile2') or { return }
    panic('should not get here')
}
// realpath() through a symlink must resolve to the link target
fn test_real_path() {
    console.print_stdout('************ TEST_real_path ************')
    mut source := pathlib.get('${testpath}/test_parent/testfile2')
    mut dest_ := '${testpath}/link_remove_rp.md'
    // fixed: the link result was bound to an unused `mut` variable
    source.link(dest_, true) or { panic('error: ${err}') }
    mut dest := pathlib.get(dest_)
    link_real := dest.realpath()
    assert link_real == '${testpath}/test_parent/testfile2'
    // dest.delete() or {panic(err)}
    console.print_stdout('Real path function working correctly')
}
// realpath() through a symlink in a subdir must resolve to the target
fn test_real_path2() {
    console.print_stdout('************ TEST_real_path ************')
    mut source := pathlib.get('${testpath}/testfile1')
    mut dest_ := '${testpath}/test_parent/link_remove_rp2.md'
    // fixed: the link result was bound to an unused `mut` variable
    source.link(dest_, true) or { panic('error: ${err}') }
    mut dest := pathlib.get(dest_)
    link_real := dest.realpath()
    assert link_real == '${testpath}/testfile1'
    dest.delete() or { panic(err) }
    console.print_stdout('Real path2 function working correctly')
}
// exercise path_relative() across sibling, ancestor and descendant targets
fn test_link_path_relative() {
    os.mkdir_all('${testpath}/a/b/c') or { panic(err) }
    console.print_stdout('************ TEST_link_path_relative()! ************')
    a0 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/a/d.txt') or { panic(err) }
    assert a0 == '../../d.txt'
    a2 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/d.txt') or { panic(err) }
    assert a2 == '../../../d.txt'
    a3 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/a/b/c/e.txt') or { panic(err) }
    assert a3 == 'e.txt' // ? is this the correct path?
    a4 := pathlib.path_relative('${testpath}/a/b/c/', '${testpath}/a/b/d/e.txt') or { panic(err) }
    assert a4 == '../d/e.txt'
    a5 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/a/b/c/d/e/e.txt') or {
        panic(err)
    }
    assert a5 == 'd/e/e.txt'
    a6 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/a/b/c/e.txt') or { panic(err) }
    assert a6 == 'e.txt'
    a7 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/a/b/c/e.txt') or { panic(err) }
    assert a7 == 'e.txt'
    a8 := pathlib.path_relative('${testpath}/a/b/c', '${testpath}/a/b/c/d/e/e.txt') or {
        panic(err)
    }
    assert a8 == 'd/e/e.txt'
    // TODO: lets make to work in test setup
    // c := pathlib.path_relative('/Users/despiegk1/code4/books/content/mytwin/intro','/Users/despiegk1/code4/books/content/mytwin/funny_comparison.md') or {panic(err)}
    // assert c=="../funny_comparison.md"
    // d := pathlib.path_relative('/Users/despiegk1/code4/books/content/mytwin/intro/','/Users/despiegk1/code4/books/content/mytwin/funny_comparison.md') or {panic(err)}
    // assert d=="../funny_comparison.md"
    console.print_stdout('Link path relative function working correctly')
}
// TODO need to enable all tests
// TODO have more than 1 test file, make more modular, now its 1 too big file
// round-trip write()/read() on a fixture file
// NOTE(review): this mutates testfile1 which other tests also use — the
// suite is order-dependent; consider a dedicated file.
fn test_write_and_read() {
    console.print_stdout('************ TEST_write_and_read ************')
    mut fp := pathlib.get('${testpath}/testfile1')
    fp.write('Test Write Function') or { panic(err) }
    fcontent := fp.read() or { panic(err) }
    assert fcontent == 'Test Write Function'
    console.print_stdout('Write and read working correctly')
    // mut test_path_dir := pathlib.get("$testpath")
}
// fn test_copy() {
// console.print_stdout('************ TEST_copy ************')
// //- Copy /test_path/testfile1 to /test_path/test_parent
// mut dest_dir := pathlib.get('${testpath}/test_parent')
// mut src_f := pathlib.get('${testpath}/testfile1')
// src_f.copy(dest: '${dest_dir.path}/testfile2') or { panic(err) }
// mut dest_file := pathlib.get('${testpath}/test_parent/testfile2')
// dest_file.delete()!
// console.print_stdout('Copy function works correctly')
// }
// TODO need other test
// fn test_link(){
// console.print_stdout('************ TEST_link ************')
// mut dest_p:= path.path{path:"$testpath/linkdir1", cat:pathlib.Category.linkdir, exists:path.false}
// mut lp := path.path{path:"/workspace/herolib/path", cat:pathlib.Category.dir, exists:path.true}
// lp.link(mut dest_p) or {panic(err)}
// mut get_link := pathlib.get("$testpath/linkdir1")
// assert get_link.exists()
// console.print_debug("Link path: $get_link.path")
// real:= get_link.absolute()
// console.print_debug("Real path: $real")
// }
// find_common_ancestor over sibling, disjoint, identical and root inputs
fn test_find_common_ancestor() {
    console.print_stdout('************ TEST_find_common_ancestor ************')
    res := pathlib.find_common_ancestor(['/test/a/b/c/d', '/test/a/'])
    assert res == '/test/a'
    b1 := pathlib.find_common_ancestor(['/a/b/c/d.txt', '/a/d.txt'])
    assert b1 == '/a'
    b2 := pathlib.find_common_ancestor(['/a/b/c/d.txt', '/c/d.txt'])
    assert b2 == '/'
    b3 := pathlib.find_common_ancestor(['/a/b/c/d.txt', '/a/b/c/e.txt'])
    assert b3 == '/a/b/c'
    b4 := pathlib.find_common_ancestor(['/a/b/c/d.txt', '/a/b/c/d.txt'])
    assert b4 == '/a/b/c/d.txt'
    b7 := pathlib.find_common_ancestor(['/', '/a/b/c/d.txt'])
    assert b7 == '/'
    console.print_stdout('Find common ancestor function works correctly')
}

View File

@@ -0,0 +1,81 @@
# Pathlib Module
The pathlib module provides a robust way to handle file system operations. Here's a comprehensive overview of how to use it:
## 1. Basic Path Creation
```v
import freeflowuniverse.herolib.core.pathlib
// Get a basic path object
mut path := pathlib.get('/some/path')
// Create a directory (with parent dirs)
mut dir := pathlib.get_dir(
path: '/some/dir'
create: true
)!
// Create/get a file
mut file := pathlib.get_file(
path: '/some/file.txt'
create: true
)!
```
## 2. Path Properties and Operations
```v
// Get various path forms
abs_path := path.absolute() // Full absolute path
real_path := path.realpath() // Resolves symlinks
short_path := path.shortpath() // Uses ~ for home dir
// Get path components
name := path.name() // Filename with extension
name_no_ext := path.name_no_ext() // Filename without extension
dir_path := path.path_dir() // Directory containing the path
// Check path properties
if path.exists() { /* exists */ }
if path.is_file() { /* is file */ }
if path.is_dir() { /* is directory */ }
if path.is_link() { /* is symlink */ }
```
## 3. Common File Operations
```v
// Empty a directory
mut dir := pathlib.get_dir(
path: '/some/dir'
empty: true
)!
// Delete a path
mut path := pathlib.get_dir(
path: '/path/to/delete'
delete: true
)!
// Get working directory
mut wd := pathlib.get_wd()
```
## Features
The module handles common edge cases:
- Automatically expands ~ to home directory
- Creates parent directories as needed
- Provides proper error handling with V's result type
- Checks path existence and type
- Handles both absolute and relative paths
## Path Object Structure
Each Path object contains:
- `path`: The actual path string
- `cat`: Category (file/dir/link)
- `exist`: Existence status
This provides a safe and convenient API for all file system operations in V.

View File

@@ -0,0 +1,21 @@
module pathlib
import freeflowuniverse.herolib.core.texttools
import os
import freeflowuniverse.herolib.ui.console
// template is the text coming from template engine.
// writes the (variable-substituted) template to dest; skipped when dest
// already exists and overwrite is false.
pub fn template_write(template_ string, dest string, overwrite bool) ! {
    mut template := texttools.template_replace(template_)
    if overwrite || !(os.exists(dest)) {
        mut p := get_file(path: dest, create: true)!
        $if debug {
            console.print_header(" write template to '${dest}'")
        }
        p.write(template)!
    }
}
// method form: write the template to this path (see template_write above)
pub fn (mut path Path) template_write(template_ string, overwrite bool) ! {
    template_write(template_, path.path, overwrite)!
}

View File

@@ -20,7 +20,7 @@ import freeflowuniverse.herolib.core.playcmds
// session ?&base.Session is optional
mut plbook := playbook.new(path: "....")!
//now we run all the commands as they are pre-defined in crystallib (herolib)
//now we run all the commands as they are pre-defined in herolib (herolib)
playcmds.run(mut plbook)!

View File

@@ -0,0 +1,77 @@
module texttools
// a comma or \n separated list gets converted to a list of strings .
//'..' also gets converted to without ''
// check also splitsmart which is more intelligent
// empty and quote-only entries are skipped.
pub fn to_array(r string) []string {
    mut res := []string{}
    mut r2 := dedent(r)
    r2 = r2.replace(',', '\n')
    for mut line in r2.split_into_lines() {
        line = line.trim_space()
        if line.trim('\'"') == '' {
            continue
        }
        // fixed: strip surrounding double quotes as well as single quotes,
        // matching the emptiness check above which already handles both
        res << line.trim('\'"')
    }
    return res
}
// same as to_array but each entry is parsed to an int
pub fn to_array_int(r string) []int {
    return to_array(r).map(it.int())
}
// intelligent way how to map a line to a map
//```
// r:=texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
// 	"root   304   0.0  0.0 408185328   1360 ??  S    16Dec23    0:34.06 /usr/sbin/distnoted\n \n")
// assert {'name': 'root', 'pid': '1360', 'path': '/usr/sbin/distnoted'} == r
// r2:=texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
// 	"root   304   0.0  0.0 408185328   1360 ??  S    16Dec23    0:34.06 /usr/sbin/distnoted anotherone anotherone\n \n")
// assert {'name': 'root', 'pid': '1360', 'path': '/usr/sbin/distnoted'} == r2
// r3:=texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
// 	"root   304   0.0  0.0 408185328   1360 ??  S    16Dec23    0:34.06 \n \n")
// assert {'name': 'root', 'pid': '1360', 'path': ''} == r3
//```
// columns named '-' in mapstring are skipped; missing columns map to ''.
// NOTE(review): delimiter_ is accepted but never used — split_smart is
// called with '' regardless; confirm whether it should be forwarded.
pub fn to_map(mapstring string, line string, delimiter_ string) map[string]string {
    mapstring_array := split_smart(mapstring, '')
    mut line_array := split_smart(line, '')
    mut result := map[string]string{}
    for x in 0 .. mapstring_array.len {
        mapstring_item := mapstring_array[x] or { '' }
        if mapstring_item != '-' {
            result[mapstring_item] = line_array[x] or { '' }
        }
    }
    return result
}
// smart way how to get useful info out of text block
// ```
// t:='
// _cmiodalassistants   304   0.0  0.0 408185328   1360 ??  S    16Dec23    0:34.06 /usr/sbin/distnoted agent
// _locationd           281   0.0  0.0 408185328   1344 ??  S    16Dec23    0:35.80 /usr/sbin/distnoted agent
// root                 275   0.0  0.0 408311904   7296 ??  Ss   16Dec23    2:00.56 /usr/libexec/storagekitd
// _coreaudiod          268   0.0  0.0 408185328   1344 ??  S    16Dec23    0:35.49 /usr/sbin/distnoted agent
// '
// r4:=texttools.to_list_map("name,-,-,-,-,pid,-,-,-,-,path",t)
// assert [{'name': '_cmiodalassistants', 'pid': '1360', 'path': '/usr/sbin/distnoted'},
// {'name': '_locationd', 'pid': '1344', 'path': '/usr/sbin/distnoted'},
// {'name': 'root', 'pid': '7296', 'path': '/usr/libexec/storagekitd'},
// {'name': '_coreaudiod', 'pid': '1344', 'path': '/usr/sbin/distnoted'}] == r4
// ```
// applies to_map per non-empty, dedented line of the input block.
pub fn to_list_map(mapstring string, txt_ string, delimiter_ string) []map[string]string {
    mut result := []map[string]string{}
    mut txt := remove_empty_lines(txt_)
    txt = dedent(txt)
    for line in txt.split_into_lines() {
        result << to_map(mapstring, line, delimiter_)
    }
    return result
}

103
lib/core/texttools/clean.v Normal file
View File

@@ -0,0 +1,103 @@
// make sure that the names are always normalized so its easy to find them back
module texttools
// characters dropped by name_clean
const ignore_for_name = '\\/[]()?!@#$%^&*<>:;{}|~'
// the full printable-ASCII set kept by ascii_clean
const keep_ascii = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_-+={}[]"\':;?/>.<,|\\~` '
// drop every character listed in ignore_for_name so names stay normalized
// and easy to find back
pub fn name_clean(r string) string {
    mut kept := []string{}
    for ch in r {
        c := ch.ascii_str()
        if !ignore_for_name.contains(c) {
            kept << c
        }
    }
    return kept.join('')
}
// remove all chars which are not ascii
// keeps only the characters listed in keep_ascii
pub fn ascii_clean(r string) string {
    mut kept := []string{}
    for ch in r {
        c := ch.ascii_str()
        if keep_ascii.contains(c) {
            kept << c
        }
    }
    return kept.join('')
}
// https://en.wikipedia.org/wiki/Unicode#Standardized_subsets
// remove_empty_lines drops every line that is empty or whitespace-only.
pub fn remove_empty_lines(text string) string {
	return text.split_into_lines().filter(it.trim_space() != '').join('\n')
}
// remove_double_lines collapses runs of empty lines into a single empty line,
// drops leading empty lines and strips one trailing empty line.
pub fn remove_double_lines(text string) string {
	mut result := []string{}
	mut last_was_empty := true // starts true so leading empty lines are dropped
	for line in text.split_into_lines() {
		if line.trim_space() == '' {
			if !last_was_empty {
				result << ''
				last_was_empty = true
			}
			continue
		}
		last_was_empty = false
		result << line
	}
	if result.len > 0 && result.last() == '' {
		result.pop()
	}
	return result.join('\n')
}
// remove_empty_js_blocks drops fenced blocks (``` / ''' / """), possibly
// spanning multiple lines, whose body is empty or whitespace-only;
// blocks with real content are kept untouched.
// Double empty lines are collapsed as well (see remove_double_lines).
pub fn remove_empty_js_blocks(text string) string {
	mut out := []string{}
	mut block_capture_pre := '' // the opening fence line, emitted only if the block has content
	mut block_capture_inside := []string{} // lines captured between the fences
	mut foundblock := false // true while we are inside a fenced block
	for l in text.split_into_lines() {
		lt := l.trim_space()
		if lt.starts_with('```') || lt.starts_with("'''") || lt.starts_with('"""') {
			if foundblock {
				// closing fence: only emit the block when it has real content
				if block_capture_inside.filter(it.trim_space() != '').len > 0 {
					// now we know the block inside is not empty
					out << block_capture_pre
					out << block_capture_inside
					out << l // the last line
				}
				foundblock = false
				block_capture_pre = ''
				block_capture_inside = []string{}
				continue
			} else {
				// opening fence: start capturing
				foundblock = true
				block_capture_pre = l
				continue
			}
		}
		if foundblock {
			block_capture_inside << l
		} else {
			out << l
		}
	}
	// NOTE(review): if the text ends inside an unclosed fence, the captured
	// lines are silently dropped -- confirm this is the intended behavior.
	if out.len > 0 && out.last() == '' {
		out.pop()
	}
	return remove_double_lines(out.join('\n'))
}

View File

@@ -0,0 +1,49 @@
module texttools
// verifies that empty fenced blocks are removed while blocks with content
// survive, comparing with surrounding whitespace trimmed away
fn test_clean1() {
	mut text := "
'''js
'''
something
yes
else
```js
```
'''js
inside
'''
"
	mut result := "
something
yes
else
'''js
inside
'''
"
	text = dedent(text)
	result = dedent(result)
	text2 := remove_double_lines(remove_empty_js_blocks(text))
	print('---')
	print(text2)
	print('---')
	print(result)
	print('---')
	// trim both sides so leading/trailing newlines do not affect the comparison
	assert text2.trim_space() == result.trim_space()
}

View File

@@ -0,0 +1,106 @@
module texttools
// parser state for splitting a command line into arguments
// NOTE(review): currently unused in this file -- cmd_line_args_parser tracks
// its quote state with a plain string; confirm whether this enum can go.
enum TextArgsStatus {
	start
	quote // quote found means value in between ''
}
// text_remove_quotes removes every '..' and "..." section from a text,
// i.e. everything in between the quotes, including the quotes themselves.
// A quote preceded by a backslash does not open/close a section.
// NOTE(review): opening and closing quote kinds are not matched -- a "
// toggles the same state as a ', so an apostrophe inside a "..." section
// ends it. Confirm this is acceptable for the intended inputs.
pub fn text_remove_quotes(text string) string {
	mut out := ''
	mut inquote := false // true while we are inside a quoted section
	mut ch := ''
	mut char_previous := ''
	for i in 0 .. text.len {
		ch = text[i..i + 1]
		if ch in ['"', "'"] {
			if char_previous != '\\' {
				// unescaped quote: toggle the section state, drop the quote char
				inquote = !inquote
				char_previous = ch
				continue
			}
		}
		if !inquote {
			// outside any quoted section: keep the character
			out += ch
		}
		char_previous = ch
	}
	return out
}
// check_exists_outside_quotes reports whether any of `items` occurs in `text`
// once all quoted sections have been stripped out.
pub fn check_exists_outside_quotes(text string, items []string) bool {
	stripped := text_remove_quotes(text)
	return items.any(stripped.contains(it))
}
// cmd_line_args_parser converts a command-line text string to an argument list.
// Quoted sections ('..' or "..") become a single argument with the quotes removed.
// \n supported but will be \\n and only supported within '' or "".
// \' not modified, same for \" (escaped quotes are not treated as delimiters).
// When a redirection/pipe character (<, >, |) occurs outside quotes, the text
// is split only once: command name + the complete remainder as one argument.
pub fn cmd_line_args_parser(text string) ![]string {
	mut res := []string{}
	mut quote := '' // quote char that opened the current section, '' when outside
	mut char_previous := ''
	mut arg := '' // argument currently being accumulated
	mut ch := ''
	if check_exists_outside_quotes(text, ['<', '>', '|']) {
		// shell redirection present: keep everything after the command intact
		if !(text.contains(' ')) {
			return error("cannot convert text '${text}' to args because no space to split")
		}
		splitted := text.split_nth(' ', 2)
		return [splitted[0], splitted[1]]
	}
	for i in 0 .. text.len {
		ch = text[i..i + 1]
		// skip spaces which are not escaped
		if ch == ' ' && arg == '' {
			continue
		}
		if ch in ['"', "'"] {
			if char_previous != '\\' {
				if quote == '' {
					// beginning of quote need to close off previous arg
					if arg != '' {
						res << arg.trim(' ')
						arg = ''
					}
					quote = ch
					char_previous = ch
					continue
				} else {
					// end of quote
					// NOTE(review): the closing quote kind is not checked against
					// the opening one -- confirm mixed quotes cannot occur here
					quote = ''
					res << arg.trim(' ')
					arg = ''
					char_previous = ch
					continue
				}
			}
		}
		if quote != '' {
			// unmodified add, because we are in quote
			arg += ch
		} else {
			if ch == ' ' && arg != '' {
				// space outside quotes terminates the current argument
				res << arg.trim(' ')
				arg = ''
			} else {
				arg += ch
			}
		}
		char_previous = ch
	}
	// flush the trailing argument, if any
	if arg != '' {
		res << arg.trim(' ')
	}
	return res
}

View File

@@ -0,0 +1,38 @@
module texttools
// how to process command lines
// exercises cmd_line_args_parser quoting/escaping and the redirection split,
// plus the text_remove_quotes / check_exists_outside_quotes helpers.
// NOTE(review): text_remove_quotes keeps the spaces on both sides of a removed
// section -- verify the expected literals below account for that spacing.
fn test_cmdline_args() {
	mut r := []string{}
	r = cmd_line_args_parser("'aa bb' ' cc dd' one -two") or { panic(err) }
	assert r == ['aa bb', 'cc dd', 'one', '-two']
	r = cmd_line_args_parser("'\taa bb' ' cc dd' one -two") or { panic(err) }
	assert r == ['\taa bb', 'cc dd', 'one', '-two']
	// now spaces
	r = cmd_line_args_parser(" '\taa bb' ' cc dd' one -two ") or { panic(err) }
	assert r == ['\taa bb', 'cc dd', 'one', '-two']
	// now other quote
	r = cmd_line_args_parser('"aa bb" " cc dd" one -two') or { panic(err) }
	assert r == ['aa bb', 'cc dd', 'one', '-two']
	r = cmd_line_args_parser('"aa bb" \' cc dd\' one -two') or { panic(err) }
	assert r == ['aa bb', 'cc dd', 'one', '-two']
	r = cmd_line_args_parser('find . /tmp') or { panic(err) }
	assert r == ['find', '.', '/tmp']
	r = cmd_line_args_parser("bash -c 'find /'") or { panic(err) }
	assert r == ['bash', '-c', 'find /']
	mut r2 := string('')
	r2 = text_remove_quotes('echo "hi >" > /tmp/a.txt')
	assert r2 == 'echo > /tmp/a.txt'
	r2 = text_remove_quotes("echo 'hi >' > /tmp/a.txt")
	assert r2 == 'echo > /tmp/a.txt'
	r2 = text_remove_quotes("echo 'hi >' /tmp/a.txt")
	assert r2 == 'echo /tmp/a.txt'
	assert check_exists_outside_quotes("echo 'hi >' > /tmp/a.txt", ['<', '>', '|'])
	assert check_exists_outside_quotes("echo 'hi ' /tmp/a.txt |", ['<', '>', '|'])
	assert !check_exists_outside_quotes("echo 'hi >' /tmp/a.txt", ['<', '>', '|'])
	r = cmd_line_args_parser('echo "hi" > /tmp/a.txt') or { panic(err) }
	assert r == ['echo', '"hi" > /tmp/a.txt']
}

View File

@@ -0,0 +1,13 @@
module texttools
// expand pads (or truncates) `txt_` to exactly `l` characters by repeating
// `expand_with`. If `expand_with` is empty and the text is shorter than `l`,
// the text is returned unchanged.
// Example: texttools.expand('|', 20, ' ') -> '|' followed by 19 spaces.
pub fn expand(txt_ string, l int, expand_with string) string {
	if txt_.len >= l {
		// already long enough: just cut to size (same result as before)
		return txt_[0..l]
	}
	if expand_with.len == 0 {
		// nothing to pad with: previous code also returned the text unchanged
		return txt_
	}
	missing := l - txt_.len
	// append just enough repetitions to cover the gap, then cut to exact length
	// (the old code appended l full copies regardless of need)
	repeats := (missing + expand_with.len - 1) / expand_with.len
	return (txt_ + expand_with.repeat(repeats))[0..l]
}

View File

@@ -0,0 +1,46 @@
module texttools
// indent prefixes every line of `text` with `prefix` and makes sure the
// result ends with a newline.
pub fn indent(text string, prefix string) string {
	out := text.split_into_lines().map(prefix + it).join_lines()
	return if out.ends_with('\n') { out } else { out + '\n' }
}
// dedent removes the common leading indentation from all lines of `text`.
// The common width is computed over the non-blank lines only (tabs counted
// as one space); whitespace-only lines are emitted as empty lines.
pub fn dedent(text string) string {
	all_lines := text.split_into_lines()
	// pass 1: find the smallest indentation over the non-blank lines
	mut common := 999
	for raw in all_lines {
		if raw.trim_space() == '' {
			continue
		}
		expanded := raw.replace('\t', ' ')
		width := expanded.len - expanded.trim_left(' ').len
		if width < common {
			common = width
		}
	}
	// pass 2: cut that prefix off every line
	mut stripped := []string{cap: all_lines.len}
	for raw in all_lines {
		if raw.trim_space() == '' {
			stripped << ''
		} else {
			stripped << raw.replace('\t', ' ')[common..]
		}
	}
	return stripped.join_lines()
}

View File

@@ -0,0 +1,15 @@
module texttools
// NOTE(review): the expected length below depends on the exact whitespace
// inside the literal -- keep the literal byte-identical when editing.
fn test_dedent() {
	mut text := '
a
b
c
d
'
	text = dedent(text)
	assert text.len == 20
}

View File

@@ -0,0 +1,31 @@
module texttools
// is_int reports whether `text` consists exclusively of decimal digits.
// An empty string is not considered an int (previously it returned true).
pub fn is_int(text string) bool {
	if text.len == 0 {
		return false
	}
	for c in text {
		// compare against the digit characters instead of magic 48/57
		if c < `0` || c > `9` {
			return false
		}
	}
	return true
}
// is_upper_text reports whether `text` consists exclusively of the uppercase
// ASCII letters A-Z. An empty string is not considered upper text
// (previously it returned true).
pub fn is_upper_text(text string) bool {
	if text.len == 0 {
		return false
	}
	for c in text {
		// compare against the letter characters instead of magic 65/90
		if c < `A` || c > `Z` {
			return false
		}
	}
	return true
}
// fn sid_check(sid string) bool {
// if sid.len > 6 || sid.len < 2 {
// return false
// }
// for cha in sid {
// if (cha < 48 || cha > 57) && (cha < 97 || cha > 122) {
// return false
// }
// }
// return true
// }

View File

@@ -0,0 +1,18 @@
module texttools
// covers is_int (decimal digits only) and is_upper_text (uppercase A-Z only)
fn test_istest1() {
	assert is_int('0000')
	assert is_int('999')
	assert is_int('0')
	assert is_int('9')
	assert is_int('00 00') == false
	assert is_int('00a00') == false
	assert is_upper_text('A')
	assert is_upper_text('Z')
	assert is_upper_text('AAZZZZAAA')
	assert is_upper_text('z') == false
	assert is_upper_text('AAZZZZaAA') == false
	assert is_upper_text('AAZZZZ?AA') == false
	assert is_upper_text("AAZZZZ'AA") == false
}

View File

@@ -0,0 +1,163 @@
module texttools
// state of the multiline_to_single parser
pub enum MultiLineStatus {
	start // normal line-by-line parsing
	multiline // inside a quoted multiline value
	comment // inside a run of comment lines
}
// converts a multiline text to a single line, keeping all relevant information:
// - empty lines removed (unless inside a multiline parameter value)
// - commented lines removed as well (starts with // and #)
// - multiline values become 'line1\\nline2\\n'
// - dedent is applied before wrapping the value in '...'
// - tabs are replaced by spaces
pub fn multiline_to_single(text string) !string {
	mut multiline_first := '' // the line that opened the multiline value, e.g. "descr:'"
	mut multiline := '' // accumulated body of the current multiline value
	// mut comment_first:=""
	mut comment := []string{} // accumulated comment lines
	mut line2 := ''
	mut res := []string{}
	mut state := MultiLineStatus.start
	for line in text.split_into_lines() {
		line2 = line
		line2 = line2.replace('\t', ' ')
		mut line2_trimmed := line2.trim_space()
		if state == .multiline {
			if multiline_end_check(line2_trimmed) {
				// means we are out of multiline
				res << multiline_end(multiline_first, multiline)
				multiline_first = ''
				multiline = ''
				state = .start
			} else {
				multiline += '${line2}\n'
			}
			continue
		}
		if state == .comment {
			if comment_end_check(line2_trimmed) {
				// means we are out of the comment; note that the current line
				// deliberately falls through to the .start handling below
				res << comment_end(comment)
				comment = []string{}
				state = .start
			} else {
				comment << line2_trimmed
				continue
			}
		}
		if state == .start {
			if line2_trimmed == '' {
				continue
			}
			// deal with comments
			mut commentpart := ''
			line2_trimmed, commentpart = comment_start_check(mut res, line2_trimmed)
			if commentpart.len > 0 {
				// line starts a (potentially multi-line) comment
				state = .comment
				comment = []string{}
				comment << commentpart
				continue
			}
			if multiline_start_check(line2_trimmed) {
				// means is multiline
				state = .multiline
				multiline_first = line2_trimmed
				continue
			}
			res << line2_trimmed.trim('\n ')
		}
	}
	// last one: flush whatever is still open at the end of the text
	if state == .multiline {
		res << multiline_end(multiline_first, multiline)
	}
	if state == .comment {
		res << comment_end(comment)
	}
	return res.join(' ')
}
// multiline_end renders a finished multiline value back to single-line form:
// name:'body' where inner newlines become literal \\n sequences and single
// quotes inside the body are replaced by double quotes.
fn multiline_end(multiline_first string, multiline string) string {
	mut multiline2 := multiline
	multiline2 = dedent(multiline2)
	multiline2 = multiline2.replace('\n', '\\\\n')
	multiline2 = multiline2.replace("'", '"')
	// part after the first ':' on the opening line (content started on same line)
	firstline_content := multiline_first.all_after_first(':').trim_left('" \'')
	name := multiline_first.all_before(':').trim_space()
	if firstline_content.trim_space() != '' {
		// NOTE(review): the full opening line (name + quote included) is
		// embedded here, not just firstline_content -- confirm this is intended.
		multiline2 = "${name}:'${multiline_first}\\n${multiline2}'"
	} else {
		multiline2 = "${name}:'${multiline2}'"
	}
	return multiline2
}
// multiline_start_check reports whether `text_` opens a multiline value,
// i.e. it ends with a colon followed by an opening quote (', ", ''' or """).
// Up to three spaces between the colon and the quote are tolerated.
fn multiline_start_check(text_ string) bool {
	if text_ == '' {
		return false
	}
	mut text := text_
	for _ in 0 .. 3 {
		text = text.replace(': ', ':')
	}
	return [":'", ':"', ':"""', ":'''"].any(text.ends_with(it))
}
// multiline_end_check reports whether `text` is a lone closing quote
// (', ", ''' or """) that terminates a multiline value.
fn multiline_end_check(text string) bool {
	return text in ["'", '"', '"""', "'''"]
}
// comment_start_check inspects a line for a comment start.
// Returns (remaining_text, commentpart):
// - line starts with '<!--': whole line is comment -> ('', comment text)
// - '//' at end of line with no ' after it (so not inside a string value):
//   when real text precedes the comment it is appended to `res` immediately
//   and (text, '') is returned; otherwise ('', comment text) is returned
// - no comment found: (text, '')
fn comment_start_check(mut res []string, text_ string) (string, string) {
	mut text := text_
	if text.starts_with('<!--') {
		text = text.replace('<!--', '').trim_space()
		return '', text
	}
	if !(text.contains('//')) {
		return text, ''
	}
	mightbecomment := text.all_after_last('//')
	if !(mightbecomment.contains("'")) {
		// means we found a comment at end of line, and is not part of string statement (value)
		text = text.all_before_last('//').trim_space()
		if text.len > 0 {
			// inline comment after real content: emit the comment token now
			res << '//${mightbecomment}-/'
			return text, ''
		} else {
			// the whole line is a comment
			return '', mightbecomment
		}
	}
	return text, ''
}
// comment_end_check reports whether the current comment run stops here:
// either an explicit '-->' terminator, or a line that is no longer a // comment.
fn comment_end_check(text string) bool {
	return text.ends_with('-->') || !text.starts_with('//')
}
// comment_end collapses the collected comment lines into the single-line
// comment token form: //line1\\nline2-/
fn comment_end(comment []string) string {
	cleaned := comment.map(it.trim(' <->/\n'))
	return '//' + cleaned.join('\\\\n') + '-/'
}

View File

@@ -0,0 +1,205 @@
module texttools
// check_result asserts that `tocheck_` (after escaping its literal \n
// sequences) equals `output`, ignoring surrounding whitespace.
// Panics with both values included so a mismatch is diagnosable from the
// test output (the old message carried no information at all).
fn check_result(tocheck_ string, output string) {
	mut tocheck := tocheck_
	tocheck = tocheck.replace('\\n', '\\\\n')
	// tocheck=tocheck.replace("\'","\\'")
	tocheck = tocheck.trim_space()
	if tocheck == output.trim_space() {
		return
	}
	panic('required result not correct.\nexpected:\n${tocheck}\ngot:\n${output.trim_space()}')
}
// end-to-end: a multiline description with nested quotes and a fenced code
// block inside gets collapsed to a single line
fn test_multiline1() {
	mut text := "
id:a1
name:'need to do something 1'
description:'
## markdown works in it
description can be multiline
lets see what happens
'yes, this needs to work too'
- a
- something else
- 'something
### subtitle
```python
#even code block in the other block, crazy parsing for sure
def test():
```
'
"
	text = multiline_to_single(text) or { panic(err) }
	required_result := 'id:a1 name:\'need to do something 1\' description:\'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens\\n\\n"yes, this needs to work too"\\n\\n- a\\n- something else\\n- "something\\n\\n### subtitle\\n\\n```python\\n#even code block in the other block, crazy parsing for sure\\ndef test():\\n\\n```\''
	check_result(required_result, text)
}
// multiline value opened with " and closed with ' still parses
fn test_multiline2() {
	mut text := '
id:a1
name:\'need to do something 1\'
description:"
## markdown works in it
description can be multiline
lets see what happens
\'
'
	text = multiline_to_single(text) or { panic(err) }
	required_result := "id:a1 name:'need to do something 1' description:'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens'"
	check_result(required_result, text)
}
// same as test_multiline2 but the multiline value opens with """
fn test_multiline3() {
	mut text := '
id:a1
name:\'need to do something 1\'
description: """
## markdown works in it
description can be multiline
lets see what happens
\'
'
	text = multiline_to_single(text) or { panic(err) }
	required_result := "id:a1 name:'need to do something 1' description:'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens'"
	check_result(required_result, text)
}
// multiline value opened and closed with """
fn test_multiline4() {
	mut text := '
id:a1
name:\'need to do something 1\'
description: """
## markdown works in it
description can be multiline
lets see what happens
"""
'
	text = multiline_to_single(text) or { panic(err) }
	required_result := "id:a1 name:'need to do something 1' description:'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens'"
	check_result(required_result, text)
}
// inline // comment, full-line // comment and trailing comment all become
// //...-/ tokens in the single-line output
fn test_multiline5() {
	mut text := "
id:a1 //comment1
// a comment
name:'need to do something 1'
description: '
## markdown works in it
description can be multiline
lets see what happens
'
//another comment
"
	text = multiline_to_single(text) or { panic(err) }
	required_result := "//comment1-/ id:a1 //a comment-/ name:'need to do something 1' description:'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens' //another comment-/"
	check_result(required_result, text)
}
// multiple consecutive // comment lines collapse into one comment token;
// an <!-- --> comment line is handled as well
fn test_multiline6() {
	mut text := "
id:a1 //comment1
// comment m 1
// comment m 2
//
// comment m 3
//
name:'need to do something 1'
description: '
## markdown works in it
description can be multiline
lets see what happens
'
<!--another comment-->
"
	text = multiline_to_single(text) or { panic(err) }
	required_result := "//comment1-/ id:a1 //comment m 1\\ncomment m 2\\n\\ncomment m 3\\n-/ name:'need to do something 1' description:'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens' //another comment-/"
	check_result(required_result, text)
}
// @[assert_continues]
// fn test_comment_start_check() {
// // TEST: `hello // world, this is mario'`, `hello //world //this is mario`
// mut res := []string{}
// mut str := "hello // world, this is mario'"
// mut text, mut comment := comment_start_check(mut res, str)
// assert text == 'hello'
// assert res == ["// world, this is mario'-/"]
// assert comment == ''
// res = []string{}
// str = 'hello //world //this is mario'
// text, comment = comment_start_check(mut res, str)
// assert text == 'hello'
// assert res == ['//world //this is mario-/']
// assert comment == ''
// }
// @[assert_continues]
// fn test_multiline_start_check() {
// // TEST: `hello '''world:'''`, `hello ' world:'`, `hello " world:"`, `hello """ world: """`
// mut text := ["hello '''world:'''", "hello ' world:'", 'hello " world:"', 'hello """ world: """',
// 'hello world: """\n"""']
// expected := [false, false, false, false, true]
// for idx, input in text {
// got := multiline_start_check(input)
// assert got == expected[idx]
// }
// }
// TODO: not supported yet, requires a Comment Struct, which knows its <!-- format
// fn test_multiline7() {
// mut text := "
// id:a1 //comment1
// <!-- comment m 1
// comment m 2
// comment m 3
// -->
// name:'need to do something 1'
// description: '
// ## markdown works in it
// description can be multiline
// lets see what happens
// '
// <!--another comment-->
// "
// text = multiline_to_single(text) or { panic(err) }
// required_result:="//comment1-/ id:a1 //comment m 1\\ncomment m 2\\n\\ncomment m 3\\n-/ name:'need to do something 1' description:'## markdown works in it\\n\\ndescription can be multiline\\nlets see what happens' //another comment-/"
// check_result(required_result,text)
// }

Some files were not shown because too many files have changed in this diff Show More