Compare commits
1005 Commits

.github/workflows/documentation.yml (19 changes, vendored)

@@ -2,9 +2,9 @@ name: Deploy Documentation to Pages

on:
  push:
-    branches: ["main"]
+    branches: ["development"]
  workflow_dispatch:
-    branches: ["main"]
+    branches: ["development"]

permissions:
  contents: read

@@ -17,34 +17,31 @@ concurrency:

jobs:
  deploy-documentation:
    #if: startsWith(github.ref, 'refs/tags/')
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Install Vlang dependencies
        run: sudo apt update && sudo apt install -y libgc-dev

      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

      - name: Setup Vlang
        run: ./install_v.sh

      - name: Generate documentation
        run: |
-          ./doc.vsh
-          # ls /home/runner/work/herolib/docs
+          ./doc.vsh
+          find .

      - name: Setup Pages
-        uses: actions/configure-pages@v3
+        uses: actions/configure-pages@v4

      - name: Upload artifact
-        uses: actions/upload-pages-artifact@v1
+        uses: actions/upload-pages-artifact@v3
        with:
          path: "/home/runner/work/herolib/herolib/docs"

      - name: Deploy to GitHub Pages
        id: deployment
-        uses: actions/deploy-pages@v1
+        uses: actions/deploy-pages@v4

.github/workflows/hero_build.yml (87 changes, vendored, new file)

@@ -0,0 +1,87 @@
name: Release Hero

permissions:
  contents: write

on:
  push:
  workflow_dispatch:

jobs:
  build:
    timeout-minutes: 60
    if: startsWith(github.ref, 'refs/tags/')
    strategy:
      fail-fast: false
      matrix:
        include:
          - target: x86_64-unknown-linux-musl
            os: ubuntu-latest
            short-name: linux-i64
          - target: aarch64-unknown-linux-musl
            os: ubuntu-latest
            short-name: linux-arm64
          - target: aarch64-apple-darwin
            os: macos-latest
            short-name: macos-arm64
          # - target: x86_64-apple-darwin
          #   os: macos-13
          #   short-name: macos-i64
    runs-on: ${{ matrix.os }}

    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
      - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."

      - name: Check out repository code
        uses: actions/checkout@v4

      - name: Setup V & Herolib
        id: setup
        run: ./install_v.sh --herolib
        timeout-minutes: 10

      # - name: Do all the basic tests
      #   timeout-minutes: 25
      #   run: ./test_basic.vsh

      - name: Build Hero
        timeout-minutes: 15
        run: |
          set -e
          v -w -d use_openssl -enable-globals cli/hero.v -o cli/hero-${{ matrix.target }}
      - name: Upload
        uses: actions/upload-artifact@v4
        with:
          name: hero-${{ matrix.target }}
          path: cli/hero-${{ matrix.target }}

  release_hero:
    needs: build
    runs-on: ubuntu-latest
    permissions:
      contents: write
    if: startsWith(github.ref, 'refs/tags/')

    steps:
      - name: Check out repository code
        uses: actions/checkout@v4

      - name: Download Artifacts
        uses: actions/download-artifact@v4
        with:
          path: cli/bins
          merge-multiple: true

      - name: Release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref_name }}
          name: Release ${{ github.ref_name }}
          draft: false
          fail_on_unmatched_files: true
          generate_release_notes: true
          files: cli/bins/*

.github/workflows/hero_build_linux.yml (93 changes, vendored, file deleted)

@@ -1,93 +0,0 @@
name: Build Hero on Linux & Run tests

permissions:
  contents: write

on:
  push:
  workflow_dispatch:

jobs:
  build:
    strategy:
      matrix:
        include:
          - target: x86_64-unknown-linux-musl
            os: ubuntu-latest
            short-name: linux-i64
          # - target: aarch64-unknown-linux-musl
          #   os: ubuntu-latest
          #   short-name: linux-arm64
          # - target: aarch64-apple-darwin
          #   os: macos-latest
          #   short-name: macos-arm64
          # - target: x86_64-apple-darwin
          #   os: macos-13
          #   short-name: macos-i64
    runs-on: ${{ matrix.os }}
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
      - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."

      - name: Check out repository code
        uses: actions/checkout@v3

      - name: Setup Vlang
        run: |
          git clone --depth=1 https://github.com/vlang/v
          cd v
          make
          sudo ./v symlink
          cd ..

      - name: Setup Herolib
        run: |
          mkdir -p ~/.vmodules/freeflowuniverse
          ln -s $GITHUB_WORKSPACE/lib ~/.vmodules/freeflowuniverse/herolib

          echo "Installing secp256k1..."
          # Install build dependencies
          sudo apt-get install -y build-essential wget autoconf libtool

          # Download and extract secp256k1
          cd /tmp
          wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
          tar -xvf v0.3.2.tar.gz

          # Build and install
          cd secp256k1-0.3.2/
          ./autogen.sh
          ./configure
          make -j 5
          sudo make install

          # Cleanup
          rm -rf secp256k1-0.3.2 v0.3.2.tar.gz

          echo "secp256k1 installation complete!"

      - name: Install and Start Redis
        run: |
          # Import Redis GPG key
          curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
          # Add Redis repository
          echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list
          # Install Redis
          sudo apt-get update
          sudo apt-get install -y redis

          # Start Redis
          redis-server --daemonize yes

          # Print versions
          redis-cli --version
          redis-server --version

      - name: Build Hero
        run: |
          v -cg -enable-globals -w -n cli/hero.v

      - name: Do all the basic tests
        run: |
          ./test_basic.vsh

.github/workflows/hero_build_macos.yml (66 changes, vendored, file deleted)

@@ -1,66 +0,0 @@
name: Build Hero on Macos & Run tests

permissions:
  contents: write

on:
  push:
  workflow_dispatch:

jobs:
  build:
    strategy:
      matrix:
        include:
          - target: aarch64-apple-darwin
            os: macos-latest
            short-name: macos-arm64
          - target: x86_64-apple-darwin
            os: macos-13
            short-name: macos-i64
    runs-on: ${{ matrix.os }}
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
      - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."

      - name: Check out repository code
        uses: actions/checkout@v3

      - name: Setup Vlang
        run: |
          git clone --depth=1 https://github.com/vlang/v
          cd v
          make
          sudo ./v symlink
          cd ..

      - name: Setup Herolib
        run: |
          mkdir -p ~/.vmodules/freeflowuniverse
          ln -s $GITHUB_WORKSPACE/lib ~/.vmodules/freeflowuniverse/herolib

          echo "Installing secp256k1..."
          brew install secp256k1

          echo "secp256k1 installation complete!"

      - name: Install and Start Redis
        run: |
          brew update
          brew install redis

          # Start Redis
          redis-server --daemonize yes

          # Print versions
          redis-cli --version
          redis-server --version

      - name: Build Hero
        run: |
          v -w -cg -gc none -no-retry-compilation -d use_openssl -enable-globals cli/hero.v

      - name: Do all the basic tests
        run: |
          ./test_basic.vsh

.github/workflows/release.yml (132 changes, vendored, file deleted)

@@ -1,132 +0,0 @@
name: Release

on:
  push:
    tags:
      - v*

jobs:
  upload:
    strategy:
      matrix:
        include:
          - target: aarch64-apple-darwin
            os: macos-latest
            short-name: macos-arm64
          - target: x86_64-apple-darwin
            os: macos-13
            short-name: macos-i64
          - target: x86_64-unknown-linux-musl
            os: ubuntu-latest
            short-name: linux-i64

    runs-on: ${{ matrix.os }}
    permissions:
      contents: write

    steps:
      - name: Check out repository code
        uses: actions/checkout@v4

      - name: Setup Vlang
        run: |
          git clone --depth=1 https://github.com/vlang/v
          cd v
          make
          sudo ./v symlink
          cd ..

      - name: Setup Herolib
        run: |
          mkdir -p ~/.vmodules/freeflowuniverse
          ln -s $GITHUB_WORKSPACE/lib ~/.vmodules/freeflowuniverse/herolib

          echo "Installing secp256k1..."
          if [[ ${{ matrix.os }} == 'macos-latest' || ${{ matrix.os }} == 'macos-13' ]]; then
            brew install secp256k1

          elif [[ ${{ matrix.os }} == 'ubuntu-latest' ]]; then
            # Install build dependencies
            sudo apt-get install -y build-essential wget autoconf libtool

            # Download and extract secp256k1
            cd /tmp
            wget https://github.com/bitcoin-core/secp256k1/archive/refs/tags/v0.3.2.tar.gz
            tar -xvf v0.3.2.tar.gz

            # Build and install
            cd secp256k1-0.3.2/
            ./autogen.sh
            ./configure
            make -j 5
            sudo make install

          else
            echo "Unsupported OS: ${{ matrix.os }}"
            exit 1
          fi

          echo "secp256k1 installation complete!"

      - name: Build Hero
        run: |
          v -w -cg -gc none -no-retry-compilation -d use_openssl -enable-globals cli/hero.v -o cli/hero-${{ matrix.target }}

      - name: Upload
        uses: actions/upload-artifact@v4
        with:
          name: hero-${{ matrix.target }}
          path: cli/hero-${{ matrix.target }}

  release_hero:
    needs: upload
    runs-on: ubuntu-latest
    permissions:
      contents: write

    steps:
      - name: Check out repository code
        uses: actions/checkout@v4

      # TODO: this adds commits that don't belong to this branhc, check another action
      # - name: Generate changelog
      #   id: changelog
      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
      #   with:
      #     token: ${{ secrets.GITHUB_TOKEN }}
      #     headerLabel: "# 📑 Changelog"
      #     breakingLabel: "### 💥 Breaking"
      #     enhancementLabel: "### 🚀 Enhancements"
      #     bugsLabel: "### 🐛 Bug fixes"
      #     securityLabel: "### 🛡️ Security"
      #     issuesLabel: "### 📁 Other issues"
      #     prLabel: "### 📁 Other pull requests"
      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
      #     onlyLastTag: true
      #     issues: false
      #     issuesWoLabels: false
      #     pullRequests: true
      #     prWoLabels: true
      #     author: true
      #     unreleased: true
      #     compareLink: true
      #     stripGeneratorNotice: true
      #     verbose: true

      - name: Download Artifacts
        uses: actions/download-artifact@v4
        with:
          path: cli/bins
          merge-multiple: true

      - name: Release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          name: Release ${{ github.ref_name }}
          draft: false
          fail_on_unmatched_files: true
          # body: ${{ steps.changelog.outputs.changelog }}
          files: cli/bins/*

.github/workflows/test.yml (32 changes, vendored, new file)

@@ -0,0 +1,32 @@
name: Build on Linux & Run tests

permissions:
  contents: write

on:
  push:
  workflow_dispatch:

jobs:
  build:
    strategy:
      matrix:
        include:
          - target: x86_64-unknown-linux-musl
            os: ubuntu-latest
            short-name: linux-i64
    runs-on: ${{ matrix.os }}
    steps:
      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!"
      - run: echo "🔎 The name of your branch is ${{ github.ref_name }} and your repository is ${{ github.repository }}."

      - name: Check out repository code
        uses: actions/checkout@v3

      - name: Setup V & Herolib
        run: ./install_v.sh --herolib

      - name: Do all the basic tests
        run: ./test_basic.vsh

.gitignore (31 changes, vendored)

@@ -1,4 +1,13 @@

# Additional ignore files and directories
Thumbs.db
# Logs
logs/
*.log
*.out
# Compiled Python files
*.pyc
*.pyo
__pycache__/
*dSYM/
.vmodules/
.vscode

@@ -7,6 +16,7 @@ vls.*
vls.log
node_modules/
docs/
vdocs/
photonwrapper.so
x
.env

@@ -25,7 +35,22 @@ dump.rdb
output/
*.db
.stellar
vdocs/
data.ms/
test_basic
cli/hero
cli/hero
.aider*
storage/
.qdrant-initialized
.compile_cache
compile_results.log
tmp
compile_summary.log
.summary_lock
.aider*
*.dylib
server
HTTP_REST_MCP_DEMO.md
MCP_HTTP_REST_IMPLEMENTATION_PLAN.md
.roo
.kilocode
.continue

CONTRIBUTING.md (183 changes, new file)

@@ -0,0 +1,183 @@
# Contributing to Herolib

Thank you for your interest in contributing to Herolib! This document provides guidelines and instructions for contributing to the project.

## Table of Contents

- [Getting Started](#getting-started)
  - [Setting Up Development Environment](#setting-up-development-environment)
  - [Repository Structure](#repository-structure)
- [Development Workflow](#development-workflow)
  - [Branching Strategy](#branching-strategy)
  - [Making Changes](#making-changes)
  - [Testing](#testing)
  - [Pull Requests](#pull-requests)
- [Code Guidelines](#code-guidelines)
- [CI/CD Process](#cicd-process)
- [Documentation](#documentation)
- [Troubleshooting](#troubleshooting)

## Getting Started

### Setting Up Development Environment

For developers, you can use the automated installation script:

```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
bash /tmp/install_v.sh --analyzer --herolib
# IMPORTANT: Start a new shell after installation for paths to be set correctly
```

Alternatively, you can manually set up the environment:

```bash
mkdir -p ~/code/github/freeflowuniverse
cd ~/code/github/freeflowuniverse
git clone git@github.com:freeflowuniverse/herolib.git
cd herolib
# checkout development branch for most recent changes
git checkout development
bash install.sh
```

### Repository Structure

Herolib is an opinionated library primarily used by ThreeFold to automate cloud environments. The repository is organized into several key directories:

- `/lib`: Core library code
- `/cli`: Command-line interface tools, including the Hero tool
- `/cookbook`: Examples and guides for using Herolib
- `/scripts`: Installation and utility scripts
- `/docs`: Generated documentation

## Development Workflow

### Branching Strategy

- `development`: Main development branch where all features and fixes are merged
- `main`: Stable release branch

For new features or bug fixes, create a branch from `development` with a descriptive name.

### Making Changes

1. Create a new branch from `development`:
```bash
git checkout development
git pull
git checkout -b feature/your-feature-name
```

2. Make your changes, following the code guidelines.

3. Run tests to ensure your changes don't break existing functionality:
```bash
./test_basic.vsh
```

4. Commit your changes with clear, descriptive commit messages.

### Testing

Before submitting a pull request, ensure all tests pass:

```bash
# Run all basic tests
./test_basic.vsh

# Run tests for a specific module
vtest ~/code/github/freeflowuniverse/herolib/lib/osal/package_test.v

# Run tests for an entire directory
vtest ~/code/github/freeflowuniverse/herolib/lib/osal
```

The test script (`test_basic.vsh`) manages test execution and caching to optimize performance. It automatically skips tests listed in the ignore or error sections of the script.

### Pull Requests

1. Push your branch to the repository:
```bash
git push origin feature/your-feature-name
```

2. Create a pull request against the `development` branch.

3. Ensure your PR includes:
   - A clear description of the changes
   - Any related issue numbers
   - Documentation updates if applicable

4. Wait for CI checks to pass and address any feedback from reviewers.

## Code Guidelines

- Follow the existing code style and patterns in the repository
- Write clear, concise code with appropriate comments
- Keep modules separate and focused on specific functionality
- Maintain separation between the jsonschema and jsonrpc modules rather than merging them

## CI/CD Process

The repository uses GitHub Actions for continuous integration and deployment:

### 1. Testing Workflow (`test.yml`)

This workflow runs on every push and pull request to ensure code quality:
- Sets up V and Herolib
- Runs all basic tests using `test_basic.vsh`

All tests must pass before a PR can be merged to the `development` branch.

### 2. Hero Build Workflow (`hero_build.yml`)

This workflow builds the Hero tool for multiple platforms when a new tag is created:
- Builds for Linux (x86_64, aarch64) and macOS (x86_64, aarch64)
- Runs all basic tests
- Creates GitHub releases with the built binaries

### 3. Documentation Workflow (`documentation.yml`)

This workflow automatically updates the documentation on GitHub Pages when changes are pushed to the `development` branch:
- Generates documentation using `doc.vsh`
- Deploys the documentation to GitHub Pages

## Documentation

To generate documentation locally:

```bash
cd ~/code/github/freeflowuniverse/herolib
bash doc.sh
```

The documentation is automatically published to [https://freeflowuniverse.github.io/herolib/](https://freeflowuniverse.github.io/herolib/) when changes are pushed to the `development` branch.

## Troubleshooting

### TCC Compiler Error on macOS

If you encounter the following error when using TCC compiler on macOS:

```
In file included from /Users/timurgordon/code/github/vlang/v/thirdparty/cJSON/cJSON.c:42:
/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h:614: error: ';' expected (got "__fabsf16")
```

This is caused by incompatibility between TCC and the half precision math functions in the macOS SDK. To fix this issue:

1. Open the math.h file:
```bash
sudo nano /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h
```

2. Comment out the half precision math functions (around line 612-626).

For more details, see the [README.md](README.md) troubleshooting section.

## Additional Resources

- [Herolib Documentation](https://freeflowuniverse.github.io/herolib/)
- [Cookbook Examples](https://github.com/freeflowuniverse/herolib/tree/development/cookbook)
- [AI Prompts](aiprompts/starter/0_start_here.md)

README.md (151 changes)

@@ -1,23 +1,53 @@
# herolib
# Herolib

a smaller version of herolib with only the items we need for hero
Herolib is an opinionated library primarily used by ThreeFold to automate cloud environments. It provides a comprehensive set of tools and utilities for cloud automation, git operations, documentation building, and more.

> [documentation here](https://freeflowuniverse.github.io/herolib/)
[](https://github.com/freeflowuniverse/herolib/actions/workflows/test.yml)
[](https://github.com/freeflowuniverse/herolib/actions/workflows/documentation.yml)

## automated install
> [Complete Documentation](https://freeflowuniverse.github.io/herolib/)

## Installation

### For Users

The Hero tool can be installed with a single command:

```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/main/install_v.sh' > /tmp/install_v.sh
bash /tmp/install_v.sh --analyzer --herolib
#DONT FORGET TO START A NEW SHELL (otherwise the paths will not be set)
curl https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_hero.sh | bash
```

### details
Hero will be installed in:
- `/usr/local/bin` for Linux
- `~/hero/bin` for macOS

After installation on macOS, you may need to do source see below or restart your terminal to ensure the `hero` command is available:

```bash
source ~/.zprofile
```

#~/code/github/freeflowuniverse/herolib/install_v.sh --help
The Hero tool can be used to work with git, build documentation, interact with Hero AI, and more.

### For Developers

For development purposes, use the automated installation script:

```bash
curl 'https://raw.githubusercontent.com/freeflowuniverse/herolib/refs/heads/development/install_v.sh' > /tmp/install_v.sh
bash /tmp/install_v.sh --analyzer --herolib

#do not forget to do the following this makes sure vtest and vrun exists
cd ~/code/github/freeflowuniverse/herolib
bash install_herolib.vsh

# IMPORTANT: Start a new shell after installation for paths to be set correctly

```

#### Installation Options

```
V & HeroLib Installer Script

Usage: ~/code/github/freeflowuniverse/herolib/install_v.sh [options]
@@ -36,29 +66,108 @@ Examples:
~/code/github/freeflowuniverse/herolib/install_v.sh --analyzer
~/code/github/freeflowuniverse/herolib/install_v.sh --herolib
~/code/github/freeflowuniverse/herolib/install_v.sh --reset --analyzer # Fresh install of both

```

### to test
## Features

to run the basic tests, important !!!
Herolib provides a wide range of functionality:

- Cloud automation tools
- Git operations and management
### Offline Mode for Git Operations

Herolib now supports an `offline` mode for Git operations, which prevents automatic fetching from remote repositories. This can be useful in environments with limited or no internet connectivity, or when you want to avoid network calls during development or testing.

To enable offline mode:

- **Via `GitStructureConfig`**: Set the `offline` field to `true` in the `GitStructureConfig` struct.
- **Via `GitStructureArgsNew`**: When creating a new `GitStructure` instance using `gittools.new()`, set the `offline` parameter to `true`.
- **Via Environment Variable**: Set the `OFFLINE` environment variable to any value (e.g., `export OFFLINE=true`).

When offline mode is active, `git fetch --all` operations will be skipped, and a debug message "fetch skipped (offline)" will be printed.
- Documentation building
- Hero AI integration
- System management utilities
- And much more

Check the [cookbook](https://github.com/freeflowuniverse/herolib/tree/development/cookbook) for examples and use cases.

## Testing

Running tests is an essential part of development. To run the basic tests:

```bash
# Run all basic tests
~/code/github/freeflowuniverse/herolib/test_basic.vsh
```

```bash
# Run tests for a specific module
vtest ~/code/github/freeflowuniverse/herolib/lib/osal/package_test.v
#for a full dir
vtest ~/code/github/freeflowuniverse/herolib/lib/osal

#to do al basic tests
~/code/github/freeflowuniverse/herolib/test_basic.vsh
# Run tests for an entire directory
vtest ~/code/github/freeflowuniverse/herolib/lib/osal
```

The `vtest` command is an alias for testing functionality.

## Contributing

We welcome contributions to Herolib! Please see our [CONTRIBUTING.md](CONTRIBUTING.md) file for detailed information on:

- Setting up your development environment
- Understanding the repository structure
- Following our development workflow
- Making pull requests
- CI/CD processes

## Troubleshooting

### TCC Compiler Error on macOS

If you encounter the following error when using TCC compiler on macOS:

```
vtest is an alias to test functionality
In file included from /Users/timurgordon/code/github/vlang/v/thirdparty/cJSON/cJSON.c:42:
/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h:614: error: ';' expected (got "__fabsf16")
```

This is caused by incompatibility between TCC and the half precision math functions in the macOS SDK. To fix this issue:

## important to read
1. Open the math.h file:
```bash
sudo nano /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include/math.h
```

- [aiprompts/starter/0_start_here.md](aiprompts/starter/0_start_here.md)
2. Comment out the following lines (around line 612-626):
```c
/* half precision math functions */
// extern _Float16 __fabsf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __hypotf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __sqrtf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __ceilf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __floorf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __rintf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __roundf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __truncf16(_Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __copysignf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __nextafterf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __fmaxf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __fminf16(_Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
// extern _Float16 __fmaf16(_Float16, _Float16, _Float16) __API_AVAILABLE(macos(15.0), ios(18.0), watchos(11.0), tvos(18.0));
```

3. Save the file and try compiling again.

## Additional Resources

- [Complete Documentation](https://freeflowuniverse.github.io/herolib/)
- [Cookbook Examples](https://github.com/freeflowuniverse/herolib/tree/development/cookbook)
- [AI Prompts](aiprompts/starter/0_start_here.md)

## Generating Documentation

To generate documentation locally:

```bash
cd ~/code/github/freeflowuniverse/herolib
bash doc.sh
```
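
As an illustration of the offline mode described in the README changes above, here is a minimal V sketch. The `gittools.new()` call, the `offline` parameter, and the `OFFLINE` environment variable come from the README text; the import path, the error handling, and the idea that `new()` works with only this argument are assumptions, not something this diff confirms.

```v
// Hypothetical usage sketch of Herolib's offline mode for git operations.
// Assumption: gittools lives under freeflowuniverse.herolib.develop.gittools.
import freeflowuniverse.herolib.develop.gittools
import os

fn main() {
	// Option A: opt in when constructing the GitStructure, so that
	// `git fetch --all` is skipped while repositories are scanned.
	mut gs := gittools.new(offline: true) or { panic(err) }
	println(gs)

	// Option B: set the OFFLINE environment variable before git helpers run;
	// per the README, any value enables offline mode.
	os.setenv('OFFLINE', 'true', true)
}
```
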

aiprompts/.openhands/setup.sh (19 changes, new file)

@@ -0,0 +1,19 @@
#!/bin/bash

# Herolib Web Server Installation Script
# This script sets up the necessary environment for the Flask web server.

set -e  # Exit on any error

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Script directory
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

/workspace/herolib/install_v.sh

@@ -1,204 +0,0 @@
# instructions how to work with heroscript in vlang

## heroscript

Heroscript is our small scripting language which has following structure

an example of a heroscript is

```heroscript

!!mailclient.configure
    name: 'myname'
    host: 'localhost'
    port: 25
    secure: 1
    reset: 1
    description: '
        a description can be multiline

        like this
        '

```

Notice how:
- every action starts with !!
- the first part is the actor, mailclient in this case
- the 2e part is the action name, configure in this case
- multilines are supported see the description field

## how to process heroscript in Vlang

- heroscript can be converted to a struct,
- the methods available to get the params are in 'params' section further in this doc

```vlang
//the object which will be configured
pub struct mailclient {
pub mut:
	name        string
	host        string
	port        int
	secure      bool
	description string
}

mut plbook := playbook.new(text: $the_heroscript_from_above)!
play_mailclient(mut plbook)! //see below in vlang block there it all happens

pub fn play_mailclient(mut plbook playbook.PlayBook) ! {

	//find all actions are !!$actor.$actionname. in this case above the actor is !!mailclient, we check with the fitler if it exists, if not we return
	mailclient_actions := plbook.find(filter: 'mailclient.')!
	for action in mailclient_actions {
		if action.name == "configure"{
			mut p := action.params
			mut obj := mailclientScript{
				//INFO: all details about the get methods can be found in 'params get methods' section
				name        : p.get('name')! //will give error if not exist
				homedir     : p.get('homedir')!
				title       : p.get_default('title', 'My Hero DAG')! //uses a default if not set
				reset       : p.get_default_false('reset')
				start       : p.get_default_true('start')
				colors      : p.get_list('colors')
				description : p.get_default('description','')!
			}
		}

	}
}

}

## params get methods (param getters)

above in the p.get...

below you can find the methods which can be used on the params

```vlang

exists(key_ string) bool

//check if arg exist (arg is just a value in the string e.g. red, not value:something)
exists_arg(key_ string) bool

//see if the kwarg with the key exists if yes return as string trimmed
get(key_ string) !string

//return the arg with nr, 0 is the first
get_arg(nr int) !string

//return arg, if the nr is larger than amount of args, will return the defval
get_arg_default(nr int, defval string) !string

get_default(key string, defval string) !string

get_default_false(key string) bool

get_default_true(key string) bool

get_float(key string) !f64

get_float_default(key string, defval f64) !f64

get_from_hashmap(key_ string, defval string, hashmap map[string]string) !string

get_int(key string) !int

get_int_default(key string, defval int) !int

//Looks for a list of strings in the parameters. ',' are used as deliminator to list
get_list(key string) ![]string

get_list_default(key string, def []string) ![]string

get_list_f32(key string) ![]f32

get_list_f32_default(key string, def []f32) []f32

get_list_f64(key string) ![]f64

get_list_f64_default(key string, def []f64) []f64

get_list_i16(key string) ![]i16

get_list_i16_default(key string, def []i16) []i16

get_list_i64(key string) ![]i64

get_list_i64_default(key string, def []i64) []i64

get_list_i8(key string) ![]i8

get_list_i8_default(key string, def []i8) []i8

get_list_int(key string) ![]int

get_list_int_default(key string, def []int) []int

get_list_namefix(key string) ![]string

get_list_namefix_default(key string, def []string) ![]string

get_list_u16(key string) ![]u16

get_list_u16_default(key string, def []u16) []u16

get_list_u32(key string) ![]u32

get_list_u32_default(key string, def []u32) []u32

get_list_u64(key string) ![]u64

get_list_u64_default(key string, def []u64) []u64

get_list_u8(key string) ![]u8

get_list_u8_default(key string, def []u8) []u8

get_map() map[string]string

get_path(key string) !string

get_path_create(key string) !string

get_percentage(key string) !f64

get_percentage_default(key string, defval string) !f64

//convert GB, MB, KB to bytes e.g. 10 GB becomes bytes in u64
get_storagecapacity_in_bytes(key string) !u64

get_storagecapacity_in_bytes_default(key string, defval u64) !u64

get_storagecapacity_in_gigabytes(key string) !u64

//Get Expiration object from time string input input can be either relative or absolute## Relative time
get_time(key string) !ourtime.OurTime

get_time_default(key string, defval ourtime.OurTime) !ourtime.OurTime

get_time_interval(key string) !Duration

get_timestamp(key string) !Duration

get_timestamp_default(key string, defval Duration) !Duration

get_u32(key string) !u32

get_u32_default(key string, defval u32) !u32

get_u64(key string) !u64

get_u64_default(key string, defval u64) !u64

get_u8(key string) !u8

get_u8_default(key string, defval u8) !u8

```
@@ -1,142 +0,0 @@

# how to use params

Works very well in combination with heroscript.

## How to get the paramsparser

```v
import freeflowuniverse.herolib.data.paramsparser

// Create new params from text
params := paramsparser.new("color:red size:'large' priority:1 enable:true")!

// Or create empty params and add later
mut params := paramsparser.new_params()
params.set("color", "red")
```

## Parameter Format

The parser supports several formats:

1. Key-value pairs: `key:value`
2. Quoted values: `key:'value with spaces'`
3. Arguments without keys: `arg1 arg2`
4. Comments: `// this is a comment`

Example:

```v
text := "name:'John Doe' age:30 active:true // user details"
params := paramsparser.new(text)!
```

## Getting Values

The module provides various methods to retrieve values:

```v
// Get string value
name := params.get("name")! // returns "John Doe"

// Get with default value
color := params.get_default("color", "blue")! // returns "blue" if color not set

// Get as integer
age := params.get_int("age")! // returns 30

// Get as boolean (true if value is "1", "true", "y", "yes")
is_active := params.get_default_true("active")

// Get as float
score := params.get_float("score")!

// Get as percentage (converts "80%" to 0.8)
progress := params.get_percentage("progress")!
```

## Type Conversion Methods

The module supports various type conversions:

### Basic Types

- `get_int()`: Convert to a 32-bit signed integer
- `get_u32()`: Convert to an unsigned 32-bit integer
- `get_u64()`: Convert to an unsigned 64-bit integer
- `get_u8()`: Convert to an unsigned 8-bit integer
- `get_float()`: Convert to a 64-bit float
- `get_percentage()`: Convert a percentage string to a float (e.g., "80%" → 0.8)

### Boolean Values

- `get_default_true()`: Returns true if the value is empty, "1", "true", "y", or "yes"
- `get_default_false()`: Returns false if the value is empty, "0", "false", "n", or "no"

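A minimal sketch of these boolean and percentage helpers, in the same snippet style as above; the keys and values are made up for the example and the results follow the rules just listed:

```v
import freeflowuniverse.herolib.data.paramsparser

params := paramsparser.new("active:yes archived:0 progress:'80%'")!

active := params.get_default_true('active')      // true: "yes" is a truthy value
archived := params.get_default_false('archived') // false: "0" is a falsy value
progress := params.get_percentage('progress')!   // 0.8: "80%" becomes a fraction

println('${active} ${archived} ${progress}')
```
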
### Lists

The module provides robust support for parsing and converting lists:

```v
// Basic list parsing
names := params.get_list("users")! // parses ["user1", "user2", "user3"]

// With default value
tags := params.get_list_default("tags", ["default"])!

// Lists with type conversion
numbers := params.get_list_int("ids")! // converts each item to int
amounts := params.get_list_f64("prices")! // converts each item to f64

// Name-fixed lists (normalizes each item)
clean_names := params.get_list_namefix("categories")!
```

Supported list types:

- `get_list()`: String list
- `get_list_u8()`, `get_list_u16()`, `get_list_u32()`, `get_list_u64()`: Unsigned integers
- `get_list_i8()`, `get_list_i16()`, `get_list_int()`, `get_list_i64()`: Signed integers
- `get_list_f32()`, `get_list_f64()`: Floating point numbers

Each list method has a corresponding `_default` version that accepts a default value.

Valid list formats:

```
users: "john, jane,bob"
ids: "1,2,3,4,5"
```

### Advanced

```v
get_map() map[string]string

get_path(key string) !string

get_path_create(key string) !string // will create the path if it doesn't exist yet

get_percentage(key string) !f64

get_percentage_default(key string, defval string) !f64

// convert GB, MB, KB to bytes e.g. 10 GB becomes bytes in u64
get_storagecapacity_in_bytes(key string) !u64

get_storagecapacity_in_bytes_default(key string, defval u64) !u64

get_storagecapacity_in_gigabytes(key string) !u64

// get an ourtime.OurTime object from a time string input, the input can be either relative or absolute
get_time(key string) !ourtime.OurTime

get_time_default(key string, defval ourtime.OurTime) !ourtime.OurTime

get_time_interval(key string) !Duration

get_timestamp(key string) !Duration

get_timestamp_default(key string, defval Duration) !Duration
```

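A short sketch of the advanced getters in use. The keys and values are illustrative; the conversions follow the comments in the listing above (a '10 GB' capacity becoming a byte count as u64, `get_path_create` creating the directory when missing), and the relative '+1h' time form is the notation described in the time remarks later in this document:

```v
import freeflowuniverse.herolib.data.paramsparser

params := paramsparser.new("workdir:'/tmp/myproject' disk:'10 GB' deadline:'+1h'")!

workdir := params.get_path_create('workdir')!               // creates /tmp/myproject if missing
disk_bytes := params.get_storagecapacity_in_bytes('disk')!  // 10 GB expressed in bytes, as u64
deadline := params.get_time('deadline')!                    // relative time, 1 hour from now

println('${workdir} ${disk_bytes} ${deadline}')
```
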
@@ -1,309 +0,0 @@

# how to work with heroscript in vlang

## heroscript

Heroscript is our small scripting language with the following structure.

An example of a heroscript is:

```heroscript

!!dagu.script_define
    name: 'test_dag'
    homedir: ''
    title: 'a title'
    reset: 1
    start: true // true or 1 is the same
    colors: 'green,red,purple' // lists are comma separated
    description: '
        a description can be multiline

        like this
        '

!!dagu.add_step
    dag: 'test_dag'
    name: 'hello_world'
    command: 'echo hello world'

!!dagu.add_step
    dag: 'test_dag'
    name: 'last_step'
    command: 'echo last step'

```

Notice how:

- every action starts with !!
- the first part is the actor, e.g. dagu in this case
- the second part is the action name
- multilines are supported, see the description field

## how to process heroscript in Vlang

- heroscript can be converted to a struct
- the methods available to get the params are in the 'params get methods' section further in this doc

```vlang

fn test_play_dagu() ! {
	mut plbook := playbook.new(text: thetext_from_above)!
	play_dagu(mut plbook)! // see below in the vlang block, that is where it all happens
}

pub fn play_dagu(mut plbook playbook.PlayBook) ! {
	// find all actions of the form !!$actor.$actionname, in this case the actor is dagu,
	// we check with the filter if any exist, if not we return
	dagu_actions := plbook.find(filter: 'dagu.')!
	if dagu_actions.len == 0 {
		return
	}
	play_dagu_basic(mut plbook)!
}

pub struct DaguScript {
pub mut:
	name        string
	homedir     string
	title       string
	reset       bool
	start       bool
	colors      []string
	description string
}

// play_dagu plays the dagu play commands
pub fn play_dagu_basic(mut plbook playbook.PlayBook) ! {
	// now find the specific ones for dagu.script_define
	mut actions := plbook.find(filter: 'dagu.script_define')!

	if actions.len > 0 {
		for myaction in actions {
			// get the params object from the action object, it can then be processed using the param getters
			mut p := myaction.params
			mut obj := DaguScript{
				// INFO: all details about the get methods can be found in the 'params get methods' section
				name:        p.get('name')! // will give an error if it does not exist
				homedir:     p.get('homedir')!
				title:       p.get_default('title', 'My Hero DAG')! // uses a default if not set
				reset:       p.get_default_false('reset')
				start:       p.get_default_true('start')
				colors:      p.get_list('colors')!
				description: p.get_default('description', '')!
			}
			// ... do something with obj
		}
	}

	// there can be more actions which will have another filter
}

```

## params get methods (param getters)

```vlang

fn (params &Params) exists(key_ string) bool

// check if an arg exists (an arg is just a value in the string e.g. red, not value:something)
fn (params &Params) exists_arg(key_ string) bool

// see if the kwarg with the key exists, if yes return as string, trimmed
fn (params &Params) get(key_ string) !string

// return the arg with nr, 0 is the first
fn (params &Params) get_arg(nr int) !string

// return arg, if nr is larger than the amount of args, returns the defval
fn (params &Params) get_arg_default(nr int, defval string) !string

fn (params &Params) get_default(key string, defval string) !string

fn (params &Params) get_default_false(key string) bool

fn (params &Params) get_default_true(key string) bool

fn (params &Params) get_float(key string) !f64

fn (params &Params) get_float_default(key string, defval f64) !f64

fn (params &Params) get_from_hashmap(key_ string, defval string, hashmap map[string]string) !string

fn (params &Params) get_int(key string) !int

fn (params &Params) get_int_default(key string, defval int) !int

// looks for a list of strings in the parameters, ',' is used as delimiter
fn (params &Params) get_list(key string) ![]string

fn (params &Params) get_list_default(key string, def []string) ![]string

fn (params &Params) get_list_f32(key string) ![]f32

fn (params &Params) get_list_f32_default(key string, def []f32) []f32

fn (params &Params) get_list_f64(key string) ![]f64

fn (params &Params) get_list_f64_default(key string, def []f64) []f64

fn (params &Params) get_list_i16(key string) ![]i16

fn (params &Params) get_list_i16_default(key string, def []i16) []i16

fn (params &Params) get_list_i64(key string) ![]i64

fn (params &Params) get_list_i64_default(key string, def []i64) []i64

fn (params &Params) get_list_i8(key string) ![]i8

fn (params &Params) get_list_i8_default(key string, def []i8) []i8

fn (params &Params) get_list_int(key string) ![]int

fn (params &Params) get_list_int_default(key string, def []int) []int

fn (params &Params) get_list_namefix(key string) ![]string

fn (params &Params) get_list_namefix_default(key string, def []string) ![]string

fn (params &Params) get_list_u16(key string) ![]u16

fn (params &Params) get_list_u16_default(key string, def []u16) []u16

fn (params &Params) get_list_u32(key string) ![]u32

fn (params &Params) get_list_u32_default(key string, def []u32) []u32

fn (params &Params) get_list_u64(key string) ![]u64

fn (params &Params) get_list_u64_default(key string, def []u64) []u64

fn (params &Params) get_list_u8(key string) ![]u8

fn (params &Params) get_list_u8_default(key string, def []u8) []u8

fn (params &Params) get_map() map[string]string

fn (params &Params) get_path(key string) !string

fn (params &Params) get_path_create(key string) !string

fn (params &Params) get_percentage(key string) !f64

fn (params &Params) get_percentage_default(key string, defval string) !f64

// convert GB, MB, KB to bytes e.g. 10 GB becomes bytes in u64
fn (params &Params) get_storagecapacity_in_bytes(key string) !u64

fn (params &Params) get_storagecapacity_in_bytes_default(key string, defval u64) !u64

fn (params &Params) get_storagecapacity_in_gigabytes(key string) !u64

// get an ourtime.OurTime object from a time string input, the input can be either relative or absolute
fn (params &Params) get_time(key string) !ourtime.OurTime

fn (params &Params) get_time_default(key string, defval ourtime.OurTime) !ourtime.OurTime

fn (params &Params) get_time_interval(key string) !Duration

fn (params &Params) get_timestamp(key string) !Duration

fn (params &Params) get_timestamp_default(key string, defval Duration) !Duration

fn (params &Params) get_u32(key string) !u32

fn (params &Params) get_u32_default(key string, defval u32) !u32

fn (params &Params) get_u64(key string) !u64

fn (params &Params) get_u64_default(key string, defval u64) !u64

fn (params &Params) get_u8(key string) !u8

fn (params &Params) get_u8_default(key string, defval u8) !u8

```

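As a quick illustration of these getters, a sketch in the same style as the play_dagu example above (`p` is the Params of a parsed action; the keys and defaults are made up for the example):

```v
// p is the Params of a parsed action, as in play_dagu_basic above
port := p.get_int_default('port', 8080)!
tags := p.get_list('tags')! // 'a,b,c' -> ['a', 'b', 'c']
disk := p.get_storagecapacity_in_bytes_default('disk', 0)!
title := p.get_default('title', 'untitled')!
```
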
## how internally a heroscript gets parsed for params

- example to show how a heroscript gets parsed into an action with params
- params are part of the action object

```heroscript
example text to parse (heroscript)

id:a1 name6:aaaaa
name:'need to do something 1'
description:
'
## markdown works in it
description can be multiline
lets see what happens

- a
- something else

### subtitle
'

name2: test
name3: hi
name10:'this is with space' name11:aaa11

name4: 'aaa'

//somecomment
name5: 'aab'
```

the params are part of the action and are represented as follows for the above:

```vlang
Params{
    params: [Param{
        key: 'id'
        value: 'a1'
    }, Param{
        key: 'name6'
        value: 'aaaaa'
    }, Param{
        key: 'name'
        value: 'need to do something 1'
    }, Param{
        key: 'description'
        value: '## markdown works in it

description can be multiline
lets see what happens

- a
- something else

### subtitle
'
    }, Param{
        key: 'name2'
        value: 'test'
    }, Param{
        key: 'name3'
        value: 'hi'
    }, Param{
        key: 'name10'
        value: 'this is with space'
    }, Param{
        key: 'name11'
        value: 'aaa11'
    }, Param{
        key: 'name4'
        value: 'aaa'
    }, Param{
        key: 'name5'
        value: 'aab'
    }]
}
```

@@ -1,441 +0,0 @@

# module osal

Import as:

```vlang
import freeflowuniverse.herolib.osal

osal.ping...
```

## ping

```v
assert ping(address: '338.8.8.8') == .unknownhost
assert ping(address: '8.8.8.8') == .ok
assert ping(address: '18.8.8.8') == .timeout
```

Will panic if the result is not one of these, i.e. an unknown error.

## platform

```v
if platform() == .osx {
	// do something
}

pub enum PlatformType {
	unknown
	osx
	ubuntu
	alpine
}

pub enum CPUType {
	unknown
	intel
	arm
	intel32
	arm32
}
```

## process

### execute jobs

```v
mut job2 := osal.exec(cmd: 'ls /')!
println(job2)

// won't die, the result can be found in /tmp/execscripts
mut job := osal.exec(cmd: 'ls dsds', ignore_error: true)!
// this one has an error
println(job)
```

All scripts are executed from a file in /tmp/execscripts.

If the script executes well it is removed, so there are no leftovers; if it fails the script stays in the dir.

### check process logs

```v
mut pm := process.processmap_get()!
```

info returns like:

```
}, freeflowuniverse.herolib.process.ProcessInfo{
    cpu_perc: 0
    mem_perc: 0
    cmd: 'mc'
    pid: 84455
    ppid: 84467
    rss: 3168
}, freeflowuniverse.herolib.process.ProcessInfo{
    cpu_perc: 0
    mem_perc: 0
    cmd: 'zsh -Z -g'
    pid: 84467
    ppid: 84469
    rss: 1360
}]
```

## other commands

fn bin_path() !string

fn cmd_add(args_ CmdAddArgs) !
copy a binary to the right location on the local computer, e.g. /usr/local/bin on linux, ~/hero/bin on osx; will also add the bin location to the path of .zprofile and .zshrc (different per platform)

fn cmd_exists(cmd string) bool

fn cmd_exists_profile(cmd string) bool

fn cmd_path(cmd string) !string
is the same as executing which in the OS, returns the path or an error

fn cmd_to_script_path(cmd Command) !string
will return a temporary path which can then be executed, is a helper function for making a script out of a command

fn cputype() CPUType

fn cputype_enum_from_string(cpytype string) CPUType
Returns the enum value that matches the provided string for CPUType

fn dir_delete(path string) !
remove all if it exists

fn dir_ensure(path string) !
make sure the dir exists (create it if needed)

fn dir_reset(path string) !
remove all if it exists and then (re-)create

fn done_delete(key string) !

fn done_exists(key string) bool

fn done_get(key string) ?string

fn done_get_int(key string) int

fn done_get_str(key string) string

fn done_print() !

fn done_reset() !

fn done_set(key string, val string) !

fn download(args_ DownloadArgs) !pathlib.Path
if name is not specified it is derived from the filename part of the url; if the url ends in an extension like .md .txt .log .text ... the file will be downloaded

fn env_get(key string) !string
Returns the requested environment variable if it exists or throws an error if it does not

fn env_get_all() map[string]string
Returns all existing environment variables

fn env_get_default(key string, def string) string
Returns the requested environment variable if it exists or returns the provided default value if it does not

fn env_set(args EnvSet)
Sets an environment variable if it was not set before; it overwrites the environment variable if it exists and overwrite was set to true (default)

fn env_set_all(args EnvSetAll)
Allows setting multiple environment variables in one go; if clear_before_set is true all existing environment variables will be unset before the operation, if overwrite_if_exists is set to true it will overwrite all existing environment variables

fn env_unset(key string)
Unsets an environment variable

fn env_unset_all()
Unsets all environment variables

fn exec(cmd Command) !Job
cmd is the command to execute, it can use ' ' and spaces. If there is a \n in cmd it is written to a file and then executed with bash. If die==false it will just return returncode and output but not return an error. If stdout is set it will show stderr and stdout. If cmd starts with find or ls it is given to bash -c so it can execute. If cmd has no path, the path will be found. Command argument:

```
name string // to give a name to your command, good to see logs...
cmd string
description string
timeout int = 3600 // timeout in sec
stdout bool = true
stdout_log bool = true
raise_error bool = true // if false, will not raise an error but still error report
ignore_error bool // means if error will just exit and not raise, there will be no error reporting
work_folder string // location where cmd will be executed
environment map[string]string // env variables
ignore_error_codes []int
scriptpath string // is the path where the script will be put which is executed
scriptkeep bool // means we don't remove the script
debug bool // if debug will put +ex in the script which is being executed and will make sure script stays
shell bool // means we will execute it in an interactive shell
retry int
interactive bool = true
async bool
runtime RunTime // (.bash, .python)

returns Job:
start time.Time
end time.Time
cmd Command
output []string
error []string
exit_code int
status JobStatus
process os.Process
```

returns a Job.

fn exec_string(cmd Command) !string
cmd is the command to execute, it can use ' ' and spaces. If there is a \n in cmd it is written to a file and then executed with bash. If die==false it will just return returncode and output but not return an error. If stdout is set it will show stderr and stdout.

If cmd starts with find or ls it is given to bash -c so it can execute. If cmd has no path, the path will be found. $... are replaced by environment arguments TODO: implement

Command argument: cmd string, timeout int = 600, stdout bool = true, die bool = true, debug bool

returns what needs to be executed, can be given to bash -c ...

fn execute_debug(cmd string) !string

fn execute_interactive(cmd string) !
shortcut to execute a job interactively, meaning in a shell

fn execute_ok(cmd string) bool
executes a cmd, returns true if there was no error

fn execute_silent(cmd string) !string
shortcut to execute a job silently

fn execute_stdout(cmd string) !string
shortcut to execute a job to stdout

fn file_read(path string) !string

fn file_write(path string, text string) !

fn get_logger() log.Logger
Returns a logger object; the debug level can be set via the OSAL_LOG_LEVEL environment variable

fn hero_path() !string

fn hostname() !string

fn initname() !string
e.g. systemd, bash, zinit

fn ipaddr_pub_get() !string
Returns the ip address as known on the public side, uses resolver4.opendns.com

fn is_linux() bool

fn is_linux_arm() bool

fn is_linux_intel() bool

fn is_osx() bool

fn is_osx_arm() bool

fn is_osx_intel() bool

fn is_ubuntu() bool

fn load_env_file(file_path string) !

fn memdb_exists(key string) bool

fn memdb_get(key string) string

fn memdb_set(key string, val string)

fn package_install(name_ string) !
install a package, will use the right commands per platform

fn package_refresh() !
update the package list

fn ping(args PingArgs) PingResult
ping means we check if the destination responds; if it is reached within the timeout the result is ok; address is e.g. 8.8.8.8

fn platform() PlatformType

fn platform_enum_from_string(platform string) PlatformType

fn process_exists(pid int) bool

fn process_exists_byname(name string) !bool

fn process_kill_recursive(args ProcessKillArgs) !
kill a process and all the ones underneath it

fn processinfo_children(pid int) !ProcessMap
get all children of 1 process

fn processinfo_get(pid int) !ProcessInfo
get process info from 1 specific process, returns

```
pub struct ProcessInfo {
pub mut:
    cpu_perc f32
    mem_perc f32
    cmd      string
    pid      int
    ppid     int
    // resident memory
    rss      int
}
```

fn processinfo_get_byname(name string) ![]ProcessInfo

fn processinfo_with_children(pid int) !ProcessMap
return the process and its children

fn processmap_get() !ProcessMap
make sure to use new first, so that the connection has been initted, then you can get it everywhere

fn profile_path() string

fn profile_path_add(args ProfilePathAddArgs) !
add the given path to a profile

fn profile_path_add_hero() !string

fn profile_path_source() string
return the source statement if the profile exists

fn profile_path_source_and() string
return `source $path &&` or empty if it doesn't exist

fn sleep(duration int)
sleep in seconds

fn tcp_port_test(args TcpPortTestArgs) bool
test if a tcp port answers

```
address string // 192.168.8.8
port    int = 22
timeout u16 = 2000 // total time in milliseconds to keep on trying
```

fn user_add(args UserArgs) !int
adds a user if the user does not exist yet

fn user_exists(username string) bool

fn user_id_get(username string) !int

fn usr_local_path() !string
/usr/local on linux, ${os.home_dir()}/hero on osx

fn whoami() !string

fn write_flags[T](options T) string

```vlang
enum CPUType {
    unknown
    intel
    arm
    intel32
    arm32
}

enum ErrorType {
    exec
    timeout
    args
}

enum JobStatus {
    init
    running
    error_exec
    error_timeout
    error_args
    done
}

enum PMState {
    init
    ok
    old
}

enum PingResult {
    ok
    timeout     // timeout from ping
    unknownhost // means we don't know the hostname, it is a dns issue
}

enum PlatformType {
    unknown
    osx
    ubuntu
    alpine
    arch
    suse
}

enum RunTime {
    bash
    python
    heroscript
    herocmd
    v
}

struct CmdAddArgs {
pub mut:
    cmdname string
    source  string @[required] // path where the binary is
    symlink bool // if rather than copy do a symlink
    reset   bool // if existing cmd will delete
    // bin_repo_url string = 'https://github.com/freeflowuniverse/freeflow_binary' // binary where we put the results
}

struct Command {
pub mut:
    name               string // to give a name to your command, good to see logs...
    cmd                string
    description        string
    timeout            int = 3600 // timeout in sec
    stdout             bool = true
    stdout_log         bool = true
    raise_error        bool = true // if false, will not raise an error but still error report
    ignore_error       bool // means if error will just exit and not raise, there will be no error reporting
    work_folder        string // location where cmd will be executed
    environment        map[string]string // env variables
    ignore_error_codes []int
    scriptpath         string // is the path where the script will be put which is executed
    scriptkeep         bool // means we don't remove the script
    debug              bool // if debug will put +ex in the script which is being executed and will make sure script stays
    shell              bool // means we will execute it in an interactive shell
    retry              int
    interactive        bool = true
    async              bool
    runtime            RunTime
}

struct DownloadArgs {
pub mut:
    name        string // optional (otherwise derived out of filename)
    url         string
    reset       bool // will remove
    hash        string // if hash is known, will verify what hash is
    dest        string // if specified will copy to that destination
    timeout     int = 180
    retry       int = 3
    minsize_kb  u32 = 10 // is always in kb
    maxsize_kb  u32
    expand_dir  string
    expand_file string
}

struct EnvSet {
pub mut:
    key       string @[required]
    value     string @[required]
    overwrite bool = true
}

struct EnvSetAll {
pub mut:
    env                 map[string]string
    clear_before_set    bool
    overwrite_if_exists bool = true
}

struct Job {
pub mut:
    start     time.Time
    end       time.Time
    cmd       Command
    output    string
    error     string
    exit_code int
    status    JobStatus
    process   ?&os.Process @[skip; str: skip]
    runnr     int // nr of time it runs, is for retry
}
```

fn (mut job Job) execute_retry() !
execute the job and wait for the result, will retry as specified

fn (mut job Job) execute() !
execute the job and start the process, the process will not be closed. Important: you need to close the process later with job.close()! otherwise we get zombie processes

fn (mut job Job) wait() !
wait till the job finishes or goes into error

fn (mut job Job) process() !
process (read std.err and std.out of the process)

fn (mut job Job) close() !
will wait & close

```vlang
struct JobError {
    Error
pub mut:
    job        Job
    error_type ErrorType
}

struct PingArgs {
pub mut:
    address string @[required]
    count   u8 = 1 // the ping is successful if it got count amount of replies from the other side
    timeout u16 = 1 // the time in which the other side should respond, in seconds
    retry   u8
}

struct ProcessInfo {
pub mut:
    cpu_perc f32
    mem_perc f32
    cmd      string
    pid      int
    ppid     int // parent pid
    // resident memory
    rss      int
}
```

fn (mut p ProcessInfo) str() string

```vlang
struct ProcessKillArgs {
pub mut:
    name string
    pid  int
}

struct ProcessMap {
pub mut:
    processes []ProcessInfo
    lastscan  time.Time
    state     PMState
    pids      []int
}

struct ProfilePathAddArgs {
pub mut:
    path     string @[required]
    todelete string // see which one to remove
}

struct TcpPortTestArgs {
pub mut:
    address string @[required] // 192.168.8.8
    port    int = 22
    timeout u16 = 2000 // total time in milliseconds to keep on trying
}

struct UserArgs {
pub mut:
    name string @[required]
}
```

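A short usage sketch that pulls a few of the helpers above together; the command, variable and address values are only illustrative, and the signatures are the ones listed in this section:

```v
import freeflowuniverse.herolib.osal

// environment helpers
home := osal.env_get_default('HOME', '/root')
osal.env_set(key: 'MY_FLAG', value: '1')

// check a binary exists and run a command silently
if osal.cmd_exists('git') {
	out := osal.execute_silent('git --version')!
	println(out)
}

// test if something listens on a port (args follow TcpPortTestArgs)
if osal.tcp_port_test(address: '127.0.0.1', port: 22, timeout: 1000) {
	println('ssh is reachable')
}

println(home)
```
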
aiprompts/ai_instruct/documentation_from_v.md (new file, 52 lines)
@@ -0,0 +1,52 @@

params:

- filepath: /Users/despiegk/code/github/freeflowuniverse/herolib/lib/clients/openai

make a dense overview of the code above, easy to understand for AI

the result is 1 markdown file called codeoverview.md and is stored in $filepath

try to figure out which functions are more important and which are less important, so that the most important functions are at the top of the section you are working on

the template is as follows

```md
# the name of the module

2-5 line description

## factory

is there a factory, which one, and a quick example of how to call it; don't say in which file, that is not relevant
show how to import the module as follows: import freeflowuniverse.herolib.
and then starting from lib, e.g. lib/clients/mycelium would result in import freeflowuniverse.herolib.clients.mycelium

## overview

quick overview as a list with indentation, of the structs and their methods

## structs

### structname

now list the methods & arguments, for arguments use a table

for each method show the arguments needed to call the method, and what it returns

### methods

- if there are any methods on module level
- only show public methods, don't show the get/set/exists methods on module level as part of the factory.

```

don't mention what we don't show because of the rules above.

the only output we want is a markdown file as follows

===WRITE===
$filepath
===CONTENT===
$the content of the generated markdown file
===END===

aiprompts/ai_instruct/documentation_from_v_md.md (new file, 22 lines)
@@ -0,0 +1,22 @@

remove all navigation elements, and the index
for each method, move the args as used in the method to the method section so it is easier to read

start of output file is:

# the name of the module

2-5 line description

## factory

is there a factory, which one, and a quick example of how to call it; don't say in which file, that is not relevant
show how to import the module as follows: import freeflowuniverse.herolib.
and then starting from lib, e.g. lib/clients/mycelium would result in import freeflowuniverse.herolib.clients.mycelium

## structs and methods

quick overview as a list with indentation, of the structs and their methods

ONLY OUTPUT THE MARKDOWN FILE, NOTHING ELSE

aiprompts/ai_instruct/models_from_v/complete.md (new file, 2479 lines; diff suppressed because it is too large)

aiprompts/ai_instruct/models_from_v/generics.v (new file, 18 lines)
@@ -0,0 +1,18 @@

in hero.db

make a generic function which takes any of the root objects (which inherit from Base)

and gets a json from it; add a save() function to store it in postgresql (see postgresql client)
and also a get which deserializes it

the json is put in a table as follows

tablename: $dirname_$rootobjectname all lowercase

each table has

- id
- ... the fields which represent indexes (see @[index])
- data which is the json

for information on how to use generics see aiprompts/v_advanced/generics.md and aiprompts/v_advanced/reflection.md

aiprompts/ai_instruct/models_from_v/prompt.md (new file, 45 lines)
@@ -0,0 +1,45 @@

$NAME = calendar

walk over all models from biz: db/heromodels/src/models/$NAME in the rust repo
create nicely structured public models in Vlang (V), see the instructions in herolib

put the results in /Users/despiegk/code/github/freeflowuniverse/herolib/lib/hero/models/$NAME

put a decorator on fields which need to be indexed: use @[index] for that at the end of the line of the property of the struct

copy the documentation as well and put it on the vstruct and on its fields

make instructions so a coding agent can execute it, put the models in files, ...

keep it all simple

don't do anything additional for modules, don't do imports

at the top of each file we have ```module $NAME```

make sure all time related fields are in u64 format, use a unix timestamp for that

don't create management classes, only output the structs, don't create a mod.v, don't make .v scripts executable, don't create a main.v

## now also make sure we use core.base as follows

```
import freeflowuniverse.herolib.hero.models.core

// Account represents a financial account for tracking balances and transactions
// Supports multiple account types (checking, savings, investment, etc.)
pub struct Account {
    core.Base
```

remove Local BaseModel

make sure module ... is always the first line of the file

- remove id from the model we update because it is in the Base
- created_at u64 // Creation timestamp
- updated_at u64 // Last modification timestamp
- basically each property in the Base should be removed from the model

aiprompts/ai_instruct/models_from_v/readme.md (new file, 1 line)
@@ -0,0 +1 @@

Kimi k2 on groq is doing well

aiprompts/ai_instruct/models_from_v/update.md (new file, 20 lines)
@@ -0,0 +1,20 @@

in lib/hero/models
for governance and legal

make sure we use core.base as follows

import freeflowuniverse.herolib.hero.models.core

// Account represents a financial account for tracking balances and transactions
// Supports multiple account types (checking, savings, investment, etc.)
pub struct Account {
    core.Base

remove Local BaseModel

make sure module ... is always the first line of the file

- remove id from the model we update because it is in the Base
- created_at u64 // Creation timestamp
- updated_at u64 // Last modification timestamp
- basically each property in the Base should be removed from the model

aiprompts/ai_instruct/processing/heroscript.md (new file, 109 lines)
@@ -0,0 +1,109 @@

## INTENT

we use heroscript to communicate actions and events in a structured format.
we want you to parse user intents and generate the corresponding heroscript.

ONLY RETURN THE HEROSCRIPT STATEMENTS, there can be more than 1

## HEROSCRIPT FORMAT

HeroScript is a concise scripting language with the following structure:

```heroscript
!!actor.action_name
    param1: 'value1'
    param2: 'value with spaces'
    multiline_description: '
      This is a multiline description.
      It can span multiple lines.
      '
    arg1 arg2 // Arguments without keys

!!actor.action_name2 param1:something param2:'something with spaces' nr:3
```

Key characteristics:

- **Actions**: Start with `!!`, followed by `actor.action_name` (e.g., `!!mailclient.configure`).
- **Parameters**: Defined as `key:value`. Values can be quoted for spaces.
- **Multiline Support**: Parameters like `description` can span multiple lines.
- **Arguments**: Values without keys (e.g., `arg1`).
- params can be on 1 line, with spaces in between
- time can be given as +1h, +1d, +1w (hour, day, week); of course 1 can be any number, +1h means 1 hour from now
- time format is: dd/mm/yyyy hh:mm (ONLY USE THIS)
- comma separation is used a lot in arguments e.g. 'jan,kristof' or 'jan , kristof'; remove spaces, it is a list of 2
- note that only !! is at the start of a line, the rest is indented with spaces per instruction
- leave one empty line between heroscript statements
- everything after // is a comment

## HEROSCRIPT SCHEMA

the language we understand

### calendar management

```heroscript
!!calendar.create when:'+1h' descr:'this is event to discuss eng' attendees:'jan,kristof' name:'meet1' tags:'eng,urgent'
!!calendar.delete name:'meet1'
!!calendar.list tags:'urgent'
```

### contact management

```heroscript
!!contact.add name:'jan' email:'jan@example.com' phone:'123-456-7890'
!!contact.remove name:'jan'
!!contact.list
```

### task management

```heroscript
!!task.create title:'Prepare presentation' due:'+1d' assignee:'jan' name:'task1' tags:'eng,urgent'
    deadline:'+10d' duration:'1h'
!!task.update name:'task1' status:'in progress'
!!task.delete name:'task1'
!!task.list
```

### project management

```heroscript
!!project.create title:'Cloud Product Development' description:'Track progress of cloud product development' name:'cloud_prod'
!!project.update name:'cloud_prod' status:'in progress'
!!project.delete name:'cloud_prod'
!!project.list
!!project.tasks_list name:'cloud_prod' // required properties are name, description and assignee; if not given, ask
!!project.tasks_add names:'task1, task2'
!!project.tasks_remove names:'task1, task2'
```

### SUPPORTED TAGS

the only tags supported are:

- for intent: eng, prod, support, mgmt, marketing
- for urgency: urgent, high, medium, low

### generic remarks

- names are lowercase and snake_case, and can be distilled out of the title if only the title is given; often a user will say name but mean the title
- time: the format of a returned date or time is always dd/mm/yyyy hh:mm

## IMPORTANT STARTING INFO

- current time is 10/08/2025 05:10, use this to define any time-related parameters

## USER INTENT

I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high

also let me know which other meetings I have which are urgent

can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'

Please add tasks to the project in line with creating specifications, design documents, and implementation plans.

aiprompts/ai_instruct/processing/heroscript2.md (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
SYSTEM
|
||||
You are a HeroScript compiler. Convert user intents into valid HeroScript statements.
|
||||
|
||||
OUTPUT RULES
|
||||
|
||||
1) Return ONLY HeroScript statements. No prose, no backticks.
|
||||
2) Separate each statement with exactly ONE blank line.
|
||||
3) Keys use snake_case. Names are lowercase snake_case derived from titles (non-alnum → "_", collapse repeats, trim).
|
||||
4) Lists are comma-separated with NO spaces (e.g., "jan,alex").
|
||||
5) Times: OUTPUT MUST BE ABSOLUTE in "dd/mm/yyyy hh:mm" (Europe/Zurich). Convert relative times (e.g., "tomorrow 10am") using CURRENT_TIME.
|
||||
6) Tags: include at most one intent tag and at most one urgency tag when present.
|
||||
- intent: eng,prod,support,mgmt,marketing
|
||||
- urgency: urgent,high,medium,low
|
||||
7) Quotes: quote values containing spaces; otherwise omit quotes (allowed either way).
|
||||
8) Comments only with // if the user explicitly asks for explanations; otherwise omit.
|
||||
|
||||
SCHEMA (exact actions & parameters)
|
||||
|
||||
!!calendar.create when:'dd/mm/yyyy hh:mm' name:'<name>' descr:'<text>' attendees:'a,b,c' tags:'intent,urgency'
|
||||
!!calendar.delete name:'<name>'
|
||||
!!calendar.list [tags:'tag1,tag2']
|
||||
|
||||
!!contact.add name:'<name>' email:'<email>' phone:'<phone>'
|
||||
!!contact.remove name:'<name>'
|
||||
!!contact.list
|
||||
|
||||
!!task.create title:'<title>' name:'<name>' [due:'dd/mm/yyyy hh:mm'] [assignee:'<name>'] [tags:'intent,urgency'] [deadline:'dd/mm/yyyy hh:mm'] [duration:'<Nd Nh Nm> or <Nh>']
|
||||
!!task.update name:'<name>' [status:'in progress|done|blocked|todo']
|
||||
!!task.delete name:'<name>'
|
||||
!!task.list
|
||||
|
||||
!!project.create title:'<title>' description:'<text>' name:'<name>'
|
||||
!!project.update name:'<name>' [status:'in progress|done|blocked|todo']
|
||||
!!project.delete name:'<name>'
|
||||
!!project.list
|
||||
!!project.tasks_list name:'<project_name>'
|
||||
!!project.tasks_add name:'<project_name>' names:'task_a,task_b'
|
||||
!!project.tasks_remove name:'<project_name>' names:'task_a,task_b'
|
||||
|
||||
NORMALIZATION & INFERENCE (silent)
|
||||
- Derive names from titles when missing (see rule 3). Ensure consistency across statements.
|
||||
- Map phrases to tags when obvious (e.g., "new product" ⇒ intent: prod; "high priority" ⇒ urgency: high).
|
||||
- Attendees: split on commas, trim, lowercase given names.
|
||||
- If the user asks for “urgent meetings,” use tags:'urgent' specifically.
|
||||
- Prefer concise descriptions pulled from the user’s phrasing.
|
||||
- Name's are required, if missing ask for clarification.
|
||||
- For calendar management, ensure to include all relevant details such as time, attendees, and description.
|
||||
|
||||
|
||||
CURRENT_TIME
|
||||
|
||||
10/08/2025 05:10
|
||||
|
||||
USER_MESSAGE
|
||||
|
||||
I want a meeting tomorrow 10am, where we will discuss our new product for the cloud with jan and alex, and the urgency is high
|
||||
|
||||
also let me know which other meetings I have which are urgent
|
||||
|
||||
can you make a project where we can track the progress of our new product development? Name is 'Cloud Product Development'
|
||||
|
||||
Please add tasks to the project in line to creating specifications, design documents, and implementation plans.
|
||||
|
||||
END
|
||||
aiprompts/ai_instruct/processing/intent.md (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
## INSTRUCTIONS
|
||||
|
||||
the user will send me multiple instructions what they wants to do, I want you to put them in separate categories
|
||||
|
||||
The categories we have defined are:
|
||||
|
||||
- calendar management
|
||||
- schedule meetings, events, reminders
|
||||
- list these events
|
||||
- delete them
|
||||
- contact management
|
||||
- add/remove contact information e.g. phone numbers, email addresses, address information
|
||||
- list contacts, search
|
||||
- task or project management
|
||||
- anything we need to do, anything we need to track and plan
|
||||
- create/update tasks, set deadlines
|
||||
- mark tasks as complete
|
||||
- delete tasks
|
||||
- project management
|
||||
- communication (chat, email)
|
||||
- see what needs to be communicate e.g. send a chat to ...
|
||||
- search statements
|
||||
- find on internet, find specific information from my friends
|
||||
|
||||
I want you to detect the intent and make multiple blocks out of the intent, each block should correspond to one of the identified intents, identify the intent with name of the category eg. calendar, only use above names
|
||||
|
||||
|
||||
|
||||
what user wants to do, stay as close as possible to the original instructions, copy the exact instructions as where given by the user, we only need to sort the instructions in these blocks
|
||||
|
||||
for each instruction make a separate block, e.g. if 2 tasks are given, create 2 blocks
|
||||
|
||||
the format to return is: (note newline after each title of block)
|
||||
|
||||
```template
|
||||
===CALENDAR===\n
|
||||
|
||||
$the copied text from what user wants
|
||||
|
||||
===CONTACT===\n
|
||||
...
|
||||
|
||||
===QUESTION===\n
|
||||
|
||||
put here what our system needs to ask to the user anything which is not clear
|
||||
|
||||
===END===\n
|
||||
|
||||
```
|
||||
|
||||
I want you to execute above on instructions as given by user below, give text back ONLY supporting the template
|
||||
|
||||
note for format is only ===$NAME=== and then on next lines the original instructions from the user, don't change
|
||||
|
||||
## special processing of info
|
||||
|
||||
- if a date or time specified e.g. tomorrow, time, ... calculate back from current date
|
||||
|
||||
## IMPORTANT STARTING INFO
|
||||
|
||||
- current time is 10/08/2025 05:10 (format of returned data is always dd/mm/yyyy hh:min)
|
||||
- use the current time to define formatted time out of instructions
|
||||
- only return the formatted time
|
||||
|
||||
## UNCLEAR INFO
|
||||
|
||||
check in instructions e.g. things specified like you, me, ...
|
||||
are not clear ask specifically who do you mean
|
||||
|
||||
if task, specify per task, who needs to do it and when, make sure each instruction (block) is complete and clear for further processing
|
||||
|
||||
be very specific with the questions e.g. who is you, ...
|
||||
|
||||
## EXECUTE ABOVE ON THE FOLLOWING
|
||||
|
||||
I am planning a birthday for my daughters tomorrow, there will be 10 people.
|
||||
|
||||
I would like to know if you can help me with the preparations.
|
||||
|
||||
I need a place for my daughter's birthday party.
|
||||
|
||||
I need to send message to my wife isabelle that she needs to pick up the cake.
|
||||
aiprompts/ai_instruct/prompt_processing_instructions.md (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
We have our own instruction language called heroscript, below you will find details how to use it.
|
||||
|
||||
## heroscript
|
||||
|
||||
|
||||
Heroscript is our small scripting language which is used for communicating with our digital tools like calendar management.
|
||||
|
||||
which has following structure
|
||||
|
||||
```heroscript
|
||||
|
||||
!!calendar.event_add
|
||||
title: 'go to dentist'
|
||||
start: '2025/03/01'
|
||||
description: '
|
||||
a description can be multiline
|
||||
|
||||
like this
|
||||
'
|
||||
|
||||
!!calendar.event_delete
|
||||
title: 'go to dentist'
|
||||
|
||||
```
|
||||
|
||||
- the format is !!$actor.$action (there is no space before !!)
|
||||
- every parameter comes on next line with spaces in front (4 spaces, always use 4 spaces, dont make variation)
|
||||
- every actor.action starts with !!
|
||||
- the first part is the actor e.g. calendar in this case
|
||||
- the 2e part is the action name
|
||||
- multilines are supported see the description field
|
||||
|
||||
below you will find the instructions for different actors, comments how to use it are behind # which means not part of the the definition itself
|
||||
|
||||
## remarks on parameters used
|
||||
|
||||
- date
|
||||
- format of the date is yyyy/mm/dd hh:mm:ss
|
||||
- +1h means 1 hour later than now
|
||||
- +1m means 1 min later than now
|
||||
- +1d means 1 day later than now
|
||||
- same for -1h, -1m, -1d
|
||||
- money expressed as
|
||||
- $val $cursymbol
|
||||
- $cursymbol is 3 letters e.g. USD, capital
|
||||
- lists are comma separated and '...' around
|
||||
|
||||
|
||||
## generic instructions
|
||||
|
||||
- do not add information if not specifically asked for
|
||||
|
||||
|
||||
## circle
|
||||
|
||||
every actor action happens in a circle, a user can ask to switch circles, command available is
|
||||
|
||||
```
|
||||
!!circle.switch
|
||||
name: 'project x'
|
||||
|
||||
```
|
||||
|
||||
## calendar
|
||||
|
||||
```heroscript
|
||||
|
||||
!!calendar.event_add
|
||||
title: 'go to dentist'
|
||||
start: '2025/03/01'
|
||||
end: '+1h' #if + notation used is later than the start
|
||||
description: '
|
||||
a description can be multiline
|
||||
|
||||
like this
|
||||
'
|
||||
attendees: 'tim, rob'
|
||||
|
||||
!!calendar.event_delete
|
||||
title: 'go to dentist'
|
||||
|
||||
```
|
||||
|
||||
## NOW DO ONE
|
||||
|
||||
schedule event tomorrow 10 am, for 1h, with tim & rob, we want to product management threefold
|
||||
now is friday jan 17
|
||||
|
||||
only give me the instructions needed, only return the heroscript no text around
|
||||
|
||||
if not clear enough ask the user for more info
|
||||
|
||||
if not sure do not invent, only give instructions as really asked for
|
||||
aiprompts/ai_instruct/prompt_processing_openrpc_like.md (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
|
||||
|
||||
# how to manage my agenda
|
||||
|
||||
## Metadata for function calling
|
||||
|
||||
functions_metadata = [
|
||||
{
|
||||
"name": "event_add",
|
||||
"description": "Adds a calendar event.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {"type": "string", "description": "Title of the event."},
|
||||
"start": {"type": "string", "description": "Start date and time in 'YYYY/MM/DD hh:mm' format."},
|
||||
"end": {"type": "string", "description": "End date or duration (e.g., +2h)."},
|
||||
"description": {"type": "string", "description": "Event description."},
|
||||
"attendees": {"type": "string", "description": "Comma-separated list of attendees' emails."},
|
||||
},
|
||||
"required": ["title", "start"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "event_delete",
|
||||
"description": "Deletes a calendar event by title.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {"type": "string", "description": "Title of the event to delete."},
|
||||
},
|
||||
"required": ["title"]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
## example call
|
||||
|
||||
{
|
||||
"function": "event_add",
|
||||
"parameters": {
|
||||
"title": "Team Sync",
|
||||
"start": "2025/02/01 10:00",
|
||||
"end": "+1h",
|
||||
"description": "",
|
||||
"attendees": "alice@example.com, bob@example.com"
|
||||
}
|
||||
}
|
||||
|
||||
## how to use
|
||||
|
||||
Parse the user query to determine intent (e.g., "schedule" maps to event_add, "cancel" maps to event_delete).
|
||||
|
||||
Extract required parameters (e.g., title, start date).
|
||||
|
||||
Invoke the appropriate function with the extracted parameters.
|
||||
|
||||
Return the function's result as the response.
|
||||
|
||||
aiprompts/ai_instruct/uppy/fastapi.md (new file, 16344 lines; diff suppressed because it is too large)

aiprompts/ai_instruct/uppy/fastapi_mcp.md (new file, 1544 lines; diff suppressed because it is too large)

aiprompts/ai_instruct/uppy/tus.md (new file, 225 lines)
@@ -0,0 +1,225 @@
|
||||
# tus Resumable Upload Protocol (Condensed for Coding Agents)
|
||||
|
||||
## Core Protocol
|
||||
|
||||
All Clients and Servers MUST implement the core protocol for resumable uploads.
|
||||
|
||||
### Resuming an Upload
|
||||
|
||||
1. **Determine Offset (HEAD Request):**
|
||||
* **Request:**
|
||||
```
|
||||
HEAD /files/{upload_id} HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 200 OK
|
||||
Upload-Offset: {current_offset}
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* Server MUST include `Upload-Offset`.
|
||||
* Server MUST include `Upload-Length` if known.
|
||||
* Server SHOULD return `200 OK` or `204 No Content`.
|
||||
* Server MUST prevent caching: `Cache-Control: no-store`.
|
||||
|
||||
2. **Resume Upload (PATCH Request):**
|
||||
* **Request:**
|
||||
```
|
||||
PATCH /files/{upload_id} HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Type: application/offset+octet-stream
|
||||
Content-Length: {chunk_size}
|
||||
Upload-Offset: {current_offset}
|
||||
Tus-Resumable: 1.0.0
|
||||
|
||||
[binary data chunk]
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Offset: {new_offset}
|
||||
```
|
||||
* `Content-Type` MUST be `application/offset+octet-stream`.
|
||||
* `Upload-Offset` in request MUST match server's current offset (else `409 Conflict`).
|
||||
* Server MUST acknowledge with `204 No Content` and `Upload-Offset` (new offset).
|
||||
* Server SHOULD return `404 Not Found` for non-existent resources.
|
||||
|
||||
### Common Headers
|
||||
|
||||
* **`Upload-Offset`**: Non-negative integer. Byte offset within resource.
|
||||
* **`Upload-Length`**: Non-negative integer. Total size of upload in bytes.
|
||||
* **`Tus-Version`**: Comma-separated list of supported protocol versions (Server response).
|
||||
* **`Tus-Resumable`**: Protocol version used (e.g., `1.0.0`). MUST be in every request/response (except `OPTIONS`). If client version unsupported, server responds `412 Precondition Failed` with `Tus-Version`.
|
||||
* **`Tus-Extension`**: Comma-separated list of supported extensions (Server response). Omitted if none.
|
||||
* **`Tus-Max-Size`**: Non-negative integer. Max allowed upload size in bytes (Server response).
|
||||
* **`X-HTTP-Method-Override`**: String. Client MAY use to override HTTP method (e.g., for `PATCH`/`DELETE` limitations).
|
||||
|
||||
### Server Configuration (OPTIONS Request)
|
||||
|
||||
* **Request:**
|
||||
```
|
||||
OPTIONS /files HTTP/1.1
|
||||
Host: tus.example.org
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Tus-Resumable: 1.0.0
|
||||
Tus-Version: 1.0.0,0.2.2,0.2.1
|
||||
Tus-Max-Size: 1073741824
|
||||
Tus-Extension: creation,expiration
|
||||
```
|
||||
* Response MUST contain `Tus-Version`. MAY include `Tus-Extension` and `Tus-Max-Size`.
|
||||
* Client SHOULD NOT include `Tus-Resumable` in request.
|
||||
|
||||
## Protocol Extensions
|
||||
|
||||
Clients SHOULD use `OPTIONS` request and `Tus-Extension` header for feature detection.
|
||||
|
||||
### Creation (`creation` extension)
|
||||
|
||||
Create a new upload resource. Server MUST add `creation` to `Tus-Extension`.
|
||||
|
||||
* **Request (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Length: 0
|
||||
Upload-Length: {total_size} OR Upload-Defer-Length: 1
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Metadata: filename {base64_filename},is_confidential
|
||||
```
|
||||
* MUST include `Upload-Length` or `Upload-Defer-Length: 1`.
|
||||
* If `Upload-Defer-Length: 1`, client MUST set `Upload-Length` in subsequent `PATCH`.
|
||||
* `Upload-Length: 0` creates an immediately complete empty file.
|
||||
* Client MAY supply `Upload-Metadata` (key-value pairs, value Base64 encoded).
|
||||
* If `Upload-Length` exceeds `Tus-Max-Size`, server responds `413 Request Entity Too Large`.
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 201 Created
|
||||
Location: {upload_url}
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* Server MUST respond `201 Created` and set `Location` header to new resource URL.
|
||||
* New resource has implicit offset `0` (see the creation sketch after this list).
|
||||
|
||||
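A minimal client-side sketch of this creation exchange, illustrative only; it assumes the `requests` library and a placeholder endpoint URL:

```python
import requests

resp = requests.post(
    "https://tus.example.org/files",
    headers={
        "Tus-Resumable": "1.0.0",
        "Upload-Length": "1024",      # or "Upload-Defer-Length": "1" if the size is unknown
    },
)
assert resp.status_code == 201
upload_url = resp.headers["Location"]  # new resource; implicit offset 0
```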
#### Headers
|
||||
|
||||
* **`Upload-Defer-Length`**: `1`. Indicates upload size is unknown. Server adds `creation-defer-length` to `Tus-Extension` if supported.
|
||||
* **`Upload-Metadata`**: Comma-separated `key value` pairs. Key: no spaces/commas, ASCII. Value: Base64 encoded. See the encoding sketch below.
|
||||
|
||||
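A small helper sketch for building and parsing `Upload-Metadata`; the function names are illustrative, not from any library:

```python
import base64

def encode_metadata(pairs: dict) -> str:
    parts = []
    for key, value in pairs.items():
        if value:
            parts.append(f"{key} {base64.b64encode(value.encode('utf-8')).decode('ascii')}")
        else:
            parts.append(key)   # value-less key, e.g. "is_confidential"
    return ",".join(parts)

def decode_metadata(header: str) -> dict:
    out = {}
    for item in header.split(","):
        key, _, b64 = item.strip().partition(" ")
        out[key] = base64.b64decode(b64).decode("utf-8") if b64 else ""
    return out

encode_metadata({"filename": "foo.txt", "is_confidential": ""})
# -> 'filename Zm9vLnR4dA==,is_confidential'
```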
### Creation With Upload (`creation-with-upload` extension)
|
||||
|
||||
Include initial upload data in the `POST` request. Server MUST add `creation-with-upload` to `Tus-Extension`. Depends on `creation` extension.
|
||||
|
||||
* **Request (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Length: {initial_chunk_size}
|
||||
Upload-Length: {total_size}
|
||||
Tus-Resumable: 1.0.0
|
||||
Content-Type: application/offset+octet-stream
|
||||
Expect: 100-continue
|
||||
|
||||
[initial binary data chunk]
|
||||
```
|
||||
* Similar rules as `PATCH` apply for content.
|
||||
* Client SHOULD include `Expect: 100-continue`.
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 201 Created
|
||||
Location: {upload_url}
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Offset: {accepted_offset}
|
||||
```
|
||||
* Server MUST include `Upload-Offset` with accepted bytes.
|
||||
|
||||
### Expiration (`expiration` extension)
|
||||
|
||||
Server MAY remove unfinished uploads. Server MUST add `expiration` to `Tus-Extension`.
|
||||
|
||||
* **Response (PATCH/POST):**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Upload-Expires: Wed, 25 Jun 2014 16:00:00 GMT
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Offset: {new_offset}
|
||||
```
|
||||
* **`Upload-Expires`**: Datetime in RFC 9110 format. Indicates when upload expires. Client SHOULD use to check validity (see the parsing sketch below). Server SHOULD respond `404 Not Found` or `410 Gone` for expired uploads.
|
||||
|
||||
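A client-side sketch of the expiry check (illustrative); the HTTP date can be parsed with the standard library:

```python
from datetime import datetime, timezone
from email.utils import parsedate_to_datetime

def is_expired(upload_expires: str) -> bool:
    expires = parsedate_to_datetime(upload_expires)   # e.g. "Wed, 25 Jun 2014 16:00:00 GMT"
    return datetime.now(timezone.utc) >= expires

is_expired("Wed, 25 Jun 2014 16:00:00 GMT")   # -> True, that date is in the past
```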
### Checksum (`checksum` extension)
|
||||
|
||||
Verify data integrity of `PATCH` requests. Server MUST add `checksum` to `Tus-Extension`. Server MUST support `sha1`.
|
||||
|
||||
* **Request (PATCH):**
|
||||
```
|
||||
PATCH /files/{upload_id} HTTP/1.1
|
||||
Content-Length: {chunk_size}
|
||||
Upload-Offset: {current_offset}
|
||||
Tus-Resumable: 1.0.0
|
||||
Upload-Checksum: {algorithm} {base64_checksum}
|
||||
|
||||
[binary data chunk]
|
||||
```
|
||||
* **Response:**
|
||||
* `204 No Content`: Checksums match.
|
||||
* `400 Bad Request`: Algorithm not supported.
|
||||
* `460 Checksum Mismatch`: Checksums mismatch.
|
||||
* In `400`/`460` cases, chunk MUST be discarded, upload/offset NOT updated.
|
||||
* **`Tus-Checksum-Algorithm`**: Comma-separated list of supported algorithms (Server response to `OPTIONS`).
|
||||
* **`Upload-Checksum`**: `{algorithm} {Base64_encoded_checksum}`. See the computation sketch below.
|
||||
|
||||
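A sketch of how a client could compute the `Upload-Checksum` value for one chunk; only `sha1` is shown because servers MUST support it:

```python
import base64
import hashlib

def upload_checksum(chunk: bytes, algorithm: str = "sha1") -> str:
    digest = hashlib.new(algorithm, chunk).digest()
    return f"{algorithm} {base64.b64encode(digest).decode('ascii')}"

# e.g. header value for the chunk b"hello ":
upload_checksum(b"hello ")   # -> 'sha1 <base64 digest of exactly these bytes>'
```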
### Termination (`termination` extension)
|
||||
|
||||
Client can terminate uploads. Server MUST add `termination` to `Tus-Extension`.
|
||||
|
||||
* **Request (DELETE):**
|
||||
```
|
||||
DELETE /files/{upload_id} HTTP/1.1
|
||||
Host: tus.example.org
|
||||
Content-Length: 0
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* **Response:**
|
||||
```
|
||||
HTTP/1.1 204 No Content
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* Server SHOULD free resources, MUST respond `204 No Content`.
|
||||
* Future requests to URL SHOULD return `404 Not Found` or `410 Gone`.
|
||||
|
||||
### Concatenation (`concatenation` extension)
|
||||
|
||||
Concatenate multiple partial uploads into a single final upload. Server MUST add `concatenation` to `Tus-Extension`.
|
||||
|
||||
* **Partial Upload Creation (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Upload-Concat: partial
|
||||
Upload-Length: {partial_size}
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* `Upload-Concat: partial` header.
|
||||
* Server SHOULD NOT process partial uploads until concatenated.
|
||||
* **Final Upload Creation (POST):**
|
||||
```
|
||||
POST /files HTTP/1.1
|
||||
Upload-Concat: final;{url_partial1} {url_partial2} ...
|
||||
Tus-Resumable: 1.0.0
|
||||
```
|
||||
* `Upload-Concat: final;{space-separated_partial_urls}`.
|
||||
* Client MUST NOT include `Upload-Length`.
|
||||
* Final upload length is sum of partials.
|
||||
* Server MAY delete partials after concatenation.
|
||||
* Server MUST respond `403 Forbidden` to `PATCH` requests against final upload.
|
||||
* **`concatenation-unfinished`**: Server adds to `Tus-Extension` if it supports concatenation while partial uploads are in progress.
|
||||
* **HEAD Request for Final Upload:**
|
||||
* Response SHOULD NOT contain `Upload-Offset` unless concatenation finished.
|
||||
* After success, `Upload-Offset` and `Upload-Length` MUST be equal.
|
||||
* Response MUST include `Upload-Concat` header.
|
||||
* **HEAD Request for Partial Upload:**
|
||||
* Response MUST contain `Upload-Offset`.
|
||||
667
aiprompts/ai_instruct/uppy/tus_implementation.md
Normal file
@@ -0,0 +1,667 @@
|
||||
|
||||
# TUS (1.0.0) — Server-Side Specs (Concise)
|
||||
|
||||
## Always
|
||||
|
||||
* All requests/responses **except** `OPTIONS` MUST include: `Tus-Resumable: 1.0.0`.
|
||||
If unsupported → `412 Precondition Failed` + `Tus-Version`.
|
||||
* Canonical server features via `OPTIONS /files`:
|
||||
|
||||
* `Tus-Version: 1.0.0`
|
||||
* `Tus-Extension: creation,creation-with-upload,termination,checksum,concatenation,concatenation-unfinished` (as supported)
|
||||
* `Tus-Max-Size: <int>` (if hard limit)
|
||||
* `Tus-Checksum-Algorithm: sha1[,md5,crc32...]` (if checksum ext.)
|
||||
|
||||
## Core
|
||||
|
||||
* **Create:** `POST /files` with `Upload-Length: <int>` OR `Upload-Defer-Length: 1`. Optional `Upload-Metadata`.
|
||||
|
||||
* `201 Created` + `Location: /files/{id}`, echo `Tus-Resumable`.
|
||||
* *Creation-With-Upload:* If body present → `Content-Type: application/offset+octet-stream`, accept bytes, respond with `Upload-Offset`.
|
||||
* **Status:** `HEAD /files/{id}`
|
||||
|
||||
* Always return `Upload-Offset` for partial uploads, include `Upload-Length` if known; if deferred, return `Upload-Defer-Length: 1`. `Cache-Control: no-store`.
|
||||
* **Upload:** `PATCH /files/{id}`
|
||||
|
||||
* `Content-Type: application/offset+octet-stream` and `Upload-Offset` (must match server).
|
||||
* On success → `204 No Content` + new `Upload-Offset`.
|
||||
* Mismatch → `409 Conflict`. Bad type → `415 Unsupported Media Type`.
|
||||
* **Terminate:** `DELETE /files/{id}` (if supported) → `204 No Content`. Subsequent requests → `404/410`.
|
||||
|
||||
## Checksum (optional but implemented here)
|
||||
|
||||
* Client MAY send: `Upload-Checksum: <algo> <base64digest>` per `PATCH`.
|
||||
|
||||
* Server MUST verify request body’s checksum of the exact received bytes.
|
||||
* If algo unsupported → `400 Bad Request`.
|
||||
* If mismatch → **discard the chunk** (no offset change) and respond `460 Checksum Mismatch`.
|
||||
* If OK → `204 No Content` + new `Upload-Offset`.
|
||||
* `OPTIONS` MUST include `Tus-Checksum-Algorithm` (comma-separated algos).
|
||||
|
||||
## Concatenation (optional but implemented here)
|
||||
|
||||
* **Partial uploads:** `POST /files` with `Upload-Concat: partial` and `Upload-Length`. (MUST have length; may use creation-with-upload/patch thereafter.)
|
||||
* **Final upload:** `POST /files` with
|
||||
`Upload-Concat: final; /files/{a} /files/{b} ...`
|
||||
|
||||
* MUST NOT include `Upload-Length`.
|
||||
* Final uploads **cannot** be `PATCH`ed (`403`).
|
||||
* Server SHOULD assemble final (in order).
|
||||
* If `concatenation-unfinished` supported, final may be created before partials completed; server completes once all partials are done.
|
||||
* **HEAD semantics:**
|
||||
|
||||
* For *partial*: MUST include `Upload-Offset`.
|
||||
* For *final* before concatenation: SHOULD NOT include `Upload-Offset`. `Upload-Length` MAY be present if computable (= sum of partials’ lengths when known).
|
||||
* After finalization: `Upload-Offset == Upload-Length`.
|
||||
|
||||
---
|
||||
|
||||
# TUS FastAPI Server (disk-only, crash-safe, checksum + concatenation)
|
||||
|
||||
**Features**
|
||||
|
||||
* All persistent state on disk:
|
||||
|
||||
```
|
||||
TUS_ROOT/
|
||||
{upload_id}/
|
||||
info.json # canonical metadata & status
|
||||
data.part # exists while uploading or while building final
|
||||
data # final file after atomic rename
|
||||
```
|
||||
* Crash recovery: `HEAD` offset = size of `data.part` or `data`.
|
||||
* `.part` during upload; `os.replace()` (atomic) to `data` on completion.
|
||||
* Streaming I/O; `fsync` on file + parent directory.
|
||||
* Checksum: supports `sha1` (can easily add md5/crc32).
|
||||
* Concatenation: server builds final when partials complete; supports `concatenation-unfinished`.
|
||||
|
||||
> Run with: `uv pip install fastapi uvicorn` then `uvicorn tus_server:app --host 0.0.0.0 --port 8080` (to run `python tus_server.py` directly, add an `if __name__ == "__main__": uvicorn.run(app, host="0.0.0.0", port=8080)` guard and `import uvicorn`).
|
||||
> Set `TUS_ROOT` env to choose storage root.
|
||||
|
||||
```python
|
||||
# tus_server.py
|
||||
from fastapi import FastAPI, Request, Response, HTTPException
|
||||
from typing import Optional, Dict, Any, List
|
||||
import os, json, uuid, base64, asyncio, errno, hashlib
|
||||
|
||||
# -----------------------------
|
||||
# Config
|
||||
# -----------------------------
|
||||
TUS_VERSION = "1.0.0"
|
||||
# Advertise extensions implemented below:
|
||||
TUS_EXTENSIONS = ",".join([
|
||||
"creation",
|
||||
"creation-with-upload",
|
||||
"termination",
|
||||
"checksum",
|
||||
"concatenation",
|
||||
"concatenation-unfinished",
|
||||
])
|
||||
# Supported checksum algorithms (keys = header token)
|
||||
CHECKSUM_ALGOS = ["sha1"] # add "md5" if desired
|
||||
|
||||
TUS_ROOT = os.environ.get("TUS_ROOT", "/tmp/tus")
|
||||
MAX_SIZE = 1 << 40 # 1 TiB default
|
||||
|
||||
os.makedirs(TUS_ROOT, exist_ok=True)
|
||||
app = FastAPI()
|
||||
|
||||
# Per-process locks to prevent concurrent mutations on same upload_id
|
||||
_locks: Dict[str, asyncio.Lock] = {}
|
||||
def _lock_for(upload_id: str) -> asyncio.Lock:
|
||||
if upload_id not in _locks:
|
||||
_locks[upload_id] = asyncio.Lock()
|
||||
return _locks[upload_id]
|
||||
|
||||
# -----------------------------
|
||||
# Path helpers
|
||||
# -----------------------------
|
||||
def upload_dir(upload_id: str) -> str:
|
||||
return os.path.join(TUS_ROOT, upload_id)
|
||||
|
||||
def info_path(upload_id: str) -> str:
|
||||
return os.path.join(upload_dir(upload_id), "info.json")
|
||||
|
||||
def part_path(upload_id: str) -> str:
|
||||
return os.path.join(upload_dir(upload_id), "data.part")
|
||||
|
||||
def final_path(upload_id: str) -> str:
|
||||
return os.path.join(upload_dir(upload_id), "data")
|
||||
|
||||
# -----------------------------
|
||||
# FS utils (crash-safe)
|
||||
# -----------------------------
|
||||
def _fsync_dir(path: str) -> None:
|
||||
fd = os.open(path, os.O_DIRECTORY)
|
||||
try:
|
||||
os.fsync(fd)
|
||||
finally:
|
||||
os.close(fd)
|
||||
|
||||
def _write_json_atomic(path: str, obj: Dict[str, Any]) -> None:
|
||||
tmp = f"{path}.tmp"
|
||||
data = json.dumps(obj, separators=(",", ":"), ensure_ascii=False)
|
||||
with open(tmp, "w", encoding="utf-8") as f:
|
||||
f.write(data)
|
||||
f.flush()
|
||||
os.fsync(f.fileno())
|
||||
os.replace(tmp, path)
|
||||
_fsync_dir(os.path.dirname(path))
|
||||
|
||||
def _read_json(path: str) -> Dict[str, Any]:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
def _size(path: str) -> int:
|
||||
try:
|
||||
return os.path.getsize(path)
|
||||
except FileNotFoundError:
|
||||
return 0
|
||||
|
||||
def _exists(path: str) -> bool:
|
||||
return os.path.exists(path)
|
||||
|
||||
# -----------------------------
|
||||
# TUS helpers
|
||||
# -----------------------------
|
||||
def _ensure_tus_version(req: Request):
|
||||
if req.method == "OPTIONS":
|
||||
return
|
||||
v = req.headers.get("Tus-Resumable")
|
||||
if v is None:
|
||||
raise HTTPException(status_code=412, detail="Missing Tus-Resumable")
|
||||
if v != TUS_VERSION:
|
||||
raise HTTPException(status_code=412, detail="Unsupported Tus-Resumable",
|
||||
headers={"Tus-Version": TUS_VERSION})
|
||||
|
||||
def _parse_metadata(raw: Optional[str]) -> str:
|
||||
# Raw passthrough; validate/consume in your app if needed.
|
||||
return raw or ""
|
||||
|
||||
def _new_upload_info(upload_id: str,
|
||||
kind: str, # "single" | "partial" | "final"
|
||||
length: Optional[int],
|
||||
defer_length: bool,
|
||||
metadata: str,
|
||||
parts: Optional[List[str]] = None) -> Dict[str, Any]:
|
||||
return {
|
||||
"upload_id": upload_id,
|
||||
"kind": kind, # "single" (default), "partial", or "final"
|
||||
"length": length, # int or None if deferred/unknown
|
||||
"defer_length": bool(defer_length),
|
||||
"metadata": metadata, # raw Upload-Metadata header
|
||||
"completed": False,
|
||||
"parts": parts or [], # for final: list of upload_ids (not URLs)
|
||||
}
|
||||
|
||||
def _load_info_or_404(upload_id: str) -> Dict[str, Any]:
|
||||
p = info_path(upload_id)
|
||||
if not _exists(p):
|
||||
raise HTTPException(404, "Upload not found")
|
||||
try:
|
||||
return _read_json(p)
|
||||
except Exception as e:
|
||||
raise HTTPException(500, f"Corrupt metadata: {e}")
|
||||
|
||||
def _set_info(upload_id: str, info: Dict[str, Any]) -> None:
|
||||
_write_json_atomic(info_path(upload_id), info)
|
||||
|
||||
def _ensure_dir(path: str):
|
||||
os.makedirs(path, exist_ok=False)
|
||||
|
||||
def _atomic_finalize_file(upload_id: str):
|
||||
"""Rename data.part → data and mark completed."""
|
||||
upath = upload_dir(upload_id)
|
||||
p = part_path(upload_id)
|
||||
f = final_path(upload_id)
|
||||
if _exists(p):
|
||||
with open(p, "rb+") as fp:
|
||||
fp.flush()
|
||||
os.fsync(fp.fileno())
|
||||
os.replace(p, f)
|
||||
_fsync_dir(upath)
|
||||
info = _load_info_or_404(upload_id)
|
||||
info["completed"] = True
|
||||
_set_info(upload_id, info)
|
||||
|
||||
def _current_offsets(upload_id: str):
|
||||
f, p = final_path(upload_id), part_path(upload_id)
|
||||
if _exists(f):
|
||||
return True, False, _size(f)
|
||||
if _exists(p):
|
||||
return False, True, _size(p)
|
||||
return False, False, 0
|
||||
|
||||
def _parse_concat_header(h: Optional[str]) -> Optional[Dict[str, Any]]:
|
||||
if not h:
|
||||
return None
|
||||
h = h.strip()
|
||||
if h == "partial":
|
||||
return {"type": "partial", "parts": []}
|
||||
if h.startswith("final;"):
|
||||
# format: final;/files/a /files/b
|
||||
rest = h[len("final;"):].strip()
|
||||
urls = [s for s in rest.split(" ") if s]
|
||||
return {"type": "final", "parts": urls}
|
||||
return None
|
||||
|
||||
def _extract_upload_id_from_url(url: str) -> str:
|
||||
# Accept relative /files/{id} (common) — robust split:
|
||||
segs = [s for s in url.split("/") if s]
|
||||
return segs[-1] if segs else url
|
||||
|
||||
def _sum_lengths_or_none(ids: List[str]) -> Optional[int]:
|
||||
total = 0
|
||||
for pid in ids:
|
||||
info = _load_info_or_404(pid)
|
||||
if info.get("length") is None:
|
||||
return None
|
||||
total += int(info["length"])
|
||||
return total
|
||||
|
||||
async def _stream_with_checksum_and_append(file_obj, request: Request, algo: Optional[str]) -> int:
|
||||
"""Stream request body to file, verifying checksum if header present.
|
||||
Returns bytes written. On checksum mismatch, truncate to original size and raise HTTPException(460)."""
|
||||
start_pos = file_obj.tell()
|
||||
# Choose hash
|
||||
hasher = None
|
||||
provided_digest = None
|
||||
if algo:
|
||||
if algo not in CHECKSUM_ALGOS:
|
||||
raise HTTPException(400, "Unsupported checksum algorithm")
|
||||
if algo == "sha1":
|
||||
hasher = hashlib.sha1()
|
||||
# elif algo == "md5": hasher = hashlib.md5()
|
||||
# elif algo == "crc32": ... (custom)
|
||||
# Read expected checksum
|
||||
if hasher:
|
||||
uh = request.headers.get("Upload-Checksum")
|
||||
if not uh:
|
||||
# spec: checksum header optional; if algo passed to this fn we must have parsed it already
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
name, b64 = uh.split(" ", 1)
|
||||
if name != algo:
|
||||
raise ValueError()
|
||||
provided_digest = base64.b64decode(b64.encode("ascii"))
|
||||
except Exception:
|
||||
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||
written = 0
|
||||
async for chunk in request.stream():
|
||||
if not chunk:
|
||||
continue
|
||||
file_obj.write(chunk)
|
||||
if hasher:
|
||||
hasher.update(chunk)
|
||||
written += len(chunk)
|
||||
# Verify checksum if present
|
||||
if hasher and provided_digest is not None:
|
||||
digest = hasher.digest()
|
||||
if digest != provided_digest:
|
||||
# rollback appended bytes
|
||||
file_obj.truncate(start_pos)
|
||||
file_obj.flush()
|
||||
os.fsync(file_obj.fileno())
|
||||
raise HTTPException(status_code=460, detail="Checksum Mismatch")
|
||||
file_obj.flush()
|
||||
os.fsync(file_obj.fileno())
|
||||
return written
|
||||
|
||||
def _try_finalize_final(upload_id: str):
|
||||
"""If this is a final upload and all partials are completed, build final data and finalize atomically."""
|
||||
info = _load_info_or_404(upload_id)
|
||||
if info.get("kind") != "final" or info.get("completed"):
|
||||
return
|
||||
part_ids = info.get("parts", [])
|
||||
# Check all partials completed and have data
|
||||
for pid in part_ids:
|
||||
pinf = _load_info_or_404(pid)
|
||||
if not pinf.get("completed"):
|
||||
return # still not ready
|
||||
if not _exists(final_path(pid)):
|
||||
# tolerate leftover .part (e.g., if completed used .part->data). If data missing, can't finalize.
|
||||
return
|
||||
# Build final .part by concatenating parts' data in order, then atomically rename
|
||||
up = upload_dir(upload_id)
|
||||
os.makedirs(up, exist_ok=True)
|
||||
ppath = part_path(upload_id)
|
||||
# Reset/overwrite .part
|
||||
with open(ppath, "wb") as out:
|
||||
for pid in part_ids:
|
||||
with open(final_path(pid), "rb") as src:
|
||||
for chunk in iter(lambda: src.read(1024 * 1024), b""):
|
||||
out.write(chunk)
|
||||
out.flush()
|
||||
os.fsync(out.fileno())
|
||||
# If server can compute length now, set it
|
||||
length = _sum_lengths_or_none(part_ids)
|
||||
info["length"] = length if length is not None else info.get("length")
|
||||
_set_info(upload_id, info)
|
||||
_atomic_finalize_file(upload_id)
|
||||
|
||||
# -----------------------------
|
||||
# Routes
|
||||
# -----------------------------
|
||||
@app.options("/files")
|
||||
async def tus_options():
|
||||
headers = {
|
||||
"Tus-Version": TUS_VERSION,
|
||||
"Tus-Extension": TUS_EXTENSIONS,
|
||||
"Tus-Max-Size": str(MAX_SIZE),
|
||||
"Tus-Checksum-Algorithm": ",".join(CHECKSUM_ALGOS),
|
||||
}
|
||||
return Response(status_code=204, headers=headers)
|
||||
|
||||
@app.post("/files")
|
||||
async def tus_create(request: Request):
|
||||
_ensure_tus_version(request)
|
||||
|
||||
metadata = _parse_metadata(request.headers.get("Upload-Metadata"))
|
||||
concat = _parse_concat_header(request.headers.get("Upload-Concat"))
|
||||
|
||||
# Validate creation modes
|
||||
hdr_len = request.headers.get("Upload-Length")
|
||||
hdr_defer = request.headers.get("Upload-Defer-Length")
|
||||
|
||||
if concat and concat["type"] == "partial":
|
||||
# Partial MUST have Upload-Length (spec)
|
||||
if hdr_len is None:
|
||||
raise HTTPException(400, "Partial uploads require Upload-Length")
|
||||
if hdr_defer is not None:
|
||||
raise HTTPException(400, "Partial uploads cannot defer length")
|
||||
elif concat and concat["type"] == "final":
|
||||
# Final MUST NOT include Upload-Length
|
||||
if hdr_len is not None or hdr_defer is not None:
|
||||
raise HTTPException(400, "Final uploads must not include Upload-Length or Upload-Defer-Length")
|
||||
else:
|
||||
# Normal single upload: require length or defer
|
||||
if hdr_len is None and hdr_defer != "1":
|
||||
raise HTTPException(400, "Must provide Upload-Length or Upload-Defer-Length: 1")
|
||||
|
||||
# Parse length
|
||||
length: Optional[int] = None
|
||||
defer = False
|
||||
if hdr_len is not None:
|
||||
try:
|
||||
length = int(hdr_len)
|
||||
if length < 0: raise ValueError()
|
||||
except ValueError:
|
||||
raise HTTPException(400, "Invalid Upload-Length")
|
||||
if length > MAX_SIZE:
|
||||
raise HTTPException(413, "Upload too large")
|
||||
elif not concat or concat["type"] != "final":
|
||||
# final has no length at creation
|
||||
defer = (hdr_defer == "1")
|
||||
|
||||
upload_id = str(uuid.uuid4())
|
||||
udir = upload_dir(upload_id)
|
||||
_ensure_dir(udir)
|
||||
|
||||
if concat and concat["type"] == "final":
|
||||
# Resolve part ids from URLs
|
||||
part_ids = [_extract_upload_id_from_url(u) for u in concat["parts"]]
|
||||
# Compute length if possible
|
||||
sum_len = _sum_lengths_or_none(part_ids)
|
||||
info = _new_upload_info(upload_id, "final", sum_len, False, metadata, part_ids)
|
||||
_set_info(upload_id, info)
|
||||
|
||||
# Prepare empty .part (will be filled when partials complete)
|
||||
with open(part_path(upload_id), "wb") as f:
|
||||
f.flush(); os.fsync(f.fileno())
|
||||
_fsync_dir(udir)
|
||||
|
||||
# If all partials already complete, finalize immediately
|
||||
_try_finalize_final(upload_id)
|
||||
|
||||
return Response(status_code=201,
|
||||
headers={"Location": f"/files/{upload_id}",
|
||||
"Tus-Resumable": TUS_VERSION})
|
||||
|
||||
# Create partial or single
|
||||
kind = "partial" if (concat and concat["type"] == "partial") else "single"
|
||||
info = _new_upload_info(upload_id, kind, length, defer, metadata)
|
||||
_set_info(upload_id, info)
|
||||
|
||||
# Create empty .part
|
||||
with open(part_path(upload_id), "wb") as f:
|
||||
f.flush(); os.fsync(f.fileno())
|
||||
_fsync_dir(udir)
|
||||
|
||||
# Creation-With-Upload (optional body)
|
||||
upload_offset = 0
|
||||
has_body = request.headers.get("Content-Length") or request.headers.get("Transfer-Encoding")
|
||||
if has_body:
|
||||
ctype = request.headers.get("Content-Type", "")
|
||||
if ctype != "application/offset+octet-stream":
|
||||
raise HTTPException(415, "Content-Type must be application/offset+octet-stream for creation-with-upload")
|
||||
# Checksum header optional; if present, parse algo token
|
||||
uh = request.headers.get("Upload-Checksum")
|
||||
algo = None
|
||||
if uh:
|
||||
try:
|
||||
algo = uh.split(" ", 1)[0]
|
||||
except Exception:
|
||||
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||
|
||||
async with _lock_for(upload_id):
|
||||
with open(part_path(upload_id), "ab+") as f:
|
||||
f.seek(0, os.SEEK_END)
|
||||
upload_offset = await _stream_with_checksum_and_append(f, request, algo)
|
||||
|
||||
# If length known and we hit it, finalize
|
||||
inf = _load_info_or_404(upload_id)
|
||||
if inf["length"] is not None and upload_offset == int(inf["length"]):
|
||||
_atomic_finalize_file(upload_id)
|
||||
# If this is a partial that belongs to some final, a watcher could finalize final; here we rely on
|
||||
# client to create final explicitly (spec). Finalization of final is handled by _try_finalize_final
|
||||
# when final resource is created (or rechecked on subsequent HEAD/PATCH).
|
||||
headers = {"Location": f"/files/{upload_id}", "Tus-Resumable": TUS_VERSION}
|
||||
if upload_offset:
|
||||
headers["Upload-Offset"] = str(upload_offset)
|
||||
return Response(status_code=201, headers=headers)
|
||||
|
||||
@app.head("/files/{upload_id}")
|
||||
async def tus_head(upload_id: str, request: Request):
|
||||
_ensure_tus_version(request)
|
||||
info = _load_info_or_404(upload_id)
|
||||
is_final = info.get("kind") == "final"
|
||||
|
||||
headers = {
|
||||
"Tus-Resumable": TUS_VERSION,
|
||||
"Cache-Control": "no-store",
|
||||
}
|
||||
if info.get("metadata"):
|
||||
headers["Upload-Metadata"] = info["metadata"]
|
||||
|
||||
if info.get("length") is not None:
|
||||
headers["Upload-Length"] = str(int(info["length"]))
|
||||
elif info.get("defer_length"):
|
||||
headers["Upload-Defer-Length"] = "1"
|
||||
|
||||
exists_final, exists_part, offset = False, False, 0
|
||||
if is_final and not info.get("completed"):
|
||||
# BEFORE concatenation completes: SHOULD NOT include Upload-Offset
|
||||
# Try to see if we can finalize now (e.g., partials completed after crash)
|
||||
_try_finalize_final(upload_id)
|
||||
info = _load_info_or_404(upload_id)
|
||||
if info.get("completed"):
|
||||
# fallthrough to completed case
|
||||
pass
|
||||
else:
|
||||
# For in-progress final, no Upload-Offset; include Upload-Length if computable (already handled above)
|
||||
return Response(status_code=200, headers=headers)
|
||||
|
||||
# For partials or completed finals
|
||||
f = final_path(upload_id)
|
||||
p = part_path(upload_id)
|
||||
if _exists(f):
|
||||
exists_final, offset = True, _size(f)
|
||||
elif _exists(p):
|
||||
exists_part, offset = True, _size(p)
|
||||
else:
|
||||
# if info exists but no data, consider gone
|
||||
raise HTTPException(410, "Upload gone")
|
||||
|
||||
headers["Upload-Offset"] = str(offset)
|
||||
return Response(status_code=200, headers=headers)
|
||||
|
||||
@app.patch("/files/{upload_id}")
|
||||
async def tus_patch(upload_id: str, request: Request):
|
||||
_ensure_tus_version(request)
|
||||
info = _load_info_or_404(upload_id)
|
||||
|
||||
if info.get("kind") == "final":
|
||||
raise HTTPException(403, "Final uploads cannot be patched")
|
||||
|
||||
ctype = request.headers.get("Content-Type", "")
|
||||
if ctype != "application/offset+octet-stream":
|
||||
raise HTTPException(415, "Content-Type must be application/offset+octet-stream")
|
||||
|
||||
# Client offset must match server
|
||||
try:
|
||||
client_offset = int(request.headers.get("Upload-Offset", "-1"))
|
||||
if client_offset < 0: raise ValueError()
|
||||
except ValueError:
|
||||
raise HTTPException(400, "Invalid or missing Upload-Offset")
|
||||
|
||||
# If length deferred, client may now set Upload-Length (once)
|
||||
if info.get("length") is None and info.get("defer_length"):
|
||||
if "Upload-Length" in request.headers:
|
||||
try:
|
||||
new_len = int(request.headers["Upload-Length"])
|
||||
if new_len < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise HTTPException(400, "Invalid Upload-Length")
|
||||
if new_len > MAX_SIZE:
|
||||
raise HTTPException(413, "Upload too large")
|
||||
info["length"] = new_len
|
||||
info["defer_length"] = False
|
||||
_set_info(upload_id, info)
|
||||
|
||||
# Determine current server offset
|
||||
f = final_path(upload_id)
|
||||
p = part_path(upload_id)
|
||||
if _exists(f):
|
||||
raise HTTPException(403, "Upload already finalized")
|
||||
if not _exists(p):
|
||||
raise HTTPException(404, "Upload not found")
|
||||
|
||||
server_offset = _size(p)
|
||||
if client_offset != server_offset:
|
||||
return Response(status_code=409)
|
||||
|
||||
# Optional checksum
|
||||
uh = request.headers.get("Upload-Checksum")
|
||||
algo = None
|
||||
if uh:
|
||||
try:
|
||||
algo = uh.split(" ", 1)[0]
|
||||
except Exception:
|
||||
raise HTTPException(400, "Invalid Upload-Checksum")
|
||||
|
||||
# Append data (with rollback on checksum mismatch)
|
||||
async with _lock_for(upload_id):
|
||||
with open(p, "ab+") as fobj:
|
||||
fobj.seek(0, os.SEEK_END)
|
||||
written = await _stream_with_checksum_and_append(fobj, request, algo)
|
||||
|
||||
new_offset = server_offset + written
|
||||
|
||||
# If length known and reached exactly, finalize
|
||||
info = _load_info_or_404(upload_id) # reload
|
||||
if info.get("length") is not None and new_offset == int(info["length"]):
|
||||
_atomic_finalize_file(upload_id)
|
||||
|
||||
# If this is a partial, a corresponding final may exist and be now completable
|
||||
# We don't maintain reverse index; finalization is triggered when HEAD on final is called.
|
||||
# (Optional: scan for finals to proactively finalize.)
|
||||
|
||||
return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION, "Upload-Offset": str(new_offset)})
|
||||
|
||||
@app.delete("/files/{upload_id}")
|
||||
async def tus_delete(upload_id: str, request: Request):
|
||||
_ensure_tus_version(request)
|
||||
async with _lock_for(upload_id):
|
||||
udir = upload_dir(upload_id)
|
||||
for p in (part_path(upload_id), final_path(upload_id), info_path(upload_id)):
|
||||
try:
|
||||
os.remove(p)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
try:
|
||||
os.rmdir(udir)
|
||||
except OSError:
|
||||
pass
|
||||
return Response(status_code=204, headers={"Tus-Resumable": TUS_VERSION})
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Client Examples (manual)
|
||||
|
||||
```bash
|
||||
# OPTIONS
|
||||
curl -i -X OPTIONS http://localhost:8080/files
|
||||
|
||||
# 1) Single upload (known length)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Length: 11" \
|
||||
-H "Upload-Metadata: filename Zm9vLnR4dA=="
|
||||
# → Location: /files/<ID>
|
||||
|
||||
# Upload with checksum (sha1 of "hello ")
|
||||
printf "hello " | curl -i -X PATCH http://localhost:8080/files/<ID> \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Content-Type: application/offset+octet-stream" \
|
||||
-H "Upload-Offset: 0" \
|
||||
-H "Upload-Checksum: sha1 L6v8xR3Lw4N2n9kQox3wL7G0m/I=" \
|
||||
--data-binary @-
|
||||
# (Replace digest with correct base64 for your chunk)
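# e.g. you can compute the digest with:
#   printf "hello " | openssl dgst -sha1 -binary | base64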
|
||||
|
||||
# 2) Concatenation
|
||||
# Create partial A (5 bytes)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Length: 5" \
|
||||
-H "Upload-Concat: partial"
|
||||
# → Location: /files/<A>
|
||||
printf "hello" | curl -i -X PATCH http://localhost:8080/files/<A> \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Content-Type: application/offset+octet-stream" \
|
||||
-H "Upload-Offset: 0" \
|
||||
--data-binary @-
|
||||
|
||||
# Create partial B (6 bytes)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Length: 6" \
|
||||
-H "Upload-Concat: partial"
|
||||
# → Location: /files/<B>
|
||||
printf " world" | curl -i -X PATCH http://localhost:8080/files/<B> \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Content-Type: application/offset+octet-stream" \
|
||||
-H "Upload-Offset: 0" \
|
||||
--data-binary @-
|
||||
|
||||
# Create final (may be before or after partials complete)
|
||||
curl -i -X POST http://localhost:8080/files \
|
||||
-H "Tus-Resumable: 1.0.0" \
|
||||
-H "Upload-Concat: final; /files/<A> /files/<B>"
|
||||
# HEAD on final will eventually show Upload-Offset once finalized
|
||||
curl -I http://localhost:8080/files/<FINAL> -H "Tus-Resumable: 1.0.0"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation Notes (agent hints)
|
||||
|
||||
* **Durability:** every data write `fsync(file)`; after `os.replace` of `*.part → data` or `info.json.tmp → info.json`, also `fsync(parent)`.
|
||||
* **Checksum:** verify against **this request’s** body only; on mismatch, **truncate back** to previous size and return `460`.
|
||||
* **Concatenation:** final upload is never `PATCH`ed. Server builds the final upload's `data.part` by concatenating each partial's **final file** in order, then atomically renames and marks completed. It's triggered lazily in `HEAD` of final (and right after creation).
|
||||
* **Crash Recovery:** offset = `size(data.part)` or `size(data)`; `info.json` is canonical for `kind`, `length`, `defer_length`, `completed`, `parts`.
|
||||
* **Multi-process deployments:** replace `asyncio.Lock` with file locks (`fcntl.flock`) per `upload_id` to synchronize across workers (see the sketch below).
|
||||
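A hedged sketch of such a file lock (POSIX only; names are illustrative and not part of the server code above):

```python
import fcntl
import os
from contextlib import contextmanager

@contextmanager
def upload_file_lock(upload_dir_path: str):
    """Exclusive cross-process lock held for the duration of the with-block."""
    lock_path = os.path.join(upload_dir_path, ".lock")
    fd = os.open(lock_path, os.O_CREAT | os.O_RDWR)
    try:
        fcntl.flock(fd, fcntl.LOCK_EX)   # blocks until this worker owns the upload
        yield
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)

# Usage inside a route handler (call it via asyncio.to_thread / run_in_executor so the
# blocking flock call does not stall the event loop):
# with upload_file_lock(upload_dir(upload_id)):
#     ...append bytes / finalize...
```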
|
||||
|
||||
229
aiprompts/ai_instruct/uppy/uppy.md
Normal file
@@ -0,0 +1,229 @@
|
||||
```bash
|
||||
npm install @uppy/react
|
||||
```
|
||||
|
||||
## Components
|
||||
|
||||
Pre-composed, plug-and-play components:
|
||||
|
||||
<Dashboard /> renders @uppy/dashboard
|
||||
<DashboardModal /> renders @uppy/dashboard as a modal
|
||||
<DragDrop /> renders @uppy/drag-drop
|
||||
<ProgressBar /> renders @uppy/progress-bar
|
||||
<StatusBar /> renders @uppy/status-bar
|
||||
|
||||
for more info see https://uppy.io/docs/react
|
||||
|
||||
|
||||
we use tus server for the upload support
|
||||
|
||||
npm install @uppy/tus
|
||||
|
||||
e.g.
|
||||
|
||||
import Uppy from '@uppy/core';
|
||||
import Dashboard from '@uppy/dashboard';
|
||||
import Tus from '@uppy/tus';
|
||||
|
||||
import '@uppy/core/dist/style.min.css';
|
||||
import '@uppy/dashboard/dist/style.min.css';
|
||||
|
||||
new Uppy()
  .use(Dashboard, { inline: true, target: 'body' })
  .use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' }) // tus demo endpoint used elsewhere in this doc; point it at your own tus server
|
||||
|
||||
|
||||
|
||||
========================
|
||||
CODE SNIPPETS
|
||||
========================
|
||||
|
||||
TITLE: React Dashboard Modal Example with TUS
|
||||
DESCRIPTION: Demonstrates how to use the DashboardModal component from @uppy/react with the Tus plugin for resumable uploads.
|
||||
LANGUAGE: jsx
|
||||
CODE:
|
||||
```
|
||||
/** @jsx React */
|
||||
import React from 'react'
|
||||
import Uppy from '@uppy/core'
|
||||
import { DashboardModal } from '@uppy/react'
|
||||
import Tus from '@uppy/tus'
|
||||
|
||||
const uppy = new Uppy({ debug: true, autoProceed: false })
|
||||
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
|
||||
|
||||
class Example extends React.Component {
|
||||
state = { open: false }
|
||||
|
||||
render() {
|
||||
const { open } = this.state
|
||||
return (
|
||||
<DashboardModal
|
||||
uppy={uppy}
|
||||
open={open}
|
||||
onRequestClose={this.handleClose}
|
||||
/>
|
||||
)
|
||||
}
|
||||
// ..snip..
|
||||
}
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Installation using npm for @uppy/react
|
||||
DESCRIPTION: Provides the command to install the @uppy/react package using npm.
|
||||
LANGUAGE: bash
|
||||
CODE:
|
||||
```
|
||||
$ npm install @uppy/react @uppy/core @uppy/dashboard @uppy/tus
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Uppy Dashboard and Tus Integration Example (HTML & JavaScript)
|
||||
DESCRIPTION: This snippet demonstrates how to initialize Uppy with the Dashboard and Tus plugins, configure them, and handle upload success events.
|
||||
LANGUAGE: html
|
||||
CODE:
|
||||
```
|
||||
<html>
|
||||
<head>
|
||||
<link rel="stylesheet" href="https://releases.transloadit.com/uppy/v4.18.0/uppy.min.css" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="DashboardContainer"></div>
|
||||
<button class="UppyModalOpenerBtn">Upload</button>
|
||||
<div class="uploaded-files">
|
||||
<h5>Uploaded files:</h5>
|
||||
<ol></ol>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
<script type="module">
|
||||
import { Uppy, Dashboard, Tus } from 'https://releases.transloadit.com/uppy/v4.18.0/uppy.min.mjs'
|
||||
var uppy = new Uppy({
|
||||
debug: true,
|
||||
autoProceed: false,
|
||||
})
|
||||
.use(Dashboard, {
|
||||
browserBackButtonClose: false,
|
||||
height: 470,
|
||||
inline: false,
|
||||
replaceTargetContent: true,
|
||||
showProgressDetails: true,
|
||||
target: '.DashboardContainer',
|
||||
trigger: '.UppyModalOpenerBtn',
|
||||
})
|
||||
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
|
||||
.on('upload-success', function (file, response) {
|
||||
var url = response.uploadURL
|
||||
var fileName = file.name
|
||||
|
||||
document.querySelector('.uploaded-files ol').innerHTML +=
|
||||
'<li><a href="' + url + '" target="_blank">' + fileName + '</a></li>'
|
||||
})
|
||||
</script>
|
||||
</html>
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Initialize Uppy with Tus Plugin (JavaScript)
|
||||
DESCRIPTION: Demonstrates how to initialize Uppy and configure the Tus plugin for resumable uploads.
|
||||
LANGUAGE: js
|
||||
CODE:
|
||||
```
|
||||
import Uppy from '@uppy/core'
|
||||
import Tus from '@uppy/tus'
|
||||
|
||||
const uppy = new Uppy()
|
||||
uppy.use(Tus, {
|
||||
endpoint: 'https://tusd.tusdemo.net/files/', // use your tus endpoint here
|
||||
resume: true,
|
||||
retryDelays: [0, 1000, 3000, 5000],
|
||||
})
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Uppy Core Initialization and Plugin Usage (JavaScript)
|
||||
DESCRIPTION: This example demonstrates how to initialize Uppy with core functionality and integrate the Tus plugin. It also shows how to listen for upload completion events.
|
||||
LANGUAGE: javascript
|
||||
CODE:
|
||||
```
|
||||
import Uppy from '@uppy/core'
|
||||
import Dashboard from '@uppy/dashboard'
|
||||
import Tus from '@uppy/tus'
|
||||
|
||||
const uppy = new Uppy()
|
||||
.use(Dashboard, { trigger: '#select-files' })
|
||||
.use(Tus, { endpoint: 'https://tusd.tusdemo.net/files/' })
|
||||
.on('complete', (result) => {
|
||||
console.log('Upload result:', result)
|
||||
})
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Uppy XHRUpload Configuration (JavaScript)
|
||||
DESCRIPTION: This snippet shows the basic JavaScript configuration for Uppy, initializing it with the XHRUpload plugin to send files to a specified endpoint.
|
||||
LANGUAGE: javascript
|
||||
CODE:
|
||||
```
|
||||
import Uppy from '@uppy/core';
|
||||
import XHRUpload from '@uppy/xhr-upload';
|
||||
|
||||
const uppy = new Uppy({
|
||||
debug: true,
|
||||
autoProceed: false,
|
||||
restrictions: {
|
||||
maxFileSize: 100000000,
|
||||
maxNumberOfFiles: 10,
|
||||
allowedFileTypes: ['image/*', 'video/*']
|
||||
}
|
||||
});
|
||||
|
||||
uppy.use(XHRUpload, {
|
||||
endpoint: 'YOUR_UPLOAD_ENDPOINT_URL',
|
||||
fieldName: 'files[]',
|
||||
method: 'post'
|
||||
});
|
||||
|
||||
uppy.on('complete', (result) => {
|
||||
console.log('Upload complete:', result);
|
||||
});
|
||||
|
||||
uppy.on('error', (error) => {
|
||||
console.error('Upload error:', error);
|
||||
});
|
||||
```
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TITLE: Install Uppy Core Packages for TUS
|
||||
DESCRIPTION: Installs the core Uppy package along with the Dashboard and Tus plugins using npm.
|
||||
LANGUAGE: bash
|
||||
CODE:
|
||||
```
|
||||
npm install @uppy/core @uppy/dashboard @uppy/tus @uppy/xhr-upload
|
||||
```
|
||||
|
||||
========================
|
||||
QUESTIONS AND ANSWERS
|
||||
========================
|
||||
|
||||
TOPIC: Uppy React Components
|
||||
Q: What is the purpose of the @uppy/react package?
|
||||
A: The @uppy/react package provides React component wrappers for Uppy's officially maintained UI plugins. It allows developers to easily integrate Uppy's file uploading capabilities into their React applications.
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TOPIC: Uppy React Components
|
||||
Q: How can @uppy/react be installed in a project?
|
||||
A: The @uppy/react package can be installed using npm with the command '$ npm install @uppy/react'.
|
||||
|
||||
----------------------------------------
|
||||
|
||||
TOPIC: Uppy React Components
|
||||
Q: Where can I find more detailed documentation for the @uppy/react plugin?
|
||||
A: More detailed documentation for the @uppy/react plugin is available on the Uppy website at https://uppy.io/docs/react.
|
||||
72
aiprompts/ai_instruct/what_is_a_hero_twin.md
Normal file
@@ -0,0 +1,72 @@
|
||||
you represent a digital twin for a user; the user talks to you to get things done in their digital life
|
||||
|
||||
you will interpret the instructions the user prompts, figure out the multiple instructions, break them up and categorize them as follows:
|
||||
|
||||
- cat: calendar
|
||||
- manage calendar for the user
|
||||
- cat: contacts
|
||||
- manage contacts for the user
|
||||
- cat: communicate
|
||||
- communicate with others using text
|
||||
- cat: tasks
|
||||
- manage my tasks
|
||||
- cat: circle
|
||||
- define circle we work in, a circle is like a project context in which we do above, so can be for a team or a project, try to find it
|
||||
- cat: sysadmin
|
||||
  - system administration, e.g. creation of virtual machines (VM) and containers, start/stop them, see monitoring information
|
||||
- cat: notes
|
||||
  - anything to do with transcriptions, note taking, summaries
|
||||
- how we recorded meetings e.g. zoom, google meet, ...
|
||||
- how we are looking for info in meeting
|
||||
- cat: unknown
|
||||
- anything we can't understand
|
||||
|
||||
try to understand what user wants and put it in blocks (one per category for the action e.g. calendar)
|
||||
|
||||
- before each block (instruction) put ###########################
|
||||
- in the first line mention the category as defined above, only mention this category once and there is only one per block
|
||||
- then reformulate in clear instructions what needs to be done after that
|
||||
- the instructions are put in lines following the instruction (not in the instruction line)
|
||||
- only make blocks for instructions as given
|
||||
|
||||
what you output will be used further to do more specific prompting
|
||||
|
||||
if circle, always put these instructions first
|
||||
|
||||
if time is specified put the time as follows
|
||||
|
||||
- if relative e.g. next week, tomorrow, after tomorrow, in one hour then start from the current time
|
||||
- time is in format: YYYY/MM/DD hh:mm format
|
||||
- current time is friday 2025/01/17 10:12
|
||||
- if e.g. next month jan, or next tuesday, then don't repeat the broad instructions like tuesday, just show the date as YYYY/MM/DD hh:mm
|
||||
|
||||
if not clear for a date, don't invent just repeat the original instruction
|
||||
|
||||
if the category is not clear, just use unknown
|
||||
|
||||
|
||||
NOW DO EXAMPLE 1
|
||||
|
||||
```
|
||||
hi good morning
|
||||
|
||||
Can you help me find meetings I have done around research of threefold in the last 2 weeks
|
||||
|
||||
I need to create a new VM, 4 GB of memory, 2 vcpu, in belgium, with ubuntu
|
||||
|
||||
I would like do schedule a meeting, need to go to the dentist tomorrow at 10am, its now friday jan 17
|
||||
|
||||
also remind me I need to do the dishes after tomorrow in the morning
|
||||
|
||||
can you also add jef as a contact, he lives in geneva, he is doing something about rocketscience
|
||||
|
||||
I need to paint my wall in my room next week wednesday
|
||||
|
||||
cancel all my meetings next sunday
|
||||
|
||||
can you give me list of my contacts who live in geneva and name sounds like tom
|
||||
|
||||
send a message to my mother, I am seeing here in 3 days at 7pm
|
||||
|
||||
```
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
../lib/data/encoder/readme.md
|
||||
51
aiprompts/bizmodel/bizmodel_cost.md
Normal file
@@ -0,0 +1,51 @@
|
||||
# Cost Module Documentation
|
||||
|
||||
This module provides functionalities related to managing various costs within the business model.
|
||||
|
||||
## Actions
|
||||
|
||||
### `!!bizmodel.cost_define`
|
||||
|
||||
Defines a cost item and its associated properties.
|
||||
|
||||
**Parameters:**
|
||||
|
||||
* `bizname` (string, required): The name of the business model instance to which this cost belongs.
|
||||
* `descr` (string, required): Description of the cost item. If `name` is not provided, it will be derived from this.
|
||||
* `name` (string, optional): Unique name for the cost item. If not provided, it will be generated from `descr`.
|
||||
* `cost` (string, required): The cost value. Can be a fixed value (e.g., '1000USD') or a growth rate (e.g., '0:1000,59:2000'). If `indexation` is used, this should not contain a colon. This value is extrapolated.
|
||||
* `indexation` (percentage, optional, default: '0%'): Annual indexation rate for the cost. Applied over 6 years if specified.
|
||||
* `costcenter` (string, optional): The costcenter associated with this cost.
|
||||
* `cost_percent_revenue` (percentage, optional, default: '0%'): Ensures the cost is at least this percentage of the total revenue.
|
||||
* `extrapolate` (optional, default: 0): Set `extrapolate:1` if you want the cost value to be extrapolated over time; default is 0.
|
||||
|
||||
### `!!bizmodel.costcenter_define`
|
||||
|
||||
Defines a cost center.
|
||||
|
||||
**Parameters:**
|
||||
|
||||
* `bizname` (string, required): The name of the business model instance to which this cost belongs.
|
||||
* `descr` (string, required): Description of the cost center. If `name` is not provided, it will be derived from this.
|
||||
* `name` (string, optional): Unique name for the cost center. If not provided, it will be generated from `descr`.
|
||||
* `department` (string, optional): The department associated with this cost center.
|
||||
|
||||
|
||||
## **Example:**
|
||||
|
||||
```js
|
||||
!!bizmodel.costcenter_define bizname:'test'
|
||||
descr:'Marketing Cost Center'
|
||||
name:'marketing_cc'
|
||||
department:'marketing'
|
||||
|
||||
|
||||
!!bizmodel.cost_define bizname:'test'
|
||||
descr:'Office Rent'
|
||||
cost:'5000USD'
|
||||
indexation:'3%'
|
||||
costcenter:'marketing_cc'
|
||||
cost_percent_revenue:'1%'
|
||||
|
||||
```
|
||||
|
||||
35
aiprompts/bizmodel/bizmodel_funding.md
Normal file
@@ -0,0 +1,35 @@
|
||||
# Funding Module Documentation
|
||||
|
||||
This module provides functionalities related to managing various funding sources within the business model.
|
||||
|
||||
## Actions
|
||||
|
||||
### `!!bizmodel.funding_define`
|
||||
|
||||
Defines a funding entity and its associated properties.
|
||||
|
||||
**Parameters:**
|
||||
|
||||
* `bizname` (string, required): The name of the business model instance to which this funding belongs.
|
||||
* `name` (string, required): Identifier for the funding entity.
|
||||
* `descr` (string, optional): Human-readable description. If not provided, it will be derived from `description`.
|
||||
* `investment` (string, required): Format `month:amount`, e.g., '0:10000,12:5000'. This value is extrapolated.
|
||||
* `type` (string, optional, default: 'capital'): The type of funding. Allowed values: 'loan' or 'capital'.
|
||||
* `extrapolate` (optional, default: 0): Set `extrapolate:1` if you want the investment value to be extrapolated over time; default is 0.
|
||||
|
||||
### `funding_total`
|
||||
|
||||
Calculates the total funding.
|
||||
|
||||
## **Example:**
|
||||
|
||||
```js
|
||||
!!bizmodel.funding_define bizname:'test' name:'seed_capital'
|
||||
descr:'Initial Seed Capital Investment'
|
||||
investment:'0:500000,12:200000'
|
||||
type:'capital'
|
||||
|
||||
!!bizmodel.funding_define bizname:'test' name:'bank_loan'
|
||||
descr:'Bank Loan for Expansion'
|
||||
investment:'6:100000,18:50000'
|
||||
type:'loan'
|
||||
24
aiprompts/bizmodel/bizmodel_generation_prompt.md
Normal file
@@ -0,0 +1,24 @@
|
||||
create a bizmodel for a startup called threefold
|
||||
|
||||
it has 4 departments
|
||||
- engineering
|
||||
- operations
|
||||
- sales
|
||||
- admin
|
||||
|
||||
I need modest engineering 10, 5 people, team nr of people grows 10% per year, with max of 20 people
|
||||
I need operational team of 2 people and 4% of revenue
|
||||
|
||||
I am selling services on a cloud, it starts at 10k USD a month after 10 months, then growing to 1 million a month after 3 years, then 5% up per year
|
||||
|
||||
we have 3 offices
|
||||
|
||||
we have 5m funding day 1
|
||||
|
||||
travelcost is 3% of revenue
|
||||
|
||||
create me the full heroscript which gives me this biz model
|
||||
|
||||
create bizmodel.heroscript in ~/code/github/freeflowuniverse/herolib/examples/biztools/generated_ai
|
||||
|
||||
as well as a do.vsh file which executes the heroscript and does a pprint, in do.vsh , call play with heroscript_path arg
|
||||
69
aiprompts/bizmodel/bizmodel_hr.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# HR Module Documentation
|
||||
|
||||
This module provides functionalities related to Human Resources within the business model.
|
||||
|
||||
## Actions
|
||||
|
||||
All actions in the `bizmodel` module accept a `bizname` parameter (string, required) which specifies the business model instance to which the action applies.
|
||||
|
||||
### `bizmodel.employee_define`
|
||||
|
||||
Defines an employee and their associated costs within the business model.
|
||||
|
||||
**Parameters:**
|
||||
|
||||
* `bizname` (string, required): The name of the business model instance to which this cost belongs.
|
||||
* `descr` (string, required): Description of the employee (e.g., 'Junior Engineer'). If `name` is not provided, it will be derived from this.
|
||||
* `name` (string, optional): Unique name for the employee. If not provided, it will be generated from `descr`.
|
||||
* `cost` (string, required): The cost associated with the employee. Can be a fixed value (e.g., '4000USD') or a growth rate (e.g., '1:5,60:30'). If `indexation` is used, this should not contain a colon.
|
||||
* `nrpeople` (string, optional, default: '1'): The number of people for this employee definition. Can be a fixed number or a growth rate (e.g., '1:5,60:30').
|
||||
* `indexation` (percentage, optional, default: '0%'): Annual indexation rate for the cost. Applied over 6 years if specified.
|
||||
* `department` (string, optional): The department the employee belongs to.
|
||||
* `cost_percent_revenue` (percentage, optional, default: '0%'): Ensures the employee cost is at least this percentage of the total revenue.
|
||||
* `costcenter` (string, optional, default: 'default_costcenter'): The cost center for the employee.
|
||||
* `page` (string, optional): A reference to a page or document related to this employee.
|
||||
* `fulltime` (percentage, optional, default: '100%'): The full-time percentage of the employee.
|
||||
|
||||
|
||||
### `bizmodel.department_define`
|
||||
|
||||
Defines a department within the business model.
|
||||
|
||||
**Parameters:**
|
||||
|
||||
* `bizname` (string, required): The name of the business model instance to which this cost belongs.
|
||||
* `name` (string, required): Unique name for the department.
|
||||
* `descr` (string, optional): Description of the department. If not provided, `description` will be used.
|
||||
* `description` (string, optional): Description of the department. Used if `descr` is not provided.
|
||||
* `title` (string, optional): A title for the department.
|
||||
* `page` (string, optional): A reference to a page or document related to this department.
|
||||
|
||||
## **Example:**
|
||||
|
||||
```js
|
||||
|
||||
!!bizmodel.department_define bizname:'test'
|
||||
name:'engineering'
|
||||
descr:'Software Development Department'
|
||||
title:'Engineering Division'
|
||||
//optional, if set overrules the hr_params
|
||||
//avg_monthly_cost:'6000USD' avg_indexation:'5%'
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
name:'ourclo'
|
||||
descr:'CLO'
|
||||
cost:'10000EUR'
|
||||
indexation:'5%'
|
||||
|
||||
!!bizmodel.employee_define bizname:'test'
|
||||
name:'junior_engineer'
|
||||
descr:'Junior Engineer'
|
||||
nrpeople:'1:5,60:30'
|
||||
cost:'4000USD'
|
||||
indexation:'5%'
|
||||
department:'engineering'
|
||||
cost_percent_revenue:'4%'
|
||||
|
||||
|
||||
```
|
||||
|
||||
92
aiprompts/bizmodel/bizmodel_revenue.md
Normal file
@@ -0,0 +1,92 @@
|
||||
# Revenue
|
||||
|
||||
```
|
||||
!!bizmodel.revenue_define bizname:'test' name:'oem1' ...
|
||||
```
|
||||
|
||||
## Params
|
||||
|
||||
- bizname, is the name of the biz model we are populating
|
||||
- name, name of product, project
|
||||
- descr, description of the revenue line item
|
||||
- nr_months_recurring: e.g. 60 is 5 years
|
||||
|
||||
## discrete revenue/cogs (not per item)
|
||||
|
||||
cogs stands for cost of goods sold
|
||||
|
||||
- revenue: the revenue line, can be extrapolated if specified
|
||||
- cogs: cost of goods, this is the cost of the revenue, can be extrapolated if specified
|
||||
- cogs_percent: percent of revenue
|
||||
- cogs_delay: delay in months between cogs and revenue
|
||||
- extrapolate: if you want to extrapolate revenue or cogs do extrapolate:1, default is 0
|
||||
|
||||
### results in
|
||||
|
||||
follow rows in sheets
|
||||
|
||||
- {name}_ + all the arg names as mentioned above...
|
||||
- {name}_revenue_total
|
||||
- {name}_cogs_total
|
||||
|
||||
## grouped per items sold
|
||||
|
||||
- nr_sold: how many do we sell per month (is in growth format e.g. 10:100,20:200, default is 1)
|
||||
- revenue_item_setup, revenue for 1 item '1000usd'
|
||||
- revenue_item_setup_delay, delay between sale and recognition of the sale in months e.g. 1
|
||||
- revenue_item_monthly, revenue per month for 1 item
|
||||
- revenue_item_monthly_delay, how many months before monthly revenue starts
|
||||
- revenue_item_monthly_perc, how much percent of revenue_item_setup will come back over months e.g. 20%
|
||||
- cogs_item_setup, cost of good for 1 item at setup
|
||||
- cogs_item_setup_rev_perc: what is percentage of the revenue which is cogs, e.g. 2%
|
||||
- cogs_item_monthly, cost of goods for the monthly per 1 item
|
||||
- cogs_item_monthly_rev_perc: what is percentage of the monthly revenue which is cogs, e.g. 10%
|
||||
- cogs_item_delay, how many months before cogs starts after sales
|
||||
|
||||
|
||||
|
||||
### results in
|
||||
|
||||
follow rows in sheets
|
||||
|
||||
- {name}_ + all the arg names as mentioned above...
|
||||
- {name}_revenue_item_setup_total
|
||||
- {name}_revenue_item_monthly_total
|
||||
- {name}_revenue_item_total
|
||||
|
||||
- {name}_cogs_item_total
|
||||
|
||||
## to use
|
||||
|
||||
### basic example
|
||||
|
||||
```v
|
||||
|
||||
import freeflowuniverse.herolib.biz.bizmodel
|
||||
import os
|
||||
|
||||
heroscript:="
|
||||
|
||||
Next will define an OEM product in month 10, 1 Million EUR, ... cogs is a percent which is 20% at start but goes to 10% after 20 months.
|
||||
|
||||
!!bizmodel.revenue_define bizname:'test' name:'oem1'
|
||||
descr:'OEM Deals'
|
||||
revenue:'10:1000000EUR,15:3333,20:1200000'
|
||||
cogs_percent: '1:20%,20:10%'
|
||||
|
||||
|
||||
This time we have the cogs defined in fixed manner, the default currency is USD doesn't have to be mentioned.
|
||||
|
||||
!!bizmodel.revenue_define bizname:'test' name:'oem2'
|
||||
descr:'OEM Deals'
|
||||
revenue:'10:1000000EUR,15:3333,20:1200000'
|
||||
cogs: '10:100000,15:1000,20:120000'
|
||||
"
|
||||
|
||||
bizmodel.play(heroscript:heroscript)!
|
||||
|
||||
mut bm:=bizmodel.get("test")!
|
||||
|
||||
bm.sheet.pprint()!
|
||||
|
||||
```
|
||||
38
aiprompts/bizmodel/costs.heroscript
Normal file
@@ -0,0 +1,38 @@
|
||||
|
||||
//need to define some revenue because otherwise can't see how HR relates to it
|
||||
!!bizmodel.revenue_define bizname:'test' name:'oem1' extrapolate:1
|
||||
descr:'OEM Deals' revenue:'0:1000000,60:10000000'
|
||||
cogs_percent: '0:20%'
|
||||
|
||||
!!bizmodel.department_define bizname:'test' name:'marketing'
|
||||
descr:'Marketing Department'
|
||||
|
||||
!!bizmodel.department_define bizname:'test' name:'engineering'
|
||||
descr:'Engineering Department'
|
||||
|
||||
!!bizmodel.costcenter_define bizname:'test' name:'marketing_cc'
|
||||
descr:'Marketing Cost Center'
|
||||
department:'marketing'
|
||||
|
||||
!!bizmodel.cost_define bizname:'test' name:'office_rent'
|
||||
descr:'Office Rent'
|
||||
cost:'8000USD'
|
||||
indexation:'3%'
|
||||
costcenter:'marketing_cc'
|
||||
cost_percent_revenue:'0.5%'
|
||||
|
||||
!!bizmodel.cost_define bizname:'test' name:'travel'
|
||||
    descr:'Travel Cost'
|
||||
cost:'2:5000USD' //start month 3
|
||||
costcenter:'marketing_cc'
|
||||
|
||||
!!bizmodel.cost_define bizname:'test' name:'triptomoon'
|
||||
    descr:'Trip to the Moon'
|
||||
cost:'10:500000USD' extrapolate:0 //this means we do a one off cost in this case month 11
|
||||
costcenter:'marketing_cc'
|
||||
|
||||
|
||||
// !!bizmodel.cost_define bizname:'test' name:'software_licenses'
|
||||
// descr:'Annual Software Licenses'
|
||||
// cost:'0:10000 10:EUR:20kCHF,12:5000USD'
|
||||
// department:'engineering'
|
||||
@@ -1 +0,0 @@
|
||||
../lib/data/currency/readme.md
|
||||
@@ -1,340 +0,0 @@
|
||||
module datatypes
|
||||
|
||||
# datatypes
|
||||
|
||||
This module provides implementations of less frequently used, but still common data types.
|
||||
|
||||
V's `builtin` module is imported implicitly, and has implementations for arrays, maps and strings. These are good for many applications, but there are a plethora of other useful data structures/containers, like linked lists, priority queues, trees, etc, that allow for algorithms with different time complexities, which may be more suitable for your specific application.
|
||||
|
||||
They are implemented using generics, which you have to specialise for the type of your actual elements. For example:
|
||||
```v
|
||||
import datatypes
|
||||
|
||||
mut stack := datatypes.Stack[int]{}
|
||||
stack.push(1)
|
||||
println(stack)
|
||||
```
|
||||
|
||||
## Currently Implemented Datatypes:
|
||||
|
||||
- [x] Linked list
|
||||
- [x] Doubly linked list
|
||||
- [x] Stack (LIFO)
|
||||
- [x] Queue (FIFO)
|
||||
- [x] Min heap (priority queue)
|
||||
- [x] Set
|
||||
- [x] Quadtree
|
||||
- [x] Bloom filter
|
||||
- [ ] ...
|
||||
|
||||
|
||||
fn new_bloom_filter[T](hash_func fn (T) u32, table_size int, num_functions int) !&BloomFilter[T]
|
||||
new_bloom_filter creates a new bloom_filter. `table_size` should be greater than 0, and `num_functions` should be 1~16.
|
||||
fn new_bloom_filter_fast[T](hash_func fn (T) u32) &BloomFilter[T]
|
||||
new_bloom_filter_fast creates a new bloom_filter. `table_size` is 16384, and `num_functions` is 4.
|
||||
fn new_ringbuffer[T](s int) RingBuffer[T]
|
||||
new_ringbuffer creates an empty ring buffer of size `s`.
|
||||
fn (mut bst BSTree[T]) insert(value T) bool
|
||||
insert gives the possibility to insert an element into the BST.
|
||||
fn (bst &BSTree[T]) contains(value T) bool
|
||||
contains checks if an element with a given `value` is inside the BST.
|
||||
fn (mut bst BSTree[T]) remove(value T) bool
|
||||
remove removes an element with `value` from the BST.
|
||||
fn (bst &BSTree[T]) is_empty() bool
|
||||
is_empty checks if the BST is empty
|
||||
fn (bst &BSTree[T]) in_order_traversal() []T
|
||||
in_order_traversal traverses the BST in order, and returns the result as an array.
|
||||
fn (bst &BSTree[T]) post_order_traversal() []T
|
||||
post_order_traversal traverses the BST in post order, and returns the result in an array.
|
||||
fn (bst &BSTree[T]) pre_order_traversal() []T
|
||||
pre_order_traversal traverses the BST in pre order, and returns the result as an array.
|
||||
fn (bst &BSTree[T]) to_left(value T) !T
|
||||
to_left returns the value of the node to the left of the node with the specified `value` if it exists, otherwise a false value is returned.
|
||||
|
||||
An example of usage can be the following one
|
||||
```v
|
||||
left_value, exist := bst.to_left(10)
|
||||
```
|
||||
fn (bst &BSTree[T]) to_right(value T) !T
|
||||
to_right returns the value of the element to the right of the node with the specified `value` if it exists; otherwise, the boolean value is false. An example of usage can be the following one
|
||||
|
||||
```v
|
||||
left_value, exist := bst.to_right(10)
|
||||
```
|
||||
fn (bst &BSTree[T]) max() !T
|
||||
max returns the max element inside the BST. Time complexity is O(N) if the BST is not balanced.
|
||||
fn (bst &BSTree[T]) min() !T
|
||||
min returns the minimum element in the BST. Time complexity is O(N) if the BST is not balanced.
|
||||
fn (mut b BloomFilter[T]) add(element T)
|
||||
add adds the element to the bloom filter.
|
||||
fn (b &BloomFilter[T]) exists(element T) bool
|
||||
exists checks whether the element exists in the bloom filter.
|
||||
fn (l &BloomFilter[T]) @union(r &BloomFilter[T]) !&BloomFilter[T]
|
||||
@union returns the union of the two bloom filters.
|
||||
fn (l &BloomFilter[T]) intersection(r &BloomFilter[T]) !&BloomFilter[T]
|
||||
intersection returns the intersection of bloom filters.
|
||||
fn (list DoublyLinkedList[T]) is_empty() bool
|
||||
is_empty checks if the linked list is empty
|
||||
fn (list DoublyLinkedList[T]) len() int
|
||||
len returns the length of the linked list
|
||||
fn (list DoublyLinkedList[T]) first() !T
|
||||
first returns the first element of the linked list
|
||||
fn (list DoublyLinkedList[T]) last() !T
|
||||
last returns the last element of the linked list
|
||||
fn (mut list DoublyLinkedList[T]) push_back(item T)
|
||||
push_back adds an element to the end of the linked list
|
||||
fn (mut list DoublyLinkedList[T]) push_front(item T)
|
||||
push_front adds an element to the beginning of the linked list
|
||||
fn (mut list DoublyLinkedList[T]) push_many(elements []T, direction Direction)
|
||||
push_many adds an array of elements to the front or back of the linked list, depending on `direction`
|
||||
fn (mut list DoublyLinkedList[T]) pop_back() !T
|
||||
pop_back removes the last element of the linked list
|
||||
fn (mut list DoublyLinkedList[T]) pop_front() !T
|
||||
pop_front removes the first element of the linked list
|
||||
fn (mut list DoublyLinkedList[T]) insert(idx int, item T) !
|
||||
insert adds an element to the linked list at the given index
|
||||
fn (list &DoublyLinkedList[T]) index(item T) !int
|
||||
index searches the linked list for item and returns the forward index or none if not found.
|
||||
fn (mut list DoublyLinkedList[T]) delete(idx int)
|
||||
delete removes index idx from the linked list and is safe to call for any idx.
|
||||
fn (list DoublyLinkedList[T]) str() string
|
||||
str returns a string representation of the linked list
|
||||
fn (list DoublyLinkedList[T]) array() []T
|
||||
array returns an array representation of the linked list
|
||||
fn (mut list DoublyLinkedList[T]) next() ?T
|
||||
next implements the iter interface to use DoublyLinkedList with V's `for x in list {` loop syntax.
|
||||
fn (mut list DoublyLinkedList[T]) iterator() DoublyListIter[T]
|
||||
iterator returns a new iterator instance for the `list`.
|
||||
fn (mut list DoublyLinkedList[T]) back_iterator() DoublyListIterBack[T]
|
||||
back_iterator returns a new backwards iterator instance for the `list`.
|
||||
fn (mut iter DoublyListIterBack[T]) next() ?T
|
||||
next returns *the previous* element of the list, or `none` when the start of the list is reached. It is called by V's `for x in iter{` on each iteration.
|
||||
fn (mut iter DoublyListIter[T]) next() ?T
|
||||
next returns *the next* element of the list, or `none` when the end of the list is reached. It is called by V's `for x in iter{` on each iteration.
|
||||
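A short usage sketch based on the doubly linked list functions documented above:

```v
import datatypes

mut list := datatypes.DoublyLinkedList[int]{}
list.push_many([1, 2, 3], .back) // append 1, 2, 3 at the back
list.push_front(0)               // list is now 0, 1, 2, 3
for x in list {                  // uses the iterator support described above
    println(x)
}
```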
fn (list LinkedList[T]) is_empty() bool
|
||||
is_empty checks if the linked list is empty
|
||||
fn (list LinkedList[T]) len() int
|
||||
len returns the length of the linked list
|
||||
fn (list LinkedList[T]) first() !T
|
||||
first returns the first element of the linked list
|
||||
fn (list LinkedList[T]) last() !T
|
||||
last returns the last element of the linked list
|
||||
fn (list LinkedList[T]) index(idx int) !T
|
||||
index returns the element at the given index of the linked list
|
||||
fn (mut list LinkedList[T]) push(item T)
|
||||
push adds an element to the end of the linked list
|
||||
fn (mut list LinkedList[T]) push_many(elements []T)
|
||||
push_many adds an array of elements to the end of the linked list
|
||||
fn (mut list LinkedList[T]) pop() !T
|
||||
pop removes the last element of the linked list
|
||||
fn (mut list LinkedList[T]) shift() !T
|
||||
shift removes the first element of the linked list
|
||||
fn (mut list LinkedList[T]) insert(idx int, item T) !
|
||||
insert adds an element to the linked list at the given index
|
||||
fn (mut list LinkedList[T]) prepend(item T)
|
||||
prepend adds an element to the beginning of the linked list (equivalent to insert(0, item))
|
||||
fn (list LinkedList[T]) str() string
|
||||
str returns a string representation of the linked list
|
||||
fn (list LinkedList[T]) array() []T
|
||||
array returns an array representation of the linked list
|
||||
fn (mut list LinkedList[T]) next() ?T
|
||||
next implements the iteration interface to use LinkedList with V's `for` loop syntax.
|
||||
fn (mut list LinkedList[T]) iterator() ListIter[T]
|
||||
iterator returns a new iterator instance for the `list`.
|
||||
fn (mut iter ListIter[T]) next() ?T
|
||||
next returns the next element of the list, or `none` when the end of the list is reached. It is called by V's `for x in iter{` on each iteration.
|
||||
fn (mut heap MinHeap[T]) insert(item T)
|
||||
insert adds an element to the heap.
|
||||
fn (mut heap MinHeap[T]) insert_many(elements []T)
|
||||
insert_many inserts an array of elements into the heap.
|
||||
fn (mut heap MinHeap[T]) pop() !T
|
||||
pop removes the top-most element from the heap.
|
||||
fn (heap MinHeap[T]) peek() !T
|
||||
peek gets the top-most element from the heap without removing it.
|
||||
fn (heap MinHeap[T]) len() int
|
||||
len returns the number of elements in the heap.
|
||||
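A short usage sketch based on the min-heap functions documented above:

```v
import datatypes

mut heap := datatypes.MinHeap[int]{}
heap.insert(3)
heap.insert_many([5, 1, 4])
println(heap.peek()!) // 1, the smallest element stays on top
println(heap.pop()!)  // 1, now removed from the heap
println(heap.len())   // 3
```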
fn (queue Queue[T]) is_empty() bool
|
||||
is_empty checks if the queue is empty
|
||||
fn (queue Queue[T]) len() int
|
||||
len returns the length of the queue
|
||||
fn (queue Queue[T]) peek() !T
|
||||
peek returns the head of the queue (first element added)
|
||||
fn (queue Queue[T]) last() !T
|
||||
last returns the tail of the queue (last element added)
|
||||
fn (queue Queue[T]) index(idx int) !T
|
||||
index returns the element at the given index of the queue
|
||||
fn (mut queue Queue[T]) push(item T)
|
||||
push adds an element to the tail of the queue
|
||||
fn (mut queue Queue[T]) pop() !T
|
||||
pop removes the element at the head of the queue and returns it
|
||||
fn (queue Queue[T]) str() string
|
||||
str returns a string representation of the queue
|
||||
fn (queue Queue[T]) array() []T
|
||||
array returns an array representation of the queue
|
||||
fn (mut rb RingBuffer[T]) push(element T) !
|
||||
push adds an element to the ring buffer.
|
||||
fn (mut rb RingBuffer[T]) pop() !T
|
||||
pop returns the oldest element in the buffer.
|
||||
fn (mut rb RingBuffer[T]) push_many(elements []T) !
|
||||
push_many pushes an array to the buffer.
|
||||
fn (mut rb RingBuffer[T]) pop_many(n u64) ![]T
|
||||
pop_many returns `n` elements of the buffer starting with the oldest one.
|
||||
fn (rb RingBuffer[T]) is_empty() bool
|
||||
is_empty returns `true` if the ring buffer is empty, `false` otherwise.
|
||||
fn (rb RingBuffer[T]) is_full() bool
|
||||
is_full returns `true` if the ring buffer is full, `false` otherwise.
|
||||
fn (rb RingBuffer[T]) capacity() int
|
||||
capacity returns the capacity of the ring buffer.
|
||||
fn (mut rb RingBuffer[T]) clear()
|
||||
clear empties the ring buffer and all pushed elements.
|
||||
fn (rb RingBuffer[T]) occupied() int
|
||||
occupied returns the occupied capacity of the buffer.
|
||||
fn (rb RingBuffer[T]) remaining() int
|
||||
remaining returns the remaining capacity of the buffer.
|
||||
fn (set Set[T]) exists(element T) bool
|
||||
exists checks whether the element exists in the set.
|
||||
fn (mut set Set[T]) add(element T)
|
||||
adds the element to set, if it is not present already.
|
||||
fn (mut set Set[T]) remove(element T)
|
||||
removes the element from set.
|
||||
fn (set Set[T]) pick() !T
|
||||
pick returns an arbitrary element of set, if set is not empty.
|
||||
fn (mut set Set[T]) rest() ![]T
|
||||
rest returns the set consisting of all elements except for the arbitrary element.
|
||||
fn (mut set Set[T]) pop() !T
|
||||
pop returns an arbitrary element and deletes it from the set.
|
||||
fn (mut set Set[T]) clear()
|
||||
clear deletes all elements of the set.
|
||||
fn (l Set[T]) == (r Set[T]) bool
|
||||
== checks whether the two given sets are equal (i.e. contain all and only the same elements).
|
||||
fn (set Set[T]) is_empty() bool
|
||||
is_empty checks whether the set is empty or not.
|
||||
fn (set Set[T]) size() int
|
||||
size returns the number of elements in the set.
|
||||
fn (set Set[T]) copy() Set[T]
|
||||
copy returns a copy of all the elements in the set.
|
||||
fn (mut set Set[T]) add_all(elements []T)
|
||||
add_all adds the whole `elements` array to the set
|
||||
fn (l Set[T]) @union(r Set[T]) Set[T]
|
||||
@union returns the union of the two sets.
|
||||
fn (l Set[T]) intersection(r Set[T]) Set[T]
|
||||
intersection returns the intersection of sets.
|
||||
fn (l Set[T]) - (r Set[T]) Set[T]
|
||||
- returns the difference of sets.
|
||||
fn (l Set[T]) subset(r Set[T]) bool
|
||||
subset returns true if the set `r` is a subset of the set `l`.
|
||||
fn (stack Stack[T]) is_empty() bool
|
||||
is_empty checks if the stack is empty
|
||||
fn (stack Stack[T]) len() int
|
||||
len returns the length of the stack
|
||||
fn (stack Stack[T]) peek() !T
|
||||
peek returns the top of the stack
|
||||
fn (mut stack Stack[T]) push(item T)
|
||||
push adds an element to the top of the stack
|
||||
fn (mut stack Stack[T]) pop() !T
|
||||
pop removes the element at the top of the stack and returns it
|
||||
fn (stack Stack[T]) str() string
|
||||
str returns a string representation of the stack
|
||||
fn (stack Stack[T]) array() []T
|
||||
array returns an array representation of the stack
|
||||
enum Direction {
|
||||
front
|
||||
back
|
||||
}
|
||||
struct AABB {
|
||||
pub mut:
|
||||
x f64
|
||||
y f64
|
||||
width f64
|
||||
height f64
|
||||
}
|
||||
struct BSTree[T] {
|
||||
mut:
|
||||
root &BSTreeNode[T] = unsafe { 0 }
|
||||
}
|
||||
Pure Binary Search Tree implementation
|
||||
|
||||
Pure V implementation of the Binary Search Tree. Time complexity of the main operations is O(log N); space complexity is O(N).
|
||||
struct DoublyLinkedList[T] {
|
||||
mut:
|
||||
head &DoublyListNode[T] = unsafe { 0 }
|
||||
tail &DoublyListNode[T] = unsafe { 0 }
|
||||
// Internal iter pointer for allowing safe modification
|
||||
// of the list while iterating. TODO: use an option
|
||||
// instead of a pointer to determine it is initialized.
|
||||
iter &DoublyListIter[T] = unsafe { 0 }
|
||||
len int
|
||||
}
|
||||
DoublyLinkedList[T] represents a generic doubly linked list of elements, each of type T.
|
||||
struct DoublyListIter[T] {
|
||||
mut:
|
||||
node &DoublyListNode[T] = unsafe { 0 }
|
||||
}
|
||||
DoublyListIter[T] is an iterator for DoublyLinkedList. It starts from *the start* and moves forwards to *the end* of the list. It can be used with V's `for x in iter {` construct. One list can have multiple independent iterators, pointing to different positions/places in the list. A DoublyListIter iterator instance always traverses the list from *start to finish*.
|
||||
struct DoublyListIterBack[T] {
|
||||
mut:
|
||||
node &DoublyListNode[T] = unsafe { 0 }
|
||||
}
|
||||
DoublyListIterBack[T] is an iterator for DoublyLinkedList. It starts from *the end* and moves backwards to *the start* of the list. It can be used with V's `for x in iter {` construct. One list can have multiple independent iterators, pointing to different positions/places in the list. A DoublyListIterBack iterator instance always traverses the list from *finish to start*.
|
||||
struct LinkedList[T] {
|
||||
mut:
|
||||
head &ListNode[T] = unsafe { 0 }
|
||||
len int
|
||||
// Internal iter pointer for allowing safe modification
|
||||
// of the list while iterating. TODO: use an option
|
||||
// instead of a pointer to determine if it is initialized.
|
||||
iter &ListIter[T] = unsafe { 0 }
|
||||
}
|
||||
struct ListIter[T] {
|
||||
mut:
|
||||
node &ListNode[T] = unsafe { 0 }
|
||||
}
|
||||
ListIter[T] is an iterator for LinkedList. It can be used with V's `for x in iter {` construct. One list can have multiple independent iterators, pointing to different positions/places in the list. An iterator instance always traverses the list from start to finish.
|
||||
struct ListNode[T] {
|
||||
mut:
|
||||
data T
|
||||
next &ListNode[T] = unsafe { 0 }
|
||||
}
|
||||
struct MinHeap[T] {
|
||||
mut:
|
||||
data []T
|
||||
}
|
||||
MinHeap is a binary minimum heap data structure.
|
||||
struct Quadtree {
|
||||
pub mut:
|
||||
perimeter AABB
|
||||
capacity int
|
||||
depth int
|
||||
level int
|
||||
particles []AABB
|
||||
nodes []Quadtree
|
||||
}
|
||||
fn (mut q Quadtree) create(x f64, y f64, width f64, height f64, capacity int, depth int, level int) Quadtree
|
||||
create returns a new configurable root node for the tree.
|
||||
fn (mut q Quadtree) insert(p AABB)
|
||||
insert recursively adds a particle in the correct index of the tree.
|
||||
fn (mut q Quadtree) retrieve(p AABB) []AABB
|
||||
retrieve recursively checks if a particle is in a specific index of the tree.
|
||||
fn (mut q Quadtree) clear()
|
||||
clear flushes out nodes and particles from the tree.
|
||||
fn (q Quadtree) get_nodes() []Quadtree
|
||||
get_nodes recursively returns the subdivisions the tree has.
|
||||
struct Queue[T] {
|
||||
mut:
|
||||
elements LinkedList[T]
|
||||
}
|
||||
struct RingBuffer[T] {
|
||||
mut:
|
||||
reader int // index of the tail where data is going to be read
|
||||
writer int // index of the head where data is going to be written
|
||||
content []T
|
||||
}
|
||||
RingBuffer represents a ring buffer also known as a circular buffer.
|
||||
struct Set[T] {
|
||||
mut:
|
||||
elements map[T]u8
|
||||
}
|
||||
struct Stack[T] {
|
||||
mut:
|
||||
elements []T
|
||||
}
|
||||
@@ -1,79 +0,0 @@
|
||||
|
||||
## how internally a heroscript gets parsed for params
|
||||
|
||||
- example to show how a heroscript gets parsed in action with params
|
||||
- params are part of action object
|
||||
|
||||
```heroscript
|
||||
example text to parse (heroscript)
|
||||
|
||||
id:a1 name6:aaaaa
|
||||
name:'need to do something 1'
|
||||
description:
|
||||
'
|
||||
## markdown works in it
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
|
||||
name2: test
|
||||
name3: hi
|
||||
name10:'this is with space' name11:aaa11
|
||||
|
||||
name4: 'aaa'
|
||||
|
||||
//somecomment
|
||||
name5: 'aab'
|
||||
```
|
||||
|
||||
the params are part of the action and are represented as follows for the above:
|
||||
|
||||
```v
|
||||
Params{
|
||||
params: [Param{
|
||||
key: 'id'
|
||||
value: 'a1'
|
||||
}, Param{
|
||||
key: 'name6'
|
||||
value: 'aaaaa'
|
||||
}, Param{
|
||||
key: 'name'
|
||||
value: 'need to do something 1'
|
||||
}, Param{
|
||||
key: 'description'
|
||||
value: '## markdown works in it
|
||||
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
}, Param{
|
||||
key: 'name2'
|
||||
value: 'test'
|
||||
}, Param{
|
||||
key: 'name3'
|
||||
value: 'hi'
|
||||
}, Param{
|
||||
key: 'name10'
|
||||
value: 'this is with space'
|
||||
}, Param{
|
||||
key: 'name11'
|
||||
value: 'aaa11'
|
||||
}, Param{
|
||||
key: 'name4'
|
||||
value: 'aaa'
|
||||
}, Param{
|
||||
key: 'name5'
|
||||
value: 'aab'
|
||||
}]
|
||||
}
|
||||
```
|
||||
@@ -1 +0,0 @@
|
||||
../crystallib/virt/docker/readme.md
|
||||
344
aiprompts/docusaurus/docusaurus_ebook_manual.md
Normal file
@@ -0,0 +1,344 @@
|
||||
# HeroLib Docusaurus Ebook Manual for AI Prompts
|
||||
|
||||
This manual provides a comprehensive guide on how to leverage HeroLib's Docusaurus integration, Doctree, and HeroScript to create and manage technical ebooks, optimized for AI-driven content generation and project management.
|
||||
|
||||
## 1. Core Concepts
|
||||
|
||||
To effectively create ebooks with HeroLib, it's crucial to understand the interplay of three core components:
|
||||
|
||||
* **HeroScript**: A concise scripting language used to define the structure, configuration, and content flow of your Docusaurus site. It acts as the declarative interface for the entire process.
|
||||
* **Docusaurus**: A popular open-source static site generator. HeroLib uses Docusaurus as the underlying framework to render your ebook content into a navigable website.
|
||||
* **Doctree**: HeroLib's content management system. Doctree organizes your markdown files into "collections" and "pages," allowing for structured content retrieval and reuse across multiple projects.
|
||||
|
||||
## 2. Setting Up a Docusaurus Project with HeroLib
|
||||
|
||||
The `docusaurus` module in HeroLib provides the primary interface for managing your ebook projects.
|
||||
|
||||
### 2.1. Defining the Docusaurus Factory (`docusaurus.define`)
|
||||
|
||||
The `docusaurus.define` HeroScript directive configures the global settings for your Docusaurus build environment. This is typically used once at the beginning of your main HeroScript configuration.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
!!docusaurus.define
|
||||
path_build: "/tmp/my_ebook_build"
|
||||
path_publish: "/tmp/my_ebook_publish"
|
||||
production: true
|
||||
update: true
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* `path_build` (string, optional): The local path where the Docusaurus site will be built. Defaults to `~/hero/var/docusaurus/build`.
|
||||
* `path_publish` (string, optional): The local path where the final Docusaurus site will be published (e.g., for deployment). Defaults to `~/hero/var/docusaurus/publish`.
|
||||
* `production` (boolean, optional): If `true`, the site will be built for production (optimized). Default is `false`.
|
||||
* `update` (boolean, optional): If `true`, the Docusaurus template and dependencies will be updated. Default is `false`.
|
||||
|
||||
### 2.2. Adding a Docusaurus Site (`docusaurus.add`)
|
||||
|
||||
The `docusaurus.add` directive defines an individual Docusaurus site (your ebook). You can specify the source of your documentation content, whether it's a local path or a Git repository.
|
||||
|
||||
**HeroScript Example (Local Content):**
|
||||
|
||||
```heroscript
|
||||
!!docusaurus.add
|
||||
name:"my_local_ebook"
|
||||
path:"./my_ebook_content" // Path to your local docs directory
|
||||
open:true // Open in browser after generation
|
||||
```
|
||||
|
||||
**HeroScript Example (Git Repository Content):**
|
||||
|
||||
```heroscript
|
||||
!!docusaurus.add
|
||||
name:"tfgrid_tech_ebook"
|
||||
git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/ebooks/tech"
|
||||
git_reset:true // Reset Git repository before pulling
|
||||
git_pull:true // Pull latest changes
|
||||
git_root:"/tmp/git_clones" // Optional: specify a root directory for git clones
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* `name` (string, optional): A unique name for your Docusaurus site/ebook. Defaults to "main".
|
||||
* `path` (string, optional): The local file system path to the root of your documentation content (e.g., where your `docs` and `cfg` directories are).
|
||||
* `git_url` (string, optional): A Git URL to a repository containing your documentation content. HeroLib will clone/pull this repository.
|
||||
* `git_reset` (boolean, optional): If `true`, the Git repository will be reset to a clean state before pulling. Default is `false`.
|
||||
* `git_pull` (boolean, optional): If `true`, the Git repository will be pulled to get the latest changes. Default is `false`.
|
||||
* `git_root` (string, optional): An optional root directory where Git repositories will be cloned.
|
||||
* `nameshort` (string, optional): A shorter name for the Docusaurus site. Defaults to the value of `name`.
|
||||
* `path_publish` (string, optional): Overrides the factory's `path_publish` for this specific site.
|
||||
* `production` (boolean, optional): Overrides the factory's `production` setting for this specific site.
|
||||
* `watch_changes` (boolean, optional): If `true`, HeroLib will watch for changes in your source `docs` directory and trigger rebuilds. Default is `true`.
|
||||
* `update` (boolean, optional): If `true`, this specific documentation will be updated. Default is `false`.
|
||||
* `open` (boolean, optional): If `true`, the Docusaurus site will be opened in your default browser after generation/development server start. Default is `false`.
|
||||
* `init` (boolean, optional): If `true`, the Docusaurus site will be initialized (e.g., creating missing `docs` directories). Default is `false`.
|
||||
|
||||
## 3. Structuring Content with HeroScript and Doctree
|
||||
|
||||
The actual content and structure of your ebook are defined using HeroScript directives within your site's configuration files (e.g., in a `cfg` directory within your `path` or `git_url` source).
|
||||
|
||||
### 3.1. Site Configuration (`site.config`, `site.config_meta`)
|
||||
|
||||
These directives define the fundamental properties and metadata of your Docusaurus site.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
!!site.config
|
||||
name:"my_awesome_ebook"
|
||||
title:"My Awesome Ebook Title"
|
||||
tagline:"A comprehensive guide to everything."
|
||||
url:"https://my-ebook.example.com"
|
||||
url_home:"docs/"
|
||||
base_url:"/my-ebook/"
|
||||
favicon:"img/favicon.png"
|
||||
copyright:"© 2024 My Organization"
|
||||
|
||||
!!site.config_meta
|
||||
description:"This ebook covers advanced topics in AI and software engineering."
|
||||
image:"https://my-ebook.example.com/img/social_share.png"
|
||||
title:"Advanced AI & Software Engineering Ebook"
|
||||
keywords:"AI, software, engineering, manual, guide"
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* **`site.config`**:
|
||||
* `name` (string, required): Unique identifier for the site.
|
||||
* `title` (string, optional): Main title of the site. Defaults to "My Documentation Site".
|
||||
* `description` (string, optional): General site description.
|
||||
* `tagline` (string, optional): Short tagline for the site.
|
||||
* `favicon` (string, optional): Path to the favicon. Defaults to "img/favicon.png".
|
||||
* `image` (string, optional): General site image (e.g., for social media previews). Defaults to "img/tf_graph.png".
|
||||
* `copyright` (string, optional): Copyright notice. Defaults to "© [Current Year] Example Organization".
|
||||
* `url` (string, optional): The main URL where the site will be hosted.
|
||||
* `base_url` (string, optional): The base URL for Docusaurus (e.g., `/` or `/my-ebook/`).
|
||||
* `url_home` (string, optional): The path to the home page relative to `base_url`.
|
||||
* **`site.config_meta`**: Overrides for specific SEO metadata.
|
||||
* `title` (string, optional): Specific title for SEO (e.g., `<meta property="og:title">`).
|
||||
* `image` (string, optional): Specific image for SEO (e.g., `<meta property="og:image">`).
|
||||
* `description` (string, optional): Specific description for SEO.
|
||||
* `keywords` (string, optional): Comma-separated keywords for SEO.
|
||||
|
||||
### 3.2. Navigation Bar (`site.navbar`, `site.navbar_item`)
|
||||
|
||||
Define the main navigation menu of your Docusaurus site.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
!!site.navbar
|
||||
title:"Ebook Navigation"
|
||||
logo_alt:"Ebook Logo"
|
||||
logo_src:"img/logo.svg"
|
||||
logo_src_dark:"img/logo_dark.svg"
|
||||
|
||||
!!site.navbar_item
|
||||
label:"Introduction"
|
||||
to:"/docs/intro" // Internal Docusaurus path
|
||||
position:"left"
|
||||
|
||||
!!site.navbar_item
|
||||
label:"External Link"
|
||||
href:"https://example.com/external" // External URL
|
||||
position:"right"
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* **`site.navbar`**:
|
||||
* `title` (string, optional): Title displayed in the navbar. Defaults to `site.config.title`.
|
||||
* `logo_alt` (string, optional): Alt text for the logo.
|
||||
* `logo_src` (string, optional): Path to the light mode logo.
|
||||
* `logo_src_dark` (string, optional): Path to the dark mode logo.
|
||||
* **`site.navbar_item`**:
|
||||
* `label` (string, required): Text displayed for the menu item.
|
||||
* `href` (string, optional): External URL for the link.
|
||||
* `to` (string, optional): Internal Docusaurus path (e.g., `/docs/my-page`).
|
||||
* `position` (string, optional): "left" or "right" for placement in the navbar. Defaults to "right".
|
||||
|
||||
### 3.3. Footer (`site.footer`, `site.footer_item`)
|
||||
|
||||
Configure the footer section of your Docusaurus site.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
!!site.footer
|
||||
style:"dark" // "dark" or "light"
|
||||
|
||||
!!site.footer_item
|
||||
title:"Resources" // Grouping title for footer links
|
||||
label:"API Documentation"
|
||||
href:"https://api.example.com/docs"
|
||||
|
||||
!!site.footer_item
|
||||
title:"Community"
|
||||
label:"GitHub"
|
||||
href:"https://github.com/my-org"
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* **`site.footer`**:
|
||||
* `style` (string, optional): "dark" or "light" style for the footer. Defaults to "dark".
|
||||
* **`site.footer_item`**:
|
||||
* `title` (string, required): The title under which this item will be grouped in the footer.
|
||||
* `label` (string, required): Text displayed for the footer link.
|
||||
* `href` (string, optional): External URL for the link.
|
||||
* `to` (string, optional): Internal Docusaurus path.
|
||||
|
||||
### 3.4. Build Destinations (`site.build_dest`, `site.build_dest_dev`)
|
||||
|
||||
Specify where the built Docusaurus site should be deployed. This typically involves an SSH connection defined elsewhere (e.g., `!!site.ssh_connection`).
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
!!site.build_dest
|
||||
ssh_name:"production_server" // Name of a pre-defined SSH connection
|
||||
path:"/var/www/my-ebook" // Remote path on the server
|
||||
|
||||
!!site.build_dest_dev
|
||||
ssh_name:"dev_server"
|
||||
path:"/tmp/dev-ebook"
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* `ssh_name` (string, required): The name of the SSH connection to use for deployment.
|
||||
* `path` (string, required): The destination path on the remote server.
|
||||
|
||||
### 3.5. Importing External Content (`site.import`)
|
||||
|
||||
This powerful feature allows you to pull markdown content and assets from other Git repositories directly into your Docusaurus site's `docs` directory, with optional text replacement. This is ideal for integrating shared documentation or specifications.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
!!site.import
|
||||
url:'https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections/cloud_reinvented'
|
||||
dest:'cloud_reinvented' // Destination subdirectory within your Docusaurus docs folder
|
||||
replace:'NAME:MyName, URGENCY:red' // Optional: comma-separated key:value pairs for text replacement
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* `url` (string, required): The Git URL of the repository or specific path within a repository to import.
|
||||
* `dest` (string, required): The subdirectory within your Docusaurus `docs` folder where the imported content will be placed.
|
||||
* `replace` (string, optional): A comma-separated string of `KEY:VALUE` pairs. During import, all occurrences of `${KEY}` in the imported content will be replaced with `VALUE`.
|
||||
|
||||
### 3.6. Defining Pages and Categories (`site.page_category`, `site.page`)
|
||||
|
||||
This is where you define the actual content pages and how they are organized into categories within your Docusaurus sidebar.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
```heroscript
|
||||
// Define a category
|
||||
!!site.page_category path:'introduction' label:"Introduction to Ebook" position:10
|
||||
|
||||
// Define a page within that category, linking to Doctree content
|
||||
!!site.page path:'introduction' src:"my_doctree_collection:chapter_1_overview"
|
||||
title:"Chapter 1: Overview"
|
||||
description:"A brief introduction to the ebook's content."
|
||||
position:1 // Order within the category
|
||||
hide_title:true // Hide the title on the page itself
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
|
||||
* **`site.page_category`**:
|
||||
* `path` (string, required): The path to the category directory within your Docusaurus `docs` folder (e.g., `introduction` will create `docs/introduction/_category_.json`).
|
||||
* `label` (string, required): The display name for the category in the sidebar.
|
||||
* `position` (int, optional): The order of the category in the sidebar.
|
||||
* `sitename` (string, optional): If you have multiple Docusaurus sites defined, specify which site this category belongs to. Defaults to the current site's name.
|
||||
* **`site.page`**:
|
||||
* `src` (string, required): **Crucial for Doctree integration.** This specifies the source of the page content in the format `collection_name:page_name`. HeroLib will fetch the markdown content from the specified Doctree collection and page.
|
||||
* `path` (string, required): The relative path and filename for the generated markdown file within your Docusaurus `docs` folder (e.g., `introduction/chapter_1.md`). If only a directory is provided (e.g., `introduction/`), the `page_name` from `src` will be used as the filename.
|
||||
* `title` (string, optional): The title of the page. If not provided, HeroLib will attempt to extract it from the markdown content or use the `page_name`.
|
||||
* `description` (string, optional): A short description for the page, used in frontmatter.
|
||||
* `position` (int, optional): The order of the page within its category.
|
||||
* `hide_title` (boolean, optional): If `true`, the title will not be displayed on the page itself.
|
||||
* `draft` (boolean, optional): If `true`, the page will be marked as a draft and not included in production builds.
|
||||
* `title_nr` (int, optional): If set, HeroLib will re-number the markdown headings (e.g., `title_nr:3` will make `# Heading` become `### Heading`). Useful for consistent heading levels across imported content.
|
||||
|
||||
### 3.7. Doctree Integration Details
|
||||
|
||||
The `site.page` directive's `src` parameter (`collection_name:page_name`) is the bridge to your Doctree content.
|
||||
|
||||
**How Doctree Works:**
|
||||
|
||||
1. **Collections**: Doctree organizes markdown files into logical groups called "collections." A collection is typically a directory containing markdown files and an empty `.collection` file.
|
||||
2. **Scanning**: You define which collections Doctree should scan using `!!doctree.scan` in a HeroScript file (e.g., `doctree.heroscript`).
|
||||
**Example `doctree.heroscript`:**
|
||||
```heroscript
|
||||
!!doctree.scan git_url:"https://git.threefold.info/tfgrid/docs_tfgrid4/src/branch/main/collections"
|
||||
```
|
||||
This will pull the `collections` directory from the specified Git URL and make its contents available to Doctree.
|
||||
3. **Page Retrieval**: When `site.page` references `src:"my_collection:my_page"`, HeroLib's `doctreeclient` fetches the content of `my_page.md` from the `my_collection` collection that Doctree has scanned.
|
||||
|
||||
## 4. Building and Developing Your Ebook
|
||||
|
||||
Once your HeroScript configuration is set up, HeroLib provides commands to build and serve your Docusaurus ebook.
|
||||
|
||||
### 4.1. Generating Site Files (`site.generate()`)
|
||||
|
||||
The `site.generate()` function (called internally by `build`, `dev`, etc.) performs the core file generation:
|
||||
* Copies Docusaurus template files.
|
||||
* Copies your site's `src` and `static` assets.
|
||||
* Generates Docusaurus configuration JSON files (`main.json`, `navbar.json`, `footer.json`) from your HeroScript `site.config`, `site.navbar`, and `site.footer` directives.
|
||||
* Copies your source `docs` directory.
|
||||
* Processes `site.page` and `site.page_category` directives using the `sitegen` module to create the final markdown files and `_category_.json` files in the Docusaurus `docs` directory, fetching content from Doctree.
|
||||
* Handles `site.import` directives, pulling external content and performing replacements.
|
||||
|
||||
### 4.2. Local Development
|
||||
|
||||
HeroLib integrates with Docusaurus's development server for live preview.
|
||||
|
||||
**HeroScript Example:**
|
||||
|
||||
This script can be stored as `example_docusaurus.vsh` and then used to generate and develop an ebook:
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.web.docusaurus
|
||||
import os
|
||||
|
||||
const cfgpath = os.dir(@FILE)
|
||||
|
||||
docusaurus.new(
|
||||
heroscript: '
|
||||
|
||||
// !!docusaurus.define
|
||||
// path_build: "/tmp/docusaurus_build"
|
||||
// path_publish: "/tmp/docusaurus_publish"
|
||||
|
||||
!!docusaurus.add name:"tfgrid_docs"
|
||||
path:"${cfgpath}"
|
||||
|
||||
!!docusaurus.dev
|
||||
|
||||
'
|
||||
)!
|
||||
|
||||
```
|
||||
|
||||
|
||||
The following script, which we suggest calling `do.vsh`, should be put in the directory where the ebook is:
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -gc none -cg -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.web.docusaurus
import os
|
||||
|
||||
const cfgpath = os.dir(@FILE) + '/cfg'
|
||||
|
||||
docusaurus.new(heroscript_path:cfgpath)!
|
||||
```
|
||||
|
||||
By just calling `do.vsh` we can build and develop the ebook.
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
../lib/develop/gittools/README.md
|
||||
141
aiprompts/herolib_advanced/advanced_paths.md
Normal file
@@ -0,0 +1,141 @@
|
||||
# Pathlib Module: Advanced Listing and Filtering
|
||||
|
||||
The `pathlib` module provides powerful capabilities for listing and filtering files and directories, especially through its `list` method. This document explains how to leverage advanced features like regular expressions and various filtering options.
|
||||
|
||||
## Advanced File Listing with `path.list()`
|
||||
|
||||
The `path.list()` method allows you to retrieve a `PathList` object containing `Path` objects that match specified criteria.
|
||||
|
||||
### `ListArgs` Parameters
|
||||
|
||||
The `list` method accepts a `ListArgs` struct to control its behavior:
|
||||
|
||||
```v
|
||||
pub struct ListArgs {
|
||||
pub mut:
|
||||
regex []string // A slice of regular expressions to filter files.
|
||||
recursive bool = true // Whether to list files recursively (default true).
|
||||
ignoredefault bool = true // Whether to ignore files starting with . and _ (default true).
|
||||
include_links bool // Whether to include symbolic links in the list.
|
||||
dirs_only bool // Whether to include only directories in the list.
|
||||
files_only bool // Whether to include only files in the list.
|
||||
}
|
||||
```
|
||||
|
||||
### Usage Examples
|
||||
|
||||
Here are examples demonstrating how to use these advanced filtering options:
|
||||
|
||||
#### 1. Listing Files by Regex Pattern
|
||||
|
||||
You can use regular expressions to filter files based on their names or extensions. The `regex` parameter accepts a slice of strings, where each string is a regex pattern.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
// Get a directory path
|
||||
mut dir := pathlib.get('/some/directory')!
|
||||
|
||||
// List only Vlang files (ending with .v)
|
||||
mut vlang_files := dir.list(
|
||||
regex: [r'.*\.v$']
|
||||
)!
|
||||
|
||||
// List only image files (png, jpg, svg, jpeg)
|
||||
mut image_files := dir.list(
|
||||
regex: [r'.*\.png$', r'.*\.jpg$', r'.*\.svg$', r'.*\.jpeg$']
|
||||
)!
|
||||
|
||||
// List files containing "test" in their name (case-insensitive)
|
||||
mut test_files := dir.list(
|
||||
regex: [r'(?i).*test.*'] // (?i) makes the regex case-insensitive
|
||||
)!
|
||||
|
||||
for path_obj in vlang_files.paths {
|
||||
println(path_obj.path)
|
||||
}
|
||||
```
|
||||
|
||||
#### 2. Controlling Recursion
|
||||
|
||||
By default, `list()` is recursive. You can disable recursion to list only items in the current directory.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
mut dir := pathlib.get('/some/directory')!
|
||||
|
||||
// List only top-level files and directories (non-recursive)
|
||||
mut top_level_items := dir.list(
|
||||
recursive: false
|
||||
)!
|
||||
|
||||
for path_obj in top_level_items.paths {
|
||||
println(path_obj.path)
|
||||
}
|
||||
```
|
||||
|
||||
#### 3. Including or Excluding Hidden Files
|
||||
|
||||
The `ignoredefault` parameter controls whether files and directories starting with `.` or `_` are ignored.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
mut dir := pathlib.get('/some/directory')!
|
||||
|
||||
// List all files and directories, including hidden ones
|
||||
mut all_items := dir.list(
|
||||
ignoredefault: false
|
||||
)!
|
||||
|
||||
for path_obj in all_items.paths {
|
||||
println(path_obj.path)
|
||||
}
|
||||
```
|
||||
|
||||
#### 4. Including Symbolic Links
|
||||
|
||||
By default, symbolic links are ignored when walking the directory structure. Set `include_links` to `true` to include them.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
mut dir := pathlib.get('/some/directory')!
|
||||
|
||||
// List files and directories, including symbolic links
|
||||
mut items_with_links := dir.list(
|
||||
include_links: true
|
||||
)!
|
||||
|
||||
for path_obj in items_with_links.paths {
|
||||
println(path_obj.path)
|
||||
}
|
||||
```
|
||||
|
||||
#### 5. Listing Only Directories or Only Files
|
||||
|
||||
Use `dirs_only` or `files_only` to restrict the results to only directories or only files.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
|
||||
mut dir := pathlib.get('/some/directory')!
|
||||
|
||||
// List only directories (recursive)
|
||||
mut only_dirs := dir.list(
|
||||
dirs_only: true
|
||||
)!
|
||||
|
||||
// List only files (non-recursive)
|
||||
mut only_files := dir.list(
|
||||
files_only: true,
|
||||
recursive: false
|
||||
)!
|
||||
|
||||
for path_obj in only_dirs.paths {
|
||||
println(path_obj.path)
|
||||
}
|
||||
```
|
||||
|
||||
By combining these parameters, you can create highly specific and powerful file system listing operations tailored to your needs.
|
||||
323
aiprompts/herolib_advanced/builder.md
Normal file
@@ -0,0 +1,323 @@
|
||||
# Builder Module: System Automation and Remote Execution
|
||||
|
||||
The `builder` module in Herolib provides a powerful framework for automating system tasks and executing commands on both local and remote machines. It offers a unified interface to manage nodes, execute commands, perform file operations, and maintain persistent state.
|
||||
|
||||
## Key Components
|
||||
|
||||
- **`BuilderFactory`**: Responsible for creating and managing `Node` instances.
|
||||
- **`Node`**: Represents a target system (local or remote). It encapsulates system properties (platform, CPU type, environment variables) and provides methods for interaction.
|
||||
- **`Executor`**: An interface (implemented by `ExecutorLocal` and `ExecutorSSH`) that handles the actual command execution and file operations on the target system.
|
||||
- **NodeDB (via `Node.done` map)**: A key-value store within each `Node` for persistent state, caching, and tracking execution history.
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Initializing a Builder and Node
|
||||
|
||||
First, import the `builder` module and create a new `BuilderFactory` instance. Then, create a `Node` object, which can represent either the local machine or a remote server.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder
|
||||
|
||||
// Create a new builder factory
|
||||
mut b := builder.new()!
|
||||
|
||||
// Create a node for the local machine
|
||||
mut local_node := b.node_local()!
|
||||
|
||||
// Create a node for a remote server via SSH
|
||||
// Format: "user@ip_address:port" or "ip_address:port" or "ip_address"
|
||||
mut remote_node := b.node_new(ipaddr: "root@195.192.213.2:2222")!
|
||||
|
||||
// Node with custom name and debug enabled
|
||||
mut named_debug_node := b.node_new(
|
||||
name: "my_remote_server",
|
||||
ipaddr: "user@server.example.com:22",
|
||||
debug: true
|
||||
)!
|
||||
```
|
||||
|
||||
### `Node` Properties
|
||||
|
||||
A `Node` object automatically detects and caches system information. You can access these properties:
|
||||
|
||||
```v
|
||||
// Get platform type (e.g., .osx, .ubuntu, .alpine, .arch)
|
||||
println(node.platform)
|
||||
|
||||
// Get CPU architecture (e.g., .intel, .arm)
|
||||
println(node.cputype)
|
||||
|
||||
// Get hostname
|
||||
println(node.hostname)
|
||||
|
||||
// Get environment variables
|
||||
env_vars := node.environ_get()!
|
||||
println(env_vars['HOME'])
|
||||
|
||||
// Get node information (category, sshkey, user, ipaddress, port)
|
||||
info := node.info()
|
||||
println(info['category'])
|
||||
```
|
||||
|
||||
## Command Execution
|
||||
|
||||
The `Node` object provides methods to execute commands on the target system.
|
||||
|
||||
### `node.exec(args ExecArgs) !string`
|
||||
|
||||
Executes a command and returns its standard output.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { ExecArgs }
|
||||
|
||||
// Execute a command with stdout
|
||||
result := node.exec(cmd: "ls -la /tmp", stdout: true)!
|
||||
println(result)
|
||||
|
||||
// Execute silently (no stdout)
|
||||
node.exec(cmd: "mkdir -p /tmp/my_dir", stdout: false)!
|
||||
```
|
||||
|
||||
### `node.exec_silent(cmd string) !string`
|
||||
|
||||
Executes a command silently (no stdout) and returns its output.
|
||||
|
||||
```v
|
||||
output := node.exec_silent("echo 'Hello from remote!'")!
|
||||
println(output)
|
||||
```
|
||||
|
||||
### `node.exec_interactive(cmd string) !`
|
||||
|
||||
Executes a command in an interactive shell.
|
||||
|
||||
```v
|
||||
// This will open an interactive shell session
|
||||
node.exec_interactive("bash")!
|
||||
```
|
||||
|
||||
### `node.exec_cmd(args NodeExecCmd) !string`
|
||||
|
||||
A more advanced command execution method that supports caching, periodic execution, and temporary script handling.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { NodeExecCmd }
|
||||
|
||||
// Execute a command, cache its result for 48 hours (48*3600 seconds)
|
||||
// and provide a description for logging.
|
||||
result := node.exec_cmd(
|
||||
cmd: "apt-get update",
|
||||
period: 48 * 3600,
|
||||
description: "Update system packages"
|
||||
)!
|
||||
println(result)
|
||||
|
||||
// Execute a multi-line script
|
||||
script_output := node.exec_cmd(
|
||||
cmd: "
|
||||
echo 'Starting script...'
|
||||
ls -la /
|
||||
echo 'Script finished.'
|
||||
",
|
||||
name: "my_custom_script",
|
||||
stdout: true
|
||||
)!
|
||||
println(script_output)
|
||||
```
|
||||
|
||||
### `node.exec_retry(args ExecRetryArgs) !string`
|
||||
|
||||
Executes a command with retries until it succeeds or a timeout is reached.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { ExecRetryArgs }
|
||||
|
||||
// Try to connect to a service, retrying every 100ms for up to 10 seconds
|
||||
result := node.exec_retry(
|
||||
cmd: "curl --fail http://localhost:8080/health",
|
||||
retrymax: 100, // 100 retries
|
||||
period_milli: 100, // 100ms sleep between retries
|
||||
timeout: 10 // 10 seconds total timeout
|
||||
)!
|
||||
println("Service is up: ${result}")
|
||||
```
|
||||
|
||||
### `node.cmd_exists(cmd string) bool`
|
||||
|
||||
Checks if a command exists on the target system.
|
||||
|
||||
```v
|
||||
if node.cmd_exists("docker") {
|
||||
println("Docker is installed.")
|
||||
} else {
|
||||
println("Docker is not installed.")
|
||||
}
|
||||
```
|
||||
|
||||
## File System Operations
|
||||
|
||||
The `Node` object provides comprehensive file and directory management capabilities.
|
||||
|
||||
### `node.file_write(path string, text string) !`
|
||||
|
||||
Writes content to a file on the target system.
|
||||
|
||||
```v
|
||||
node.file_write("/tmp/my_file.txt", "This is some content.")!
|
||||
```
|
||||
|
||||
### `node.file_read(path string) !string`
|
||||
|
||||
Reads content from a file on the target system.
|
||||
|
||||
```v
|
||||
content := node.file_read("/tmp/my_file.txt")!
|
||||
println(content)
|
||||
```
|
||||
|
||||
### `node.file_exists(path string) bool`
|
||||
|
||||
Checks if a file or directory exists on the target system.
|
||||
|
||||
```v
|
||||
if node.file_exists("/tmp/my_file.txt") {
|
||||
println("File exists.")
|
||||
}
|
||||
```
|
||||
|
||||
### `node.delete(path string) !`
|
||||
|
||||
Deletes a file or directory (recursively for directories) on the target system.
|
||||
|
||||
```v
|
||||
node.delete("/tmp/my_dir")!
|
||||
```
|
||||
|
||||
### `node.list(path string) ![]string`
|
||||
|
||||
Lists the contents of a directory on the target system.
|
||||
|
||||
```v
|
||||
files := node.list("/home/user")!
|
||||
for file in files {
|
||||
println(file)
|
||||
}
|
||||
```
|
||||
|
||||
### `node.dir_exists(path string) bool`
|
||||
|
||||
Checks if a directory exists on the target system.
|
||||
|
||||
```v
|
||||
if node.dir_exists("/var/log") {
|
||||
println("Log directory exists.")
|
||||
}
|
||||
```
|
||||
|
||||
### File Transfers (`node.upload` and `node.download`)
|
||||
|
||||
Transfer files between the local machine and the target node using `rsync` or `scp`.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { SyncArgs }
|
||||
|
||||
// Upload a local file to the remote node
|
||||
node.upload(
|
||||
source: "/local/path/to/my_script.sh",
|
||||
dest: "/tmp/remote_script.sh",
|
||||
stdout: true // Show rsync/scp output
|
||||
)!
|
||||
|
||||
// Download a file from the remote node to the local machine
|
||||
node.download(
|
||||
source: "/var/log/syslog",
|
||||
dest: "/tmp/local_syslog.log",
|
||||
stdout: false
|
||||
)!
|
||||
|
||||
// Upload a directory, ignoring .git and examples folders, and deleting extra files on destination
|
||||
node.upload(
|
||||
source: "/local/repo/",
|
||||
dest: "~/code/my_project/",
|
||||
ignore: [".git/*", "examples/"],
|
||||
delete: true,
|
||||
fast_rsync: true
|
||||
)!
|
||||
```
|
||||
|
||||
## Node Database (`node.done`)
|
||||
|
||||
The `node.done` map provides a simple key-value store for persistent data on the node. This data is cached in Redis.
|
||||
|
||||
```v
|
||||
// Store a value
|
||||
node.done_set("setup_complete", "true")!
|
||||
|
||||
// Retrieve a value
|
||||
status := node.done_get("setup_complete") or { "false" }
|
||||
println("Setup complete: ${status}")
|
||||
|
||||
// Check if a key exists
|
||||
if node.done_exists("initial_config") {
|
||||
println("Initial configuration done.")
|
||||
}
|
||||
|
||||
// Print all stored 'done' items
|
||||
node.done_print()
|
||||
|
||||
// Reset all stored 'done' items
|
||||
node.done_reset()!
|
||||
```
|
||||
|
||||
## Bootstrapping and Updates
|
||||
|
||||
The `bootstrapper` module provides functions for installing and updating Herolib components on nodes.
|
||||
|
||||
### `node.hero_install() !`
|
||||
|
||||
Installs the Herolib environment on the node.
|
||||
|
||||
```v
|
||||
node.hero_install()!
|
||||
```
|
||||
|
||||
### `node.hero_update(args HeroUpdateArgs) !`
|
||||
|
||||
Updates the Herolib code on the node, with options for syncing from local, git reset, or git pull.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { HeroUpdateArgs }
|
||||
|
||||
// Sync local Herolib code to the remote node (full sync)
|
||||
node.hero_update(sync_from_local: true, sync_full: true)!
|
||||
|
||||
// Reset git repository on the remote node and pull latest from 'dev' branch
|
||||
node.hero_update(git_reset: true, branch: "dev")!
|
||||
```
|
||||
|
||||
### `node.vscript(args VScriptArgs) !`
|
||||
|
||||
Uploads and executes a Vlang script (`.vsh` or `.v`) on the remote node.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { VScriptArgs }
|
||||
|
||||
// Upload and execute a local V script on the remote node
|
||||
node.vscript(path: "/local/path/to/my_script.vsh", sync_from_local: true)!
|
||||
```
|
||||
|
||||
## Port Forwarding
|
||||
|
||||
The `portforward_to_local` function allows forwarding a remote port on an SSH host to a local port.
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.builder { portforward_to_local, ForwardArgsToLocal }
|
||||
|
||||
// Forward remote port 8080 on 192.168.1.100 to local port 9000
|
||||
portforward_to_local(
|
||||
name: "my_app_forward",
|
||||
address: "192.168.1.100",
|
||||
remote_port: 8080,
|
||||
local_port: 9000
|
||||
)!
|
||||
```
|
||||
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import os
|
||||
import flag
|
||||
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application('compile.vsh')
|
||||
fp.version('v0.1.0')
|
||||
fp.description('Compile hero binary in debug or production mode')
|
||||
fp.skip_executable()
|
||||
|
||||
prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||
|
||||
if help_requested {
|
||||
println(fp.usage())
|
||||
exit(0)
|
||||
}
|
||||
|
||||
additional_args := fp.finalize() or {
|
||||
eprintln(err)
|
||||
println(fp.usage())
|
||||
exit(1)
|
||||
}
|
||||
420
aiprompts/herolib_advanced/osal.md
Normal file
@@ -0,0 +1,420 @@
|
||||
# OSAL Core Module (freeflowuniverse.herolib.osal.core)
|
||||
|
||||
This document describes the core functionalities of the Operating System Abstraction Layer (OSAL) module, designed for platform-independent system operations in V.
|
||||
|
||||
```v
|
||||
//example how to get started
|
||||
|
||||
import freeflowuniverse.herolib.osal.core as osal
|
||||
|
||||
osal.exec(...)!
|
||||
|
||||
```
|
||||
|
||||
|
||||
## 1. Process Management
|
||||
|
||||
### `osal.exec(cmd: Command) !Job`
|
||||
Executes a shell command with extensive configuration.
|
||||
* **Parameters**:
|
||||
* `cmd` (`Command` struct):
|
||||
* `cmd` (string): The command string.
|
||||
* `timeout` (int, default: 3600): Max execution time in seconds.
|
||||
* `retry` (int): Number of retries on failure.
|
||||
* `work_folder` (string): Working directory.
|
||||
* `environment` (map[string]string): Environment variables.
|
||||
* `stdout` (bool, default: true): Show command output.
|
||||
* `raise_error` (bool, default: true): Raise V error on failure.
|
||||
* `ignore_error` (bool): Do not raise error, just report.
|
||||
* `debug` (bool): Enable debug output.
|
||||
* `shell` (bool): Execute in interactive shell.
|
||||
* `async` (bool): Run command asynchronously.
|
||||
* `runtime` (`RunTime` enum): Specify runtime (`.bash`, `.python`, etc.).
|
||||
* **Returns**: `Job` struct (contains `status`, `output`, `error`, `exit_code`, `start`, `end`).
|
||||
* **Error Handling**: Returns `JobError` with `error_type` (`.exec`, `.timeout`, `.args`).
|
||||
|
||||
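A minimal usage sketch, assuming the `Command` fields and the `Job.output` field behave as documented above:

```v
import freeflowuniverse.herolib.osal.core as osal

// run a command with a timeout, keep stdout quiet and inspect the resulting Job
job := osal.exec(cmd: 'ls -la /tmp', timeout: 60, stdout: false)!
println(job.output)
```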
### `osal.execute_silent(cmd string) !string`
|
||||
Executes a command silently.
|
||||
* **Parameters**: `cmd` (string): The command string.
|
||||
* **Returns**: `string` (command output).
|
||||
|
||||
### `osal.execute_debug(cmd string) !string`
|
||||
Executes a command with debug output.
|
||||
* **Parameters**: `cmd` (string): The command string.
|
||||
* **Returns**: `string` (command output).
|
||||
|
||||
### `osal.execute_stdout(cmd string) !string`
|
||||
Executes a command and prints output to stdout.
|
||||
* **Parameters**: `cmd` (string): The command string.
|
||||
* **Returns**: `string` (command output).
|
||||
|
||||
### `osal.execute_interactive(cmd string) !`
|
||||
Executes a command in an interactive shell.
|
||||
* **Parameters**: `cmd` (string): The command string.
|
||||
|
||||
### `osal.cmd_exists(cmd string) bool`
|
||||
Checks if a command exists in the system's PATH.
|
||||
* **Parameters**: `cmd` (string): The command name.
|
||||
* **Returns**: `bool`.
|
||||
|
||||
### `osal.processmap_get() !ProcessMap`
|
||||
Scans and returns a map of all running processes.
|
||||
* **Returns**: `ProcessMap` struct (contains `processes` (`[]ProcessInfo`), `lastscan`, `state`, `pids`).
|
||||
|
||||
### `osal.processinfo_get(pid int) !ProcessInfo`
|
||||
Retrieves detailed information for a specific process by PID.
|
||||
* **Parameters**: `pid` (int): Process ID.
|
||||
* **Returns**: `ProcessInfo` struct (contains `cpu_perc`, `mem_perc`, `cmd`, `pid`, `ppid`, `rss`).
|
||||
|
||||
### `osal.processinfo_get_byname(name string) ![]ProcessInfo`
|
||||
Retrieves detailed information for processes matching a given name.
|
||||
* **Parameters**: `name` (string): Process name (substring match).
|
||||
* **Returns**: `[]ProcessInfo`.
|
||||
|
||||
### `osal.process_exists(pid int) bool`
|
||||
Checks if a process with a given PID exists.
|
||||
* **Parameters**: `pid` (int): Process ID.
|
||||
* **Returns**: `bool`.
|
||||
|
||||
### `osal.processinfo_with_children(pid int) !ProcessMap`
|
||||
Returns a process and all its child processes.
|
||||
* **Parameters**: `pid` (int): Parent Process ID.
|
||||
* **Returns**: `ProcessMap`.
|
||||
|
||||
### `osal.processinfo_children(pid int) !ProcessMap`
|
||||
Returns all child processes for a given PID.
|
||||
* **Parameters**: `pid` (int): Parent Process ID.
|
||||
* **Returns**: `ProcessMap`.
|
||||
|
||||
### `osal.process_kill_recursive(args: ProcessKillArgs) !`
|
||||
Kills a process and all its children by name or PID.
|
||||
* **Parameters**:
|
||||
* `args` (`ProcessKillArgs` struct):
|
||||
* `name` (string): Process name.
|
||||
* `pid` (int): Process ID.
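
A short sketch combining the process helpers above; the process names used here are purely illustrative:

```v
// list processes whose name contains 'redis'
procs := osal.processinfo_get_byname('redis')!
for p in procs {
    println('${p.pid} ${p.cmd} cpu:${p.cpu_perc}%')
}

// kill a process tree by name (illustrative name)
osal.process_kill_recursive(name: 'myservice')!
```
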
### `osal.whoami() !string`

Returns the current username.

* **Returns**: `string`.

## 2. Network Utilities

### `osal.ping(args: PingArgs) !PingResult`

Checks host reachability.

* **Parameters**:
  * `args` (`PingArgs` struct):
    * `address` (string, required): IP address or hostname.
    * `count` (u8, default: 1): Number of pings.
    * `timeout` (u16, default: 1): Timeout in seconds per ping.
    * `retry` (u8): Number of retry attempts.
* **Returns**: `PingResult` enum (`.ok`, `.timeout`, `.unknownhost`).

### `osal.tcp_port_test(args: TcpPortTestArgs) bool`

Tests if a TCP port is open on a given address.

* **Parameters**:
  * `args` (`TcpPortTestArgs` struct):
    * `address` (string, required): IP address or hostname.
    * `port` (int, default: 22): TCP port number.
    * `timeout` (u16, default: 2000): Total timeout in milliseconds.
* **Returns**: `bool`.
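
For example (addresses and ports are illustrative):

```v
if osal.ping(address: '8.8.8.8', count: 3)! == .ok {
    println('host is reachable')
}

if osal.tcp_port_test(address: 'localhost', port: 6379, timeout: 1000) {
    println('port 6379 is open')
}
```
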
### `osal.ipaddr_pub_get() !string`

Retrieves the public IP address.

* **Returns**: `string`.

### `osal.is_ip_on_local_interface(ip string) !bool`

Checks if a given IP address is bound to a local network interface.

* **Parameters**: `ip` (string): IP address to check.
* **Returns**: `bool`.

## 3. File System Operations

### `osal.file_write(path string, text string) !`

Writes text content to a file.

* **Parameters**:
  * `path` (string): File path.
  * `text` (string): Content to write.

### `osal.file_read(path string) !string`

Reads content from a file.

* **Parameters**: `path` (string): File path.
* **Returns**: `string` (file content).

### `osal.dir_ensure(path string) !`

Ensures a directory exists, creating it if necessary.

* **Parameters**: `path` (string): Directory path.

### `osal.dir_delete(path string) !`

Deletes a directory if it exists.

* **Parameters**: `path` (string): Directory path.

### `osal.dir_reset(path string) !`

Deletes and then recreates a directory.

* **Parameters**: `path` (string): Directory path.

### `osal.rm(todelete string) !`

Removes files or directories.

* **Parameters**: `todelete` (string): Comma or newline separated list of paths (supports `~` for home directory).
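
A small sketch of the file helpers above (paths are illustrative):

```v
osal.dir_ensure('/tmp/osal_demo')!
osal.file_write('/tmp/osal_demo/hello.txt', 'hello world')!
content := osal.file_read('/tmp/osal_demo/hello.txt')!
println(content)

// cleanup; accepts a comma separated list and `~`
osal.rm('/tmp/osal_demo')!
```
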
## 4. Environment Variables

### `osal.env_set(args: EnvSet)`

Sets an environment variable.

* **Parameters**:
  * `args` (`EnvSet` struct):
    * `key` (string, required): Environment variable name.
    * `value` (string, required): Value to set.
    * `overwrite` (bool, default: true): Overwrite if exists.

### `osal.env_unset(key string)`

Unsets a specific environment variable.

* **Parameters**: `key` (string): Environment variable name.

### `osal.env_unset_all()`

Unsets all environment variables.

### `osal.env_set_all(args: EnvSetAll)`

Sets multiple environment variables.

* **Parameters**:
  * `args` (`EnvSetAll` struct):
    * `env` (map[string]string): Map of key-value pairs.
    * `clear_before_set` (bool): Clear all existing variables before setting.
    * `overwrite_if_exists` (bool, default: true): Overwrite existing variables.

### `osal.env_get(key string) !string`

Retrieves the value of a specific environment variable.

* **Parameters**: `key` (string): Environment variable name.
* **Returns**: `string` (variable value).

### `osal.env_exists(key string) !bool`

Checks if an environment variable exists.

* **Parameters**: `key` (string): Environment variable name.
* **Returns**: `bool`.

### `osal.env_get_default(key string, def string) string`

Retrieves an environment variable or a default value if not found.

* **Parameters**:
  * `key` (string): Environment variable name.
  * `def` (string): Default value.
* **Returns**: `string`.

### `osal.load_env_file(file_path string) !`

Loads environment variables from a specified file.

* **Parameters**: `file_path` (string): Path to the environment file.
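
For example (variable names and values are illustrative):

```v
osal.env_set(key: 'MYAPP_MODE', value: 'production')
mode := osal.env_get('MYAPP_MODE')!
loglevel := osal.env_get_default('MYAPP_LOGLEVEL', 'info')
println('${mode} ${loglevel}')
```
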
## 5. Command & Profile Management

### `osal.cmd_add(args: CmdAddArgs) !`

Adds (copies or symlinks) a binary to system paths and updates user profiles.

* **Parameters**:
  * `args` (`CmdAddArgs` struct):
    * `cmdname` (string): Name of the command (optional, derived from source if empty).
    * `source` (string, required): Path to the binary.
    * `symlink` (bool): Create a symlink instead of copying.
    * `reset` (bool, default: true): Delete existing command if found.
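
A sketch of installing a freshly built binary; the source path and command name are illustrative:

```v
// copy a locally built binary into the hero bin path and expose it as 'mytool'
osal.cmd_add(source: '/tmp/build/mytool', cmdname: 'mytool')!
println(osal.cmd_path('mytool')!)
```
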
### `osal.profile_path_add_hero() !string`

Ensures the `~/hero/bin` path is added to the user's profile.

* **Returns**: `string` (the `~/hero/bin` path).

### `osal.bin_path() !string`

Returns the preferred binary installation path (`~/hero/bin`).

* **Returns**: `string`.

### `osal.hero_path() !string`

Returns the `~/hero` directory path.

* **Returns**: `string`.

### `osal.usr_local_path() !string`

Returns `/usr/local` for Linux or `~/hero` for macOS.

* **Returns**: `string`.

### `osal.profile_path_source() !string`

Returns a source statement for the preferred profile file (e.g., `. /home/user/.zprofile`).

* **Returns**: `string`.

### `osal.profile_path_source_and() !string`

Returns a source statement followed by `&&` for command chaining, or empty if profile doesn't exist.

* **Returns**: `string`.

### `osal.profile_path_add_remove(args: ProfilePathAddRemoveArgs) !`

Adds and/or removes paths from specified or preferred user profiles.

* **Parameters**:
  * `args` (`ProfilePathAddRemoveArgs` struct):
    * `paths_profile` (string): Comma/newline separated list of profile file paths (optional, uses preferred if empty).
    * `paths2add` (string): Comma/newline separated list of paths to add.
    * `paths2delete` (string): Comma/newline separated list of paths to delete.
    * `allprofiles` (bool): Apply to all known profile files.

### `osal.cmd_path(cmd string) !string`

Returns the full path of an executable command using `which`.

* **Parameters**: `cmd` (string): Command name.
* **Returns**: `string` (full path).

### `osal.cmd_delete(cmd string) !`

Deletes commands from their found locations.

* **Parameters**: `cmd` (string): Command name.

### `osal.profile_paths_all() ![]string`

Lists all possible profile file paths in the OS.

* **Returns**: `[]string`.

### `osal.profile_paths_preferred() ![]string`

Lists preferred profile file paths based on the operating system.

* **Returns**: `[]string`.

### `osal.profile_path() !string`

Returns the most preferred profile file path.

* **Returns**: `string`.

## 6. System Information & Utilities

### `osal.platform() !PlatformType`

Identifies the operating system.

* **Returns**: `PlatformType` enum (`.unknown`, `.osx`, `.ubuntu`, `.alpine`, `.arch`, `.suse`).

### `osal.cputype() !CPUType`

Identifies the CPU architecture.

* **Returns**: `CPUType` enum (`.unknown`, `.intel`, `.arm`, `.intel32`, `.arm32`).
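
For example, branching on the detected platform and CPU:

```v
if osal.platform()! == .ubuntu {
    println('running on Ubuntu')
}
if osal.cputype()! == .arm {
    println('running on an ARM cpu')
}
```
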
### `osal.is_linux() !bool`

Checks if the current OS is Linux.

* **Returns**: `bool`.

### `osal.is_osx() !bool`

Checks if the current OS is macOS.

* **Returns**: `bool`.

### `osal.is_ubuntu() !bool`

Checks if the current OS is Ubuntu.

* **Returns**: `bool`.

### `osal.is_osx_arm() !bool`

Checks if the current OS is macOS ARM.

* **Returns**: `bool`.

### `osal.is_linux_arm() !bool`

Checks if the current OS is Linux ARM.

* **Returns**: `bool`.

### `osal.is_osx_intel() !bool`

Checks if the current OS is macOS Intel.

* **Returns**: `bool`.

### `osal.is_linux_intel() !bool`

Checks if the current OS is Linux Intel.

* **Returns**: `bool`.

### `osal.hostname() !string`

Returns the system hostname.

* **Returns**: `string`.

### `osal.initname() !string`

Returns the init system name (e.g., `systemd`, `bash`, `zinit`).

* **Returns**: `string`.

### `osal.sleep(duration int)`

Pauses execution for a specified duration.

* **Parameters**: `duration` (int): Sleep duration in seconds.

### `osal.download(args: DownloadArgs) !pathlib.Path`

Downloads a file from a URL.

* **Parameters**:
  * `args` (`DownloadArgs` struct):
    * `url` (string, required): URL of the file.
    * `name` (string): Optional, derived from filename if empty.
    * `reset` (bool): Force download, remove existing.
    * `hash` (string): Hash for verification.
    * `dest` (string): Destination path.
    * `timeout` (int, default: 180): Download timeout in seconds.
    * `retry` (int, default: 3): Number of retries.
    * `minsize_kb` (u32, default: 10): Minimum expected size in KB.
    * `maxsize_kb` (u32): Maximum expected size in KB.
    * `expand_dir` (string): Directory to expand archive into.
    * `expand_file` (string): File to expand archive into.
* **Returns**: `pathlib.Path` (path to the downloaded file/directory).
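
A minimal download sketch (URL and destination are illustrative):

```v
dest := osal.download(
    url: 'https://example.com/files/archive.tar.gz'
    dest: '/tmp/archive.tar.gz'
    minsize_kb: 100 // fail if the file is smaller than 100 KB
)!
println(dest)
```
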
### `osal.user_exists(username string) bool`

Checks if a user exists on the system.

* **Parameters**: `username` (string): Username to check.
* **Returns**: `bool`.

### `osal.user_id_get(username string) !int`

Retrieves the user ID for a given username.

* **Parameters**: `username` (string): Username.
* **Returns**: `int` (User ID).

### `osal.user_add(args: UserArgs) !int`

Adds a new user to the system.

* **Parameters**:
  * `args` (`UserArgs` struct):
    * `name` (string, required): Username to add.
* **Returns**: `int` (User ID of the added user).

## Enums & Structs

### `enum PlatformType`

Represents the detected operating system.

* Values: `unknown`, `osx`, `ubuntu`, `alpine`, `arch`, `suse`.

### `enum CPUType`

Represents the detected CPU architecture.

* Values: `unknown`, `intel`, `arm`, `intel32`, `arm32`.

### `enum RunTime`

Specifies the runtime environment for command execution.

* Values: `bash`, `python`, `heroscript`, `herocmd`, `v`.

### `enum JobStatus`

Status of an executed command job.

* Values: `init`, `running`, `error_exec`, `error_timeout`, `error_args`, `done`.

### `enum ErrorType`

Types of errors that can occur during job execution.

* Values: `exec`, `timeout`, `args`.

### `enum PingResult`

Result of a ping operation.

* Values: `ok`, `timeout`, `unknownhost`.

### `struct Command`

Configuration for `osal.exec` function. (See `osal.exec` parameters for fields).

### `struct Job`

Result object returned by `osal.exec`. (See `osal.exec` returns for fields).

### `struct JobError`

Error details for failed jobs.

### `struct PingArgs`

Arguments for `osal.ping` function. (See `osal.ping` parameters for fields).

### `struct TcpPortTestArgs`

Arguments for `osal.tcp_port_test` function. (See `osal.tcp_port_test` parameters for fields).

### `struct EnvSet`

Arguments for `osal.env_set` function. (See `osal.env_set` parameters for fields).

### `struct EnvSetAll`

Arguments for `osal.env_set_all` function. (See `osal.env_set_all` parameters for fields).

### `struct CmdAddArgs`

Arguments for `osal.cmd_add` function. (See `osal.cmd_add` parameters for fields).

### `struct ProfilePathAddRemoveArgs`

Arguments for `osal.profile_path_add_remove` function. (See `osal.profile_path_add_remove` parameters for fields).

### `struct ProcessMap`

Contains a list of `ProcessInfo` objects.

### `struct ProcessInfo`

Detailed information about a single process. (See `osal.processinfo_get` returns for fields).

### `struct ProcessKillArgs`

Arguments for `osal.process_kill_recursive` function. (See `osal.process_kill_recursive` parameters for fields).

### `struct DownloadArgs`

Arguments for `osal.download` function. (See `osal.download` parameters for fields).

### `struct UserArgs`

Arguments for `osal.user_add` function. (See `osal.user_add` parameters for fields).

92
aiprompts/herolib_advanced/ourdb.md
Normal file
@@ -0,0 +1,92 @@
# OurTime Module

The `OurTime` module in V provides flexible time handling, supporting relative and absolute time formats, Unix timestamps, and formatting utilities.

## Key Features

- Create time objects from strings or current time
- Relative time expressions (e.g., `+1h`, `-2d`)
- Absolute time formats (e.g., `YYYY-MM-DD HH:mm:ss`)
- Unix timestamp conversion
- Time formatting and warping

## Basic Usage

```v
import freeflowuniverse.herolib.data.ourtime

// Current time
mut t := ourtime.now()

// From string
t2 := ourtime.new('2022-12-05 20:14:35')!

// Get formatted string
println(t2.str()) // e.g., 2022-12-05 20:14

// Get Unix timestamp
println(t2.unix()) // e.g., 1670271275
```

## Time Formats

### Relative Time

Use `s` (seconds), `h` (hours), `d` (days), `w` (weeks), `M` (months), `Q` (quarters), `Y` (years).

```v
// Create with relative time
mut t := ourtime.new('+1w +2d -4h')!

// Warp existing time
mut t2 := ourtime.now()
t2.warp('+1h')!
```

### Absolute Time

Supports `YYYY-MM-DD HH:mm:ss`, `YYYY-MM-DD HH:mm`, `YYYY-MM-DD HH`, `YYYY-MM-DD`, `DD-MM-YYYY`.

```v
t1 := ourtime.new('2022-12-05 20:14:35')!
t2 := ourtime.new('2022-12-05')! // Time defaults to 00:00:00
```

## Methods Overview

### Creation

```v
now_time := ourtime.now()
from_string := ourtime.new('2023-01-15')!
from_epoch := ourtime.new_from_epoch(1673788800)
```

### Formatting

```v
mut t := ourtime.now()
println(t.str()) // YYYY-MM-DD HH:mm
println(t.day()) // YYYY-MM-DD
println(t.key()) // YYYY_MM_DD_HH_mm_ss
println(t.md()) // Markdown format
```

### Operations

```v
mut t := ourtime.now()
t.warp('+1h')! // Move 1 hour forward
unix_ts := t.unix()
is_empty := t.empty()
```

## Error Handling

Time parsing methods return a `Result` type and should be handled with `!` or `or` blocks.

```v
t_valid := ourtime.new('2023-01-01')!
t_invalid := ourtime.new('bad-date') or {
    println('Error: ${err}')
    ourtime.now() // Fallback
}
```

204
aiprompts/herolib_advanced/redis.md
Normal file
@@ -0,0 +1,204 @@
# Redisclient Module

The `redisclient` module in Herolib provides a comprehensive client for interacting with Redis, supporting various commands, caching, queues, and RPC mechanisms.

## Key Features

- **Direct Redis Commands**: Access to a wide range of Redis commands (strings, hashes, lists, keys, etc.).
- **Caching**: Built-in caching mechanism with namespace support and expiration.
- **Queues**: Simple queue implementation using Redis lists.
- **RPC**: Remote Procedure Call (RPC) functionality over Redis queues for inter-service communication.

## Basic Usage

To get a Redis client instance, use `redisclient.core_get()`. By default, it connects to `127.0.0.1:6379`. You can specify a different address and port using the `RedisURL` struct.

```v
import freeflowuniverse.herolib.core.redisclient

// Connect to default Redis instance (127.0.0.1:6379)
mut redis := redisclient.core_get()!

// Or connect to a specific Redis instance
// mut redis_url := redisclient.RedisURL{address: 'my.redis.server', port: 6380}
// mut redis := redisclient.core_get(redis_url)!

// Example: Set and Get a key
redis.set('mykey', 'myvalue')!
value := redis.get('mykey')!
// assert value == 'myvalue'

// Example: Check if a key exists
exists := redis.exists('mykey')!
// assert exists == true

// Example: Delete a key
redis.del('mykey')!
```

## Redis Commands

The `Redis` object provides methods for most standard Redis commands. Here are some examples:

### String Commands

- `set(key string, value string) !`: Sets the string value of a key.
- `get(key string) !string`: Gets the string value of a key.
- `set_ex(key string, value string, ex string) !`: Sets a key with an expiration time in seconds.
- `incr(key string) !int`: Increments the integer value of a key by one.
- `decr(key string) !int`: Decrements the integer value of a key by one.
- `append(key string, value string) !int`: Appends a value to a key.
- `strlen(key string) !int`: Gets the length of the value stored in a key.

```v
redis.set('counter', '10')!
redis.incr('counter')! // counter is now 11
val := redis.get('counter')! // "11"
```

### Hash Commands

- `hset(key string, skey string, value string) !`: Sets the string value of a hash field.
- `hget(key string, skey string) !string`: Gets the value of a hash field.
- `hgetall(key string) !map[string]string`: Gets all fields and values in a hash.
- `hexists(key string, skey string) !bool`: Checks if a hash field exists.
- `hdel(key string, skey string) !int`: Deletes one or more hash fields.

```v
redis.hset('user:1', 'name', 'John Doe')!
redis.hset('user:1', 'email', 'john@example.com')!
user_name := redis.hget('user:1', 'name')! // "John Doe"
user_data := redis.hgetall('user:1')! // map['name':'John Doe', 'email':'john@example.com']
```

### List Commands

- `lpush(key string, element string) !int`: Inserts all specified values at the head of the list stored at key.
- `rpush(key string, element string) !int`: Inserts all specified values at the tail of the list stored at key.
- `lpop(key string) !string`: Removes and returns the first element of the list stored at key.
- `rpop(key string) !string`: Removes and returns the last element of the list stored at key.
- `llen(key string) !int`: Gets the length of a list.
- `lrange(key string, start int, end int) ![]resp.RValue`: Gets a range of elements from a list.

```v
redis.lpush('mylist', 'item1')!
redis.rpush('mylist', 'item2')!
first_item := redis.lpop('mylist')! // "item1"
```

### Set Commands

- `sadd(key string, members []string) !int`: Adds the specified members to the set stored at key.
- `smismember(key string, members []string) ![]int`: Returns if member is a member of the set stored at key.

```v
redis.sadd('myset', ['member1', 'member2'])!
is_member := redis.smismember('myset', ['member1', 'member3'])! // [1, 0]
```

### Key Management

- `keys(pattern string) ![]string`: Finds all keys matching the given pattern.
- `del(key string) !int`: Deletes a key.
- `expire(key string, seconds int) !int`: Sets a key's time to live in seconds.
- `ttl(key string) !int`: Gets the time to live for a key in seconds.
- `flushall() !`: Deletes all the keys of all the existing databases.
- `flushdb() !`: Deletes all the keys of the currently selected database.
- `selectdb(database int) !`: Changes the selected database.

```v
redis.set('temp_key', 'value')!
redis.expire('temp_key', 60)! // Expires in 60 seconds
```

## Redis Cache

The `RedisCache` struct provides a convenient way to implement caching using Redis.

```v
import freeflowuniverse.herolib.core.redisclient

mut redis := redisclient.core_get()!
mut cache := redis.cache('my_app_cache')

// Set a value in cache with expiration (e.g., 3600 seconds)
cache.set('user:profile:123', '{ "name": "Alice" }', 3600)!

// Get a value from cache
cached_data := cache.get('user:profile:123') or {
    // Cache miss, fetch from source
    println('Cache miss for user:profile:123')
    return
}
// println('Cached data: ${cached_data}')

// Check if a key exists in cache
exists := cache.exists('user:profile:123')
// assert exists == true

// Reset the cache for the namespace
cache.reset()!
```

## Redis Queue

The `RedisQueue` struct provides a simple queue mechanism using Redis lists.

```v
import freeflowuniverse.herolib.core.redisclient
import time

mut redis := redisclient.core_get()!
mut my_queue := redis.queue_get('my_task_queue')

// Add items to the queue
my_queue.add('task1')!
my_queue.add('task2')!

// Get an item from the queue with a timeout (e.g., 1000 milliseconds)
task := my_queue.get(1000)!
// assert task == 'task1'

// Pop an item without timeout (returns error if no item)
task2 := my_queue.pop()!
// assert task2 == 'task2'
```

## Redis RPC

The `RedisRpc` struct enables Remote Procedure Call (RPC) over Redis, allowing services to communicate by sending messages to queues and waiting for responses.

```v
import freeflowuniverse.herolib.core.redisclient
import json
import time

mut redis := redisclient.core_get()!
mut rpc_client := redis.rpc_get('my_rpc_service')

// Define a function to process RPC requests (server-side)
fn my_rpc_processor(cmd string, data string) !string {
    // Simulate some processing based on cmd and data
    return 'Processed: cmd=${cmd}, data=${data}'
}

// --- Client Side (calling the RPC) ---
// Call the RPC service
response := rpc_client.call(
    cmd: 'greet'
    data: '{"name": "World"}'
    wait: true
    timeout: 5000 // 5 seconds timeout
)!
// println('RPC Response: ${response}')
// assert response == 'Processed: cmd=greet, data={"name": "World"}'

// --- Server Side (processing RPC requests) ---
// In a separate goroutine or process, you would run:
// rpc_client.process(my_rpc_processor, timeout: 0)! // timeout 0 means no timeout, keeps processing

// Example of how to process a single request (for testing/demonstration)
// In a real application, this would be in a loop or a background worker
// return_queue_name := rpc_client.process(my_rpc_processor, timeout: 1000)!
// result := rpc_client.result(1000, return_queue_name)!
// println('Processed result: ${result}')
```

206
aiprompts/herolib_advanced/spreadsheet.md
Normal file
@@ -0,0 +1,206 @@
# Herolib Spreadsheet Module for AI Prompt Engineering

This document provides an overview and usage instructions for the `freeflowuniverse.herolib.biz.spreadsheet` module, which offers a powerful software representation of a spreadsheet. This module is designed for business modeling, data analysis, and can be leveraged in AI prompt engineering scenarios where structured data manipulation and visualization are required.

## 1. Core Concepts

The spreadsheet module revolves around three main entities: `Sheet`, `Row`, and `Cell`.

### 1.1. Sheet

The `Sheet` is the primary container, representing the entire spreadsheet.

* **Properties:**
  * `name` (string): A unique identifier for the sheet.
  * `rows` (map[string]&Row): A collection of `Row` objects, indexed by their names.
  * `nrcol` (int): The number of columns in the sheet (e.g., 60 for 5 years of monthly data).
  * `params` (SheetParams): Configuration parameters, e.g., `visualize_cur` (boolean to display currency symbols).
  * `currency` (currency.Currency): The default currency for the sheet (e.g., USD), used for automatic conversions.

* **Creation:**

  ```v
  import freeflowuniverse.herolib.biz.spreadsheet

  // Create a new sheet named 'my_financial_sheet' with 60 columns (e.g., 60 months)
  mut my_sheet := spreadsheet.sheet_new(
      name: 'my_financial_sheet'
      nrcol: 60
      visualize_cur: true // Optional: display currency symbols
      curr: 'USD' // Optional: set default currency
  )!

  // Get an existing sheet from the global store
  mut existing_sheet := spreadsheet.sheet_get('my_financial_sheet')!
  ```

* **Key Operations:**
  * `sheet.row_get(name string) !&Row`: Retrieves a row by its name.
  * `sheet.cell_get(row string, col int) !&Cell`: Retrieves a cell by row name and column index.
  * `sheet.row_delete(name string)` / `sheet.delete(name string)`: Deletes a row.
  * `sheet.cells_width(colnr int) !int`: Finds the maximum string length of cells in a given column.
  * `sheet.rows_names_width_max() int`: Returns the maximum width of row names/aliases.
  * `sheet.rows_description_width_max() int`: Returns the maximum width of row descriptions.
  * `sheet.header() ![]string`: Generates column headers (e.g., "M1", "Q1", "Y1") based on `nrcol`.
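
A short sketch of these operations, assuming `my_sheet` was created as above and already contains a row named `'salaries'` (as in the Row example further down):

```v
mut salaries := my_sheet.row_get('salaries')!
println(salaries.values_get())

mut first_cell := my_sheet.cell_get('salaries', 0)!
println(first_cell.repr())

println(my_sheet.header()!) // e.g. ['M1', 'M2', ...]
```
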
### 1.2. Row

A `Row` represents a single horizontal line of data within a `Sheet`.

* **Properties:**
  * `name` (string): Unique identifier for the row.
  * `alias` (string, optional): Alternative name.
  * `description` (string): Textual description.
  * `tags` (string): Space-separated tags for categorization (e.g., "department:hr location:belgium").
  * `cells` ([]Cell): List of `Cell` objects.
  * `aggregatetype` (RowAggregateType): Defines default aggregation for this row (`.sum`, `.avg`, `.max`, `.min`).

* **Creation (within a Sheet):**

  ```v
  // Assuming 'my_sheet' is an existing Sheet object
  mut salaries_row := my_sheet.row_new(
      name: 'salaries'
      tags: 'department:hr location:belgium'
      descr: 'Monthly salaries for HR department in Belgium'
      aggregatetype: .sum
  )!
  ```

* **Key Operations:**
  * `row.values_get() []f64`: Returns all cell values in the row as a list of floats.

### 1.3. Cell

A `Cell` is the fundamental unit of data, storing a numeric value.

* **Properties:**
  * `val` (f64): The numeric value.
  * `empty` (bool): `true` if the cell is empty.

* **Key Operations:**
  * `cell.set(v string) !`: Sets the cell's value. Handles currency strings (e.g., "100 USD") by converting to the sheet's currency.
  * `cell.add(v f64)`: Adds a numeric value to the existing cell value.
  * `cell.repr() string`: Returns a formatted string representation of the value (e.g., "100.00", or "-" if empty).
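
For example, assuming `my_sheet` and the `'salaries'` row from the earlier snippets:

```v
mut cell := my_sheet.cell_get('salaries', 0)!
cell.set('1000 USD')! // converted to the sheet currency
cell.add(250.0)
println(cell.repr()) // formatted value, e.g. "1250.00"
```
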
## 2. Data Aggregation and Transformation

The module provides powerful tools for summarizing and transforming data.

### 2.1. Grouping Rows (`group2row`)

Aggregates selected rows into a new single row based on tags.

```v
// Aggregate rows tagged 'department:dev' or 'department:engineering' into a new row
mut total_salaries_row := my_sheet.group2row(
    name: 'total_dev_engineering_salaries'
    include: ['department:dev', 'department:engineering']
    tags: 'summary:dev_eng'
    descr: 'Total salaries for Development and Engineering departments'
    aggregatetype: .sum // Can be .sum, .avg, .max, .min
)!
```

### 2.2. Transforming Periodicity (`toyear`, `toquarter`)

Creates new sheets with data aggregated into larger time periods.

```v
// Assuming 'monthly_sheet' has 60 columns (monthly data)
mut monthly_sheet := spreadsheet.sheet_new(name: 'monthly_data', nrcol: 60)!
// ... populate monthly_sheet

// Create a new sheet 'yearly_data' with data aggregated by year
mut yearly_sheet := monthly_sheet.toyear(
    name: 'yearly_data'
    namefilter: ['revenue_row', 'expenses_row'] // Optional: filter specific rows
    includefilter: ['category:income'] // Optional: filter by tags
)!

// Create a new sheet 'quarterly_data' with data aggregated by quarter
mut quarterly_sheet := monthly_sheet.toquarter(name: 'quarterly_data')!
```

## 3. Exporting Data

Export sheet data to CSV format.

### 3.1. Export to CSV (`export_csv`)

```v
import os

// Export to a CSV file with default pipe '|' separator
my_sheet.export_csv(path: '~/output.csv')!

// Export with custom comma ',' separator and include empty cells
csv_content_with_empty := my_sheet.export_csv(
    path: '~/output_with_empty.csv'
    separator: ','
    include_empty: true
)!

// Export to a string only (no file)
csv_string := my_sheet.export_csv(path: '')!
println(csv_string)
```

* **`ExportCSVArgs` Parameters:**
  * `path` (string, optional): File path. Empty string returns content as string. `~` is expanded to home directory.
  * `include_empty` (bool, optional, default: `false`): If `true`, empty cells are included.
  * `separator` (string, optional, default: `'|'`): Delimiter character.

## 4. Charting Capabilities

Integrates with ECharts for data visualization. Charting functions return an `echarts.EChartsOption` object.

### 4.1. Common Charting Parameters (`RowGetArgs`)

Used across line, bar, and pie charts to specify data and presentation.

* `rowname` (string, optional): Single row name or comma-separated list.
* `namefilter` ([]string, optional): List of exact row names to include.
* `includefilter` ([]string, optional): List of tags to include.
* `excludefilter` ([]string, optional): List of tags to exclude.
* `period_type` (PeriodType, optional): X-axis period (`.month`, `.quarter`, `.year`).
* `aggregate` (bool, optional, default: `true`): Aggregate multiple matching rows.
* `aggregatetype` (RowAggregateType, optional, default: `.sum`): Aggregation type.
* `unit` (UnitType, optional): Data unit.
* `title`, `title_sub` (string, optional): Chart titles.
* `size` (string, optional): For pie charts, defines radius (e.g., "70%").
* `rowname_show` (bool, optional, default: `true`): Show row name in legend.
* `descr_show` (bool, optional, default: `false`): Show row description (overrides `rowname_show`).
* `description` (string, optional): General chart description.

### 4.2. Chart Types

* **Line Chart (`line_chart`)**: Visualizes trends over time.

  ```v
  import freeflowuniverse.herolib.web.echarts // Required for EChartsOption type

  line_chart_option := my_sheet.line_chart(
      rowname: 'revenue_row,expenses_row'
      period_type: .month
      title: 'Revenue vs. Expenses Over Time'
  )!
  ```

* **Bar Chart (`bar_chart`)**: Compares discrete categories or values.

  ```v
  bar_chart_option := my_sheet.bar_chart(
      rowname: 'profit_row'
      period_type: .quarter
      title: 'Quarterly Profit'
  )!
  ```

* **Pie Chart (`pie_chart`)**: Shows proportions of categories.

  ```v
  pie_chart_option := my_sheet.pie_chart(
      rowname: 'budget_allocation_row'
      period_type: .year
      title: 'Annual Budget Allocation'
      size: '70%'
  )!
  ```

This documentation should provide sufficient information for an AI to understand and utilize the `lib/biz/spreadsheet` module effectively for various data manipulation and visualization tasks.

11
aiprompts/herolib_core/core_curdir_example.md
Normal file
@@ -0,0 +1,11 @@
# Getting the Current Script's Path in Herolib/V Shell

This can be used in any .v or .vsh script and makes it easy to find content located next to the script itself.

```v
#!/usr/bin/env vsh

const script_path = os.dir(@FILE) + '/scripts'
println('Current scripts directory: ${script_path}')
```

44
aiprompts/herolib_core/core_globals.md
Normal file
@@ -0,0 +1,44 @@
## how to remember clients, installers as a global

The following is a good, pragmatic way to remember clients or installers as a global; use it as a best practice.

```v
module docsite

import freeflowuniverse.herolib.core.texttools

__global (
    siteconfigs map[string]&SiteConfig
)

@[params]
pub struct FactoryArgs {
pub mut:
    name string = "default"
}

pub fn new(args FactoryArgs) !&SiteConfig {
    name := texttools.name_fix(args.name)
    siteconfigs[name] = &SiteConfig{
        name: name
    }
    return get(name: name)!
}

pub fn get(args FactoryArgs) !&SiteConfig {
    name := texttools.name_fix(args.name)
    mut sc := siteconfigs[name] or {
        return error('siteconfig with name "${name}" does not exist')
    }
    return sc
}

pub fn default() !&SiteConfig {
    if siteconfigs.len == 0 {
        return new(name: 'default')!
    }
    return get()!
}
```

54
aiprompts/herolib_core/core_heroscript_basics.md
Normal file
@@ -0,0 +1,54 @@
# HeroScript: Vlang Integration

## HeroScript Structure

HeroScript is a concise scripting language with the following structure:

```heroscript
!!actor.action_name
    param1: 'value1'
    param2: 'value with spaces'
    multiline_description: '
        This is a multiline description.
        It can span multiple lines.
    '
    arg1 arg2 // Arguments without keys
```

Key characteristics:

- **Actions**: Start with `!!`, followed by `actor.action_name` (e.g., `!!mailclient.configure`).
- **Parameters**: Defined as `key:value`. Values can be quoted for spaces.
- **Multiline Support**: Parameters like `description` can span multiple lines.
- **Arguments**: Values without keys (e.g., `arg1`).

## Processing HeroScript in Vlang

HeroScript can be parsed into a `playbook.PlayBook` object, allowing structured access to actions and their parameters. This is used in most of the herolib modules and allows configuration or actions to be expressed in a structured way.

```v
import freeflowuniverse.herolib.core.playbook { PlayBook }
import freeflowuniverse.herolib.ui.console

pub fn play(mut plbook PlayBook) ! {
    if plbook.exists_once(filter: 'docusaurus.define') {
        mut action := plbook.get(filter: 'docusaurus.define')!
        mut p := action.params
        // example how we get parameters from the action, see core_params.md for more details
        mut ds := new(
            path: p.get_default('path_publish', '')!
            production: p.get_default_false('production')
        )!
    }

    // Process 'docusaurus.add' actions to configure individual Docusaurus sites
    actions := plbook.find(filter: 'docusaurus.add')!
    for action in actions {
        mut p := action.params
        // do more processing here
    }
}
```

For detailed information on parameter retrieval methods (e.g., `p.get()`, `p.get_int()`, `p.get_default_true()`), refer to `aiprompts/herolib_core/core_params.md`.

25
aiprompts/herolib_core/core_heroscript_playbook.md
Normal file
@@ -0,0 +1,25 @@
# PlayBook

## get & execute a playbook

HeroScript can be parsed into a `playbook.PlayBook` object, allowing structured access to actions and their parameters.

```v
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.core.playcmds

// path string
// text string
// git_url string
// git_pull bool
// git_branch string
// git_reset bool
// session ?&base.Session is optional
mut plbook := playbook.new(path: "....")!

// now we run all the commands as they are pre-defined in herolib, this will execute the playbook and do all actions.
playcmds.run(mut plbook)!
```

107
aiprompts/herolib_core/core_http_client.md
Normal file
@@ -0,0 +1,107 @@
# HTTPConnection Module

The `HTTPConnection` module provides a robust HTTP client for Vlang, supporting JSON, custom headers, retries, and caching.

## Key Features

- Type-safe JSON methods
- Custom headers
- Retry mechanism
- Caching
- URL encoding

## Basic Usage

```v
import freeflowuniverse.herolib.core.httpconnection

// Create a new HTTP connection
mut conn := httpconnection.new(
    name: 'my_api_client'
    url: 'https://api.example.com'
    retry: 3 // Number of retries for failed requests
    cache: true // Enable caching
)!
```

## Integration with Management Classes

To integrate `HTTPConnection` into a management class (e.g., `HetznerManager`), use a method to lazily initialize and return the connection:

```v
// Example: HetznerManager
pub fn (mut h HetznerManager) connection() !&httpconnection.HTTPConnection {
    mut c := h.conn or {
        mut c2 := httpconnection.new(
            name: 'hetzner_${h.name}'
            url: h.baseurl
            cache: true
            retry: 3
        )!
        c2.basic_auth(h.user, h.password)
        c2
    }
    return c
}
```

## Examples

### GET Request with JSON Response

```v
struct User {
    id    int
    name  string
    email string
}

user := conn.get_json_generic[User](
    prefix: 'users/1'
)!
```

### POST Request with JSON Data

```v
struct NewUserResponse {
    id     int
    status string
}

new_user_resp := conn.post_json_generic[NewUserResponse](
    prefix: 'users'
    params: {
        'name': 'Jane Doe'
        'email': 'jane@example.com'
    }
)!
```

### Custom Headers

Set default headers or add them per request:

```v
import net.http { Header }

// Set default header
conn.default_header = http.new_header(key: .authorization, value: 'Bearer your-token')

// Add custom header for a specific request
response := conn.get_json(
    prefix: 'protected/resource'
    header: http.new_header(key: .content_type, value: 'application/json')
)!
```

### Error Handling

Methods return a `Result` type for error handling:

```v
user := conn.get_json_generic[User](
    prefix: 'users/1'
) or {
    println('Error fetching user: ${err}')
    return
}
```

63
aiprompts/herolib_core/core_osal.md
Normal file
@@ -0,0 +1,63 @@
# OSAL Core Module - Key Capabilities (freeflowuniverse.herolib.osal.core)

```v
//example how to get started

import freeflowuniverse.herolib.osal.core as osal

osal.exec(cmd:"ls /")!
```

This document covers the most important core functions; more detailed info can be found in `aiprompts/herolib_advanced/osal.md` if needed.

## Key Functions

### 1. Process Execution

* **`osal.exec(cmd: Command) !Job`**: Execute a shell command.
  * **Key Parameters**: `cmd` (string), `timeout` (int), `retry` (int), `work_folder` (string), `environment` (map[string]string), `stdout` (bool), `raise_error` (bool).
  * **Returns**: `Job` (status, output, error, exit code).
* **`osal.execute_silent(cmd string) !string`**: Execute silently, return output.
* **`osal.cmd_exists(cmd string) bool`**: Check if a command exists.
* **`osal.process_kill_recursive(args: ProcessKillArgs) !`**: Kill a process and its children.

### 2. Network Utilities

* **`osal.ping(args: PingArgs) !PingResult`**: Check host reachability.
  * **Key Parameters**: `address` (string).
  * **Returns**: `PingResult` (`.ok`, `.timeout`, `.unknownhost`).
* **`osal.tcp_port_test(args: TcpPortTestArgs) bool`**: Test if a TCP port is open.
  * **Key Parameters**: `address` (string), `port` (int).
* **`osal.ipaddr_pub_get() !string`**: Get public IP address.

### 3. File System Operations

* **`osal.file_write(path string, text string) !`**: Write text to a file.
* **`osal.file_read(path string) !string`**: Read content from a file.
* **`osal.dir_ensure(path string) !`**: Ensure a directory exists.
* **`osal.rm(todelete string) !`**: Remove files/directories.

### 4. Environment Variables

* **`osal.env_set(args: EnvSet)`**: Set an environment variable.
  * **Key Parameters**: `key` (string), `value` (string).
* **`osal.env_get(key string) !string`**: Get an environment variable's value.
* **`osal.load_env_file(file_path string) !`**: Load variables from a file.

### 5. Command & Profile Management

* **`osal.cmd_add(args: CmdAddArgs) !`**: Add a binary to system paths and update profiles.
  * **Key Parameters**: `source` (string, required), `cmdname` (string).
* **`osal.profile_path_add_remove(args: ProfilePathAddRemoveArgs) !`**: Add/remove paths from profiles.
  * **Key Parameters**: `paths2add` (string), `paths2delete` (string).

### 6. System Information

* **`osal.platform() !PlatformType`**: Identify the operating system.
* **`osal.cputype() !CPUType`**: Identify the CPU architecture.
* **`osal.hostname() !string`**: Get system hostname.

---

92
aiprompts/herolib_core/core_ourtime.md
Normal file
@@ -0,0 +1,92 @@
# OurTime Module

The `OurTime` module in V provides flexible time handling, supporting relative and absolute time formats, Unix timestamps, and formatting utilities.

## Key Features

- Create time objects from strings or current time
- Relative time expressions (e.g., `+1h`, `-2d`)
- Absolute time formats (e.g., `YYYY-MM-DD HH:mm:ss`)
- Unix timestamp conversion
- Time formatting and warping

## Basic Usage

```v
import freeflowuniverse.herolib.data.ourtime

// Current time
mut t := ourtime.now()

// From string
t2 := ourtime.new('2022-12-05 20:14:35')!

// Get formatted string
println(t2.str()) // e.g., 2022-12-05 20:14

// Get Unix timestamp
println(t2.unix()) // e.g., 1670271275
```

## Time Formats

### Relative Time

Use `s` (seconds), `h` (hours), `d` (days), `w` (weeks), `M` (months), `Q` (quarters), `Y` (years).

```v
// Create with relative time
mut t := ourtime.new('+1w +2d -4h')!

// Warp existing time
mut t2 := ourtime.now()
t2.warp('+1h')!
```

### Absolute Time

Supports `YYYY-MM-DD HH:mm:ss`, `YYYY-MM-DD HH:mm`, `YYYY-MM-DD HH`, `YYYY-MM-DD`, `DD-MM-YYYY`.

```v
t1 := ourtime.new('2022-12-05 20:14:35')!
t2 := ourtime.new('2022-12-05')! // Time defaults to 00:00:00
```

## Methods Overview

### Creation

```v
now_time := ourtime.now()
from_string := ourtime.new('2023-01-15')!
from_epoch := ourtime.new_from_epoch(1673788800)
```

### Formatting

```v
mut t := ourtime.now()
println(t.str()) // YYYY-MM-DD HH:mm
println(t.day()) // YYYY-MM-DD
println(t.key()) // YYYY_MM_DD_HH_mm_ss
println(t.md()) // Markdown format
```

### Operations

```v
mut t := ourtime.now()
t.warp('+1h')! // Move 1 hour forward
unix_ts := t.unix()
is_empty := t.empty()
```

## Error Handling

Time parsing methods return a `Result` type and should be handled with `!` or `or` blocks.

```v
t_valid := ourtime.new('2023-01-01')!
t_invalid := ourtime.new('bad-date') or {
    println('Error: ${err}')
    ourtime.now() // Fallback
}
```

109
aiprompts/herolib_core/core_params.md
Normal file
@@ -0,0 +1,109 @@
# Parameter Parsing in Vlang

This document details the `paramsparser` module, essential for handling parameters in HeroScript and other contexts.

## Obtaining a `paramsparser` Instance

```v
import freeflowuniverse.herolib.data.paramsparser

// Create new params from a string
params := paramsparser.new("color:red size:'large' priority:1 enable:true")!

// Or create an empty instance and add parameters programmatically
mut params := paramsparser.new_params()
params.set("color", "red")
```

## Parameter Formats

The parser supports various input formats:

1. **Key-value pairs**: `key:value`
2. **Quoted values**: `key:'value with spaces'` (single or double quotes)
3. **Arguments without keys**: `arg1 arg2` (accessed by index)
4. **Comments**: `// this is a comment` (ignored during parsing)

Example:

```v
text := "name:'John Doe' age:30 active:true // user details"
params := paramsparser.new(text)!
```

## Parameter Retrieval Methods

The `paramsparser` module provides a comprehensive set of methods for retrieving and converting parameter values.

### Basic Retrieval

- `get(key string) !string`: Retrieves a string value by key. Returns an error if the key does not exist.
- `get_default(key string, defval string) !string`: Retrieves a string value by key, or returns `defval` if the key is not found.
- `exists(key string) bool`: Checks if a keyword argument (`key:value`) exists.
- `exists_arg(key string) bool`: Checks if an argument (value without a key) exists.

### Argument Retrieval (Positional)

- `get_arg(nr int) !string`: Retrieves an argument by its 0-based index. Returns an error if the index is out of bounds.
- `get_arg_default(nr int, defval string) !string`: Retrieves an argument by index, or returns `defval` if the index is out of bounds.

### Type-Specific Retrieval

- `get_int(key string) !int`: Converts and retrieves an integer (int32).
- `get_int_default(key string, defval int) !int`: Retrieves an integer with a default.
- `get_u32(key string) !u32`: Converts and retrieves an unsigned 32-bit integer.
- `get_u32_default(key string, defval u32) !u32`: Retrieves a u32 with a default.
- `get_u64(key string) !u64`: Converts and retrieves an unsigned 64-bit integer.
- `get_u64_default(key string, defval u64) !u64`: Retrieves a u64 with a default.
- `get_u8(key string) !u8`: Converts and retrieves an unsigned 8-bit integer.
- `get_u8_default(key string, defval u8) !u8`: Retrieves a u8 with a default.
- `get_float(key string) !f64`: Converts and retrieves a 64-bit float.
- `get_float_default(key string, defval f64) !f64`: Retrieves a float with a default.
- `get_percentage(key string) !f64`: Converts a percentage string (e.g., "80%") to a float (0.8).
- `get_percentage_default(key string, defval string) !f64`: Retrieves a percentage with a default.

### Boolean Retrieval

- `get_default_true(key string) bool`: Returns `true` if the value is empty, "1", "true", "y", or "yes". Otherwise `false`.
- `get_default_false(key string) bool`: Returns `false` if the value is empty, "0", "false", "n", or "no". Otherwise `true`.
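
A small sketch of the typed getters above (keys and values are illustrative):

```v
import freeflowuniverse.herolib.data.paramsparser

params := paramsparser.new("name:'John Doe' age:30 ratio:80% active:true")!

age := params.get_int('age')! // 30
ratio := params.get_percentage('ratio')! // 0.8
active := params.get_default_false('active') // true
port := params.get_int_default('port', 8080)! // key missing -> 8080
```
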
### List Retrieval

Lists are typically comma-separated strings (e.g., `users: "john,jane,bob"`).

- `get_list(key string) ![]string`: Retrieves a list of strings.
- `get_list_default(key string, def []string) ![]string`: Retrieves a list of strings with a default.
- `get_list_int(key string) ![]int`: Retrieves a list of integers.
- `get_list_int_default(key string, def []int) []int`: Retrieves a list of integers with a default.
- `get_list_f32(key string) ![]f32`: Retrieves a list of 32-bit floats.
- `get_list_f32_default(key string, def []f32) []f32`: Retrieves a list of f32 with a default.
- `get_list_f64(key string) ![]f64`: Retrieves a list of 64-bit floats.
- `get_list_f64_default(key string, def []f64) []f64`: Retrieves a list of f64 with a default.
- `get_list_i8(key string) ![]i8`: Retrieves a list of 8-bit signed integers.
- `get_list_i8_default(key string, def []i8) []i8`: Retrieves a list of i8 with a default.
- `get_list_i16(key string) ![]i16`: Retrieves a list of 16-bit signed integers.
- `get_list_i16_default(key string, def []i16) []i16`: Retrieves a list of i16 with a default.
- `get_list_i64(key string) ![]i64`: Retrieves a list of 64-bit signed integers.
- `get_list_i64_default(key string, def []i64) []i64`: Retrieves a list of i64 with a default.
- `get_list_u16(key string) ![]u16`: Retrieves a list of 16-bit unsigned integers.
- `get_list_u16_default(key string, def []u16) []u16`: Retrieves a list of u16 with a default.
- `get_list_u32(key string) ![]u32`: Retrieves a list of 32-bit unsigned integers.
- `get_list_u32_default(key string, def []u32) []u32`: Retrieves a list of u32 with a default.
- `get_list_u64(key string) ![]u64`: Retrieves a list of 64-bit unsigned integers.
- `get_list_u64_default(key string, def []u64) []u64`: Retrieves a list of u64 with a default.
- `get_list_namefix(key string) ![]string`: Retrieves a list of strings, normalizing each item (e.g., "My Name" -> "my_name").
- `get_list_namefix_default(key string, def []string) ![]string`: Retrieves a list of name-fixed strings with a default.

### Specialized Retrieval

- `get_map() map[string]string`: Returns all parameters as a map.
- `get_path(key string) !string`: Retrieves a path string.
- `get_path_create(key string) !string`: Retrieves a path string, creating the directory if it doesn't exist.
- `get_from_hashmap(key string, defval string, hashmap map[string]string) !string`: Retrieves a value from a provided hashmap based on the parameter's value.
- `get_storagecapacity_in_bytes(key string) !u64`: Converts storage capacity strings (e.g., "10 GB", "500 MB") to bytes (u64).
- `get_storagecapacity_in_bytes_default(key string, defval u64) !u64`: Retrieves storage capacity in bytes with a default.
- `get_storagecapacity_in_gigabytes(key string) !u64`: Converts storage capacity strings to gigabytes (u64).
- `get_time(key string) !ourtime.OurTime`: Parses a time string (relative or absolute) into an `ourtime.OurTime` object.
- `get_time_default(key string, defval ourtime.OurTime) !ourtime.OurTime`: Retrieves time with a default.
- `get_time_interval(key string) !Duration`: Parses a time interval string into a `Duration` object.
- `get_timestamp(key string) !Duration`: Parses a timestamp string into a `Duration` object.
- `get_timestamp_default(key string, defval Duration) !Duration`: Retrieves a timestamp with a default.

150
aiprompts/herolib_core/core_paths.md
Normal file
@@ -0,0 +1,150 @@
|
||||
# Pathlib Usage Guide
|
||||
|
||||
## Overview
|
||||
|
||||
The pathlib module provides a comprehensive interface for handling file system operations. Key features include:
|
||||
|
||||
- Robust path handling for files, directories, and symlinks
|
||||
- Support for both absolute and relative paths
|
||||
- Automatic home directory expansion (~)
|
||||
- Recursive directory operations
|
||||
- Path filtering and listing
|
||||
- File and directory metadata access
|
||||
|
||||
## Basic Usage
|
||||
|
||||
### Importing pathlib
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.pathlib
|
||||
```
|
||||
|
||||
### Creating Path Objects
|
||||
```v
|
||||
// Create a Path object for a file
|
||||
mut file_path := pathlib.get("path/to/file.txt")
|
||||
|
||||
// Create a Path object for a directory
|
||||
mut dir_path := pathlib.get("path/to/directory")
|
||||
```
|
||||
|
||||
### Basic Path Operations
|
||||
```v
|
||||
// Get absolute path
|
||||
abs_path := file_path.absolute()
|
||||
|
||||
// Get real path (resolves symlinks)
|
||||
real_path := file_path.realpath()
|
||||
|
||||
// Check if path exists
|
||||
if file_path.exists() {
|
||||
// Path exists
|
||||
}
|
||||
```
|
||||
|
||||
## Path Properties and Methods
|
||||
|
||||
### Path Types
|
||||
```v
|
||||
// Check if path is a file
|
||||
if file_path.is_file() {
|
||||
// Handle as file
|
||||
}
|
||||
|
||||
// Check if path is a directory
|
||||
if dir_path.is_dir() {
|
||||
// Handle as directory
|
||||
}
|
||||
|
||||
// Check if path is a symlink
|
||||
if file_path.is_link() {
|
||||
// Handle as symlink
|
||||
}
|
||||
```
|
||||
|
||||
### Path Normalization
|
||||
```v
|
||||
// Normalize path (remove extra slashes, resolve . and ..)
|
||||
normalized_path := file_path.path_normalize()
|
||||
|
||||
// Get path directory
|
||||
dir_path := file_path.path_dir()
|
||||
|
||||
// Get path name without extension
|
||||
name_no_ext := file_path.name_no_ext()
|
||||
```
|
||||
|
||||
## File and Directory Operations
|
||||
|
||||
### File Operations
|
||||
```v
|
||||
// Write to file
|
||||
file_path.write("Content to write")!
|
||||
|
||||
// Read from file
|
||||
content := file_path.read()!
|
||||
|
||||
// Delete file
|
||||
file_path.delete()!
|
||||
```
|
||||
|
||||
### Directory Operations
|
||||
```v
|
||||
// Create directory
|
||||
mut dir := pathlib.get_dir(
|
||||
path: "path/to/new/dir"
|
||||
create: true
|
||||
)!
|
||||
|
||||
// List directory contents
|
||||
mut dir_list := dir.list()!
|
||||
|
||||
// Delete directory
|
||||
dir.delete()!
|
||||
```
|
||||
|
||||
### Symlink Operations
|
||||
```v
|
||||
// Create symlink
|
||||
file_path.link("path/to/symlink", delete_exists: true)!
|
||||
|
||||
// Resolve symlink
|
||||
real_path := file_path.realpath()
|
||||
```
|
||||
|
||||
## Advanced Operations
|
||||
|
||||
### Path Copying
|
||||
```v
|
||||
// Copy file to destination
|
||||
file_path.copy(dest: "path/to/destination")!
|
||||
```
|
||||
|
||||
### Recursive Operations
|
||||
```v
|
||||
// List directory recursively
|
||||
mut recursive_list := dir.list(recursive: true)!
|
||||
|
||||
// Delete directory recursively
|
||||
dir.delete()!
|
||||
```
|
||||
|
||||
### Path Filtering
|
||||
```v
|
||||
// List files matching pattern
|
||||
mut filtered_list := dir.list(
|
||||
regex: [r".*\.txt$"],
|
||||
recursive: true
|
||||
)!
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Error Handling
|
||||
```v
|
||||
if file_path.exists() {
|
||||
// Safe to operate
|
||||
} else {
|
||||
// Handle missing file
|
||||
}
|
||||
```
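To tie these operations together, here is a small hedged sketch that only uses the calls shown above; the paths are hypothetical and it assumes `write` creates the file if it does not yet exist:

```v
import freeflowuniverse.herolib.core.pathlib

// create a working directory on disk (create: true)
mut workdir := pathlib.get_dir(path: '/tmp/pathlib_demo', create: true)!

// write a file inside it and read it back
mut notes := pathlib.get('/tmp/pathlib_demo/notes.txt')
notes.write('hello from pathlib')!
println(notes.read()!)

// list only the .txt files, recursively
txt_files := workdir.list(regex: [r'.*\.txt$'], recursive: true)!
println(txt_files)

// clean up
workdir.delete()!
```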
|
||||
|
||||
99
aiprompts/herolib_core/core_text.md
Normal file
@@ -0,0 +1,99 @@
|
||||
# TextTools Module
|
||||
|
||||
The `texttools` module provides a comprehensive set of utilities for text manipulation and processing.
|
||||
|
||||
## Functions and Examples
|
||||
|
||||
```v
|
||||
import freeflowuniverse.herolib.core.texttools
|
||||
|
||||
assert texttools.name_fix('Hello World!') == 'hello_world'
|
||||
|
||||
```
|
||||
### Name/Path Processing
|
||||
* `name_fix(name string) string`: Normalizes filenames and paths.
|
||||
* `name_fix_keepspace(name string) !string`: Like name_fix but preserves spaces.
|
||||
* `name_fix_no_ext(name_ string) string`: Removes file extension.
|
||||
* `name_fix_snake_to_pascal(name string) string`: Converts snake_case to PascalCase.
|
||||
```v
|
||||
name := texttools.name_fix_snake_to_pascal("hello_world") // Result: "HelloWorld"
|
||||
```
|
||||
* `snake_case(name string) string`: Converts PascalCase to snake_case.
|
||||
```v
|
||||
name := texttools.snake_case("HelloWorld") // Result: "hello_world"
|
||||
```
|
||||
* `name_split(name string) !(string, string)`: Splits name into site and page components.
|
||||
|
||||
|
||||
### Text Cleaning
|
||||
* `name_clean(r string) string`: Normalizes names by removing special characters.
|
||||
```v
|
||||
name := texttools.name_clean("Hello@World!") // Result: "HelloWorld"
|
||||
```
|
||||
* `ascii_clean(r string) string`: Removes all non-ASCII characters.
|
||||
* `remove_empty_lines(text string) string`: Removes empty lines from text.
|
||||
```v
|
||||
text := texttools.remove_empty_lines("line1\n\nline2\n\n\nline3") // Result: "line1\nline2\nline3"
|
||||
```
|
||||
* `remove_double_lines(text string) string`: Removes consecutive empty lines.
|
||||
* `remove_empty_js_blocks(text string) string`: Removes empty code blocks (```...```).
|
||||
|
||||
### Command Line Parsing
|
||||
* `cmd_line_args_parser(text string) ![]string`: Parses command line arguments with support for quotes and escaping.
|
||||
```v
|
||||
args := texttools.cmd_line_args_parser("'arg with spaces' --flag=value") // Result: ['arg with spaces', '--flag=value']
|
||||
```
|
||||
* `text_remove_quotes(text string) string`: Removes quoted sections from text.
|
||||
* `check_exists_outside_quotes(text string, items []string) bool`: Checks if items exist in text outside of quotes.
|
||||
|
||||
### Text Expansion
|
||||
* `expand(txt_ string, l int, expand_with string) string`: Expands text to a specified length with a given character.
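For example (the exact padding behaviour is assumed, not verified against the implementation):

```v
txt := texttools.expand('ok', 5, '.')
// assumed result: 'ok...' (padded with '.' up to length 5)
```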
|
||||
|
||||
### Indentation
|
||||
* `indent(text string, prefix string) string`: Adds indentation prefix to each line.
|
||||
```v
|
||||
text := texttools.indent("line1\nline2", " ") // Result: " line1\n line2\n"
|
||||
```
|
||||
* `dedent(text string) string`: Removes common leading whitespace from every line.
|
||||
```v
|
||||
text := texttools.dedent(" line1\n line2") // Result: "line1\nline2"
|
||||
```
|
||||
|
||||
### String Validation
|
||||
* `is_int(text string) bool`: Checks if text contains only digits.
|
||||
* `is_upper_text(text string) bool`: Checks if text contains only uppercase letters.
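For example (the results follow directly from the descriptions above):

```v
println(texttools.is_int('12345'))      // true
println(texttools.is_int('12a45'))      // false
println(texttools.is_upper_text('ABC')) // true
println(texttools.is_upper_text('AbC')) // false
```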
|
||||
|
||||
### Multiline Processing
|
||||
* `multiline_to_single(text string) !string`: Converts multiline text to a single line with proper escaping.
|
||||
|
||||
### Text Splitting
|
||||
* `split_smart(t string, delimiter_ string) []string`: Intelligent string splitting that respects quotes.
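A hedged example (the exact handling of the quotes in the result is assumed):

```v
parts := texttools.split_smart("name:'my full name' color:red", ' ')
// assumed result: ["name:'my full name'", 'color:red'] (the quoted section is not split)
```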
|
||||
|
||||
### Tokenization
|
||||
* `tokenize(text_ string) TokenizerResult`: Tokenizes text into meaningful parts.
|
||||
* `text_token_replace(text string, tofind string, replacewith string) !string`: Replaces tokens in text.
|
||||
|
||||
### Version Parsing
|
||||
* `version(text_ string) int`: Converts version strings to comparable integers.
|
||||
```v
|
||||
ver := texttools.version("v0.4.36") // Result: 4036
|
||||
ver = texttools.version("v1.4.36") // Result: 1004036
|
||||
```
|
||||
|
||||
### Formatting
|
||||
* `format_rfc1123(t time.Time) string`: Formats a time.Time object into RFC 1123 format.
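For example (the printed value is illustrative; the real output depends on the current time):

```v
import time
import freeflowuniverse.herolib.core.texttools

now := time.utc()
println(texttools.format_rfc1123(now)) // e.g. 'Tue, 10 Jun 2025 14:30:00 GMT'
```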
|
||||
|
||||
|
||||
### Array Operations
|
||||
* `to_array(r string) []string`: Converts a comma or newline separated list to an array of strings.
|
||||
```v
|
||||
text := "item1,item2,item3"
|
||||
array := texttools.to_array(text) // Result: ['item1', 'item2', 'item3']
|
||||
```
|
||||
* `to_array_int(r string) []int`: Converts a text list to an array of integers.
|
||||
* `to_map(mapstring string, line string, delimiter_ string) map[string]string`: Intelligent mapping of a line to a map based on a template.
|
||||
```v
|
||||
r := texttools.to_map("name,-,-,-,-,pid,-,-,-,-,path",
|
||||
"root 304 0.0 0.0 408185328 1360 ?? S 16Dec23 0:34.06 /usr/sbin/distnoted")
|
||||
// Result: {'name': 'root', 'pid': '1360', 'path': '/usr/sbin/distnoted'}
|
||||
```
|
||||
@@ -3,13 +3,10 @@
|
||||
This is how we want example scripts to look; see the first line.
|
||||
|
||||
```vlang
|
||||
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
#!/usr/bin/env -S v -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import freeflowuniverse.herolib.installers.sysadmintools.daguserver
|
||||
import freeflowuniverse.herolib...
|
||||
|
||||
mut ds := daguserver.get()!
|
||||
|
||||
println(ds)
|
||||
```
|
||||
|
||||
The herolib example files are in ~/code/github/freeflowuniverse/herolib/examples
|
||||
@@ -17,3 +14,5 @@ the files are in ~/code/github/freeflowuniverse/herolib/examples for herolib
|
||||
## important instructions
|
||||
|
||||
- never use fn main() in a .vsh script
|
||||
- always use the top line as in example above
|
||||
- these scripts can be executed as is but can also use vrun $pathOfFile
|
||||
@@ -11,45 +11,22 @@
|
||||
when I generate vlang scripts I will always use .vsh extension and use following as first line:
|
||||
|
||||
```
|
||||
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
```
|
||||
|
||||
- a .vsh is a v shell script and can be executed as is, no need to use v ...
|
||||
- in .vsh file there is no need for a main() function
|
||||
- these scripts can be used for examples or instruction scripts e.g. an installs script
|
||||
|
||||
## to do argument parsing use following examples
|
||||
## executing vlang scripts
|
||||
|
||||
```v
|
||||
#!/usr/bin/env -S v -n -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
import os
|
||||
import flag
|
||||
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application('compile.vsh')
|
||||
fp.version('v0.1.0')
|
||||
fp.description('Compile hero binary in debug or production mode')
|
||||
fp.skip_executable()
|
||||
|
||||
prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
|
||||
help_requested := fp.bool('help', `h`, false, 'Show help message')
|
||||
|
||||
if help_requested {
|
||||
println(fp.usage())
|
||||
exit(0)
|
||||
}
|
||||
|
||||
additional_args := fp.finalize() or {
|
||||
eprintln(err)
|
||||
println(fp.usage())
|
||||
exit(1)
|
||||
}
|
||||
As an AI agent I should also execute V or .vsh scripts with vrun:
|
||||
|
||||
```bash
|
||||
vrun ~/code/github/freeflowuniverse/herolib/examples/biztools/bizmodel.vsh
|
||||
```
|
||||
|
||||
|
||||
## when creating a test script
|
||||
## executing test scripts
|
||||
|
||||
Instruct the user to test as follows (vtest is an alias which gets installed together with herolib); this can be done for a directory or for a single file.
|
||||
|
||||
@@ -58,3 +35,4 @@ vtest ~/code/github/freeflowuniverse/herolib/lib/osal/package_test.v
|
||||
```
|
||||
|
||||
- use ~ so it works over all machines
|
||||
- don't use 'v test', we have vtest as alternative
|
||||
@@ -1 +0,0 @@
|
||||
../lib/core/httpconnection/readme.md
|
||||
@@ -1 +0,0 @@
|
||||
../lib/osal/readme.md
|
||||
@@ -1 +0,0 @@
|
||||
../lib/data/ourdb/README.md
|
||||
@@ -1 +0,0 @@
|
||||
../lib/data/ourtime/readme.md
|
||||
@@ -1 +0,0 @@
|
||||
../lib/data/paramsparser/readme.md
|
||||
@@ -1,309 +0,0 @@
|
||||
# how to work with heroscript in vlang
|
||||
|
||||
## heroscript
|
||||
|
||||
Heroscript is our small scripting language; it has the following structure.
|
||||
|
||||
an example of a heroscript is
|
||||
|
||||
```heroscript
|
||||
|
||||
!!dagu.script_define
|
||||
name: 'test_dag'
|
||||
homedir:''
|
||||
title:'a title'
|
||||
reset:1
|
||||
    start:true //true or 1 is the same
|
||||
colors: 'green,red,purple' //lists are comma separated
|
||||
description: '
|
||||
a description can be multiline
|
||||
|
||||
like this
|
||||
'
|
||||
|
||||
|
||||
!!dagu.add_step
|
||||
dag: 'test_dag'
|
||||
name: 'hello_world'
|
||||
command: 'echo hello world'
|
||||
|
||||
!!dagu.add_step
|
||||
dag: 'test_dag'
|
||||
name: 'last_step'
|
||||
command: 'echo last step'
|
||||
|
||||
|
||||
```
|
||||
|
||||
Notice how:
|
||||
- every action starts with !!
|
||||
- the first part is the actor e.g. dagu in this case
|
||||
- the second part is the action name
|
||||
- multiline values are supported, see the description field
|
||||
|
||||
## how to process heroscript in Vlang
|
||||
|
||||
- heroscript can be converted to a struct,
|
||||
- the methods available to get the params are in 'params' section further in this doc
|
||||
|
||||
|
||||
```vlang
|
||||
|
||||
fn test_play_dagu() ! {
|
||||
mut plbook := playbook.new(text: thetext_from_above)!
|
||||
play_dagu(mut plbook)! //see below in vlang block there it all happens
|
||||
}
|
||||
|
||||
|
||||
pub fn play_dagu(mut plbook playbook.PlayBook) ! {
|
||||
|
||||
    //find all actions of the form !!$actor.$actionname; in the example above the actor is dagu. We check with the filter whether any exist; if not, we return
|
||||
dagu_actions := plbook.find(filter: 'dagu.')!
|
||||
if dagu_actions.len == 0 {
|
||||
return
|
||||
}
|
||||
play_dagu_basic(mut plbook)!
|
||||
}
|
||||
|
||||
pub struct DaguScript {
|
||||
pub mut:
|
||||
name string
|
||||
homedir string
|
||||
title string
|
||||
reset bool
|
||||
start bool
|
||||
colors []string
description string
|
||||
}
|
||||
|
||||
// play_dagu plays the dagu play commands
|
||||
pub fn play_dagu_basic(mut plbook playbook.PlayBook) ! {
|
||||
|
||||
//now find the specific ones for dagu.script_define
|
||||
mut actions := plbook.find(filter: 'dagu.script_define')!
|
||||
|
||||
if actions.len > 0 {
|
||||
for myaction in actions {
|
||||
mut p := myaction.params //get the params object from the action object, this can then be processed using the param getters
|
||||
mut obj := DaguScript{
|
||||
//INFO: all details about the get methods can be found in 'params get methods' section
|
||||
name : p.get('name')! //will give error if not exist
|
||||
homedir : p.get('homedir')!
|
||||
title : p.get_default('title', 'My Hero DAG')! //uses a default if not set
|
||||
reset : p.get_default_false('reset')
|
||||
start : p.get_default_true('start')
|
||||
colors : p.get_list('colors')
|
||||
description : p.get_default('description','')!
|
||||
}
|
||||
...
|
||||
}
|
||||
}
|
||||
|
||||
//there can be more actions which will have other filter
|
||||
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## params get methods (param getters)
|
||||
|
||||
```vlang
|
||||
|
||||
fn (params &Params) exists(key_ string) bool
|
||||
|
||||
//check if an arg exists (an arg is just a bare value in the string, e.g. red, not value:something)
|
||||
fn (params &Params) exists_arg(key_ string) bool
|
||||
|
||||
//see if the kwarg with the key exists; if yes, return it as a trimmed string
|
||||
fn (params &Params) get(key_ string) !string
|
||||
|
||||
//return the arg with nr, 0 is the first
|
||||
fn (params &Params) get_arg(nr int) !string
|
||||
|
||||
//return the arg; if nr is larger than the number of args, return defval
|
||||
fn (params &Params) get_arg_default(nr int, defval string) !string
|
||||
|
||||
fn (params &Params) get_default(key string, defval string) !string
|
||||
|
||||
fn (params &Params) get_default_false(key string) bool
|
||||
|
||||
fn (params &Params) get_default_true(key string) bool
|
||||
|
||||
fn (params &Params) get_float(key string) !f64
|
||||
|
||||
fn (params &Params) get_float_default(key string, defval f64) !f64
|
||||
|
||||
fn (params &Params) get_from_hashmap(key_ string, defval string, hashmap map[string]string) !string
|
||||
|
||||
fn (params &Params) get_int(key string) !int
|
||||
|
||||
fn (params &Params) get_int_default(key string, defval int) !int
|
||||
|
||||
//Looks for a list of strings in the parameters. ',' is used as the delimiter for the list
|
||||
fn (params &Params) get_list(key string) ![]string
|
||||
|
||||
fn (params &Params) get_list_default(key string, def []string) ![]string
|
||||
|
||||
fn (params &Params) get_list_f32(key string) ![]f32
|
||||
|
||||
fn (params &Params) get_list_f32_default(key string, def []f32) []f32
|
||||
|
||||
fn (params &Params) get_list_f64(key string) ![]f64
|
||||
|
||||
fn (params &Params) get_list_f64_default(key string, def []f64) []f64
|
||||
|
||||
fn (params &Params) get_list_i16(key string) ![]i16
|
||||
|
||||
fn (params &Params) get_list_i16_default(key string, def []i16) []i16
|
||||
|
||||
fn (params &Params) get_list_i64(key string) ![]i64
|
||||
|
||||
fn (params &Params) get_list_i64_default(key string, def []i64) []i64
|
||||
|
||||
fn (params &Params) get_list_i8(key string) ![]i8
|
||||
|
||||
fn (params &Params) get_list_i8_default(key string, def []i8) []i8
|
||||
|
||||
fn (params &Params) get_list_int(key string) ![]int
|
||||
|
||||
fn (params &Params) get_list_int_default(key string, def []int) []int
|
||||
|
||||
fn (params &Params) get_list_namefix(key string) ![]string
|
||||
|
||||
fn (params &Params) get_list_namefix_default(key string, def []string) ![]string
|
||||
|
||||
fn (params &Params) get_list_u16(key string) ![]u16
|
||||
|
||||
fn (params &Params) get_list_u16_default(key string, def []u16) []u16
|
||||
|
||||
fn (params &Params) get_list_u32(key string) ![]u32
|
||||
|
||||
fn (params &Params) get_list_u32_default(key string, def []u32) []u32
|
||||
|
||||
fn (params &Params) get_list_u64(key string) ![]u64
|
||||
|
||||
fn (params &Params) get_list_u64_default(key string, def []u64) []u64
|
||||
|
||||
fn (params &Params) get_list_u8(key string) ![]u8
|
||||
|
||||
fn (params &Params) get_list_u8_default(key string, def []u8) []u8
|
||||
|
||||
fn (params &Params) get_map() map[string]string
|
||||
|
||||
fn (params &Params) get_path(key string) !string
|
||||
|
||||
fn (params &Params) get_path_create(key string) !string
|
||||
|
||||
fn (params &Params) get_percentage(key string) !f64
|
||||
|
||||
fn (params &Params) get_percentage_default(key string, defval string) !f64
|
||||
|
||||
//convert GB, MB, KB to bytes e.g. 10 GB becomes bytes in u64
|
||||
fn (params &Params) get_storagecapacity_in_bytes(key string) !u64
|
||||
|
||||
fn (params &Params) get_storagecapacity_in_bytes_default(key string, defval u64) !u64
|
||||
|
||||
fn (params &Params) get_storagecapacity_in_gigabytes(key string) !u64
|
||||
|
||||
//Get an OurTime object from a time string; the input can be either relative or absolute
|
||||
fn (params &Params) get_time(key string) !ourtime.OurTime
|
||||
|
||||
fn (params &Params) get_time_default(key string, defval ourtime.OurTime) !ourtime.OurTime
|
||||
|
||||
fn (params &Params) get_time_interval(key string) !Duration
|
||||
|
||||
fn (params &Params) get_timestamp(key string) !Duration
|
||||
|
||||
fn (params &Params) get_timestamp_default(key string, defval Duration) !Duration
|
||||
|
||||
fn (params &Params) get_u32(key string) !u32
|
||||
|
||||
fn (params &Params) get_u32_default(key string, defval u32) !u32
|
||||
|
||||
fn (params &Params) get_u64(key string) !u64
|
||||
|
||||
fn (params &Params) get_u64_default(key string, defval u64) !u64
|
||||
|
||||
fn (params &Params) get_u8(key string) !u8
|
||||
|
||||
fn (params &Params) get_u8_default(key string, defval u8) !u8
|
||||
|
||||
```
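A small hedged fragment showing the arg getters, which are not covered by the example above. It assumes an action that was written as `!!dagu.example red name:'something'`, so `red` is a positional arg and `name` is a kwarg:

```vlang
p := myaction.params
assert p.exists_arg('red')                   // 'red' is an arg (a bare value, not key:value)
first := p.get_arg(0)!                       // 'red'
name := p.get('name')!                       // 'something'
fallback := p.get_arg_default(1, 'default')! // 'default', because there is no second arg
```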
|
||||
|
||||
## how internally a heroscript gets parsed for params
|
||||
|
||||
- example to show how a heroscript gets parsed in action with params
|
||||
- params are part of action object
|
||||
|
||||
```heroscript
|
||||
example text to parse (heroscript)
|
||||
|
||||
id:a1 name6:aaaaa
|
||||
name:'need to do something 1'
|
||||
description:
|
||||
'
|
||||
## markdown works in it
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
|
||||
name2: test
|
||||
name3: hi
|
||||
name10:'this is with space' name11:aaa11
|
||||
|
||||
name4: 'aaa'
|
||||
|
||||
//somecomment
|
||||
name5: 'aab'
|
||||
```
|
||||
|
||||
the params are part of the action and are represented as follows for the above:
|
||||
|
||||
```vlang
|
||||
Params{
|
||||
params: [Param{
|
||||
key: 'id'
|
||||
value: 'a1'
|
||||
}, Param{
|
||||
key: 'name6'
|
||||
value: 'aaaaa'
|
||||
}, Param{
|
||||
key: 'name'
|
||||
value: 'need to do something 1'
|
||||
}, Param{
|
||||
key: 'description'
|
||||
value: '## markdown works in it
|
||||
|
||||
description can be multiline
|
||||
lets see what happens
|
||||
|
||||
- a
|
||||
- something else
|
||||
|
||||
### subtitle
|
||||
'
|
||||
}, Param{
|
||||
key: 'name2'
|
||||
value: 'test'
|
||||
}, Param{
|
||||
key: 'name3'
|
||||
value: 'hi'
|
||||
}, Param{
|
||||
key: 'name10'
|
||||
value: 'this is with space'
|
||||
}, Param{
|
||||
key: 'name11'
|
||||
value: 'aaa11'
|
||||
}, Param{
|
||||
key: 'name4'
|
||||
value: 'aaa'
|
||||
}, Param{
|
||||
key: 'name5'
|
||||
value: 'aab'
|
||||
}]
|
||||
}
|
||||
```
|
||||
@@ -2238,7 +2238,7 @@ be faster, since there is no need for a re-compilation of a script, that has not
|
||||
An example `deploy.vsh`:
|
||||
|
||||
```v oksyntax
|
||||
#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
// Note: The shebang line above, associates the .vsh file to V on Unix-like systems,
|
||||
// so it can be run just by specifying the path to the .vsh file, once it's made
|
||||
@@ -2300,11 +2300,11 @@ Whilst V does normally not allow vsh scripts without the designated file extensi
|
||||
to circumvent this rule and have a file with a fully custom name and shebang. Whilst this feature
|
||||
exists it is only recommended for specific usecases like scripts that will be put in the path and
|
||||
should **not** be used for things like build or deploy scripts. To access this feature start the
|
||||
file with `#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
file with `#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
the built executable. This will run in crun mode so it will only rebuild if changes to the script
|
||||
were made and keep the binary as `tmp.<scriptfilename>`. **Caution**: if this filename already
|
||||
exists the file will be overridden. If you want to rebuild each time and not keep this binary
|
||||
instead use `#!/usr/bin/env -S v -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
|
||||
instead use `#!/usr/bin/env -S v -gc none -cc tcc -d use_openssl -enable-globals run
|
||||
|
||||
# Appendices
|
||||
|
||||
45
aiprompts/v_advanced/compress.md
Normal file
@@ -0,0 +1,45 @@
|
||||
The `compress` module in V provides low-level functionalities for compressing and decompressing byte arrays.
|
||||
|
||||
**Functions Overview (Low-Level):**
|
||||
|
||||
* **`compress(data []u8, flags int) ![]u8`**: Compresses an array of bytes.
|
||||
* **`decompress(data []u8, flags int) ![]u8`**: Decompresses an array of bytes.
|
||||
* **`decompress_with_callback(data []u8, cb ChunkCallback, userdata voidptr, flags int) !u64`**: Decompresses byte arrays using a callback function for chunks.
|
||||
|
||||
**Type Definition (Low-Level):**
|
||||
|
||||
* **`ChunkCallback`**: A function type `fn (chunk []u8, userdata voidptr) int` used to receive decompressed chunks.
|
||||
|
||||
---
|
||||
|
||||
**`compress.gzip` Module (High-Level Gzip Operations):**
|
||||
|
||||
For high-level gzip compression and decompression, use the `compress.gzip` module. This module provides a more convenient and recommended way to handle gzip operations compared to the low-level `compress` module.
|
||||
|
||||
**Key Features of `compress.gzip`:**
|
||||
|
||||
* **`compress(data []u8, params CompressParams) ![]u8`**: Compresses data using gzip, allowing specification of `CompressParams` like `compression_level` (0-4095).
|
||||
* **`decompress(data []u8, params DecompressParams) ![]u8`**: Decompresses gzip-compressed data, allowing specification of `DecompressParams` for verification.
|
||||
* **`decompress_with_callback(data []u8, cb compr.ChunkCallback, userdata voidptr, params DecompressParams) !int`**: Decompresses gzip data with a callback for chunks, similar to the low-level version but for gzip streams.
|
||||
* **`validate(data []u8, params DecompressParams) !GzipHeader`**: Validates a gzip header and returns its details.
|
||||
|
||||
**Parameter Structures:**
|
||||
|
||||
* **`CompressParams`**: Configures compression, primarily `compression_level` (0-4095).
|
||||
* **`DecompressParams`**: Configures decompression, including `verify_header_checksum`, `verify_length`, and `verify_checksum`.
|
||||
* **`GzipHeader`**: Represents the structure of a gzip header.
|
||||
|
||||
**Inline Code Example (Gzip Compression/Decompression):**
|
||||
|
||||
```v
|
||||
import compress.gzip
|
||||
|
||||
data := 'Hello, Gzip!'
|
||||
compressed := gzip.compress(data.bytes(), compression_level: 4095)!
|
||||
decompressed := gzip.decompress(compressed)!
|
||||
|
||||
// Check if decompressed data matches original
|
||||
// if data.bytes() == decompressed { ... }
|
||||
```
|
||||
|
||||
**Important Note:** Always prefer `compress.gzip` for general gzip compression/decompression tasks over the low-level `compress` module.
|
||||
64
aiprompts/v_advanced/generics.md
Normal file
@@ -0,0 +1,64 @@
|
||||
|
||||
```v
|
||||
|
||||
struct Repo[T] {
|
||||
db DB
|
||||
}
|
||||
|
||||
struct User {
|
||||
id int
|
||||
name string
|
||||
}
|
||||
|
||||
struct Post {
|
||||
id int
|
||||
user_id int
|
||||
title string
|
||||
body string
|
||||
}
|
||||
|
||||
fn new_repo[T](db DB) Repo[T] {
|
||||
return Repo[T]{db: db}
|
||||
}
|
||||
|
||||
// This is a generic function. V will generate it for every type it's used with.
|
||||
fn (r Repo[T]) find_by_id(id int) ?T {
|
||||
table_name := T.name // in this example getting the name of the type gives us the table name
|
||||
return r.db.query_one[T]('select * from ${table_name} where id = ?', id)
|
||||
}
|
||||
|
||||
db := new_db()
|
||||
users_repo := new_repo[User](db) // returns Repo[User]
|
||||
posts_repo := new_repo[Post](db) // returns Repo[Post]
|
||||
user := users_repo.find_by_id(1)? // find_by_id[User]
|
||||
post := posts_repo.find_by_id(1)? // find_by_id[Post]
|
||||
|
||||
```
|
||||
|
||||
Currently generic function definitions must declare their type parameters, but in the future V will infer generic type parameters from single-letter type names in runtime parameter types. This is why `find_by_id` can omit `[T]`: the receiver argument `r` uses the generic type `T`.
|
||||
|
||||
```v
|
||||
fn compare[T](a T, b T) int {
|
||||
if a < b {
|
||||
return -1
|
||||
}
|
||||
if a > b {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// compare[int]
|
||||
println(compare(1, 0)) // Outputs: 1
|
||||
println(compare(1, 1)) // 0
|
||||
println(compare(1, 2)) // -1
|
||||
// compare[string]
|
||||
println(compare('1', '0')) // Outputs: 1
|
||||
println(compare('1', '1')) // 0
|
||||
println(compare('1', '2')) // -1
|
||||
// compare[f64]
|
||||
println(compare(1.1, 1.0)) // Outputs: 1
|
||||
println(compare(1.1, 1.1)) // 0
|
||||
println(compare(1.1, 1.2)) // -1
|
||||
```
|
||||
|
||||
187
aiprompts/v_advanced/reflection.md
Normal file
@@ -0,0 +1,187 @@
|
||||
## Compile time reflection
|
||||
|
||||
$ is used as a prefix for compile time (also referred to as 'comptime') operations.
|
||||
|
||||
Having built-in JSON support is nice, but V also allows you to create efficient serializers for any data format. V has compile time if and for constructs:
|
||||
|
||||
### .fields
|
||||
You can iterate over struct fields using `.fields`; it also works with generic types (e.g. `T.fields`) and generic arguments (e.g. `param.fields` where `fn gen[T](param T) {`).
|
||||
|
||||
struct User {
|
||||
name string
|
||||
age int
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for field in User.fields {
|
||||
$if field.typ is string {
|
||||
println('${field.name} is of type string')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// name is of type string
|
||||
### .values
|
||||
You can read Enum values and their attributes.
|
||||
|
||||
enum Color {
|
||||
red @[RED] // first attribute
|
||||
blue @[BLUE] // second attribute
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for e in Color.values {
|
||||
println(e.name)
|
||||
println(e.attrs)
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// red
|
||||
// ['RED']
|
||||
// blue
|
||||
// ['BLUE']
|
||||
### .attributes
|
||||
You can read Struct attributes.
|
||||
|
||||
@[COLOR]
|
||||
struct Foo {
|
||||
a int
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for e in Foo.attributes {
|
||||
println(e)
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// StructAttribute{
|
||||
// name: 'COLOR'
|
||||
// has_arg: false
|
||||
// arg: ''
|
||||
// kind: plain
|
||||
// }
|
||||
### .variants
|
||||
You can read variant types from Sum type.
|
||||
|
||||
type MySum = int | string
|
||||
|
||||
fn main() {
|
||||
$for v in MySum.variants {
|
||||
$if v.typ is int {
|
||||
println('has int type')
|
||||
} $else $if v.typ is string {
|
||||
println('has string type')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// has int type
|
||||
// has string type
|
||||
### .methods
|
||||
You can retrieve information about struct methods.
|
||||
|
||||
struct Foo {
|
||||
}
|
||||
|
||||
fn (f Foo) test() int {
|
||||
return 123
|
||||
}
|
||||
|
||||
fn (f Foo) test2() string {
|
||||
return 'foo'
|
||||
}
|
||||
|
||||
fn main() {
|
||||
foo := Foo{}
|
||||
$for m in Foo.methods {
|
||||
$if m.return_type is int {
|
||||
print('${m.name} returns int: ')
|
||||
println(foo.$method())
|
||||
} $else $if m.return_type is string {
|
||||
print('${m.name} returns string: ')
|
||||
println(foo.$method())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// test returns int: 123
|
||||
// test2 returns string: foo
|
||||
### .params
|
||||
You can retrieve information about struct method params.
|
||||
|
||||
struct Test {
|
||||
}
|
||||
|
||||
fn (t Test) foo(arg1 int, arg2 string) {
|
||||
}
|
||||
|
||||
fn main() {
|
||||
$for m in Test.methods {
|
||||
$for param in m.params {
|
||||
println('${typeof(param.typ).name}: ${param.name}')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Output:
|
||||
// int: arg1
|
||||
// string: arg2
|
||||
|
||||
## Example
|
||||
|
||||
```v
|
||||
// An example deserializer implementation
|
||||
|
||||
struct User {
|
||||
name string
|
||||
age int
|
||||
}
|
||||
|
||||
fn main() {
|
||||
data := 'name=Alice\nage=18'
|
||||
user := decode[User](data)
|
||||
println(user)
|
||||
}
|
||||
|
||||
fn decode[T](data string) T {
|
||||
mut result := T{}
|
||||
// compile-time `for` loop
|
||||
// T.fields gives an array of a field metadata type
|
||||
$for field in T.fields {
|
||||
$if field.typ is string {
|
||||
// $(string_expr) produces an identifier
|
||||
result.$(field.name) = get_string(data, field.name)
|
||||
} $else $if field.typ is int {
|
||||
result.$(field.name) = get_int(data, field.name)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
fn get_string(data string, field_name string) string {
|
||||
for line in data.split_into_lines() {
|
||||
key_val := line.split('=')
|
||||
if key_val[0] == field_name {
|
||||
return key_val[1]
|
||||
}
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
fn get_int(data string, field string) int {
|
||||
return get_string(data, field).int()
|
||||
}
|
||||
|
||||
// `decode<User>` generates:
|
||||
// fn decode_User(data string) User {
|
||||
// mut result := User{}
|
||||
// result.name = get_string(data, 'name')
|
||||
// result.age = get_int(data, 'age')
|
||||
// return result
|
||||
// }
|
||||
```
|
||||
@@ -1,907 +0,0 @@
|
||||
# veb - the V Web Server
|
||||
|
||||
A simple yet powerful web server with built-in routing, parameter handling, templating, and other
|
||||
features.
|
||||
## Quick Start
|
||||
|
||||
Run your veb app with a live reload via `v -d veb_livereload watch run .`
|
||||
|
||||
Now modifying any file in your web app (whether it's a .v file with the backend logic
|
||||
or a compiled .html template file) will
|
||||
result in an instant refresh of your app
|
||||
in the browser. No need to quit the app, rebuild it, and refresh the page in the browser!
|
||||
|
||||
## Deploying veb apps
|
||||
|
||||
All the code, including HTML templates, is in one binary file. That's all you need to deploy.
|
||||
Use the `-prod` flag when building for production.
|
||||
|
||||
## Getting Started
|
||||
|
||||
To start, you must import the module `veb` and define a structure which will
|
||||
represent your app and a structure which will represent the context of a request.
|
||||
These structures must be declared with the `pub` keyword.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v
|
||||
module main
|
||||
|
||||
import veb
|
||||
|
||||
pub struct User {
|
||||
pub mut:
|
||||
name string
|
||||
id int
|
||||
}
|
||||
|
||||
// Our context struct must embed `veb.Context`!
|
||||
pub struct Context {
|
||||
veb.Context
|
||||
pub mut:
|
||||
// In the context struct we store data that could be different
|
||||
// for each request. Like a User struct or a session id
|
||||
user User
|
||||
session_id string
|
||||
}
|
||||
|
||||
pub struct App {
|
||||
pub:
|
||||
// In the app struct we store data that should be accessible by all endpoints.
|
||||
// For example, a database or configuration values.
|
||||
secret_key string
|
||||
}
|
||||
|
||||
// This is how endpoints are defined in veb. This is the index route
|
||||
pub fn (app &App) index(mut ctx Context) veb.Result {
|
||||
return ctx.text('Hello V! The secret key is "${app.secret_key}"')
|
||||
}
|
||||
|
||||
fn main() {
|
||||
mut app := &App{
|
||||
secret_key: 'secret'
|
||||
}
|
||||
// Pass the App and context type and start the web server on port 8080
|
||||
veb.run[App, Context](mut app, 8080)
|
||||
}
|
||||
```
|
||||
|
||||
You can use the `App` struct for data you want to keep during the lifetime of your program,
|
||||
or for data that you want to share between different routes.
|
||||
|
||||
A new `Context` struct is created every time a request is received,
|
||||
so it can contain different data for each request.
|
||||
|
||||
## Defining endpoints
|
||||
|
||||
To add endpoints to your web server, you must extend the `App` struct.
|
||||
For routing you can either use auto-mapping of function names or specify the path as an attribute.
|
||||
The function expects a parameter of your Context type and a response of the type `veb.Result`.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// This endpoint can be accessed via http://server:port/hello
|
||||
pub fn (app &App) hello(mut ctx Context) veb.Result {
|
||||
return ctx.text('Hello')
|
||||
}
|
||||
|
||||
// This endpoint can be accessed via http://server:port/foo
|
||||
@['/foo']
|
||||
pub fn (app &App) world(mut ctx Context) veb.Result {
|
||||
return ctx.text('World')
|
||||
}
|
||||
```
|
||||
|
||||
### HTTP verbs
|
||||
|
||||
To use any HTTP verbs (or methods, as they are properly called),
|
||||
such as `@[post]`, `@[get]`, `@[put]`, `@[patch]` or `@[delete]`
|
||||
you can simply add the attribute before the function definition.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// only GET requests to http://server:port/world are handled by this method
|
||||
@[get]
|
||||
pub fn (app &App) world(mut ctx Context) veb.Result {
|
||||
return ctx.text('World')
|
||||
}
|
||||
|
||||
// only POST requests to http://server:port/product/create are handled by this method
|
||||
@['/product/create'; post]
|
||||
pub fn (app &App) create_product(mut ctx Context) veb.Result {
|
||||
return ctx.text('product')
|
||||
}
|
||||
```
|
||||
|
||||
By default, endpoints are marked as GET requests only. It is also possible to
|
||||
add multiple HTTP verbs per endpoint.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// only GET and POST requests to http://server:port/login are handled by this method
|
||||
@['/login'; get; post]
|
||||
pub fn (app &App) login(mut ctx Context) veb.Result {
|
||||
if ctx.req.method == .get {
|
||||
// show the login page on a GET request
|
||||
return ctx.html('<h1>Login page</h1><p>todo: make form</p>')
|
||||
} else {
|
||||
// request method is POST
|
||||
password := ctx.form['password']
|
||||
// validate password length
|
||||
if password.len < 12 {
|
||||
return ctx.text('password is too weak!')
|
||||
} else {
|
||||
// we receive a POST request, so we want to explicitly tell the browser
|
||||
// to send a GET request to the profile page.
|
||||
return ctx.redirect('/profile')
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Routes with Parameters
|
||||
|
||||
Parameters are passed directly to an endpoint route using the colon sign `:`. The route
|
||||
parameters are passed as arguments. V will cast the parameter to any of V's primitive types
|
||||
(`string`, `int`, etc.).
|
||||
|
||||
To pass a parameter to an endpoint, you simply define it inside an attribute, e.g.
|
||||
`@['/hello/:user']`.
|
||||
After it is defined in the attribute, you have to add it as a function parameter.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// V will pass the parameter 'user' as a string
|
||||
@['/hello/:user']
|
||||
pub fn (app &App) hello_user(mut ctx Context, user string) veb.Result {
|
||||
return ctx.text('Hello ${user}')
|
||||
}
|
||||
|
||||
// V will pass the parameter 'id' as an int
|
||||
@['/document/:id']
|
||||
pub fn (app &App) get_document(mut ctx Context, id int) veb.Result {
|
||||
    return ctx.text('Hello ${id}')
|
||||
}
|
||||
```
|
||||
|
||||
If we visit http://localhost:port/hello/vaesel we would see the text `Hello vaesel`.
|
||||
|
||||
### Routes with Parameter Arrays
|
||||
|
||||
If you want multiple parameters in your route and if you want to parse the parameters
|
||||
yourself, or you want a wildcard route, you can add `...` after the `:` and name,
|
||||
e.g. `@['/:path...']`.
|
||||
|
||||
This will match all routes after `'/'`. For example, the url `/path/to/test` would give
|
||||
`path = '/path/to/test'`.
|
||||
|
||||
```v ignore
|
||||
@['/:path...']
|
||||
pub fn (app &App) wildcard(mut ctx Context, path string) veb.Result {
|
||||
return ctx.text('URL path = "${path}"')
|
||||
}
|
||||
```
|
||||
|
||||
### Query, Form and Files
|
||||
|
||||
You have direct access to query values by accessing the `query` field on your context struct.
|
||||
You are also able to access any formdata or files that were sent
|
||||
with the request with the fields `.form` and `.files` respectively.
|
||||
|
||||
In the following example, visiting http://localhost:port/user?name=veb we
|
||||
will see the text `Hello veb!`. And if we access the route without the `name` parameter,
|
||||
http://localhost:port/user, we will see the text `no user was found`.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
@['/user'; get]
|
||||
pub fn (app &App) get_user_by_id(mut ctx Context) veb.Result {
|
||||
user_name := ctx.query['name'] or {
|
||||
// we can exit early and send a different response if no `name` parameter was passed
|
||||
return ctx.text('no user was found')
|
||||
}
|
||||
|
||||
return ctx.text('Hello ${user_name}!')
|
||||
}
|
||||
```
|
||||
|
||||
### Host
|
||||
|
||||
To restrict an endpoint to a specific host, you can use the `host` attribute
|
||||
followed by a colon `:` and the host name. You can test the Host feature locally
|
||||
by adding a host to the "hosts" file of your device.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
@['/'; host: 'example.com']
|
||||
pub fn (app &App) hello_web(mut ctx Context) veb.Result {
|
||||
    return ctx.text('Hello World')
|
||||
}
|
||||
|
||||
@['/'; host: 'api.example.org']
|
||||
pub fn (app &App) hello_api(mut ctx Context) veb.Result {
|
||||
return ctx.text('Hello API')
|
||||
}
|
||||
|
||||
// define the handler without a host attribute last if you have conflicting paths.
|
||||
@['/']
|
||||
pub fn (app &App) hello_others(mut ctx Context) veb.Result {
|
||||
return ctx.text('Hello Others')
|
||||
}
|
||||
```
|
||||
|
||||
You can also [create a controller](#controller-with-hostname) to handle all requests from a specific
|
||||
host in one app struct.
|
||||
|
||||
### Route Matching Order
|
||||
|
||||
veb will match routes in the order that you define endpoints.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
@['/:path']
|
||||
pub fn (app &App) with_parameter(mut ctx Context, path string) veb.Result {
|
||||
return ctx.text('from with_parameter, path: "${path}"')
|
||||
}
|
||||
|
||||
@['/normal']
|
||||
pub fn (app &App) normal(mut ctx Context) veb.Result {
|
||||
return ctx.text('from normal')
|
||||
}
|
||||
```
|
||||
|
||||
In this example we defined an endpoint with a parameter first. If we access our app
on the url http://localhost:port/normal we will not see `from normal`, but
`from with_parameter, path: "normal"`. To make `/normal` reachable, define it before the catch-all `/:path` route.
|
||||
|
||||
### Custom not found page
|
||||
|
||||
You can implement a `not_found` endpoint that is called when a request is made, and no
|
||||
matching route is found to replace the default HTTP 404 not found page. This route
|
||||
has to be defined on our Context struct.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
pub fn (mut ctx Context) not_found() veb.Result {
|
||||
// set HTTP status 404
|
||||
ctx.res.set_status(.not_found)
|
||||
return ctx.html('<h1>Page not found!</h1>')
|
||||
}
|
||||
```
|
||||
|
||||
## Static files and website
|
||||
|
||||
veb also provides a way of handling static files. We can mount a folder at the root
|
||||
of our web app, or at a custom route. To start using static files we have to embed
|
||||
`veb.StaticHandler` on our app struct.
|
||||
|
||||
**Example:**
|
||||
|
||||
Let's say you have the following file structure:
|
||||
|
||||
```
|
||||
.
|
||||
├── static/
|
||||
│ ├── css/
|
||||
│ │ └── main.css
|
||||
│ └── js/
|
||||
│ └── main.js
|
||||
└── main.v
|
||||
```
|
||||
|
||||
If we want all the documents inside the `static` sub-directory to be publicly accessible, we can
|
||||
use `handle_static`.
|
||||
|
||||
> **Note:**
|
||||
> veb will recursively search the folder you mount; all the files inside that folder
|
||||
> will be publicly available.
|
||||
|
||||
_main.v_
|
||||
|
||||
```v
|
||||
module main
|
||||
|
||||
import veb
|
||||
|
||||
pub struct Context {
|
||||
veb.Context
|
||||
}
|
||||
|
||||
pub struct App {
|
||||
veb.StaticHandler
|
||||
}
|
||||
|
||||
fn main() {
|
||||
mut app := &App{}
|
||||
|
||||
app.handle_static('static', false)!
|
||||
|
||||
veb.run[App, Context](mut app, 8080)
|
||||
}
|
||||
```
|
||||
|
||||
If we start the app with `v run main.v` we can access our `main.css` file at
|
||||
http://localhost:8080/static/css/main.css
|
||||
|
||||
### Mounting folders at specific locations
|
||||
|
||||
In the previous example the folder `static` was mounted at `/static`. We could also choose
|
||||
to mount the static folder at the root of our app: everything inside the `static` folder
|
||||
is available at `/`.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// change the second argument to `true` to mount a folder at the app root
|
||||
app.handle_static('static', true)!
|
||||
```
|
||||
|
||||
We can now access `main.css` directly at http://localhost:8080/css/main.css.
|
||||
|
||||
If a request is made to the root of a static folder, veb will look for an
|
||||
`index.html` or `index.htm` file and serve it if available.
|
||||
Thus, it's also a good way to host a complete website.
|
||||
An example is available [here](/examples/veb/static_website).
|
||||
|
||||
It is also possible to mount the `static` folder at a custom path.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// mount the folder 'static' at path '/public', the path has to start with '/'
|
||||
app.mount_static_folder_at('static', '/public')
|
||||
```
|
||||
|
||||
If we run our app the `main.css` file is available at http://localhost:8080/public/main.css
|
||||
|
||||
### Adding a single static asset
|
||||
|
||||
If you don't want to mount an entire folder, but only a single file, you can use `serve_static`.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// serve the `main.css` file at '/path/main.css'
|
||||
app.serve_static('/path/main.css', 'static/css/main.css')!
|
||||
```
|
||||
|
||||
### Dealing with MIME types
|
||||
|
||||
By default, veb will map the extension of a file to a MIME type. If any of your static files'
|
||||
extensions do not have a default MIME type in veb, veb will throw an error and you
|
||||
have to add your MIME type to `.static_mime_types` yourself.
|
||||
|
||||
**Example:**
|
||||
|
||||
Given the following file structure:
|
||||
|
||||
```
|
||||
.
|
||||
├── static/
|
||||
│ └── file.what
|
||||
└── main.v
|
||||
```
|
||||
|
||||
```v ignore
|
||||
app.handle_static('static', true)!
|
||||
```
|
||||
|
||||
This code will throw an error, because veb has no default MIME type for a `.what` file extension.
|
||||
|
||||
```
|
||||
unknown MIME type for file extension ".what"
|
||||
```
|
||||
|
||||
To fix this we have to provide a MIME type for the `.what` file extension:
|
||||
|
||||
```v ignore
|
||||
app.static_mime_types['.what'] = 'text/plain'
|
||||
app.handle_static('static', true)!
|
||||
```
|
||||
|
||||
## Middleware
|
||||
|
||||
Middleware in web development is (loosely defined) a hidden layer that sits between
|
||||
what a user requests (the HTTP Request) and what a user sees (the HTTP Response).
|
||||
We can use this middleware layer to provide "hidden" functionality to our app's endpoints.
|
||||
|
||||
To use veb's middleware we have to embed `veb.Middleware` on our app struct and provide
|
||||
the type of which context struct should be used.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
pub struct App {
|
||||
veb.Middleware[Context]
|
||||
}
|
||||
```
|
||||
|
||||
### Use case
|
||||
|
||||
We could, for example, get the cookies for an HTTP request and check if the user has already
|
||||
accepted our cookie policy. Let's modify our Context struct to store whether the user has
|
||||
accepted our policy or not.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
pub struct Context {
|
||||
veb.Context
|
||||
pub mut:
|
||||
has_accepted_cookies bool
|
||||
}
|
||||
```
|
||||
|
||||
In veb middleware functions take a `mut` parameter with the type of your context struct
|
||||
and must return `bool`. We have full access to modify our Context struct!
|
||||
|
||||
The return value indicates to veb whether it can continue or has to stop. If we send a
|
||||
response to the client in a middleware function veb has to stop, so we return `false`.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
pub fn check_cookie_policy(mut ctx Context) bool {
|
||||
// get the cookie
|
||||
cookie_value := ctx.get_cookie('accepted_cookies') or { '' }
|
||||
// check if the cookie has been set
|
||||
if cookie_value == 'true' {
|
||||
ctx.has_accepted_cookies = true
|
||||
}
|
||||
// we don't send a response, so we must return true
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
We can check this value in an endpoint and return a different response.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
@['/only-cookies']
|
||||
pub fn (app &App) only_cookie_route(mut ctx Context) veb.Result {
|
||||
if ctx.has_accepted_cookies {
|
||||
return ctx.text('Welcome!')
|
||||
} else {
|
||||
return ctx.text('You must accept the cookie policy!')
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
There is one thing left for our middleware to work: we have to register our `check_cookie_policy`
|
||||
function as middleware for our app. We must do this after the app is created and before the
|
||||
app is started.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
fn main() {
|
||||
mut app := &App{}
|
||||
|
||||
// register middleware for all routes
|
||||
app.use(handler: check_cookie_policy)
|
||||
|
||||
// Pass the App and context type and start the web server on port 8080
|
||||
veb.run[App, Context](mut app, 8080)
|
||||
}
|
||||
```
|
||||
|
||||
### Types of middleware
|
||||
|
||||
In the previous example we used so-called "global" middleware. This type of middleware
applies to every endpoint defined on our app struct. It is also possible
to register middleware for only certain routes.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
// register middleware only for the route '/auth'
|
||||
app.route_use('/auth', handler: auth_middleware)
|
||||
// register middleware only for the route '/documents/' with a parameter
|
||||
// e.g. '/documents/5'
|
||||
app.route_use('/documents/:id', handler: auth_middleware)
|
||||
// register middleware with a parameter array. The middleware will be registered
|
||||
// for all routes that start with '/user/' e.g. '/user/profile/update'
|
||||
app.route_use('/user/:path...', handler: auth_middleware)
|
||||
```
|
||||
|
||||
### Evaluation moment
|
||||
|
||||
By default, the registered middleware functions are executed *before* a method on your
|
||||
app struct is called. You can also change this behaviour to execute middleware functions
|
||||
*after* a method on your app struct is called, but before the response is sent!
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
pub fn modify_headers(mut ctx Context) bool {
|
||||
// add Content-Language: 'en-US' header to each response
|
||||
ctx.res.header.add(.content_language, 'en-US')
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
```v ignore
|
||||
app.use(handler: modify_headers, after: true)
|
||||
```
|
||||
|
||||
#### When to use which type
|
||||
|
||||
You could use "before" middleware to check and modify the HTTP request and you could use
|
||||
"after" middleware to validate the HTTP response that will be sent or do some cleanup.
|
||||
|
||||
Anything you can do in "before" middleware, you can do in "after" middleware.
|
||||
|
||||
### Evaluation order
|
||||
|
||||
veb will handle requests in the following order:
|
||||
|
||||
1. Execute global "before" middleware
|
||||
2. Execute "before" middleware that matches the requested route
|
||||
3. Execute the endpoint handler on your app struct
|
||||
4. Execute global "after" middleware
|
||||
5. Execute "after" middleware that matches the requested route
|
||||
|
||||
In each step, except for step `3`, veb will evaluate the middleware in the order that
|
||||
they are registered; when you call `app.use` or `app.route_use`.
|
||||
|
||||
### Early exit
|
||||
|
||||
If any middleware sends a response (and thus must return `false`) veb will not execute any
|
||||
other middleware, or the endpoint method, and immediately send the response.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
pub fn early_exit(mut ctx Context) bool {
|
||||
ctx.text('early exit')
|
||||
// we send a response from middleware, so we have to return false
|
||||
return false
|
||||
}
|
||||
|
||||
pub fn logger(mut ctx Context) bool {
|
||||
println('received request for "${ctx.req.url}"')
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
```v ignore
|
||||
app.use(handler: early_exit)
|
||||
app.use(handler: logger)
|
||||
```
|
||||
|
||||
Because we register `early_exit` before `logger` our logging middleware will never be executed!
|
||||
|
||||
## Controllers
|
||||
|
||||
Controllers can be used to split up your app logic so you are able to have one struct
|
||||
per "route group". E.g. a struct `Admin` for urls starting with `'/admin'` and a struct `Foo`
|
||||
for urls starting with `'/foo'`.
|
||||
|
||||
To use controllers we have to embed `veb.Controller` on
|
||||
our app struct and when we register a controller we also have to specify
|
||||
what the type of the context struct will be. That means that it is possible
|
||||
to have a different context struct for each controller and the main app struct.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v
|
||||
module main
|
||||
|
||||
import veb
|
||||
|
||||
pub struct Context {
|
||||
veb.Context
|
||||
}
|
||||
|
||||
pub struct App {
|
||||
veb.Controller
|
||||
}
|
||||
|
||||
// this endpoint will be available at '/'
|
||||
pub fn (app &App) index(mut ctx Context) veb.Result {
|
||||
return ctx.text('from app')
|
||||
}
|
||||
|
||||
pub struct Admin {}
|
||||
|
||||
// this endpoint will be available at '/admin/'
|
||||
pub fn (app &Admin) index(mut ctx Context) veb.Result {
|
||||
return ctx.text('from admin')
|
||||
}
|
||||
|
||||
pub struct Foo {}
|
||||
|
||||
// this endpoint will be available at '/foo/'
|
||||
pub fn (app &Foo) index(mut ctx Context) veb.Result {
|
||||
return ctx.text('from foo')
|
||||
}
|
||||
|
||||
fn main() {
|
||||
mut app := &App{}
|
||||
|
||||
// register the controllers the same way as how we start a veb app
|
||||
mut admin_app := &Admin{}
|
||||
app.register_controller[Admin, Context]('/admin', mut admin_app)!
|
||||
|
||||
mut foo_app := &Foo{}
|
||||
app.register_controller[Foo, Context]('/foo', mut foo_app)!
|
||||
|
||||
veb.run[App, Context](mut app, 8080)
|
||||
}
|
||||
```
|
||||
|
||||
You can do everything with a controller struct as with a regular `App` struct.
|
||||
Register middleware, add static files and you can even register other controllers!
|
||||
|
||||
### Routing
|
||||
|
||||
Any route inside a controller struct is treated as a relative route to its controller namespace.
|
||||
|
||||
```v ignore
|
||||
@['/path']
|
||||
pub fn (app &Admin) path(mut ctx Context) veb.Result {
|
||||
return ctx.text('Admin')
|
||||
}
|
||||
```
|
||||
|
||||
When we registered the controller with
|
||||
`app.register_controller[Admin, Context]('/admin', mut admin_app)!`
|
||||
we told veb that the namespace of that controller is `'/admin'` so in this example we would
|
||||
see the text "Admin" if we navigate to the url `'/admin/path'`.
|
||||
|
||||
veb doesn't support duplicate routes, so if we add the following
|
||||
route to the example the code will produce an error.
|
||||
|
||||
```v ignore
|
||||
@['/admin/path']
|
||||
pub fn (app &App) admin_path(mut ctx Context) veb.Result {
|
||||
return ctx.text('Admin overwrite')
|
||||
}
|
||||
```
|
||||
|
||||
There will be an error, because the controller `Admin` handles all routes starting with
|
||||
`'/admin'`: the endpoint `admin_path` is unreachable.
|
||||
|
||||
### Controller with hostname
|
||||
|
||||
You can also set a host for a controller. All requests coming to that host will be handled
|
||||
by the controller.
|
||||
|
||||
**Example:**
|
||||
|
||||
```v ignore
|
||||
struct Example {}
|
||||
|
||||
// You can only access this route at example.com: http://example.com/
|
||||
pub fn (app &Example) index(mut ctx Context) veb.Result {
|
||||
return ctx.text('Example')
|
||||
}
|
||||
```
|
||||
|
||||
```v ignore
|
||||
mut example_app := &Example{}
|
||||
// set the controllers hostname to 'example.com' and handle all routes starting with '/',
|
||||
// we handle requests with any route to 'example.com'
|
||||
app.register_controller[Example, Context]('example.com', '/', mut example_app)!
|
||||
```
|
||||
|
||||
## Context Methods
|
||||
|
||||
veb has a number of utility methods that make it easier to handle requests and send responses.
|
||||
These methods are available on `veb.Context` and directly on your own context struct if you
|
||||
embed `veb.Context`. Below are some of the most used methods, look at the
|
||||
[standard library documentation](https://modules.vlang.io/) to see them all.
### Request methods

You can directly access the HTTP request on the `.req` field.

#### Get request headers

**Example:**

```v ignore
pub fn (app &App) index(mut ctx Context) veb.Result {
	content_length := ctx.get_header(.content_length) or { '0' }
	// get custom header
	custom_header := ctx.get_custom_header('X-HEADER') or { '' }
	// ...
}
```

#### Get a cookie

**Example:**

```v ignore
pub fn (app &App) index(mut ctx Context) veb.Result {
	cookie_val := ctx.get_cookie('token') or { '' }
	// ...
}
```

### Response methods

You can directly modify the HTTP response by changing the `res` field,
which is of type `http.Response`.

#### Send response with different MIME types

```v ignore
// send response HTTP_OK with content-type `text/html`
ctx.html('<h1>Hello world!</h1>')
// send response HTTP_OK with content-type `text/plain`
ctx.text('Hello world!')
// stringify the object and send response HTTP_OK with content-type `application/json`
ctx.json(User{
	name: 'test'
	age: 20
})
```

#### Sending files

**Example:**

```v ignore
pub fn (app &App) file_response(mut ctx Context) veb.Result {
	// send the file 'image.png' in folder 'data' to the user
	return ctx.file('data/image.png')
}
```

#### Set response headers

**Example:**

```v ignore
pub fn (app &App) index(mut ctx Context) veb.Result {
	ctx.set_header(.accept, 'text/html')
	// set custom header
	ctx.set_custom_header('X-HEADER', 'my-value')!
	// ...
}
```

#### Set a cookie

**Example:**

```v ignore
pub fn (app &App) index(mut ctx Context) veb.Result {
	ctx.set_cookie(http.Cookie{
		name: 'token'
		value: 'true'
		path: '/'
		secure: true
		http_only: true
	})
	// ...
}
```

#### Redirect

You must pass the type of redirect to veb:

- `moved_permanently` HTTP code 301
- `found` HTTP code 302
- `see_other` HTTP code 303
- `temporary_redirect` HTTP code 307
- `permanent_redirect` HTTP code 308

**Common use cases:**

If you want to change the request method, for example when you receive a POST request and
want to redirect to another page via a GET request, you should use `see_other`. If you want
the HTTP method to stay the same, you should generally use `found`.

**Example:**

```v ignore
pub fn (app &App) index(mut ctx Context) veb.Result {
	token := ctx.get_cookie('token') or { '' }
	if token == '' {
		// redirect the user to '/login' if the 'token' cookie is not set
		// we explicitly tell the browser to send a GET request
		return ctx.redirect('/login', typ: .see_other)
	} else {
		return ctx.text('Welcome!')
	}
}
```

#### Sending error responses

**Example:**

```v ignore
pub fn (app &App) login(mut ctx Context) veb.Result {
	if username := ctx.form['username'] {
		return ctx.text('Hello "${username}"')
	} else {
		// send an HTTP 400 Bad Request response with a message
		return ctx.request_error('missing form value "username"')
	}
}
```

You can also use `ctx.server_error(msg string)` to send an HTTP 500 internal server
error with a message.
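For instance, an endpoint might fall back to a 500 response when an internal step fails.
A minimal sketch based on the `ctx.server_error(msg string)` method mentioned above
(`load_users` is a hypothetical helper, not part of veb):

```v ignore
pub fn (app &App) users(mut ctx Context) veb.Result {
	// `load_users` is a hypothetical helper that may fail
	users := app.load_users() or {
		// send an HTTP 500 Internal Server Error response with a message
		return ctx.server_error('could not load users: ${err}')
	}
	return ctx.json(users)
}
```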
## Advanced usage

If you need more control over the TCP connection with a client, for example when
you want to keep the connection open, you can call `ctx.takeover_conn`.

When this function is called, you are free to do anything you want with the TCP
connection and veb will not interfere. This means that you are responsible for
sending a response over the connection and closing it.

### Empty Result

Sometimes you want to send the response in another thread, for example when using
[Server Sent Events](sse/README.md). When you are sure that a response will be sent
over the TCP connection, you can return `veb.no_result()`. This function does nothing
and returns an empty `veb.Result` struct, letting veb know that you have sent a response yourself.

> **Note:**
> It is important to call `ctx.takeover_conn` before you spawn a thread.

**Example:**

```v
module main

import net
import time
import veb

pub struct Context {
	veb.Context
}

pub struct App {}

pub fn (app &App) index(mut ctx Context) veb.Result {
	return ctx.text('hello!')
}

@['/long']
pub fn (app &App) long_response(mut ctx Context) veb.Result {
	// let veb know that the connection should not be closed
	ctx.takeover_conn()
	// use spawn to handle the connection in another thread
	// if we don't, the whole web server will block for 10 seconds,
	// since veb is single-threaded
	spawn handle_connection(mut ctx.conn)
	// we will send a custom response ourselves, so we can safely return an empty result
	return veb.no_result()
}

fn handle_connection(mut conn net.TcpConn) {
	defer {
		conn.close() or {}
	}
	// block for 10 seconds
	time.sleep(time.second * 10)
	conn.write_string('HTTP/1.1 200 OK\r\nContent-type: text/html\r\nContent-length: 15\r\n\r\nHello takeover!') or {}
}

fn main() {
	mut app := &App{}
	veb.run[App, Context](mut app, 8080)
}
```
3 cli/.gitignore vendored
@@ -1 +1,4 @@
hero
compile
compile_upload
vdo

@@ -1,6 +1,6 @@
#!/usr/bin/env -S v -parallel-cc -enable-globals run
// #!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -n -g -cg -w -parallel-cc -showcc -enable-globals run

// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import flag

@@ -14,20 +14,20 @@ prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
help_requested := fp.bool('help', `h`, false, 'Show help message')

if help_requested {
println(fp.usage())
exit(0)
println(fp.usage())
exit(0)
}

additional_args := fp.finalize() or {
eprintln(err)
println(fp.usage())
exit(1)
eprintln(err)
println(fp.usage())
exit(1)
}

if additional_args.len > 0 {
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
println(fp.usage())
exit(1)
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
println(fp.usage())
exit(1)
}

// Change to the hero directory
@@ -37,36 +37,39 @@ os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}
// Set HEROPATH based on OS
mut heropath := '/usr/local/bin/hero'
if os.user_os() == 'macos' {
heropath = os.join_path(os.home_dir(), 'hero/bin/hero')
heropath = os.join_path(os.home_dir(), 'hero/bin/hero')
}

// Set compilation command based on OS and mode
compile_cmd := if os.user_os() == 'macos' {
if prod_mode {
'v -enable-globals -w -n -prod hero.v'
} else {
'v -w -cg -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals hero.v'
}
if prod_mode {
'v -enable-globals -g -w -n -prod hero.v'
} else {
'v -n -g -w -cg -gc none -cc tcc -d use_openssl -enable-globals hero.v'
}
} else {
if prod_mode {
'v -cg -enable-globals -parallel-cc -w -n hero.v'
} else {
'v -cg -enable-globals -w -n hero.v'
}
if prod_mode {
'v -cg -enable-globals -parallel-cc -w -n hero.v'
} else {
'v -cg -enable-globals -w -n hero.v'
}
}

println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')
eprintln(compile_cmd)

if os.system(compile_cmd) != 0 {
panic('Failed to compile hero.v with command: ${compile_cmd}')
panic('Failed to compile hero.v with command: ${compile_cmd}')
}

// Make executable
os.chmod('hero', 0o755) or { panic('Failed to make hero binary executable: ${err}') }

// Ensure destination directory exists
os.mkdir_all(os.dir(heropath)) or { panic('Failed to create directory ${os.dir(heropath)}: ${err}') }

os.mkdir_all(os.dir(heropath)) or {
panic('Failed to create directory ${os.dir(heropath)}: ${err}')
}
println(heropath)
// Copy to destination paths
os.cp('hero', heropath) or { panic('Failed to copy hero binary to ${heropath}: ${err}') }
os.cp('hero', '/tmp/hero') or { panic('Failed to copy hero binary to /tmp/hero: ${err}') }

@@ -28,7 +28,7 @@ fn get_platform_id() string {
}

fn read_secrets() ! {
secret_file := os.join_path(os.home_dir(), 'code/git.ourworld.tf/despiegk/hero_secrets/mysecrets.sh')
secret_file := os.join_path(os.home_dir(), 'code/git.threefold.info/despiegk/hero_secrets/mysecrets.sh')
if os.exists(secret_file) {
println('Reading secrets from ${secret_file}')
content := os.read_file(secret_file)!
@@ -64,7 +64,9 @@ account = ${s3keyid}
key = ${s3appid}
hard_delete = true'

os.write_file(rclone_conf, config_content) or { return error('Failed to write rclone config: ${err}') }
os.write_file(rclone_conf, config_content) or {
return error('Failed to write rclone config: ${err}')
}

println('made S3 config on: ${rclone_conf}')
content := os.read_file(rclone_conf) or { return error('Failed to read rclone config: ${err}') }
@@ -72,8 +74,10 @@ hard_delete = true'
}

fn hero_upload() ! {
hero_path := os.find_abs_path_of_executable('hero') or { return error("Error: 'hero' command not found in PATH") }

hero_path := os.find_abs_path_of_executable('hero') or {
return error("Error: 'hero' command not found in PATH")
}

s3_configure()!

platform_id := get_platform_id()
@@ -83,11 +87,18 @@ fn hero_upload() ! {

// List contents
os.execute_or_panic('rclone --config="${rclone_conf}" lsl b2:threefold/${platform_id}/')

// Copy hero binary
os.execute_or_panic('rclone --config="${rclone_conf}" copy "${hero_path}" b2:threefold/${platform_id}/')
}

fn main() {
hero_upload() or { eprintln(err) exit(1) }
// os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
println('compile hero can take 60 sec+ on osx.')
os.execute_or_panic('${os.home_dir()}/code/github/freeflowuniverse/herolib/cli/compile.vsh -p')
println('upload:')
hero_upload() or {
eprintln(err)
exit(1)
}
}

79 cli/compile_vdo.vsh Executable file
@@ -0,0 +1,79 @@
#!/usr/bin/env -S v -n -cg -w -parallel-cc -enable-globals run

// #!/usr/bin/env -S v -n -w -gc none -cc tcc -d use_openssl -enable-globals run
import os
import flag

mut fp := flag.new_flag_parser(os.args)
fp.application('compile_vdo.vsh')
fp.version('v0.1.0')
fp.description('Compile vdo binary in debug or production mode')
fp.skip_executable()

prod_mode := fp.bool('prod', `p`, false, 'Build production version (optimized)')
help_requested := fp.bool('help', `h`, false, 'Show help message')

if help_requested {
println(fp.usage())
exit(0)
}

additional_args := fp.finalize() or {
eprintln(err)
println(fp.usage())
exit(1)
}

if additional_args.len > 0 {
eprintln('Unexpected arguments: ${additional_args.join(' ')}')
println(fp.usage())
exit(1)
}

// Change to the vdo directory
hero_dir := os.join_path(os.home_dir(), 'code/github/freeflowuniverse/herolib/cli')
os.chdir(hero_dir) or { panic('Failed to change directory to ${hero_dir}: ${err}') }

// Set HEROPATH based on OS
mut heropath := '/usr/local/bin/vdo'
if os.user_os() == 'macos' {
heropath = os.join_path(os.home_dir(), 'hero/bin/vdo')
}

// Set compilation command based on OS and mode
compile_cmd := if os.user_os() == 'macos' {
if prod_mode {
'v -enable-globals -w -n -prod vdo.v'
} else {
'v -w -cg -gc none -cc tcc -d use_openssl -enable-globals vdo.v'
}
} else {
if prod_mode {
'v -cg -enable-globals -parallel-cc -w -n vdo.v'
} else {
'v -cg -enable-globals -w -n vdo.v'
}
}

println('Building in ${if prod_mode { 'production' } else { 'debug' }} mode...')

if os.system(compile_cmd) != 0 {
panic('Failed to compile vdo.v with command: ${compile_cmd}')
}

// Make executable
os.chmod('vdo', 0o755) or { panic('Failed to make vdo binary executable: ${err}') }

// Ensure destination directory exists
os.mkdir_all(os.dir(heropath)) or {
panic('Failed to create directory ${os.dir(heropath)}: ${err}')
}
println(heropath)
// Copy to destination paths
os.cp('vdo', heropath) or { panic('Failed to copy vdo binary to ${heropath}: ${err}') }
os.cp('vdo', '/tmp/vdo') or { panic('Failed to copy vdo binary to /tmp/vdo: ${err}') }

// Clean up
os.rm('vdo') or { panic('Failed to remove temporary vdo binary: ${err}') }

println('**COMPILE OK**')
61 cli/hero.v
@@ -3,22 +3,39 @@ module main
import os
import cli { Command }
import freeflowuniverse.herolib.core.herocmds
// import freeflowuniverse.herolib.hero.cmds
// import freeflowuniverse.herolib.hero.publishing
import freeflowuniverse.herolib.installers.base
import freeflowuniverse.herolib.ui.console
import freeflowuniverse.herolib.ui
import freeflowuniverse.herolib.osal
import freeflowuniverse.herolib.osal.core as osal
import freeflowuniverse.herolib.core
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.core.playcmds

fn playcmds_do(path string) ! {
mut plbook := playbook.new(path: path)!
playcmds.run(mut plbook, false)!
playcmds.run(plbook: plbook)!
}

fn do() ! {
if !core.is_osx()! {
if os.getenv('SUDO_COMMAND') != '' || os.getenv('SUDO_USER') != '' {
println('Error: Please do not run this program with sudo!')
exit(1) // Exit with error code
}
}

if os.getuid() == 0 {
if core.is_osx()! {
eprintln('please do not run hero as root in osx.')
exit(1)
}
} else {
if !core.is_osx()! {
eprintln("please do run hero as root, don't use sudo.")
exit(1)
}
}

if os.args.len == 2 {
mypath := os.args[1]
if mypath.to_lower().ends_with('.hero') {
@@ -31,7 +48,7 @@ fn do() ! {
mut cmd := Command{
name: 'hero'
description: 'Your HERO toolset.'
version: '2.0.0'
version: '1.0.28'
}

// herocmds.cmd_run_add_flags(mut cmd)
@@ -64,33 +81,25 @@ fn do() ! {

base.redis_install()!

// herocmds.cmd_bootstrap(mut cmd)
// herocmds.cmd_run(mut cmd)
herocmds.cmd_run(mut cmd)
herocmds.cmd_git(mut cmd)
// herocmds.cmd_init(mut cmd)
// herocmds.cmd_imagedownsize(mut cmd)
// herocmds.cmd_biztools(mut cmd)
// herocmds.cmd_gen(mut cmd)
// herocmds.cmd_sshagent(mut cmd)
// herocmds.cmd_installers(mut cmd)
// herocmds.cmd_configure(mut cmd)
// herocmds.cmd_postgres(mut cmd)
herocmds.cmd_mdbook(mut cmd)
// herocmds.cmd_luadns(mut cmd)
// herocmds.cmd_caddy(mut cmd)
// herocmds.cmd_zola(mut cmd)
// herocmds.cmd_juggler(mut cmd)
herocmds.cmd_generator(mut cmd)
// herocmds.cmd_docsorter(mut cmd)
// cmd.add_command(publishing.cmd_publisher(pre_func))
herocmds.cmd_docusaurus(mut cmd)
// herocmds.cmd_web(mut cmd)

cmd.setup()
cmd.parse(os.args)
}

fn main() {
do() or { panic(err) }
do() or {
$dbg;
eprintln('Error: ${err}')
print_backtrace()
exit(1)
}
}

fn pre_func(cmd Command) ! {
herocmds.plbook_run(cmd)!
}
// fn pre_func(cmd Command) ! {
// herocmds.plbook_run(cmd)!
// }

12 cli/vdo.v Normal file
@@ -0,0 +1,12 @@
module main

import freeflowuniverse.herolib.mcp.v_do

fn main() {
// Create and start the MCP server
mut server := v_do.new_server()
server.start() or {
eprintln('Error starting server: ${err}')
exit(1)
}
}
480 compile.sh Executable file
@@ -0,0 +1,480 @@
#!/bin/bash
# compile.sh - Script to compile each module in the herolib/lib directory
# This script compiles each module in the lib directory to ensure they build correctly

set -e # Exit on error

# Default settings
CONCURRENT=false
MAX_JOBS=4 # Default number of concurrent jobs

# Parse command line arguments
while [[ $# -gt 0 ]]; do
case $1 in
-c|--concurrent)
CONCURRENT=true
shift
;;
-j|--jobs)
MAX_JOBS="$2"
shift 2
;;
-h|--help)
echo "Usage: $0 [options]"
echo "Options:"
echo " -c, --concurrent Enable concurrent compilation"
echo " -j, --jobs N Set maximum number of concurrent jobs (default: 4)"
echo " -h, --help Show this help message"
exit 0
;;
*)
echo "Unknown option: $1"
echo "Use -h or --help for usage information"
exit 1
;;
esac
done

# Color codes for output
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color

# Get the directory of this script
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
LIB_DIR="$SCRIPT_DIR/lib"

# V compiler flags based on the project's test script
V_FLAGS="-stats -enable-globals -n -w -gc none -d use_openssl -shared"

# Log file for compilation results
LOG_FILE="$SCRIPT_DIR/compile_results.log"
> "$LOG_FILE" # Clear log file

# Summary log file
SUMMARY_FILE="$SCRIPT_DIR/compile_summary.log"
> "$SUMMARY_FILE" # Clear summary file

# Cache directory for storing timestamps of last successful compilation
CACHE_DIR="$SCRIPT_DIR/.compile_cache"
mkdir -p "$CACHE_DIR"

# Create temporary directory for compiled binaries
mkdir -p "$SCRIPT_DIR/tmp"

# Create a directory for temporary output files
TEMP_DIR="$SCRIPT_DIR/.temp_compile"
mkdir -p "$TEMP_DIR"

# Trap for cleaning up on exit
cleanup() {
echo "Cleaning up..."
# Kill any remaining child processes
jobs -p | xargs kill -9 2>/dev/null || true
# Remove temporary directories
rm -rf "$TEMP_DIR" "$SCRIPT_DIR/tmp" 2>/dev/null || true
exit 0
}

# Set up traps for various signals
trap cleanup EXIT INT TERM

# Define modules to skip entirely due to known compilation issues
SKIP_MODULES=("flist" "openai" "mycelium" "vastai" "rclone" "sendgrid" "mailclient" "ipapi" "runpod" "postgresql_client" "meilisearch" "livekit" "wireguard" "_archive" "clients")

# Function to check if a module should be skipped
should_skip_module() {
local module_name="$1"

for skip_module in "${SKIP_MODULES[@]}"; do
if [[ "$module_name" == "$skip_module" ]]; then
return 0 # true, should skip
fi
done

return 1 # false, should not skip
}

# Function to check if a module needs recompilation
needs_module_recompilation() {
local module_path="$1"
local module_name="$(basename "$module_path")"
local cache_file="$CACHE_DIR/$module_name.timestamp"

# If cache file doesn't exist, module needs recompilation
if [ ! -f "$cache_file" ]; then
return 0 # true, needs recompilation
fi

# Check if any .v file in the module is newer than the last compilation
if find "$module_path" -name "*.v" -type f -newer "$cache_file" | grep -q .; then
return 0 # true, needs recompilation
fi

return 1 # false, doesn't need recompilation
}

# Function to update the cache timestamp for a module
update_module_cache() {
local module_path="$1"
local module_name="$(basename "$module_path")"
local cache_file="$CACHE_DIR/$module_name.timestamp"

# Update the timestamp
touch "$cache_file"
}

# Function to check if a directory is a module (contains .v files directly, not just in subdirectories)
is_module() {
local dir_path="$1"

# Check if there are any .v files directly in this directory (not in subdirectories)
if [ -n "$(find "$dir_path" -maxdepth 1 -name "*.v" -type f -print -quit)" ]; then
return 0 # true, is a module
fi

return 1 # false, not a module
}

# Function to compile a module
compile_module() {
local module_path="$1"
local module_name="$(basename "$module_path")"
local output_file="$TEMP_DIR/${module_name}.log"
local result_file="$TEMP_DIR/${module_name}.result"

# Initialize the result file
echo "pending" > "$result_file"

# Check if this module should be skipped
if should_skip_module "$module_name"; then
echo "Skipping problematic module: $module_name" > "$output_file"
echo "skipped|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
echo "skipped" > "$result_file"
return 0
fi

# Check if this is actually a module (has .v files directly)
if ! is_module "$module_path"; then
echo "$module_name is not a module (no direct .v files), skipping" > "$output_file"
echo "not_module|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
echo "skipped" > "$result_file"
return 0
fi

echo "Compiling module: $module_name" > "$output_file"

# Check if the module needs recompilation
if ! needs_module_recompilation "$module_path"; then
echo " No changes detected in $module_name, skipping compilation" >> "$output_file"
echo "cached|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
echo "cached" > "$result_file"
return 0
fi

# Record start time
local start_time=$(date +%s.%N)

# Try to compile the module - redirect both stdout and stderr to the output file
if v $V_FLAGS -o "$SCRIPT_DIR/tmp/$module_name" "$module_path" >> "$output_file" 2>&1; then
# Calculate compilation time
local end_time=$(date +%s.%N)
local compile_time=$(echo "$end_time - $start_time" | bc)

echo " Successfully compiled $module_name" >> "$output_file"
# Update the cache timestamp
update_module_cache "$module_path"

# Log result
echo "success|${module_path#$LIB_DIR/}|$compile_time" >> "$SUMMARY_FILE"
echo "success" > "$result_file"
return 0
else
echo " Failed to compile $module_name" >> "$output_file"

# Log result
echo "failed|${module_path#$LIB_DIR/}|" >> "$SUMMARY_FILE"
echo "failed" > "$result_file"
return 1
fi
}

# Function to run modules in parallel with a maximum number of concurrent jobs
run_parallel() {
local modules=("$@")
local total=${#modules[@]}
local completed=0
local running=0
local pids=()
local module_indices=()

echo "Running $total modules in parallel (max $MAX_JOBS jobs at once)"

# Initialize arrays to track jobs
for ((i=0; i<$total; i++)); do
pids[$i]=-1
done

# Start initial batch of jobs
local next_job=0
while [[ $next_job -lt $total && $running -lt $MAX_JOBS ]]; do
compile_module "${modules[$next_job]}" > /dev/null 2>&1 &
pids[$next_job]=$!
((running++))
((next_job++))
done

# Display progress indicator
display_progress() {
local current=$1
local total=$2
local percent=$((current * 100 / total))
local bar_length=50
local filled_length=$((percent * bar_length / 100))

printf "\r[" >&2
for ((i=0; i<bar_length; i++)); do
if [ $i -lt $filled_length ]; then
printf "#" >&2
else
printf " " >&2
fi
done
printf "] %d%% (%d/%d modules)" $percent $current $total >&2
}

# Monitor running jobs and start new ones as needed
while [[ $completed -lt $total ]]; do
display_progress $completed $total

# Check for completed jobs
for ((i=0; i<$total; i++)); do
if [[ ${pids[$i]} -gt 0 ]]; then
if ! kill -0 ${pids[$i]} 2>/dev/null; then
# Job completed
local module_path="${modules[$i]}"
local module_name="$(basename "$module_path")"
local output_file="$TEMP_DIR/${module_name}.log"

# Add output to log file
if [[ -f "$output_file" ]]; then
cat "$output_file" >> "$LOG_FILE"
fi

# Mark job as completed
pids[$i]=-2
((completed++))
((running--))

# Start a new job if available
if [[ $next_job -lt $total ]]; then
compile_module "${modules[$next_job]}" > /dev/null 2>&1 &
pids[$next_job]=$!
((running++))
((next_job++))
fi
fi
fi
done

# Brief pause to avoid excessive CPU usage
sleep 0.1
done

# Clear the progress line
printf "\r%$(tput cols)s\r" ""

# Wait for any remaining background jobs
wait
}

# Function to find all modules in a directory (recursively)
find_modules() {
local dir_path="$1"
local modules=()

# Check if this directory is a module itself
if is_module "$dir_path"; then
modules+=("$dir_path")
fi

# Look for modules in subdirectories (only one level deep)
for subdir in "$dir_path"/*; do
if [ -d "$subdir" ]; then
local subdir_name="$(basename "$subdir")"

# Skip if this is in the skip list
if should_skip_module "$subdir_name"; then
echo -e "${YELLOW}Skipping problematic module: $subdir_name${NC}"
echo "Skipping problematic module: $subdir_name" >> "$LOG_FILE"
echo "skipped|${subdir#$LIB_DIR/}|" >> "$SUMMARY_FILE"
continue
fi

# Check if this subdirectory is a module
if is_module "$subdir"; then
modules+=("$subdir")
fi
fi
done

echo "${modules[@]}"
}

echo "===== Starting compilation of all modules in lib ====="
echo "===== Starting compilation of all modules in lib =====" >> "$LOG_FILE"

# Define priority modules to compile first
PRIORITY_MODULES=("biz" "builder" "core" "crystallib" "jsonrpc" "jsonschema")

echo -e "${YELLOW}Attempting to compile each module as a whole...${NC}"
echo "Attempting to compile each module as a whole..." >> "$LOG_FILE"

# Collect all modules to compile
all_modules=()

# First add priority modules
for module_name in "${PRIORITY_MODULES[@]}"; do
module_dir="$LIB_DIR/$module_name"
if [ -d "$module_dir" ]; then
# Find all modules in this directory
modules=($(find_modules "$module_dir"))
all_modules+=("${modules[@]}")
fi
done

# Then add remaining modules
for module_dir in "$LIB_DIR"/*; do
if [ -d "$module_dir" ]; then
module_name="$(basename "$module_dir")"
# Skip modules already compiled in priority list
if [[ " ${PRIORITY_MODULES[*]} " =~ " $module_name " ]]; then
continue
fi

# Find all modules in this directory
modules=($(find_modules "$module_dir"))
all_modules+=("${modules[@]}")
fi
done

# Debug: print all modules found
echo "Found ${#all_modules[@]} modules to compile" >> "$LOG_FILE"
for module in "${all_modules[@]}"; do
echo " - $module" >> "$LOG_FILE"
done

# Compile modules (either in parallel or sequentially)
if $CONCURRENT; then
run_parallel "${all_modules[@]}"
else
# Sequential compilation
for module_path in "${all_modules[@]}"; do
# Display module being compiled
module_name="$(basename "$module_path")"
echo -e "${YELLOW}Compiling module: $module_name${NC}"

# Compile the module
compile_module "$module_path" > /dev/null 2>&1

# Display result
output_file="$TEMP_DIR/${module_name}.log"
result_file="$TEMP_DIR/${module_name}.result"

if [[ -f "$output_file" ]]; then
cat "$output_file" >> "$LOG_FILE"

# Display with color based on result
result=$(cat "$result_file")
if [[ "$result" == "success" ]]; then
echo -e "${GREEN} Successfully compiled $module_name${NC}"
elif [[ "$result" == "failed" ]]; then
echo -e "${RED} Failed to compile $module_name${NC}"
elif [[ "$result" == "cached" ]]; then
echo -e "${GREEN} No changes detected in $module_name, skipping compilation${NC}"
else
echo -e "${YELLOW} Skipped $module_name${NC}"
fi
fi
done
fi

# Count successes and failures
success_count=$(grep -c "^success|" "$SUMMARY_FILE" || echo 0)
failure_count=$(grep -c "^failed|" "$SUMMARY_FILE" || echo 0)
cached_count=$(grep -c "^cached|" "$SUMMARY_FILE" || echo 0)
skipped_count=$(grep -c "^skipped|" "$SUMMARY_FILE" || echo 0)
not_module_count=$(grep -c "^not_module|" "$SUMMARY_FILE" || echo 0)

echo "===== Compilation complete ====="
echo -e "${GREEN}Successfully compiled: $success_count modules${NC}"
echo -e "${GREEN}Cached (no changes): $cached_count modules${NC}"
echo -e "${YELLOW}Skipped: $skipped_count modules${NC}"
echo -e "${YELLOW}Not modules: $not_module_count directories${NC}"
echo -e "${RED}Failed to compile: $failure_count modules${NC}"
echo "See $LOG_FILE for detailed compilation results"

echo "===== Compilation complete =====" >> "$LOG_FILE"
echo "Successfully compiled: $success_count modules" >> "$LOG_FILE"
echo "Cached (no changes): $cached_count modules" >> "$LOG_FILE"
echo "Skipped: $skipped_count modules" >> "$LOG_FILE"
echo "Not modules: $not_module_count directories" >> "$LOG_FILE"
echo "Failed to compile: $failure_count modules" >> "$LOG_FILE"

# Print detailed summary
echo ""
echo "===== Module Compilation Summary ====="
echo ""

# Print successful modules first, sorted by compilation time
echo "Successful compilations:"
grep "^success|" "$SUMMARY_FILE" | sort -t'|' -k3,3n | while IFS='|' read -r status path time; do
# Color code based on compilation time
time_color="$GREEN"
if (( $(echo "$time > 10.0" | bc -l) )); then
time_color="$RED"
elif (( $(echo "$time > 1.0" | bc -l) )); then
time_color="$YELLOW"
fi

echo -e "✅ $path\t${time_color}${time}s${NC}"
done

# Print cached modules
echo ""
echo "Cached modules (no changes detected):"
grep "^cached|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
echo -e "🔄 $path\t${GREEN}CACHED${NC}"
done

# Print skipped modules
echo ""
echo "Skipped modules:"
grep "^skipped|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
echo -e "⏭️ $path\t${YELLOW}SKIPPED${NC}"
done

# Print not modules
echo ""
echo "Not modules (directories without direct .v files):"
grep "^not_module|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
echo -e "📁 $path\t${YELLOW}NOT MODULE${NC}"
done

# Print failed modules
echo ""
echo "Failed modules:"
grep "^failed|" "$SUMMARY_FILE" | sort | while IFS='|' read -r status path time; do
echo -e "❌ $path\t${RED}FAILED${NC}"
done

echo ""
echo "===== End of Summary ====="

# Exit with error code if any module failed to compile
if [ $failure_count -gt 0 ]; then
exit 1
fi

exit 0
Some files were not shown because too many files have changed in this diff.