Merge branch 'main_openai_fixes' into development_actions

* main_openai_fixes:
  wip: add tests for openai client
  fix crystallib imports
  fixes in openai client

# Conflicts:
#	examples/hero/generation/openapi_generation/example_actor/actor.v
#	examples/hero/openapi/actor.vsh
#	examples/hero/openapi/server.vsh
2024-12-31 10:23:20 +01:00
27 changed files with 787 additions and 353 deletions

View File

@@ -25,6 +25,7 @@ The parser supports several formats:
4. Comments: `// this is a comment`
Example:
```v
text := "name:'John Doe' age:30 active:true // user details"
params := paramsparser.new(text)!
@@ -59,6 +60,7 @@ progress := params.get_percentage("progress")!
The module supports various type conversions:
### Basic Types
- `get_int()`: Convert to int32
- `get_u32()`: Convert to unsigned 32-bit integer
- `get_u64()`: Convert to unsigned 64-bit integer
@@ -67,10 +69,12 @@ The module supports various type conversions:
- `get_percentage()`: Convert percentage string to float (e.g., "80%" → 0.8)
### Boolean Values
- `get_default_true()`: Returns true if value is empty, "1", "true", "y", or "yes"
- `get_default_false()`: Returns false if value is empty, "0", "false", "n", or "no"
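For illustration, a minimal sketch combining the getters above (values are illustrative; import path as used elsewhere in this repo):
```v
import freeflowuniverse.herolib.data.paramsparser

// sketch only: assumes the getters behave as listed above
text := "age:30 progress:'80%' active:true"
params := paramsparser.new(text)!
age := params.get_int('age')!                  // 30
progress := params.get_percentage('progress')! // 0.8
active := params.get_default_true('active')    // true
println('${age} ${progress} ${active}')
```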
### Lists
The module provides robust support for parsing and converting lists:
```v
@@ -89,6 +93,7 @@ clean_names := params.get_list_namefix("categories")!
```
Supported list types:
- `get_list()`: String list
- `get_list_u8()`, `get_list_u16()`, `get_list_u32()`, `get_list_u64()`: Unsigned integers
- `get_list_i8()`, `get_list_i16()`, `get_list_int()`, `get_list_i64()`: Signed integers
@@ -97,6 +102,7 @@ Supported list types:
Each list method has a corresponding `_default` version that accepts a default value.
Valid list formats:
```v
users: "john, jane,bob"
ids: "1,2,3,4,5"

View File

@@ -1,6 +1,5 @@
# module osal
import as
```vlang
@@ -46,7 +45,6 @@ pub enum CPUType {
## process
### execute jobs
```v
@@ -91,10 +89,10 @@ info returns like:
## other commands
fn bin_path() !string
fn cmd_add(args_ CmdAddArgs) !
copy a binary to the right location on the local computer, e.g. /usr/local/bin on linux, ~/hero/bin on osx; will also add the bin location to the path in .zprofile and .zshrc (different per platform)
fn cmd_exists(cmd string) bool
fn cmd_exists_profile(cmd string) bool
fn cmd_path(cmd string) !string
is the same as executing `which` in the OS; returns the path or an error
@@ -117,7 +115,7 @@ fn done_get_str(key string) string
fn done_print() !
fn done_reset() !
fn done_set(key string, val string) !
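A hedged sketch of the done_* key/value registry, using only the signatures listed here (import path assumed):
```v
import freeflowuniverse.herolib.osal

// illustrative only
osal.done_set('install_redis', 'ok')!
println(osal.done_get_str('install_redis')) // 'ok'
osal.done_print()!
osal.done_reset()!
```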
fn download(args_ DownloadArgs) !pathlib.Path
if name is not specified, it will be the filename part if the last part ends in an extension like .md .txt .log .text ...; the file will be downloaded
fn env_get(key string) !string
Returns the requested environment variable if it exists or throws an error if it does not
@@ -167,6 +165,7 @@ fn exec(cmd Command) !Job
process os.Process
```
returns a Job.
fn exec_string(cmd Command) !string
cmd is the command to execute; can use ' ' and spaces. If there is a \n in cmd, it will write it to a file and then execute it with bash. If die==false it will just return returncode,out but not return an error. If stdout, it will show stderr and stdout.
@@ -175,7 +174,8 @@ fn exec_string(cmd Command) !string
Command arguments: `cmd string`, `timeout int = 600`, `stdout bool = true`, `die bool = true`, `debug bool`
returns what needs to be executed; can give it to bash -c ...
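A hedged example of running a command through `exec`, using the Command fields above (named-argument call style and import path are assumptions):
```v
import freeflowuniverse.herolib.osal

// sketch only: fields taken from the Command description above
job := osal.exec(
	cmd:     'echo hello'
	timeout: 60
	stdout:  true
	die:     false
)!
println(job)
```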
fn execute_debug(cmd string) !string
fn execute_interactive(cmd string) !
shortcut to execute a job interactive means in shell
fn execute_ok(cmd string) bool
@@ -205,7 +205,7 @@ fn load_env_file(file_path string) !
fn memdb_exists(key string) bool
fn memdb_get(key string) string
fn memdb_set(key string, val string)
fn package_install(name_ string) !
installs a package; will use the right commands per platform
fn package_refresh() !
update the package list
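A minimal hedged sketch of the two calls above (package name is illustrative, import path assumed):
```v
import freeflowuniverse.herolib.osal

osal.package_refresh()!       // update the package list
osal.package_install('wget')! // uses the right command per platform
```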
@@ -221,8 +221,7 @@ fn processinfo_children(pid int) !ProcessMap
get all children of 1 process
fn processinfo_get(pid int) !ProcessInfo
get process info from 1 specific process, returns:
```
pub struct ProcessInfo {
pub mut:
cpu_perc f32
mem_perc f32
@@ -232,7 +231,7 @@ fn processinfo_get(pid int) !ProcessInfo
//resident memory
rss int
}
```
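A hedged sketch reading the ProcessInfo fields shown above (import path assumed):
```v
import os
import freeflowuniverse.herolib.osal

info := osal.processinfo_get(os.getpid())!
println('cpu: ${info.cpu_perc}% mem: ${info.mem_perc}% rss: ${info.rss}')
```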
fn processinfo_get_byname(name string) ![]ProcessInfo
fn processinfo_with_children(pid int) !ProcessMap
return the process and its children
@@ -250,11 +249,10 @@ fn sleep(duration int)
sleep in seconds
fn tcp_port_test(args TcpPortTestArgs) bool
test if a tcp port answers
```
address string //192.168.8.8
port int = 22
timeout u16 = 2000 // total time in milliseconds to keep on trying
```
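For example (named-argument call style is an assumption):
```v
import freeflowuniverse.herolib.osal

if osal.tcp_port_test(address: '192.168.8.8', port: 22, timeout: 2000) {
	println('port 22 answers')
}
```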
fn user_add(args UserArgs) !int
adds a user if the user does not exist yet
fn user_exists(username string) bool
@@ -437,4 +435,5 @@ struct UserArgs {
pub mut:
name string @[required]
}

View File

@@ -1,7 +1,7 @@
# module ui.console.chalk
Chalk offers functions:
- `console.color_fg(text string, color string)` - To change the foreground color.
- `console.color_bg(text string, color string)` - To change the background color.
- `console.style(text string, style string)` - To change the text style.
@@ -18,6 +18,7 @@ println('I am really ' + console.color_fg(console.style('ANGRY', 'bold'), 'red')
```
Available colors:
- black
- red
- green
- yellow
@@ -36,6 +37,7 @@ Available colors:- black
- white
Available styles:
- bold
- dim
- underline
- blink
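A short hedged sketch combining the helpers above (import path as used elsewhere in this repo):
```v
import freeflowuniverse.herolib.ui.console

println(console.color_fg('ok', 'green'))
println(console.color_bg('warning', 'yellow'))
println(console.style(console.color_fg('ANGRY', 'red'), 'bold'))
```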

View File

@@ -9,10 +9,9 @@ import freeflowuniverse.herolib.ui.console
```
## Methods
```v
fn clear()
//reset the console screen
@@ -86,13 +85,12 @@ fn style(c Style) string
fn trim(c_ string) string
```
## Console Object
Is used to ask feedback to users
```v
struct UIConsole {
@@ -148,11 +146,8 @@ fn (mut c UIConsole) status() string
```
## enums
```v
enum BackgroundColor {
default_color = 49 // 'default' is a reserved keyword in V

View File

@@ -1,4 +1,3 @@
# how to run the vshell example scripts
this is how we want example scripts to be, see the first line
@@ -18,4 +17,3 @@ the files are in ~/code/github/freeflowuniverse/herolib/examples for herolib
## important instructions
- never use fn main() in a .vsh script
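A minimal hedged sketch of such a script, reusing the shebang from the example scripts in this commit (content is illustrative):
```v
#!/usr/bin/env -S v -w -n -enable-globals run

import os

// no fn main(): statements in a .vsh script run top to bottom
println('running from ${os.getwd()}')
```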

View File

@@ -28,7 +28,7 @@ pub fn run_server(params RunParams) ! {
mut server := actor.new_server(
redis_url: 'localhost:6379'
redis_queue: a.name
openapi_spec: openapi_specification
openapi_spec: example_actor.openapi_specification
)!
server.run(params)
}

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -w -n -enable-globals run
import os
import time

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env -S v -n -w -gc none -no-retry-compilation -cc tcc -d use_openssl -enable-globals run
#!/usr/bin/env -S v -w -n -enable-globals run
import os
import time

View File

@@ -0,0 +1,27 @@
module openai
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
__global (
openai_global map[string]&OpenAI
openai_default string
)
/////////FACTORY
@[params]
pub struct ArgsGet {
pub mut:
name string
}
pub fn get(args_ ArgsGet) !&OpenAI {
return &OpenAI{}
}
// switch instance to be used for openai
pub fn switch(name string) {
openai_default = name
}

View File

@@ -0,0 +1,49 @@
module openai
import freeflowuniverse.herolib.data.paramsparser
import os
import freeflowuniverse.herolib.core.httpconnection
pub const version = '0.0.0'
const singleton = false
const default = true
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@[heap]
pub struct OpenAI {
pub mut:
name string = 'default'
mail_from string
mail_password string @[secret]
mail_port int
mail_server string
mail_username string
}
fn obj_init(obj_ OpenAI) !OpenAI {
// never call get here, only thing we can do here is work on object itself
mut obj := obj_
panic('implement')
return obj
}
pub fn (mut client OpenAI) connection() !&httpconnection.HTTPConnection {
mut c := client.conn or {
mut c2 := httpconnection.new(
name: 'openrouterclient_${client.name}'
url: 'https://openrouter.ai/api/v1/chat/completions'
cache: false
retry: 0
)!
c2
}
// see https://modules.vlang.io/net.http.html#CommonHeader
// -H "Authorization: Bearer $OPENROUTER_API_KEY" \
c.default_header.set(.authorization, 'Bearer ${client.openaikey}')
c.default_header.add_custom('HTTP-Referer', client.your_site_url)!
c.default_header.add_custom('X-Title', client.your_site_name)!
client.conn = c
return c
}

View File

@@ -60,22 +60,22 @@ pub mut:
// create transcription from an audio file
// supported audio formats are mp3, mp4, mpeg, mpga, m4a, wav, or webm
pub fn (mut f OpenAIClient[Config]) create_transcription(args AudioArgs) !AudioResponse {
pub fn (mut f OpenAI) create_transcription(args AudioArgs) !AudioResponse {
return f.create_audio_request(args, 'audio/transcriptions')
}
// create translation to english from an audio file
// supported audio formats are mp3, mp4, mpeg, mpga, m4a, wav, or webm
pub fn (mut f OpenAIClient[Config]) create_tranlation(args AudioArgs) !AudioResponse {
pub fn (mut f OpenAI) create_tranlation(args AudioArgs) !AudioResponse {
return f.create_audio_request(args, 'audio/translations')
}
fn (mut f OpenAIClient[Config]) create_audio_request(args AudioArgs, endpoint string) !AudioResponse {
fn (mut f OpenAI) create_audio_request(args AudioArgs, endpoint string) !AudioResponse {
file_content := os.read_file(args.filepath)!
ext := os.file_ext(args.filepath)
mut file_mime_type := ''
if ext in audio_mime_types {
file_mime_type = audio_mime_types[ext]
if ext in openai.audio_mime_types {
file_mime_type = openai.audio_mime_types[ext]
} else {
return error('file extension not supported')
}
@@ -91,7 +91,7 @@ fn (mut f OpenAIClient[Config]) create_audio_request(args AudioArgs, endpoint st
'file': [file_data]
}
form: {
'model': audio_model
'model': openai.audio_model
'prompt': args.prompt
'response_format': audio_resp_type_str(args.response_format)
'temperature': args.temperature.str()
@@ -102,9 +102,49 @@ fn (mut f OpenAIClient[Config]) create_audio_request(args AudioArgs, endpoint st
req := httpconnection.Request{
prefix: endpoint
}
r := f.connection.post_multi_part(req, form)!
mut conn := f.connection()!
r := conn.post_multi_part(req, form)!
if r.status_code != 200 {
return error('got error from server: ${r.body}')
}
return json.decode(AudioResponse, r.body)!
}
@[params]
pub struct CreateSpeechArgs {
pub:
model ModelType = .tts_1
input string @[required]
voice Voice = .alloy
response_format AudioFormat = .mp3
speed f32 = 1.0
output_path string @[required]
}
pub struct CreateSpeechRequest {
pub:
model string
input string
voice string
response_format string
speed f32
}
pub fn (mut f OpenAI) create_speech(args CreateSpeechArgs) ! {
mut output_file := os.open_file(args.output_path, 'w+')!
req := CreateSpeechRequest{
model: modelname_str(args.model)
input: args.input
voice: voice_str(args.voice)
response_format: audio_format_str(args.response_format)
speed: args.speed
}
data := json.encode(req)
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'audio/speech', data: data)!
output_file.write(r.bytes())!
}

View File

@@ -0,0 +1,105 @@
module openai
import os
fn test_chat_completion() {
key := os.getenv('OPENAI_API_KEY')
heroscript := '!!openai.configure api_key: "${key}"'
play(heroscript: heroscript)!
mut client := get()!
res := client.chat_completion(.gpt_4o_2024_08_06, Messages{
messages: [
Message{
role: .user
content: 'Say these words exactly as i write them with no punctuation: AI is getting out of hand'
},
]
})!
assert res.choices.len == 1
assert res.choices[0].message.content == 'AI is getting out of hand'
}
fn test_embeddings() {
key := os.getenv('OPENAI_API_KEY')
heroscript := '!!openai.configure api_key: "${key}"'
play(heroscript: heroscript)!
mut client := get()!
res := client.create_embeddings(
input: ['The food was delicious and the waiter..']
model: .text_embedding_ada
)!
assert res.data.len == 1
assert res.data[0].embedding.len == 1536
}
fn test_files() {
key := os.getenv('OPENAI_API_KEY')
heroscript := '!!openai.configure api_key: "${key}"'
play(heroscript: heroscript)!
mut client := get()!
uploaded_file := client.upload_file(
filepath: '${os.dir(@FILE) + '/testdata/testfile.txt'}'
purpose: .assistants
)!
assert uploaded_file.filename == 'testfile.txt'
assert uploaded_file.purpose == 'assistants'
got_file := client.get_file(uploaded_file.id)!
assert got_file == uploaded_file
uploaded_file2 := client.upload_file(
filepath: '${os.dir(@FILE) + '/testdata/testfile2.txt'}'
purpose: .assistants
)!
assert uploaded_file2.filename == 'testfile2.txt'
assert uploaded_file2.purpose == 'assistants'
mut got_list := client.list_files()!
assert got_list.data.len >= 2 // there could be other older files
mut ids := []string{}
for file in got_list.data {
ids << file.id
}
assert uploaded_file.id in ids
assert uploaded_file2.id in ids
for file in got_list.data {
client.delete_file(file.id)!
}
got_list = client.list_files()!
assert got_list.data.len == 0
}
fn test_audio() {
key := os.getenv('OPENAI_API_KEY')
heroscript := '!!openai.configure api_key: "${key}"'
play(heroscript: heroscript)!
mut client := get()!
// create speech
client.create_speech(
input: 'the quick brown fox jumps over the lazy dog'
output_path: '/tmp/output.mp3'
)!
assert os.exists('/tmp/output.mp3')
}

View File

@@ -50,7 +50,7 @@ mut:
// creates a new chat completion given a list of messages
// each message consists of message content and the role of the author
pub fn (mut f OpenAIClient[Config]) chat_completion(model_type ModelType, msgs Messages) !ChatCompletion {
pub fn (mut f OpenAI) chat_completion(model_type ModelType, msgs Messages) !ChatCompletion {
model_type0 := modelname_str(model_type)
mut m := ChatMessagesRaw{
model: model_type0
@@ -63,7 +63,10 @@ pub fn (mut f OpenAIClient[Config]) chat_completion(model_type ModelType, msgs M
m.messages << mr
}
data := json.encode(m)
r := f.connection.post_json_str(prefix: 'chat/completions', data: data)!
println('data: ${data}')
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'chat/completions', data: data)!
println('res: ${r}')
res := json.decode(ChatCompletion, r)!
return res

View File

@@ -42,13 +42,15 @@ pub mut:
usage Usage
}
pub fn (mut f OpenAIClient[Config]) create_embeddings(args EmbeddingCreateArgs) !EmbeddingResponse {
pub fn (mut f OpenAI) create_embeddings(args EmbeddingCreateArgs) !EmbeddingResponse {
req := EmbeddingCreateRequest{
input: args.input
model: embedding_model_str(args.model)
user: args.user
}
data := json.encode(req)
r := f.connection.post_json_str(prefix: 'embeddings', data: data)!
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'embeddings', data: data)!
return json.decode(EmbeddingResponse, r)!
}

View File

@@ -11,7 +11,14 @@ const jsonl_mime_type = 'text/jsonl'
pub struct FileUploadArgs {
pub:
filepath string
purpose string
purpose FilePurpose
}
pub enum FilePurpose {
assistants
vision
batch
fine_tuning
}
pub struct File {
@@ -37,13 +44,13 @@ pub mut:
}
// upload file to client org, usually used for fine tuning
pub fn (mut f OpenAIClient[Config]) upload_file(args FileUploadArgs) !File {
pub fn (mut f OpenAI) upload_file(args FileUploadArgs) !File {
file_content := os.read_file(args.filepath)!
file_data := http.FileData{
filename: os.base(args.filepath)
data: file_content
content_type: jsonl_mime_type
content_type: openai.jsonl_mime_type
}
form := http.PostMultipartFormConfig{
@@ -51,14 +58,15 @@ pub fn (mut f OpenAIClient[Config]) upload_file(args FileUploadArgs) !File {
'file': [file_data]
}
form: {
'purpose': args.purpose
'purpose': file_purpose_str(args.purpose)
}
}
req := httpconnection.Request{
prefix: 'files'
}
r := f.connection.post_multi_part(req, form)!
mut conn := f.connection()!
r := conn.post_multi_part(req, form)!
if r.status_code != 200 {
return error('got error from server: ${r.body}')
}
@@ -66,25 +74,47 @@ pub fn (mut f OpenAIClient[Config]) upload_file(args FileUploadArgs) !File {
}
// list all files in client org
pub fn (mut f OpenAIClient[Config]) list_files() !Files {
r := f.connection.get(prefix: 'files')!
pub fn (mut f OpenAI) list_files() !Files {
mut conn := f.connection()!
r := conn.get(prefix: 'files')!
return json.decode(Files, r)!
}
// deletes a file
pub fn (mut f OpenAIClient[Config]) delete_file(file_id string) !DeleteResp {
r := f.connection.delete(prefix: 'files/' + file_id)!
pub fn (mut f OpenAI) delete_file(file_id string) !DeleteResp {
mut conn := f.connection()!
r := conn.delete(prefix: 'files/' + file_id)!
return json.decode(DeleteResp, r)!
}
// returns a single file metadata
pub fn (mut f OpenAIClient[Config]) get_file(file_id string) !File {
r := f.connection.get(prefix: 'files/' + file_id)!
pub fn (mut f OpenAI) get_file(file_id string) !File {
mut conn := f.connection()!
r := conn.get(prefix: 'files/' + file_id)!
return json.decode(File, r)!
}
// returns the content of a specific file
pub fn (mut f OpenAIClient[Config]) get_file_content(file_id string) !string {
r := f.connection.get(prefix: 'files/' + file_id + '/content')!
pub fn (mut f OpenAI) get_file_content(file_id string) !string {
mut conn := f.connection()!
r := conn.get(prefix: 'files/' + file_id + '/content')!
return r
}
// returns the purpose of the file in string format
fn file_purpose_str(purpose FilePurpose) string {
return match purpose {
.assistants {
'assistants'
}
.vision {
'vision'
}
.batch {
'batch'
}
.fine_tuning {
'fine_tuning'
}
}
}

View File

@@ -61,33 +61,38 @@ pub mut:
}
// creates a new fine-tune based on an already uploaded file
pub fn (mut f OpenAIClient[Config]) create_fine_tune(args FineTuneCreateArgs) !FineTune {
pub fn (mut f OpenAI) create_fine_tune(args FineTuneCreateArgs) !FineTune {
data := json.encode(args)
r := f.connection.post_json_str(prefix: 'fine-tunes', data: data)!
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'fine-tunes', data: data)!
return json.decode(FineTune, r)!
}
// returns all fine-tunes in this account
pub fn (mut f OpenAIClient[Config]) list_fine_tunes() !FineTuneList {
r := f.connection.get(prefix: 'fine-tunes')!
pub fn (mut f OpenAI) list_fine_tunes() !FineTuneList {
mut conn := f.connection()!
r := conn.get(prefix: 'fine-tunes')!
return json.decode(FineTuneList, r)!
}
// get a single fine-tune information
pub fn (mut f OpenAIClient[Config]) get_fine_tune(fine_tune string) !FineTune {
r := f.connection.get(prefix: 'fine-tunes/' + fine_tune)!
pub fn (mut f OpenAI) get_fine_tune(fine_tune string) !FineTune {
mut conn := f.connection()!
r := conn.get(prefix: 'fine-tunes/' + fine_tune)!
return json.decode(FineTune, r)!
}
// cancel a fine-tune that didn't finish yet
pub fn (mut f OpenAIClient[Config]) cancel_fine_tune(fine_tune string) !FineTune {
r := f.connection.post_json_str(prefix: 'fine-tunes/' + fine_tune + '/cancel')!
pub fn (mut f OpenAI) cancel_fine_tune(fine_tune string) !FineTune {
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'fine-tunes/' + fine_tune + '/cancel')!
return json.decode(FineTune, r)!
}
// returns all events for a fine tune in this account
pub fn (mut f OpenAIClient[Config]) list_fine_tune_events(fine_tune string) !FineTuneEventList {
r := f.connection.get(prefix: 'fine-tunes/' + fine_tune + '/events')!
pub fn (mut f OpenAI) list_fine_tune_events(fine_tune string) !FineTuneEventList {
mut conn := f.connection()!
r := conn.get(prefix: 'fine-tunes/' + fine_tune + '/events')!
return json.decode(FineTuneEventList, r)!
}

View File

@@ -95,7 +95,7 @@ pub mut:
// Create new images generation given a prompt
// the amount of images returned is specified by `num_images`
pub fn (mut f OpenAIClient[Config]) create_image(args ImageCreateArgs) !Images {
pub fn (mut f OpenAI) create_image(args ImageCreateArgs) !Images {
image_size := image_size_str(args.size)
response_format := image_resp_type_str(args.format)
request := ImageRequest{
@@ -106,7 +106,8 @@ pub fn (mut f OpenAIClient[Config]) create_image(args ImageCreateArgs) !Images {
user: args.user
}
data := json.encode(request)
r := f.connection.post_json_str(prefix: 'images/generations', data: data)!
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'images/generations', data: data)!
return json.decode(Images, r)!
}
@@ -114,11 +115,11 @@ pub fn (mut f OpenAIClient[Config]) create_image(args ImageCreateArgs) !Images {
// image needs to be in PNG format and transparent or else a mask of the same size needs
// to be specified to indicate where the image should be in the generated image
// the amount of images returned is specified by `num_images`
pub fn (mut f OpenAIClient[Config]) create_edit_image(args ImageEditArgs) !Images {
pub fn (mut f OpenAI) create_edit_image(args ImageEditArgs) !Images {
image_content := os.read_file(args.image_path)!
image_file := http.FileData{
filename: os.base(args.image_path)
content_type: image_mine_type
content_type: openai.image_mine_type
data: image_content
}
mut mask_file := []http.FileData{}
@@ -126,7 +127,7 @@ pub fn (mut f OpenAIClient[Config]) create_edit_image(args ImageEditArgs) !Image
mask_content := os.read_file(args.mask_path)!
mask_file << http.FileData{
filename: os.base(args.mask_path)
content_type: image_mine_type
content_type: openai.image_mine_type
data: mask_content
}
}
@@ -148,7 +149,8 @@ pub fn (mut f OpenAIClient[Config]) create_edit_image(args ImageEditArgs) !Image
req := httpconnection.Request{
prefix: 'images/edits'
}
r := f.connection.post_multi_part(req, form)!
mut conn := f.connection()!
r := conn.post_multi_part(req, form)!
if r.status_code != 200 {
return error('got error from server: ${r.body}')
}
@@ -158,11 +160,11 @@ pub fn (mut f OpenAIClient[Config]) create_edit_image(args ImageEditArgs) !Image
// create variations of the given image
// image needs to be in PNG format
// the amount of images returned is specified by `num_images`
pub fn (mut f OpenAIClient[Config]) create_variation_image(args ImageVariationArgs) !Images {
pub fn (mut f OpenAI) create_variation_image(args ImageVariationArgs) !Images {
image_content := os.read_file(args.image_path)!
image_file := http.FileData{
filename: os.base(args.image_path)
content_type: image_mine_type
content_type: openai.image_mine_type
data: image_content
}
@@ -181,7 +183,8 @@ pub fn (mut f OpenAIClient[Config]) create_variation_image(args ImageVariationAr
req := httpconnection.Request{
prefix: 'images/variations'
}
r := f.connection.post_multi_part(req, form)!
mut conn := f.connection()!
r := conn.post_multi_part(req, form)!
if r.status_code != 200 {
return error('got error from server: ${r.body}')
}

View File

@@ -1,6 +1,7 @@
module openai
pub enum ModelType {
gpt_4o_2024_08_06
gpt_3_5_turbo
gpt_4
gpt_4_0613
@@ -10,16 +11,17 @@ pub enum ModelType {
gpt_3_5_turbo_16k
gpt_3_5_turbo_16k_0613
whisper_1
tts_1
}
fn modelname_str(e ModelType) string {
if e == .gpt_4 {
return 'gpt-4'
}
if e == .gpt_3_5_turbo {
return 'gpt-3.5-turbo'
}
return match e {
.tts_1 {
'tts-1'
}
.gpt_4o_2024_08_06 {
'gpt-4o-2024-08-06'
}
.gpt_4 {
'gpt-4'
}
@@ -73,3 +75,79 @@ fn roletype_str(x RoleType) string {
}
}
}
pub enum Voice {
alloy
ash
coral
echo
fable
onyx
nova
sage
shimmer
}
fn voice_str(x Voice) string {
return match x {
.alloy {
'alloy'
}
.ash {
'ash'
}
.coral {
'coral'
}
.echo {
'echo'
}
.fable {
'fable'
}
.onyx {
'onyx'
}
.nova {
'nova'
}
.sage {
'sage'
}
.shimmer {
'shimmer'
}
}
}
pub enum AudioFormat {
mp3
opus
aac
flac
wav
pcm
}
fn audio_format_str(x AudioFormat) string {
return match x {
.mp3 {
'mp3'
}
.opus {
'opus'
}
.aac {
'aac'
}
.flac {
'flac'
}
.wav {
'wav'
}
.pcm {
'pcm'
}
}
}

View File

@@ -34,13 +34,15 @@ pub mut:
}
// list current models available in Open AI
pub fn (mut f OpenAIClient[Config]) list_models() !Models {
r := f.connection.get(prefix: 'models')!
pub fn (mut f OpenAI) list_models() !Models {
mut conn := f.connection()!
r := conn.get(prefix: 'models')!
return json.decode(Models, r)!
}
// returns details of a model using the model id
pub fn (mut f OpenAIClient[Config]) get_model(model string) !Model {
r := f.connection.get(prefix: 'models/' + model)!
pub fn (mut f OpenAI) get_model(model string) !Model {
mut conn := f.connection()!
r := conn.get(prefix: 'models/' + model)!
return json.decode(Model, r)!
}

View File

@@ -69,12 +69,13 @@ pub mut:
results []ModerationResult
}
pub fn (mut f OpenAIClient[Config]) create_moderation(input string, model ModerationModel) !ModerationResponse {
pub fn (mut f OpenAI) create_moderation(input string, model ModerationModel) !ModerationResponse {
req := ModerationRequest{
input: input
model: moderation_model_str(model)
}
data := json.encode(req)
r := f.connection.post_json_str(prefix: 'moderations', data: data)!
mut conn := f.connection()!
r := conn.post_json_str(prefix: 'moderations', data: data)!
return json.decode(ModerationResponse, r)!
}

View File

@@ -2,7 +2,6 @@ module openai
import freeflowuniverse.herolib.core.base
import freeflowuniverse.herolib.core.playbook
import freeflowuniverse.herolib.ui.console
__global (
openai_global map[string]&OpenAI
@@ -14,11 +13,90 @@ __global (
@[params]
pub struct ArgsGet {
pub mut:
name string
name string = 'default'
}
fn args_get(args_ ArgsGet) ArgsGet {
mut args := args_
if args.name == '' {
args.name = openai_default
}
if args.name == '' {
args.name = 'default'
}
return args
}
pub fn get(args_ ArgsGet) !&OpenAI {
return &OpenAI{}
mut args := args_get(args_)
if args.name !in openai_global {
if !config_exists() {
if default {
config_save()!
}
}
config_load()!
}
return openai_global[args.name] or {
println(openai_global)
panic('bug in get from factory: ')
}
}
fn config_exists(args_ ArgsGet) bool {
mut args := args_get(args_)
mut context := base.context() or { panic('bug') }
return context.hero_config_exists('openai', args.name)
}
fn config_load(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
mut heroscript := context.hero_config_get('openai', args.name)!
play(heroscript: heroscript)!
}
fn config_save(args_ ArgsGet) ! {
mut args := args_get(args_)
mut context := base.context()!
context.hero_config_set('openai', args.name, heroscript_default()!)!
}
fn set(o OpenAI) ! {
mut o2 := obj_init(o)!
openai_global['default'] = &o2
}
@[params]
pub struct PlayArgs {
pub mut:
name string = 'default'
heroscript string // if filled in then plbook will be made out of it
plbook ?playbook.PlayBook
reset bool
start bool
stop bool
restart bool
delete bool
configure bool // make sure there is at least one installed
}
pub fn play(args_ PlayArgs) ! {
mut args := args_
if args.heroscript == '' {
args.heroscript = heroscript_default()!
}
mut plbook := args.plbook or { playbook.new(text: args.heroscript)! }
mut install_actions := plbook.find(filter: 'openai.configure')!
if install_actions.len > 0 {
for install_action in install_actions {
mut p := install_action.params
cfg_play(p)!
}
}
}
// switch instance to be used for openai

View File

@@ -1,28 +1,60 @@
module openai
import freeflowuniverse.herolib.data.paramsparser
import os
import freeflowuniverse.herolib.core.httpconnection
pub const version = '0.0.0'
pub const version = '1.14.3'
const singleton = false
const default = true
// TODO: THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH THE STRUCT BELOW, IT IS STRUCTURED AS HEROSCRIPT
pub fn heroscript_default() !string {
heroscript := "
!!openai.configure
name:'openai'
api_key: 'YOUR_API_KEY'
"
return heroscript
}
// THIS IS THE SOURCE OF THE INFORMATION OF THIS FILE, HERE WE HAVE THE CONFIG OBJECT CONFIGURED AND MODELLED
@[heap]
pub struct OpenAI {
pub mut:
name string = 'default'
mail_from string
mail_password string @[secret]
mail_port int
mail_server string
mail_username string
api_key string @[secret]
conn ?&httpconnection.HTTPConnection
}
fn cfg_play(p paramsparser.Params) ! {
// THIS IS EXAMPLE CODE AND NEEDS TO BE CHANGED IN LINE WITH struct above
mut mycfg := OpenAI{
name: p.get_default('name', 'default')!
api_key: p.get('api_key')!
}
set(mycfg)!
}
fn obj_init(obj_ OpenAI) !OpenAI {
// never call get here, only thing we can do here is work on object itself
mut obj := obj_
panic('implement')
return obj
}
pub fn (mut client OpenAI) connection() !&httpconnection.HTTPConnection {
mut c := client.conn or {
mut c2 := httpconnection.new(
name: 'openaiconnection_${client.name}'
url: 'https://api.openai.com/v1'
cache: false
)!
c2
}
c.default_header.set(.authorization, 'Bearer ${client.api_key}')
client.conn = c
return c
}

View File

@@ -1,50 +1,27 @@
# OpenAI
# openai
An implementation of an OpenAI client using Vlang.
To get started
## Supported methods
```vlang
- List available models
- Chat Completion
- Translate Audio
- Transcribe Audio
- Create image based on prompt
- Edit an existing image
- Create variation of an image
## Usage
To use the client you need an OpenAI key, which can be generated [here](https://platform.openai.com/account/api-keys).
The key should be exposed in an environment variable as follows:
```bash
export OPENAI_API_KEY=<your-api-key>
```
To get a new instance of the client:
```v
import freeflowuniverse.herolib.clients.openai
ai_cli := openai.new()!
mut client := openai.get()!
client...
```
Then it is possible to perform all the listed operations:
## example heroscript
```v
// listing models
models := ai_cli.list_models()!
// creating a new chat completion
mut msg := []op.Message{}
msg << op.Message{
role: op.RoleType.user
content: 'Say this is a test!'
}
mut msgs := op.Messages{
messages: msg
}
res := ai_cli.chat_completion(op.ModelType.gpt_3_5_turbo, msgs)!
```hero
!!openai.configure
secret: '...'
host: 'localhost'
port: 8888
```
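For a fuller call, a hedged sketch based on the test added in this commit (model, message, and field names come from that test; treat it as illustrative):
```v
import freeflowuniverse.herolib.clients.openai

mut client := openai.get()!
res := client.chat_completion(.gpt_4o_2024_08_06, openai.Messages{
	messages: [
		openai.Message{
			role:    .user
			content: 'Say this is a test!'
		},
	]
})!
println(res.choices[0].message.content)
```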

View File

@@ -0,0 +1 @@
hello world

View File

@@ -0,0 +1 @@
testfile2 content