From 17e5924e0b11228b7d2a2886e38675ba355270a5 Mon Sep 17 00:00:00 2001 From: despiegk Date: Mon, 25 Aug 2025 07:06:50 +0200 Subject: [PATCH] ... --- .gitignore | 5 +- Cargo.toml | 1 + config/README.md | 14 ++ config/myenv_templ.sh | 6 + examples_rust/ai/Cargo.toml | 15 ++ examples_rust/ai/openrouter_example.rs | 47 +++++ examples_rust/ai/run.sh | 13 ++ packages/ai/codemonkey/Cargo.toml | 10 ++ packages/ai/codemonkey/src/lib.rs | 227 +++++++++++++++++++++++++ 9 files changed, 337 insertions(+), 1 deletion(-) create mode 100644 config/README.md create mode 100644 config/myenv_templ.sh create mode 100644 examples_rust/ai/Cargo.toml create mode 100644 examples_rust/ai/openrouter_example.rs create mode 100755 examples_rust/ai/run.sh create mode 100644 packages/ai/codemonkey/Cargo.toml create mode 100644 packages/ai/codemonkey/src/lib.rs diff --git a/.gitignore b/.gitignore index 6b25cc5..863375c 100644 --- a/.gitignore +++ b/.gitignore @@ -63,4 +63,7 @@ sidebars.ts tsconfig.json Cargo.toml.bak -for_augment \ No newline at end of file +for_augment + +myenv.sh + diff --git a/Cargo.toml b/Cargo.toml index 71575e3..4db6f9c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ members = [ "rhai", "herodo", "packages/clients/hetznerclient", + "packages/ai/codemonkey", ] resolver = "2" diff --git a/config/README.md b/config/README.md new file mode 100644 index 0000000..36436a4 --- /dev/null +++ b/config/README.md @@ -0,0 +1,14 @@ +# Environment Configuration + +To set up your environment variables: + +1. Copy the template file to `myenv.sh`: + + ```bash + cp config/myenv_templ.sh config/myenv.sh + ``` + +2. Edit `config/myenv.sh` and fill in your specific values for the variables. + +3. This file (`config/myenv.sh`) is excluded from version control by the project's `.gitignore` configuration (which ignores `myenv.sh`), ensuring your sensitive information remains local and is never committed to the repository. It is also the file sourced by `examples_rust/ai/run.sh`. 
+ diff --git a/config/myenv_templ.sh b/config/myenv_templ.sh new file mode 100644 index 0000000..7176da2 --- /dev/null +++ b/config/myenv_templ.sh @@ -0,0 +1,6 @@ + + +export OPENROUTER_API_KEY="" +export GROQ_API_KEY="" +export CEREBRAS_API_KEY="" +export OPENAI_API_KEY="sk-xxxxxxx" \ No newline at end of file diff --git a/examples_rust/ai/Cargo.toml b/examples_rust/ai/Cargo.toml new file mode 100644 index 0000000..ee3ec09 --- /dev/null +++ b/examples_rust/ai/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "openrouter_example" +version = "0.1.0" +edition = "2021" + +[workspace] + +[[bin]] +name = "openrouter_example" +path = "openrouter_example.rs" + +[dependencies] +codemonkey = { path = "../../packages/ai/codemonkey" } +openai-api-rs = "6.0.8" +tokio = { version = "1.0", features = ["full"] } diff --git a/examples_rust/ai/openrouter_example.rs b/examples_rust/ai/openrouter_example.rs new file mode 100644 index 0000000..a410c96 --- /dev/null +++ b/examples_rust/ai/openrouter_example.rs @@ -0,0 +1,47 @@ +use codemonkey::{create_ai_provider, AIProviderType, CompletionRequestBuilder, Message, MessageRole, Content}; +use std::error::Error; + +#[tokio::main] +async fn main() -> Result<(), Box> { + + let (mut provider, provider_type) = create_ai_provider(AIProviderType::OpenRouter)?; + + let messages = vec![Message { + role: MessageRole::user, + content: Content::Text("Explain the concept of a factory design pattern in Rust.".to_string()), + name: None, + tool_calls: None, + tool_call_id: None, + }]; + + println!("Sending request to OpenRouter..."); + let response = CompletionRequestBuilder::new( + &mut *provider, + "openai/gpt-oss-120b".to_string(), // Model name as specified by the user + messages, + provider_type, // Pass the provider_type + ) + .temperature(1.0) + .max_tokens(8192) + .top_p(1.0) + .reasoning_effort("medium") + .stream(false) + .openrouter_options(|builder| { + builder.provider( + codemonkey::OpenRouterProviderOptionsBuilder::new() + 
.order(vec!["cerebras"]) + .build(), + ) + }) + .completion() + .await?; + + for choice in response.choices { + if let Some(content) = choice.message.content { + print!("{}", content); + } + } + println!(); + + Ok(()) +} diff --git a/examples_rust/ai/run.sh b/examples_rust/ai/run.sh new file mode 100755 index 0000000..525d965 --- /dev/null +++ b/examples_rust/ai/run.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e + +# Change to directory where this script is located +cd "$(dirname "${BASH_SOURCE[0]}")" + +source ../../config/myenv.sh + +# Build the example +cargo build + +# Run the example +cargo run --bin openrouter_example diff --git a/packages/ai/codemonkey/Cargo.toml b/packages/ai/codemonkey/Cargo.toml new file mode 100644 index 0000000..e3489da --- /dev/null +++ b/packages/ai/codemonkey/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "codemonkey" +version = "0.1.0" +edition = "2021" + +[dependencies] +tokio = { version = "1", features = ["full"] } +async-trait = "0.1.80" +openrouter-rs = "0.4.5" +serde = { version = "1.0", features = ["derive"] } \ No newline at end of file diff --git a/packages/ai/codemonkey/src/lib.rs b/packages/ai/codemonkey/src/lib.rs new file mode 100644 index 0000000..21cd5f3 --- /dev/null +++ b/packages/ai/codemonkey/src/lib.rs @@ -0,0 +1,227 @@ +use async_trait::async_trait; +use openrouter_rs::{OpenRouterClient, api::chat::*, types::Role, ChatCompletionResponse}; // Added ChatCompletionResponse here +use std::env; +use std::error::Error; + +// Re-export Message and MessageRole for easier use in client code +pub use openrouter_rs::api::chat::Message; +pub use openrouter_rs::types::Role as MessageRole; +// Removed the problematic import for ChatCompletionResponse +// pub use openrouter_rs::api::chat::chat_completion::ChatCompletionResponse; + +#[async_trait] +pub trait AIProvider { + async fn completion( + &mut self, + request: CompletionRequest, + ) -> Result>; +} + +pub struct CompletionRequest { + pub model: String, + pub messages: Vec, + 
pub temperature: Option, + pub max_tokens: Option, + pub top_p: Option, + pub stream: Option, + pub stop: Option>, +} + +pub struct CompletionRequestBuilder<'a> { + provider: &'a mut dyn AIProvider, + model: String, + messages: Vec, + temperature: Option, + max_tokens: Option, + top_p: Option, + stream: Option, + stop: Option>, + provider_type: AIProviderType, +} + +impl<'a> CompletionRequestBuilder<'a> { + pub fn new(provider: &'a mut dyn AIProvider, model: String, messages: Vec, provider_type: AIProviderType) -> Self { + Self { + provider, + model, + messages, + temperature: None, + max_tokens: None, + top_p: None, + stream: None, + stop: None, + provider_type, + } + } + + pub fn temperature(mut self, temperature: f64) -> Self { + self.temperature = Some(temperature); + self + } + + pub fn max_tokens(mut self, max_tokens: i64) -> Self { + self.max_tokens = Some(max_tokens); + self + } + + pub fn top_p(mut self, top_p: f64) -> Self { + self.top_p = Some(top_p); + self + } + + pub fn stream(mut self, stream: bool) -> Self { + self.stream = Some(stream); + self + } + + pub fn stop(mut self, stop: Vec) -> Self { + self.stop = Some(stop); + self + } + + pub async fn completion(self) -> Result> { + let request = CompletionRequest { + model: self.model, + messages: self.messages, + temperature: self.temperature, + max_tokens: self.max_tokens, + top_p: self.top_p, + stream: self.stream, + stop: self.stop, + }; + self.provider.completion(request).await + } +} + +pub struct GroqAIProvider { + client: OpenRouterClient, +} + +#[async_trait] +impl AIProvider for GroqAIProvider { + async fn completion( + &mut self, + request: CompletionRequest, + ) -> Result> { + let chat_request = ChatCompletionRequest::builder() + .model(request.model) + .messages(request.messages) + .temperature(request.temperature.unwrap_or(1.0)) + .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048)) + .top_p(request.top_p.unwrap_or(1.0)) + .stream(request.stream.unwrap_or(false)) // Corrected 
to field assignment + .stop(request.stop.unwrap_or_default()) + .build()?; + + let result = self.client.send_chat_completion(&chat_request).await?; + Ok(result) + } +} + +pub struct OpenAIProvider { + client: OpenRouterClient, +} + +#[async_trait] +impl AIProvider for OpenAIProvider { + async fn completion( + &mut self, + request: CompletionRequest, + ) -> Result> { + let chat_request = ChatCompletionRequest::builder() + .model(request.model) + .messages(request.messages) + .temperature(request.temperature.unwrap_or(1.0)) + .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048)) + .top_p(request.top_p.unwrap_or(1.0)) + .stream(request.stream.unwrap_or(false)) // Corrected to field assignment + .stop(request.stop.unwrap_or_default()) + .build()?; + + let result = self.client.send_chat_completion(&chat_request).await?; + Ok(result) + } +} + +pub struct OpenRouterAIProvider { + client: OpenRouterClient, +} + +#[async_trait] +impl AIProvider for OpenRouterAIProvider { + async fn completion( + &mut self, + request: CompletionRequest, + ) -> Result> { + let chat_request = ChatCompletionRequest::builder() + .model(request.model) + .messages(request.messages) + .temperature(request.temperature.unwrap_or(1.0)) + .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048)) + .top_p(request.top_p.unwrap_or(1.0)) + .stream(request.stream.unwrap_or(false)) // Corrected to field assignment + .stop(request.stop.unwrap_or_default()) + .build()?; + + let result = self.client.send_chat_completion(&chat_request).await?; + Ok(result) + } +} + +pub struct CerebrasAIProvider { + client: OpenRouterClient, +} + +#[async_trait] +impl AIProvider for CerebrasAIProvider { + async fn completion( + &mut self, + request: CompletionRequest, + ) -> Result> { + let chat_request = ChatCompletionRequest::builder() + .model(request.model) + .messages(request.messages) + .temperature(request.temperature.unwrap_or(1.0)) + .max_tokens(request.max_tokens.map(|x| x as u32).unwrap_or(2048)) + 
.top_p(request.top_p.unwrap_or(1.0)) + .stream(request.stream.unwrap_or(false)) // Corrected to field assignment + .stop(request.stop.unwrap_or_default()) + .build()?; + + let result = self.client.send_chat_completion(&chat_request).await?; + Ok(result) + } +} + +#[derive(PartialEq)] +pub enum AIProviderType { + Groq, + OpenAI, + OpenRouter, + Cerebras, +} + +pub fn create_ai_provider(provider_type: AIProviderType) -> Result<(Box, AIProviderType), Box> { + match provider_type { + AIProviderType::Groq => { + let api_key = env::var("GROQ_API_KEY")?; + let client = OpenRouterClient::builder().api_key(api_key).build()?; + Ok((Box::new(GroqAIProvider { client }), AIProviderType::Groq)) + } + AIProviderType::OpenAI => { + let api_key = env::var("OPENAI_API_KEY")?; + let client = OpenRouterClient::builder().api_key(api_key).build()?; + Ok((Box::new(OpenAIProvider { client }), AIProviderType::OpenAI)) + } + AIProviderType::OpenRouter => { + let api_key = env::var("OPENROUTER_API_KEY")?; + let client = OpenRouterClient::builder().api_key(api_key).build()?; + Ok((Box::new(OpenRouterAIProvider { client }), AIProviderType::OpenRouter)) + } + AIProviderType::Cerebras => { + let api_key = env::var("CEREBRAS_API_KEY")?; + let client = OpenRouterClient::builder().api_key(api_key).build()?; + Ok((Box::new(CerebrasAIProvider { client }), AIProviderType::Cerebras)) + } + } +}