@@ -66,22 +66,3 @@ mut server := mcp.new_server(backend, mcp.ServerParams{
// Start the server
server.start()!
```

## Sub-modules

The MCP directory contains several sub-modules that implement specific MCP servers:

- **baobab**: An MCP server implementation for Baobab-specific tools and functionality
- **developer**: An MCP server implementation focused on developer tools

Each sub-module leverages the core MCP implementation but provides its own specific tools, handlers, and configurations. Thanks to the boilerplate functionality provided by the core module, these implementations only need to define their specific tools and handlers without worrying about the underlying protocol details.
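For illustration only, a minimal sub-module could be wired up roughly as follows. The `mcp.*` and `jsonschema.*` calls mirror the ones used by the servers added later in this commit; the module name `mytools` and the tool name `mytool` are placeholders, not part of the codebase.

```v
module mytools

import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.schemas.jsonschema
import x.json2 { Any }

// Tool specification: one tool with a single string argument.
const specs = mcp.Tool{
    name: 'mytool'
    description: 'Example tool exposed by this sub-module'
    input_schema: jsonschema.Schema{
        typ: 'object'
        properties: {
            'path': jsonschema.SchemaRef(jsonschema.Schema{
                typ: 'string'
                description: 'Path the tool should operate on'
            })
        }
        required: ['path']
    }
}

// Tool handler: receives the decoded arguments, returns an MCP tool call result.
fn handler(arguments map[string]Any) !mcp.ToolCallResult {
    path := arguments['path'].str()
    return mcp.ToolCallResult{
        is_error: false
        content: mcp.result_to_mcp_tool_contents[string]('handled ${path}')
    }
}

// The core module supplies the protocol plumbing; the sub-module only wires
// its tools and handlers into a backend and configures the server identity.
pub fn new_mcp_server() !&mcp.Server {
    mut server := mcp.new_server(mcp.MemoryBackend{
        tools: {
            'mytool': specs
        }
        tool_handlers: {
            'mytool': handler
        }
    }, mcp.ServerParams{
        config: mcp.ServerConfiguration{
            server_info: mcp.ServerInfo{
                name: 'mytools'
                version: '1.0.0'
            }
        }
    })!
    return server
}
```

A `cmd/main.v` wrapper can then import this module, call `new_mcp_server()`, and call `server.start()`, exactly as the `rhai` and `vcode` servers further down in this commit do.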
## Dependencies

- `freeflowuniverse.herolib.schemas.jsonrpc`: For JSON-RPC communication
- `x.json2`: For JSON serialization/deserialization
- Standard V libraries: `time`, `os`, `log`

## License

This module is part of the HeroLib project. See the project's license for more information.
@@ -1,44 +0,0 @@
make an mcp server in @lib/mcp/v_do

use the Standard Input/Output (stdio) transport as described in
https://modelcontextprotocol.io/docs/concepts/transports

The tool has the following methods:

## test
- args: $fullpath
- cmd: 'v -gc none -stats -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc test ${fullpath}'

if the file is a dir then find the .v files (non recursive) and do it for each of those

collect the output and return

## run
- args: $fullpath
- cmd: 'v -gc none -stats -enable-globals -n -w -cg -g -cc tcc run ${fullpath}'

if the file is a dir then find the .v files (non recursive) and do it for each of those

collect the output and return

## compile
- args: $fullpath
- cmd: 'cd /tmp && v -gc none -enable-globals -show-c-output -keepc -n -w -cg -o /tmp/tester.c -g -cc tcc ${fullpath}'

if the file is a dir then find the .v files (non recursive) and do it for each of those

collect the output and return

## vet
- args: $fullpath
- cmd: 'v vet -v -w ${fullpath}'

if the file is a dir then find the .v files (non recursive) and do it for each of those

collect the output and return
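A rough sketch of the shared behaviour described for each method above (run the command on a single `.v` file or on every `.v` file directly inside a directory, collecting the output) might look like this in V. `os.ls`, `os.is_dir`, `os.join_path` and `os.execute` are standard library calls; the function name `run_for_path` and the command-builder callback are illustrative, not part of the spec.

```v
import os

// Run the built command for one .v file or, if `fullpath` is a directory,
// for each .v file directly inside it (non recursive), collecting the output.
fn run_for_path(build_cmd fn (string) string, fullpath string) string {
    mut targets := []string{}
    if os.is_dir(fullpath) {
        entries := os.ls(fullpath) or { return 'cannot list ${fullpath}: ${err}' }
        for entry in entries {
            if entry.ends_with('.v') {
                targets << os.join_path(fullpath, entry)
            }
        }
    } else {
        targets << fullpath
    }

    mut output := ''
    for target in targets {
        res := os.execute(build_cmd(target))
        output += '--- ${target} (exit code ${res.exit_code}) ---\n${res.output}\n'
    }
    return output
}

// Example: the `vet` method could then be expressed as
// output := run_for_path(fn (p string) string { return 'v vet -v -w ${p}' }, fullpath)
```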
@@ -120,4 +120,4 @@ This module was designed based on the following requirements:

For a detailed architecture overview, see [escalayer_architecture.md](./escalayer_architecture.md).

For a complete example, see [example.v](./example.v).
For a complete example, see [example.v](../servers/rhai).
@@ -105,7 +105,7 @@ pub fn default_base_model() ModelConfig {
        name: 'gpt-3.5-turbo'
        provider: 'openai'
        temperature: 0.7
        max_tokens: 2000
        max_tokens: 20000
    }
}

@@ -114,7 +114,7 @@ pub fn default_retry_model() ModelConfig {
        name: 'gpt-4'
        provider: 'openai'
        temperature: 0.7
        max_tokens: 4000
        max_tokens: 40000
    }
}
```
@@ -134,9 +134,7 @@ pub mut:
}

// UnitTaskParams defines the parameters for creating a new unit task
@[params]
pub struct UnitTaskParams {
pub:
struct UnitTaskParams {
    name string
    prompt_function fn (string) string
    callback_function fn (string) !string
@@ -147,36 +145,11 @@ pub:

// Add a new unit task to the task
pub fn (mut t Task) new_unit_task(params UnitTaskParams) &UnitTask {
    mut unit_task := UnitTask{
        name: params.name
        prompt_function: params.prompt_function
        callback_function: params.callback_function
        base_model: if params.base_model != none { params.base_model? } else { default_base_model() }
        retry_model: if params.retry_model != none { params.retry_model? } else { default_retry_model() }
        retry_count: if params.retry_count != none { params.retry_count? } else { 3 }
    }

    t.unit_tasks << unit_task
    return &t.unit_tasks[t.unit_tasks.len - 1]
}

// Initiate the task execution
pub fn (mut t Task) initiate(input string) !string {
    mut current_input := input

    for i, mut unit_task in t.unit_tasks {
        println('Executing unit task ${i + 1}/${t.unit_tasks.len}: ${unit_task.name}')

        // Execute the unit task with the current input
        result := unit_task.execute(current_input)!

        // Update the current input for the next unit task
        current_input = result
        t.current_result = result
    }

    return t.current_result
}
```

### 3.3 unit_task.v
@@ -199,53 +172,6 @@ pub mut:

// Execute the unit task
pub fn (mut ut UnitTask) execute(input string) !string {
    // Generate the prompt using the prompt function
    prompt := ut.prompt_function(input)

    // Try with the base model first
    mut current_model := ut.base_model
    mut attempts := 0
    mut max_attempts := ut.retry_count + 1 // +1 for the initial attempt
    mut last_error := ''

    for attempts < max_attempts {
        attempts++

        // If we've exhausted retries with the base model, switch to the retry model
        if attempts > ut.retry_count {
            println('Escalating to more powerful model: ${ut.retry_model.name}')
            current_model = ut.retry_model
            max_attempts = attempts + ut.retry_count // Reset max attempts for the retry model
        }

        println('Attempt ${attempts} with model ${current_model.name}')

        // Prepare the prompt with error feedback if this is a retry
        mut current_prompt := prompt
        if last_error != '' {
            current_prompt = 'Previous attempt failed with error: ${last_error}\n\n${prompt}'
        }

        // Call the AI model
        response := call_ai_model(current_prompt, current_model) or {
            println('AI call failed: ${err}')
            last_error = err.str()
            continue // Try again
        }

        // Process the response with the callback function
        result := ut.callback_function(response) or {
            // If callback returns an error, retry with the error message
            println('Callback returned error: ${err}')
            last_error = err.str()
            continue // Try again
        }

        // If we get here, the callback was successful
        return result
    }

    return error('Failed to execute unit task after ${attempts} attempts. Last error: ${last_error}')
}
```
@@ -414,35 +340,3 @@ fn process_tests(response string)! string {
    return response
}
```

## 5. Key Features and Benefits

1. **V-Idiomatic Design**: Uses V's `@[params]` structures for configuration and the V result type (`fn ()!`) for error handling (a small standalone illustration follows this list).

2. **Sequential Task Execution**: Tasks are executed in sequence, with each unit task building on the results of the previous one.

3. **Automatic Model Escalation**: If a unit task fails with a cheaper model, the system automatically retries with a more powerful model.

4. **Flexible Configuration**: Each unit task can be configured with different models, retry counts, and other parameters.

5. **Error Handling**: Comprehensive error handling with detailed error messages and retry mechanisms using V's built-in error handling.

6. **Callback Processing**: Custom callback functions allow for validation and processing of AI responses.

7. **OpenRouter Integration**: Uses OpenRouter to access a wide range of AI models from different providers.
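As a small, self-contained illustration of the pattern referenced in point 1 (the struct and function names here are hypothetical, not part of the escalayer API):

```v
// Optional, defaulted configuration via an @[params] struct.
@[params]
pub struct GreetParams {
pub:
    name  string = 'world'
    shout bool
}

// A function using V's result type: callers must handle failure with `!` or `or {}`.
pub fn greet(params GreetParams) !string {
    if params.name == '' {
        return error('name must not be empty')
    }
    mut msg := 'hello ${params.name}'
    if params.shout {
        msg = msg.to_upper()
    }
    return msg
}

// Usage: greeting := greet(name: 'escalayer')!
```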
## 6. Future Enhancements

1. **Parallel Execution**: Add support for executing unit tasks in parallel when they don't depend on each other.

2. **Caching**: Implement caching of AI responses to avoid redundant API calls (one possible shape is sketched after this list).

3. **Cost Tracking**: Add functionality to track and report on API usage costs.

4. **Timeout Handling**: Add support for timeouts and graceful handling of long-running tasks.

5. **Streaming Responses**: Support for streaming AI responses for long-form content generation.

6. **Prompt Templates**: Add support for reusable prompt templates.

7. **Logging and Monitoring**: Enhanced logging and monitoring capabilities.
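For point 2, one possible shape of a response cache, purely as a sketch: `call_ai_model` and `ModelConfig` are taken from the excerpts above, while the `ResponseCache` struct, its field names, and the keying scheme are hypothetical.

```v
import crypto.sha256

// In-memory cache of AI responses, keyed by a hash of model name plus prompt.
struct ResponseCache {
mut:
    entries map[string]string
}

fn (mut c ResponseCache) call_cached(prompt string, model ModelConfig) !string {
    key := sha256.hexhash('${model.name}:${prompt}')
    if key in c.entries {
        return c.entries[key]
    }
    response := call_ai_model(prompt, model)!
    c.entries[key] = response
    return response
}
```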
@@ -1,50 +0,0 @@
module logger

import os

// LogLevel defines the severity of log messages
pub enum LogLevel {
    debug
    info
    warn
    error
    fatal
}

// log outputs a message to stderr with the specified log level
pub fn log(level LogLevel, message string) {
    level_str := match level {
        .debug { 'DEBUG' }
        .info { 'INFO ' }
        .warn { 'WARN ' }
        .error { 'ERROR' }
        .fatal { 'FATAL' }
    }
    eprintln('[${level_str}] ${message}')
}

// debug logs a debug message to stderr
pub fn debug(message string) {
    log(.debug, message)
}

// info logs an info message to stderr
pub fn info(message string) {
    log(.info, message)
}

// warn logs a warning message to stderr
pub fn warn(message string) {
    log(.warn, message)
}

// error logs an error message to stderr
pub fn error(message string) {
    log(.error, message)
}

// fatal logs a fatal error message to stderr and exits the program
pub fn fatal(message string) {
    log(.fatal, message)
    exit(1)
}
lib/mcp/pugconvert/cmd/.gitignore (vendored)
@@ -1,2 +0,0 @@
main
pugconvert
lib/mcp/servers/pugconvert/cmd/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
main

lib/mcp/servers/pugconvert/cmd/compile.sh (executable, new file)
@@ -0,0 +1,16 @@
#!/bin/bash
set -ex

export name="mcp_pugconvert"

# Change to the directory containing this script
cd "$(dirname "$0")"

# Compile the V program
v -n -w -gc none -cc tcc -d use_openssl -enable-globals main.v

# Ensure the binary is executable
chmod +x main
mv main ~/hero/bin/${name}

echo "Compilation successful. Binary '${name}' is ready."
lib/mcp/servers/rhai/cmd/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
main

@@ -1,6 +1,8 @@
#!/bin/bash
set -ex

export name="mcp_rhai"

# Change to the directory containing this script
cd "$(dirname "$0")"

@@ -9,6 +11,6 @@ v -n -w -gc none -cc tcc -d use_openssl -enable-globals main.v

# Ensure the binary is executable
chmod +x main
mv main ~/hero/bin/pugconvert
mv main ~/hero/bin/${name}

echo "Compilation successful. Binary 'main' is ready."
echo "Compilation successful. Binary '${name}' is ready."
lib/mcp/servers/rhai/cmd/main.v (new file)
@@ -0,0 +1,17 @@
module main

import freeflowuniverse.herolib.mcp.pugconvert

fn main() {
    // Create a new MCP server
    mut server := pugconvert.new_mcp_server() or {
        eprintln('Failed to create MCP server: ${err}')
        return
    }

    // Start the server
    server.start() or {
        eprintln('Failed to start MCP server: ${err}')
        return
    }
}
lib/mcp/servers/rhai/mcp/handlers.v (new file)
@@ -0,0 +1,58 @@
module rhaiconvert

import freeflowuniverse.herolib.mcp
import x.json2 as json { Any }
import freeflowuniverse.herolib.mcp.servers.rhai.logic as rhaido
import freeflowuniverse.herolib.core.pathlib
import os

// TODO: implement

pub fn handler(arguments map[string]Any) !mcp.ToolCallResult {
    path := arguments['path'].str()

    // Check if path exists
    if !os.exists(path) {
        return mcp.ToolCallResult{
            is_error: true
            content: mcp.result_to_mcp_tool_contents[string]("Error: Path '${path}' does not exist")
        }
    }

    // Determine if path is a file or directory
    is_directory := os.is_dir(path)

    mut message := ""

    // TODO: implement

    if is_directory {
        // Convert all rhai files in the directory
        rhaido.convert_rhai(path) or {
            return mcp.ToolCallResult{
                is_error: true
                content: mcp.result_to_mcp_tool_contents[string]("Error converting rhai files in directory: ${err}")
            }
        }
        message = "Successfully converted all rhai files in directory '${path}'"
    } else if path.ends_with(".rhai") {
        // Convert a single rhai file
        rhaido.convert_rhai_file(path) or {
            return mcp.ToolCallResult{
                is_error: true
                content: mcp.result_to_mcp_tool_contents[string]("Error converting rhai file: ${err}")
            }
        }
        message = "Successfully converted rhai file '${path}'"
    } else {
        return mcp.ToolCallResult{
            is_error: true
            content: mcp.result_to_mcp_tool_contents[string]("Error: Path '${path}' is not a directory or .rhai file")
        }
    }

    return mcp.ToolCallResult{
        is_error: false
        content: mcp.result_to_mcp_tool_contents[string](message)
    }
}
lib/mcp/servers/rhai/mcp/mcp.v (new file)
@@ -0,0 +1,27 @@
module pugconvert

import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.mcp.logger
import freeflowuniverse.herolib.schemas.jsonrpc

pub fn new_mcp_server() !&mcp.Server {
    logger.info('Creating new Rhai MCP server')

    // Initialize the server with the empty handlers map
    mut server := mcp.new_server(mcp.MemoryBackend{
        tools: {
            'rhai_interface': specs
        }
        tool_handlers: {
            'rhai_interface': handler
        }
    }, mcp.ServerParams{
        config: mcp.ServerConfiguration{
            server_info: mcp.ServerInfo{
                name: 'developer'
                version: '1.0.0'
            }
        }
    })!
    return server
}
lib/mcp/servers/rhai/mcp/specifications.v (new file)
@@ -0,0 +1,21 @@
module pugconvert

import freeflowuniverse.herolib.mcp
import x.json2 as json { Any }
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.mcp.logger

const specs = mcp.Tool{
    name: 'rhai_interface'
    description: 'Add Rhai Interface to Rust Code Files'
    input_schema: jsonschema.Schema{
        typ: 'object'
        properties: {
            'path': jsonschema.SchemaRef(jsonschema.Schema{
                typ: 'string',
                description: 'Path to a .rs file or directory containing .rs files to make rhai interface for',
            })
        }
        required: ['path']
    }
}
lib/mcp/servers/vcode/cmd/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
main

lib/mcp/servers/vcode/cmd/compile.sh (executable, new file)
@@ -0,0 +1,16 @@
#!/bin/bash
set -ex

export name="mcp_vcode"

# Change to the directory containing this script
cd "$(dirname "$0")"

# Compile the V program
v -n -w -gc none -cc tcc -d use_openssl -enable-globals main.v

# Ensure the binary is executable
chmod +x main
mv main ~/hero/bin/${name}

echo "Compilation successful. Binary '${name}' is ready."
lib/mcp/servers/vcode/cmd/main.v (new file)
@@ -0,0 +1,17 @@
module main

import freeflowuniverse.herolib.mcp.servers.vcode

fn main() {
    // Create a new MCP server
    mut server := vcode.new_mcp_server() or {
        eprintln('Failed to create MCP server: ${err}')
        return
    }

    // Start the server
    server.start() or {
        eprintln('Failed to start MCP server: ${err}')
        return
    }
}
lib/mcp/servers/vcode/mcp/handlers.v (new file)
@@ -0,0 +1,54 @@
module pugconvert

import freeflowuniverse.herolib.mcp
import x.json2 as json { Any }
import freeflowuniverse.herolib.mcp.aitools.pugconvert
import freeflowuniverse.herolib.core.pathlib
import os

pub fn handler(arguments map[string]Any) !mcp.ToolCallResult {
    path := arguments['path'].str()

    // Check if path exists
    if !os.exists(path) {
        return mcp.ToolCallResult{
            is_error: true
            content: mcp.result_to_mcp_tool_contents[string]("Error: Path '${path}' does not exist")
        }
    }

    // Determine if path is a file or directory
    is_directory := os.is_dir(path)

    mut message := ""

    if is_directory {
        // Convert all pug files in the directory
        pugconvert.convert_pug(path) or {
            return mcp.ToolCallResult{
                is_error: true
                content: mcp.result_to_mcp_tool_contents[string]("Error converting pug files in directory: ${err}")
            }
        }
        message = "Successfully converted all pug files in directory '${path}'"
    } else if path.ends_with(".v") {
        // Convert a single pug file
        pugconvert.convert_pug_file(path) or {
            return mcp.ToolCallResult{
                is_error: true
                content: mcp.result_to_mcp_tool_contents[string]("Error converting pug file: ${err}")
            }
        }
        message = "Successfully converted pug file '${path}'"
    } else {
        return mcp.ToolCallResult{
            is_error: true
            content: mcp.result_to_mcp_tool_contents[string]("Error: Path '${path}' is not a directory or .pug file")
        }
    }

    return mcp.ToolCallResult{
        is_error: false
        content: mcp.result_to_mcp_tool_contents[string](message)
    }
}
lib/mcp/servers/vcode/mcp/mcp.v (new file)
@@ -0,0 +1,27 @@
module pugconvert

import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.mcp.logger
import freeflowuniverse.herolib.schemas.jsonrpc

pub fn new_mcp_server() !&mcp.Server {
    logger.info('Creating new Developer MCP server')

    // Initialize the server with the empty handlers map
    mut server := mcp.new_server(mcp.MemoryBackend{
        tools: {
            'pugconvert': specs
        }
        tool_handlers: {
            'pugconvert': handler
        }
    }, mcp.ServerParams{
        config: mcp.ServerConfiguration{
            server_info: mcp.ServerInfo{
                name: 'developer'
                version: '1.0.0'
            }
        }
    })!
    return server
}
lib/mcp/servers/vcode/mcp/specifications.v (new file)
@@ -0,0 +1,21 @@
module pugconvert

import freeflowuniverse.herolib.mcp
import x.json2 as json { Any }
import freeflowuniverse.herolib.schemas.jsonschema
import freeflowuniverse.herolib.mcp.logger

const specs = mcp.Tool{
    name: 'pugconvert'
    description: 'Convert Pug template files to Jet template files'
    input_schema: jsonschema.Schema{
        typ: 'object'
        properties: {
            'path': jsonschema.SchemaRef(jsonschema.Schema{
                typ: 'string',
                description: 'Path to a .pug file or directory containing .pug files to convert'
            })
        }
        required: ['path']
    }
}
lib/mcp/vcode/.gitignore (vendored)
@@ -1,3 +0,0 @@
vdo
v_do

@@ -1,15 +0,0 @@
module vcode

import cli

pub const command := cli.Command{
    sort_flags: true
    name: 'vcode'
    execute: cmd_vcode
    description: 'will list existing mdbooks'
}

fn cmd_vcode(cmd cli.Command) ! {
    mut server := new_mcp_server(&VCode{})!
    server.start()!
}
@@ -1,12 +0,0 @@
{
    "method": "tools/call",
    "params": {
        "name": "generate_module_from_openapi",
        "arguments": {
            "openapi_path": "/Users/timurgordon/code/github/freeflowuniverse/herolib/lib/circles/mcc/openapi.json"
        },
        "_meta": {
            "progressToken": 0
        }
    }
}
@@ -1,28 +0,0 @@
module vcode

import freeflowuniverse.herolib.mcp.logger
import freeflowuniverse.herolib.mcp
import freeflowuniverse.herolib.schemas.jsonrpc

// fn main() {
//     // logger.info('Starting V-Do server')

//     // Create an empty map of procedure handlers
//     handlers := map[string]jsonrpc.ProcedureHandler{}

//     // Initialize the server with the empty handlers map
//     mut server := mcp.new_server(mcp.MemoryBackend{}, mcp.ServerParams{
//         handlers: handlers
//         config: mcp.ServerConfiguration{
//             server_info: mcp.ServerInfo{
//                 name: 'v_do'
//                 version: '1.0.0'
//             }
//         }
//     })!

//     server.start() or {
//         logger.fatal('Error starting server: ${err}')
//         exit(1)
//     }
// }