| Crates.io | mcp-runner |
| lib.rs | mcp-runner |
| version | 0.3.1 |
| created_at | 2025-04-24 20:51:36.103235+00 |
| updated_at | 2025-05-13 15:56:45.315664+00 |
| description | A Rust library for running and interacting with Model Context Protocol (MCP) servers locally |
| homepage | |
| repository | https://github.com/Streamline-TS/mcp-runner |
| max_upload_size | |
| id | 1648005 |
| size | 369,323 |
A Rust library for running and interacting with Model Context Protocol (MCP) servers locally.
MCP Runner provides a complete solution for managing Model Context Protocol servers in Rust applications: it can start and stop local server processes, communicate with them through a client API (listing and calling tools), and optionally expose them to web clients through a built-in SSE proxy.
Add this to your Cargo.toml:
[dependencies]
mcp-runner = "0.3.1"
Here's a simple example of using MCP Runner to start a server and call a tool:
use mcp_runner::{McpRunner, error::Result};
use serde::{Deserialize, Serialize};
use serde_json::json;
#[tokio::main]
async fn main() -> Result<()> {
    // Create runner from config file
    let mut runner = McpRunner::from_config_file("config.json")?;

    // Start all servers and the SSE proxy if configured
    let (server_ids, proxy_started) = runner.start_all_with_proxy().await;
    let server_ids = server_ids?;

    if proxy_started {
        println!("SSE proxy started successfully");
    }

    // Get client for interacting with a specific server
    let server_id = runner.get_server_id("fetch")?;
    let client = runner.get_client(server_id)?;

    // Initialize the client
    client.initialize().await?;

    // List available tools
    let tools = client.list_tools().await?;
    println!("Available tools:");
    for tool in tools {
        println!("  - {}: {}", tool.name, tool.description);
    }

    // Call the fetch tool with structured input
    let fetch_result = client.call_tool("fetch", &json!({
        "url": "https://modelcontextprotocol.io"
    })).await?;
    println!("Fetch result: {}", fetch_result);

    // Stop the server when done
    runner.stop_server(server_id).await?;

    Ok(())
}
This library uses the tracing crate for logging and diagnostics. To enable logging, ensure you have a tracing_subscriber configured in your application and set the RUST_LOG environment variable. For example:
# Show info level logs for all crates
RUST_LOG=info cargo run --example simple_client
# Show trace level logs specifically for mcp_runner
RUST_LOG=mcp_runner=trace cargo run --example simple_client
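A minimal subscriber setup for your own binary, assuming the tracing-subscriber crate with its env-filter feature enabled (this is an illustrative sketch, not part of mcp-runner itself):
use tracing_subscriber::EnvFilter;

fn main() {
    // Honor RUST_LOG if set, otherwise default to info-level logs.
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")),
        )
        .init();

    // ... run your MCP Runner code here ...
}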
MCP Runner uses JSON configuration to define MCP servers and optional SSE proxy settings.
{
  "mcpServers": {
    "fetch": {
      "command": "uvx",
      "args": ["mcp-server-fetch"]
    },
    "filesystem": {
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/allowed/files"]
    }
  },
  "sseProxy": {
    "address": "127.0.0.1",
    "port": 3000,
    "allowedServers": ["fetch", "filesystem"],
    "authenticate": {
      "bearer": {
        "token": "your-secure-token"
      }
    }
  }
}
You can load configurations in three different ways:
1. From a configuration file:

use mcp_runner::McpRunner;

let runner = McpRunner::from_config_file("config.json")?;
2. From a JSON string:

use mcp_runner::McpRunner;

let config_json = r#"{
    "mcpServers": {
        "fetch": {
            "command": "uvx",
            "args": ["mcp-server-fetch"]
        }
    }
}"#;
let runner = McpRunner::from_config_str(config_json)?;
3. Programmatically, by building the config in code:

use mcp_runner::{McpRunner, config::{Config, ServerConfig}};
use std::collections::HashMap;

let mut servers = HashMap::new();
let server_config = ServerConfig {
    command: "uvx".to_string(),
    args: vec!["mcp-server-fetch".to_string()],
    env: HashMap::new(),
};
servers.insert("fetch".to_string(), server_config);

let config = Config { mcp_servers: servers };

// Initialize the runner
let runner = McpRunner::new(config);
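If a server needs environment variables, populate the env map on ServerConfig before building the Config. A minimal sketch using the same fields shown above; the variable name and value are placeholders:
use mcp_runner::config::ServerConfig;
use std::collections::HashMap;

let mut env = HashMap::new();
// Placeholder variable and value for illustration only.
env.insert("EXAMPLE_API_KEY".to_string(), "placeholder-value".to_string());

let server_config = ServerConfig {
    command: "uvx".to_string(),
    args: vec!["mcp-server-fetch".to_string()],
    env,
};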
MCP Runner uses a custom error type whose variants cover the library's failure modes (for example, a requested server not being found or a communication error with a running server), so callers can match on the specific case:
match result {
    Ok(value) => println!("Success: {:?}", value),
    Err(Error::ServerNotFound(name)) => println!("Server not found: {}", name),
    Err(Error::Communication(msg)) => println!("Communication error: {}", msg),
    Err(e) => println!("Other error: {}", e),
}
McpRunner is the main entry point for managing MCP servers:
let mut runner = McpRunner::from_config_file("config.json")?;
let server_ids = runner.start_all_servers().await?;
The client returned by get_client is used for interacting with a running MCP server:
let client = runner.get_client(server_id)?;
client.initialize().await?;

// Call tools
let result = client.call_tool("fetch", &json!({
    "url": "https://example.com",
})).await?;
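If you want typed results instead of raw JSON, you can deserialize the returned value with serde. This sketch assumes the value returned by call_tool is serde_json-compatible JSON; the FetchResult struct is a hypothetical shape chosen for illustration and should match what your tool actually returns:
use serde::Deserialize;

// Hypothetical result shape for illustration only.
#[derive(Debug, Deserialize)]
struct FetchResult {
    content: String,
}

let result = client.call_tool("fetch", &json!({
    "url": "https://example.com",
})).await?;
// Assumes the tool returned JSON matching FetchResult.
let typed: FetchResult = serde_json::from_value(result)
    .expect("tool returned an unexpected shape");
println!("Fetched {} bytes of content", typed.content.len());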
The SSE (Server-Sent Events) proxy allows clients to connect to MCP servers through HTTP and receive real-time updates using the Server-Sent Events protocol. Built on Actix Web, it provides a unified JSON-RPC over HTTP interface with high performance, reliability, and maintainability.
You can start the SSE proxy automatically when starting your servers:
// Start all servers and the proxy if configured
let (server_ids, proxy_started) = runner.start_all_with_proxy().await;
let server_ids = server_ids?;
if proxy_started {
    println!("SSE proxy started successfully");
}
Or manually start it after configuring your servers:
if runner.is_sse_proxy_configured() {
    runner.start_sse_proxy().await?;
    println!("SSE proxy started manually");
}
Configure the SSE proxy in your configuration file:
{
  "mcpServers": { /* server configs */ },
  "sseProxy": {
    "address": "127.0.0.1",   // Listen address (localhost only) - default if omitted
    "port": 3000,             // Port to listen on - default if omitted
    "workers": 4,             // Number of worker threads - default is 4 if omitted
    "allowedServers": [       // Optional: restrict which servers can be accessed
      "fetch",
      "filesystem"
    ],
    "authenticate": {         // Optional: require authentication
      "bearer": {
        "token": "your-secure-token-here"
      }
    }
  }
}
The SSE proxy exposes the following HTTP endpoints:
| Endpoint | Method | Description |
|---|---|---|
| /sse | GET | SSE event stream endpoint for receiving real-time updates (sends endpoint and message events) |
| /sse/messages | POST | JSON-RPC endpoint for sending requests to MCP servers (supports initialize, tools/list, tools/call, ping) |
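For a quick smoke test from Rust, you can POST a JSON-RPC request to the messages endpoint with any HTTP client. This sketch assumes the reqwest (with its json feature) and tokio crates, the listen address and example bearer token from the configuration above, and mirrors the tools/call body used by the JavaScript example further down:
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let body = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "tools/call",
        "params": {
            "server": "fetch",
            "tool": "fetch",
            "arguments": { "url": "https://modelcontextprotocol.io" }
        }
    });

    // The POST returns an acknowledgement; the actual result arrives on the /sse event stream.
    let response = reqwest::Client::new()
        .post("http://127.0.0.1:3000/sse/messages")
        .bearer_auth("your-secure-token-here")
        .json(&body)
        .send()
        .await?;

    println!("Status: {}", response.status());
    Ok(())
}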
Check the examples/ directory for more usage examples:
simple_client.rs: Basic usage of the client API
# Run with info level logging
RUST_LOG=info cargo run --example simple_client
sse_proxy.rs: Example of using the SSE proxy to expose MCP servers to web clients
# Run with info level logging
RUST_LOG=info cargo run --example sse_proxy
This example uses the config in examples/sse_config.json to start servers and an SSE proxy, allowing web clients to connect and interact with MCP servers through HTTP and SSE.
JavaScript client example:
// Connect to the event stream
const eventSource = new EventSource('http://localhost:3000/sse');

// First you'll receive the endpoint information
eventSource.addEventListener('endpoint', (event) => {
  console.log('Received endpoint path:', event.data);
});

// Then you'll receive JSON-RPC responses
eventSource.addEventListener('message', (event) => {
  const response = JSON.parse(event.data);
  console.log('Received JSON-RPC response:', response);
});

// Make a tool call
async function callTool() {
  const response = await fetch('http://localhost:3000/sse/messages', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': 'Bearer your-secure-token-here'
    },
    body: JSON.stringify({
      jsonrpc: '2.0',
      id: 1,
      method: 'tools/call',
      params: {
        server: 'fetch',
        tool: 'fetch',
        arguments: {
          url: 'https://modelcontextprotocol.io'
        }
      }
    })
  });
  const result = await response.json();
  console.log('Tool call initiated:', result);
  // Actual response will come through the SSE event stream
}
Contributions are welcome! Please feel free to submit a Pull Request.
This project is licensed under the terms in the LICENSE file.