| Crates.io | coderlib |
| lib.rs | coderlib |
| version | 0.1.0 |
| created_at | 2025-06-25 17:42:00.205293+00 |
| updated_at | 2025-06-25 17:42:00.205293+00 |
| description | A Rust library for AI-powered code assistance and agentic systems |
| homepage | https://github.com/mexyusef/coderlib |
| repository | https://github.com/mexyusef/coderlib |
| max_upload_size | |
| id | 1726230 |
| size | 1,284,359 bytes |
CoderLib is a comprehensive Rust library for LLM-powered code generation, analysis, and editing.
## Installation

Add CoderLib to your `Cargo.toml`:

```toml
[dependencies]
coderlib = "0.1.0"
tokio = { version = "1.0", features = ["full"] }
```
Enable specific functionality based on your needs:

```toml
[dependencies]
coderlib = { version = "0.1.0", features = ["full"] }
```
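If you only need part of the functionality, the defaults can presumably be trimmed in the usual Cargo way. A sketch, assuming the default features are independently selectable (feature names taken from the list below):

```toml
[dependencies]
coderlib = { version = "0.1.0", default-features = false, features = ["tools", "lsp"] }
```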
Available features:

- `tools` - File operations and code analysis tools (default)
- `lsp` - Language Server Protocol support (default)
- `mcp` - Model Context Protocol integration (default)
- `full` - All features enabled

## Quick Start

```rust
use coderlib::{CoderLib, CoderLibConfig, CodeRequest};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Initialize with default configuration
    let config = CoderLibConfig::default();
    let coder_lib = CoderLib::new(config).await?;

    // Create a session
    let session_id = coder_lib.create_session(Some("My Session".to_string())).await?;

    // Make a request
    let request = CodeRequest {
        session_id,
        content: "Write a hello world function in Rust".to_string(),
        attachments: Vec::new(),
        model: None,
        context: Default::default(),
    };

    // Process and get streaming response
    let mut response_stream = coder_lib.process_request(request).await?;
    while let Ok(response) = response_stream.recv().await {
        print!("{}", response.content);
        if response.is_complete {
            break;
        }
    }

    Ok(())
}
```
## Tool System

```rust
use coderlib::tools::{ToolRouter, FileOperationsTool};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut router = ToolRouter::new();

    // Register file operations tool
    router.register_tool(Box::new(FileOperationsTool::new()));

    // Execute a tool
    let result = router.execute_tool(
        "read_file",
        serde_json::json!({
            "path": "src/main.rs"
        })
    ).await?;

    println!("File content: {}", result);
    Ok(())
}
```
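The router is presumably open to custom tools as well. A minimal sketch of what one could look like; the `Tool` trait shape here (a name plus an async `execute` over JSON arguments) is an assumption for illustration, not the crate's confirmed API:

```rust
use async_trait::async_trait;
use serde_json::Value;

// Hypothetical trait shape; coderlib's actual Tool trait may differ.
#[async_trait]
pub trait Tool: Send + Sync {
    fn name(&self) -> &str;
    async fn execute(&self, args: Value) -> Result<Value, Box<dyn std::error::Error + Send + Sync>>;
}

pub struct LineCountTool;

#[async_trait]
impl Tool for LineCountTool {
    fn name(&self) -> &str {
        "count_lines"
    }

    async fn execute(&self, args: Value) -> Result<Value, Box<dyn std::error::Error + Send + Sync>> {
        // Read the file named in `args` and return its line count.
        let path = args["path"].as_str().ok_or("missing `path` argument")?;
        let text = tokio::fs::read_to_string(path).await?;
        Ok(Value::from(text.lines().count()))
    }
}
```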
## LSP Integration

```rust
use coderlib::lsp::{LspClient, LspConfig};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let config = LspConfig {
        server_command: "rust-analyzer".to_string(),
        server_args: vec![],
        root_uri: "file:///path/to/project".to_string(),
    };

    let mut client = LspClient::new(config).await?;
    client.initialize().await?;

    // Get diagnostics for a file
    let diagnostics = client.get_diagnostics("src/main.rs").await?;
    println!("Found {} diagnostics", diagnostics.len());
    Ok(())
}
```
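The same pattern should extend to other language servers; for example, pointing `LspConfig` at `typescript-language-server` (the `--stdio` flag is that server's standard invocation; the paths are illustrative):

```rust
use coderlib::lsp::{LspClient, LspConfig};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let config = LspConfig {
        server_command: "typescript-language-server".to_string(),
        server_args: vec!["--stdio".to_string()],
        root_uri: "file:///path/to/web-project".to_string(),
    };

    let mut client = LspClient::new(config).await?;
    client.initialize().await?;

    let diagnostics = client.get_diagnostics("src/index.ts").await?;
    println!("Found {} diagnostics", diagnostics.len());
    Ok(())
}
```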
## Configuration

CoderLib supports flexible configuration through TOML files or programmatic setup.

### Configuration File (`coderlib.toml`)

```toml
debug = false
log_level = "info"
# OpenAI Provider
[providers.openai]
enabled = true
api_key = "your-api-key"
default_model = "gpt-4"
max_tokens = 4000
timeout = 30
[providers.openai.settings]
base_url = "https://api.openai.com/v1"
# Anthropic Provider
[providers.anthropic]
enabled = true
api_key = "your-anthropic-key"
default_model = "claude-3-5-sonnet-20241022"
max_tokens = 4000
# Local Model Provider
[providers.local]
enabled = true
base_url = "http://localhost:11434" # Ollama default
default_model = "llama3.1:8b"
# Storage Configuration
[storage]
storage_type = "sqlite"
database_path = "coderlib.db"
# Tool Configuration
[tools]
shell_enabled = true
file_operations_enabled = true
max_file_size = 10485760
allowed_extensions = [".rs", ".py", ".js", ".ts", ".md"]
# Permission System
[permissions]
require_confirmation = true
dangerous_commands_blocked = true
allowed_directories = ["/home/user/projects", "/tmp"]
# LSP Configuration
[lsp]
rust_analyzer_path = "rust-analyzer"
typescript_server_path = "typescript-language-server"
python_server_path = "pylsp"
```
### Programmatic Configuration

```rust
use coderlib::{CoderLibConfig, ProviderConfig, ProviderType};

let config = CoderLibConfig {
    debug: false,
    log_level: "info".to_string(),
    providers: vec![
        ProviderConfig {
            provider_type: ProviderType::OpenAI,
            api_key: Some("your-api-key".to_string()),
            base_url: Some("https://api.openai.com/v1".to_string()),
            default_model: "gpt-4".to_string(),
            enabled: true,
            ..Default::default()
        }
    ],
    ..Default::default()
};
```
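Loading the TOML file into the same struct is straightforward if `CoderLibConfig` derives `serde::Deserialize` (an assumption here; prefer a built-in loader if the crate exposes one):

```rust
use coderlib::CoderLibConfig;

fn load_config(path: &str) -> Result<CoderLibConfig, Box<dyn std::error::Error>> {
    // Assumes CoderLibConfig implements serde::Deserialize.
    let text = std::fs::read_to_string(path)?;
    let config: CoderLibConfig = toml::from_str(&text)?;
    Ok(config)
}
```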
## Architecture

CoderLib follows a modular, plugin-based architecture designed for flexibility and extensibility:

```
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
│ Host Editor │◄──►│ CoderLib │◄──►│ LLM Provider │
│ (Edit/IDE) │ │ Core │ │ (OpenAI, etc.) │
└─────────────────┘ └─────────────────┘ └─────────────────┘
│ │ │
│ │ │
▼ ▼ ▼
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
│ Integration │ │ Tool System │ │ Storage │
│ Layer │ │ (File, Git, │ │ (SQLite, │
│ (LSP, MCP) │ │ Code, Shell) │ │ Memory) │
└─────────────────┘ └─────────────────┘ └─────────────────┘
│ │ │
│ │ │
▼ ▼ ▼
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
│ Permission │ │ Session │ │ Event │
│ System │ │ Management │ │ System │
└─────────────────┘ └─────────────────┘ └─────────────────┘
```
## Editor Integration

```rust
use coderlib::CoderLib;
use coderlib::integration::{EditHost, HostEvent, HostCommand};
// `Result` below is assumed to be coderlib's own alias.
use coderlib::Result;

struct AIAssistantPlugin {
    coderlib: CoderLib,
}

impl EditHost for AIAssistantPlugin {
    async fn handle_event(&mut self, event: HostEvent) -> Result<Option<HostCommand>> {
        match event {
            HostEvent::KeyPressed(key) if key == "F10" => {
                // Show AI context menu
                Ok(Some(HostCommand::ShowContextMenu {
                    items: vec![
                        "Explain Code".to_string(),
                        "Refactor".to_string(),
                        "Generate Tests".to_string(),
                        "Fix Issues".to_string(),
                    ]
                }))
            }
            HostEvent::MenuItemSelected(item) => {
                self.handle_ai_request(item).await
            }
            _ => Ok(None),
        }
    }
}
```
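The `handle_ai_request` helper called above is not defined in the snippet. A hypothetical completion, reusing the `CodeRequest` flow from the quick start (the `ShowMessage` command variant is illustrative, not confirmed by the crate):

```rust
impl AIAssistantPlugin {
    // Hypothetical helper: turn the selected menu item into a CoderLib
    // request and hand the accumulated answer back to the host editor.
    async fn handle_ai_request(&mut self, item: String) -> Result<Option<HostCommand>> {
        let session_id = self.coderlib.create_session(Some(item.clone())).await?;
        let request = CodeRequest {
            session_id,
            content: item,
            attachments: Vec::new(),
            model: None,
            context: Default::default(),
        };

        let mut stream = self.coderlib.process_request(request).await?;
        let mut output = String::new();
        while let Ok(chunk) = stream.recv().await {
            output.push_str(&chunk.content);
            if chunk.is_complete {
                break;
            }
        }

        // `ShowMessage` is illustrative; use whatever HostCommand your host supports.
        Ok(Some(HostCommand::ShowMessage(output)))
    }
}
```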
## MCP Bridge

```rust
use coderlib::{CoderLib, CoderLibConfig, Result};
use coderlib::mcp::{McpBridge, McpServer};

#[tokio::main]
async fn main() -> Result<()> {
    // `config` was undefined in the original snippet; defaults work for a demo.
    let config = CoderLibConfig::default();
    let coderlib = CoderLib::new(config).await?;

    // Create MCP bridge
    let bridge = McpBridge::new(coderlib);

    // Start MCP server
    let server = McpServer::new(bridge);
    server.listen("127.0.0.1:8080").await?;
    Ok(())
}
```
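On the other side of the socket, a client would speak JSON-RPC 2.0, the protocol MCP is built on. A rough sketch; the newline-delimited framing and the empty `initialize` params are assumptions about this server's transport:

```rust
use serde_json::json;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::TcpStream;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut stream = TcpStream::connect("127.0.0.1:8080").await?;

    // Send a JSON-RPC 2.0 initialize request.
    let init = json!({ "jsonrpc": "2.0", "id": 1, "method": "initialize", "params": {} });
    stream.write_all(format!("{}\n", init).as_bytes()).await?;

    // Read back whatever the server answers.
    let mut buf = vec![0u8; 4096];
    let n = stream.read(&mut buf).await?;
    println!("server replied: {}", String::from_utf8_lossy(&buf[..n]));
    Ok(())
}
```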
## Examples

The `examples/` directory contains comprehensive usage examples:

- `basic_usage.rs` - Simple interactive AI assistant
- `provider_test.rs` - Testing different LLM providers
- `openai_test.rs` - OpenAI-specific integration
- `gemini_provider.rs` - Google Gemini integration
- `comprehensive_tools_demo.rs` - Full tool system showcase
- `permission_system_demo.rs` - Security and permissions
- `auto_summarization_demo.rs` - Context management
- `custom_commands_demo.rs` - Custom command templates
- `edit_integration.rs` - Editor plugin integration
- `mcp_bridge_test.rs` - MCP server integration
- `lsp_integration_test.rs` - Language server integration

Run them with Cargo:

```bash
# Basic usage
cargo run --example basic_usage
# Tool system demo
cargo run --example comprehensive_tools_demo
# Permission system
cargo run --example permission_system_demo
# MCP bridge
cargo run --example mcp_bridge_test
```
## Status

CoderLib is production-ready, with comprehensive coverage of the features described above.
## Contributing

We welcome contributions! To get started, create a feature branch (`git checkout -b feature/amazing-feature`), make your changes, and run `cargo test` before opening a pull request.

To build from source:

```bash
git clone https://github.com/mexyusef/coderlib.git
cd coderlib
cargo build
cargo test
```
See CONTRIBUTING.md for detailed guidelines.
## License

Licensed under either the Apache License, Version 2.0 or the MIT license, at your option.
CoderLib - A library for coders in the AI world. 🚀