| Crates.io | anthropic-sdk-rust |
| lib.rs | anthropic-sdk-rust |
| version | 0.1.1 |
| created_at | 2025-06-11 09:30:20.06395+00 |
| updated_at | 2025-06-11 09:42:25.784008+00 |
| description | Comprehensive, type-safe Rust SDK for the Anthropic API with streaming, tools, vision, files, and batch processing support |
| homepage | https://github.com/dimichgh/anthropic-sdk-rust |
| repository | https://github.com/dimichgh/anthropic-sdk-rust |
| max_upload_size | |
| id | 1708379 |
| size | 655,911 |
An unofficial, comprehensive, type-safe Rust SDK for the Anthropic API with full feature parity to the TypeScript SDK. Built with modern async/await patterns, extensive error handling, and ergonomic builder APIs.
Add this to your Cargo.toml:
[dependencies]
anthropic-sdk-rust = "0.1.1"
tokio = { version = "1.0", features = ["rt-multi-thread", "macros"] }
use anthropic_sdk::{Anthropic, Result};
#[tokio::main]
async fn main() -> Result<()> {
// Create a client with an explicit API key
let client = Anthropic::new("your-api-key")?;
// Or use environment variable ANTHROPIC_API_KEY
let client = Anthropic::from_env()?;
Ok(())
}
use anthropic_sdk::{Anthropic, MessageCreateBuilder, Result};
#[tokio::main]
async fn main() -> Result<()> {
let client = Anthropic::from_env()?;
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user("Hello, Claude! How are you today?")
.build()
)
.await?;
println!("Claude: {}", response.content.get_text());
Ok(())
}
use anthropic_sdk::{Anthropic, ClientConfig, LogLevel};
use std::time::Duration;
let config = ClientConfig::new("your-api-key")
.with_timeout(Duration::from_secs(30))
.with_max_retries(3)
.with_log_level(LogLevel::Info)
.with_base_url("https://api.anthropic.com");
let client = Anthropic::with_config(config)?;
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user("What's the capital of France?")
.system("You are a helpful geography assistant.")
.build()
)
.await?;
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user("Hi! What's your name?")
.assistant("Hello! I'm Claude, an AI assistant.")
.user("Nice to meet you! Can you help me with math?")
.system("You are a helpful math tutor.")
.temperature(0.3)
.build()
)
.await?;
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 2048)
.user("Write a creative story about space exploration.")
.temperature(0.8) // More creative
.top_p(0.9) // Nucleus sampling
.top_k(50) // Top-k sampling
.stop_sequences(vec!["THE END".to_string()])
.build()
)
.await?;
use anthropic_sdk::{MessageCreateBuilder, StreamEvent};
use futures::StreamExt;
let mut stream = client.messages()
.stream(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user("Tell me a story")
.build()
)
.await?;
while let Some(event) = stream.next().await {
match event? {
StreamEvent::ContentBlockDelta { delta, .. } => {
if let Some(text) = delta.text {
print!("{}", text);
}
}
StreamEvent::MessageStop => break,
_ => {}
}
}
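The same event loop can also accumulate the streamed deltas into a single String instead of printing each chunk; this sketch reuses only the client, builder, and event variants shown above.
// Collect the streamed text and print it once the stream ends.
let mut full_text = String::new();
let mut stream = client.messages()
    .stream(
        MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
            .user("Tell me a story")
            .build()
    )
    .await?;
while let Some(event) = stream.next().await {
    match event? {
        StreamEvent::ContentBlockDelta { delta, .. } => {
            if let Some(text) = delta.text {
                full_text.push_str(&text);
            }
        }
        StreamEvent::MessageStop => break,
        _ => {}
    }
}
println!("Full response: {}", full_text);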
use anthropic_sdk::{ContentBlockParam, MessageContent};
// Using base64 image
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user(MessageContent::Blocks(vec![
ContentBlockParam::text("What do you see in this image?"),
ContentBlockParam::image_base64("image/jpeg", base64_image_data),
]))
.build()
)
.await?;
// Using image URL
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user(MessageContent::Blocks(vec![
ContentBlockParam::text("Describe this image"),
ContentBlockParam::image_url("https://example.com/image.jpg"),
]))
.build()
)
.await?;
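The base64_image_data placeholder above can be produced from a local file. This is a minimal sketch that assumes the external base64 crate (0.21 or later) is added to Cargo.toml; it is not bundled with this SDK.
use base64::{engine::general_purpose::STANDARD, Engine as _};
use std::fs;

// Read an image from disk and encode it as standard (unwrapped) base64.
fn load_image_as_base64(path: &str) -> std::io::Result<String> {
    let bytes = fs::read(path)?;
    Ok(STANDARD.encode(bytes))
}

// let base64_image_data = load_image_as_base64("photo.jpg")?;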
use anthropic_sdk::{Tool, ToolFunction, MessageCreateBuilder};
// Define a weather tool
let weather_tool = Tool {
name: "get_weather".to_string(),
description: "Get current weather for a location".to_string(),
input_schema: serde_json::json!({
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "City name"
}
},
"required": ["location"]
}),
};
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user("What's the weather like in Paris?")
.tools(vec![weather_tool])
.build()
)
.await?;
// Handle tool use in response
if let Some(tool_use) = response.content.get_tool_use() {
println!("Claude wants to use tool: {}", tool_use.name);
println!("With input: {}", tool_use.input);
}
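After Claude requests a tool, your code runs it and returns the output in a follow-up message. The sketch below dispatches on the tool name; it assumes tool_use.input behaves like a serde_json::Value, and the tool-result constructor mentioned in the comment is hypothetical, so check the crate docs for the actual API.
// Execute the requested tool locally (weather lookup is stubbed here).
if let Some(tool_use) = response.content.get_tool_use() {
    let result = match tool_use.name.as_str() {
        "get_weather" => {
            // Assumes `input` follows the JSON schema defined above.
            let location = tool_use.input["location"].as_str().unwrap_or("unknown");
            format!("18°C and cloudy in {}", location) // stand-in for a real weather API call
        }
        other => format!("unknown tool: {}", other),
    };
    // In a real flow this result goes back to Claude in a follow-up request as a
    // tool-result content block; the exact constructor (e.g. a hypothetical
    // ContentBlockParam::tool_result) may differ, so consult the crate docs.
    println!("Tool result to return: {}", result);
}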
// Upload a file
let file = client.files()
.upload("path/to/document.pdf", "assistants")
.await?;
println!("Uploaded file: {} ({})", file.filename, file.id);
// List files
let files = client.files()
.list(Some("assistants"), None, None)
.await?;
// Use file in conversation
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.user(MessageContent::Blocks(vec![
ContentBlockParam::text("Summarize this document"),
ContentBlockParam::document(file.id, "Document to analyze"),
]))
.build()
)
.await?;
use anthropic_sdk::{BatchCreateBuilder, BatchRequestBuilder};
// Create batch requests
let requests = vec![
BatchRequestBuilder::new("req_1")
.messages(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 100)
.user("What is 2+2?")
.build()
)
.build(),
BatchRequestBuilder::new("req_2")
.messages(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 100)
.user("What is 3+3?")
.build()
)
.build(),
];
// Create and submit batch
let batch = client.batches()
.create(
BatchCreateBuilder::new(requests)
.completion_window("24h")
.build()
)
.await?;
println!("Batch created: {}", batch.id);
// Check batch status
let status = client.batches().retrieve(&batch.id).await?;
println!("Batch status: {:?}", status.processing_status);
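Batches complete asynchronously, so a simple polling loop can wait for the terminal state. This sketch reuses only the retrieve call and processing_status field shown above; the "ended" check is an assumption about how the SDK names the terminal state, so match against the actual status enum in real code.
use std::time::Duration;

// Poll the batch until it reaches a terminal state.
loop {
    let status = client.batches().retrieve(&batch.id).await?;
    println!("Batch {} status: {:?}", batch.id, status.processing_status);

    // The Anthropic API reports "ended" once all requests have finished.
    if format!("{:?}", status.processing_status).to_lowercase().contains("ended") {
        break;
    }
    tokio::time::sleep(Duration::from_secs(30)).await;
}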
use anthropic_sdk::{ModelRequirements, ModelCapability};
// List available models
let models = client.models().list(None).await?;
for model in models.data {
println!("Model: {} ({})", model.display_name, model.id);
}
// Find best model for requirements
let requirements = ModelRequirements::new()
.max_cost_per_million_tokens(15.0)
.min_context_length(100000)
.required_capabilities(vec![
ModelCapability::Vision,
ModelCapability::ToolUse,
])
.build();
let best_model = client.models()
.find_best_model(&requirements)
.await?;
println!("Best model: {}", best_model.display_name);
// Compare models
let comparison = client.models()
.compare_models(&["claude-3-5-sonnet-latest", "claude-3-5-haiku-latest"])
.await?;
println!("Comparison: {}", comparison.summary.recommendation);
The SDK uses a comprehensive type system; failures are returned as a single AnthropicError enum that can be matched on directly:
use anthropic_sdk::{AnthropicError, Result};
match client.messages().create(params).await {
Ok(response) => println!("Success: {}", response.content.get_text()),
Err(AnthropicError::ApiError { status, message, .. }) => {
eprintln!("API Error {}: {}", status, message);
}
Err(AnthropicError::NetworkError { source }) => {
eprintln!("Network Error: {}", source);
}
Err(e) => eprintln!("Other Error: {}", e),
}
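Transient failures (rate limits, server errors, network hiccups) are usually worth retrying with backoff. This is a minimal sketch built only on the error variants shown above; it assumes the status field is a plain numeric HTTP code, which may differ from the SDK's actual type.
use anthropic_sdk::{AnthropicError, MessageCreateBuilder};
use std::time::Duration;

// Retry transient failures a few times with a simple linear backoff.
let mut attempt = 0u32;
let response = loop {
    let params = MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
        .user("Hello, Claude!")
        .build();
    match client.messages().create(params).await {
        Ok(response) => break response,
        Err(e) => {
            let transient = match &e {
                AnthropicError::NetworkError { .. } => true,
                AnthropicError::ApiError { status, .. } => *status == 429 || *status >= 500,
                _ => false,
            };
            if !transient || attempt >= 3 {
                return Err(e);
            }
            attempt += 1;
            eprintln!("Transient error (attempt {}): {}", attempt, e);
            tokio::time::sleep(Duration::from_secs(2 * attempt as u64)).await;
        }
    }
};
println!("Succeeded: {}", response.content.get_text());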
The SDK is built on tokio for asynchronous I/O. Client behavior can also be configured through environment variables:
ANTHROPIC_API_KEY=your-api-key
ANTHROPIC_BASE_URL=https://api.anthropic.com # Optional
ANTHROPIC_TIMEOUT=30 # Seconds
ANTHROPIC_MAX_RETRIES=3 # Retry attempts
use anthropic_sdk::{ClientConfig, LogLevel, Anthropic};
use std::time::Duration;
let config = ClientConfig::new("api-key")
.with_base_url("https://api.anthropic.com")
.with_timeout(Duration::from_secs(60))
.with_max_retries(5)
.with_log_level(LogLevel::Debug)
.with_user_agent("MyApp/1.0");
let client = Anthropic::with_config(config)?;
The examples/ directory contains comprehensive demonstrations:
basic_client.rs - Basic client setup and configuration
messages_api.rs - Complete Messages API usage
streaming_example.rs - Real-time streaming responses
comprehensive_tool_use.rs - Advanced tool integration
comprehensive_file_upload.rs - File management workflows
phase5_1_batches.rs - Batch processing examples
phase5_3_models_api.rs - Model selection and comparison
production_patterns.rs - Production-ready patterns
Run examples:
cargo run --example basic_client
cargo run --example messages_api
cargo run --example streaming_example
# Run all tests
cargo test
# Run with output
cargo test -- --nocapture
# Test specific module
cargo test messages
# Run integration tests
cargo test --test integration
Generate and view documentation:
# Generate docs
cargo doc --open --no-deps
# View online documentation
# https://docs.rs/anthropic-sdk-rust
The SDK automatically manages HTTP connections, applying the configured timeout and retry settings.
To contribute, create a feature branch, run the tests, then commit and push your changes before opening a pull request:
git checkout -b feature/amazing-feature
cargo test
git commit -m 'Add amazing feature'
git push origin feature/amazing-feature
This project is licensed under the MIT License - see the LICENSE file for details.
// Customer Support Bot
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 1024)
.system("You are a helpful customer support agent.")
.user("I need help with my order #12345")
.temperature(0.1) // Consistent responses
.build()
)
.await?;
// Code Review Assistant
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 2048)
.system("You are an expert code reviewer.")
.user("Please review this Rust code for potential issues")
.tools(vec![code_analysis_tool])
.build()
)
.await?;
// Document Analysis
let response = client.messages()
.create(
MessageCreateBuilder::new("claude-3-5-sonnet-latest", 4096)
.user(MessageContent::Blocks(vec![
ContentBlockParam::text("Analyze this financial report"),
ContentBlockParam::document(uploaded_file.id, "Q3 Report"),
]))
.build()
)
.await?;
Ready to build amazing AI-powered applications with Rust? Get started with the Anthropic SDK today! 🦀✨