| Crates.io | oramacore-client |
| lib.rs | oramacore-client |
| version | 1.2.0 |
| created_at | 2025-08-07 05:22:11.098763+00 |
| updated_at | 2025-08-07 05:22:11.098763+00 |
| description | Server-side Rust client for OramaCore and Orama Cloud |
| homepage | |
| repository | https://github.com/oramasearch/oramacore-client-rust |
| max_upload_size | |
| id | 1784712 |
| size | 210,578 |
A comprehensive Rust client for OramaCore and Orama Cloud.
Add this to your `Cargo.toml`:

```toml
[dependencies]
oramacore-client = "1.2.0"
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
```
Then create a client and run your first search:

```rust
use oramacore_client::{
    collection::{CollectionManager, CollectionManagerConfig},
    types::{SearchParams, SearchMode},
    error::Result,
};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Document {
    id: String,
    title: String,
    content: String,
}

#[tokio::main]
async fn main() -> Result<()> {
    // Initialize the client
    let config = CollectionManagerConfig::new("your-collection-id", "your-api-key");
    let client = CollectionManager::new(config).await?;

    // Perform a search
    let search_params = SearchParams::new("artificial intelligence")
        .with_mode(SearchMode::Hybrid)
        .with_limit(10);

    let results = client.search::<Document>(&search_params).await?;

    println!("Found {} documents", results.hits.len());
    for hit in results.hits {
        println!("- {} (score: {:.4})", hit.document.title, hit.score);
    }

    Ok(())
}
```
Collections are containers for your documents. Each collection can have multiple indexes for different types of searches:
```rust
use oramacore_client::collection::{CollectionManager, CreateIndexParams};

// Create an index
let create_params = CreateIndexParams {
    id: Some("articles".to_string()),
    embeddings: Some(serde_json::json!("automatic")),
};
client.index.create(create_params).await?;

// Insert documents (`documents` is any serializable list of your documents,
// e.g. a Vec of the Document struct from the quick start example)
let index = client.index.set("articles".to_string());
index.insert_documents(documents).await?;
```
The client supports three search modes:
```rust
use oramacore_client::types::{SearchParams, SearchMode};

// Vector search for semantic similarity
let vector_search = SearchParams::new("machine learning concepts")
    .with_mode(SearchMode::Vector)
    .with_limit(5);

// Hybrid search for comprehensive results
let hybrid_search = SearchParams::new("deep learning")
    .with_mode(SearchMode::Hybrid)
    .with_threshold(0.8);
```
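The third mode is plain full-text (keyword) search. A minimal sketch, assuming the enum exposes a full-text variant; the name `SearchMode::FullText` used here is an assumption, so check the crate's `SearchMode` definition for the exact spelling:

```rust
// Full-text keyword search (variant name assumed, not confirmed)
let fulltext_search = SearchParams::new("rust web framework")
    .with_mode(SearchMode::FullText)
    .with_limit(10);
```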
Create AI sessions for natural language interactions:
```rust
use oramacore_client::stream_manager::{AnswerConfig, CreateAiSessionConfig};
use oramacore_client::types::{LlmConfig, LlmProvider};

// Create an AI session
let session_config = CreateAiSessionConfig::new()
    .with_llm_config(LlmConfig {
        provider: LlmProvider::OpenAI,
        model: "gpt-4".to_string(),
    });

let ai_session = client.ai.create_ai_session().await?;

// Get an AI-powered answer
let answer_config = AnswerConfig::new("Explain quantum computing")
    .with_max_documents(5);

let answer = ai_session.answer(answer_config).await?;
println!("AI Response: {}", answer);
```
For real-time AI interactions, use streaming:
```rust
use futures::StreamExt;

let stream_config = AnswerConfig::new("What is the future of AI?");
let mut answer_stream = ai_session.answer_stream(stream_config).await?;

while let Some(chunk_result) = answer_stream.next().await {
    match chunk_result {
        Ok(chunk) => print!("{}", chunk),
        Err(e) => eprintln!("Stream error: {}", e),
    }
}
```
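Note that `StreamExt` comes from the `futures` crate, so add it to your `Cargo.toml` alongside the dependencies above. As a usage variant, the chunks can also be accumulated into a complete answer while streaming; this sketch assumes each chunk is plain text, as the `print!` call above suggests:

```rust
use futures::StreamExt;

let mut full_answer = String::new();
let mut stream = ai_session
    .answer_stream(AnswerConfig::new("Summarize the indexed articles"))
    .await?;

while let Some(chunk) = stream.next().await {
    match chunk {
        Ok(text) => {
            print!("{}", text);
            // Collect the streamed text into a single answer
            full_answer.push_str(&text.to_string());
        }
        Err(e) => eprintln!("Stream error: {}", e),
    }
}

println!("\nReceived {} characters in total", full_answer.len());
```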
The client supports two authentication methods:
For regular API keys:
```rust
let config = CollectionManagerConfig::new("collection-id", "your-api-key");
```
For private API keys (starting with "p_"):
let config = CollectionManagerConfig::new("collection-id", "p_your-private-key")
.with_auth_jwt_url("https://your-auth-endpoint.com");
For Orama Cloud projects:
```rust
use oramacore_client::cloud::{OramaCloud, ProjectManagerConfig, CloudSearchParams};

let cloud_config = ProjectManagerConfig::new("project-id", "api-key");
let cloud_client = OramaCloud::new(cloud_config).await?;

// Cloud-specific search with datasources
let search_params = CloudSearchParams::new(
    "search query",
    vec!["datasource1".to_string(), "datasource2".to_string()],
);

let results = cloud_client.search::<Document>(&search_params).await?;
```
Create and manage collections programmatically:
```rust
use oramacore_client::manager::{OramaCoreManager, OramaCoreManagerConfig, CreateCollectionParams};
use oramacore_client::types::{Language, EmbeddingsModel};

let manager_config = OramaCoreManagerConfig {
    url: "https://api.orama.com".to_string(),
    master_api_key: "your-master-key".to_string(),
};
let manager = OramaCoreManager::new(manager_config).await?;

// Create a new collection
let collection_params = CreateCollectionParams::new("my-collection")
    .with_description("My new collection")
    .with_language(Language::English)
    .with_embeddings_model(EmbeddingsModel::BgeBase);

let new_collection = manager.collection.create(collection_params).await?;
```
Manage AI system prompts:
```rust
// Get all system prompts
let prompts = client.system_prompts.get_all().await?;

// Get a specific prompt
let prompt = client.system_prompts.get("prompt-id").await?;
```
Execute custom tools:
```rust
use oramacore_client::collection::ExecuteToolsBody;

let tools_body = ExecuteToolsBody {
    tool_ids: Some(vec!["tool1".to_string()]),
    messages: vec![/* your messages */],
    llm_config: None,
};

let result = client.tools.execute::<serde_json::Value>(tools_body).await?;
```
Manage collection hooks:
```rust
use oramacore_client::collection::AddHookConfig;
use oramacore_client::types::Hook;

let hook_config = AddHookConfig {
    name: Hook::BeforeInsert,
    code: "// Your hook code".to_string(),
};

let hook_response = client.hooks.insert(hook_config).await?;
```
The repository includes comprehensive examples. Run one with:

```bash
cargo run --example basic_search
```
The client provides detailed error types:
```rust
use oramacore_client::error::{OramaError, Result};

match client.search::<Document>(&params).await {
    Ok(results) => println!("Found {} results", results.hits.len()),
    Err(OramaError::Auth(msg)) => eprintln!("Authentication error: {}", msg),
    Err(OramaError::Api { status, message }) => {
        eprintln!("API error {}: {}", status, message);
    }
    Err(e) => eprintln!("Other error: {}", e),
}
```
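Because the crate exposes its own `Result` alias, errors also propagate cleanly with `?` from your own functions. A small sketch reusing the `Document` struct from the quick start example, and assuming `search` takes `&self`:

```rust
use oramacore_client::collection::CollectionManager;
use oramacore_client::error::Result;
use oramacore_client::types::{SearchMode, SearchParams};

// Return the titles of the top matches, propagating any error with `?`
async fn top_titles(client: &CollectionManager, query: &str) -> Result<Vec<String>> {
    let params = SearchParams::new(query)
        .with_mode(SearchMode::Hybrid)
        .with_limit(3);

    let results = client.search::<Document>(&params).await?;
    Ok(results.hits.into_iter().map(|hit| hit.document.title).collect())
}
```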
You can configure the client using environment variables:
```bash
export ORAMA_MASTER_API_KEY="your-master-key"
export ORAMA_PROJECT_ID="your-project-id"
export ORAMA_COLLECTION_API_KEY="your-collection-key"
```
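The sketch below does not assume the client reads these variables automatically; it reads them explicitly with `std::env` and passes them to the config constructors shown earlier:

```rust
use std::env;

use oramacore_client::collection::{CollectionManager, CollectionManagerConfig};
use oramacore_client::error::Result;

// Build a collection client from an environment variable
async fn client_from_env(collection_id: &str) -> Result<CollectionManager> {
    let api_key = env::var("ORAMA_COLLECTION_API_KEY")
        .expect("ORAMA_COLLECTION_API_KEY must be set");

    let config = CollectionManagerConfig::new(collection_id, api_key.as_str());
    CollectionManager::new(config).await
}
```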
For custom deployments:
```rust
use oramacore_client::collection::ClusterConfig;

let config = CollectionManagerConfig::new("collection-id", "api-key")
    .with_cluster(
        ClusterConfig::new()
            .with_read_url("https://your-reader.com")
            .with_writer_url("https://your-writer.com"),
    );
```
The client is fully asynchronous and designed for high-performance applications.
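Because every call is async, independent requests can run concurrently on the Tokio runtime. A minimal sketch reusing the `Document` struct from the quick start example, and assuming `search` takes `&self` so the client can be shared between concurrent calls:

```rust
use oramacore_client::collection::CollectionManager;
use oramacore_client::error::Result;
use oramacore_client::types::{SearchMode, SearchParams};

// Run two independent searches concurrently instead of sequentially
async fn concurrent_searches(client: &CollectionManager) -> Result<()> {
    let ai_params = SearchParams::new("artificial intelligence")
        .with_mode(SearchMode::Hybrid)
        .with_limit(5);
    let rust_params = SearchParams::new("rust programming")
        .with_mode(SearchMode::Hybrid)
        .with_limit(5);

    // Await both futures at the same time
    let (ai_results, rust_results) = tokio::try_join!(
        client.search::<Document>(&ai_params),
        client.search::<Document>(&rust_params),
    )?;

    println!(
        "AI hits: {}, Rust hits: {}",
        ai_results.hits.len(),
        rust_results.hits.len()
    );
    Ok(())
}
```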
Run the test suite:
```bash
# Run all tests
cargo test

# Run with output
cargo test -- --nocapture

# Run a specific test
cargo test test_search_operations
```
Contributions are welcome! Please read our contributing guidelines and submit pull requests to the main repository.