| Crates.io | libsql-orm |
| lib.rs | libsql-orm |
| version | 0.2.5 |
| created_at | 2025-07-14 20:25:37.110745+00 |
| updated_at | 2026-01-07 19:04:30.282531+00 |
| description | A powerful, async-first ORM for Turso Database with first-class support for Cloudflare Workers and WebAssembly environments. Features include automatic boolean conversion, upsert operations, built-in logging, migrations, and comprehensive query building. |
| homepage | https://github.com/ayonsaha2011/libsql-orm |
| repository | https://github.com/ayonsaha2011/libsql-orm |
| max_upload_size | |
| id | 1752236 |
| size | 242,361 |
# libsql-orm
A powerful, async-first ORM for Turso Database with first-class support for Cloudflare Workers and WebAssembly environments.
⚠️ Disclaimer: This library is in early development and not fully tested in production environments. Use at your own risk. Please report any issues you encounter and feel free to contribute via pull requests - we're happy to address them and welcome community contributions!
- #[derive(Model)] derive macro
- #[table_name("custom")] attribute support
- #[orm_column(...)] for column customization

Add this to your Cargo.toml:
[dependencies]
libsql-orm = { version = "0.2.4", features = ["cloudflare"] }
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4", features = ["serde"] }
# Required for Cloudflare Workers support - use git version of libsql with newer worker dependency
[patch.crates-io]
libsql = { git = "https://github.com/ayonsaha2011/libsql", features = ["cloudflare"] }
For Cloudflare Workers compatibility, you need to use a patched version of libsql that includes:
- worker dependency compatibility

The patch ensures seamless integration with Cloudflare Workers' runtime environment.
use libsql_orm::{Model, Database, FilterOperator, Filter, Value};
use serde::{Deserialize, Serialize};
use chrono::{DateTime, Utc};
#[derive(Model, Debug, Clone, Serialize, Deserialize)]
#[table_name("users")] // Custom table name (optional)
struct User {
pub id: Option<i64>,
pub name: String,
pub email: String,
pub age: Option<i32>,
pub is_active: bool, // ✅ Automatic boolean conversion
pub is_verified: bool, // ✅ Works with any boolean field
pub created_at: DateTime<Utc>,
}
// In your async function
async fn example() -> Result<(), Box<dyn std::error::Error>> {
// Connect to database
let db = Database::new_connect("turso://your-db.turso.io", "your-auth-token").await?;
// Create a user
let user = User {
id: None,
name: "Alice".to_string(),
email: "alice@example.com".to_string(),
age: Some(30),
is_active: true,
is_verified: false,
created_at: Utc::now(),
};
// Save to database
let saved_user = user.create(&db).await?;
// Find users
let users = User::find_all(&db).await?;
// Query with conditions
let active_users = User::find_where(
FilterOperator::Single(Filter::eq("is_active", true)),
&db
).await?;
Ok(())
}
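Updates and deletes follow the same pattern. Below is a minimal sketch that reuses the instance update method and the find_by_id / delete_where methods shown in the Workers examples later in this README; the function name is illustrative and error handling is kept to a minimum.
use libsql_orm::{Model, Database, FilterOperator, Filter};

async fn update_and_delete(db: &Database) -> Result<(), Box<dyn std::error::Error>> {
    // Load an existing record by primary key (None if it does not exist)
    if let Some(mut user) = User::find_by_id(1i64, db).await? {
        // Modify a field and persist the change
        user.age = Some(31);
        let updated = user.update(db).await?;
        println!("Updated {}", updated.name);
    }
    // Delete all rows matching a filter
    User::delete_where(FilterOperator::Single(Filter::eq("id", 1i64)), db).await?;
    Ok(())
}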
First, ensure your Cargo.toml includes the necessary features and patches:
[dependencies]
libsql-orm = { version = "0.2.4", features = ["cloudflare"] }
worker = ">=0.7.0"
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4", features = ["serde"] }
# Use git version of libsql with newer worker dependency
[patch.crates-io]
libsql = { git = "https://github.com/ayonsaha2011/libsql", features = ["cloudflare"] }
Then in your worker code:
use worker::*;
use libsql_orm::{Model, Database};
use serde::{Serialize, Deserialize};
use chrono::{DateTime, Utc};
#[derive(Model, Debug, Clone, Serialize, Deserialize)]
#[table_name("blog_posts")] // Custom table name
struct Post {
pub id: Option<i64>,
pub title: String,
pub content: String,
pub published: bool, // ✅ Boolean automatically converted from SQLite
pub featured: bool, // ✅ Multiple boolean fields supported
pub created_at: DateTime<Utc>,
}
#[event(fetch)]
async fn fetch(mut req: Request, env: Env, _ctx: Context) -> Result<Response> {
console_error_panic_hook::set_once();
// Get database credentials from environment
let database_url = env.var("TURSO_DATABASE_URL")?.to_string();
let auth_token = env.var("TURSO_AUTH_TOKEN")?.to_string();
// Connect to database
let db = Database::new_connect(&database_url, &auth_token).await
.map_err(|e| format!("Database connection failed: {}", e))?;
// Handle the request
match req.method() {
Method::Get => {
let posts = Post::find_all(&db).await
.map_err(|e| format!("Query failed: {}", e))?;
Response::from_json(&posts)
}
Method::Post => {
let post: Post = req.json().await?;
let saved_post = post.create(&db).await
.map_err(|e| format!("Create failed: {}", e))?;
Response::from_json(&saved_post)
}
_ => Response::error("Method not allowed", 405)
}
}
For more complex applications, you can integrate libsql-orm with Axum for better routing and state management.
Key Requirements:
- crate-type = ["cdylib"] for Cloudflare Workers
- http and axum features for the worker crate
- default-features = false for WASM compatibility

Setup:
[package]
name = "my-cloudflare-app"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib"]
[dependencies]
worker = { version = "0.7", features = ['http', 'axum'] }
worker-macros = { version = "0.7", features = ['http'] }
axum = { version = "0.8", default-features = false, features = ["json", "macros"] }
tower-service = "0.3.3"
libsql-orm = { version = "0.2.4", features = ["cloudflare"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = { version = "0.4", features = ["serde"] }
console_error_panic_hook = "0.1"
# Use git version of libsql with newer worker dependency
[patch.crates-io]
libsql = { git = "https://github.com/ayonsaha2011/libsql", features = ["cloudflare"] }
use axum::{
extract::{Path, State},
http::StatusCode,
response::Json,
routing::{get, post, put, delete},
Router,
};
use tower_service::Service;
use worker::*;
use std::result::Result;
use libsql_orm::{Model, Database, FilterOperator, Filter};
use serde::{Serialize, Deserialize};
use chrono::{DateTime, Utc};
use std::sync::Arc;
// Application state
#[derive(Clone)]
pub struct AppState {
pub db: Arc<Database>,
}
impl AppState {
pub async fn new(env: &Env) -> worker::Result<Self> {
// Get database credentials from environment
let database_url = env.var("TURSO_DATABASE_URL")?.to_string();
let auth_token = env.var("TURSO_AUTH_TOKEN")?.to_string();
// Connect to database
let db = Database::new_connect(&database_url, &auth_token).await
.map_err(|e| format!("Database connection failed: {}", e))?;
Ok(Self {
db: Arc::new(db),
})
}
}
// User model
#[derive(Model, Debug, Clone, Serialize, Deserialize)]
#[table_name("users")]
struct User {
pub id: Option<i64>,
pub name: String,
pub email: String,
pub is_active: bool,
pub created_at: DateTime<Utc>,
}
// Request/Response DTOs
#[derive(Deserialize)]
struct CreateUserRequest {
pub name: String,
pub email: String,
}
#[derive(Serialize)]
struct ApiResponse<T> {
pub success: bool,
pub data: Option<T>,
pub error: Option<String>,
}
impl<T> ApiResponse<T> {
fn success(data: T) -> Self {
Self { success: true, data: Some(data), error: None }
}
fn error(error: String) -> Self {
Self { success: false, data: None, error: Some(error) }
}
}
#[derive(Serialize)]
struct ErrorResponse {
pub error: String,
pub message: String,
}
// Route handlers
#[worker::send]
async fn get_users(State(state): State<AppState>) -> Result<Json<ApiResponse<Vec<User>>>, (StatusCode, Json<ErrorResponse>)> {
match User::find_all(&state.db).await {
Ok(users) => Ok(Json(ApiResponse::success(users))),
Err(e) => {
console_log!("Error fetching users: {}", e);
Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse {
error: "internal_server_error".to_string(),
message: "Internal server error".to_string()
})))
}
}
}
#[worker::send]
async fn get_user_by_id(
State(state): State<AppState>,
Path(id): Path<i64>
) -> Result<Json<ApiResponse<User>>, (StatusCode, Json<ErrorResponse>)> {
match User::find_by_id(id, &state.db).await {
Ok(Some(user)) => Ok(Json(ApiResponse::success(user))),
Ok(None) => Err((StatusCode::NOT_FOUND, Json(ErrorResponse {
error: "user_not_found".to_string(),
message: "User not found".to_string()
}))),
Err(e) => {
console_log!("Error fetching user {}: {}", id, e);
Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse {
error: "internal_server_error".to_string(),
message: "Internal server error".to_string()
})))
}
}
}
#[worker::send]
async fn create_user(
State(state): State<AppState>,
Json(req): Json<CreateUserRequest>
) -> Result<(StatusCode, Json<ApiResponse<User>>), (StatusCode, Json<ErrorResponse>)> {
let user = User {
id: None,
name: req.name,
email: req.email,
is_active: true,
created_at: Utc::now(),
};
match user.create(&state.db).await {
Ok(created_user) => Ok((
StatusCode::CREATED,
Json(ApiResponse::success(created_user))
)),
Err(e) => {
console_log!("Error creating user: {}", e);
Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse {
error: "internal_server_error".to_string(),
message: "Internal server error".to_string()
})))
}
}
}
#[worker::send]
async fn update_user(
State(state): State<AppState>,
Path(id): Path<i64>,
Json(mut user): Json<User>
) -> Result<Json<ApiResponse<User>>, (StatusCode, Json<ErrorResponse>)> {
user.id = Some(id);
match user.update(&state.db).await {
Ok(updated_user) => Ok(Json(ApiResponse::success(updated_user))),
Err(e) => {
console_log!("Error updating user {}: {}", id, e);
Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse {
error: "internal_server_error".to_string(),
message: "Internal server error".to_string()
})))
}
}
}
#[worker::send]
async fn delete_user(
State(state): State<AppState>,
Path(id): Path<i64>
) -> Result<Json<ApiResponse<String>>, (StatusCode, Json<ErrorResponse>)> {
let filter = FilterOperator::Single(Filter::eq("id", id));
match User::delete_where(filter, &state.db).await {
Ok(_) => Ok(Json(ApiResponse::success("User deleted successfully".to_string()))),
Err(e) => {
console_log!("Error deleting user {}: {}", id, e);
Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse {
error: "internal_server_error".to_string(),
message: "Internal server error".to_string()
})))
}
}
}
#[worker::send]
async fn get_active_users(State(state): State<AppState>) -> Result<Json<ApiResponse<Vec<User>>>, (StatusCode, Json<ErrorResponse>)> {
let filter = FilterOperator::Single(Filter::eq("is_active", true));
match User::find_where(filter, &state.db).await {
Ok(users) => Ok(Json(ApiResponse::success(users))),
Err(e) => {
console_log!("Error fetching active users: {}", e);
Err((StatusCode::INTERNAL_SERVER_ERROR, Json(ErrorResponse {
error: "internal_server_error".to_string(),
message: "Internal server error".to_string()
})))
}
}
}
// Create router with all routes
fn create_router() -> Router<AppState> {
Router::new()
.route("/users", get(get_users).post(create_user))
.route("/users/active", get(get_active_users))
.route("/users/:id", get(get_user_by_id).put(update_user).delete(delete_user))
}
// Main Cloudflare Workers handler
#[event(fetch)]
async fn fetch(
req: HttpRequest,
env: Env,
_ctx: Context,
) -> worker::Result<axum::http::Response<axum::body::Body>> {
console_error_panic_hook::set_once();
// Initialize application state
let app_state = match AppState::new(&env).await {
Ok(state) => state,
Err(e) => {
console_log!("Failed to initialize application state: {}", e);
return Ok(axum::http::Response::builder()
.status(500)
.header("content-type", "application/json")
.body(axum::body::Body::from(
"{\"error\":\"initialization_failed\",\"message\":\"Failed to initialize application\"}"
))?
);
}
};
// Create router
let mut router = create_router().with_state(app_state);
// Handle the request
Ok(router.call(req).await?)
}
This example demonstrates:
- Application state setup with AppState::new()
- console_log! macro for debugging
- #[worker::send] attributes for optimal performance

| Method | Endpoint | Description |
|---|---|---|
| GET | /users | Get all users |
| POST | /users | Create a new user |
| GET | /users/{id} | Get user by ID |
| PUT | /users/{id} | Update user |
| DELETE | /users/{id} | Delete user |
| GET | /users/active | Get all active users |
To deploy this Cloudflare Workers application, you'll need a wrangler.toml configuration:
name = "my-cloudflare-app"
main = "build/worker/shim.mjs"
compatibility_date = "2023-05-18"
[env.production.vars]
TURSO_DATABASE_URL = "your-database-url"
TURSO_AUTH_TOKEN = "your-auth-token"
[[env.production.rules]]
type = "CompiledWasm"
globs = ["**/*.wasm"]
fallthrough = true
Deploy commands:
# Install dependencies
npm install -g wrangler
# Deploy to Cloudflare Workers
wrangler deploy
Use the #[table_name("custom_name")] attribute to specify custom table names:
#[derive(Model, Serialize, Deserialize)]
#[table_name("user_accounts")] // Custom table name
struct User {
pub id: Option<i64>,
pub username: String,
pub email: String,
}
// Default table name would be "user" (struct name lowercase)
// With attribute, table name is "user_accounts"
assert_eq!(User::table_name(), "user_accounts");
Benefits:
- Map a model to an existing or prefixed table (e.g., tenant_users) instead of the default name derived from the struct

libsql-orm automatically handles boolean conversion between SQLite and Rust:
use libsql_orm::{Model, FilterOperator, Filter, Value};
use serde::{Serialize, Deserialize};
#[derive(Model, Serialize, Deserialize)]
struct User {
pub id: Option<i64>,
pub is_active: bool, // ✅ SQLite INTEGER(0/1) ↔ Rust bool
pub is_verified: bool, // ✅ Automatic conversion
pub has_premium: bool, // ✅ Works with any boolean field name
pub can_edit: bool, // ✅ No configuration needed
pub enabled: bool, // ✅ Type-safe operations
}
// All boolean operations work seamlessly
let user = User::find_where(
FilterOperator::Single(Filter::eq("is_active", true)),
&db
).await?;
// JSON serialization works correctly
let json = serde_json::to_string(&user)?; // ✅ Booleans as true/false
let deserialized: User = serde_json::from_str(&json)?; // ✅ No errors
Key Features:
- SQLite INTEGER (0/1) values convert to and from Rust bool automatically
- Works with any boolean field name, with no per-field configuration
- Booleans serialize to and deserialize from JSON as true/false
Customize column properties with #[orm_column(...)]:
use libsql_orm::Model;
use serde::{Serialize, Deserialize};
#[derive(Model, Serialize, Deserialize)]
struct Product {
#[orm_column(type = "INTEGER PRIMARY KEY AUTOINCREMENT")]
pub id: Option<i64>,
#[orm_column(not_null, unique)]
pub sku: String,
#[orm_column(type = "REAL CHECK(price >= 0)")]
pub price: f64,
#[orm_column(type = "BOOLEAN DEFAULT TRUE")]
pub is_available: bool, // ✅ Boolean with DEFAULT constraint
}
use libsql_orm::{QueryBuilder, FilterOperator, Filter, Sort, SortOrder, Pagination};
// Complex query with filtering and pagination
let query = QueryBuilder::new("users")
.select(&["id", "name", "email"])
.r#where(FilterOperator::Single(Filter::ge("age", 18i64)))
.order_by(Sort::new("created_at", SortOrder::Desc))
.limit(10)
.offset(20);
let (sql, params) = query.build()?;
use libsql_orm::{Pagination, PaginatedResult};
let pagination = Pagination::new(1, 10); // page 1, 10 items per page
let result: PaginatedResult<User> = User::find_paginated(&pagination, &db).await?;
// Access pagination info
// Page: result.pagination.page
// Total pages: result.pagination.total_pages.unwrap_or(0)
// Total items: result.pagination.total.unwrap_or(0)
for user in result.data {
// Process user: user.name
}
// Bulk insert
let users = vec![
User { /* ... */ },
User { /* ... */ },
User { /* ... */ },
];
let saved_users = User::bulk_create(&users, &db).await?;
// Bulk delete
let ids_to_delete = vec![1, 2, 3, 4, 5];
let deleted_count = User::bulk_delete(&ids_to_delete, &db).await?;
use libsql_orm::Aggregate;
// Count users
let total_users = User::count(&db).await?;
// Average age
let avg_age = User::aggregate(
Aggregate::Avg,
"age",
None,
&db
).await?;
// Count with filter
let active_users_count = User::count_where(
FilterOperator::Single(Filter::eq("is_active", true)),
&db
).await?;
use libsql_orm::{SearchFilter, Pagination};
let search = SearchFilter::new(
"john",
vec!["name", "email"]
);
// Optional pagination
let pagination = Pagination::new(1, 10);
let results = User::search(&search, Some(&pagination), &db).await?;
libsql-orm provides intelligent create-or-update operations:
use libsql_orm::{Model, Database};
use chrono::{DateTime, Utc};
// Create or update based on primary key
let mut user = User {
id: Some(123), // If record exists, it will be updated
name: "John Doe".to_string(),
email: "john@example.com".to_string(),
is_active: true,
created_at: Utc::now(),
};
// Automatically decides whether to create or update
let saved_user = user.create_or_update(&db).await?;
// Upsert based on unique constraints (e.g., email)
let user = User {
id: None, // Primary key not set
name: "Jane Smith".to_string(),
email: "jane@example.com".to_string(), // Unique field
is_active: true,
created_at: Utc::now(),
};
// Will update existing record with this email, or create new if not found
let saved_user = user.upsert(&["email"], &db).await?;
// Multiple unique constraints
let saved_user = user.upsert(&["email", "username"], &db).await?;
libsql-orm provides comprehensive MCP server integration for AI-powered database interactions. The MCP protocol enables seamless communication between AI assistants and your Turso database.
The MCP server exposes the ORM's model operations as callable tools. Example tool call:
{
"tool": "model_find_where",
"arguments": {
"model": "User",
"filter": {
"type": "And",
"filters": [
{
"type": "Single",
"filter": {"column": "is_active", "operator": "Eq", "value": true}
},
{
"type": "Single",
"filter": {"column": "age", "operator": "Gt", "value": 18}
}
]
}
}
}
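For comparison, the filter above could be written with the Rust filter types used earlier in this README roughly as sketched below; the And variant and the Filter::gt constructor are assumptions inferred from the JSON shape and from the eq/ge constructors shown earlier, not confirmed API.
use libsql_orm::{Filter, FilterOperator};

// Assumed Rust equivalent of the JSON filter above: is_active = true AND age > 18.
// FilterOperator::And and Filter::gt are inferred here, not confirmed API.
let filter = FilterOperator::And(vec![
    FilterOperator::Single(Filter::eq("is_active", true)),
    FilterOperator::Single(Filter::gt("age", 18i64)),
]);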
📖 Complete MCP Documentation - Comprehensive guide with all 40+ tools, examples, and best practices
libsql-orm is built from the ground up for WebAssembly environments:
- Uses libsql WASM bindings for database connectivity
- Optimized async runtime for edge computing
- Minimal binary size with selective feature compilation
- Compatible with Cloudflare Workers, Deno Deploy, and other edge platforms
libsql-orm works great with:
- Turso (hosted libSQL databases)
- Cloudflare Workers and other WASM/edge runtimes
- Axum (see the integration example above)
Contributions are welcome! Please feel free to submit issues and pull requests.
If you find this library helpful and would like to support its development, consider making a donation:
Every contribution, no matter the size, helps make this library better for everyone! 🙏
This project is licensed under the MIT License - see the LICENSE file for details.
Need help? Open an issue on the GitHub repository.