| Crates.io | adbc-taos |
| lib.rs | adbc-taos |
| version | 0.1.0 |
| created_at | 2026-01-03 06:40:03.917118+00 |
| updated_at | 2026-01-03 06:40:03.917118+00 |
| description | A high-performance Rust driver for TDengine time-series database, implementing the ADBC standard |
| homepage | |
| repository | https://github.com/greatwallisme/adbc-taos.git |
| max_upload_size | |
| id | 2019696 |
| size | 289,427 |
ADBC-Taos is a high-performance Rust driver for the TDengine time-series database, implementing the Arrow Database Connectivity (ADBC) standard. It provides zero-copy, Arrow-native data access, eliminating unnecessary data conversion overhead for analytical workloads.
Connection pooling (via deadpool) is available for high-concurrency scenarios.

Add this to your Cargo.toml:
[dependencies]
adbc-taos = "0.1"
adbc-core = "0.21"
arrow-array = "57"
use adbc_core::{Database, Connection, Statement, Driver, Optionable};
use adbc_core::options::{OptionDatabase, OptionValue};
fn main() -> Result<(), Box<dyn std::error::Error>> {
// 1. Create driver and configure database
let driver = adbc_taos::TaosDriver::default();
let mut db = driver.new_database()?;
db.set_option(
OptionDatabase::Uri,
OptionValue::String("taos://root:taosdata@127.0.0.1:6030".into())
)?;
// 2. Establish connection
let mut conn = db.new_connection()?;
// 3. Execute query
let mut stmt = conn.new_statement()?;
stmt.set_sql_query("SELECT * FROM my_db.my_table LIMIT 10")?;
let reader = stmt.execute()?;
// 4. Process results as Arrow RecordBatches
for batch in reader {
let batch = batch?;
println!("Received batch with {} rows", batch.num_rows());
// Access columns by index
for col_idx in 0..batch.num_columns() {
let col = batch.column(col_idx);
println!(" Column {}: {} rows, type: {:?}",
col_idx,
col.len(),
col.data_type());
}
// Example: Access first column as string if it's NCHAR (note: VARCHAR arrives as Binary)
use arrow_array::{Array, StringArray};
if let Some(str_col) = batch.column(0).as_any().downcast_ref::<StringArray>() {
for row_idx in 0..str_col.len() {
if !str_col.is_null(row_idx) {
println!(" Row[{}]: {}", row_idx, str_col.value(row_idx));
}
}
}
}
Ok(())
}
The TDengine DSN (Data Source Name) format:
taos://[user:password@]host[:port][/database][?params]
// Default credentials (root/taosdata)
"taos://127.0.0.1:6030"
// With credentials
"taos://admin:pass@localhost:6030"
// With database
"taos://root:taosdata@localhost:6030/mydb"
// All options
"taos://admin:secret@192.168.1.100:6030/production?timezone=UTC"
You can also set options programmatically:
let mut db = adbc_taos::TaosDatabase::default();
// Set individual options
db.set_option(OptionDatabase::Uri, OptionValue::String("taos://127.0.0.1:6030".into()))?;
db.set_option(OptionDatabase::User, OptionValue::String("admin".into()))?;
db.set_option(OptionDatabase::Password, OptionValue::String("pass".into()))?;
use adbc_core::{Connection, Statement};
use arrow_array::{Array, Float64Array, StringArray, TimestampMillisecondArray};
let mut conn = db.new_connection()?;
let mut stmt = conn.new_statement()?;
stmt.set_sql_query("SELECT ts, temperature, location FROM sensors WHERE location = 'room-1' LIMIT 100")?;
let reader = stmt.execute()?;
for batch in reader {
let batch = batch?;
// Access column 0: timestamp
let ts_col = batch.column(0)
.as_any()
.downcast_ref::<TimestampMillisecondArray>()
.expect("Column 0 should be timestamp");
// Access column 1: temperature (double)
let temp_col = batch.column(1)
.as_any()
.downcast_ref::<Float64Array>()
.expect("Column 1 should be float");
// Access column 2: location (string)
let loc_col = batch.column(2)
.as_any()
.downcast_ref::<StringArray>()
.expect("Column 2 should be string");
// Iterate through rows, skipping any null values
for i in 0..batch.num_rows() {
if ts_col.is_null(i) || temp_col.is_null(i) || loc_col.is_null(i) {
continue;
}
println!("{} - {} - {}°C",
ts_col.value(i),
loc_col.value(i),
temp_col.value(i));
}
}
let mut stmt = conn.new_statement()?;
// Single row insertion
stmt.set_sql_query("INSERT INTO my_db.data VALUES (NOW, 25.5, 'active')")?;
// execute_update reports the affected row count (it may be unknown, depending on the driver)
let affected_rows = stmt.execute_update()?;
println!("Inserted {:?} rows", affected_rows);
// Batch insertion
let insert_sql = "
INSERT INTO my_db.data VALUES
(NOW, 25.5, 'active'),
(NOW + 1s, 26.1, 'active'),
(NOW + 2s, 24.8, 'idle')
";
stmt.set_sql_query(insert_sql)?;
let affected_rows = stmt.execute_update()?;
let mut stmt = conn.new_statement()?;
// Prepare statement with parameters
stmt.set_sql_query("SELECT * FROM sensors WHERE temperature > ? AND location = ?")?;
stmt.prepare()?;
// Note: Parameter binding uses Arrow RecordBatches
// See examples/prepared.rs for complete implementation
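Parameter binding is not spelled out here, so the following is only a sketch of what it might look like via adbc_core's bind, assuming the driver accepts a one-row RecordBatch whose columns line up with the positional ? placeholders (the field names are illustrative; see examples/prepared.rs for the real flow):

use std::sync::Arc;
use arrow_array::{ArrayRef, Float64Array, RecordBatch, StringArray};

// Hypothetical parameter batch: one row, one column per `?` placeholder,
// in the same order the placeholders appear in the query.
let params = RecordBatch::try_from_iter(vec![
    ("temperature", Arc::new(Float64Array::from(vec![25.0])) as ArrayRef),
    ("location", Arc::new(StringArray::from(vec!["room-1"])) as ArrayRef),
])?;

stmt.bind(params)?;           // bind the parameters as an Arrow RecordBatch
let reader = stmt.execute()?; // run the prepared query with the bound values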
For high-concurrency scenarios, enable connection pooling:
use adbc_taos::TaosDatabase;
let mut db = TaosDatabase::default();
db.uri = "taos://root:taosdata@127.0.0.1:6030".into();
// Enable pooling with max 10 connections
db.set_pool_size(10);
// All subsequent connections use the pool
for _ in 0..5 {
let conn = db.new_connection()?;
// Use connection in parallel tasks...
}
// Pool is disabled by default (pool_size = 0)
Benefits: connections are reused instead of being re-established for every request, and the pool bounds the number of simultaneous connections to the server.
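If pooled connections feed worker threads, each worker can take its own connection and statement. A minimal sketch, assuming TaosConnection is Send (check the crate docs before relying on this; the query is illustrative):

use adbc_core::{Connection, Database, Statement};

std::thread::scope(|s| {
    for worker in 0..5 {
        // Each worker checks out its own pooled connection.
        let mut conn = db.new_connection().expect("pooled connection");
        s.spawn(move || {
            let mut stmt = conn.new_statement().expect("statement");
            stmt.set_sql_query("SELECT COUNT(*) FROM my_db.my_table").expect("set query");
            let reader = stmt.execute().expect("execute");
            // Count rows across all batches returned to this worker.
            let rows: usize = reader.map(|b| b.map(|b| b.num_rows()).unwrap_or(0)).sum();
            println!("worker {worker}: read {rows} rows");
        });
    }
});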
use adbc_core::Connection;
use arrow_array::Array;
// Get table schema
let schema = conn.get_table_schema(Some("my_db"), None, "my_table")?;
println!("Table schema:");
for field in schema.fields() {
println!(" - {}: {:?}", field.name(), field.data_type());
}
// Check if table is a supertable
let is_super = conn.is_supertable(Some("my_db"), "my_supertable")?;
println!("Is supertable: {}", is_super);
// Get supertable tags
if is_super {
let tags = conn.get_table_tags(Some("my_db"), "my_supertable")?;
for batch in tags {
let batch = batch?;
let tag_name_col = batch.column(0)
.as_any()
.downcast_ref::<arrow_array::StringArray>();
let tag_type_col = batch.column(1)
.as_any()
.downcast_ref::<arrow_array::StringArray>();
if let (Some(names), Some(types)) = (tag_name_col, tag_type_col) {
for i in 0..names.len() {
let name = if names.is_null(i) { "" } else { names.value(i) };
let dtype = if types.is_null(i) { "" } else { types.value(i) };
println!(" Tag: {} ({})", name, dtype);
}
}
}
}
// Get server version
println!("TDengine version: {}", conn.server_version());
| TDengine Type | Arrow Type | Notes |
|---|---|---|
| BOOL | Boolean | |
| TINYINT | Int8 | Signed 8-bit |
| SMALLINT | Int16 | Signed 16-bit |
| INT | Int32 | Signed 32-bit |
| BIGINT | Int64 | Signed 64-bit |
| TINYINT UNSIGNED | UInt8 | Unsigned 8-bit |
| SMALLINT UNSIGNED | UInt16 | Unsigned 16-bit |
| INT UNSIGNED | UInt32 | Unsigned 32-bit |
| BIGINT UNSIGNED | UInt64 | Unsigned 64-bit |
| FLOAT | Float32 | IEEE 754 single precision |
| DOUBLE | Float64 | IEEE 754 double precision |
| VARCHAR | Binary | Variable-length binary |
| VARBINARY | Binary | Variable-length binary |
| NCHAR | Utf8 | Unicode string |
| JSON | Utf8 | JSON as string |
| TIMESTAMP | Timestamp(Millisecond, None) | Millisecond precision |
| GEOMETRY | Binary | Spatial data type |
| DECIMAL | Decimal128(38, 0) | Maximum precision |
| BLOB | Binary | Large binary objects |
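Note that VARCHAR and VARBINARY columns arrive as Arrow Binary rather than Utf8, so text consumers need an explicit conversion. A minimal sketch (the column index is illustrative and assumes a batch from one of the queries above):

use arrow_array::{Array, BinaryArray};

// VARCHAR/VARBINARY data arrives as Arrow Binary; convert the bytes to text as needed.
if let Some(bin_col) = batch.column(0).as_any().downcast_ref::<BinaryArray>() {
    for i in 0..bin_col.len() {
        if !bin_col.is_null(i) {
            let text = String::from_utf8_lossy(bin_col.value(i));
            println!("Row[{}]: {}", i, text);
        }
    }
}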
Supported Arrow types for prepared statement parameters:
- BooleanArray
- Int8Array, Int16Array, Int32Array, Int64Array
- UInt8Array, UInt16Array, UInt32Array, UInt64Array
- Float32Array, Float64Array
- StringArray, BinaryArray
- TimestampMillisecondArray, TimestampMicrosecondArray, TimestampNanosecondArray

ADBC-Taos is optimized for high-throughput data processing:
- RawBlock API instead of row iteration

See benches/conversion_benchmark.rs for performance measurements. Recent optimizations (commit 6606c74) achieved significant improvements by replacing row-based iteration with columnar block access.
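On the consumer side the same columnar idea applies: operate on whole Arrow buffers instead of per-row lookups. A small sketch (column index and type are illustrative; values() bypasses the null bitmap, so it assumes a column without nulls):

use arrow_array::{Array, Float64Array};

// Columnar access: read the whole value buffer as a slice instead of per-row calls.
if let Some(temps) = batch.column(1).as_any().downcast_ref::<Float64Array>() {
    let values: &[f64] = temps.values();
    let sum: f64 = values.iter().sum();
    println!("mean temperature: {}", sum / values.len().max(1) as f64);
}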
Run benchmarks:
cargo bench
# Run all tests
cargo test
# Run tests with output
cargo test -- --nocapture
# Run specific test
cargo test test_basic_query
# Run integration tests only
cargo test --test integration_tests
Configure TDengine connection in .env (copy from .env.example):
TAOS_HOST=127.0.0.1
TAOS_PORT=6030
TAOS_USER=root
TAOS_PASSWORD=taosdata
TAOS_DATABASE=demo # Optional
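A hypothetical helper for turning these variables into a DSN might look like this (assumes the dotenvy crate for loading .env; the function name is illustrative):

// Hypothetical helper: build a DSN from the .env variables above.
fn dsn_from_env() -> String {
    dotenvy::dotenv().ok(); // load .env if present, ignore if missing
    let host = std::env::var("TAOS_HOST").unwrap_or_else(|_| "127.0.0.1".into());
    let port = std::env::var("TAOS_PORT").unwrap_or_else(|_| "6030".into());
    let user = std::env::var("TAOS_USER").unwrap_or_else(|_| "root".into());
    let pass = std::env::var("TAOS_PASSWORD").unwrap_or_else(|_| "taosdata".into());
    format!("taos://{user}:{pass}@{host}:{port}")
}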
tests/
├── common/
│ └── mod.rs # Test utilities and fixtures
├── integration_tests.rs # Main integration test suite
└── readme_examples.rs # README example validation
# Generate and open API documentation
cargo doc --open
# Generate documentation without dependencies
cargo doc --no-deps
# Include private items
cargo doc --document-private-items
Explore the examples/ directory for complete working examples:
cargo run --example basic # Basic query
cargo run --example query # Query execution
cargo run --example insert # Data insertion
cargo run --example metadata # Schema introspection
cargo run --example table_ops # Table operations
adbc-taos/
├── src/
│ ├── lib.rs # Public API exports
│ ├── driver.rs # TaosDriver: Entry point
│ ├── database.rs # TaosDatabase: Configuration and pooling
│ ├── connection.rs # TaosConnection: Active connections
│ ├── statement.rs # TaosStatement: SQL execution
│ ├── reader.rs # RecordBatch readers (optimized)
│ ├── pool.rs # Connection pooling
│ ├── error.rs # Error types
│ ├── types.rs # Type conversion utilities
│ └── utils.rs # Async runtime handling
├── examples/ # Usage examples
├── tests/ # Integration tests
├── benches/ # Performance benchmarks
├── CLAUDE.md # Development guidelines
├── PROJECT_INDEX.md # Detailed architecture docs
└── README.md # This file
- adbc_core traits for interoperability

We welcome contributions! Please see our development guidelines:
- CLAUDE.md for development workflow
- PROJECT_INDEX.md for architecture details
- Code quality checks (cargo clippy and cargo fmt)

# Clone repository
git clone https://github.com/your-org/adbc-taos.git
cd adbc-taos
# Install dependencies
cargo build
# Run tests
cargo test
# Run linter
cargo clippy
# Format code
cargo fmt
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Project Status: Phase 2 Complete - Full ADBC implementation with optimized data access
For detailed architecture and API reference, see PROJECT_INDEX.md