| Crates.io | scirs2-io |
| lib.rs | scirs2-io |
| version | 0.1.0-beta.2 |
| created_at | 2025-04-12 19:41:05.580055+00 |
| updated_at | 2025-09-20 08:55:56.114366+00 |
| description | Input/Output utilities module for SciRS2 (scirs2-io) |
| homepage | |
| repository | https://github.com/cool-japan/scirs |
| max_upload_size | |
| id | 1631183 |
| size | 7,850,541 |
Production-ready Input/Output module for the SciRS2 scientific computing library. This module provides comprehensive functionality for reading and writing various scientific and numerical data formats with high performance and reliability.
Includes `.mat` file format support with all data types.

Add to your Cargo.toml:
```toml
[dependencies]
scirs2-io = "0.1.0-beta.2"
```
Enable specific features as needed:
```toml
[dependencies]
scirs2-io = { version = "0.1.0-beta.2", features = ["hdf5", "async", "compression"] }
```
Available features:

- `default`: CSV, compression, and validation (recommended for most use cases)
- `hdf5`: HDF5 file format support
- `async`: Asynchronous I/O with tokio
- `reqwest`: Network operations and HTTP client
- `all`: All features enabled

Quick start:

```rust
use scirs2_io::{matlab, csv, image, compression};
use scirs2_core::error::CoreResult;
use ndarray::Array2;
// Read MATLAB file
let data = matlab::loadmat("data.mat")?;
let array = data.get_array::<f64>("matrix")?;
// Process CSV with automatic type detection
let (headers, data) = csv::read_csv_numeric("dataset.csv", None)?;
println!("Dataset shape: {:?}", data.shape());
// Handle images with metadata
let (image_data, metadata) = image::read_image("photo.jpg")?;
println!("Image: {}x{} pixels", metadata.width, metadata.height);
// High-performance compression
let compressed = compression::compress_data(
    &large_dataset,
    compression::CompressionAlgorithm::Zstd,
    Some(6),
)?;
```
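Decompression follows the same pattern via `decompress_data` (see the full import list further down); a minimal round-trip sketch, with the `decompress_data` signature assumed for illustration:

```rust
use scirs2_io::compression::{compress_data, decompress_data, CompressionAlgorithm};

// Assumed signature: decompress_data(&[u8], CompressionAlgorithm) -> Result<Vec<u8>, _>
let original = vec![42u8; 4096];
let compressed = compress_data(&original, CompressionAlgorithm::Zstd, Some(6))?;
let restored = decompress_data(&compressed, CompressionAlgorithm::Zstd)?;
assert_eq!(original, restored);
```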
Parallel compression for large datasets:

```rust
use scirs2_io::compression::{
    compress_data_parallel, ParallelCompressionConfig, CompressionAlgorithm,
};

// Configure high-performance parallel compression
let config = ParallelCompressionConfig {
    num_threads: 8,
    chunk_size: 1024 * 1024, // 1 MB chunks
    buffer_size: 64 * 1024,  // 64 KB buffer
    enable_memory_mapping: true,
};

// Process a large dataset (10 MB example)
let large_data = vec![0u8; 10_000_000];
let (compressed, stats) = compress_data_parallel(
    &large_data,
    CompressionAlgorithm::Zstd,
    Some(6),
    config,
)?;

println!(
    "Compressed to {:.1}% in {:.2}ms",
    100.0 * stats.bytes_output as f64 / stats.bytes_processed as f64,
    stats.operation_time_ms
);
println!("Throughput: {:.2} MB/s", stats.throughput_bps / 1_000_000.0);
```
Schema validation for structured data:

```rust
use scirs2_io::validation::{SchemaValidator, schema_helpers, SchemaConstraint};
use serde_json::json;

let validator = SchemaValidator::new();

// Define a validation schema
let user_schema = schema_helpers::object([
    ("name", schema_helpers::string()
        .with_constraint(SchemaConstraint::MinLength(1))
        .required()),
    ("age", schema_helpers::integer()
        .with_constraint(SchemaConstraint::MinValue(0.0))
        .with_constraint(SchemaConstraint::MaxValue(150.0))
        .required()),
    ("email", schema_helpers::email().required()),
].into_iter().collect());

// Validate data against the schema
let user_data = json!({
    "name": "Alice Johnson",
    "age": 30,
    "email": "alice@example.com"
});

let result = validator.validate(&user_data, &user_schema);
if result.valid {
    println!("Data validation passed!");
} else {
    for error in &result.errors {
        println!("Validation error in {}: {}", error.path, error.message);
    }
}
```
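The validation module also covers data integrity via `calculate_checksum` and `verify_checksum` (see the API list below); a minimal sketch in which the algorithm selector and both signatures are assumptions for illustration, not confirmed API:

```rust
use scirs2_io::validation::{calculate_checksum, verify_checksum};

// Assumed signatures and algorithm name; check the crate docs for the real API.
let payload = b"experimental results";
let checksum = calculate_checksum(payload, "sha256")?;
assert!(verify_checksum(payload, "sha256", &checksum)?);
```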
Chunked streaming for files that do not fit in memory:

```rust
use scirs2_io::streaming::{StreamingConfig, process_file_chunked};

// Process large files efficiently, one chunk at a time
let config = StreamingConfig::default()
    .chunk_size(64 * 1024)
    .enable_progress_reporting(true);

let (_result, stats) = process_file_chunked("large_dataset.bin", config,
    |chunk_data, chunk_id| {
        // Process each chunk
        println!("Processing chunk {}: {} bytes", chunk_id, chunk_data.len());
        // Your processing logic here
        Ok(())
    })?;

println!("Processed {} chunks, {} total bytes",
    stats.total_chunks, stats.total_bytes_processed);
```
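Because the chunk callback is an ordinary closure, it can accumulate state captured from the enclosing scope (assuming `process_file_chunked` accepts an `FnMut`); a small sketch counting non-zero bytes across the whole file:

```rust
use scirs2_io::streaming::{StreamingConfig, process_file_chunked};

// Capture a mutable counter in the closure to aggregate across chunks.
let config = StreamingConfig::default().chunk_size(64 * 1024);
let mut nonzero = 0usize;
let (_result, stats) = process_file_chunked("large_dataset.bin", config,
    |chunk_data, _chunk_id| {
        nonzero += chunk_data.iter().filter(|&&b| b != 0).count();
        Ok(())
    })?;
println!("{} of {} bytes are non-zero", nonzero, stats.total_bytes_processed);
```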
The main public APIs, grouped by module:

```rust
// MATLAB .mat files
use scirs2_io::matlab::{loadmat, savemat, MatFile, MatVar};

// Scientific data formats
use scirs2_io::{
    netcdf::{NetCDFFile, NetCDFOptions, NetCDFFormat},
    hdf5::{HDF5File, CompressionOptions, DatasetOptions},
    matrix_market::{read_matrix_market, write_matrix_market},
};

// Images
use scirs2_io::image::{
    read_image, write_image, convert_image, get_grayscale,
    ImageFormat, ColorMode, ImageMetadata,
};

use scirs2_io::compression::{
    // Basic compression
    compress_data, decompress_data,
    // Parallel processing
    compress_data_parallel, decompress_data_parallel,
    // Configuration
    CompressionAlgorithm, ParallelCompressionConfig,
    // Array-specific
    ndarray::{compress_array, decompress_array},
};

use scirs2_io::validation::{
    // Integrity checking
    calculate_checksum, verify_checksum,
    // Schema validation
    SchemaValidator, schema_helpers, SchemaConstraint,
    // Format validation
    formats::{validate_format, detect_file_format},
};

use scirs2_io::serialize::{
    serialize_array, deserialize_array,
    serialize_sparse_matrix, deserialize_sparse_matrix,
    SerializationFormat, SparseMatrixCOO,
};
```
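Tying the serialization API together, a round-trip sketch; the file-path arguments and the `SerializationFormat::Binary` variant are assumptions for illustration:

```rust
use ndarray::Array1;
use scirs2_io::serialize::{serialize_array, deserialize_array, SerializationFormat};

// Assumed signatures; see the crate docs for the real ones.
let v: Array1<f64> = Array1::linspace(0.0, 1.0, 5);
serialize_array("vector.bin", &v, SerializationFormat::Binary)?;
let restored: Array1<f64> = deserialize_array("vector.bin", SerializationFormat::Binary)?;
assert_eq!(v, restored);
```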
This library is production-ready.
We welcome contributions! Please see our Contributing Guidelines for details.
Dual-licensed: choose the license that works best for your project.
Ready for Production: scirs2-io v0.1.0-beta.2 provides enterprise-grade I/O capabilities for scientific computing applications.