Crates.io | hlx |
lib.rs | hlx |
version | 1.1.7 |
created_at | 2025-09-07 17:34:07.803416+00 |
updated_at | 2025-09-25 20:40:53.608454+00 |
description | Configuration language designed specifically for AI systems |
homepage | |
repository | https://github.com/cyber-boost/helix |
max_upload_size | |
id | 1828364 |
size | 4,620,729 |
Helix Configuration is a purpose-built configuration language designed for AI systems. No more TOML limitations, JSON verbosity, or environment variable chaos. This is configuration that understands AI workflows, agents, and pipelines natively. Designed and built for Maestro.ps
Current Status: work in progress and not yet production-ready, but it already includes a full compiler, CLI tools, and comprehensive language features.
Crates install: cargo build --release --features full && hlx install
TOML Problems:
JSON Problems:
ENV Variables Problems:
YAML Problems:
Built for AI Configuration:
# Comments start with #
project "helixia" {
version = "3.0.0"
author = "B"
description = "AI-Human collaboration system"
}
agent "senior-rust-engineer" {
model = "claude-3-opus"
role = "Systems Architect"
temperature = 0.7
max_tokens = 100000
capabilities [
"rust-async"
"memory-optimization"
"concurrency"
"zero-copy"
]
backstory {
15 years of systems programming
Rust contributor since 2015
Focus on safety and performance
Built high-frequency trading systems
}
tools = [
"cargo"
"rustc"
"clippy"
"miri"
]
}
workflow "code-review-pipeline" {
trigger = "pull_request"
step "analyze" {
agent = "senior-rust-engineer"
task = "Review code for safety and performance"
timeout = 30m # Native duration type!
parallel = false
depends_on = []
}
step "test" {
crew = ["test-engineer", "qa-engineer"]
task = "Run comprehensive test suite"
timeout = 1h
parallel = true
depends_on = ["analyze"]
retry {
max_attempts = 3
delay = 30s
backoff = "exponential"
}
}
pipeline {
analyze -> test -> approve -> merge
}
}
memory {
    provider = "helix_db" # Our AI-native database
    connection = "file:./data/agents.db"
    embeddings {
        model = "text-embedding-3-small"
        dimensions = 1536
        batch_size = 100
    }
    cache {
        size = 1000
        ttl = 24h # Duration type
    }
}
context "production" {
environment = "prod"
debug = false
max_tokens = 100000
variables {
api_endpoint = "https://api.helix.cm"
timeout = 30s
retry_count = 3
}
secrets {
anthropic_key = $ANTHROPIC_API_KEY # Environment reference
openai_key = $OPENAI_API_KEY
database_url = "vault:database/prod/url" # Vault reference
}
}
crew "development-team" {
agents [
"senior-rust-engineer"
"code-reviewer"
"test-engineer"
]
process = "hierarchical"
manager = "senior-rust-engineer"
max_iterations = 10
verbose = true
}
string_value = "Hello, World"
number_value = 42
float_value = 3.14
boolean_value = true
null_value = null
# All of these work naturally
timeout = 30s # 30 seconds
delay = 5m # 5 minutes
cache_ttl = 24h # 24 hours
retention = 7d # 7 days
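For a sense of what these literals amount to, here is a minimal sketch, not the crate's implementation, of mapping the suffixed forms onto `std::time::Duration`:

```rust
use std::time::Duration;

// Minimal sketch only; the crate's actual duration handling may differ.
fn parse_duration(text: &str) -> Option<Duration> {
    if text.len() < 2 {
        return None;
    }
    let (value, unit) = text.split_at(text.len() - 1);
    let n: u64 = value.parse().ok()?;
    let seconds = match unit {
        "s" => n,
        "m" => n * 60,
        "h" => n * 3_600,
        "d" => n * 86_400,
        _ => return None,
    };
    Some(Duration::from_secs(seconds))
}

fn main() {
    assert_eq!(parse_duration("30s"), Some(Duration::from_secs(30)));
    assert_eq!(parse_duration("24h"), Some(Duration::from_secs(86_400)));
    assert_eq!(parse_duration("7d"), Some(Duration::from_secs(604_800)));
}
```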
# Environment variables
api_key = $API_KEY
# Memory references
context = @memory.conversation.latest
# Variable references
base_url = ${config.api.endpoint}
# Pull from shell environment, .bashrc, or .env files
agent "my-agent" {
model = $ANTHROPIC_API_KEY
tools = ["tool1", "tool2"]
}
context "production" {
secrets {
# Environment variables
db_password = $DATABASE_PASSWORD
api_key = $MY_API_KEY
# Vault references (for sensitive data)
cert_path = "vault:ssl/certificate"
private_key = "vault:ssl/private_key"
}
variables {
# Regular configuration values
api_endpoint = "https://api.production.com"
timeout = 30s
max_retries = 3
}
}
Setting up Environment Variables:
# In your .bashrc, .zshrc, or .env file
export ANTHROPIC_API_KEY="your-key-here"
export DATABASE_PASSWORD="your-password"
export MY_API_KEY="another-key"
# helix will automatically pick these up
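Resolution of a `$VAR` reference is essentially an environment lookup. The helper below is hypothetical and only illustrates the idea; the crate does this internally when it loads a file:

```rust
use std::env;

// Hypothetical helper, for illustration only; not part of the crate's API.
fn resolve_env_ref(name: &str) -> Result<String, String> {
    env::var(name).map_err(|_| format!("environment variable {name} is not set"))
}

fn main() {
    match resolve_env_ref("ANTHROPIC_API_KEY") {
        Ok(value) => println!("resolved ({} bytes)", value.len()),
        Err(err) => eprintln!("{err}"),
    }
}
```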
# Simple arrays
tags = ["rust", "systems", "performance"]
# Multi-line arrays
capabilities [
    "reasoning"
    "generation"
    "analysis"
]
# Inline objects
metadata = { version = "1.0", stable = true }
# Nested objects
config {
    api {
        endpoint = "https://api.example.com"
        timeout = 30s
    }
}
Pipeline Flow:
pipeline {
    fetch -> process -> validate -> store
}
Hierarchical Tags:
tags [
    "capability:reasoning:logical"
    "model:gpt-4"
    "context:conversation"
]
Weighted Values:
priority = "high:0.9"
confidence = "certain:0.95"
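A weighted value is just a label and a numeric weight joined by `:`; a consuming application might split it apart like this (illustrative sketch, not part of the crate's API):

```rust
// Illustrative only: split a weighted value such as "high:0.9"
// into its label and numeric weight.
fn split_weighted(value: &str) -> Option<(&str, f64)> {
    let (label, weight) = value.split_once(':')?;
    Some((label, weight.parse().ok()?))
}

fn main() {
    assert_eq!(split_weighted("high:0.9"), Some(("high", 0.9)));
    assert_eq!(split_weighted("certain:0.95"), Some(("certain", 0.95)));
}
```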
helix Source Code
↓
[Lexer] → Tokens
↓
[Parser] → AST
↓
[Validator] → Validated AST
↓
[Compiler] → Binary Format
The lexer converts source text into tokens, the parser builds an Abstract Syntax Tree (AST), and the validator ensures correctness before compilation to the binary format.
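The data these stages pass between them looks roughly like the following sketch; `Token` and `Node` here are hypothetical simplifications, not the crate's actual lexer.rs/ast.rs definitions:

```rust
// Hypothetical, simplified shapes for illustration only; the real
// definitions live in lexer.rs and ast.rs and may differ.
#[allow(dead_code)]
#[derive(Debug)]
enum Token {
    Identifier(String), // agent, model, timeout, ...
    StringLit(String),  // "claude-3-opus"
    Number(f64),        // 0.7
    Duration(u64),      // 30m, stored here as seconds
    Equals,             // =
    Arrow,              // ->
    LeftBrace,
    RightBrace,
}

#[allow(dead_code)]
#[derive(Debug)]
enum Node {
    Block {
        kind: String,         // "agent", "workflow", ...
        name: Option<String>, // "senior-rust-engineer"
        body: Vec<Node>,
    },
    Assignment {
        key: String,
        value: String,
    },
}

fn main() {
    // Roughly what `temperature = 0.7` becomes after lexing.
    let tokens = [
        Token::Identifier("temperature".into()),
        Token::Equals,
        Token::Number(0.7),
    ];
    println!("{tokens:?}");
}
```

In practice you rarely touch these stages directly; the public API below wraps them behind a single `parse` call.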
use helix_config::{parse, helixConfig};
// Parse from string
let config_str = r#"
agent "assistant" {
model = "gpt-4"
temperature = 0.7
}
"#;
let config = parse(config_str)?;
let agent = config.agents.get("assistant").unwrap();
use helix_config::helixLoader;
let mut loader = helixLoader::new();
// Load single file
let config = loader.load_file("config.hlxbb")?;
// Load directory of .hlxbb files
loader.load_directory("./configs")?;
// Access merged configuration
let merged = loader.get_merged_config();
use helix_config::{parse_and_validate, ValidationRules};
let rules = ValidationRules {
    require_version: true,
    max_agents: Some(100),
    allowed_models: vec!["gpt-4", "claude-3"],
};
let config = parse_and_validate(source, rules)?;
agent - Define an AI agent
workflow - Define a workflow
context - Define an execution context
memory - Configure memory/storage
crew - Define an agent crew
pipeline - Define a processing pipeline
step - Workflow step
trigger - Workflow trigger
capabilities - Agent capabilities
backstory - Agent background
secrets - Sensitive configuration
embeddings - Embedding configuration
= - Assignment
-> - Pipeline flow
[] - Array delimiter
{} - Block/object delimiter
$ - Environment variable
@ - Memory reference
# - Comment
: - Type/weight separator
# Planned for future versions
result = sum([1, 2, 3])
encoded = base64("data")
hashed = sha256("content")
# Group related configurations
# agents.hlxbb
agent "coder" { ... }
agent "reviewer" { ... }
# workflows.hlxbb
workflow "ci" { ... }
workflow "cd" { ... }
# config.hlxbb
memory { ... }
context "prod" { ... }
# Use descriptive names
agent "senior-rust-engineer" # Good
agent "sre" # Too short
agent "a1" # Meaningless
# Use consistent separators
workflow "code-review-pipeline" # kebab-case
context "production_environment" # snake_case (pick one!)
# Document why, not what
agent "specialist" {
# Higher temperature for creative problem solving
temperature = 0.9
# Limit tokens to control costs in development
max_tokens = 50000
}
# Define base configurations (future feature)
base_agent {
    temperature = 0.7
    max_tokens = 100000
}
agent "coder" extends base_agent {
    model = "gpt-4"
    role = "Developer"
}
Helix Configuration provides clear, actionable error messages:
Error at line 15, column 8:
    timeout = "30 minutes"
              ^^^^^^^^^^^^
    Expected duration type (e.g., 30m, 1h, 5s)

Error at line 23:
    agent = "undefined-agent"
            ^^^^^^^^^^^^^^^^^
    Reference to undefined agent. Available agents:
    - senior-rust-engineer
    - code-reviewer
    - test-engineer
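Programmatically, the same diagnostics come back through the parse result. A minimal sketch, assuming the crate's error type implements Display (conventional, but not verified here):

```rust
use helix_config::parse;

fn main() {
    // A string where a duration is expected should produce an error
    // similar to the diagnostic shown above.
    let source = r#"
        workflow "ci" {
            timeout = "30 minutes"
        }
    "#;
    match parse(source) {
        Ok(_config) => println!("parsed successfully"),
        Err(err) => eprintln!("{err}"),
    }
}
```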
Available for:
helix-config
vim-helix
helix
# Format .hlxbb files
helix fmt config.hlxbb
# Check formatting
helix fmt --check config.hlxbb
# Lint for common issues
helix lint config.hlxbb
# With auto-fix
helix lint --fix config.hlxbb
# Before (TOML)
[agent.coder]
model = "gpt-4"
temperature = 0.7
capabilities = ["rust", "python"]
# After (helix)
agent "coder" {
model = "gpt-4"
temperature = 0.7
capabilities ["rust", "python"]
}
// Before (JSON)
{
  "workflow": {
    "name": "ci",
    "timeout": "30m",
    "steps": [...]
  }
}
# After (helix)
workflow "ci" {
timeout = 30m # Native duration!
step { ... }
}
# Before (YAML)
agent:
  name: coder
  config:
    model: gpt-4
    temperature: 0.7
# After (helix)
agent "coder" {
model = "gpt-4"
temperature = 0.7
}
helix CLI with 25+ commands including compile, decompile, bundle, validate, test, bench, serve, and more
helix/
├── Cargo.toml # Package definition with features
├── lib.rs # Public API and exports
├── types.rs # Configuration types
├── lexer.rs # Tokenization with source tracking
├── parser.rs # Recursive descent parser with error recovery
├── ast.rs # Abstract syntax tree and visitor pattern
├── semantic.rs # Semantic analysis and validation
├── codegen.rs # IR generation
├── error.rs # Error handling and types
├── integration.rs # Integration tests
├── tests.rs # Unit test suite
├── benches.rs # Performance benchmarks
├── compiler/ # Binary compilation subsystem
│ ├── mod.rs # Module exports
│ ├── binary.rs # Binary format definitions
│ ├── optimizer.rs # Optimization pipeline (0-3)
│ ├── serializer.rs # Binary serialization
│ ├── loader.rs # Runtime loading with mmap
│ ├── bundle.rs # Multi-file bundling
│ ├── cli.rs # CLI implementation
│ ├── cli/ # CLI command modules
│ ├── config/ # Configuration management
│ ├── project/ # Project management
│ ├── publish/ # Publishing and distribution
│ ├── tools/ # Development tools
│ └── workflow/ # Workflow management
├── src/bin/
│ └── helix.rs # CLI binary entry point
├── examples/ # 5 complete .hlxbb example files
│ ├── ai_development_team.hlxbb
│ ├── data_pipeline.hlxbb
│ ├── research_assistant.hlxbb
│ ├── customer_support.hlxbb
│ └── minimal.hlxbb
├── binaries/ # Compiled binary examples
├── admin/ # Documentation and scripts
├── summaries/ # Development summaries
└── build.sh # Build and test script
## Performance (Actual Benchmarks)
### Parse Performance
| File Size | TOML | JSON | YAML | helix |
|-----------|------|------|------|-----|
| Small (100 lines) | ~0.5ms | ~0.3ms | ~0.8ms | <1ms |
| Medium (1K lines) | ~45ms | ~28ms | ~72ms | <10ms |
| Large (10K lines) | ~450ms | ~280ms | ~750ms | <100ms |
### With Binary Compilation
| Operation | Text (.hlxbb) | Binary (.hlxb) | Speedup |
|-----------|-------------|----------------|---------|
| Parse | 35ms | N/A | N/A |
| Load | 35ms | 0.8ms | 44x |
| Validate | 12ms | 0ms | ∞ |
| Total | 47ms | 0.8ms | 59x |
## Installation & Usage
### As a Library
```toml
[dependencies]
helix-config = { path = "path/to/helix" }
# Or with specific features:
helix-config = { path = "path/to/helix", default-features = false } # Just parsing
helix-config = { path = "path/to/helix", features = ["compiler"] } # With compiler
cd helix
cargo install --path . --features cli
# Now use helix command globally
helix compile config.hlxbb -O3
# Core compilation commands
helix compile config.hlxbb -O3 --compress
helix decompile config.hlxb -o recovered.hlxbb
helix validate config.hlxbb --detailed
helix bundle ./configs/ -o bundle.hlxb --tree-shake
helix optimize config.hlxb -O3
# Project management
helix init my-project
helix add dependency-name
helix remove dependency-name
helix clean
helix reset
helix build
helix run
# Development tools
helix fmt config.hlxbb
helix lint config.hlxbb --fix
helix generate template-name
helix test
helix bench
# System integration
helix watch ./configs/ -O2
helix serve --port 8080
helix info config.hlxb --symbols --sections
helix diff old.hlxb new.hlxb
helix config list
helix cache clear
helix doctor
# Publishing and distribution
helix publish --version 1.0.0
helix sign config.hlxb
helix export --format json
helix import --from toml config.toml
Run all tests with the provided scripts:
# Full build and test
./build.sh # Complete build and test suite
# Just test examples
cargo test # Run all tests
cargo test integration # Run integration tests
cargo bench --no-run # Compile benchmarks
cargo build --all-features # Build everything
Note: Some integration tests may have compilation issues that need to be resolved. The core functionality is working as demonstrated by the successful CLI commands.
Priority areas for contribution:
BBL - Configuration should enable, not constrain.