| Crates.io | oak-hlsl |
| lib.rs | oak-hlsl |
| version | 0.0.1 |
| created_at | 2025-10-21 01:29:41.470234+00 |
| updated_at | 2026-01-23 04:29:10.844052+00 |
| description | HLSL language parser with support for modern shader programming and graphics pipeline features. |
| homepage | https://github.com/ygg-lang/oaks |
| repository | https://github.com/ygg-lang/oaks |
| max_upload_size | |
| id | 1893097 |
| size | 114,785 |
A high-performance High-Level Shading Language (HLSL) parser for Rust, built with the Oak parser combinator framework. Parse DirectX shader code with comprehensive AST generation and error handling.

Oak HLSL provides robust parsing for HLSL source files, supporting vertex shaders, pixel shaders, compute shaders, and all major HLSL constructs. Built on the Oak parser combinator framework, it delivers excellent performance and detailed error messages.
```rust
use oak::{Parser, Language};
use oak_hlsl::HLSLLanguage;

fn main() {
    // A simple vertex shader: transforms the position and passes
    // texture coordinates and normals through to the pixel stage.
    let source = r#"
        cbuffer PerObjectCB : register(b0) {
            float4x4 WorldViewProj;
            float4x4 WorldInverseTranspose;
        };

        struct VS_INPUT {
            float4 position : POSITION;
            float2 texcoord : TEXCOORD0;
            float3 normal   : NORMAL;
        };

        struct VS_OUTPUT {
            float4 position : SV_POSITION;
            float2 texcoord : TEXCOORD0;
            float3 normal   : NORMAL;
        };

        VS_OUTPUT main(VS_INPUT input) {
            VS_OUTPUT output;
            output.position = mul(float4(input.position.xyz, 1.0), WorldViewProj);
            output.texcoord = input.texcoord;
            output.normal   = mul(input.normal, (float3x3)WorldInverseTranspose);
            return output;
        }
    "#;

    let mut parser = Parser::<HLSLLanguage>::new();
    match parser.parse(source) {
        Ok(ast) => println!("Parsed AST: {:#?}", ast),
        Err(error) => eprintln!("Parse error: {}", error),
    }
}
```
```rust
use oak::{Parser, Language};
use oak_hlsl::HLSLLanguage;

fn main() {
    // A textured, normal-mapped pixel shader with a simple
    // Lambertian lighting model.
    let source = r#"
        Texture2D gDiffuseTexture : register(t0);
        Texture2D gNormalTexture  : register(t1);
        SamplerState gDiffuseSampler : register(s0);

        cbuffer PerFrameCB : register(b0) {
            float4 gLightDirection;
            float4 gLightColor;
            float4 gAmbientColor;
        };

        struct PS_INPUT {
            float4 position  : SV_POSITION;
            float2 texcoord  : TEXCOORD0;
            float3 normal    : NORMAL;
            float3 tangent   : TANGENT;
            float3 bitangent : BITANGENT;
        };

        float4 main(PS_INPUT input) : SV_TARGET {
            float4 diffuseColor = gDiffuseTexture.Sample(gDiffuseSampler, input.texcoord);
            float3 normalMap = gNormalTexture.Sample(gDiffuseSampler, input.texcoord).xyz * 2.0 - 1.0;
            float3 N = normalize(input.normal);
            float3 L = normalize(gLightDirection.xyz);
            float NdotL = max(dot(N, L), 0.0);
            float3 ambient = gAmbientColor.rgb * diffuseColor.rgb;
            float3 diffuse = gLightColor.rgb * diffuseColor.rgb * NdotL;
            return float4(ambient + diffuse, diffuseColor.a);
        }
    "#;

    let mut parser = Parser::<HLSLLanguage>::new();
    match parser.parse(source) {
        Ok(_ast) => println!("Pixel shader parsed successfully!"),
        Err(error) => eprintln!("Parse error: {}", error),
    }
}
```
Oak HLSL supports parsing compute shaders with thread group semantics:
let source = r#"
RWTexture2D<float4> gOutput : register(u0);
[numthreads(8, 8, 1)]
void main(uint3 id : SV_DispatchThreadID) {
float4 color = float4(id.x / 255.0, id.y / 255.0, 0.5, 1.0);
gOutput[id.xy] = color;
}
"#;
Parse effect files with techniques and passes:
let source = r#"
technique11 Main {
pass P0 {
SetVertexShader(CompileShader(vs_5_0, VSMain()));
SetPixelShader(CompileShader(ps_5_0, PSMain()));
SetGeometryShader(NULL);
}
}
"#;
The parser generates a rich AST with the following main node types:
- `HLSLFile` - Root node containing the entire file
- `Struct` - Structure definitions with semantics
- `Function` - Shader functions with return semantics
- `Variable` - Variable declarations with types and semantics
- `Technique` - Technique blocks with passes
- `Pass` - Render pass definitions
- `Expression` - HLSL expressions and operators
- `Statement` - Control flow and assignment statements

Oak HLSL is designed for high performance.
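The accessor names for the node types listed above are not shown in this README, so the traversal below is left as a hypothetical sketch: the `items()` iterator and `Item` variants are illustrative assumptions based on that node list, not the confirmed oak-hlsl API.

```rust
use oak::Parser;
use oak_hlsl::HLSLLanguage;

fn main() {
    let source = "float4 main() : SV_TARGET { return float4(1, 1, 1, 1); }";
    let mut parser = Parser::<HLSLLanguage>::new();
    let ast = parser.parse(source).expect("parse failed");
    println!("{:#?}", ast);

    // Hypothetical traversal (commented out): `items()` and the `Item`
    // variants are assumed names, not the verified crate API; check the
    // generated docs for the real accessors.
    //
    // for item in ast.items() {
    //     match item {
    //         Item::Struct(s) => println!("struct {}", s.name),
    //         Item::Function(f) => println!("function {}", f.name),
    //         _ => {}
    //     }
    // }
}
```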
Oak HLSL integrates seamlessly with the Oak ecosystem:
```rust
use oak::{Parser, Language};
use oak_hlsl::HLSLLanguage;

// Any HLSL string works here; a one-line shader keeps the example
// self-contained.
let hlsl_source = "float4 main() : SV_TARGET { return float4(0, 0, 0, 1); }";

// Use with other Oak parsers
let mut parser = Parser::<HLSLLanguage>::new();
let result = parser.parse(hlsl_source);
```
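Because parsing goes through a mutable `Parser` value, a single instance can presumably be reused across sources; a minimal sketch under that assumption, using only the API shown above:

```rust
// Reuse one parser instance for several shader strings (reuse across
// calls is assumed here, not explicitly documented in this README).
let sources = [
    "float4 red() : SV_TARGET { return float4(1, 0, 0, 1); }",
    "float4 green() : SV_TARGET { return float4(0, 1, 0, 1); }",
];
let mut parser = Parser::<HLSLLanguage>::new();
for (i, src) in sources.iter().enumerate() {
    match parser.parse(src) {
        Ok(_) => println!("shader {} parsed", i),
        Err(error) => eprintln!("shader {} failed: {}", i, error),
    }
}
```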
More examples can be found in the examples directory.
We welcome contributions! Please see our Contributing Guide for details.