[package]
name = "rllama"
version = "0.3.0"
edition = "2021"
authors = ["Mikko Juola"]
description = "Pure Rust implementation of the LLaMA family of models, with an executable"
documentation = "https://github.com/Noeda/rllama"
homepage = "https://github.com/Noeda/rllama"
repository = "https://github.com/Noeda/rllama"
license = "AGPL-3.0"
keywords = ["llama", "machine-learning"]
categories = ["command-line-utilities"]

[lib]
path = "src/lib.rs"

[[bin]]
name = "rllama"
path = "src/main.rs"

[dependencies]
protobuf = "3.2"
thiserror = "1.0"
half = "2.2"
num-complex = "0.4"
embedded-profiling = "0.3"
rand = "0.8"
approx = "0.5"
rayon = "1.7"
clap = { version = "4.1", features = ["derive"] }
indicatif = "0.17"
colored = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
ocl = { version = "0.19", optional = true }

[features]
# Enables the OpenCL backend via the optional `ocl` dependency.
opencl = ["ocl"]

# We need the protobuf compiler to generate code at build time.
[build-dependencies]
protobuf-codegen = "3.2"
protobuf-parse = "3.2"

[dev-dependencies]
criterion = "0.4"

[profile.release]
# Keep debug symbols in release builds, e.g. for profiling.
debug = true

[[bench]]
path = "src/benches/benchmark.rs"
name = "benchmark"
# Criterion provides its own benchmark harness, so disable the default one.
harness = false
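
# A minimal sketch of common commands for this manifest, assuming a standard
# Cargo setup (not part of the original file):
#
#   cargo build --release                     # CPU-only build
#   cargo build --release --features opencl   # build with the OpenCL backend
#   cargo bench                               # run the Criterion benchmark above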