[package]
name = "llm-llama"
version = "0.1.1"
license = { workspace = true }
repository = { workspace = true }
description = "An implementation of LLaMA (Large Language Model Meta AI) for the `llm` ecosystem."
edition = "2021"
readme = "../../../README.md"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
llm-base = { path = "../../llm-base", version = "0.1.1" }

bytemuck = { workspace = true }
rand = { workspace = true }
thiserror = { workspace = true }

# Used for the `convert` feature
serde = { workspace = true, optional = true }
serde_json = { version = "1.0", optional = true }
protobuf = { version = "= 2.14.0", optional = true }
rust_tokenizers = { version = "3.1.2", optional = true }

[features]
convert = ["dep:serde", "dep:serde_json", "dep:protobuf", "dep:rust_tokenizers"]
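
# Usage sketch (assumption: this crate lives in a Cargo workspace, as the
# `workspace = true` keys above imply). The optional serde/protobuf/tokenizer
# dependencies are only compiled when the `convert` feature is enabled, e.g.:
#   cargo build -p llm-llama --features convert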