| Crates.io | lmql |
| lib.rs | lmql |
| version | 0.3.0 |
| created_at | 2025-02-14 13:04:31.679467+00 |
| updated_at | 2025-03-11 13:18:42.781251+00 |
| description | A general-purpose async streaming large language model interface |
| homepage | |
| repository | |
| max_upload_size | |
| id | 1555570 |
| size | 71,680 |
A typesafe, high-level LLM API for Rust, inspired by the Python library of the same name.
use futures::StreamExt;
use lmql::{PromptOptions, Chunk, Message, LLM};
#[tokio::main]
async fn main() {
    // Build a Claude client; credentials are read from the environment.
    let claude = lmql::llms::anthropic::Claude::new_from_env(
        lmql::llms::anthropic::ClaudeModel::Claude_3_5_Haiku_20241022,
    );

    // Start a streaming prompt and print tokens as soon as they arrive.
    let messages = [Message::User("Please provide a poem about the moon.".into())];
    let mut stream = claude
        .prompt(&messages, &PromptOptions::default())
        .unwrap();
    while let Some(t) = stream.next().await {
        match t {
            Ok(Chunk::Token(tok)) => print!("{}", tok),
            _ => panic!("Unexpected chunk: {t:#?}"),
        }
    }

    // Alternatively, `lmql::TokenStreamExt` can gather the whole response at once.
    use lmql::TokenStreamExt;
    let follow_up = [Message::User("What is bitcoin?".into())];
    let stream = claude
        .prompt(&follow_up, &PromptOptions::default())
        .unwrap();
    let response = stream.all_tokens().await.unwrap();
    assert_eq!(response.len(), 1);
    match &response[0] {
        Chunk::Token(t) => println!("{t}"),
        _ => panic!("Expected only text in response"),
    }
}