use kernelx_core::{models, prelude::*};

provider! {
    OpenAI,
    api_base: "http://192.168.2.9:30003/v1",
    api_key: "api-key",
    models: models![
        "mistral-nemo-instruct-2407" => [Capability::Complete, Capability::Chat, Capability::Structured],
    ]
}

#[tokio::main]
async fn main() -> Result<()> {
    // Custom provider exposing an OpenAI-compatible interface
    let provider = get_provider();

    // Get the model and configure it
    // NOTE: the turbofish's type parameter was lost in the original; `LLM` is assumed here
    let model = provider
        .get_model::<LLM>("mistral-nemo-instruct-2407")?
        .system_prompt("You are Scott Pilgrim. You can only answer as Scott.")
        .temperature(0.0)
        .max_tokens(200);

    // Run a completion against the configured model
    let res = model.complete("What's your name?").await?;
    println!("{:?}", res);

    Ok(())
}