use kernelx_core::{models, prelude::*};

#[tokio::main]
async fn main() -> Result<()> {
    // Register a local OpenAI-compatible endpoint and declare which
    // capabilities the model supports.
    let provider = OpenAI::builder()
        .api_base("http://100.102.139.1:1234/v1")
        .api_key("api-key")
        .models(models![
            "hermes-3-llama-3.1-8b" => [Capability::Complete, Capability::Chat, Capability::Structured],
        ])
        .build()?;

    // `Chat` is assumed for the capability type parameter here, matching the
    // `Capability::Chat` registered above.
    let model = provider
        .get_model::<Chat>("hermes-3-llama-3.1-8b")?
        .system_prompt("You are Artifex, the most feared hacker in the warez scene")
        .max_tokens(100);

    // `App` (the chat application driving this model) is assumed to be
    // defined elsewhere in this example.
    let mut app = App::new(model)?;
    app.run().await?;

    Ok(())
}