// Use the `genai_custom` crate under the standard `genai` name.
use genai_custom as genai;

use genai::chat::printer::print_chat_stream;
use genai::chat::{ChatMessage, ChatRequest};
use genai::Client;

const MODEL: &str = "gpt-4o-mini";

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Follow-up questions asked in sequence within the same conversation.
    let questions = &[
        "Why is the sky blue?",
        "Why is it red sometime?",
    ];

    let client = Client::default();

    // Similar to adding an initial system ChatMessage (cumulative with any other system chat messages).
    let mut chat_req = ChatRequest::default().with_system("Answer in one sentence");

    for &question in questions {
        // Append the user question to the running conversation.
        chat_req = chat_req.append_message(ChatMessage::user(question));

        println!("\n--- Question:\n{question}");
        let chat_res = client.exec_chat_stream(MODEL, chat_req.clone(), None).await?;

        println!("\n--- Answer: (streaming)");
        // Print the streamed response and capture the full assistant answer.
        let assistant_answer = print_chat_stream(chat_res, None).await?;

        // Append the assistant's answer so the next question carries the full context.
        chat_req = chat_req.append_message(ChatMessage::assistant(assistant_answer));
    }

    Ok(())
}