//! Integration tests for the Ollama adapter, running the shared `common_tests` suite.
//! NOTE: These tests require a local Ollama instance with the models referenced below.

mod support;

use crate::support::common_tests;
use genai::adapter::AdapterKind;
use genai::resolver::AuthData;

type Result<T> = core::result::Result<T, Box<dyn std::error::Error>>; // For tests.

const MODEL: &str = "phi3:latest";

// region: --- Chat

#[tokio::test]
async fn test_chat_simple_ok() -> Result<()> {
	common_tests::common_test_chat_simple_ok(MODEL).await
}

#[tokio::test]
async fn test_chat_multi_system_ok() -> Result<()> {
	common_tests::common_test_chat_multi_system_ok(MODEL).await
}

#[tokio::test]
async fn test_chat_json_mode_ok() -> Result<()> {
	common_tests::common_test_chat_json_mode_ok(MODEL, false).await
}

#[tokio::test]
async fn test_chat_temperature_ok() -> Result<()> {
	common_tests::common_test_chat_temperature_ok(MODEL).await
}

#[tokio::test]
async fn test_chat_stop_sequences_ok() -> Result<()> {
	common_tests::common_test_chat_stop_sequences_ok(MODEL).await
}

// endregion: --- Chat

// region: --- Chat Stream Tests

#[tokio::test]
async fn test_chat_stream_simple_ok() -> Result<()> {
	common_tests::common_test_chat_stream_simple_ok(MODEL).await
}

#[tokio::test]
async fn test_chat_stream_capture_content_ok() -> Result<()> {
	common_tests::common_test_chat_stream_capture_content_ok(MODEL).await
}

// /// COMMENTED OUT FOR NOW, as the Ollama OpenAI compatibility layer does not support
// /// usage tokens when streaming. See https://github.com/ollama/ollama/issues/4448
// #[tokio::test]
// async fn test_chat_stream_capture_all_ok() -> Result<()> {
// 	common_tests::common_test_chat_stream_capture_all_ok(MODEL).await
// }

// endregion: --- Chat Stream Tests

// region: --- Resolver Tests

#[tokio::test]
async fn test_resolver_auth_ok() -> Result<()> {
	common_tests::common_test_resolver_auth_ok(MODEL, AuthData::from_single("ollama")).await
}

// endregion: --- Resolver Tests

// region: --- List

/// NOTE: This test assumes the "llama3.1:8b" model is present.
#[tokio::test]
async fn test_list_models() -> Result<()> {
	common_tests::common_test_list_models(AdapterKind::Ollama, "llama3.1:8b").await
}

// endregion: --- List