use crate::{
    context::ContextSession,
    llms::{
        anthropic::{
            messages::{AnthropicMessage, AnthropicMessageContent},
            AnthropicClient,
        },
        messages::{AbstractMessage, Message, MessageType, Role},
        open_ai::{messages::OpenAIMessage, OpenAIClient},
        tools::{ToolChoice, Toolkit},
        LLMProvider, LLMToolUsage, MultiModelLLMProvider, ProviderModel,
    },
};
use anyhow::Result;
use log::debug;
use templates::get_context_prompt;

pub mod templates;

pub trait ProviderPrompt {
    fn to_anthropic(&self) -> Result<AnthropicMessage>;
    fn to_open_ai(&self) -> Result<OpenAIMessage>;
}

pub trait BasePrompt: ProviderPrompt {
    fn with_context(&self, context: &ContextSession) -> Self
    where
        Self: Sized;

    fn with_additional_instructions(&self, instructions: &str) -> Self
    where
        Self: Sized;

    fn send_to(&self, llm: ProviderModel) -> Result<Vec<MessageType>> {
        let messages = match llm {
            ProviderModel::Anthropic(model) => {
                let client = AnthropicClient::new();
                client
                    .with_model(model)
                    .get_completion(vec![self.to_anthropic()?])?
                    .into_iter()
                    .map(MessageType::Anthropic)
                    .collect()
            }
            ProviderModel::OpenAI(model) => {
                let client = OpenAIClient::new().with_model(model);
                client
                    .get_completion(vec![self.to_open_ai()?])?
                    .into_iter()
                    .map(MessageType::OpenAI)
                    .collect()
            }
        };
        Ok(messages)
    }

    fn get_response(&self, llm: ProviderModel) -> Result<String> {
        self.send_to(llm)?
            .last()
            .ok_or_else(|| anyhow::anyhow!("No messages returned"))?
            .get_content()
    }
}

pub struct UserPrompt(pub String);

pub trait ToolPrompt: ProviderPrompt {
    fn get_work_results(
        &self,
        llm: &ProviderModel,
        tool_kit: &Toolkit,
        force_tool_use: &ToolChoice,
    ) -> Result<Vec<MessageType>> {
        debug!("Getting work results");
        let messages = match llm {
            ProviderModel::Anthropic(model) => {
                debug!("Getting work results with Anthropic");
                let client = AnthropicClient::new();
                client
                    .with_model(*model)
                    .get_work_result(vec![self.to_anthropic()?], tool_kit, force_tool_use)?
                    .into_iter()
                    .map(MessageType::Anthropic)
                    .collect()
            }
            ProviderModel::OpenAI(model) => {
                debug!("Getting work results with OpenAI");
                let client = OpenAIClient::new().with_model(*model);
                client
                    .get_work_result(vec![self.to_open_ai()?], tool_kit, force_tool_use)?
                    .into_iter()
                    .map(MessageType::OpenAI)
                    .collect()
            }
        };
        Ok(messages)
    }

    /// Get the response from the LLM when tools are passed to it.
    ///
    /// If the last message is a continuation, call the LLM again.
    fn get_work_output(
        &self,
        llm: &ProviderModel,
        tool_kit: &Toolkit,
        force_tool_use: &ToolChoice,
    ) -> Result<String> {
        self.get_work_results(llm, tool_kit, force_tool_use)?
            .last()
            .ok_or_else(|| anyhow::anyhow!("No messages returned"))?
            .get_content()
    }
}

impl ToolPrompt for UserPrompt {}

impl ProviderPrompt for UserPrompt {
    fn to_anthropic(&self) -> Result<AnthropicMessage> {
        Ok(AnthropicMessage::new(
            vec![AnthropicMessageContent::Text {
                text: self.0.clone(),
            }],
            Role::User,
        ))
    }

    fn to_open_ai(&self) -> Result<OpenAIMessage> {
        Ok(OpenAIMessage::new(self.0.as_str(), Role::User))
    }
}

impl BasePrompt for UserPrompt {
    fn with_context(&self, context: &ContextSession) -> Self {
        Self(get_context_prompt(self.0.clone(), context))
    }

    fn with_additional_instructions(&self, instructions: &str) -> Self {
        Self(format!(
            "### Instructions\n{}\n---\n### User Input\n{}",
            instructions, self.0
        ))
    }
}

pub struct SystemPrompt(pub String);

impl ProviderPrompt for SystemPrompt {
    fn to_anthropic(&self) -> Result<AnthropicMessage> {
        Ok(AnthropicMessage::new(
            vec![AnthropicMessageContent::Text {
                text: self.0.clone(),
            }],
            Role::System,
        ))
    }

    fn to_open_ai(&self) -> Result<OpenAIMessage> {
        Ok(OpenAIMessage::new(self.0.as_str(), Role::System))
    }
}

pub struct AssistantPrompt(pub String);

impl ProviderPrompt for AssistantPrompt {
    fn to_anthropic(&self) -> Result<AnthropicMessage> {
        Ok(AnthropicMessage::new(
            vec![AnthropicMessageContent::Text {
                text: self.0.clone(),
            }],
            Role::Assistant,
        ))
    }

    fn to_open_ai(&self) -> Result<OpenAIMessage> {
        Ok(OpenAIMessage::new(self.0.as_str(), Role::Assistant))
    }
}

impl BasePrompt for AssistantPrompt {
    fn with_context(&self, context: &ContextSession) -> Self {
        Self(get_context_prompt(self.0.clone(), context))
    }

    fn with_additional_instructions(&self, instructions: &str) -> Self {
        Self(format!(
            "### Instructions\n{}\n---\n### Assistant\n{}",
            instructions, self.0
        ))
    }
}
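
// Example usage (illustrative sketch only, not part of this module's API surface):
// it shows how the `BasePrompt` and `ToolPrompt` trait methods above are meant to be
// chained. The concrete model value, `Toolkit::default()`, and `ToolChoice::Auto`
// below are assumptions about the surrounding crate, not confirmed constructors.
//
//     let prompt = UserPrompt("List three risks in this rollout plan.".to_string())
//         .with_additional_instructions("Answer as a short bulleted list.");
//
//     // Plain completion through `BasePrompt`:
//     let answer = prompt.get_response(ProviderModel::Anthropic(model))?;
//
//     // Tool-assisted completion through `ToolPrompt`:
//     let toolkit = Toolkit::default();
//     let output = prompt.get_work_output(
//         &ProviderModel::OpenAI(model),
//         &toolkit,
//         &ToolChoice::Auto,
//     )?;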