# Config

```toml
[genai]
# Here we can override the default model for this agent only.
# This agent works well with Claude Sonnet.
# model = "claude-3-5-sonnet-20241022"
# e.g., OpenAI:    "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"
#       Anthropic: "claude-3-5-sonnet-20241022", "claude-3-5-haiku-20241022", "claude-3-haiku-20240307"
#       Ollama:    "llama3.1:70b" (or any locally installed Ollama model)
```

# Data

```lua
-- == Step 1 == Load the doc files

-- `list_load` lists the files (relative to the devai parent dir) and loads
-- each of them, so each item is a FileRecord.
local doc_files = utils.file.list_load(".devai/doc/**/*.md")

-- == Step 2 == Create/load the `ask-devai.md` file

local ask_file_path = ".devai/tmp/ask-devai.md"

local placeholder_text = [[
Placeholder:

- To ask your question, remove this placeholder section
- ask your question
- and `devai run ask-devai`
- or **Replay** in the terminal if already running
]]

if not utils.path.exists(ask_file_path) then
    utils.file.save(ask_file_path, placeholder_text)
    -- Open it with VS Code.
    -- Wrapped in pcall to prevent a failure if the `code` command is not available.
    -- TODO: Enhance this part (should notify the user)
    pcall(utils.cmd.exec, "code", {ask_file_path})
end

-- Load & split the first markdown section.
-- Returns {before = string, first = MdSection, after = string}.
-- For now, `before` is always empty; `first` is the MdSection with `.content`,
-- `.heading_content`, and `.heading_raw` ("" if there is no heading, otherwise
-- the heading content ending with '\n').
local ask_file_split = utils.file.load_md_split_first(ask_file_path)
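-- Illustrative sketch of the returned shape (fields as documented above; the
-- actual runtime value may carry more fields). Given a file starting with
-- "# My question\nHow do I override the model?", we would get roughly:
--   ask_file_split.before            = ""  (always empty for now)
--   ask_file_split.first.heading_raw = "# My question\n"  ("" if no heading)
--   ask_file_split.first.content     = "How do I override the model?\n"
--   ask_file_split.after             = the rest of the file, as a string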
-- If the content starts with the placeholder, skip this run.
local str = utils.text.trim(ask_file_split.first.content) -- trim leading/trailing whitespace
if str:sub(1, 11):lower() == "placeholder" then
    return devai.skip("Question is a placeholder, so skipping for now")
end

-- == Step 3 == Return the data

return {
    doc_files      = doc_files,
    ask_file_path  = ask_file_path,
    ask_file_split = ask_file_split,
}
```

# System

Your goal is to answer **devai** questions in a concise and precise manner, with some code examples when appropriate.

**devai** is a command-line utility that lets you write agents in a markdown file, with full control over the prompt, using a multi-stage approach.

Here are some documentation files, separated by `=== file/path/to/document.md`, for you to refer to in your answer.

{{#each data.doc_files}}
=== {{this.path}}

{{this.content}}

{{/each}}

# System

- When the user asks a question, provide the answer.
- Give the file path where the answer was found.
- Format the answer this way:

```
# QUESTION: _user_question_

_answer_
```

- For the `_user_question_` text, if the question is short, put it there verbatim; if it is long or contains special characters, summarize it and put the summary there.
- For `# Instruction` and `# System`, the content is just normal markdown, not in a markdown code block, except when including code, which goes in the appropriate code block.
- Where appropriate, do not hesitate to use bullet points to help clarity and conciseness.
- When the user asks to generate a stage of a devai file, do not wrap it in a `lua` code block; just write `# stage_name` and then put the Lua code in a `lua` code block.
- When providing the answer, do not surround it with a markdown code block.
- For the `# Instruction`, `# System`, and `# Assistant` sections, do not put the handlebars template in a markdown code block, as devai does not need it. Just the text with the handlebars characters will work fine.
- Remember, a `.devai` file is a markdown file, so when giving devai examples, do not surround the devai stage examples with a markdown code block; just put them inline.
- When giving the reference to the doc file, prefix it with `../../` followed by the full `file/path/to/document.md`, formatted as a markdown link like `[../../file/path/to/document.md](../../file/path/to/document.md#possible_heading_anchor)`.

# Instruction

Here is the user question:

{{data.ask_file_split.first.content}}

# Output

```lua
-- Extract the content from the AI response.
local answer = ai_response.content

-- Some models still wrap the result in a markdown block;
-- this alleviates the issue when the answer starts with ```
answer = utils.text.trim(answer)
answer = utils.md.outer_block_content_or_raw(answer)
answer = utils.text.trim(answer)

-- Build the question content.
local first = data.ask_file_split.first
local question = first.heading_raw .. first.content
-- Make sure it ends with exactly one newline.
question = utils.text.ensure_single_ending_newline(question)

-- Prepend the question and answer to the rest of the file content.
local after = data.ask_file_split.after
local content = question .. "\n" .. answer .. "\n\n---\n\n" .. after

-- Save the new content back to the file.
utils.file.save(data.ask_file_path, content)

return "done"
```
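For reference, after a successful run, `ask-devai.md` is rewritten with the current question and the model's answer on top, followed by a `---` separator and whatever was previously below the first section (i.e., earlier questions and answers). Illustratively, assuming the answer format requested in the `# System` section above:

```
# My question heading (if any)
The question as typed.

# QUESTION: _summarized question_

_answer_

---

(earlier questions and answers)
```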