#[cfg(test)]
mod tests {
    use insta::assert_debug_snapshot;
    use rs_html_parser_tokenizer::{Tokenizer, TokenizerOptions};
    use rs_html_parser_tokenizer_tokens::TokenizerToken;

    /// Runs the tokenizer over `data` and collects every emitted token.
    fn tokenize(data: &str) -> Vec<TokenizerToken> {
        let mut log: Vec<TokenizerToken> = Vec::new();

        let options = TokenizerOptions {
            xml_mode: Option::from(false),
            decode_entities: Option::from(true),
            ignore_whitespace_between_tags: Some(true),
        };

        let tokenizer = Tokenizer::new(data.as_bytes(), &options);

        for token in tokenizer {
            log.push(token);
        }

        log
    }

    // NOTE: the original HTML inputs of these tests were lost (everything
    // between angle brackets was stripped during extraction). The markup
    // below is a plausible reconstruction based on each test's name, not
    // the verbatim original fixtures.

    #[test]
    fn basic_element() {
        assert_debug_snapshot!(tokenize("<div>foo</div>"));
    }

    #[test]
    fn short_comment() {
        assert_debug_snapshot!(tokenize("<!---->"));
    }

    #[test]
    fn dash_in_comment() {
        assert_debug_snapshot!(tokenize("<!-- - -->"));
    }

    #[test]
    fn invalid_end_comment() {
        assert_debug_snapshot!(tokenize(r####"<!-- comment --!>"####));
    }

    #[test]
    fn short_comment_text() {
        assert_debug_snapshot!(tokenize(r####"<!---->test"####));
    }
}