---
source: crates/tele_tokenizer/tests/basics.rs
assertion_line: 87
expression: tokenizer.tokenize()
---
Ok(
    [
        Token {
            token_type: Ident,
            start_pos: Pos {
                offset: 0,
                line: 1,
                column: 1,
            },
            end_pos: Pos {
                offset: 3,
                line: 1,
                column: 4,
            },
            content: "div",
        },
        Token {
            token_type: WhiteSpace,
            start_pos: Pos {
                offset: 3,
                line: 1,
                column: 4,
            },
            end_pos: Pos {
                offset: 4,
                line: 1,
                column: 5,
            },
            content: " ",
        },
        Token {
            token_type: Delim,
            start_pos: Pos {
                offset: 4,
                line: 1,
                column: 5,
            },
            end_pos: Pos {
                offset: 5,
                line: 1,
                column: 6,
            },
            content: "+",
        },
        Token {
            token_type: WhiteSpace,
            start_pos: Pos {
                offset: 5,
                line: 1,
                column: 6,
            },
            end_pos: Pos {
                offset: 6,
                line: 1,
                column: 7,
            },
            content: " ",
        },
        Token {
            token_type: Ident,
            start_pos: Pos {
                offset: 6,
                line: 1,
                column: 7,
            },
            end_pos: Pos {
                offset: 7,
                line: 1,
                column: 8,
            },
            content: "p",
        },
        Token {
            token_type: EOF,
            start_pos: Pos {
                offset: 7,
                line: 1,
                column: 8,
            },
            end_pos: Pos {
                offset: 7,
                line: 1,
                column: 8,
            },
            content: "",
        },
    ],
)