---
source: crates/tele_tokenizer/tests/basics.rs
assertion_line: 13
expression: tokenizer.tokenize()
---
Ok(
    [
        Token {
            token_type: WhiteSpace,
            start_pos: Pos {
                offset: 0,
                line: 1,
                column: 1,
            },
            end_pos: Pos {
                offset: 1,
                line: 1,
                column: 2,
            },
            content: " ",
        },
        Token {
            token_type: Comment,
            start_pos: Pos {
                offset: 1,
                line: 1,
                column: 2,
            },
            end_pos: Pos {
                offset: 21,
                line: 1,
                column: 22,
            },
            content: "/** I'm Comment * */",
        },
        Token {
            token_type: WhiteSpace,
            start_pos: Pos {
                offset: 21,
                line: 1,
                column: 22,
            },
            end_pos: Pos {
                offset: 22,
                line: 1,
                column: 23,
            },
            content: " ",
        },
        Token {
            token_type: EOF,
            start_pos: Pos {
                offset: 22,
                line: 1,
                column: 23,
            },
            end_pos: Pos {
                offset: 22,
                line: 1,
                column: 23,
            },
            content: "",
        },
    ],
)