---
source: src/main.rs
expression: scanned
input_file: test-data/lua5.4-tests/literals.lua
---
[ Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 4, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 4, }, Token { kind: String, lexeme: "'testing scanner'", computed_lexeme: None, line: 4, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 4, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "debug", computed_lexeme: None, line: 6, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "require", computed_lexeme: None, line: 6, }, Token { kind: String, lexeme: "\"debug\"", computed_lexeme: None, line: 6, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 9, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 9, }, Token { kind: Identifier, lexeme: "dostring", computed_lexeme: None, line: 9, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 9, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 9, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 9, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 9, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 9, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 9, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 9, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 9, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 9, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 9, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 9, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 9, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 9, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 9, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 9, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 9, }, Token { kind: Identifier, lexeme: "dostring", computed_lexeme: None, line: 11, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 11, }, Token { kind: String, lexeme: "\"x \\v\\f = \\t\\r 'a\\0a' \\v\\f\\f\"", computed_lexeme: None, line: 11, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 12, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 12, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 12, }, Token { kind: String, lexeme: "'a\\0a'", computed_lexeme: None, line: 12, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 12, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 12, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 12, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 12, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 12, }, Token { kind: Number, lexeme: "3",
computed_lexeme: Some( "3", ), line: 12, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: String, lexeme: "'\\n\\\"\\'\\\\'", computed_lexeme: None, line: 15, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 15, }, Token { kind: MultilineString, lexeme: "[[\n\n\"'\\]]", computed_lexeme: None, line: 17, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 19, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 19, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 19, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 19, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 19, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 19, }, Token { kind: String, lexeme: "\"\\a\\b\\f\\n\\r\\t\\v\"", computed_lexeme: None, line: 19, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 19, }, Token { kind: String, lexeme: "\"^%c%c%c%c%c%c%c$\"", computed_lexeme: None, line: 19, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 19, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 19, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 22, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 22, }, Token { kind: String, lexeme: "\"\\09912\"", computed_lexeme: None, line: 22, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 22, }, Token { kind: String, lexeme: "'c12'", computed_lexeme: None, line: 22, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 23, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 23, }, Token { kind: String, lexeme: "\"\\99ab\"", computed_lexeme: None, line: 23, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 23, }, Token { kind: String, lexeme: "'cab'", computed_lexeme: None, line: 23, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 23, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 24, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 24, }, Token { kind: String, lexeme: "\"\\099\"", computed_lexeme: None, line: 24, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 24, }, Token { kind: String, lexeme: "'\\99'", computed_lexeme: None, line: 24, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 24, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 25, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 25, }, Token { kind: String, lexeme: "\"\\099\\n\"", computed_lexeme: None, line: 25, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 25, }, Token { kind: String, lexeme: "'c\\10'", computed_lexeme: None, line: 25, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 25, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 26, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 26, }, Token { kind: String, lexeme: "'\\0\\0\\0alo'", computed_lexeme: None, line: 26, }, 
Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 26, }, Token { kind: String, lexeme: "'\\0'", computed_lexeme: None, line: 26, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 26, }, Token { kind: String, lexeme: "'\\0\\0'", computed_lexeme: None, line: 26, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 26, }, Token { kind: String, lexeme: "'alo'", computed_lexeme: None, line: 26, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 26, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 28, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 28, }, Token { kind: Number, lexeme: "010", computed_lexeme: Some( "010", ), line: 28, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 28, }, Token { kind: Number, lexeme: "020", computed_lexeme: Some( "020", ), line: 28, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 28, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 28, }, Token { kind: Number, lexeme: "030", computed_lexeme: Some( "030", ), line: 28, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 28, }, Token { kind: String, lexeme: "\"1020-30\"", computed_lexeme: None, line: 28, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 31, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 31, }, Token { kind: String, lexeme: "\"\\x00\\x05\\x10\\x1f\\x3C\\xfF\\xe8\"", computed_lexeme: None, line: 31, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 31, }, Token { kind: String, lexeme: "\"\\0\\5\\16\\31\\60\\255\\232\"", computed_lexeme: None, line: 31, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 31, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 33, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 33, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 33, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 33, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 33, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 33, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 33, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 33, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 33, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 33, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 34, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 34, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 34, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 34, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 34, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 34, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 34, }, Token { kind: String, lexeme: "'return '", computed_lexeme: None, line: 34, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 34, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 34, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 34, }, Token { kind: String, lexeme: "', 
require\"debug\".getinfo(1).currentline'", computed_lexeme: None, line: 35, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 35, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 36, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 36, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 36, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 36, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 37, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 37, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 37, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 37, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 37, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 37, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 38, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 40, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 40, }, Token { kind: String, lexeme: "\"'abc\\\\z \\n efg'\"", computed_lexeme: None, line: 40, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 40, }, Token { kind: String, lexeme: "\"abcefg\"", computed_lexeme: None, line: 40, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 40, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 40, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 40, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 41, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 41, }, Token { kind: String, lexeme: "\"'abc\\\\z \\n\\n\\n'\"", computed_lexeme: None, line: 41, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 41, }, Token { kind: String, lexeme: "\"abc\"", computed_lexeme: None, line: 41, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 41, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 41, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 42, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 42, }, Token { kind: String, lexeme: "\"'\\\\z \\n\\t\\f\\v\\n'\"", computed_lexeme: None, line: 42, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 42, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 42, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 42, }, Token { kind: Number, lexeme: "3", computed_lexeme: 
Some( "3", ), line: 42, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 42, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 43, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 43, }, Token { kind: String, lexeme: "\"[[\\nalo\\nalo\\n\\n]]\"", computed_lexeme: None, line: 43, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 43, }, Token { kind: String, lexeme: "\"alo\\nalo\\n\\n\"", computed_lexeme: None, line: 43, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 43, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 43, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 44, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 44, }, Token { kind: String, lexeme: "\"[[\\nalo\\ralo\\n\\n]]\"", computed_lexeme: None, line: 44, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 44, }, Token { kind: String, lexeme: "\"alo\\nalo\\n\\n\"", computed_lexeme: None, line: 44, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 44, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 44, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 45, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 45, }, Token { kind: String, lexeme: "\"[[\\nalo\\ralo\\r\\n]]\"", computed_lexeme: None, line: 45, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 45, }, Token { kind: String, lexeme: "\"alo\\nalo\\n\"", computed_lexeme: None, line: 45, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 45, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 45, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 45, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 46, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 46, }, Token { kind: String, lexeme: "\"[[\\ralo\\n\\ralo\\r\\n]]\"", computed_lexeme: None, line: 46, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 46, }, Token { kind: String, lexeme: "\"alo\\nalo\\n\"", computed_lexeme: None, line: 46, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 46, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 46, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "lexstring", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: String, lexeme: "\"[[alo]\\n]alo]]\"", computed_lexeme: None, line: 47, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 47, }, Token { kind: String, lexeme: "\"alo]\\n]alo\"", computed_lexeme: None, line: 47, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 47, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 49, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 49, }, Token { kind: String, lexeme: "\"abc\\z\n def\\z\n ghi\\z\n \"", computed_lexeme: None, line: 52, }, Token { kind: DoubleEquals, 
lexeme: "==", computed_lexeme: None, line: 52, }, Token { kind: String, lexeme: "'abcdefghi'", computed_lexeme: None, line: 52, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 52, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 56, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 56, }, Token { kind: String, lexeme: "\"\\u{0}\\u{00000000}\\x00\\0\"", computed_lexeme: None, line: 56, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 56, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 56, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 56, }, Token { kind: Identifier, lexeme: "char", computed_lexeme: None, line: 56, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 56, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 56, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 56, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 56, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 56, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 56, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 56, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 56, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 56, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 56, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 59, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 59, }, Token { kind: String, lexeme: "\"\\u{0}\\u{7F}\"", computed_lexeme: None, line: 59, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 59, }, Token { kind: String, lexeme: "\"\\x00\\x7F\"", computed_lexeme: None, line: 59, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 62, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 62, }, Token { kind: String, lexeme: "\"\\u{80}\\u{7FF}\"", computed_lexeme: None, line: 62, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 62, }, Token { kind: String, lexeme: "\"\\xC2\\x80\\xDF\\xBF\"", computed_lexeme: None, line: 62, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 65, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 65, }, Token { kind: String, lexeme: "\"\\u{800}\\u{FFFF}\"", computed_lexeme: None, line: 65, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 65, }, Token { kind: String, lexeme: "\"\\xE0\\xA0\\x80\\xEF\\xBF\\xBF\"", computed_lexeme: None, line: 65, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 68, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 68, }, Token { kind: String, lexeme: "\"\\u{10000}\\u{1FFFFF}\"", computed_lexeme: None, line: 68, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 68, }, Token { kind: String, lexeme: "\"\\xF0\\x90\\x80\\x80\\xF7\\xBF\\xBF\\xBF\"", computed_lexeme: None, line: 68, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "assert", 
computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: String, lexeme: "\"\\u{200000}\\u{3FFFFFF}\"", computed_lexeme: None, line: 71, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 71, }, Token { kind: String, lexeme: "\"\\xF8\\x88\\x80\\x80\\x80\\xFB\\xBF\\xBF\\xBF\\xBF\"", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 74, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 74, }, Token { kind: String, lexeme: "\"\\u{4000000}\\u{7FFFFFFF}\"", computed_lexeme: None, line: 74, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 74, }, Token { kind: String, lexeme: "\"\\xFC\\x84\\x80\\x80\\x80\\x80\\xFD\\xBF\\xBF\\xBF\\xBF\\xBF\"", computed_lexeme: None, line: 75, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 75, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 79, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 79, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 79, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 79, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 79, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "st", computed_lexeme: None, line: 80, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 80, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 80, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 80, }, Token { kind: String, lexeme: "'return '", computed_lexeme: None, line: 80, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 80, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 80, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 80, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 80, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 81, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 81, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 81, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 81, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 81, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 81, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 81, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 81, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 81, }, Token { kind: String, lexeme: "\"'\"", computed_lexeme: None, line: 81, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 81, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 82, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 
82, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 82, }, Token { kind: Identifier, lexeme: "st", computed_lexeme: None, line: 82, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 82, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 82, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 82, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 82, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 82, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 82, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 82, }, Token { kind: String, lexeme: "\"near .-\"", computed_lexeme: None, line: 82, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 82, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 82, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 82, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 82, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 85, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 85, }, Token { kind: MultilineString, lexeme: "[[\"abc\\x\"]]", computed_lexeme: None, line: 85, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 85, }, Token { kind: MultilineString, lexeme: "[[\\x\"]]", computed_lexeme: None, line: 85, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 85, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 86, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 86, }, Token { kind: MultilineString, lexeme: "[[\"abc\\x]]", computed_lexeme: None, line: 86, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 86, }, Token { kind: MultilineString, lexeme: "[[\\x]]", computed_lexeme: None, line: 86, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 86, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 87, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 87, }, Token { kind: MultilineString, lexeme: "[[\"\\x]]", computed_lexeme: None, line: 87, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 87, }, Token { kind: MultilineString, lexeme: "[[\\x]]", computed_lexeme: None, line: 87, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 87, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: MultilineString, lexeme: "[[\"\\x5\"]]", computed_lexeme: None, line: 88, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 88, }, Token { kind: MultilineString, lexeme: "[[\\x5\"]]", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: MultilineString, lexeme: "[[\"\\x5]]", computed_lexeme: None, line: 89, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 89, }, Token { kind: MultilineString, lexeme: "[[\\x5]]", computed_lexeme: None, line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: 
"lexerror", computed_lexeme: None, line: 90, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 90, }, Token { kind: MultilineString, lexeme: "[[\"\\xr\"]]", computed_lexeme: None, line: 90, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 90, }, Token { kind: MultilineString, lexeme: "[[\\xr]]", computed_lexeme: None, line: 90, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 90, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: MultilineString, lexeme: "[[\"\\xr]]", computed_lexeme: None, line: 91, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 91, }, Token { kind: MultilineString, lexeme: "[[\\xr]]", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 92, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 92, }, Token { kind: MultilineString, lexeme: "[[\"\\x.]]", computed_lexeme: None, line: 92, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 92, }, Token { kind: MultilineString, lexeme: "[[\\x.]]", computed_lexeme: None, line: 92, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 92, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 93, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 93, }, Token { kind: MultilineString, lexeme: "[[\"\\x8%\"]]", computed_lexeme: None, line: 93, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 93, }, Token { kind: MultilineString, lexeme: "[[\\x8%%]]", computed_lexeme: None, line: 93, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 94, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 94, }, Token { kind: MultilineString, lexeme: "[[\"\\xAG]]", computed_lexeme: None, line: 94, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 94, }, Token { kind: MultilineString, lexeme: "[[\\xAG]]", computed_lexeme: None, line: 94, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 94, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 95, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 95, }, Token { kind: MultilineString, lexeme: "[[\"\\g\"]]", computed_lexeme: None, line: 95, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 95, }, Token { kind: MultilineString, lexeme: "[[\\g]]", computed_lexeme: None, line: 95, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 95, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 96, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 96, }, Token { kind: MultilineString, lexeme: "[[\"\\g]]", computed_lexeme: None, line: 96, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 96, }, Token { kind: MultilineString, lexeme: "[[\\g]]", computed_lexeme: None, line: 96, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 97, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 97, }, Token { kind: MultilineString, lexeme: "[[\"\\.\"]]", computed_lexeme: None, line: 97, 
}, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 97, }, Token { kind: MultilineString, lexeme: "[[\\%.]]", computed_lexeme: None, line: 97, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 99, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 99, }, Token { kind: MultilineString, lexeme: "[[\"\\999\"]]", computed_lexeme: None, line: 99, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 99, }, Token { kind: MultilineString, lexeme: "[[\\999\"]]", computed_lexeme: None, line: 99, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 100, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 100, }, Token { kind: MultilineString, lexeme: "[[\"xyz\\300\"]]", computed_lexeme: None, line: 100, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 100, }, Token { kind: MultilineString, lexeme: "[[\\300\"]]", computed_lexeme: None, line: 100, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 100, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 101, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 101, }, Token { kind: MultilineString, lexeme: "[[\" \\256\"]]", computed_lexeme: None, line: 101, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 101, }, Token { kind: MultilineString, lexeme: "[[\\256\"]]", computed_lexeme: None, line: 101, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 101, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 104, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 104, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u{100000000}\"]]", computed_lexeme: None, line: 104, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 104, }, Token { kind: MultilineString, lexeme: "[[abc\\u{100000000]]", computed_lexeme: None, line: 104, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 105, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 105, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u11r\"]]", computed_lexeme: None, line: 105, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 105, }, Token { kind: MultilineString, lexeme: "[[abc\\u1]]", computed_lexeme: None, line: 105, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 105, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 106, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 106, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u\"]]", computed_lexeme: None, line: 106, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 106, }, Token { kind: MultilineString, lexeme: "[[abc\\u\"]]", computed_lexeme: None, line: 106, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 106, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 107, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 107, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u{11r\"]]", computed_lexeme: None, line: 107, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 107, }, Token { kind: MultilineString, 
lexeme: "[[abc\\u{11r]]", computed_lexeme: None, line: 107, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 108, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 108, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u{11\"]]", computed_lexeme: None, line: 108, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 108, }, Token { kind: MultilineString, lexeme: "[[abc\\u{11\"]]", computed_lexeme: None, line: 108, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 109, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 109, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u{11]]", computed_lexeme: None, line: 109, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 109, }, Token { kind: MultilineString, lexeme: "[[abc\\u{11]]", computed_lexeme: None, line: 109, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 109, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 110, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 110, }, Token { kind: MultilineString, lexeme: "[[\"abc\\u{r\"]]", computed_lexeme: None, line: 110, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 110, }, Token { kind: MultilineString, lexeme: "[[abc\\u{r]]", computed_lexeme: None, line: 110, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 110, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 113, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 113, }, Token { kind: String, lexeme: "\"[=[alo]]\"", computed_lexeme: None, line: 113, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 113, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 113, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 114, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 114, }, Token { kind: String, lexeme: "\"[=[alo]=\"", computed_lexeme: None, line: 114, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 114, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 114, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 115, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 115, }, Token { kind: String, lexeme: "\"[=[alo]\"", computed_lexeme: None, line: 115, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 115, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 115, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 115, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 116, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 116, }, Token { kind: String, lexeme: "\"'alo\"", computed_lexeme: None, line: 116, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 116, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 116, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 117, }, 
Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 117, }, Token { kind: String, lexeme: "\"'alo \\\\z \\n\\n\"", computed_lexeme: None, line: 117, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 117, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 117, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 118, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 118, }, Token { kind: String, lexeme: "\"'alo \\\\z\"", computed_lexeme: None, line: 118, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 118, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 118, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 118, }, Token { kind: Identifier, lexeme: "lexerror", computed_lexeme: None, line: 119, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 119, }, Token { kind: MultilineString, lexeme: "[['alo \\98]]", computed_lexeme: None, line: 119, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 119, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 119, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 119, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 122, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 122, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 122, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 122, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 122, }, Token { kind: Number, lexeme: "255", computed_lexeme: Some( "255", ), line: 122, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 122, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 123, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 123, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "char", computed_lexeme: None, line: 123, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 123, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 124, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 124, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 124, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 124, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 124, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 124, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 124, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 124, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 124, }, Token { kind: String, lexeme: "\"[a-zA-Z_]\"", computed_lexeme: None, line: 124, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 124, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 124, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 124, }, Token { kind: Identifier, lexeme: 
"load", computed_lexeme: None, line: 124, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 124, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 124, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 124, }, Token { kind: String, lexeme: "\"=1\"", computed_lexeme: None, line: 124, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 124, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 124, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 124, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 124, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 125, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 125, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 125, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 125, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 125, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 125, }, Token { kind: String, lexeme: "\"[a-zA-Z_0-9]\"", computed_lexeme: None, line: 125, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 125, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 125, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 126, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 126, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 126, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 126, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 126, }, Token { kind: String, lexeme: "\"1 = 1\"", computed_lexeme: None, line: 126, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 126, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 126, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 126, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 126, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 127, }, Token { kind: Identifier, lexeme: "var1", computed_lexeme: None, line: 132, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 132, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 132, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 132, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 132, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 132, }, Token { kind: Number, lexeme: "15000", computed_lexeme: Some( "15000", ), line: 132, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 132, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 132, }, Token { kind: String, lexeme: "'1'", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "var2", computed_lexeme: None, line: 133, }, Token { kind: 
Equals, lexeme: "=", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 133, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 133, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 133, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 133, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 133, }, Token { kind: Number, lexeme: "15000", computed_lexeme: Some( "15000", ), line: 133, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 133, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 133, }, Token { kind: String, lexeme: "'2'", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 134, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 134, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 134, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 134, }, Token { kind: MultilineString, lexeme: "[[\n %s = 5\n %s = %s + 1\n return function () return %s - %s end\n]]", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "var1", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "var2", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "var1", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "var1", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "var2", computed_lexeme: None, line: 138, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 138, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 139, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "dostring", computed_lexeme: None, line: 139, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 139, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 140, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 140, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "var1", computed_lexeme: None, line: 140, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 140, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 140, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 140, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 
140, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "var2", computed_lexeme: None, line: 140, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 140, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 140, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 140, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 140, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 140, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 140, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 140, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 140, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 140, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "var1", computed_lexeme: None, line: 141, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "var2", computed_lexeme: None, line: 141, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 141, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 141, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 142, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 142, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 142, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 142, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 145, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 145, }, Token { kind: String, lexeme: "\"\\n\\t\"", computed_lexeme: None, line: 145, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 145, }, Token { kind: MultilineString, lexeme: "[[\n\n\t]]", computed_lexeme: None, line: 147, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 148, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 148, }, Token { kind: MultilineString, lexeme: "[[\n\n $debug]]", computed_lexeme: None, line: 150, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 150, }, Token { kind: String, lexeme: "\"\\n $debug\"", computed_lexeme: None, line: 150, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 151, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 151, }, Token { kind: MultilineString, lexeme: "[[ [ ]]", computed_lexeme: None, line: 151, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 151, }, Token { kind: MultilineString, lexeme: "[[ ] ]]", computed_lexeme: None, line: 151, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 153, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 153, }, Token { kind: String, lexeme: 
"\"001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789\"", computed_lexeme: None, line: 153, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 154, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 154, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 154, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 154, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 154, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 154, }, Token { kind: Number, lexeme: "960", computed_lexeme: Some( "960", ), line: 154, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 155, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 155, }, Token { kind: MultilineString, lexeme: "[=[\nprint('+')\n\na1 = [[\"this is a 'string' with several 'quotes'\"]]\na2 = \"'quotes'\"\n\nassert(string.find(a1, a2) == 34)\nprint('+')\n\na1 = [==[temp = [[an arbitrary value]]; ]==]\nassert(load(a1))()\nassert(temp == 'an arbitrary value')\n-- long strings --\nb = \"001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789001234567890123456789012345678901234567891234567890123456789012345678901234567890012345678901234567890123456789012345678912345678901234567890123456789012345678900123456789012345678901234567890123456789123456789012345678901234567890123456789\"\nassert(string.len(b) == 960)\nprint('+')\n\na = 
[[00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n00123456789012345678901234567890123456789123456789012345678901234567890123456789\n]]\nassert(string.len(a) == 1863)\nassert(string.sub(a, 1, 40) == string.sub(b, 1, 40))\nx = 1\n]=]", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 201, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 201, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 201, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 201, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 202, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 202, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "dostring", computed_lexeme: None, line: 203, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 203, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 204, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 204, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 206, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 206, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 206, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 207, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 207, }, Token { 
kind: Nil, lexeme: "nil", computed_lexeme: None, line: 207, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 208, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 208, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 208, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 211, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 214, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 214, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 214, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 214, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 214, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 214, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 214, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 214, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 214, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 214, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 214, }, Token { kind: String, lexeme: "\"%p\"", computed_lexeme: None, line: 214, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 214, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 214, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 214, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 214, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "s1", computed_lexeme: None, line: 216, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "const", computed_lexeme: None, line: 216, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 216, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 216, }, Token { kind: String, lexeme: "\"01234567890123456789012345678901234567890123456789\"", computed_lexeme: None, line: 216, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 217, }, Token { kind: Identifier, lexeme: "s2", computed_lexeme: None, line: 217, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 217, }, Token { kind: Identifier, lexeme: "const", computed_lexeme: None, line: 217, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 217, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 217, }, Token { kind: String, lexeme: "\"01234567890123456789012345678901234567890123456789\"", computed_lexeme: None, line: 217, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 218, }, Token { kind: Identifier, lexeme: "s3", computed_lexeme: None, line: 218, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 218, }, Token { kind: String, lexeme: "\"01234567890123456789012345678901234567890123456789\"", computed_lexeme: None, line: 218, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 219, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 219, }, Token { kind: Identifier, lexeme: "foo", computed_lexeme: None, line: 219, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 219, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 219, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 219, }, Token { kind: 
Identifier, lexeme: "s1", computed_lexeme: None, line: 219, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 219, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 220, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "foo1", computed_lexeme: None, line: 220, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 220, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 220, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "s3", computed_lexeme: None, line: 220, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 220, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 221, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "foo2", computed_lexeme: None, line: 221, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 221, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 221, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 222, }, Token { kind: String, lexeme: "\"01234567890123456789012345678901234567890123456789\"", computed_lexeme: None, line: 222, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 223, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "a1", computed_lexeme: None, line: 224, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 224, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "s1", computed_lexeme: None, line: 224, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "a1", computed_lexeme: None, line: 225, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "s2", computed_lexeme: None, line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "a1", computed_lexeme: None, line: 226, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "foo", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, 
line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "a1", computed_lexeme: None, line: 227, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "foo1", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "a1", computed_lexeme: None, line: 228, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "foo2", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 230, }, Token { kind: Identifier, lexeme: "sd", computed_lexeme: None, line: 230, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 230, }, Token { kind: String, lexeme: "\"0123456789\"", computed_lexeme: None, line: 230, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 230, }, Token { kind: String, lexeme: "\"0123456789012345678901234567890123456789\"", computed_lexeme: None, line: 230, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 231, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "sd", computed_lexeme: None, line: 231, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "s1", computed_lexeme: None, line: 231, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "getadd", computed_lexeme: None, line: 231, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "sd", computed_lexeme: None, line: 231, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 231, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "a1", computed_lexeme: None, line: 231, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 231, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 232, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 236, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 236, }, Token { kind: MultilineString, lexeme: "[[\na = 1 -- a comment\nb = 2\n\n\nx = [=[\nhi\n]=]\ny = \"\\\nhello\\r\\n\\\n\"\nreturn require\"debug\".getinfo(1).currentline\n]]", computed_lexeme: None, 
line: 248, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 250, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: None, line: 250, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 250, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 250, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 250, }, Token { kind: Identifier, lexeme: "pairs", computed_lexeme: None, line: 250, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 250, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 250, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 250, }, Token { kind: String, lexeme: "\"\\r\"", computed_lexeme: None, line: 250, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 250, }, Token { kind: String, lexeme: "\"\\n\\r\"", computed_lexeme: None, line: 250, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 250, }, Token { kind: String, lexeme: "\"\\r\\n\"", computed_lexeme: None, line: 250, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 250, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 250, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 251, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "nn", computed_lexeme: None, line: 251, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 251, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "gsub", computed_lexeme: None, line: 251, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 251, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 251, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 251, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 251, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 252, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "dostring", computed_lexeme: None, line: 252, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "prog", computed_lexeme: None, line: 252, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 252, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "nn", computed_lexeme: None, line: 252, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 253, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 253, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 253, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 253, }, Token { kind: String, lexeme: "\"hi\\n\"", computed_lexeme: 
None, line: 253, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 253, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 253, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 253, }, Token { kind: String, lexeme: "\"\\nhello\\r\\n\\n\"", computed_lexeme: None, line: 253, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 253, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 254, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 258, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 258, }, Token { kind: MultilineString, lexeme: "[==[]=]==]", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 259, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 259, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 259, }, Token { kind: String, lexeme: "\"]=\"", computed_lexeme: None, line: 259, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 261, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 261, }, Token { kind: MultilineString, lexeme: "[==[[===[[=[]]=][====[]]===]===]==]", computed_lexeme: None, line: 261, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 262, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 262, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 262, }, Token { kind: String, lexeme: "\"[===[[=[]]=][====[]]===]===\"", computed_lexeme: None, line: 262, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 264, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 264, }, Token { kind: MultilineString, lexeme: "[====[[===[[=[]]=][====[]]===]===]====]", computed_lexeme: None, line: 264, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 265, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 265, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 265, }, Token { kind: String, lexeme: "\"[===[[=[]]=][====[]]===]===\"", computed_lexeme: None, line: 265, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 267, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 267, }, Token { kind: MultilineString, lexeme: "[=[]]]]]]]]]=]", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 268, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 268, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 268, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 268, }, Token { kind: String, lexeme: "\"]]]]]]]]\"", computed_lexeme: None, line: 268, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 268, }, 
Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 279, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 279, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 279, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 279, }, Token { kind: String, lexeme: "\"=\"", computed_lexeme: None, line: 279, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 279, }, Token { kind: String, lexeme: "\"[\"", computed_lexeme: None, line: 279, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 279, }, Token { kind: String, lexeme: "\"]\"", computed_lexeme: None, line: 279, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 279, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 279, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 279, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 280, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 280, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 280, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 280, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 281, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 281, }, Token { kind: Identifier, lexeme: "gen", computed_lexeme: None, line: 281, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 281, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 281, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 281, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 281, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 281, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 282, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 282, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 282, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 282, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 282, }, Token { kind: Identifier, lexeme: "coroutine", computed_lexeme: None, line: 282, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 282, }, Token { kind: Identifier, lexeme: "yield", computed_lexeme: None, line: 282, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 282, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 282, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 282, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 283, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: None, line: 284, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 284, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "pairs", computed_lexeme: None, line: 284, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 284, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 284, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "gen", computed_lexeme: None, line: 285, }, Token { kind: 
LeftParen, lexeme: "(", computed_lexeme: None, line: 285, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 285, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 285, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 285, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 285, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 285, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 285, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 285, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 285, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 286, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 287, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 288, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 290, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "coroutine", computed_lexeme: None, line: 290, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "wrap", computed_lexeme: None, line: 290, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 290, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 290, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 290, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "gen", computed_lexeme: None, line: 290, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 290, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 290, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 290, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 290, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 290, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 290, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 291, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: String, lexeme: "\"return [====[\\n\"", computed_lexeme: None, line: 291, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 291, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 291, }, Token { kind: String, lexeme: "\"]====]\"", computed_lexeme: None, line: 291, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 291, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: 
RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 292, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 296, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "setlocale", computed_lexeme: None, line: 296, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 296, }, Token { kind: String, lexeme: "\"pt_BR\"", computed_lexeme: None, line: 296, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 296, }, Token { kind: Or, lexeme: "or", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 296, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "setlocale", computed_lexeme: None, line: 296, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 296, }, Token { kind: String, lexeme: "\"ptb\"", computed_lexeme: None, line: 296, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 296, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 297, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 297, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 297, }, Token { kind: String, lexeme: "\"3,4\"", computed_lexeme: None, line: 297, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 297, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 297, }, Token { kind: Number, lexeme: "3.4", computed_lexeme: Some( "3.4", ), line: 297, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 297, }, Token { kind: String, lexeme: "\"3.4\"", computed_lexeme: None, line: 297, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 297, }, Token { kind: Number, lexeme: "3.4", computed_lexeme: Some( "3.4", ), line: 297, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 298, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 298, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 298, }, Token { kind: String, lexeme: "\" -.4 \"", computed_lexeme: None, line: 298, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 298, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 298, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 298, }, Token { kind: Number, lexeme: "0.4", computed_lexeme: Some( "0.4", ), line: 298, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 299, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 299, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 299, }, Token { kind: String, lexeme: "\" +0x.41 \"", computed_lexeme: None, line: 299, }, Token { 
kind: RightParen, lexeme: ")", computed_lexeme: None, line: 299, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 299, }, Token { kind: Number, lexeme: "0X0.41", computed_lexeme: Some( "0.25390625", ), line: 299, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 300, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 300, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 300, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 300, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 300, }, Token { kind: String, lexeme: "\"a = (3,4)\"", computed_lexeme: None, line: 300, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 300, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 300, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 301, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 301, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 301, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 301, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 301, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 301, }, Token { kind: String, lexeme: "\"return 3.4\"", computed_lexeme: None, line: 301, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 301, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 301, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 301, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 301, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 301, }, Token { kind: Number, lexeme: "3.4", computed_lexeme: Some( "3.4", ), line: 301, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 301, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 302, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 302, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 302, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 302, }, Token { kind: String, lexeme: "\"return .4,3\"", computed_lexeme: None, line: 302, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 302, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 302, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 302, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 302, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 302, }, Token { kind: Number, lexeme: ".4", computed_lexeme: Some( "0.4", ), line: 302, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 303, }, Token { kind: String, lexeme: "\"return 4.\"", computed_lexeme: None, line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 303, }, Token { kind: Number, lexeme: "4.", computed_lexeme: Some( "4.0", ), line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: String, lexeme: "\"return 4.+.5\"", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 304, }, Token { kind: Number, lexeme: "4.5", computed_lexeme: Some( "4.5", ), line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 306, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 306, }, Token { kind: String, lexeme: "\" 0x.1 \"", computed_lexeme: None, line: 306, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 306, }, Token { kind: String, lexeme: "\" 0x,1\"", computed_lexeme: None, line: 306, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 306, }, Token { kind: String, lexeme: "\"-0X.1\\t\"", computed_lexeme: None, line: 306, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 306, }, Token { kind: Number, lexeme: "0x0.1", computed_lexeme: Some( "0.0625", ), line: 306, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 308, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 308, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 308, }, Token { kind: String, lexeme: "\"inf\"", computed_lexeme: None, line: 308, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 308, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 308, }, Token { kind: String, lexeme: "\"NAN\"", computed_lexeme: None, line: 308, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 310, }, Token { 
kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 310, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: String, lexeme: "\"return %q\"", computed_lexeme: None, line: 310, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 310, }, Token { kind: Number, lexeme: "4.51", computed_lexeme: Some( "4.51", ), line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 310, }, Token { kind: Number, lexeme: "4.51", computed_lexeme: Some( "4.51", ), line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 312, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 312, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 312, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 312, }, Token { kind: String, lexeme: "\"return 4.5.\"", computed_lexeme: None, line: 312, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 313, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 313, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 313, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 313, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 313, }, Token { kind: String, lexeme: "\"'4%.5%.'\"", computed_lexeme: None, line: 313, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 313, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 315, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 315, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "setlocale", computed_lexeme: None, line: 315, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 315, }, Token { kind: String, lexeme: "\"C\"", computed_lexeme: None, line: 315, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 315, }, Token { kind: RightParen, 
lexeme: ")", computed_lexeme: None, line: 315, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 316, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 317, }, Token { kind: Identifier, lexeme: "Message", computed_lexeme: None, line: 317, }, Token { kind: Or, lexeme: "or", computed_lexeme: None, line: 317, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 317, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 317, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 317, }, Token { kind: String, lexeme: "'\\n >>> pt_BR locale not available: skipping decimal point tests <<<\\n'", computed_lexeme: None, line: 318, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 318, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 319, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 323, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 323, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 323, }, Token { kind: String, lexeme: "\"a string with \\r and \\n and \\r\\n and \\n\\r\"", computed_lexeme: None, line: 323, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 324, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 324, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 324, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 324, }, Token { kind: String, lexeme: "\"return %q\"", computed_lexeme: None, line: 324, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 324, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 325, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 325, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 325, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 325, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 325, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 325, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 325, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 325, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 325, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 328, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 328, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 328, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 328, }, Token { kind: String, lexeme: "\"a = 'non-ending string\"", computed_lexeme: None, line: 328, }, Token { kind: 
RightParen, lexeme: ")", computed_lexeme: None, line: 328, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 329, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 329, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 329, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 329, }, Token { kind: String, lexeme: "\"a = 'non-ending string\\n'\"", computed_lexeme: None, line: 329, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 329, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 330, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 330, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 330, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 330, }, Token { kind: String, lexeme: "\"a = '\\\\345'\"", computed_lexeme: None, line: 330, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 330, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 331, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 331, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 331, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 331, }, Token { kind: String, lexeme: "\"a = [=x]\"", computed_lexeme: None, line: 331, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 331, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 333, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 333, }, Token { kind: Identifier, lexeme: "malformednum", computed_lexeme: None, line: 333, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 333, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 333, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 333, }, Token { kind: Identifier, lexeme: "exp", computed_lexeme: None, line: 333, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 333, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 334, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 334, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 334, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 334, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 334, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 334, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 334, }, Token { kind: String, lexeme: "\"return \"", computed_lexeme: None, line: 334, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 334, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 334, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 334, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 335, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 335, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 335, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 335, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 335, }, Token { kind: 
LeftParen, lexeme: "(", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 335, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "exp", computed_lexeme: None, line: 335, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 335, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 335, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 336, }, Token { kind: Identifier, lexeme: "malformednum", computed_lexeme: None, line: 338, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 338, }, Token { kind: String, lexeme: "\"0xe-\"", computed_lexeme: None, line: 338, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 338, }, Token { kind: String, lexeme: "\"near \"", computed_lexeme: None, line: 338, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 338, }, Token { kind: Identifier, lexeme: "malformednum", computed_lexeme: None, line: 339, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 339, }, Token { kind: String, lexeme: "\"0xep-p\"", computed_lexeme: None, line: 339, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 339, }, Token { kind: String, lexeme: "\"malformed number\"", computed_lexeme: None, line: 339, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 339, }, Token { kind: Identifier, lexeme: "malformednum", computed_lexeme: None, line: 340, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 340, }, Token { kind: String, lexeme: "\"1print()\"", computed_lexeme: None, line: 340, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 340, }, Token { kind: String, lexeme: "\"malformed number\"", computed_lexeme: None, line: 340, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 340, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 342, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 342, }, Token { kind: String, lexeme: "'OK'", computed_lexeme: None, line: 342, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 342, }, ]