---
source: src/main.rs
expression: scanned
input_file: test-data/lua5.4-tests/files.lua
---
[ Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 4, }, Token { kind: Identifier, lexeme: "debug", computed_lexeme: None, line: 4, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 4, }, Token { kind: Identifier, lexeme: "require", computed_lexeme: None, line: 4, }, Token { kind: String, lexeme: "\"debug\"", computed_lexeme: None, line: 4, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 6, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "math", computed_lexeme: None, line: 6, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "maxinteger", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 8, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 8, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 8, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 8, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 8, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 8, }, Token { kind: Identifier, lexeme: "getenv", computed_lexeme: None, line: 8, }, Token { kind: String, lexeme: "\"PATH\"", computed_lexeme: None, line: 8, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 8, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 8, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 8, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 8, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 10, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 10, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 10, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 10, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 10, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 10, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 10, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 10, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 11, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 11, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 11, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 11, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 11, },
Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 11, }, Token { kind: String, lexeme: "\"non-existent-file\"", computed_lexeme: None, line: 11, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 11, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 12, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 12, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 12, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 12, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 12, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 12, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 12, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 12, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 12, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 15, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "testerr", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 15, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 15, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 15, }, Token { kind: TripleDot, lexeme: "...", computed_lexeme: None, line: 15, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 15, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "stat", computed_lexeme: None, line: 16, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 16, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 16, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 16, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 16, }, Token { kind: TripleDot, lexeme: "...", computed_lexeme: None, line: 16, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 16, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 17, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 17, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "stat", computed_lexeme: None, line: 17, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 17, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 17, }, Token { kind: Identifier, 
lexeme: "find", computed_lexeme: None, line: 17, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "err", computed_lexeme: None, line: 17, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 17, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 17, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 17, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 17, }, Token { kind: True, lexeme: "true", computed_lexeme: None, line: 17, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 17, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 17, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 18, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 21, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 21, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 21, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 21, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 21, }, Token { kind: TripleDot, lexeme: "...", computed_lexeme: None, line: 21, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 22, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "testerr", computed_lexeme: None, line: 22, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 22, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 22, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 22, }, Token { kind: TripleDot, lexeme: "...", computed_lexeme: None, line: 22, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 22, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 22, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 23, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 27, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 27, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 27, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 27, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 27, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 27, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 27, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 27, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 28, }, Token { 
kind: Dot, lexeme: ".", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 28, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 28, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 28, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 28, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 28, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 29, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 29, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 29, }, Token { kind: Identifier, lexeme: "stderr", computed_lexeme: None, line: 29, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 29, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 29, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 29, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 29, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 29, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 32, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 32, }, Token { kind: String, lexeme: "\"got no value\"", computed_lexeme: None, line: 32, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 32, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 32, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 32, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 32, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 32, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 32, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 32, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 35, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 35, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 35, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 35, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 35, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 35, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 35, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 35, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 35, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 35, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 35, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 36, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 36, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 36, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 36, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 36, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 36, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 36, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 36, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 36, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 36, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "stderr", computed_lexeme: None, line: 36, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 36, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 36, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 36, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 36, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 37, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 37, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 37, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 37, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 37, }, Token { kind: Number, lexeme: "8", computed_lexeme: Some( "8", ), line: 37, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 37, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 37, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 38, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 38, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 38, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 38, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 38, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 38, }, Token { kind: Identifier, lexeme: "setmetatable", computed_lexeme: None, line: 38, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 38, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 38, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 38, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: 
None, line: 38, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 38, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 38, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 39, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 39, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 39, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 39, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 39, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 39, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 41, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "getmetatable", computed_lexeme: None, line: 41, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 41, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 41, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 41, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 41, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 41, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "__name", computed_lexeme: None, line: 41, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 41, }, Token { kind: String, lexeme: "\"FILE*\"", computed_lexeme: None, line: 41, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 41, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 43, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 43, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 43, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 43, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 43, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 43, }, Token { kind: String, lexeme: "'xuxu_nao_existe'", computed_lexeme: None, line: 43, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 44, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 44, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 44, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 44, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 44, }, Token { 
kind: Identifier, lexeme: "b", computed_lexeme: None, line: 44, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 44, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 44, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 44, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 44, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 44, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 44, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 44, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 44, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 46, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 46, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 46, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 46, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 46, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 46, }, Token { kind: String, lexeme: "'/a/b/c/d'", computed_lexeme: None, line: 46, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 46, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 46, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 47, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 47, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 47, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 47, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 49, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 49, }, Token { kind: Equals, lexeme: 
"=", computed_lexeme: None, line: 49, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 49, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 49, }, Token { kind: Identifier, lexeme: "tmpname", computed_lexeme: None, line: 49, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 49, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 49, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 50, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 50, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 50, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 50, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 50, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 50, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 50, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 50, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 50, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 50, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 50, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 50, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 50, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 51, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 51, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 51, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 52, }, Token { kind: Identifier, lexeme: "Message", computed_lexeme: None, line: 52, }, Token { kind: Or, lexeme: "or", computed_lexeme: None, line: 52, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 52, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 52, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 52, }, Token { kind: String, lexeme: "\"'os.tmpname' file cannot be open; skipping file tests\"", computed_lexeme: None, line: 52, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 52, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 54, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 55, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 55, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 55, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 55, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 55, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 57, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 57, }, Token { kind: String, lexeme: "'testing i/o'", computed_lexeme: None, line: 57, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 57, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 59, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 59, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "tmpname", computed_lexeme: None, line: 59, }, Token { kind: LeftParen, 
lexeme: "(", computed_lexeme: None, line: 59, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 61, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 61, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 61, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 61, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 61, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 61, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 61, }, Token { kind: String, lexeme: "\"rw\"", computed_lexeme: None, line: 61, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 62, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 62, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 62, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 62, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 62, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 62, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 62, }, Token { kind: String, lexeme: "\"rb+\"", computed_lexeme: None, line: 62, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 63, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 63, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 63, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 63, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 63, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 63, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 63, }, Token { kind: String, lexeme: "\"r+bk\"", computed_lexeme: None, line: 63, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 64, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 64, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 64, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 64, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 64, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 64, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 64, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 64, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 64, }, Token { kind: Comma, lexeme: ",", 
computed_lexeme: None, line: 64, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 64, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 64, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 65, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 65, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 65, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 65, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 65, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 65, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 65, }, Token { kind: String, lexeme: "\"+\"", computed_lexeme: None, line: 65, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 66, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 66, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 66, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 66, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 66, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 66, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 66, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 66, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 66, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 66, }, Token { kind: String, lexeme: "\"b\"", computed_lexeme: None, line: 66, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 66, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 67, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 67, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 67, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 67, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 67, }, Token { kind: String, lexeme: "\"r+b\"", computed_lexeme: None, line: 67, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 67, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 67, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 67, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 67, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 68, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 68, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 68, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 
68, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 68, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 68, }, Token { kind: String, lexeme: "\"r+\"", computed_lexeme: None, line: 68, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 68, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 68, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 68, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 68, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 69, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 69, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 69, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 69, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 69, }, Token { kind: String, lexeme: "\"rb\"", computed_lexeme: None, line: 69, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 69, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 69, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 69, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 69, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 71, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "setlocale", computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: String, lexeme: "'C'", computed_lexeme: None, line: 71, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 71, }, Token { kind: String, lexeme: "'all'", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 73, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 73, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 73, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 73, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 73, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 73, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 73, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 73, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 73, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 73, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 73, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 75, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 75, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 75, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 75, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 75, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 75, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 76, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 76, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 76, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 76, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 76, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 76, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 76, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 76, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 77, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 77, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 77, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 77, }, Token { kind: Identifier, lexeme: "dofile", computed_lexeme: None, line: 77, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 77, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 77, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 77, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 78, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 78, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 78, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 78, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 78, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 78, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 79, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 79, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 79, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 80, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 80, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, 
line: 80, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 80, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 80, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 80, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 80, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 80, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 80, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 82, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 82, }, Token { kind: Identifier, lexeme: "_port", computed_lexeme: None, line: 82, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 82, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "status", computed_lexeme: None, line: 83, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 83, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "code", computed_lexeme: None, line: 83, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 83, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 83, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 83, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 83, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 83, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 83, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 83, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 84, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 84, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 84, }, Token { kind: Identifier, lexeme: "status", computed_lexeme: None, line: 84, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 84, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 84, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 84, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 84, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 84, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 84, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 84, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 84, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 84, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 84, }, Token { kind: Identifier, lexeme: "code", computed_lexeme: None, line: 84, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 84, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 84, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 84, }, Token { 
kind: RightParen, lexeme: ")", computed_lexeme: None, line: 84, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 85, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 87, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 87, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 87, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 87, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 87, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 87, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 87, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 87, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 87, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 87, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 87, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 87, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 87, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 87, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 88, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: String, lexeme: "\"alo alo\"", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 88, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: String, lexeme: "\"alo alo\"", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 89, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: String, lexeme: "\"cur\"", computed_lexeme: None, line: 89, }, Token { kind: Comma, 
lexeme: ",", computed_lexeme: None, line: 89, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 89, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 89, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: String, lexeme: "\"alo alo\"", computed_lexeme: None, line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 89, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 90, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 90, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 90, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 90, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 90, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 90, }, Token { kind: String, lexeme: "\"joao\"", computed_lexeme: None, line: 90, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 90, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 90, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 91, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: String, lexeme: "\"end\"", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 91, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: String, lexeme: "\"alo joao\"", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 93, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 93, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: 
"output", computed_lexeme: None, line: 93, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 93, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 93, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 93, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 93, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 93, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 93, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 93, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 93, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 95, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 95, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 95, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 95, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 95, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 95, }, Token { kind: String, lexeme: "'\"álo\"'", computed_lexeme: None, line: 95, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 95, }, Token { kind: String, lexeme: "\"{a}\\n\"", computed_lexeme: None, line: 95, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 95, }, Token { kind: String, lexeme: "\"second line\\n\"", computed_lexeme: None, line: 95, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 95, }, Token { kind: String, lexeme: "\"third line \\n\"", computed_lexeme: None, line: 95, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 95, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 95, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 96, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 96, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 96, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 96, }, Token { kind: String, lexeme: "'çfourth_line'", computed_lexeme: None, line: 96, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 96, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 97, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 97, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 97, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 97, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 98, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 98, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 98, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 99, }, Token { 
kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 99, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 99, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 99, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 99, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 99, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 99, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "rawequal", computed_lexeme: None, line: 99, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 99, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 99, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 99, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 99, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 99, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 99, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 99, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 100, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 100, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 100, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 100, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 103, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 103, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 103, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 104, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 104, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 104, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 104, }, Token { kind: Number, lexeme: "120", computed_lexeme: Some( "120", ), line: 104, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 104, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 105, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 105, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 105, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 105, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 105, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 105, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 105, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 106, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 106, }, Token { kind: Identifier, lexeme: 
"input", computed_lexeme: None, line: 106, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 106, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 106, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 106, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 107, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 107, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 107, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 107, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 107, }, Token { kind: String, lexeme: "'r'", computed_lexeme: None, line: 107, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 107, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 108, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 108, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 108, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 108, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 109, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 110, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 110, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 110, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 113, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 113, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 113, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 113, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 113, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 113, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 114, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 114, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 114, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 116, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 116, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "rename", computed_lexeme: None, line: 116, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 116, }, Token { kind: 
Comma, lexeme: ",", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 116, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 116, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 117, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 117, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 117, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "rename", computed_lexeme: None, line: 117, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 117, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 117, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 117, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 119, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 119, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 119, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 119, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 119, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 119, }, Token { kind: String, lexeme: "\"ab\"", computed_lexeme: None, line: 119, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 119, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 120, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 120, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 120, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 120, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 120, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 120, }, Token { kind: String, lexeme: "\"\\n\\n\\t\\t \"", computed_lexeme: None, line: 120, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 120, }, Token { kind: Number, lexeme: "3450", computed_lexeme: Some( "3450", ), line: 120, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 120, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 120, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 120, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 120, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 120, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 121, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 121, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 121, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: 
None, line: 121, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 121, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 124, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "F", computed_lexeme: None, line: 126, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 126, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 126, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 127, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 128, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 128, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 128, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 128, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 128, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 128, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 128, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 128, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 128, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 128, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "F", computed_lexeme: None, line: 129, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 129, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 129, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 130, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 131, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 131, }, Token { kind: Identifier, lexeme: "tostring", computed_lexeme: None, line: 131, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 131, }, Token { kind: Identifier, lexeme: "F", computed_lexeme: None, line: 131, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 131, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 131, }, Token { kind: String, lexeme: "\"file (closed)\"", computed_lexeme: None, line: 131, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 131, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 133, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 133, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 133, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 133, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 133, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 133, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 136, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 138, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 138, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 138, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 138, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 138, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 138, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 138, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 138, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 139, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 139, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 139, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 139, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 139, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 140, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 140, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 140, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 140, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 140, }, Token { kind: String, lexeme: "\"0X%x\\n\"", computed_lexeme: None, line: 140, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 140, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 140, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 141, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 141, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 141, }, Token { kind: String, lexeme: "\"0xABCp-3\"", computed_lexeme: None, line: 141, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 141, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 141, }, Token { kind: RightParen, 
lexeme: ")", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 142, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 142, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 142, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 142, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 142, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 142, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 142, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 142, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 143, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 143, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 143, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 143, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 143, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 143, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 143, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 143, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 143, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 144, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 144, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 144, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 144, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 144, }, Token { kind: String, lexeme: "\"0x%X\\n\"", computed_lexeme: None, line: 144, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 144, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 144, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 144, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 145, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 145, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 145, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 145, }, Token { kind: String, lexeme: "\"-0xABCp-3\"", computed_lexeme: None, line: 145, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 145, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 145, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 145, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 146, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 146, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 146, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 146, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 146, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 146, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 146, }, Token { kind: 
RightParen, lexeme: ")", computed_lexeme: None, line: 146, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 147, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 147, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 147, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 147, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 147, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 147, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 147, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 147, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 147, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 147, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 148, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 148, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 148, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 148, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 148, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 148, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 148, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 149, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 149, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 149, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 149, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 149, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 149, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 149, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 150, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 150, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 150, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 150, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: 
None, line: 150, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 150, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 150, }, Token { kind: Number, lexeme: "0xABCp-3", computed_lexeme: Some( "343.5", ), line: 150, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 151, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 151, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 151, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 151, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 151, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 151, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 151, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 151, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 152, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 152, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 152, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 152, }, Token { kind: String, lexeme: "\"*n\"", computed_lexeme: None, line: 152, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 152, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 152, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 152, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 153, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 153, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 153, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 153, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 153, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 153, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 153, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 153, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 153, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 153, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 153, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 153, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 154, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 154, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 154, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 154, }, Token { kind: String, lexeme: "\"*n\"", computed_lexeme: None, line: 154, }, 
Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 154, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 154, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 154, }, Token { kind: Number, lexeme: "0xABCp-3", computed_lexeme: Some( "343.5", ), line: 154, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 154, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 155, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 156, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 156, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 156, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 156, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 156, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 156, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 160, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 161, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 161, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 161, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 161, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 161, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 161, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 161, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 161, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 161, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 161, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 162, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 162, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 162, }, Token { kind: MultilineString, lexeme: "[[\na line\nanother line\n1234\n3.45\none\ntwo\nthree\n]]", computed_lexeme: None, line: 170, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "l2", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "l3", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "l4", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: 
Identifier, lexeme: "n1", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "n2", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 171, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "dummy", computed_lexeme: None, line: 171, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 172, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 172, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 172, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 172, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 172, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 172, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 172, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 172, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 173, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 173, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 173, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 173, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 173, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 173, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 173, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 173, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 173, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 173, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 174, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "l2", computed_lexeme: None, line: 174, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "n1", computed_lexeme: None, line: 174, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "n2", computed_lexeme: None, line: 174, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "dummy", computed_lexeme: None, line: 174, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 174, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 174, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 174, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 174, }, Token { kind: Comma, 
lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 174, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 174, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 174, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 174, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 175, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 175, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 175, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 175, }, Token { kind: String, lexeme: "\"a line\"", computed_lexeme: None, line: 175, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 175, }, Token { kind: Identifier, lexeme: "l2", computed_lexeme: None, line: 175, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 175, }, Token { kind: String, lexeme: "\"another line\\n\"", computed_lexeme: None, line: 175, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 175, }, Token { kind: Identifier, lexeme: "n1", computed_lexeme: None, line: 176, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 176, }, Token { kind: Number, lexeme: "1234", computed_lexeme: Some( "1234", ), line: 176, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 176, }, Token { kind: Identifier, lexeme: "n2", computed_lexeme: None, line: 176, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 176, }, Token { kind: Number, lexeme: "3.45", computed_lexeme: Some( "3.45", ), line: 176, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 176, }, Token { kind: Identifier, lexeme: "dummy", computed_lexeme: None, line: 176, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 176, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 176, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 176, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 177, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 177, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 177, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 177, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 177, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 177, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 177, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 177, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 178, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 178, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 178, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 178, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 178, }, Token { kind: Dot, lexeme: ".", computed_lexeme: 
None, line: 178, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 178, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 178, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 178, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 178, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 178, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "l2", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "n1", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "n2", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "l3", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "l4", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "dummy", computed_lexeme: None, line: 179, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 179, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 179, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 179, }, Token { kind: Number, lexeme: "7", computed_lexeme: Some( "7", ), line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 179, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 179, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 179, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 180, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 180, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 180, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 180, }, Token { kind: String, lexeme: "\"a line\\n\"", computed_lexeme: None, line: 180, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 180, }, Token 
{ kind: Identifier, lexeme: "l2", computed_lexeme: None, line: 180, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 180, }, Token { kind: String, lexeme: "\"another line\"", computed_lexeme: None, line: 180, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 180, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 180, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 180, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 180, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 180, }, Token { kind: Identifier, lexeme: "n1", computed_lexeme: None, line: 181, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 181, }, Token { kind: Number, lexeme: "1234", computed_lexeme: Some( "1234", ), line: 181, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "n2", computed_lexeme: None, line: 181, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 181, }, Token { kind: Number, lexeme: "3.45", computed_lexeme: Some( "3.45", ), line: 181, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "l3", computed_lexeme: None, line: 181, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 181, }, Token { kind: String, lexeme: "\"one\"", computed_lexeme: None, line: 181, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "l4", computed_lexeme: None, line: 181, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 181, }, Token { kind: String, lexeme: "\"two\"", computed_lexeme: None, line: 181, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 182, }, Token { kind: Identifier, lexeme: "dummy", computed_lexeme: None, line: 182, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 182, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 182, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 182, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 183, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 183, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 183, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 183, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 183, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 183, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 183, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 183, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 184, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 184, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 184, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 184, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 184, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: 
"open", computed_lexeme: None, line: 184, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 184, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 184, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 184, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 184, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 186, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "n1", computed_lexeme: None, line: 186, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "n2", computed_lexeme: None, line: 186, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "dummy", computed_lexeme: None, line: 186, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 186, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 186, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 186, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 186, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 186, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 186, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 186, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 186, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 186, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 186, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 187, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 187, }, Token { kind: Identifier, lexeme: "l1", computed_lexeme: None, line: 187, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 187, }, Token { kind: String, lexeme: "\"a line\"", computed_lexeme: None, line: 187, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 187, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 187, }, Token { kind: Identifier, lexeme: "n1", computed_lexeme: None, line: 187, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 187, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 188, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 189, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 189, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 189, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 189, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 189, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 189, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 189, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 189, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 189, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 194, }, Token { kind: Equals, lexeme: "=", 
computed_lexeme: None, line: 194, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 194, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 194, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 194, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 194, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 194, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 194, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 194, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 194, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 194, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 194, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 194, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 195, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 195, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 195, }, Token { kind: MultilineString, lexeme: "[[\nlocal x, z = coroutine.yield(10)\nlocal y = coroutine.yield(20)\nreturn x + y * z\n]]", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 200, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 200, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 200, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 200, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 200, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 200, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 200, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 200, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 201, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 201, }, Token { kind: Identifier, lexeme: "coroutine", computed_lexeme: None, line: 201, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 201, }, Token { kind: Identifier, lexeme: "wrap", computed_lexeme: None, line: 201, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 201, }, Token { kind: Identifier, lexeme: "dofile", computed_lexeme: None, line: 201, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 201, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 202, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 202, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 202, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 202, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 202, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 202, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 203, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 203, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 203, }, Token { kind: Number, lexeme: "100", computed_lexeme: Some( 
"100", ), line: 203, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 203, }, Token { kind: Number, lexeme: "101", computed_lexeme: Some( "101", ), line: 203, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 203, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 203, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 203, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 204, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 204, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 204, }, Token { kind: Number, lexeme: "200", computed_lexeme: Some( "200", ), line: 204, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 204, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 204, }, Token { kind: Number, lexeme: "100", computed_lexeme: Some( "100", ), line: 204, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 204, }, Token { kind: Number, lexeme: "200", computed_lexeme: Some( "200", ), line: 204, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 204, }, Token { kind: Number, lexeme: "101", computed_lexeme: Some( "101", ), line: 204, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 205, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 205, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 205, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 205, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 205, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 208, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 208, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 208, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 208, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 208, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 208, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 208, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 208, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 210, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 210, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 210, }, Token { kind: MultilineString, lexeme: "[[\n-12.3-\t-0xffff+ .3|5.E-3X +234e+13E 0xDEADBEEFDEADBEEFx\n0x1.13Ap+3e\n]]", computed_lexeme: None, line: 
213, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 215, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 215, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 215, }, Token { kind: String, lexeme: "\"1234\"", computed_lexeme: None, line: 215, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 215, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 215, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 215, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 215, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 215, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 215, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 215, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 215, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 215, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 215, }, Token { kind: String, lexeme: "\"0\"", computed_lexeme: None, line: 215, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 215, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 215, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 215, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 215, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 215, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 215, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 215, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 217, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 217, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 217, }, Token { kind: MultilineString, lexeme: "[[\n.e+\t0.e;\t--; 0xX;\n]]", computed_lexeme: None, line: 219, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 220, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 220, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 220, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 220, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 220, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 221, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 221, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 221, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: 
"open", computed_lexeme: None, line: 221, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 221, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 221, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 221, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 221, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 222, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 222, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 222, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 222, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 222, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 222, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 222, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 222, }, Token { kind: Number, lexeme: "12.3", computed_lexeme: Some( "12.3", ), line: 222, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 222, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 222, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 222, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 222, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 222, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 222, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 222, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 222, }, Token { kind: String, lexeme: "\"-\"", computed_lexeme: None, line: 222, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 223, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 223, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 223, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 223, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 223, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 223, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 223, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 223, }, Token { kind: Number, lexeme: "0xffff", computed_lexeme: Some( "0xffff", ), line: 223, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 223, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 223, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: 
None, line: 223, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 223, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 223, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 223, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 223, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 223, }, Token { kind: String, lexeme: "\"+ \"", computed_lexeme: None, line: 223, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 224, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 224, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 224, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 224, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 224, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 224, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 224, }, Token { kind: Number, lexeme: "0.3", computed_lexeme: Some( "0.3", ), line: 224, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 224, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 224, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 224, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 224, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 224, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 224, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 224, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 224, }, Token { kind: String, lexeme: "\"|\"", computed_lexeme: None, line: 224, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 224, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 225, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 225, }, Token { kind: Number, lexeme: "5e-3", computed_lexeme: Some( "5e-3", ), line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 225, 
}, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 225, }, Token { kind: String, lexeme: "\"X\"", computed_lexeme: None, line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 226, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 226, }, Token { kind: Number, lexeme: "234e13", computed_lexeme: Some( "234e13", ), line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 226, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 226, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 226, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 226, }, Token { kind: String, lexeme: "\"E\"", computed_lexeme: None, line: 226, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 227, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 227, }, Token { kind: Number, lexeme: "0Xdeadbeefdeadbeef", computed_lexeme: Some( "0Xdeadbeefdeadbeef", ), line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: 
None, line: 227, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 227, }, Token { kind: String, lexeme: "\"x\\n\"", computed_lexeme: None, line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 228, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 228, }, Token { kind: Number, lexeme: "0x1.13aP3", computed_lexeme: Some( "8.61328125", ), line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 228, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 228, }, Token { kind: String, lexeme: "\"e\"", computed_lexeme: None, line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 230, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 231, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 231, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 231, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 231, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 231, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 231, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 231, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 231, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 232, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 232, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 232, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 232, }, Token { kind: 
Colon, lexeme: ":", computed_lexeme: None, line: 232, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 232, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 232, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 232, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 232, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 233, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 233, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 233, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 233, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 233, }, Token { kind: String, lexeme: "\"^00*\\n$\"", computed_lexeme: None, line: 233, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 233, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 233, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 234, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 236, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 236, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 236, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 236, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 236, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 236, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 236, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 236, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 236, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 236, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 236, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 236, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 236, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 236, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 236, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 236, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 236, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 236, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 236, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 236, }, Token { kind: String, lexeme: "\"e+\"", computed_lexeme: None, line: 236, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 236, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 237, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 237, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 237, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 237, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 237, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 237, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 237, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 237, 
}, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 237, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 237, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 237, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 237, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 237, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 237, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 237, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 237, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 237, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 237, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 237, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 237, }, Token { kind: String, lexeme: "\";\"", computed_lexeme: None, line: 237, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 237, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 238, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 238, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 238, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 238, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 238, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 238, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 238, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 238, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 238, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 238, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 238, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 238, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 238, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 238, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 238, }, Token { kind: String, lexeme: "\"-;\"", computed_lexeme: None, line: 238, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 239, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 239, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 239, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 239, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 239, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 239, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 239, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 239, }, Token { kind: Semicolon, lexeme: ";", 
computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 239, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 239, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 239, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 239, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 239, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 239, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 239, }, Token { kind: String, lexeme: "\"X\"", computed_lexeme: None, line: 239, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 240, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 240, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 240, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 240, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 240, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 240, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 240, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 240, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 240, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 240, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 240, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 240, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 240, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 240, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 240, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 240, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 240, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 240, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 240, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 240, }, Token { kind: String, lexeme: "\";\"", computed_lexeme: None, line: 240, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 240, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 241, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 241, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 241, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 241, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 241, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 241, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 241, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 241, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 241, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 241, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 241, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 241, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 241, }, Token { 
kind: Not, lexeme: "not", computed_lexeme: None, line: 241, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 241, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 241, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 241, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 241, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 241, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 241, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 241, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 242, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 242, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 242, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 242, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 242, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 243, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 243, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 243, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 243, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 243, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 243, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 243, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 243, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 243, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 247, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 247, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 247, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 247, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 247, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 247, }, Token { kind: String, lexeme: "\"non-existent-file\"", computed_lexeme: None, line: 247, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 247, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 248, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 248, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "rename", computed_lexeme: None, line: 248, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 248, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 248, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 248, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 249, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 249, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 249, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 249, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 249, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 249, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 250, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 250, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 250, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 250, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 251, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 251, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 251, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 251, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 251, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 251, }, Token { kind: While, lexeme: "while", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 252, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 252, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 252, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 252, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 252, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 252, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 252, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 252, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 253, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 253, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 253, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 253, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 254, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 254, }, Token { kind: String, lexeme: "\"file is already closed\"", computed_lexeme: None, line: 254, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 254, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 254, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 254, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 255, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 255, }, 
Token { kind: String, lexeme: "\"file is already closed\"", computed_lexeme: None, line: 255, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 255, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 255, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 255, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 257, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 257, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 257, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 258, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 258, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 258, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 258, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 258, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 258, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 258, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 258, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 258, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 258, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 258, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 258, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 258, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 258, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 258, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 258, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 259, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 259, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 259, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 260, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 260, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 260, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 260, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 260, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 260, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 262, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 
262, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 262, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 262, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 262, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 262, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 262, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 263, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 263, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 263, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 263, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 263, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 263, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 263, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 264, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 264, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 264, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 264, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 264, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 264, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 265, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 265, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 265, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 265, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 265, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 265, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 265, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 265, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 265, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 266, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 266, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 266, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 267, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 267, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: 
None, line: 267, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 267, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 267, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 267, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 267, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 267, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 267, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 267, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 267, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 267, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 267, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 267, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 267, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 268, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 268, }, Token { kind: Identifier, lexeme: "tostring", computed_lexeme: None, line: 268, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 268, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 268, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 268, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 268, }, Token { kind: Identifier, lexeme: "sub", computed_lexeme: None, line: 268, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 268, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 268, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 268, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 268, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 268, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 268, }, Token { kind: String, lexeme: "\"file \"", computed_lexeme: None, line: 268, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 268, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 269, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 269, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 269, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 269, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 269, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 269, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 269, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 269, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 269, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 270, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 270, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 270, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 270, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 271, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 271, }, Token { kind: String, lexeme: "\"closed file\"", computed_lexeme: None, line: 271, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 271, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 271, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 271, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 272, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 272, }, Token { kind: Identifier, lexeme: "tostring", computed_lexeme: None, line: 272, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 272, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 272, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 272, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 272, }, Token { kind: String, lexeme: "\"file (closed)\"", computed_lexeme: None, line: 272, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 272, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 273, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 273, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 273, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 273, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 273, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 273, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 273, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 273, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 273, }, Token { kind: String, lexeme: "\"closed file\"", computed_lexeme: None, line: 273, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 273, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 274, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 274, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 274, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 274, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 274, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 274, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 275, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 275, }, Token { kind: Identifier, lexeme: "io", 
computed_lexeme: None, line: 275, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 275, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 275, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 275, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 275, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 275, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 275, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 275, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 275, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 275, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 276, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 276, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 276, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 277, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 277, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 277, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 277, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 277, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 277, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 277, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 277, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 277, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 277, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 277, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 277, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 277, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 277, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 277, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 277, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 278, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 278, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 278, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 278, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 278, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 278, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 278, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 279, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 279, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 279, }, Token { kind: DoubleEquals, lexeme: "==", 
computed_lexeme: None, line: 279, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 279, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 279, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 280, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 280, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 280, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 280, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 280, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 280, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 280, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 280, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 280, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 282, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 283, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 283, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 283, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 283, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 283, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 283, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 284, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 284, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 284, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 284, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 284, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 284, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 284, }, Token { kind: Number, lexeme: "300", computed_lexeme: Some( "300", ), line: 284, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 284, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 284, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 284, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 284, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 285, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 285, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 285, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 285, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 285, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 286, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 286, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 286, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 286, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 286, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 286, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 286, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 286, }, Token { kind: Equals, 
lexeme: "=", computed_lexeme: None, line: 286, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 286, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 286, }, Token { kind: Number, lexeme: "250", computed_lexeme: Some( "250", ), line: 286, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 286, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 286, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 286, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 286, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 286, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 286, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 286, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 286, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 287, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 287, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 287, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 287, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 287, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "table", computed_lexeme: None, line: 287, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "unpack", computed_lexeme: None, line: 287, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 287, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 287, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 287, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 287, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 287, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 287, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 289, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 289, }, Token { kind: Hash, lexeme: "#", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 289, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 289, }, Token { kind: Number, lexeme: "250", computed_lexeme: Some( "250", ), line: 289, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 289, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 289, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 289, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 289, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 289, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 289, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 289, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 289, }, Token { 
kind: Hash, lexeme: "#", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 289, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 289, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 289, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 289, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 290, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 290, }, Token { kind: Hash, lexeme: "#", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 290, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 290, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 290, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 290, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 290, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 290, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: String, lexeme: "\"too many arguments\"", computed_lexeme: None, line: 291, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 291, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 291, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 291, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "table", computed_lexeme: None, line: 291, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "unpack", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 292, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 292, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 292, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 293, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 293, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 293, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 293, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 293, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 293, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 294, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 296, }, Token { kind: Dot, lexeme: ".", 
computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 296, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 296, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 296, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 297, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 298, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 298, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 298, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 298, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 298, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 298, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 298, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 298, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 298, }, Token { kind: String, lexeme: "\"xuxu\"", computed_lexeme: None, line: 298, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 299, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 299, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 299, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 299, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 299, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 299, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 299, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 299, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 299, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 299, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 299, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 299, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 299, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 299, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 299, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 300, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 301, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 301, }, Token { kind: String, lexeme: "\"invalid format\"", computed_lexeme: None, line: 301, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 301, }, Token { kind: 
Identifier, lexeme: "io", computed_lexeme: None, line: 301, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 301, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 301, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 301, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 301, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 301, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 302, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 302, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 302, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 302, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 302, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 302, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 302, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 302, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 303, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 303, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 303, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 303, }, Token { kind: String, lexeme: "'l'", computed_lexeme: None, line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 303, }, Token { kind: String, lexeme: "'\"álo\"'", computed_lexeme: None, line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 304, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 304, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 305, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 305, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 305, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 305, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 305, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, 
line: 305, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 305, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 305, }, Token { kind: String, lexeme: "\"second line\"", computed_lexeme: None, line: 305, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 305, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 306, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 306, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 306, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 306, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 306, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 306, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 306, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 307, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 307, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 307, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 307, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 307, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 307, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 307, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 307, }, Token { kind: String, lexeme: "\"third line \"", computed_lexeme: None, line: 307, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 307, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 308, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 308, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 308, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 308, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 308, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 308, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 308, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 308, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 308, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 308, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 309, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 309, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 309, }, Token { kind: LeftParen, lexeme: 
"(", computed_lexeme: None, line: 309, }, Token { kind: String, lexeme: "'L'", computed_lexeme: None, line: 309, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 309, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 309, }, Token { kind: String, lexeme: "\"third line \\n\"", computed_lexeme: None, line: 309, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 310, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 310, }, Token { kind: String, lexeme: "\"ç\"", computed_lexeme: None, line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 311, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 311, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 311, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 311, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 311, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 311, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 311, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 311, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 311, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 312, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 312, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 312, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 312, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 312, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 312, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 312, }, Token { kind: String, lexeme: "\"cur\"", computed_lexeme: None, line: 312, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 312, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 312, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: 
None, line: 312, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 312, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 312, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 312, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 313, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 313, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 313, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 313, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 313, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 313, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 313, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 313, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 314, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 314, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 314, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 314, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 314, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 314, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 314, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 315, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 315, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 315, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 315, }, Token { kind: String, lexeme: "'n'", computed_lexeme: None, line: 315, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 315, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 315, }, Token { kind: Number, lexeme: "3450", computed_lexeme: Some( "3450", ), line: 315, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 316, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 316, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 316, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 316, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 316, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 316, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 316, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 316, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, 
line: 317, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 317, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 317, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 317, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 317, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 317, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 317, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 317, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 317, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 317, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 318, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 318, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 318, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 318, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 318, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 318, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 318, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 319, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 319, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 319, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 319, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 319, }, Token { kind: Number, lexeme: "30000", computed_lexeme: Some( "30000", ), line: 319, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 319, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 320, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 320, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 320, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 320, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 320, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 320, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 320, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 320, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 320, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 320, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 320, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 320, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 320, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 320, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 320, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 320, }, Token { kind: Identifier, lexeme: "undef", computed_lexeme: None, line: 320, }, Token { kind: RightParen, 
lexeme: ")", computed_lexeme: None, line: 320, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 321, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 321, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 321, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 321, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 321, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 321, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 321, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 321, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 321, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 322, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 322, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 322, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 322, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 322, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 322, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 322, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 322, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 322, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 322, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 322, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 322, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 322, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 322, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 322, }, Token { kind: Identifier, lexeme: "undef", computed_lexeme: None, line: 322, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 322, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 323, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 323, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 323, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 323, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 323, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 323, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 323, }, Token { kind: String, lexeme: "'n'", computed_lexeme: None, line: 323, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 323, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 323, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 324, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 324, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 324, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 324, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 324, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 324, }, Token { kind: String, lexeme: "'n'", computed_lexeme: None, line: 324, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 324, }, Token { 
kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 324, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 324, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 324, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 324, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 324, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "undef", computed_lexeme: None, line: 324, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 325, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 325, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 325, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 325, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 325, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 325, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 325, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 325, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 325, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 326, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 326, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 326, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 326, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 326, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 326, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 326, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 326, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 326, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 326, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 326, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 327, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 327, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 327, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 328, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 328, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 328, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 328, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 329, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 329, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 329, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 329, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 329, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 329, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 329, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 329, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 329, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 329, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 330, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 330, }, Token { kind: String, lexeme: "\" input file is closed\"", computed_lexeme: None, line: 330, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 330, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 330, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 330, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 330, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 330, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 332, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 332, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 332, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 332, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 332, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 332, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 332, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 332, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 332, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 334, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 334, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 334, }, Token { kind: String, lexeme: "'0123456789'", computed_lexeme: None, line: 334, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 335, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 335, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 335, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 335, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 335, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 335, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 335, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 335, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 335, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 335, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 336, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 336, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 336, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 336, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 336, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 336, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 336, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 336, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 336, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 336, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 336, }, Token { 
kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 336, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 336, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 336, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 336, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 338, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 338, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 338, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 338, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 338, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 338, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 339, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 339, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 339, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 339, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 339, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 339, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 339, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 339, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 339, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 339, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 339, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 340, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 340, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 340, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 340, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 340, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 341, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 341, }, Token { kind: String, lexeme: "\" output file is closed\"", computed_lexeme: None, line: 341, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 341, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 341, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 341, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 342, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 342, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 342, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 342, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 342, }, Token { kind: String, lexeme: "\"a+b\"", computed_lexeme: None, line: 342, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 343, }, Token { kind: Dot, lexeme: ".", computed_lexeme: 
None, line: 343, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 343, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 343, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 343, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 343, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 344, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 344, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 346, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 346, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 346, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 346, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 346, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 346, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 346, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 346, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 346, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 347, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 347, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 347, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 347, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 347, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 347, }, Token { kind: String, lexeme: "';'", computed_lexeme: None, line: 347, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 347, }, Token { kind: String, lexeme: "'end of file\\n'", computed_lexeme: None, line: 347, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 347, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 347, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 348, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 348, }, Token { kind: Identifier, lexeme: "flush", computed_lexeme: None, line: 348, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 348, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 348, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 348, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 348, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 348, }, Token { kind: Identifier, lexeme: "flush", computed_lexeme: None, line: 348, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 348, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 348, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 349, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 349, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 349, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 349, }, Token { kind: RightParen, 
lexeme: ")", computed_lexeme: None, line: 349, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 350, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 350, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 350, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 350, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 352, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 352, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 352, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 353, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 353, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 353, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 353, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 353, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 353, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 353, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 353, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 353, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 353, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 354, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 354, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 354, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 354, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 354, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 354, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 354, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 354, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 355, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 355, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 355, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 355, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 355, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 355, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 355, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 355, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 
355, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 356, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 356, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 356, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 356, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 356, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 356, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 356, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 356, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 356, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 356, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 356, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 357, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 357, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 357, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 357, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 357, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 357, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 357, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 357, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 357, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 358, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 358, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 358, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 358, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 358, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 358, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 358, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 358, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 358, }, Token { kind: String, lexeme: "';end of file\\n'", computed_lexeme: None, line: 358, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 358, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 359, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 359, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 359, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 359, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 359, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 359, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 359, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 359, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 359, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 359, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 360, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 360, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 360, }, Token { kind: Identifier, 
lexeme: "close", computed_lexeme: None, line: 360, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 360, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 360, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 360, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 360, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 360, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 360, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 364, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 365, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 365, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 365, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 365, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 365, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 365, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 367, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 367, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 367, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 367, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 367, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 367, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 367, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 367, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 367, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 367, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 367, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 367, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 367, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 367, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 367, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 368, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 371, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 371, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 371, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 371, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 371, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 371, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 371, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 372, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 372, }, Token { kind: Comma, 
lexeme: ",", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 372, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 372, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 372, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 372, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 373, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 373, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 373, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 373, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 373, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 373, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 373, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 373, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 373, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 373, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 373, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 373, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 373, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 373, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 373, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 373, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 373, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 374, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 374, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 374, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 374, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 374, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 374, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 374, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 374, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 376, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 376, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 376, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 376, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 376, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 376, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 376, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 376, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 376, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 376, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 377, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 377, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 377, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 377, }, Token { kind: Identifier, 
lexeme: "c", computed_lexeme: None, line: 377, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 377, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 377, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 377, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 377, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 377, }, Token { kind: String, lexeme: "\"whatever\"", computed_lexeme: None, line: 377, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 377, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 378, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 378, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 378, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 378, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 378, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 378, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 378, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 378, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 378, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 378, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 378, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 379, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 379, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 379, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 379, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 379, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 381, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 381, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 381, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 381, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 381, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 381, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 382, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 382, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 382, }, Token { kind: 
Identifier, lexeme: "pcall", computed_lexeme: None, line: 382, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 382, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 382, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 382, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 382, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 383, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 383, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 383, }, Token { kind: False, lexeme: "false", computed_lexeme: None, line: 383, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 383, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 383, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 383, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 384, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 384, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 384, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 384, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 384, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 384, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 384, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 384, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 385, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 387, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 387, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 387, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 387, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 387, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 390, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 390, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 390, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 390, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 390, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 390, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 390, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 390, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 390, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 
390, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 390, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 390, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 390, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 390, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 390, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 391, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 391, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 391, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 391, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 391, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 391, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 392, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 392, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 392, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 392, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 392, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 392, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 392, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 392, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 392, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 393, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 393, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 393, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 393, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 393, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 393, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 393, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 393, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 393, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 394, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 394, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 394, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 394, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 394, }, Token { kind: String, lexeme: "\"line\\n\"", computed_lexeme: None, line: 394, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 395, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 395, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 395, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 395, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 395, }, Token { kind: String, lexeme: "\"other\"", computed_lexeme: None, line: 395, }, Token 
{ kind: RightParen, lexeme: ")", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 396, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 396, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 396, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 396, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 396, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 397, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 397, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 397, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 397, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 397, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 397, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 397, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 399, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 399, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 399, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 399, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 399, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 399, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 399, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 400, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 400, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 400, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 400, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 401, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 401, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 401, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 401, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 401, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 401, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 401, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 401, }, Token { kind: DoubleDot, 
lexeme: "..", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 401, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 401, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 402, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 402, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 402, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 402, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 402, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 402, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 403, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 403, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 403, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 403, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 403, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 405, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 405, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 405, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 405, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 405, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 405, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 406, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 406, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 406, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 407, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 407, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 407, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 407, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 407, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 407, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 407, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 407, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 407, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 407, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 407, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 408, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 408, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 408, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 408, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 408, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 408, }, Token { 
kind: Identifier, lexeme: "io", computed_lexeme: None, line: 409, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 409, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 409, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 409, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 409, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 409, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 409, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 409, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 409, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 411, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 411, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 411, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 412, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 412, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 412, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 412, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 412, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 412, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 412, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 412, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 412, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 412, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 413, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 413, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 413, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 413, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 413, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 413, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 415, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 415, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 415, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 416, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 416, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 416, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 416, }, Token { kind: Comma, 
lexeme: ",", computed_lexeme: None, line: 416, }, Token { kind: String, lexeme: "\"l\"", computed_lexeme: None, line: 416, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 416, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 416, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 416, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 416, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 417, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 417, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 417, }, Token { kind: String, lexeme: "\"lineother\"", computed_lexeme: None, line: 417, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 419, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 419, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 419, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 419, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 419, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 419, }, Token { kind: String, lexeme: "\"a = 10 + 34\\na = 2*a\\na = -a\\n\"", computed_lexeme: None, line: 419, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 419, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 419, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 419, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 420, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 420, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 420, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 420, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 420, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 421, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 421, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 421, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 421, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 421, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 421, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, 
line: 421, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 421, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 421, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 421, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 421, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 421, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 421, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 421, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 421, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 421, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 422, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 422, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 422, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 422, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 422, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 422, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 422, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 422, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 422, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 422, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 422, }, Token { kind: Number, lexeme: "34", computed_lexeme: Some( "34", ), line: 422, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 422, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 422, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 422, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 422, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 422, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 425, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 428, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "gettoclose", computed_lexeme: None, line: 428, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "lv", computed_lexeme: None, line: 428, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "lv", computed_lexeme: None, line: 429, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 429, }, Token { kind: Identifier, lexeme: "lv", computed_lexeme: None, line: 429, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 429, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 429, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 430, }, Token { kind: Identifier, lexeme: "stvar", computed_lexeme: None, line: 430, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 430, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 430, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 431, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 431, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 431, }, 
Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 431, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 431, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 431, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 431, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 432, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 432, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "debug", computed_lexeme: None, line: 432, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "getlocal", computed_lexeme: None, line: 432, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "lv", computed_lexeme: None, line: 432, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 432, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 432, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 433, }, Token { kind: Identifier, lexeme: "n", computed_lexeme: None, line: 433, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 433, }, Token { kind: String, lexeme: "\"(for state)\"", computed_lexeme: None, line: 433, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 433, }, Token { kind: Identifier, lexeme: "stvar", computed_lexeme: None, line: 434, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 434, }, Token { kind: Identifier, lexeme: "stvar", computed_lexeme: None, line: 434, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 434, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 434, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 435, }, Token { kind: Identifier, lexeme: "stvar", computed_lexeme: None, line: 435, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 435, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 435, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 435, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 435, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 435, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 435, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 436, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 437, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 438, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 440, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 440, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 441, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 441, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 441, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 441, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 441, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 441, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 441, }, Token { kind: 
Identifier, lexeme: "file", computed_lexeme: None, line: 441, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 441, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 441, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 442, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 442, }, Token { kind: Identifier, lexeme: "gettoclose", computed_lexeme: None, line: 442, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 442, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 442, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 442, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 443, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 443, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 443, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 443, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 443, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 443, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 443, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 443, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 443, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 443, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 443, }, Token { kind: Break, lexeme: "break", computed_lexeme: None, line: 444, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 445, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 446, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 446, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 446, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 446, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 446, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 446, }, Token { kind: String, lexeme: "\"closed file\"", computed_lexeme: None, line: 446, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 448, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 448, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 448, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 449, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 449, }, Token { kind: Identifier, lexeme: "foo", computed_lexeme: None, line: 449, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 449, }, Token { kind: Identifier, lexeme: "name", computed_lexeme: None, line: 449, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 449, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 450, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 450, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 450, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 450, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 450, 
}, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 450, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 450, }, Token { kind: Identifier, lexeme: "name", computed_lexeme: None, line: 450, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 450, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 450, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 451, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 451, }, Token { kind: Identifier, lexeme: "gettoclose", computed_lexeme: None, line: 451, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 451, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 451, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 451, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 452, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 452, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 452, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 452, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 452, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 452, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 452, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 452, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 452, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 452, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 452, }, Token { kind: Identifier, lexeme: "error", computed_lexeme: None, line: 453, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 453, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 453, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 453, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 454, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 455, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 456, }, Token { kind: Identifier, lexeme: "st", computed_lexeme: None, line: 456, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 456, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 456, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 456, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 456, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 456, }, Token { kind: Identifier, lexeme: "foo", computed_lexeme: None, line: 456, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 456, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 456, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 456, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 457, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 457, }, Token { kind: Identifier, lexeme: "st", computed_lexeme: None, line: 457, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 457, }, Token { kind: False, lexeme: "false", computed_lexeme: None, line: 457, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 457, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 457, }, Token { kind: Dot, lexeme: ".", computed_lexeme: 
None, line: 457, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 457, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 457, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 457, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 457, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 457, }, Token { kind: String, lexeme: "\"closed file\"", computed_lexeme: None, line: 457, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 457, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 463, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 463, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 463, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 463, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 463, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 463, }, Token { kind: String, lexeme: "\"0123456789\\n\"", computed_lexeme: None, line: 463, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 463, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 463, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 463, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 464, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 464, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 464, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 464, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 464, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 464, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 464, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 464, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 464, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 464, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 464, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 465, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 465, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 465, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 465, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 465, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 465, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 465, }, Token { kind: Not, lexeme: "not", 
computed_lexeme: None, line: 465, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 465, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 465, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 466, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 466, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 466, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 466, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 466, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 466, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 466, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 466, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 466, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 466, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 467, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 468, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 470, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 470, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 470, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 470, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 470, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 470, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 470, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 470, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 470, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 470, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 470, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 470, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 470, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 471, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 471, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 471, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 471, }, Token { kind: String, lexeme: "\"0\"", computed_lexeme: None, line: 471, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 471, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 471, }, Token { kind: DoubleEquals, lexeme: "==", 
computed_lexeme: None, line: 471, }, Token { kind: String, lexeme: "\"12\"", computed_lexeme: None, line: 471, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 471, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 471, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 471, }, Token { kind: String, lexeme: "\"3456789\\n\"", computed_lexeme: None, line: 471, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 471, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 472, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 474, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 474, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 474, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 474, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 474, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 474, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 474, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 474, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 474, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 474, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 474, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 474, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 474, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 474, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 475, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 475, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 475, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 475, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 475, }, Token { kind: Break, lexeme: "break", computed_lexeme: None, line: 475, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 475, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 476, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 476, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 476, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 476, }, Token { kind: String, lexeme: "\"0123456789\\n\"", computed_lexeme: None, line: 476, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 476, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 476, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 476, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 476, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 476, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 476, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 476, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 477, 
}, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 478, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 478, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 478, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 480, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 480, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 480, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 480, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 480, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 480, }, Token { kind: String, lexeme: "\"00\\n10\\n20\\n30\\n40\\n\"", computed_lexeme: None, line: 480, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 480, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 480, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 480, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 481, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 481, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 481, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 481, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 481, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 481, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 481, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 481, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 481, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 481, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 481, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 481, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 481, }, Token { kind: String, lexeme: "\"n\"", computed_lexeme: None, line: 481, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 481, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 481, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 482, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 482, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 482, }, Token { kind: Number, lexeme: "40", computed_lexeme: Some( "40", ), line: 482, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 482, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 482, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 482, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 482, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 482, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 482, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 483, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 483, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: 
None, line: 483, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 483, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 483, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 483, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 483, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 483, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 483, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 484, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 485, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 489, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 489, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 489, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 489, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 489, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 489, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 489, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 490, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 490, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 490, }, Token { kind: MultilineString, lexeme: "[[\nlocal y\n= X\nX =\nX *\n2 +\nX;\nX =\nX\n- y;\n]]", computed_lexeme: None, line: 500, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 500, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 500, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 500, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 500, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 501, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 501, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 501, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 501, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 501, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 502, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 502, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 502, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 502, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 502, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 502, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 502, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 502, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 502, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 502, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 502, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 502, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 502, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 502, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 502, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 503, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 503, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, 
line: 503, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 504, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 504, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 504, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 504, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 504, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 504, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 504, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 504, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 504, }, Token { kind: String, lexeme: "\"L\"", computed_lexeme: None, line: 504, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 504, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 504, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 504, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 504, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 504, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 505, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 505, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 505, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 505, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 505, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 505, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 505, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 505, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 506, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 506, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 506, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 506, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 506, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 506, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 506, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 506, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 506, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 506, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 506, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 506, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 506, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 506, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 506, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 507, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 507, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 507, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 507, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 507, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 507, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 507, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 507, }, Token { kind: Identifier, lexeme: 
"load", computed_lexeme: None, line: 508, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 508, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 508, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 508, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 508, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 508, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 508, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 508, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 508, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 508, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 508, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 508, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 508, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 508, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 508, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 509, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 509, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 509, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 509, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 509, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 509, }, Token { kind: Number, lexeme: "8", computed_lexeme: Some( "8", ), line: 509, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 509, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 511, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 511, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 511, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 511, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 513, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 513, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 513, }, Token { kind: String, lexeme: "\"string\\n\\n\\\\com \\\"\\\"''coisas [[estranhas]] ]]'\"", computed_lexeme: None, line: 513, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 514, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 514, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 514, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 514, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 514, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 514, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 515, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 515, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 515, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 515, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 515, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 515, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 515, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 515, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 515, }, Token { kind: LeftParen, lexeme: 
"(", computed_lexeme: None, line: 515, }, Token { kind: String, lexeme: "\"x2 = %q\\n-- comment without ending EOS\"", computed_lexeme: None, line: 515, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 515, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 515, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 515, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 515, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 515, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 516, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 516, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 516, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 516, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 516, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 517, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 517, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 517, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 517, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 517, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 517, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 517, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 517, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 517, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 518, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 518, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "x2", computed_lexeme: None, line: 518, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 519, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 519, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 519, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 520, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 520, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 520, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 520, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 520, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 521, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 521, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 521, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 521, }, Token { kind: LeftParen, lexeme: 
"(", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 521, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 521, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 522, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 522, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 522, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 522, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 522, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 522, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 522, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 522, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 522, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 522, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 525, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 525, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 525, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "expres", computed_lexeme: None, line: 525, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 526, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 526, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 526, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 526, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 526, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 526, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 527, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 527, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 527, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 527, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 527, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 528, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 528, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 528, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 528, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 528, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 529, }, Token { kind: Identifier, lexeme: "res", computed_lexeme: None, line: 529, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 529, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 529, }, Token { 
kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 529, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 529, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 529, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 529, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 529, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 529, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 529, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 529, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 530, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 530, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 530, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 530, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 530, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 530, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 530, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 530, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 530, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 531, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 531, }, Token { kind: Identifier, lexeme: "res", computed_lexeme: None, line: 531, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 531, }, Token { kind: Identifier, lexeme: "expres", computed_lexeme: None, line: 531, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 531, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 532, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 535, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 535, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 535, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 535, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 535, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 535, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 538, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 538, }, Token { kind: String, lexeme: "\"# a non-ending comment\"", computed_lexeme: None, line: 538, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 538, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 538, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 538, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 542, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 542, }, Token { kind: String, lexeme: "\"\\xEF\\xBB\\xBF# some comment\\nreturn 234\"", computed_lexeme: None, line: 542, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 542, }, Token { kind: Number, lexeme: "234", computed_lexeme: Some( "234", ), line: 542, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 542, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 543, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 543, }, Token { kind: String, lexeme: "\"\\xEF\\xBB\\xBFreturn 239\"", computed_lexeme: None, line: 543, }, Token { kind: 
Comma, lexeme: ",", computed_lexeme: None, line: 543, }, Token { kind: Number, lexeme: "239", computed_lexeme: Some( "239", ), line: 543, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 543, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 544, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 544, }, Token { kind: String, lexeme: "\"\\xEF\\xBB\\xBF\"", computed_lexeme: None, line: 544, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 544, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 544, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 544, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 548, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 548, }, Token { kind: String, lexeme: "\"# a comment\\nreturn require'debug'.getinfo(1).currentline\"", computed_lexeme: None, line: 548, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 548, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 548, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 548, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 552, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 552, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 552, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 552, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 552, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 552, }, Token { kind: String, lexeme: "\"wb\"", computed_lexeme: None, line: 552, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 552, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 553, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 553, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "dump", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 553, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 553, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 553, }, Token { kind: String, lexeme: "'\\0alo\\255'", computed_lexeme: None, line: 553, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, 
line: 553, }, Token { kind: String, lexeme: "'hi'", computed_lexeme: None, line: 553, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 554, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 554, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 554, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 554, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 554, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 555, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 555, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 555, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 555, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 555, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 555, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 555, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 555, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 555, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 556, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 556, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 556, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 556, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 556, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 556, }, Token { kind: String, lexeme: "\"\\0alo\\255\"", computed_lexeme: None, line: 556, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 556, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 556, }, Token { kind: String, lexeme: "\"hi\"", computed_lexeme: None, line: 556, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 557, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 557, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 557, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 557, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 557, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 557, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 557, }, Token { kind: RightParen, 
lexeme: ")", computed_lexeme: None, line: 557, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 557, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 560, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 561, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 561, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 561, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 561, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 561, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 561, }, Token { kind: String, lexeme: "\"wb\"", computed_lexeme: None, line: 561, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 561, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 563, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 563, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 563, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 563, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "dump", computed_lexeme: None, line: 563, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 563, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 563, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 563, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 563, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 563, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 563, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 563, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 563, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 563, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 564, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 564, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 564, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 564, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 564, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 565, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 565, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 565, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 565, }, Token { 
kind: Comma, lexeme: ",", computed_lexeme: None, line: 565, }, Token { kind: String, lexeme: "\"b\"", computed_lexeme: None, line: 565, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 565, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 565, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 565, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 565, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 566, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 566, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 566, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 566, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 566, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 566, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 566, }, Token { kind: String, lexeme: "\"function\"", computed_lexeme: None, line: 566, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 566, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 566, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 566, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 566, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 566, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 566, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 566, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 567, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 567, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 567, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 567, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 567, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 567, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 567, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 567, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 567, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 568, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 571, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 571, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 571, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 571, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 571, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 571, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 571, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 571, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 571, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 571, }, Token { kind: String, lexeme: "\"wb\"", computed_lexeme: None, line: 571, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 571, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 571, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 572, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, 
line: 572, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 572, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 572, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 572, }, Token { kind: String, lexeme: "\"#this is a comment for a binary file\\0\\n\"", computed_lexeme: None, line: 572, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 573, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "dump", computed_lexeme: None, line: 573, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 573, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 573, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 573, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 573, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 573, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 573, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 573, }, Token { kind: String, lexeme: "'\\0\\0\\0'", computed_lexeme: None, line: 573, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 573, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 573, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 573, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 574, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 574, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 574, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 574, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 574, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 575, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 575, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 575, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 575, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 575, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 575, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 575, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 575, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 575, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 576, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 576, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 576, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 576, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 576, 
}, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 576, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 576, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 576, }, Token { kind: String, lexeme: "\"\\0\\0\\0\"", computed_lexeme: None, line: 576, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 576, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 576, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 576, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 576, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 576, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 577, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 577, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 577, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 577, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 577, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 577, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 577, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 577, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 577, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 581, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 582, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 582, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 582, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 582, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 582, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 582, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 583, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 583, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 583, }, Token { kind: MultilineString, lexeme: "[[\n if (...) 
then a = 15; return b, c, d\n else return _ENV\n end\n ]]", computed_lexeme: None, line: 587, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 588, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 588, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 588, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 588, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 588, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 589, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 589, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 589, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 589, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 589, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 589, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 589, }, Token { kind: String, lexeme: "\"xuxu\"", computed_lexeme: None, line: 589, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 589, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 589, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 589, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 590, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 590, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 590, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 590, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 590, }, Token { kind: String, lexeme: "'t'", computed_lexeme: None, line: 590, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 590, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 590, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 590, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 591, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 591, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 591, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 591, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 591, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 591, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 
591, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 592, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 592, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 592, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 592, }, Token { kind: Number, lexeme: "15", computed_lexeme: Some( "15", ), line: 592, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 592, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 592, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 592, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 592, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 592, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 592, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 592, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 592, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 592, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 593, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 593, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 593, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 593, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 593, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 594, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 594, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 594, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 594, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 594, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 594, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 594, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 594, }, Token { kind: String, lexeme: "'t'", computed_lexeme: None, line: 594, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 594, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 594, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 594, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 594, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 595, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 595, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 595, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 595, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 595, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 595, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 595, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 595, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 596, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 596, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 596, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 596, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 596, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 597, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 597, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 597, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 597, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 597, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 598, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 598, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 598, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 598, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 598, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 598, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 598, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 598, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 598, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 599, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 603, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 604, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 604, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 604, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 604, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 604, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 604, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 604, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 604, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 604, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 604, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 604, }, Token { kind: String, lexeme: "\"return 10\"", computed_lexeme: None, line: 604, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 604, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 604, }, Token { kind: 
Identifier, lexeme: "close", computed_lexeme: None, line: 604, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 604, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 604, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 605, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 605, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 605, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 605, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 605, }, Token { kind: String, lexeme: "'b'", computed_lexeme: None, line: 605, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 606, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 606, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 606, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 606, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 606, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 606, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 606, }, Token { kind: String, lexeme: "\"a text chunk\"", computed_lexeme: None, line: 606, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 606, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 607, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 607, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 607, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 607, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 607, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 607, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 607, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 607, }, Token { kind: String, lexeme: "\"\\27 return 10\"", computed_lexeme: None, line: 607, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 607, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 607, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 607, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 607, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 608, }, 
Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 608, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 608, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 608, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 608, }, Token { kind: String, lexeme: "'t'", computed_lexeme: None, line: 608, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 609, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 609, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 609, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 609, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 609, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 609, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 609, }, Token { kind: String, lexeme: "\"a binary chunk\"", computed_lexeme: None, line: 609, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 609, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 610, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 610, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 610, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 610, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 610, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 610, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 614, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 614, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 614, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 614, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 614, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 614, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 615, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 615, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 615, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 615, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 615, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 615, }, Token { kind: String, lexeme: "\"qualquer coisa\\n\"", computed_lexeme: None, line: 615, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 615, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 615, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 616, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 616, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 616, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 616, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 616, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 616, }, Token { kind: String, lexeme: "\"mais qualquer coisa\"", computed_lexeme: None, line: 616, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 616, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 616, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 617, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 617, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 617, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 617, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 617, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 618, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 618, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 618, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 618, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 618, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 618, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 618, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 618, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 618, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 618, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 618, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 618, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 618, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 618, }, Token { kind: String, lexeme: "'wb'", computed_lexeme: None, line: 618, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 618, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 618, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 618, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 619, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 619, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 619, }, Token { kind: String, lexeme: "\"outra coisa\\0\\1\\3\\0\\0\\0\\0\\255\\0\"", computed_lexeme: None, line: 619, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 619, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 620, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 620, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 620, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 620, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 620, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 622, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 622, }, Token { kind: Equals, lexeme: "=", 
computed_lexeme: None, line: 622, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 622, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 622, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 622, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 622, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 622, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 622, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 622, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 622, }, Token { kind: String, lexeme: "'r+'", computed_lexeme: None, line: 622, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 622, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 622, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 623, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 623, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 623, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 623, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 623, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 623, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 623, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 623, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 623, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 623, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 623, }, Token { kind: String, lexeme: "'rb'", computed_lexeme: None, line: 623, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 623, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 623, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 624, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 624, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 624, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 624, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 624, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 624, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 625, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 625, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 625, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 625, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 625, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 625, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 625, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 625, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 625, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 626, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 626, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 626, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 626, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 626, }, Token { kind: LeftParen, 
lexeme: "(", computed_lexeme: None, line: 626, }, Token { kind: String, lexeme: "'l'", computed_lexeme: None, line: 626, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 626, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 626, }, Token { kind: String, lexeme: "\"qualquer coisa\"", computed_lexeme: None, line: 626, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 626, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 627, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 627, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 627, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 627, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 627, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 627, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 628, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 628, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 628, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 628, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 628, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 628, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 628, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 628, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 628, }, Token { kind: String, lexeme: "\"outra coisa\"", computed_lexeme: None, line: 628, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 628, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 628, }, Token { kind: String, lexeme: "\"outra coisa\"", computed_lexeme: None, line: 628, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 628, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 629, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 629, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 629, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 629, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 629, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 629, }, Token { kind: String, lexeme: "'l'", computed_lexeme: None, line: 629, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 629, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 629, }, Token { kind: String, lexeme: "\"mais qualquer coisa\"", computed_lexeme: None, line: 629, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 629, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 630, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 630, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 630, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 630, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 630, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 630, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 631, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 631, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 
631, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 631, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 631, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 631, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 631, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 631, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 631, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 632, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 632, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 632, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 632, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 632, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 632, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 633, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 633, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 633, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 633, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 633, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 633, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 633, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 633, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 633, }, Token { kind: String, lexeme: "\"\\0\\1\\3\\0\"", computed_lexeme: None, line: 633, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 633, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 634, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 634, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 634, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 634, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 634, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 634, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 634, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 634, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 634, }, Token { kind: String, lexeme: "\"\\0\\0\\0\"", computed_lexeme: None, line: 634, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 634, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 635, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 635, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 635, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 635, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 635, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 635, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 635, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 635, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 635, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 635, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 635, }, Token { kind: Identifier, lexeme: "assert", 
computed_lexeme: None, line: 636, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 636, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 636, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 636, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 636, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 636, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 636, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 636, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 636, }, Token { kind: String, lexeme: "\"\\255\"", computed_lexeme: None, line: 636, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 636, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 637, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 637, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 637, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 637, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 637, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 637, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 637, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 637, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 637, }, Token { kind: String, lexeme: "\"\\0\"", computed_lexeme: None, line: 637, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 637, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 638, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 638, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 638, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 638, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 638, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 638, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 638, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 638, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 638, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 638, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 639, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 639, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 639, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 639, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 639, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 639, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 639, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 639, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 639, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 639, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 640, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 640, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 640, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 640, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: 
None, line: 640, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 641, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 641, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 641, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 641, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 641, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 641, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 641, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 641, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 641, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 642, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 642, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 642, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 642, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 642, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 642, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 642, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 642, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 642, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 643, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 643, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 643, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 645, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 645, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 645, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 645, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 645, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 645, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 646, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 646, }, Token { kind: MultilineString, lexeme: "[[\n 123.4\t-56e-2 not a number\nsecond line\nthird line\n\nand the rest of the file\n]]", computed_lexeme: None, line: 652, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 653, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 653, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 653, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 653, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 654, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 654, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 654, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 654, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 654, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 654, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { 
kind: Identifier, lexeme: "b", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "e", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "__", computed_lexeme: None, line: 655, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 655, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 655, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 655, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: String, lexeme: "'n'", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: String, lexeme: "'n'", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: String, lexeme: "'l'", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: String, lexeme: "'l'", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: String, lexeme: "'l'", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 655, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 655, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 655, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 655, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 656, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 656, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 656, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 656, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 656, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 656, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 656, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 656, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 656, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 656, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 656, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 656, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 656, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 657, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 657, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: 
None, line: 657, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 657, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 657, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 657, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 657, }, Token { kind: Identifier, lexeme: "__", computed_lexeme: None, line: 657, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 657, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 658, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 658, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 658, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 658, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 658, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 658, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 658, }, Token { kind: String, lexeme: "'number'", computed_lexeme: None, line: 658, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 658, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 658, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 658, }, Token { kind: Number, lexeme: "123.4", computed_lexeme: Some( "123.4", ), line: 658, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 658, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 658, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 658, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 658, }, Token { kind: Number, lexeme: "56e-2", computed_lexeme: Some( "56e-2", ), line: 658, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 658, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 659, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 659, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 659, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 659, }, Token { kind: String, lexeme: "'second line'", computed_lexeme: None, line: 659, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 659, }, Token { kind: Identifier, lexeme: "e", computed_lexeme: None, line: 659, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 659, }, Token { kind: String, lexeme: "'third line'", computed_lexeme: None, line: 659, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 659, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 660, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 660, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 660, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 660, }, Token { kind: MultilineString, lexeme: "[[\n\nand the rest of the file\n]]", computed_lexeme: None, line: 663, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 663, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 664, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 664, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 664, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 664, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 664, }, Token { kind: LeftParen, 
lexeme: "(", computed_lexeme: None, line: 664, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 664, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 664, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 664, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 665, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 665, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 665, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 668, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 669, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 669, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 669, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 669, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 669, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 669, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 669, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 669, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 669, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 669, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 669, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 669, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 669, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 669, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 670, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 670, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 670, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 670, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 670, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 670, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 670, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 670, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 670, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 670, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 670, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 670, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 670, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 670, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 671, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 671, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 671, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 671, }, Token { kind: Identifier, lexeme: "setvbuf", computed_lexeme: None, line: 671, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 671, }, Token { kind: String, lexeme: "\"full\"", computed_lexeme: None, line: 671, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 671, }, Token { kind: Number, lexeme: "2000", computed_lexeme: Some( "2000", ), line: 671, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 671, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 671, }, 
Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 672, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 672, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 672, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 672, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 672, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 672, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 673, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 673, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 673, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 673, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 673, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 673, }, Token { kind: String, lexeme: "\"all\"", computed_lexeme: None, line: 673, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 673, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 673, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 673, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 673, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 674, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 674, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 674, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 674, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 674, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 675, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 675, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 675, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 675, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 675, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 675, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 676, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 676, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 676, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 676, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 676, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 676, }, Token { kind: String, lexeme: "\"all\"", computed_lexeme: None, line: 676, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 676, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 676, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 676, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 676, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 677, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 677, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 677, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 677, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 677, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 677, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 677, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 677, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 677, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 677, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 677, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 677, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 677, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 678, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 678, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 678, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 678, }, Token { kind: Identifier, lexeme: "setvbuf", computed_lexeme: None, line: 678, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 678, }, Token { kind: String, lexeme: "\"no\"", computed_lexeme: None, line: 678, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 678, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 678, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 679, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 679, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 679, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 679, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 679, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 679, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 680, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 680, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 680, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 680, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 680, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 680, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 681, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 681, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 681, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 681, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 681, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 681, }, Token { kind: String, lexeme: "\"all\"", computed_lexeme: None, line: 681, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 681, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 681, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 681, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 681, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 682, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 682, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 682, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 682, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 682, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 683, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 683, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 683, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 683, }, Token { kind: 
Identifier, lexeme: "io", computed_lexeme: None, line: 683, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 683, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 683, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 683, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 683, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 683, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 683, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 683, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 683, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 684, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 684, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 684, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 684, }, Token { kind: Identifier, lexeme: "setvbuf", computed_lexeme: None, line: 684, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 684, }, Token { kind: String, lexeme: "\"line\"", computed_lexeme: None, line: 684, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 684, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 684, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 685, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 685, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 685, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 685, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 685, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 685, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 686, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 686, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 686, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 686, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 686, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 686, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 686, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 686, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 687, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 687, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 687, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 687, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 687, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 687, }, Token { kind: String, lexeme: "\"all\"", computed_lexeme: None, line: 687, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 687, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 687, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 687, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 687, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 688, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 688, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 688, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 
688, }, Token { kind: String, lexeme: "\"a\\n\"", computed_lexeme: None, line: 688, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 688, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 688, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 688, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 688, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 688, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 688, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 688, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 688, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 689, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 689, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 689, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 689, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 689, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 689, }, Token { kind: String, lexeme: "\"all\"", computed_lexeme: None, line: 689, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 689, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 689, }, Token { kind: String, lexeme: "\"xa\\n\"", computed_lexeme: None, line: 689, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 689, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 690, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 690, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 690, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 690, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 690, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 690, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 690, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 690, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 690, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 690, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 690, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 691, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 691, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 691, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 691, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 691, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 691, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 691, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 691, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 691, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 692, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 695, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 695, }, Token { kind: Identifier, lexeme: "_soft", computed_lexeme: None, line: 695, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 695, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 696, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 696, }, Token { kind: String, lexeme: "\"testing large files (> BUFSIZ)\"", computed_lexeme: None, line: 696, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 696, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 697, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 697, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 697, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 697, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 697, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 697, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 698, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 698, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 698, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 698, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 698, }, Token { kind: Number, lexeme: "5001", computed_lexeme: Some( "5001", ), line: 698, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 698, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 698, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 698, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 698, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 698, }, Token { kind: String, lexeme: "'0123456789123'", computed_lexeme: None, line: 698, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 698, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 698, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 699, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 699, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 699, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 699, }, Token { kind: String, lexeme: "'\\n12346'", computed_lexeme: None, line: 699, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 699, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 699, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 699, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 699, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 699, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 700, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 700, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 700, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 700, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 700, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 700, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 701, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 701, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 701, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 701, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 701, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 701, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 701, }, Token { kind: String, lexeme: "'a'", computed_lexeme: None, line: 701, }, Token { 
kind: RightParen, lexeme: ")", computed_lexeme: None, line: 701, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 702, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 702, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 702, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 702, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 702, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 702, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 702, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 702, }, Token { kind: String, lexeme: "'set'", computed_lexeme: None, line: 702, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 702, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 702, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 702, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 703, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 703, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 703, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 703, }, Token { kind: Number, lexeme: "30001", computed_lexeme: Some( "30001", ), line: 703, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 703, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 703, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 703, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 703, }, Token { kind: Number, lexeme: "1005", computed_lexeme: Some( "1005", ), line: 703, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 703, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 703, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 703, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 703, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 703, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 703, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 703, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 704, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 704, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 704, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 704, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 704, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 704, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 704, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 704, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 704, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 
704, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 704, }, Token { kind: Number, lexeme: "100003", computed_lexeme: Some( "100003", ), line: 704, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 704, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 705, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 705, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 705, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 705, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 705, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 705, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 705, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 705, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 705, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 705, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 705, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 705, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 705, }, Token { kind: Number, lexeme: "5001", computed_lexeme: Some( "5001", ), line: 705, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 705, }, Token { kind: Number, lexeme: "13", computed_lexeme: Some( "13", ), line: 705, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 705, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 705, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 705, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 706, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 706, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 706, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 706, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 706, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 706, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 706, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 706, }, Token { kind: String, lexeme: "'set'", computed_lexeme: None, line: 706, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 706, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 706, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 706, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 707, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 707, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 707, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 707, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 707, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 707, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 707, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 708, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 708, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 708, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 708, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 708, }, Token { kind: DoubleDot, lexeme: 
"..", computed_lexeme: None, line: 708, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 708, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 708, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 708, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 708, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 708, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 708, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 708, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 708, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 709, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 709, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 709, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 709, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 709, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 709, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 709, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 709, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 709, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 710, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 710, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 710, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 710, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 710, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 710, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 710, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 710, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 710, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 710, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 711, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 711, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 711, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 711, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 711, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 711, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 711, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 711, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 711, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 712, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 712, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 712, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 712, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 712, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 712, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 712, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 713, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 715, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 715, }, Token { kind: Identifier, lexeme: "_port", computed_lexeme: 
None, line: 715, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 715, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 716, }, Token { kind: Identifier, lexeme: "progname", computed_lexeme: None, line: 716, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 717, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 718, }, Token { kind: Identifier, lexeme: "arg", computed_lexeme: None, line: 718, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 718, }, Token { kind: Identifier, lexeme: "arg", computed_lexeme: None, line: 718, }, Token { kind: Or, lexeme: "or", computed_lexeme: None, line: 718, }, Token { kind: Identifier, lexeme: "ARG", computed_lexeme: None, line: 718, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 719, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 719, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 719, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 719, }, Token { kind: While, lexeme: "while", computed_lexeme: None, line: 720, }, Token { kind: Identifier, lexeme: "arg", computed_lexeme: None, line: 720, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 720, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 720, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 720, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 720, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 720, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 720, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 720, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 720, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 720, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 720, }, Token { kind: Identifier, lexeme: "progname", computed_lexeme: None, line: 721, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 721, }, Token { kind: String, lexeme: "'\"'", computed_lexeme: None, line: 721, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 721, }, Token { kind: Identifier, lexeme: "arg", computed_lexeme: None, line: 721, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 721, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 721, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 721, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 721, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 721, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 721, }, Token { kind: String, lexeme: "'\"'", computed_lexeme: None, line: 721, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 722, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 723, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 723, }, Token { kind: String, lexeme: "\"testing popen/pclose and execute\"", computed_lexeme: None, line: 723, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 723, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 725, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 725, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 725, }, Token { 
kind: Comma, lexeme: ",", computed_lexeme: None, line: 725, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 725, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 725, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 725, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 725, }, Token { kind: String, lexeme: "\"cat\"", computed_lexeme: None, line: 725, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 725, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 725, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 725, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 726, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 726, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 726, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 726, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 726, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 726, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 726, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 726, }, Token { kind: String, lexeme: "\"cat\"", computed_lexeme: None, line: 726, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 726, }, Token { kind: String, lexeme: "\"r+\"", computed_lexeme: None, line: 726, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 726, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 727, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 727, }, Token { kind: String, lexeme: "\"invalid mode\"", computed_lexeme: None, line: 727, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 727, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 727, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 727, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 727, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 727, }, Token { kind: String, lexeme: "\"cat\"", computed_lexeme: None, line: 727, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 727, }, Token { kind: String, lexeme: "\"rw\"", computed_lexeme: None, line: 727, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 727, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 728, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 729, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 729, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 729, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 729, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 729, }, Token { kind: Identifier, lexeme: "tmpname", computed_lexeme: None, line: 729, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 729, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 729, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 730, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 730, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 730, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 730, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 730, }, Token { 
kind: Identifier, lexeme: "io", computed_lexeme: None, line: 730, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 730, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 730, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 730, }, Token { kind: String, lexeme: "\"cat - > \"", computed_lexeme: None, line: 730, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 730, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 730, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 730, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 730, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 730, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 730, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 731, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 731, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 731, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 731, }, Token { kind: String, lexeme: "\"a line\"", computed_lexeme: None, line: 731, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 731, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 732, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 732, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 732, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 732, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 732, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 732, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 732, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 732, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 733, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 733, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 733, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 733, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 733, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 733, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 733, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 733, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 733, }, Token { kind: String, lexeme: "\"cat - < \"", computed_lexeme: None, line: 733, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 733, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 733, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 733, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 733, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 733, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 733, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 734, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 734, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 734, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 734, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 734, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: 
None, line: 734, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 734, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 734, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 734, }, Token { kind: String, lexeme: "\"a line\"", computed_lexeme: None, line: 734, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 734, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 735, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 735, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 735, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 735, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 735, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 735, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 735, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 735, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 736, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 736, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 736, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 736, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 736, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 736, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 736, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 736, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 736, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 737, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 739, }, Token { kind: Identifier, lexeme: "tests", computed_lexeme: None, line: 739, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 739, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 739, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 741, }, Token { kind: String, lexeme: "\"ls > /dev/null\"", computed_lexeme: None, line: 741, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 741, }, Token { kind: String, lexeme: "\"ok\"", computed_lexeme: None, line: 741, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 741, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 741, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 742, }, Token { kind: String, lexeme: "\"not-to-be-found-command\"", computed_lexeme: None, line: 742, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 742, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 742, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 742, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 742, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 743, }, Token { kind: String, lexeme: "\"exit 3\"", computed_lexeme: None, line: 743, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 743, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 743, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 743, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 743, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 743, }, Token { 
kind: Comma, lexeme: ",", computed_lexeme: None, line: 743, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 744, }, Token { kind: String, lexeme: "\"exit 129\"", computed_lexeme: None, line: 744, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 744, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 744, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 744, }, Token { kind: Number, lexeme: "129", computed_lexeme: Some( "129", ), line: 744, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 744, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 744, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 745, }, Token { kind: String, lexeme: "\"kill -s HUP $$\"", computed_lexeme: None, line: 745, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 745, }, Token { kind: String, lexeme: "\"signal\"", computed_lexeme: None, line: 745, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 745, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 745, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 745, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 745, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 746, }, Token { kind: String, lexeme: "\"kill -s KILL $$\"", computed_lexeme: None, line: 746, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 746, }, Token { kind: String, lexeme: "\"signal\"", computed_lexeme: None, line: 746, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 746, }, Token { kind: Number, lexeme: "9", computed_lexeme: Some( "9", ), line: 746, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 746, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 746, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 747, }, Token { kind: String, lexeme: "\"sh -c 'kill -s HUP $$'\"", computed_lexeme: None, line: 747, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 747, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 747, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 747, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 747, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 748, }, Token { kind: Identifier, lexeme: "progname", computed_lexeme: None, line: 748, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 748, }, Token { kind: String, lexeme: "' -e \" \"'", computed_lexeme: None, line: 748, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 748, }, Token { kind: String, lexeme: "\"ok\"", computed_lexeme: None, line: 748, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 748, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 748, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 749, }, Token { kind: Identifier, lexeme: "progname", computed_lexeme: None, line: 749, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 749, }, Token { kind: String, lexeme: "' -e \"os.exit(0, true)\"'", computed_lexeme: None, line: 749, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 749, }, Token { kind: String, lexeme: "\"ok\"", computed_lexeme: None, line: 749, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 749, }, Token { kind: 
Comma, lexeme: ",", computed_lexeme: None, line: 749, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 750, }, Token { kind: Identifier, lexeme: "progname", computed_lexeme: None, line: 750, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 750, }, Token { kind: String, lexeme: "' -e \"os.exit(20, true)\"'", computed_lexeme: None, line: 750, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 750, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 750, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 750, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 750, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 750, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 750, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 751, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 752, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 752, }, Token { kind: String, lexeme: "\"\\n(some error messages are expected now)\"", computed_lexeme: None, line: 752, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 752, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 753, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: None, line: 753, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 753, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 753, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 753, }, Token { kind: Identifier, lexeme: "ipairs", computed_lexeme: None, line: 753, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 753, }, Token { kind: Identifier, lexeme: "tests", computed_lexeme: None, line: 753, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 753, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 753, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 754, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 754, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 754, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 754, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 754, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 754, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 754, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 754, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 754, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 754, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 754, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 754, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 754, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 754, }, Token { kind: Local, lexeme: "local", 
computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 755, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "y1", computed_lexeme: None, line: 755, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "z1", computed_lexeme: None, line: 755, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 755, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "execute", computed_lexeme: None, line: 755, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 755, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 755, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 755, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 755, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 755, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 756, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 756, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 756, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 756, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 756, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 756, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 756, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 756, }, Token { kind: Identifier, lexeme: "y1", computed_lexeme: None, line: 756, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 756, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 756, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 756, }, Token { kind: Identifier, lexeme: "z1", computed_lexeme: None, line: 756, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 756, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 757, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 757, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 757, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 757, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 757, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 757, }, Token { kind: String, lexeme: "\"ok\"", computed_lexeme: None, line: 757, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 757, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 758, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 758, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 758, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 758, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 758, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 758, }, Token { kind: String, lexeme: "'exit'", computed_lexeme: None, line: 758, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 758, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 758, }, Token { kind: 
DoubleEquals, lexeme: "==", computed_lexeme: None, line: 758, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 758, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 758, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 759, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 760, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 760, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 760, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 760, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 760, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 760, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 760, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 760, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 760, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 760, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 760, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 760, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 762, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 762, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 762, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 762, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 762, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 762, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 762, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 762, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 762, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 762, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 762, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 762, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 762, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 762, }, Token { kind: Or, lexeme: "or", computed_lexeme: None, line: 762, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 762, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 762, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 762, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 762, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 762, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 762, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 762, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 763, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 764, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 765, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 769, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 769, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 769, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 769, }, Token { kind: Identifier, lexeme: "tmpfile", computed_lexeme: None, line: 769, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 769, 
}, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 769, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 770, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 770, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 770, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 770, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 770, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 770, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 770, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 770, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 770, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 770, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 770, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 771, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 771, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 771, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 771, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 771, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 771, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 772, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 772, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 772, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 772, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 772, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 772, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 773, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 773, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 773, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 773, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 773, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 773, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 773, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 773, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 773, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 775, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 777, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 777, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 779, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 779, }, Token { kind: String, lexeme: "\"testing date/time\"", computed_lexeme: None, line: 779, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 779, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 781, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 781, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 781, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 781, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 781, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 781, }, Token { kind: String, 
lexeme: "\"\"", computed_lexeme: None, line: 781, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 781, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 781, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 781, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 781, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 782, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 782, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 782, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 782, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 782, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 782, }, Token { kind: String, lexeme: "\"!\"", computed_lexeme: None, line: 782, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 782, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 782, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 782, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 782, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 783, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 783, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 783, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 783, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 783, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 783, }, Token { kind: String, lexeme: "\"\\0\\0\"", computed_lexeme: None, line: 783, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 783, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 783, }, Token { kind: String, lexeme: "\"\\0\\0\"", computed_lexeme: None, line: 783, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 783, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 784, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 784, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 784, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 784, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 784, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 784, }, Token { kind: String, lexeme: "\"!\\0\\0\"", computed_lexeme: None, line: 784, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 784, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 784, }, Token { kind: String, lexeme: "\"\\0\\0\"", computed_lexeme: None, line: 784, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 784, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 785, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 785, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 785, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 785, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 785, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 785, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 785, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 785, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, 
line: 785, }, Token { kind: Number, lexeme: "10000", computed_lexeme: Some( "10000", ), line: 785, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 785, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 786, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 786, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 786, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 786, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 786, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 786, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 786, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 786, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 786, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 786, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 786, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 787, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 787, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 787, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 787, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 787, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 787, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 787, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 787, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 788, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 788, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 788, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 788, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 788, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 788, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 788, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 788, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 788, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 788, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 789, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 789, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 789, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 789, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 789, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 789, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 789, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 789, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 789, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 789, }, Token { kind: String, lexeme: "\"%d\"", computed_lexeme: None, line: 789, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 789, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 789, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 789, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 789, }, Token { kind: Identifier, lexeme: "t", 
computed_lexeme: None, line: 789, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 789, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 789, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 790, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 790, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 790, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 790, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 790, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 790, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 790, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 790, }, Token { kind: String, lexeme: "\"%d\"", computed_lexeme: None, line: 790, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 790, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 790, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 790, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 790, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 790, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 790, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 790, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 791, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 791, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 791, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 791, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 791, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 791, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 791, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 791, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 791, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 791, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 791, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 791, }, Token { kind: Number, lexeme: "200", computed_lexeme: Some( "200", ), line: 791, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 791, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 791, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 791, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 791, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 791, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 791, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 791, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 791, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 791, }, Token { kind: Number, lexeme: "100", computed_lexeme: Some( "100", ), line: 791, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 791, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 791, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 793, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 793, }, Token { kind: Identifier, lexeme: "checkDateTable", computed_lexeme: None, line: 793, 
}, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 793, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 793, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 793, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 794, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 794, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 794, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 794, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 794, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 794, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 794, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 794, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 794, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 794, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 794, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 794, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 795, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 795, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 795, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 795, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 795, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 795, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 795, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 795, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 795, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 795, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 795, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 796, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 796, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 796, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 796, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 796, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 796, }, Token { kind: MultilineString, lexeme: "[[assert(D.year==%Y and D.month==%m and D.day==%d and\n D.hour==%H and D.min==%M and D.sec==%S and\n D.wday==%w+1 and D.yday==%j)]]", computed_lexeme: None, line: 798, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 798, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 798, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 798, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 798, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 798, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 798, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 799, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 799, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 799, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 799, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 799, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 800, }, Token { kind: Identifier, lexeme: 
"checkDateTable", computed_lexeme: None, line: 802, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 802, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 802, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 802, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 802, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 802, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 802, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 802, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 803, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 803, }, Token { kind: Identifier, lexeme: "_port", computed_lexeme: None, line: 803, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 803, }, Token { kind: Identifier, lexeme: "checkDateTable", computed_lexeme: None, line: 805, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 805, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 805, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 805, }, Token { kind: Identifier, lexeme: "checkDateTable", computed_lexeme: None, line: 806, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 806, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 806, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 806, }, Token { kind: Identifier, lexeme: "checkDateTable", computed_lexeme: None, line: 807, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 807, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 807, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 807, }, Token { kind: Identifier, lexeme: "checkDateTable", computed_lexeme: None, line: 808, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 808, }, Token { kind: Number, lexeme: "0x7fffffff", computed_lexeme: Some( "0x7fffffff", ), line: 808, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 808, }, Token { kind: Identifier, lexeme: "checkDateTable", computed_lexeme: None, line: 809, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 809, }, Token { kind: Number, lexeme: "0x80000000", computed_lexeme: Some( "0x80000000", ), line: 809, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 809, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 810, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 812, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 812, }, Token { kind: String, lexeme: "\"invalid conversion specifier\"", computed_lexeme: None, line: 812, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 812, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 812, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 812, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 812, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 812, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 812, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 812, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 813, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 813, }, Token { kind: String, 
lexeme: "\"invalid conversion specifier\"", computed_lexeme: None, line: 813, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 813, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 813, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 813, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 813, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 813, }, Token { kind: String, lexeme: "\"%9\"", computed_lexeme: None, line: 813, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 813, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 814, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 814, }, Token { kind: String, lexeme: "\"invalid conversion specifier\"", computed_lexeme: None, line: 814, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 814, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 814, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 814, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 814, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 814, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 814, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 814, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 815, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 815, }, Token { kind: String, lexeme: "\"invalid conversion specifier\"", computed_lexeme: None, line: 815, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 815, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 815, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 815, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 815, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 815, }, Token { kind: String, lexeme: "\"%O\"", computed_lexeme: None, line: 815, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 815, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 816, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 816, }, Token { kind: String, lexeme: "\"invalid conversion specifier\"", computed_lexeme: None, line: 816, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 816, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 816, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 816, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 816, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 816, }, Token { kind: String, lexeme: "\"%E\"", computed_lexeme: None, line: 816, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 816, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 817, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 817, }, Token { kind: String, lexeme: "\"invalid conversion specifier\"", computed_lexeme: None, line: 817, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 817, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 817, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 817, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 817, }, Token { kind: Comma, lexeme: ",", computed_lexeme: 
None, line: 817, }, Token { kind: String, lexeme: "\"%Ea\"", computed_lexeme: None, line: 817, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 817, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 819, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 819, }, Token { kind: String, lexeme: "\"not an integer\"", computed_lexeme: None, line: 819, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 819, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 819, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 819, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 819, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 819, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 819, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 819, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 819, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 819, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 819, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 819, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 819, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 819, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 819, }, Token { kind: String, lexeme: "'x'", computed_lexeme: None, line: 819, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 819, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 819, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 820, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 820, }, Token { kind: String, lexeme: "\"not an integer\"", computed_lexeme: None, line: 820, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 820, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 820, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 820, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 820, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 820, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 820, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 820, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 820, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 820, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 820, }, Token { kind: Equals, 
lexeme: "=", computed_lexeme: None, line: 820, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 820, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 820, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 820, }, Token { kind: Number, lexeme: "1.5", computed_lexeme: Some( "1.5", ), line: 820, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 820, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 820, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 822, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 822, }, Token { kind: String, lexeme: "\"missing\"", computed_lexeme: None, line: 822, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 822, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 822, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 822, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 822, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 822, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 822, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 822, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 822, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 822, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 822, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 822, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 825, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 825, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 825, }, Token { kind: Identifier, lexeme: "packsize", computed_lexeme: None, line: 825, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 825, }, Token { kind: String, lexeme: "\"i\"", computed_lexeme: None, line: 825, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 825, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 825, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 825, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 825, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 826, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 826, }, Token { kind: String, lexeme: "\"field 'year' is out-of-bound\"", computed_lexeme: None, line: 826, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 826, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 826, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 826, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 826, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 826, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 827, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 827, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 827, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 827, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 827, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 827, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: 
None, line: 827, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 827, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 827, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 827, }, Token { kind: Number, lexeme: "1899", computed_lexeme: Some( "1899", ), line: 827, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 827, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 827, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 827, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 827, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 827, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 827, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 827, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 827, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 827, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 827, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 828, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 830, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 830, }, Token { kind: Identifier, lexeme: "_port", computed_lexeme: None, line: 830, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 830, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 832, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 832, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 832, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 832, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 832, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 832, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 832, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 832, }, Token { kind: String, lexeme: "\"%Ex\"", computed_lexeme: None, line: 832, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 832, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 832, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 832, }, Token { kind: String, lexeme: "'string'", computed_lexeme: None, line: 832, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 832, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 833, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 833, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 833, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 833, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 833, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 833, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 833, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 833, }, Token { kind: String, lexeme: "\"%Oy\"", computed_lexeme: None, line: 833, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 833, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 833, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 833, }, Token { kind: String, lexeme: "'string'", computed_lexeme: None, line: 833, }, Token { kind: RightParen, 
lexeme: ")", computed_lexeme: None, line: 833, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 836, }, Token { kind: Identifier, lexeme: "t0", computed_lexeme: None, line: 836, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 836, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 836, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 836, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 836, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 836, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 836, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 836, }, Token { kind: Number, lexeme: "1970", computed_lexeme: Some( "1970", ), line: 836, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 836, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 836, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 836, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 836, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 836, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 836, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 836, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 836, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 836, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 837, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 837, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 837, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 837, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 837, }, Token { kind: Number, lexeme: "1970", computed_lexeme: Some( "1970", ), line: 837, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 837, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 837, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 837, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 837, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 837, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 837, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 837, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 837, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 837, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 837, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 837, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 837, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 837, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 837, }, Token { kind: 
Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 837, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 837, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 838, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 838, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 838, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 838, }, Token { kind: Identifier, lexeme: "t0", computed_lexeme: None, line: 838, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 838, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 838, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 838, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 838, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 838, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 838, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 838, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 838, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 838, }, Token { kind: Identifier, lexeme: "t0", computed_lexeme: None, line: 839, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 839, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 839, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 839, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 839, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 839, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 839, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 839, }, Token { kind: Number, lexeme: "1970", computed_lexeme: Some( "1970", ), line: 839, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 839, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 839, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 839, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 839, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 839, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 839, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 839, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 839, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 839, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 840, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 840, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 840, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 840, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 840, }, Token { kind: Number, lexeme: "1970", computed_lexeme: Some( "1970", ), line: 840, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 840, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 840, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( 
"1", ), line: 840, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 840, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 840, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 840, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 840, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 840, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 840, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 840, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 840, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 840, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 840, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 840, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 840, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 841, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 841, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 841, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 841, }, Token { kind: Identifier, lexeme: "t0", computed_lexeme: None, line: 841, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 841, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 841, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 841, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 841, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 841, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 841, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 841, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 841, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 844, }, Token { kind: Identifier, lexeme: "maxint", computed_lexeme: None, line: 844, }, Token { kind: GreaterThanOrEqual, lexeme: ">=", computed_lexeme: None, line: 844, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 844, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 844, }, Token { kind: Number, lexeme: "62", computed_lexeme: Some( "62", ), line: 844, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 844, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 846, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 846, }, Token { kind: String, lexeme: "\"out-of-bound\"", computed_lexeme: None, line: 846, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 846, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 846, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 846, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 846, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 846, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 846, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 846, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 846, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 846, }, Token { kind: 
Identifier, lexeme: "maxint", computed_lexeme: None, line: 846, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 846, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 846, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 846, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 846, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 846, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 846, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 846, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 846, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 846, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 846, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 847, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 847, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 847, }, Token { kind: Identifier, lexeme: "packsize", computed_lexeme: None, line: 847, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 847, }, Token { kind: String, lexeme: "\"i\"", computed_lexeme: None, line: 847, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 847, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 847, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 847, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 847, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 848, }, Token { kind: Identifier, lexeme: "testerr", computed_lexeme: None, line: 848, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 848, }, Token { kind: String, lexeme: "\"out-of-bound\"", computed_lexeme: None, line: 848, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 848, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 848, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 848, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 848, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 848, }, Token { kind: String, lexeme: "\"%Y\"", computed_lexeme: None, line: 848, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 848, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 848, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 848, }, Token { kind: Number, lexeme: "40", computed_lexeme: Some( "40", ), line: 848, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 848, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 848, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 850, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 850, }, Token { kind: String, lexeme: "\" 4-byte time_t\"", computed_lexeme: None, line: 850, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 850, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 851, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 851, }, Token { kind: String, lexeme: "\"cannot be represented\"", computed_lexeme: None, line: 851, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 851, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 851, }, Token { kind: Dot, lexeme: ".", 
computed_lexeme: None, line: 851, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 851, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 851, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 851, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 851, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 851, }, Token { kind: Number, lexeme: "4000", computed_lexeme: Some( "4000", ), line: 851, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 851, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 851, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 851, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 851, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 851, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 851, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 851, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 851, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 851, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 851, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 852, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 854, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 854, }, Token { kind: String, lexeme: "\" 8-byte time_t\"", computed_lexeme: None, line: 854, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 854, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 855, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 855, }, Token { kind: String, lexeme: "\"cannot be represented\"", computed_lexeme: None, line: 855, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 855, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 855, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 855, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 855, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 855, }, Token { kind: String, lexeme: "\"%Y\"", computed_lexeme: None, line: 855, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 855, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 855, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 855, }, Token { kind: Number, lexeme: "60", computed_lexeme: Some( "60", ), line: 855, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 855, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 858, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 858, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 858, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 858, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 858, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 858, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 858, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 859, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 859, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, 
line: 859, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 859, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 859, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 859, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "1899", computed_lexeme: Some( "1899", ), line: 859, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 859, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 859, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 859, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 859, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 859, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 859, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 859, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "59", computed_lexeme: Some( "59", ), line: 859, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 859, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 859, }, Token { kind: Number, lexeme: "59", computed_lexeme: Some( "59", ), line: 859, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 859, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 859, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 859, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 862, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 862, }, Token { kind: String, lexeme: "\"represented\"", computed_lexeme: None, line: 862, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 862, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 862, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 862, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 862, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 862, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 863, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 863, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 863, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 863, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 863, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 863, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 863, }, Token { kind: Number, 
lexeme: "1899", computed_lexeme: Some( "1899", ), line: 863, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 863, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 863, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 863, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 863, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 863, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 863, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 863, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 863, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 863, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 863, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 863, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "59", computed_lexeme: Some( "59", ), line: 863, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 863, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 863, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 863, }, Token { kind: Number, lexeme: "60", computed_lexeme: Some( "60", ), line: 863, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 863, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 863, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 864, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 867, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 867, }, Token { kind: String, lexeme: "\"field 'day' is out-of-bound\"", computed_lexeme: None, line: 867, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 867, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 867, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 867, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 867, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 867, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 868, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 868, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 868, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 868, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 868, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 868, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 868, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 868, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 868, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 868, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 868, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 868, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 868, }, Token { kind: Number, lexeme: "32", computed_lexeme: Some( "32", ), line: 868, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: 
None, line: 868, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 868, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 870, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 870, }, Token { kind: String, lexeme: "\"field 'month' is out-of-bound\"", computed_lexeme: None, line: 870, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 870, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 870, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 870, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 870, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 870, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 871, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 871, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 871, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 871, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 871, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 871, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 871, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 871, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 871, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 871, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 871, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 871, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 871, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 871, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 871, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 871, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 871, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 871, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 871, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 871, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 871, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 871, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 871, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 873, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 873, }, Token { kind: String, lexeme: "\"field 'year' is out-of-bound\"", computed_lexeme: None, line: 873, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 873, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 873, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 873, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 873, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 873, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 874, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 874, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 874, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 874, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 874, }, Token { kind: BitShiftLeft, lexeme: "<<", computed_lexeme: None, line: 
874, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 874, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 874, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 874, }, Token { kind: Number, lexeme: "1900", computed_lexeme: Some( "1900", ), line: 874, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 874, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 874, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 874, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 874, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 874, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 874, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 874, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 874, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 874, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 874, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 876, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 878, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 878, }, Token { kind: String, lexeme: "\" 8-byte time_t\"", computed_lexeme: None, line: 878, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 878, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 879, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 879, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 879, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 879, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 879, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 879, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 879, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 879, }, Token { kind: String, lexeme: "\"%Y\"", computed_lexeme: None, line: 879, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 879, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 879, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 879, }, Token { kind: Number, lexeme: "60", computed_lexeme: Some( "60", ), line: 879, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 879, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 879, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 879, }, Token { kind: Identifier, lexeme: "checkerr", computed_lexeme: None, line: 882, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 882, }, Token { kind: String, lexeme: "\"cannot be represented\"", computed_lexeme: None, line: 882, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 882, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 882, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 882, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 882, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 882, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 882, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 882, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 882, }, Token { kind: 
Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 882, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 882, }, Token { kind: Number, lexeme: "60", computed_lexeme: Some( "60", ), line: 882, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 882, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 882, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 882, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 882, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 882, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 882, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 882, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 882, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 882, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 882, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 883, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 884, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 885, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 887, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 888, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 888, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 888, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 888, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 888, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 888, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 888, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 888, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 888, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 889, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 889, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 889, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 889, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 889, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 889, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 889, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 889, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 889, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 890, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 890, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 890, }, Token { kind: Identifier, lexeme: "isdst", computed_lexeme: None, line: 890, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 890, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 890, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 890, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 891, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 891, }, Token { kind: String, lexeme: "\"no daylight saving information\"", computed_lexeme: None, line: 891, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 891, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 892, }, Token { kind: 
Identifier, lexeme: "assert", computed_lexeme: None, line: 893, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 893, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 893, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 893, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 893, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 893, }, Token { kind: Identifier, lexeme: "isdst", computed_lexeme: None, line: 893, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 893, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 893, }, Token { kind: String, lexeme: "'boolean'", computed_lexeme: None, line: 893, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 893, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 894, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 895, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 895, }, Token { kind: Identifier, lexeme: "isdst", computed_lexeme: None, line: 895, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 895, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 895, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 896, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 896, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 896, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 896, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 896, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 896, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 896, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 896, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 896, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 897, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 897, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 897, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 897, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 897, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 897, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 898, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 900, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 900, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 900, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 900, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 900, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 900, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 900, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 900, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 900, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 901, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 901, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 901, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 901, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 901, }, Token { kind: 
LeftParen, lexeme: "(", computed_lexeme: None, line: 901, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 901, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 901, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 902, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 902, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 902, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 902, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 902, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 902, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 902, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 902, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 902, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 902, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 903, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 903, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 903, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 903, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 903, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 903, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 903, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 903, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 903, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 905, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "math", computed_lexeme: None, line: 905, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "abs", computed_lexeme: None, line: 905, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 905, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 905, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 905, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 905, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 905, }, Token { kind: Slash, lexeme: "/", computed_lexeme: None, line: 905, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 905, }, Token { kind: Number, lexeme: "24", computed_lexeme: Some( "24", ), line: 905, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 905, }, Token { kind: Number, lexeme: "3600", computed_lexeme: Some( "3600", ), line: 905, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 905, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 905, }, Token { kind: Number, lexeme: "365", computed_lexeme: Some( "365", ), line: 905, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 905, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 905, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 905, }, Token { kind: 
RightParen, lexeme: ")", computed_lexeme: None, line: 905, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 908, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 908, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 908, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 908, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 908, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 908, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 908, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 909, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 909, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 909, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 909, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 909, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 909, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 909, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 909, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 909, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 909, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 909, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 909, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 909, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 910, }, Token { kind: Identifier, lexeme: "diff", computed_lexeme: None, line: 910, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 910, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 910, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 910, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 910, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 910, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 910, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 910, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 910, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 910, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 911, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 911, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 911, }, Token { kind: LessThanOrEqual, lexeme: "<=", computed_lexeme: None, line: 911, }, Token { kind: Identifier, lexeme: "diff", computed_lexeme: None, line: 911, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 911, }, Token { kind: Identifier, lexeme: "diff", computed_lexeme: None, line: 911, }, Token { kind: LessThanOrEqual, lexeme: "<=", computed_lexeme: None, line: 911, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 911, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 911, }, Token { kind: Identifier, lexeme: "diff", computed_lexeme: None, line: 912, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 912, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 912, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 912, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 
912, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 912, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 912, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 912, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 912, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 912, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 913, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 913, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 913, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 913, }, Token { kind: LessThanOrEqual, lexeme: "<=", computed_lexeme: None, line: 913, }, Token { kind: Identifier, lexeme: "diff", computed_lexeme: None, line: 913, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 913, }, Token { kind: Identifier, lexeme: "diff", computed_lexeme: None, line: 913, }, Token { kind: LessThanOrEqual, lexeme: "<=", computed_lexeme: None, line: 913, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 913, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 913, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 915, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 915, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 915, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 915, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 915, }, Token { kind: Number, lexeme: "2000", computed_lexeme: Some( "2000", ), line: 915, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 915, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 915, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 915, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 915, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 915, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 915, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 915, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 915, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 915, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 915, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 915, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 915, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 915, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 915, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "t2", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "os", 
computed_lexeme: None, line: 916, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 916, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Number, lexeme: "2000", computed_lexeme: Some( "2000", ), line: 916, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 916, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 916, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 916, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 916, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 916, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 916, }, Token { kind: Number, lexeme: "19", computed_lexeme: Some( "19", ), line: 916, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 916, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 917, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 917, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 917, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 917, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 917, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 917, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 917, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 917, }, Token { kind: Identifier, lexeme: "t2", computed_lexeme: None, line: 917, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 917, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 917, }, Token { kind: Number, lexeme: "60", computed_lexeme: Some( "60", ), line: 917, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 917, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 917, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 917, }, Token { kind: Number, lexeme: "19", computed_lexeme: Some( "19", ), line: 917, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 917, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: 
LeftBrace, lexeme: "{", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: Number, lexeme: "2005", computed_lexeme: Some( "2005", ), line: 920, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 920, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 920, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 920, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 920, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 920, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 920, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 920, }, Token { kind: Number, lexeme: "3602", computed_lexeme: Some( "3602", ), line: 920, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 920, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 921, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 921, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 921, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 921, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 921, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 921, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 922, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 922, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 922, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 922, }, Token { kind: Number, lexeme: "31", computed_lexeme: Some( "31", ), line: 922, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 922, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 922, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 922, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 922, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 
922, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 922, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 922, }, Token { kind: Number, lexeme: "2004", computed_lexeme: Some( "2004", ), line: 922, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 922, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 923, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 923, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 923, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 923, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 923, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 923, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 923, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 923, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 923, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 923, }, Token { kind: Number, lexeme: "59", computed_lexeme: Some( "59", ), line: 923, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 923, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 923, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 923, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 923, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 923, }, Token { kind: Number, lexeme: "58", computed_lexeme: Some( "58", ), line: 923, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 923, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 924, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 924, }, Token { kind: Identifier, lexeme: "yday", computed_lexeme: None, line: 924, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 924, }, Token { kind: Number, lexeme: "366", computed_lexeme: Some( "366", ), line: 924, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 924, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 926, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 926, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 926, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 926, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 926, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 926, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 926, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 926, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 927, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 927, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 927, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 927, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 927, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 927, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 927, }, Token { kind: String, lexeme: "'%d %m %Y %H %M %S'", computed_lexeme: None, line: 927, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 927, }, Token { kind: Local, lexeme: 
"local", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 928, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 928, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 928, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 928, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 928, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 928, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 928, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "match", computed_lexeme: None, line: 928, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 928, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 928, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 928, }, Token { kind: String, lexeme: "\"(%d+) (%d+) (%d+) (%d+) (%d+) (%d+)\"", computed_lexeme: None, line: 929, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 929, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 930, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 930, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 930, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 930, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 930, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 930, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 931, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 931, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 931, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 931, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 931, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 931, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 932, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 932, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 932, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 932, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 932, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 932, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 933, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 933, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 933, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 933, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 933, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 933, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 934, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 934, }, 
Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 934, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 934, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 934, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 934, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 935, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 935, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 935, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 935, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 935, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 935, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 936, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 936, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 936, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 936, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 936, }, Token { kind: String, lexeme: "'test done on %2.2d/%2.2d/%d'", computed_lexeme: None, line: 936, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 936, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 936, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 936, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 936, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 936, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 937, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 937, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 937, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 937, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 937, }, Token { kind: String, lexeme: "', at %2.2d:%2.2d:%2.2d\\n'", computed_lexeme: None, line: 937, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 937, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 937, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 937, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 937, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 937, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 938, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 938, }, Token { kind: Identifier, lexeme: 
"write", computed_lexeme: None, line: 938, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 938, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 938, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 938, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 938, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 938, }, Token { kind: String, lexeme: "'%s\\n'", computed_lexeme: None, line: 938, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 938, }, Token { kind: Identifier, lexeme: "_VERSION", computed_lexeme: None, line: 938, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 938, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 938, }, ]