---
source: src/main.rs
expression: scanned
input_file: test-data/lua5.2-tests/files.lua
---
[ Token { kind: Identifier, lexeme: "debug", computed_lexeme: None, line: 1, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 1, }, Token { kind: Identifier, lexeme: "require", computed_lexeme: None, line: 1, }, Token { kind: String, lexeme: "\"debug\"", computed_lexeme: None, line: 1, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 3, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 3, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 3, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 3, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 3, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 3, }, Token { kind: Identifier, lexeme: "getenv", computed_lexeme: None, line: 3, }, Token { kind: String, lexeme: "\"PATH\"", computed_lexeme: None, line: 3, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 3, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 3, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 3, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 3, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 5, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 5, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 5, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 5, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 5, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 5, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 5, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 5, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 5, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 6, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 6, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 6, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 6, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 6, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 6, }, Token { kind: String, lexeme: "\"non-existent-file\"", computed_lexeme: None, line: 6, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 6, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 6, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 7, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 7, }, Token { kind:
Dot, lexeme: ".", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 7, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 7, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 7, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 7, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 7, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 7, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 7, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 10, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 10, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 10, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 10, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 10, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 10, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 10, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 10, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 10, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 11, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 11, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 11, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 11, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 11, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 11, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 11, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 12, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "stderr", computed_lexeme: None, line: 12, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 12, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 12, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 12, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 12, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 15, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 15, }, Token { kind: 
Identifier, lexeme: "input", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 15, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 15, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 15, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 15, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 15, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 15, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 15, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 15, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 15, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 15, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 15, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 15, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 15, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 16, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 16, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 16, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 16, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 16, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 16, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 16, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 16, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 16, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 16, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "stderr", computed_lexeme: None, line: 16, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 16, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 16, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 16, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 16, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 17, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 17, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 17, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 17, }, Token { kind: LeftParen, 
lexeme: "(", computed_lexeme: None, line: 17, }, Token { kind: Number, lexeme: "8", computed_lexeme: Some( "8", ), line: 17, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 17, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 17, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 17, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 17, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 18, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 18, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 18, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 18, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 18, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 18, }, Token { kind: Identifier, lexeme: "setmetatable", computed_lexeme: None, line: 18, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 18, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 18, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 18, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 18, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 18, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 18, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 19, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 19, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 19, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 19, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 19, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 19, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 19, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 19, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 19, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 19, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 19, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 21, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 21, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 21, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 21, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 21, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 21, }, Token { kind: String, lexeme: "'xuxu_nao_existe'", computed_lexeme: None, line: 21, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 21, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 22, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 22, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 22, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 22, 
}, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 22, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 22, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 22, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 22, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 22, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 22, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 22, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 22, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 22, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 22, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 22, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 24, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 24, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 24, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 24, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 24, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 24, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 24, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 24, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 24, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 24, }, Token { kind: String, lexeme: "'/a/b/c/d'", computed_lexeme: None, line: 24, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 24, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 24, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 24, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 25, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 25, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 25, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 25, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 25, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 25, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 25, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 25, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 25, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 25, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 25, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 25, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 25, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 25, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 25, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 25, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 25, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 25, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 25, }, Token { kind: Local, 
lexeme: "local", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 27, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 27, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 27, }, Token { kind: Identifier, lexeme: "tmpname", computed_lexeme: None, line: 27, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 27, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 27, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 28, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "msg", computed_lexeme: None, line: 28, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 28, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 28, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 28, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 28, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 28, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 28, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 28, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 29, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 29, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 29, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 29, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 30, }, Token { kind: Identifier, lexeme: "Message", computed_lexeme: None, line: 30, }, Token { kind: Or, lexeme: "or", computed_lexeme: None, line: 30, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 30, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 30, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 30, }, Token { kind: String, lexeme: "\"'os.tmpname' file cannot be open; skipping file tests\"", computed_lexeme: None, line: 30, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 30, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 32, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 33, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 33, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 33, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 33, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 33, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 35, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 35, }, Token { kind: String, lexeme: "'testing i/o'", computed_lexeme: None, line: 35, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 35, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 37, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 37, }, Token { kind: Dot, 
lexeme: ".", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "tmpname", computed_lexeme: None, line: 37, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 37, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 37, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 39, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 39, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 39, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 39, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 39, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 39, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 39, }, Token { kind: String, lexeme: "\"rw\"", computed_lexeme: None, line: 39, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 39, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 39, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 40, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 40, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 40, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 40, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 40, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 40, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 40, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 40, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 40, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 40, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 40, }, Token { kind: String, lexeme: "\"rb+\"", computed_lexeme: None, line: 40, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 40, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 40, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 41, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 41, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 41, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 41, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 41, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 41, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 41, }, Token { kind: String, lexeme: "\"r+bk\"", computed_lexeme: None, line: 41, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 41, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 41, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 42, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, 
line: 42, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 42, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 42, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 42, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 42, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 42, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 42, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 42, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 42, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 42, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 42, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 42, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 42, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 43, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 43, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 43, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 43, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 43, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 43, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 43, }, Token { kind: String, lexeme: "\"+\"", computed_lexeme: None, line: 43, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 43, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 43, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 44, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 44, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 44, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 44, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 44, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 44, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 44, }, Token { kind: String, lexeme: "\"b\"", computed_lexeme: None, line: 44, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 44, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 44, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 45, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 45, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 45, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 45, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 45, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 45, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 45, }, Token { kind: Comma, lexeme: ",", 
computed_lexeme: None, line: 45, }, Token { kind: String, lexeme: "\"r+b\"", computed_lexeme: None, line: 45, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 45, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 45, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 45, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 45, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 45, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 45, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 46, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 46, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 46, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 46, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 46, }, Token { kind: String, lexeme: "\"r+\"", computed_lexeme: None, line: 46, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 46, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 46, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 46, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 46, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 46, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 47, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 47, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 47, }, Token { kind: String, lexeme: "\"rb\"", computed_lexeme: None, line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 47, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 47, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 47, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 49, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 49, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 49, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 49, }, Token { kind: Identifier, lexeme: "setlocale", computed_lexeme: None, line: 49, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 49, }, Token { kind: String, lexeme: "'C'", computed_lexeme: None, line: 49, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 49, }, Token { kind: String, lexeme: "'all'", computed_lexeme: None, line: 49, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 49, }, 
Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 49, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 51, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 51, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 51, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 51, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 51, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 51, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 51, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 51, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 51, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 51, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 51, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 53, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 53, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 53, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 53, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 53, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 53, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 54, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 54, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 54, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 54, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 54, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 54, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 54, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 54, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 54, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 55, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 55, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 55, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 55, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 55, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 55, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 55, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 55, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 55, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 55, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 55, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 56, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 56, }, Token { kind: Identifier, 
lexeme: "output", computed_lexeme: None, line: 56, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 56, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 56, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 56, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 57, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 57, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 57, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 57, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 57, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 57, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 57, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 57, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 57, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 57, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 57, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 57, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 59, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 59, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 59, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 59, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 59, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 59, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 59, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 59, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 59, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 59, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 59, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 60, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 60, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 60, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 60, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 60, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 60, }, Token { kind: String, lexeme: "\"alo alo\"", computed_lexeme: None, line: 60, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 60, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 60, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 60, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 60, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 60, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 60, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 60, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 60, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 60, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 60, }, Token { kind: String, lexeme: 
"\"alo alo\"", computed_lexeme: None, line: 60, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 60, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 60, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 61, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 61, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 61, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 61, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 61, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 61, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 61, }, Token { kind: String, lexeme: "\"cur\"", computed_lexeme: None, line: 61, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 61, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 61, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 61, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 61, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 61, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 61, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 61, }, Token { kind: String, lexeme: "\"alo alo\"", computed_lexeme: None, line: 61, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 61, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 61, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 61, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 61, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 62, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 62, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 62, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 62, }, Token { kind: String, lexeme: "\"joao\"", computed_lexeme: None, line: 62, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 62, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 62, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 63, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 63, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 63, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 63, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 63, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 63, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 63, }, Token { kind: String, lexeme: "\"end\"", 
computed_lexeme: None, line: 63, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 63, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 63, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 63, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 63, }, Token { kind: String, lexeme: "\"alo joao\"", computed_lexeme: None, line: 63, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 63, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 63, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 65, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 65, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 65, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 65, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 65, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 65, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 65, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 65, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 65, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 65, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 65, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 65, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 67, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 67, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 67, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 67, }, Token { kind: String, lexeme: "'\"álo\"'", computed_lexeme: None, line: 67, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 67, }, Token { kind: String, lexeme: "\"{a}\\n\"", computed_lexeme: None, line: 67, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 67, }, Token { kind: String, lexeme: "\"second line\\n\"", computed_lexeme: None, line: 67, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 67, }, Token { kind: String, lexeme: "\"third line \\n\"", computed_lexeme: None, line: 67, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 67, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 67, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 68, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 68, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 68, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 68, }, Token { kind: String, lexeme: "'çfourth_line'", computed_lexeme: None, line: 68, }, Token { kind: 
RightParen, lexeme: ")", computed_lexeme: None, line: 68, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 68, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 69, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 69, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 69, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 69, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 69, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 70, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 70, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 70, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 71, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 71, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "stdin", computed_lexeme: None, line: 71, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "rawequal", computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 71, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 71, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 71, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 71, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 72, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 72, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 72, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 72, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 75, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 75, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 75, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 76, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 76, }, Token { kind: Equals, lexeme: 
"=", computed_lexeme: None, line: 76, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 76, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 76, }, Token { kind: Number, lexeme: "120", computed_lexeme: Some( "120", ), line: 76, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 76, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 77, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 77, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 77, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 77, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 77, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 77, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 77, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 78, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 78, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 78, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 78, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 79, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 79, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 79, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 79, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 79, }, Token { kind: String, lexeme: "'r'", computed_lexeme: None, line: 79, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 79, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 79, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 80, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 80, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 80, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 80, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 80, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 81, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 82, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 82, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 82, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 83, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 85, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 85, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 85, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 85, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 85, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 85, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 85, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 85, }, Token { 
kind: RightParen, lexeme: ")", computed_lexeme: None, line: 85, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 86, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 86, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 86, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 86, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 86, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 88, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "rename", computed_lexeme: None, line: 88, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 88, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 88, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 89, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "rename", computed_lexeme: None, line: 89, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 89, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 89, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 89, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 89, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 91, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 91, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 91, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 91, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 91, }, Token { kind: String, lexeme: "\"ab\"", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 91, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 92, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 92, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 92, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 92, }, Token { kind: Identifier, 
lexeme: "write", computed_lexeme: None, line: 92, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 92, }, Token { kind: String, lexeme: "\"\\n\\n\\t\\t 3450\\n\"", computed_lexeme: None, line: 92, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 92, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 92, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 92, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 93, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 93, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 93, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 93, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 96, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 96, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 96, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 96, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 96, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 96, }, Token { kind: String, lexeme: "\"non-existent-file\"", computed_lexeme: None, line: 96, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 96, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 96, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 97, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 97, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "rename", computed_lexeme: None, line: 97, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 97, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 97, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 97, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 97, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 98, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 98, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 98, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 98, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 98, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 98, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 99, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 99, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 99, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 99, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 99, }, Token { kind: 
Identifier, lexeme: "file", computed_lexeme: None, line: 99, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 99, }, Token { kind: While, lexeme: "while", computed_lexeme: None, line: 100, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 100, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 100, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 100, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 100, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 100, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 100, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 101, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 101, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 101, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 101, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 101, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 101, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 101, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 101, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 102, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 102, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 102, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 102, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 102, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 102, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 102, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 102, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 104, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 104, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 104, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 104, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 104, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 104, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 104, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 104, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 104, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 104, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 104, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 104, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 105, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 105, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 105, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 105, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 105, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 107, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 107, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 107, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 107, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 107, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 107, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 107, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 108, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 108, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 108, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 108, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 108, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 108, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 108, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 108, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 109, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 109, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 109, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 109, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 109, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 109, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 110, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 110, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 110, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 110, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 110, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 110, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 110, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 110, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 110, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 110, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 110, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 110, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 110, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 110, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 111, }, Token { kind: In, 
lexeme: "in", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 111, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 111, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 111, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 111, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 111, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 111, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 111, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 111, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 111, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 111, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 111, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 112, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 112, }, Token { kind: Identifier, lexeme: "tostring", computed_lexeme: None, line: 112, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 112, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 112, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 112, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 112, }, Token { kind: Identifier, lexeme: "sub", computed_lexeme: None, line: 112, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 112, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 112, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 112, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 112, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 112, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 112, }, Token { kind: String, lexeme: "\"file \"", computed_lexeme: None, line: 112, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 112, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 113, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 113, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 113, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 113, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 113, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 113, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 113, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 113, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 113, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 113, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 114, }, Token { 
kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 114, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 114, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 114, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 114, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 114, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 114, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 114, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 115, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 115, }, Token { kind: Identifier, lexeme: "tostring", computed_lexeme: None, line: 115, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 115, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 115, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 115, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 115, }, Token { kind: String, lexeme: "\"file (closed)\"", computed_lexeme: None, line: 115, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 115, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 116, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 116, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 116, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 116, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 116, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 116, }, Token { kind: String, lexeme: "\"closed file\"", computed_lexeme: None, line: 116, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 116, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 117, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 117, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 117, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 117, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 118, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 118, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 118, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 118, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 118, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 118, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 118, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 118, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 118, }, Token { kind: Identifier, lexeme: "lines", 
computed_lexeme: None, line: 118, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 118, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 118, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 119, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 119, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 119, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 119, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 119, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 119, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 119, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 119, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 119, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 119, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 119, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 119, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 120, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 120, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 120, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 120, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 120, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 120, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 120, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 121, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 121, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 121, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 121, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 121, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 121, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 121, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 121, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 121, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 123, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 123, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 123, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 123, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 123, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 124, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 125, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "b", 
computed_lexeme: None, line: 125, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 125, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 125, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 125, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 125, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 125, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 125, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 125, }, Token { kind: String, lexeme: "\"xuxu\"", computed_lexeme: None, line: 125, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 125, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 126, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 126, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 126, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 126, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 126, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 126, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 126, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 126, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 126, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 126, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 126, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 126, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 126, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 126, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 126, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 127, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 128, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 128, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 128, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 128, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 128, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 128, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 128, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 128, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 128, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 129, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 129, }, Token { kind: 
Identifier, lexeme: "io", computed_lexeme: None, line: 129, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 129, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 129, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 129, }, Token { kind: Number, lexeme: "5", computed_lexeme: Some( "5", ), line: 129, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 129, }, Token { kind: String, lexeme: "'*l'", computed_lexeme: None, line: 129, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 129, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 129, }, Token { kind: String, lexeme: "'\"álo\"'", computed_lexeme: None, line: 129, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 129, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 130, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 130, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 130, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 130, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 130, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 130, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 130, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 130, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 130, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 130, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 130, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 131, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 131, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 131, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 131, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 131, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 131, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 131, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 131, }, Token { kind: String, lexeme: "\"second line\"", computed_lexeme: None, line: 131, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 131, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 132, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 132, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 132, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 132, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 132, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 132, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 132, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 132, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 133, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: 
None, line: 133, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 133, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 133, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 133, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 133, }, Token { kind: String, lexeme: "\"third line \"", computed_lexeme: None, line: 133, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 133, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 134, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 134, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 134, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 134, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 134, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 134, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 134, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 134, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 134, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 134, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 134, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 135, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 135, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 135, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 135, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 135, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 135, }, Token { kind: String, lexeme: "'*L'", computed_lexeme: None, line: 135, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 135, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 135, }, Token { kind: String, lexeme: "\"third line \\n\"", computed_lexeme: None, line: 135, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 135, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 136, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 136, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 136, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 136, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 136, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 136, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 136, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 136, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 136, }, Token { kind: String, lexeme: "\"ç\"", computed_lexeme: None, line: 136, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 136, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 137, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 137, }, Token { 
kind: Identifier, lexeme: "io", computed_lexeme: None, line: 137, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 137, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 137, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 137, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 137, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 137, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 137, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 137, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 137, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 137, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 137, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 137, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 138, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 138, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 138, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 138, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 138, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 138, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 138, }, Token { kind: String, lexeme: "\"cur\"", computed_lexeme: None, line: 138, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 138, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 138, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 138, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 138, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 138, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 138, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 139, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 139, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 139, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 139, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 139, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 139, }, Token { kind: String, lexeme: "\"fourth_line\"", computed_lexeme: None, line: 139, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 139, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 140, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 140, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 140, }, Token { kind: LeftParen, lexeme: 
"(", computed_lexeme: None, line: 140, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 140, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 140, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 140, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 140, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 141, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 141, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 141, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 141, }, Token { kind: String, lexeme: "'*n'", computed_lexeme: None, line: 141, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 141, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 141, }, Token { kind: Number, lexeme: "3450", computed_lexeme: Some( "3450", ), line: 141, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 141, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 142, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 142, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 142, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 142, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 142, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 142, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 142, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 142, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 142, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 142, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 142, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 143, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 143, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 143, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 143, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 143, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 143, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 143, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 143, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 143, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 143, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 143, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 144, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 144, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 144, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 144, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 144, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 144, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, 
line: 144, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 144, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 144, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 145, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 145, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 145, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 145, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 145, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 145, }, Token { kind: Number, lexeme: "30000", computed_lexeme: Some( "30000", ), line: 145, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 145, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 145, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 145, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 145, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 146, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 146, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 146, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 146, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 146, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 146, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 146, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 146, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 146, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 146, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 146, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 146, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 146, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 146, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 146, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 146, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 146, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 146, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 147, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 147, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 147, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 147, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 147, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 147, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 147, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 147, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 148, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 148, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 148, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 148, }, Token { kind: Dot, lexeme: ".", 
computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 148, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 148, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 148, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 148, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 148, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 148, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 148, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 148, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 148, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 148, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 148, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 149, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 149, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 149, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 149, }, Token { kind: String, lexeme: "'*n'", computed_lexeme: None, line: 149, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 149, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 149, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 149, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 149, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 150, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 150, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 150, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 150, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 150, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 150, }, Token { kind: String, lexeme: "'*n'", computed_lexeme: None, line: 150, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 150, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 150, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 150, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 150, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 150, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 150, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 150, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 150, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 150, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 151, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 151, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 151, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 151, }, Token { kind: 
String, lexeme: "'*a'", computed_lexeme: None, line: 151, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 151, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 151, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 151, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 151, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 152, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 152, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 152, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 152, }, Token { kind: String, lexeme: "'*a'", computed_lexeme: None, line: 152, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 152, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 152, }, Token { kind: String, lexeme: "''", computed_lexeme: None, line: 152, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 152, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 153, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 153, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 153, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 154, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 154, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 154, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 154, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 155, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 155, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 155, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 155, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 155, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 155, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 155, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 155, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 155, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 155, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 156, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 156, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 156, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 156, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 156, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 156, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 156, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 158, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 158, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 158, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 
158, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 158, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 158, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 158, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 158, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 158, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 160, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 160, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 160, }, Token { kind: String, lexeme: "'0123456789'", computed_lexeme: None, line: 160, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 161, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 161, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 161, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 161, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 161, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 161, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 161, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 161, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 161, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 161, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 162, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 162, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 162, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 162, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 162, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 162, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 162, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 162, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 162, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 162, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 162, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 162, }, Token { kind: Caret, lexeme: "^", computed_lexeme: None, line: 162, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 162, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 162, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 164, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 164, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 164, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 164, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 164, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 164, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 165, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 165, }, Token { kind: Identifier, lexeme: "write", 
computed_lexeme: None, line: 165, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 165, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 165, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 165, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 165, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 165, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 165, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 165, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 165, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 166, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 166, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 166, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 166, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 166, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 167, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 167, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 167, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 167, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 167, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 167, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 167, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 167, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 167, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 167, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 168, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 168, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 168, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 168, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 168, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 168, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 168, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 168, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 168, }, Token { kind: String, lexeme: "\"a+b\"", computed_lexeme: None, line: 168, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 168, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 169, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 169, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 169, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 169, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 169, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 169, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 170, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 170, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 170, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 172, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 172, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 172, }, Token { kind: 
Dot, lexeme: ".", computed_lexeme: None, line: 172, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 172, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 172, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 172, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 172, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 172, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 172, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 172, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 172, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 172, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 173, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 173, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 173, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 173, }, Token { kind: String, lexeme: "';'", computed_lexeme: None, line: 173, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 173, }, Token { kind: String, lexeme: "'end of file\\n'", computed_lexeme: None, line: 173, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 173, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 173, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 174, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "flush", computed_lexeme: None, line: 174, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 174, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 174, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 174, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "flush", computed_lexeme: None, line: 174, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 174, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 174, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 175, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 175, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 175, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 175, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 175, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 176, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 176, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 176, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 176, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 178, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 178, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 178, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 178, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 178, }, Token 
{ kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 179, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 179, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 179, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 179, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 179, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 179, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 179, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 179, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 180, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 180, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 180, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 180, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 180, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 180, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 180, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 180, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 180, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 180, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 180, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 181, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 181, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 181, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 181, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 181, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 181, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 181, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 181, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 181, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 181, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 182, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 182, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 182, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 182, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 182, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 182, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 182, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 182, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 182, }, Token { kind: String, lexeme: "' '", 
computed_lexeme: None, line: 182, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 182, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 183, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 183, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 183, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 183, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 183, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 183, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 183, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 183, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 183, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 184, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 184, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 184, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 184, }, Token { kind: String, lexeme: "'*a'", computed_lexeme: None, line: 184, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 184, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 184, }, Token { kind: String, lexeme: "';end of file\\n'", computed_lexeme: None, line: 184, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 184, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 185, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 185, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 185, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 185, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 185, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 185, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 185, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 185, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 185, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 185, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 185, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 186, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 186, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 186, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 186, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 186, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 186, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 186, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 186, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 186, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 186, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 190, }, Token { 
kind: Local, lexeme: "local", computed_lexeme: None, line: 191, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 191, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 191, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 191, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 191, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 191, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 193, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 193, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 193, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 193, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 193, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 193, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 193, }, Token { kind: String, lexeme: "\"string\"", computed_lexeme: None, line: 193, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 193, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 193, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 193, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 193, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 193, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 193, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 193, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 193, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 194, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 197, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 197, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 197, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 197, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 197, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 197, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 197, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 197, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 197, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 197, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 197, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 198, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 198, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 198, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 198, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 198, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 198, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 198, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 198, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 198, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 198, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 198, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 198, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 199, }, 
Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 199, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 199, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 199, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 199, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 199, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 199, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 199, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 199, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 199, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 199, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 199, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 199, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 200, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 200, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 200, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 200, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 200, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 200, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 200, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 200, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 202, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 202, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 202, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 202, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 202, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 202, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 202, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 203, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 203, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 203, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 203, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 203, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 203, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 203, }, Token { kind: String, lexeme: "\"whatever\"", computed_lexeme: None, line: 203, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 203, }, 
Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 204, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 204, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 204, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 204, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 204, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 204, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 204, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 204, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 204, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 204, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 204, }, Token { kind: String, lexeme: "\"number\"", computed_lexeme: None, line: 204, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 204, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 205, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 205, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 205, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 205, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 205, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 205, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 207, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 207, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 207, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 207, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 207, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 207, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 207, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 207, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 207, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 207, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 208, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 208, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 208, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 208, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 208, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 208, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 
208, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 208, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 209, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 209, }, Token { kind: Identifier, lexeme: "r", computed_lexeme: None, line: 209, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 209, }, Token { kind: False, lexeme: "false", computed_lexeme: None, line: 209, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 209, }, Token { kind: Identifier, lexeme: "ismsg", computed_lexeme: None, line: 209, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 209, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 209, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 209, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 209, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 210, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 210, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 210, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 210, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 210, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 210, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 210, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 210, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 211, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 213, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 213, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 213, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 213, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 213, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 213, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 213, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 213, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 213, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 216, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 216, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 216, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 216, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 216, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 216, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 216, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 216, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 216, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 216, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 217, }, Token { kind: Dot, lexeme: 
".", computed_lexeme: None, line: 217, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 217, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 217, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 217, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 217, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 218, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 218, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 218, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 218, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 218, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 218, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 218, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 218, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 218, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 219, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 219, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 219, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 219, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 219, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 219, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 219, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 219, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 219, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 220, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 220, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 220, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 220, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 220, }, Token { kind: String, lexeme: "\"line\\n\"", computed_lexeme: None, line: 220, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 220, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 221, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 221, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 221, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 221, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 221, }, Token { kind: String, lexeme: "\"other\"", computed_lexeme: None, line: 221, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 221, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 222, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 222, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 222, }, Token { kind: String, lexeme: "\"*L\"", 
computed_lexeme: None, line: 222, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 222, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 222, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 222, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 223, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 223, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 223, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 223, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 223, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 223, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 223, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 223, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 225, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 225, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 225, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 225, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 225, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 226, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 226, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 226, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 226, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 227, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 227, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 227, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 227, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 227, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 227, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 227, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 227, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 227, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 227, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 228, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, 
line: 228, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 228, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 228, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 228, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 229, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 229, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 229, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 229, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 229, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 231, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 231, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 231, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 231, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 232, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 232, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 232, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 233, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 233, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 233, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 233, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 233, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 233, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 233, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 233, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 233, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 233, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 233, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 233, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 234, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 234, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 234, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 234, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 234, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 234, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 235, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 235, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 235, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 235, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 235, }, Token { kind: Colon, lexeme: ":", computed_lexeme: 
None, line: 235, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 235, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 235, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 235, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 237, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 237, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 237, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 238, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 238, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 238, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 238, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 238, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 238, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 238, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 238, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 238, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 238, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 238, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 239, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 239, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 239, }, Token { kind: String, lexeme: "\"\\n\\nline\\nother\"", computed_lexeme: None, line: 239, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 239, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 241, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 241, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 241, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 242, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 242, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 242, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 242, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 242, }, Token { kind: String, lexeme: "\"*l\"", computed_lexeme: None, line: 242, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 242, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 242, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 242, }, 
Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 242, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "l", computed_lexeme: None, line: 242, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 242, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 243, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 243, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 243, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 243, }, Token { kind: String, lexeme: "\"lineother\"", computed_lexeme: None, line: 243, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 243, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 245, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 245, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 245, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 245, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 245, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 245, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 245, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 245, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 245, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 245, }, Token { kind: String, lexeme: "\"a = 10 + 34\\na = 2*a\\na = -a\\n\"", computed_lexeme: None, line: 245, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 245, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 245, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 245, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 245, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 246, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 246, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 246, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 246, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 246, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 247, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 247, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 247, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 247, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 247, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 247, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 247, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 247, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 247, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 247, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 247, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 247, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 247, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 247, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 247, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 248, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 248, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 248, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 248, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 248, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 248, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 248, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 248, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 248, }, Token { kind: Number, lexeme: "34", computed_lexeme: Some( "34", ), line: 248, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 248, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 248, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 248, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 248, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 248, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 252, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 252, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 252, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 252, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 252, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 252, }, Token { kind: String, lexeme: "\"0123456789\\n\"", computed_lexeme: None, line: 252, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 252, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 252, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 252, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 252, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 253, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 253, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 253, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 253, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 253, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 253, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 253, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 253, }, Token { kind: Comma, lexeme: ",", 
computed_lexeme: None, line: 253, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 253, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 253, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 253, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 254, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 254, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 254, }, Token { kind: String, lexeme: "\"\\n\"", computed_lexeme: None, line: 254, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 254, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 254, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 254, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 254, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 254, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 254, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 254, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 255, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 255, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 255, }, Token { kind: Identifier, lexeme: "tonumber", computed_lexeme: None, line: 255, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 255, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 255, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 255, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 255, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 255, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 255, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 255, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 255, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 256, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 257, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 259, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 259, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 259, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 259, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 259, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 259, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 259, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 259, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 259, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 259, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 259, }, Token { kind: String, lexeme: "\"*a\"", computed_lexeme: None, line: 259, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: 
None, line: 259, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 259, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 260, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 260, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 260, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 260, }, Token { kind: String, lexeme: "\"0\"", computed_lexeme: None, line: 260, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 260, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 260, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 260, }, Token { kind: String, lexeme: "\"12\"", computed_lexeme: None, line: 260, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 260, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 260, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 260, }, Token { kind: String, lexeme: "\"3456789\\n\"", computed_lexeme: None, line: 260, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 260, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 261, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 263, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 263, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 263, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 263, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 263, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 263, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 263, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 263, }, Token { kind: String, lexeme: "\"*a\"", computed_lexeme: None, line: 263, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 263, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 263, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 263, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 263, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 263, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 263, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 264, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 264, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 264, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 264, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 264, }, Token { kind: Break, lexeme: "break", computed_lexeme: None, line: 264, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 264, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 265, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 265, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 265, }, 
Token { kind: String, lexeme: "\"0123456789\\n\"", computed_lexeme: None, line: 265, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 265, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 265, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 265, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 265, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 265, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 265, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 265, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 265, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 266, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 267, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 267, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 267, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 269, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 269, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 269, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 269, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 269, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 269, }, Token { kind: String, lexeme: "\"00\\n10\\n20\\n30\\n40\\n\"", computed_lexeme: None, line: 269, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 269, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 269, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 269, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 269, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 270, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 270, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 270, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 270, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 270, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 270, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 270, }, Token { kind: String, lexeme: "\"*n\"", computed_lexeme: None, line: 270, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 270, }, Token { kind: String, lexeme: "\"*n\"", computed_lexeme: None, line: 270, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 270, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 270, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 271, 
}, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 271, }, Token { kind: Number, lexeme: "40", computed_lexeme: Some( "40", ), line: 271, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 271, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 271, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 271, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 271, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 271, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 271, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 272, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 272, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 272, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 272, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 272, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 272, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 272, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 272, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 272, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 273, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 274, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 278, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 278, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 278, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 278, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 278, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 278, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 278, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 279, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 279, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 279, }, Token { kind: MultilineString, lexeme: "[[\nlocal y\n= X\nX =\nX *\n2 +\nX;\nX =\nX\n- y;\n]]", computed_lexeme: None, line: 289, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 289, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 289, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 289, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 290, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 290, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 290, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 290, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 290, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 291, }, 
Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 291, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 291, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 292, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 292, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 292, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 293, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 293, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 293, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 293, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 293, }, Token { kind: String, lexeme: "\"*L\"", computed_lexeme: None, line: 293, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 293, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 293, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 293, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 293, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 294, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 294, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 294, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 294, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 294, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 294, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 294, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 294, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 295, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 295, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 295, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 295, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 295, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 295, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 295, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 295, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 295, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 295, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 295, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 295, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 295, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 296, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "_G", 
computed_lexeme: None, line: 296, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 296, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 296, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 296, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 296, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 297, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 297, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "lines", computed_lexeme: None, line: 297, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 297, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 297, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 297, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 297, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 297, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 297, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 297, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 298, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 298, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "X", computed_lexeme: None, line: 298, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 298, }, Token { kind: Number, lexeme: "8", computed_lexeme: Some( "8", ), line: 298, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 298, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 300, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 300, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 300, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 300, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 302, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 302, }, Token { kind: String, lexeme: "\"string\\n\\n\\\\com \\\"\\\"''coisas [[estranhas]] ]]'\"", computed_lexeme: None, line: 302, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 303, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 303, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 303, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 303, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 304, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 304, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 304, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 304, }, Token { kind: String, lexeme: "\"x2 = %q\\n-- comment without ending EOS\"", computed_lexeme: None, line: 304, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 304, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 305, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 305, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 305, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 305, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 305, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 306, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 306, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 306, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 306, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 306, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 306, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 306, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 307, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 307, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 307, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 307, }, Token { kind: Identifier, lexeme: "x2", computed_lexeme: None, line: 307, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 307, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 308, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 308, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 308, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 308, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 309, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 309, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 309, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 309, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 309, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 309, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: 
"os", computed_lexeme: None, line: 310, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 310, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 310, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 310, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 310, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 311, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 311, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 311, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 311, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 311, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 311, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 311, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 311, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 311, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 314, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 314, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 314, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "expres", computed_lexeme: None, line: 314, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 314, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 315, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 315, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 315, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 315, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 315, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 316, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 316, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 316, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 316, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 316, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 316, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 317, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 317, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 317, }, Token { kind: 
LeftParen, lexeme: "(", computed_lexeme: None, line: 317, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 317, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "res", computed_lexeme: None, line: 318, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 318, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 318, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 318, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 318, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 318, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 318, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 318, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 319, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 319, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 319, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 319, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 319, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 319, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 320, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 320, }, Token { kind: Identifier, lexeme: "res", computed_lexeme: None, line: 320, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 320, }, Token { kind: Identifier, lexeme: "expres", computed_lexeme: None, line: 320, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 320, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 321, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 324, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 324, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 324, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 324, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 324, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 324, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 327, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 327, }, Token { kind: String, lexeme: "\"# a non-ending comment\"", computed_lexeme: None, line: 327, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 327, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 327, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 327, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 331, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 331, }, Token { kind: String, lexeme: "\"\\xEF\\xBB\\xBF# some comment\\nreturn 234\"", computed_lexeme: None, line: 331, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, 
line: 331, }, Token { kind: Number, lexeme: "234", computed_lexeme: Some( "234", ), line: 331, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 331, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 332, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 332, }, Token { kind: String, lexeme: "\"\\xEF\\xBB\\xBFreturn 239\"", computed_lexeme: None, line: 332, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 332, }, Token { kind: Number, lexeme: "239", computed_lexeme: Some( "239", ), line: 332, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 332, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 333, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 333, }, Token { kind: String, lexeme: "\"\\xEF\\xBB\\xBF\"", computed_lexeme: None, line: 333, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 333, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 333, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 333, }, Token { kind: Identifier, lexeme: "testloadfile", computed_lexeme: None, line: 337, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 337, }, Token { kind: String, lexeme: "\"# a comment\\nreturn debug.getinfo(1).currentline\"", computed_lexeme: None, line: 337, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 337, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 337, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 337, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 341, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 341, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 341, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 341, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 341, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 341, }, Token { kind: String, lexeme: "\"wb\"", computed_lexeme: None, line: 341, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 341, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 341, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 342, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 342, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 342, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 342, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "dump", computed_lexeme: None, line: 342, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 342, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 342, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 342, }, Token 
{ kind: RightParen, lexeme: ")", computed_lexeme: None, line: 342, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 342, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 342, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 342, }, Token { kind: String, lexeme: "'\\0alo\\255'", computed_lexeme: None, line: 342, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 342, }, Token { kind: String, lexeme: "'hi'", computed_lexeme: None, line: 342, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 342, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 342, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 342, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 342, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 343, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 343, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 343, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 343, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 343, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 344, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 344, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 344, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 344, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 344, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 344, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 344, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 344, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 344, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 344, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 345, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 345, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 345, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 345, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 345, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 345, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 345, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 345, }, Token { kind: String, lexeme: "\"\\0alo\\255\"", computed_lexeme: None, line: 345, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 345, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 345, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 345, }, Token { kind: String, lexeme: "\"hi\"", computed_lexeme: None, line: 345, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 345, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 346, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 346, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 346, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 346, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 346, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 346, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 346, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 349, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 350, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 350, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 350, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 350, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 350, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 350, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 350, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 350, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 350, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 350, }, Token { kind: String, lexeme: "\"wb\"", computed_lexeme: None, line: 350, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 350, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 350, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 352, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 352, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 352, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 352, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "dump", computed_lexeme: None, line: 352, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 352, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 352, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 352, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 352, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 352, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 352, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 352, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 352, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 352, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 352, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 353, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 353, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 353, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 353, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 353, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 354, }, Token { kind: Equals, lexeme: 
"=", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 354, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 354, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 354, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 354, }, Token { kind: String, lexeme: "\"b\"", computed_lexeme: None, line: 354, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 354, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 354, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 354, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 354, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 354, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 355, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 355, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 355, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 355, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 355, }, Token { kind: String, lexeme: "\"function\"", computed_lexeme: None, line: 355, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 355, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 355, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 355, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 355, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 355, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 355, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 356, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 356, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 356, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 356, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 356, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 356, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 356, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 356, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 356, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 357, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 360, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 360, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 360, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 360, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 360, }, 
Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 360, }, Token { kind: String, lexeme: "\"wb\"", computed_lexeme: None, line: 360, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 360, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 360, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 361, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 361, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 361, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 361, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 361, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 361, }, Token { kind: String, lexeme: "\"#this is a comment for a binary file\\0\\n\"", computed_lexeme: None, line: 361, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 361, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 362, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 362, }, Token { kind: Identifier, lexeme: "dump", computed_lexeme: None, line: 362, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 362, }, Token { kind: Function, lexeme: "function", computed_lexeme: None, line: 362, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 362, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 362, }, Token { kind: Return, lexeme: "return", computed_lexeme: None, line: 362, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 362, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 362, }, Token { kind: String, lexeme: "'\\0\\0\\0'", computed_lexeme: None, line: 362, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 362, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 362, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 362, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 362, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 363, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 363, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 363, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 363, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 363, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 364, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 364, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 364, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 364, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 364, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 364, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 364, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 364, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 364, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 364, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 364, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 364, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 364, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 364, }, Token { kind: 
RightParen, lexeme: ")", computed_lexeme: None, line: 364, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 365, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 365, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 365, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 365, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 365, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 365, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 365, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 365, }, Token { kind: String, lexeme: "\"\\0\\0\\0\"", computed_lexeme: None, line: 365, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 365, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 365, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 365, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 365, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 365, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 366, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 366, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 366, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 366, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 366, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 366, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 366, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 366, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 366, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 370, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 371, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 371, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 371, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 371, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 371, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 371, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 371, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 372, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 372, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 372, }, Token { kind: MultilineString, lexeme: "[[\n if (...) 
then a = 15; return b, c, d\n else return _ENV\n end\n ]]", computed_lexeme: None, line: 376, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 377, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 377, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 377, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 377, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 377, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 378, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 378, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 378, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 378, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 378, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 378, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 378, }, Token { kind: String, lexeme: "\"xuxu\"", computed_lexeme: None, line: 378, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 378, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 378, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 378, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 378, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 379, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 379, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 379, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 379, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 379, }, Token { kind: String, lexeme: "'t'", computed_lexeme: None, line: 379, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 379, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 379, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 379, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 379, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 380, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 380, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 380, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 380, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 380, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 380, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 380, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 380, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 380, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 380, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 
380, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 381, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 381, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 381, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 381, }, Token { kind: Number, lexeme: "15", computed_lexeme: Some( "15", ), line: 381, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 381, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 381, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 381, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 381, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 381, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 381, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 381, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 381, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 381, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 382, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 382, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 382, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 382, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 382, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 382, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 383, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 383, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 383, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 383, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 383, }, Token { kind: String, lexeme: "'t'", computed_lexeme: None, line: 383, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 383, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 383, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 383, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 383, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 384, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 384, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 384, }, Token { kind: LeftParen, lexeme: "(", 
computed_lexeme: None, line: 384, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 384, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 384, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 384, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 384, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 385, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 385, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 385, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 385, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 385, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 385, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 385, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 385, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 385, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 386, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 386, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 386, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 386, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 386, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 386, }, Token { kind: Identifier, lexeme: "_G", computed_lexeme: None, line: 386, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 386, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 387, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 387, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 387, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 387, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 387, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 387, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 387, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 388, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 392, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 393, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 393, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 393, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 393, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 393, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 393, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 393, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 393, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 393, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 393, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 393, }, Token { kind: String, lexeme: "\"return 10\"", computed_lexeme: None, line: 393, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 393, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 393, }, Token { kind: 
Identifier, lexeme: "close", computed_lexeme: None, line: 393, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 393, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 393, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 394, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 394, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 394, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 394, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 394, }, Token { kind: String, lexeme: "'b'", computed_lexeme: None, line: 394, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 394, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 395, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 395, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 395, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 395, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 395, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 395, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 395, }, Token { kind: String, lexeme: "\"a text chunk\"", computed_lexeme: None, line: 395, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 395, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 395, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 396, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 396, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 396, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 396, }, Token { kind: String, lexeme: "'w'", computed_lexeme: None, line: 396, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 396, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 396, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 396, }, Token { kind: String, lexeme: "\"\\27 return 10\"", computed_lexeme: None, line: 396, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 396, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 396, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 396, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 396, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 396, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 397, }, 
Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 397, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "loadfile", computed_lexeme: None, line: 397, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 397, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 397, }, Token { kind: String, lexeme: "'t'", computed_lexeme: None, line: 397, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 397, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 398, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 398, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 398, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 398, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 398, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 398, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 398, }, Token { kind: Identifier, lexeme: "find", computed_lexeme: None, line: 398, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 398, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 398, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 398, }, Token { kind: String, lexeme: "\"a binary chunk\"", computed_lexeme: None, line: 398, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 398, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 398, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 399, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 399, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 399, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 399, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 399, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 399, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 399, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 400, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 403, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 403, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 403, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 403, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 403, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 403, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 404, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 404, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 404, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 404, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 404, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 404, }, Token { kind: String, lexeme: "\"qualquer coisa\\n\"", computed_lexeme: None, line: 404, }, Token { kind: RightParen, lexeme: ")", 
computed_lexeme: None, line: 404, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 404, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 405, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 405, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 405, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 405, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 405, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 405, }, Token { kind: String, lexeme: "\"mais qualquer coisa\"", computed_lexeme: None, line: 405, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 405, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 405, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 406, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 406, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 406, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 406, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 406, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 407, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 407, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 407, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 407, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 407, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 407, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 407, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 407, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 407, }, Token { kind: String, lexeme: "'wb'", computed_lexeme: None, line: 407, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 407, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 407, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 407, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 408, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 408, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 408, }, Token { kind: String, lexeme: "\"outra coisa\\0\\1\\3\\0\\0\\0\\0\\255\\0\"", computed_lexeme: None, line: 408, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 408, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 409, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 409, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 409, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 409, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 409, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 411, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 411, }, Token { kind: Equals, lexeme: "=", 
computed_lexeme: None, line: 411, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 411, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 411, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 411, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 411, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 411, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 411, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 411, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 411, }, Token { kind: String, lexeme: "'r+'", computed_lexeme: None, line: 411, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 411, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 411, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 412, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 412, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 412, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 412, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 412, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 412, }, Token { kind: String, lexeme: "'rb'", computed_lexeme: None, line: 412, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 412, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 412, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 413, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 413, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 413, }, Token { kind: NotEquals, lexeme: "~=", computed_lexeme: None, line: 413, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 413, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 413, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 414, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 414, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 414, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 414, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 414, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 414, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 414, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 414, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 414, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 415, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 415, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 415, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 415, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 415, }, Token { kind: LeftParen, 
lexeme: "(", computed_lexeme: None, line: 415, }, Token { kind: String, lexeme: "'*l'", computed_lexeme: None, line: 415, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 415, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 415, }, Token { kind: String, lexeme: "\"qualquer coisa\"", computed_lexeme: None, line: 415, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 415, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 416, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 416, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 416, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 416, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 417, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 417, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 417, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 417, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 417, }, Token { kind: String, lexeme: "\"outra coisa\"", computed_lexeme: None, line: 417, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 417, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 417, }, Token { kind: String, lexeme: "\"outra coisa\"", computed_lexeme: None, line: 417, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 417, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 418, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 418, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 418, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 418, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 418, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 418, }, Token { kind: String, lexeme: "'*l'", computed_lexeme: None, line: 418, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 418, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 418, }, Token { kind: String, lexeme: "\"mais qualquer coisa\"", computed_lexeme: None, line: 418, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 418, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 419, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 419, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 419, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 419, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 419, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 420, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 420, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 
420, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 420, }, Token { kind: Identifier, lexeme: "filehandle", computed_lexeme: None, line: 420, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 420, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 420, }, Token { kind: String, lexeme: "\"userdata\"", computed_lexeme: None, line: 420, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 420, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 421, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 421, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 421, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 421, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 422, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 422, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 422, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 422, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 422, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 422, }, Token { kind: Number, lexeme: "4", computed_lexeme: Some( "4", ), line: 422, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 422, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 422, }, Token { kind: String, lexeme: "\"\\0\\1\\3\\0\"", computed_lexeme: None, line: 422, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 422, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 423, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 423, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 423, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 423, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 423, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 423, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 423, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 423, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 423, }, Token { kind: String, lexeme: "\"\\0\\0\\0\"", computed_lexeme: None, line: 423, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 423, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 424, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 424, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 424, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 424, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 424, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 424, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 424, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 424, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 424, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 424, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 424, }, Token { kind: Identifier, lexeme: "assert", 
computed_lexeme: None, line: 425, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 425, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 425, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 425, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 425, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 425, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 425, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 425, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 425, }, Token { kind: String, lexeme: "\"\\255\"", computed_lexeme: None, line: 425, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 425, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 426, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 426, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 426, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 426, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 426, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 426, }, Token { kind: String, lexeme: "'*a'", computed_lexeme: None, line: 426, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 426, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 426, }, Token { kind: String, lexeme: "\"\\0\"", computed_lexeme: None, line: 426, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 426, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 427, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 427, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 427, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 427, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 427, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 427, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 427, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 427, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 427, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 427, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 428, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 428, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 428, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 428, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 428, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 428, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 428, }, Token { kind: Identifier, lexeme: "otherfilehandle", computed_lexeme: None, line: 429, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 429, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 429, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 429, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: 
None, line: 429, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 430, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 430, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 430, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 430, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 430, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 430, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 430, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 430, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 430, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 431, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 431, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 431, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 431, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 431, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 431, }, Token { kind: Identifier, lexeme: "otherfile", computed_lexeme: None, line: 431, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 431, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 431, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 432, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 432, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 432, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 434, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 434, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 434, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 434, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 434, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 434, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 435, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 435, }, Token { kind: MultilineString, lexeme: "[[\n 123.4\t-56e-2 not a number\nsecond line\nthird line\n\nand the rest of the file\n]]", computed_lexeme: None, line: 441, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 442, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 442, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 442, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 442, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 443, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 443, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 443, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 443, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 443, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 443, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { 
kind: Identifier, lexeme: "b", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "c", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "e", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "__", computed_lexeme: None, line: 444, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 444, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 444, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 444, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: String, lexeme: "'*n'", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: String, lexeme: "'*n'", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: String, lexeme: "'*l'", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: String, lexeme: "'*l'", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: String, lexeme: "'*l'", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: String, lexeme: "'*a'", computed_lexeme: None, line: 444, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 444, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 444, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 444, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 445, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 445, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 445, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 445, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 445, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 445, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 445, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 445, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 445, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 445, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 445, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 445, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 445, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 446, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "_", 
computed_lexeme: None, line: 446, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 446, }, Token { kind: String, lexeme: "' '", computed_lexeme: None, line: 446, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "__", computed_lexeme: None, line: 446, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 446, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 446, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 446, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 447, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 447, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 447, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 447, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 447, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 447, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 447, }, Token { kind: String, lexeme: "'number'", computed_lexeme: None, line: 447, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 447, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 447, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 447, }, Token { kind: Number, lexeme: "123.4", computed_lexeme: Some( "123.4", ), line: 447, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 447, }, Token { kind: Identifier, lexeme: "b", computed_lexeme: None, line: 447, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 447, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 447, }, Token { kind: Number, lexeme: "56e-2", computed_lexeme: Some( "56e-2", ), line: 447, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 447, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 448, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 448, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 448, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 448, }, Token { kind: String, lexeme: "'second line'", computed_lexeme: None, line: 448, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 448, }, Token { kind: Identifier, lexeme: "e", computed_lexeme: None, line: 448, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 448, }, Token { kind: String, lexeme: "'third line'", computed_lexeme: None, line: 448, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 448, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 449, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 449, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 449, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 449, }, Token { kind: MultilineString, lexeme: "[[\n\nand the rest of the file\n]]", computed_lexeme: None, line: 452, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 452, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 453, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 453, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 453, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 453, }, Token { 
kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 453, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 453, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 453, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 453, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 453, }, Token { kind: Identifier, lexeme: "collectgarbage", computed_lexeme: None, line: 454, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 454, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 454, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 457, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 458, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 458, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 458, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 458, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 458, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 458, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 458, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 458, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 458, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 458, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 458, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 458, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 458, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 458, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 459, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 459, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 459, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 459, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 459, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 459, }, Token { kind: String, lexeme: "\"r\"", computed_lexeme: None, line: 459, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 459, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 459, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 460, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 460, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 460, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 460, }, Token { kind: Identifier, lexeme: "setvbuf", computed_lexeme: None, line: 460, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 460, }, Token { kind: String, lexeme: "\"full\"", computed_lexeme: None, line: 460, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 460, }, Token { kind: Number, lexeme: "2000", computed_lexeme: Some( "2000", ), line: 460, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: 
None, line: 460, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 460, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 461, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 461, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 461, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 461, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 461, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 461, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 462, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 462, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 462, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 462, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 462, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 462, }, Token { kind: String, lexeme: "\"*all\"", computed_lexeme: None, line: 462, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 462, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 462, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 462, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 462, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 463, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 463, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 463, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 463, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 464, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 464, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 464, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 464, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 464, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 465, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 465, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 465, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 465, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 465, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 465, }, Token { kind: String, lexeme: "\"*all\"", computed_lexeme: None, line: 465, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 465, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 465, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 465, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 465, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 466, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 466, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 466, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: 
"open", computed_lexeme: None, line: 466, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 466, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 466, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 466, }, Token { kind: String, lexeme: "\"w\"", computed_lexeme: None, line: 466, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 466, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 467, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 467, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 467, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 467, }, Token { kind: Identifier, lexeme: "setvbuf", computed_lexeme: None, line: 467, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 467, }, Token { kind: String, lexeme: "\"no\"", computed_lexeme: None, line: 467, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 467, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 467, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 468, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 468, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 468, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 468, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 468, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 468, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 469, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 469, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 469, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 469, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 469, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 469, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 470, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 470, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 470, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 470, }, Token { kind: String, lexeme: "\"*all\"", computed_lexeme: None, line: 470, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 470, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 470, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 470, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 470, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 471, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 471, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 471, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 471, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 471, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 472, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 472, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 472, }, Token 
{ kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 472, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 472, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 472, }, Token { kind: Identifier, lexeme: "open", computed_lexeme: None, line: 472, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 472, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 472, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 472, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 472, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 472, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 472, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 473, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 473, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 473, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 473, }, Token { kind: Identifier, lexeme: "setvbuf", computed_lexeme: None, line: 473, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 473, }, Token { kind: String, lexeme: "\"line\"", computed_lexeme: None, line: 473, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 473, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 473, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 474, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 474, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 474, }, Token { kind: String, lexeme: "\"x\"", computed_lexeme: None, line: 474, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 474, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 475, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 475, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 475, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 475, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 475, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 475, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 475, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 475, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 476, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 476, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 476, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 476, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 476, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 476, }, Token { kind: String, lexeme: "\"*all\"", computed_lexeme: None, line: 476, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 476, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 476, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 476, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 476, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 477, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 477, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, 
line: 477, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 477, }, Token { kind: String, lexeme: "\"a\\n\"", computed_lexeme: None, line: 477, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 477, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 477, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 477, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 477, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 477, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 477, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 477, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 477, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 478, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 478, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 478, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 478, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 478, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 478, }, Token { kind: String, lexeme: "\"*all\"", computed_lexeme: None, line: 478, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 478, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 478, }, Token { kind: String, lexeme: "\"xa\\n\"", computed_lexeme: None, line: 478, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 478, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 479, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 479, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 479, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 479, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 479, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 479, }, Token { kind: Identifier, lexeme: "fr", computed_lexeme: None, line: 479, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 479, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 479, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 479, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 479, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 480, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 480, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 480, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 480, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 480, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 480, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 480, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 481, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 484, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 484, }, Token { kind: Identifier, lexeme: "_soft", computed_lexeme: None, line: 484, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 484, }, Token { kind: Identifier, lexeme: "print", 
computed_lexeme: None, line: 485, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 485, }, Token { kind: String, lexeme: "\"testing large files (> BUFSIZ)\"", computed_lexeme: None, line: 485, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 485, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 486, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 486, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 486, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 486, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 486, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 486, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 487, }, Token { kind: Identifier, lexeme: "i", computed_lexeme: None, line: 487, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 487, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 487, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 487, }, Token { kind: Number, lexeme: "5001", computed_lexeme: Some( "5001", ), line: 487, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 487, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 487, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 487, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 487, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 487, }, Token { kind: String, lexeme: "'0123456789123'", computed_lexeme: None, line: 487, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 487, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 487, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 488, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 488, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 488, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 488, }, Token { kind: String, lexeme: "'\\n12346'", computed_lexeme: None, line: 488, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 488, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 488, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 488, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 488, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 488, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 489, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 489, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 489, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 489, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 489, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 489, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 490, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 490, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 490, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 490, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 490, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 490, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 490, }, Token 
{ kind: String, lexeme: "'*a'", computed_lexeme: None, line: 490, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 490, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 491, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 491, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 491, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 491, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 491, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 491, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 491, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 491, }, Token { kind: String, lexeme: "'set'", computed_lexeme: None, line: 491, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 491, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 491, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 491, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 492, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 492, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 492, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 492, }, Token { kind: Number, lexeme: "30001", computed_lexeme: Some( "30001", ), line: 492, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 492, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 492, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 492, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 492, }, Token { kind: Number, lexeme: "1005", computed_lexeme: Some( "1005", ), line: 492, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 492, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 492, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 492, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 492, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 492, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 492, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 492, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 493, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 493, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 493, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 493, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 493, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 493, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 493, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 493, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 
493, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 493, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 493, }, Token { kind: Number, lexeme: "100003", computed_lexeme: Some( "100003", ), line: 493, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 493, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 494, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 494, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 494, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 494, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 494, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 494, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 494, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 494, }, Token { kind: Identifier, lexeme: "len", computed_lexeme: None, line: 494, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 494, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 494, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 494, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 494, }, Token { kind: Number, lexeme: "5001", computed_lexeme: Some( "5001", ), line: 494, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 494, }, Token { kind: Number, lexeme: "13", computed_lexeme: Some( "13", ), line: 494, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 494, }, Token { kind: Number, lexeme: "6", computed_lexeme: Some( "6", ), line: 494, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 494, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 495, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 495, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 495, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 495, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 495, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 495, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 495, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 495, }, Token { kind: String, lexeme: "'set'", computed_lexeme: None, line: 495, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 495, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 495, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 495, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 496, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 496, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 496, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 496, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 496, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 496, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 496, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 497, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 497, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 497, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 497, }, Token { kind: Identifier, 
lexeme: "y", computed_lexeme: None, line: 497, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 497, }, Token { kind: String, lexeme: "'\\n'", computed_lexeme: None, line: 497, }, Token { kind: DoubleDot, lexeme: "..", computed_lexeme: None, line: 497, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 497, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 497, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 497, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 497, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 497, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 497, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 498, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 498, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 498, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 498, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 498, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 498, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 498, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 498, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 498, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 498, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 499, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 499, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 499, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 499, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 499, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 499, }, Token { kind: Identifier, lexeme: "input", computed_lexeme: None, line: 499, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 499, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 499, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 499, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 500, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 500, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 500, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 500, }, Token { kind: Identifier, lexeme: "remove", computed_lexeme: None, line: 500, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 500, }, Token { kind: Identifier, lexeme: "file", computed_lexeme: None, line: 500, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 500, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 500, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 501, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 501, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 501, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 501, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 501, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 501, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 501, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 502, }, Token { kind: If, lexeme: "if", 
computed_lexeme: None, line: 504, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 504, }, Token { kind: Identifier, lexeme: "_noposix", computed_lexeme: None, line: 504, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 504, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 505, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 505, }, Token { kind: String, lexeme: "\"testing popen/pclose and execute\"", computed_lexeme: None, line: 505, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 505, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 506, }, Token { kind: Identifier, lexeme: "tests", computed_lexeme: None, line: 506, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 506, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 506, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 508, }, Token { kind: String, lexeme: "\"ls > /dev/null\"", computed_lexeme: None, line: 508, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 508, }, Token { kind: String, lexeme: "\"ok\"", computed_lexeme: None, line: 508, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 508, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 508, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 509, }, Token { kind: String, lexeme: "\"not-to-be-found-command\"", computed_lexeme: None, line: 509, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 509, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 509, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 509, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 509, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 510, }, Token { kind: String, lexeme: "\"exit 3\"", computed_lexeme: None, line: 510, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 510, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 510, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 510, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 510, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 510, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 510, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 511, }, Token { kind: String, lexeme: "\"exit 129\"", computed_lexeme: None, line: 511, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 511, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 511, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 511, }, Token { kind: Number, lexeme: "129", computed_lexeme: Some( "129", ), line: 511, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 511, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 511, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 512, }, Token { kind: String, lexeme: "\"kill -s HUP $$\"", computed_lexeme: None, line: 512, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 512, }, Token { kind: String, lexeme: "\"signal\"", computed_lexeme: None, line: 512, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 512, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 512, }, Token { kind: RightBrace, lexeme: "}", 
computed_lexeme: None, line: 512, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 512, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 513, }, Token { kind: String, lexeme: "\"kill -s KILL $$\"", computed_lexeme: None, line: 513, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 513, }, Token { kind: String, lexeme: "\"signal\"", computed_lexeme: None, line: 513, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 513, }, Token { kind: Number, lexeme: "9", computed_lexeme: Some( "9", ), line: 513, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 513, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 513, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 514, }, Token { kind: String, lexeme: "\"sh -c 'kill -s HUP $$'\"", computed_lexeme: None, line: 514, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 514, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 514, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 514, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 514, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 515, }, Token { kind: String, lexeme: "'lua -e \"os.exit(20, true)\"'", computed_lexeme: None, line: 515, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 515, }, Token { kind: String, lexeme: "\"exit\"", computed_lexeme: None, line: 515, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 515, }, Token { kind: Number, lexeme: "20", computed_lexeme: Some( "20", ), line: 515, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 515, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 515, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 516, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 517, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 517, }, Token { kind: String, lexeme: "\"\\n(some error messages are expected now)\"", computed_lexeme: None, line: 517, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 517, }, Token { kind: For, lexeme: "for", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "_", computed_lexeme: None, line: 518, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 518, }, Token { kind: In, lexeme: "in", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "ipairs", computed_lexeme: None, line: 518, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 518, }, Token { kind: Identifier, lexeme: "tests", computed_lexeme: None, line: 518, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 518, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 518, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 519, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 519, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 519, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "io", 
computed_lexeme: None, line: 519, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "popen", computed_lexeme: None, line: 519, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 519, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 519, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 519, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 519, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 519, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 519, }, Token { kind: Identifier, lexeme: "close", computed_lexeme: None, line: 519, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 519, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 519, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 520, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "y1", computed_lexeme: None, line: 520, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "z1", computed_lexeme: None, line: 520, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 520, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "execute", computed_lexeme: None, line: 520, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 520, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 520, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 520, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 520, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 520, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 521, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 521, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "x1", computed_lexeme: None, line: 521, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 521, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "y1", computed_lexeme: None, line: 521, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 521, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 521, }, Token { kind: Identifier, lexeme: "z1", computed_lexeme: None, line: 521, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 521, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 522, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 522, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 522, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 522, }, Token { kind: 
RightBracket, lexeme: "]", computed_lexeme: None, line: 522, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 522, }, Token { kind: String, lexeme: "\"ok\"", computed_lexeme: None, line: 522, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 522, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 523, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 523, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 523, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 523, }, Token { kind: True, lexeme: "true", computed_lexeme: None, line: 523, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 523, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 523, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 523, }, Token { kind: String, lexeme: "'exit'", computed_lexeme: None, line: 523, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 523, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 523, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 523, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 523, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 523, }, Token { kind: Else, lexeme: "else", computed_lexeme: None, line: 524, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 525, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 525, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 525, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 525, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "y", computed_lexeme: None, line: 525, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 525, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 525, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 525, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 525, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 525, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 527, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 527, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 527, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 527, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 527, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 527, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 527, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 527, }, Token { kind: And, lexeme: "and", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 527, }, Token { kind: GreaterThan, lexeme: ">", computed_lexeme: None, line: 527, }, Token { kind: Number, lexeme: "0", computed_lexeme: Some( "0", ), line: 527, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 527, }, Token { kind: Or, lexeme: "or", computed_lexeme: 
None, line: 527, }, Token { kind: Identifier, lexeme: "v", computed_lexeme: None, line: 527, }, Token { kind: LeftBracket, lexeme: "[", computed_lexeme: None, line: 527, }, Token { kind: Number, lexeme: "3", computed_lexeme: Some( "3", ), line: 527, }, Token { kind: RightBracket, lexeme: "]", computed_lexeme: None, line: 527, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 527, }, Token { kind: Identifier, lexeme: "z", computed_lexeme: None, line: 527, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 527, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 528, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 529, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 530, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 534, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 534, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 534, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 534, }, Token { kind: Identifier, lexeme: "tmpfile", computed_lexeme: None, line: 534, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 534, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 534, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 535, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 535, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 535, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 535, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 535, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 535, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 535, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 535, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 535, }, Token { kind: String, lexeme: "\"file\"", computed_lexeme: None, line: 535, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 535, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 536, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 536, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 536, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 536, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 536, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 536, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 537, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 537, }, Token { kind: Identifier, lexeme: "seek", computed_lexeme: None, line: 537, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 537, }, Token { kind: String, lexeme: "\"set\"", computed_lexeme: None, line: 537, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 537, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 538, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 538, }, Token { kind: Identifier, lexeme: "f", computed_lexeme: None, line: 538, }, Token { kind: Colon, lexeme: ":", computed_lexeme: None, line: 538, }, Token { kind: Identifier, lexeme: "read", computed_lexeme: None, line: 538, }, Token { kind: String, lexeme: "\"*a\"", computed_lexeme: None, line: 538, }, Token { kind: DoubleEquals, lexeme: 
"==", computed_lexeme: None, line: 538, }, Token { kind: String, lexeme: "\"alo\"", computed_lexeme: None, line: 538, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 538, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 540, }, Token { kind: Identifier, lexeme: "print", computed_lexeme: None, line: 542, }, Token { kind: String, lexeme: "'+'", computed_lexeme: None, line: 542, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 545, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 545, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 545, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 545, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 545, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 545, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 545, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 545, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 545, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 545, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 545, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 546, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 546, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 546, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 546, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 546, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 546, }, Token { kind: String, lexeme: "\"!\"", computed_lexeme: None, line: 546, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 546, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 546, }, Token { kind: String, lexeme: "\"\"", computed_lexeme: None, line: 546, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 546, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 547, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 547, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 547, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 547, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 547, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 547, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 547, }, Token { kind: String, lexeme: "\"a\"", computed_lexeme: None, line: 547, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 547, }, Token { kind: Number, lexeme: "10000", computed_lexeme: Some( "10000", ), line: 547, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 547, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 548, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 548, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 548, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 548, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 548, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 548, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 548, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 548, }, Token { 
kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 548, }, Token { kind: Identifier, lexeme: "x", computed_lexeme: None, line: 548, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 548, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 549, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 549, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 549, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 549, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 549, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 549, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 549, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 549, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 550, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 550, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 550, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 550, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 550, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 550, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 550, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 550, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 550, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 550, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 551, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 551, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 551, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 551, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 551, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 551, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 551, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 551, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 551, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 551, }, Token { kind: String, lexeme: "\"%d\"", computed_lexeme: None, line: 551, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 551, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 551, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 551, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 551, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 551, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 551, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 551, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 552, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 552, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 552, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 552, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 552, }, Token { 
kind: String, lexeme: "\"%d\"", computed_lexeme: None, line: 552, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 552, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 552, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 552, }, Token { kind: Number, lexeme: "1000", computed_lexeme: Some( "1000", ), line: 552, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 552, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 552, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 553, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 553, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 553, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 553, }, Token { kind: Number, lexeme: "200", computed_lexeme: Some( "200", ), line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 553, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 553, }, Token { kind: Identifier, lexeme: "rep", computed_lexeme: None, line: 553, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 553, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 553, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 553, }, Token { kind: Number, lexeme: "100", computed_lexeme: Some( "100", ), line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 553, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 555, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 555, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 555, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 555, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 555, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 556, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 556, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 556, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 556, }, 
Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 556, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 556, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 556, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 557, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 557, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 557, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 557, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 557, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 557, }, Token { kind: MultilineString, lexeme: "[[assert(D.year==%Y and D.month==%m and D.day==%d and\n D.hour==%H and D.min==%M and D.sec==%S and\n D.wday==%w+1 and D.yday==%j and type(D.isdst) == 'boolean')]]", computed_lexeme: None, line: 559, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 559, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 559, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 559, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 559, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 559, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 559, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 561, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 561, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 561, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 561, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 561, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 561, }, Token { kind: String, lexeme: "\"%9\"", computed_lexeme: None, line: 561, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 561, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 561, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 562, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 562, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 562, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 562, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 562, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 562, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 562, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 562, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 562, }, Token { kind: String, lexeme: "\"%\"", computed_lexeme: None, line: 562, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 562, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 562, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 563, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 563, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, 
line: 563, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 563, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 563, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 563, }, Token { kind: String, lexeme: "\"%O\"", computed_lexeme: None, line: 563, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 563, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 563, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 564, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 564, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 564, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 564, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 564, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 564, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 564, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 564, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 564, }, Token { kind: String, lexeme: "\"%E\"", computed_lexeme: None, line: 564, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 564, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 564, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 565, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 565, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 565, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 565, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 565, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 565, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 565, }, Token { kind: String, lexeme: "\"%Ea\"", computed_lexeme: None, line: 565, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 565, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 565, }, Token { kind: If, lexeme: "if", computed_lexeme: None, line: 567, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 567, }, Token { kind: Identifier, lexeme: "_noposix", computed_lexeme: None, line: 567, }, Token { kind: Then, lexeme: "then", computed_lexeme: None, line: 567, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 568, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 568, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 568, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 568, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 568, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 568, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 568, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 568, }, Token { kind: String, lexeme: "\"%Ex\"", computed_lexeme: None, line: 568, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 568, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, 
line: 568, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 568, }, Token { kind: String, lexeme: "'string'", computed_lexeme: None, line: 568, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 568, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 569, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 569, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 569, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 569, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 569, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 569, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 569, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 569, }, Token { kind: String, lexeme: "\"%Oy\"", computed_lexeme: None, line: 569, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 569, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 569, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 569, }, Token { kind: String, lexeme: "'string'", computed_lexeme: None, line: 569, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 569, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 570, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 572, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 572, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 572, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 572, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 572, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 572, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 572, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 573, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 573, }, Token { kind: Not, lexeme: "not", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "pcall", computed_lexeme: None, line: 573, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 573, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 573, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 573, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 573, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 573, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 573, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 573, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 573, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 573, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 575, }, Token { kind: Equals, lexeme: 
"=", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 575, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 575, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 575, }, Token { kind: String, lexeme: "\"!*t\"", computed_lexeme: None, line: 575, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 575, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 575, }, Token { kind: Identifier, lexeme: "load", computed_lexeme: None, line: 576, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 576, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 576, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 576, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 576, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 576, }, Token { kind: MultilineString, lexeme: "[[!assert(D.year==%Y and D.month==%m and D.day==%d and\n D.hour==%H and D.min==%M and D.sec==%S and\n D.wday==%w+1 and D.yday==%j and type(D.isdst) == 'boolean')]]", computed_lexeme: None, line: 578, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 578, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 578, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 578, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 578, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 578, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 578, }, Token { kind: Do, lexeme: "do", computed_lexeme: None, line: 580, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 581, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 581, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 581, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 581, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 581, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 581, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 581, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 581, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 581, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 582, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 582, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 582, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 582, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 582, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 583, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 583, }, Token { kind: Identifier, lexeme: "type", computed_lexeme: None, line: 583, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 583, }, Token { kind: Identifier, lexeme: 
"D", computed_lexeme: None, line: 583, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 583, }, Token { kind: Identifier, lexeme: "isdst", computed_lexeme: None, line: 583, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 583, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 583, }, Token { kind: String, lexeme: "'boolean'", computed_lexeme: None, line: 583, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 583, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 584, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 584, }, Token { kind: Identifier, lexeme: "isdst", computed_lexeme: None, line: 584, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 584, }, Token { kind: Nil, lexeme: "nil", computed_lexeme: None, line: 584, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 585, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 585, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 585, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 585, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 585, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 585, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 585, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 585, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 585, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 586, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 586, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 586, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 586, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 586, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 586, }, Token { kind: End, lexeme: "end", computed_lexeme: None, line: 587, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 589, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 589, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 589, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 589, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 589, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 590, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 590, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 590, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 590, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 590, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 590, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 590, }, Token { kind: Semicolon, lexeme: ";", computed_lexeme: None, line: 590, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "t1", 
computed_lexeme: None, line: 591, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 591, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 591, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "D", computed_lexeme: None, line: 591, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 591, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 593, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "math", computed_lexeme: None, line: 593, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "abs", computed_lexeme: None, line: 593, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 593, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 593, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 593, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 593, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 593, }, Token { kind: Slash, lexeme: "/", computed_lexeme: None, line: 593, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 593, }, Token { kind: Number, lexeme: "24", computed_lexeme: Some( "24", ), line: 593, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 593, }, Token { kind: Number, lexeme: "3600", computed_lexeme: Some( "3600", ), line: 593, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 593, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 593, }, Token { kind: Number, lexeme: "365", computed_lexeme: Some( "365", ), line: 593, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 593, }, Token { kind: LessThan, lexeme: "<", computed_lexeme: None, line: 593, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 593, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 593, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 595, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 595, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 595, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 595, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 595, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 595, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 595, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 596, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 596, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 596, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: 
"os", computed_lexeme: None, line: 596, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 596, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 596, }, Token { kind: String, lexeme: "\"*t\"", computed_lexeme: None, line: 596, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 596, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 596, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 597, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 597, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 597, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 597, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 597, }, Token { kind: Identifier, lexeme: "t", computed_lexeme: None, line: 597, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 597, }, Token { kind: LessThanOrEqual, lexeme: "<=", computed_lexeme: None, line: 597, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 597, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 597, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 599, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 599, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 599, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 599, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 599, }, Token { kind: Number, lexeme: "2000", computed_lexeme: Some( "2000", ), line: 599, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 599, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 599, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 599, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 599, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 599, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 599, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 599, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 599, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 599, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 599, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 599, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 599, }, Token { kind: Number, lexeme: "12", computed_lexeme: Some( "12", ), line: 599, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 599, }, Token { 
kind: Local, lexeme: "local", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "t2", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 600, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "time", computed_lexeme: None, line: 600, }, Token { kind: LeftBrace, lexeme: "{", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "year", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Number, lexeme: "2000", computed_lexeme: Some( "2000", ), line: 600, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "month", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 600, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "day", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 600, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "hour", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Number, lexeme: "23", computed_lexeme: Some( "23", ), line: 600, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Number, lexeme: "10", computed_lexeme: Some( "10", ), line: 600, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "sec", computed_lexeme: None, line: 600, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 600, }, Token { kind: Number, lexeme: "19", computed_lexeme: Some( "19", ), line: 600, }, Token { kind: RightBrace, lexeme: "}", computed_lexeme: None, line: 600, }, Token { kind: Identifier, lexeme: "assert", computed_lexeme: None, line: 601, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 601, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 601, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 601, }, Token { kind: Identifier, lexeme: "difftime", computed_lexeme: None, line: 601, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 601, }, Token { kind: Identifier, lexeme: "t1", computed_lexeme: None, line: 601, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 601, }, Token { kind: Identifier, lexeme: "t2", computed_lexeme: None, line: 601, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 601, }, Token { kind: DoubleEquals, lexeme: "==", computed_lexeme: None, line: 601, }, Token { kind: Number, lexeme: "60", computed_lexeme: Some( "60", ), line: 601, }, Token { kind: Star, lexeme: "*", computed_lexeme: None, line: 601, }, Token { kind: Number, lexeme: "2", computed_lexeme: Some( "2", ), line: 601, }, Token { kind: Minus, lexeme: "-", computed_lexeme: None, line: 601, }, Token { kind: Number, lexeme: "19", computed_lexeme: Some( "19", 
), line: 601, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 601, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 603, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 603, }, Token { kind: Identifier, lexeme: "output", computed_lexeme: None, line: 603, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 603, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 603, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 603, }, Token { kind: Identifier, lexeme: "stdout", computed_lexeme: None, line: 603, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 603, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 604, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 604, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 604, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 604, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 604, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 604, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 604, }, Token { kind: String, lexeme: "'%d'", computed_lexeme: None, line: 604, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 604, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 605, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 605, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 605, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 605, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 605, }, Token { kind: String, lexeme: "'%m'", computed_lexeme: None, line: 605, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 605, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 606, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 606, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 606, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 606, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 606, }, Token { kind: String, lexeme: "'%Y'", computed_lexeme: None, line: 606, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 606, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "ds", computed_lexeme: None, line: 607, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 607, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 607, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 607, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 607, }, Token { kind: String, lexeme: "'%w'", computed_lexeme: None, line: 607, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 607, }, Token { kind: Plus, lexeme: "+", computed_lexeme: None, line: 607, }, Token { kind: Number, lexeme: "1", computed_lexeme: Some( "1", ), line: 607, }, Token { 
kind: Local, lexeme: "local", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 608, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 608, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 608, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 608, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 608, }, Token { kind: String, lexeme: "'%H'", computed_lexeme: None, line: 608, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 608, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 609, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 609, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 609, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 609, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 609, }, Token { kind: String, lexeme: "'%M'", computed_lexeme: None, line: 609, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 609, }, Token { kind: Local, lexeme: "local", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 610, }, Token { kind: Equals, lexeme: "=", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "os", computed_lexeme: None, line: 610, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "date", computed_lexeme: None, line: 610, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 610, }, Token { kind: String, lexeme: "'%S'", computed_lexeme: None, line: 610, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 610, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 611, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 611, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 611, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 611, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 611, }, Token { kind: String, lexeme: "'test done on %2.2d/%2.2d/%d'", computed_lexeme: None, line: 611, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "d", computed_lexeme: None, line: 611, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "m", computed_lexeme: None, line: 611, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "a", computed_lexeme: None, line: 611, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 611, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 611, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 612, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 612, }, Token { 
kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 612, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 612, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 612, }, Token { kind: String, lexeme: "', at %2.2d:%2.2d:%2.2d\\n'", computed_lexeme: None, line: 612, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "h", computed_lexeme: None, line: 612, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "min", computed_lexeme: None, line: 612, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "s", computed_lexeme: None, line: 612, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 612, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 612, }, Token { kind: Identifier, lexeme: "io", computed_lexeme: None, line: 613, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 613, }, Token { kind: Identifier, lexeme: "write", computed_lexeme: None, line: 613, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 613, }, Token { kind: Identifier, lexeme: "string", computed_lexeme: None, line: 613, }, Token { kind: Dot, lexeme: ".", computed_lexeme: None, line: 613, }, Token { kind: Identifier, lexeme: "format", computed_lexeme: None, line: 613, }, Token { kind: LeftParen, lexeme: "(", computed_lexeme: None, line: 613, }, Token { kind: String, lexeme: "'%s\\n'", computed_lexeme: None, line: 613, }, Token { kind: Comma, lexeme: ",", computed_lexeme: None, line: 613, }, Token { kind: Identifier, lexeme: "_VERSION", computed_lexeme: None, line: 613, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 613, }, Token { kind: RightParen, lexeme: ")", computed_lexeme: None, line: 613, }, ]