Crates.io | rslexer |
lib.rs | rslexer |
version | 1.0.2 |
source | src |
created_at | 2020-01-02 11:17:47.81769 |
updated_at | 2020-01-02 12:03:22.012951 |
description | Simple lexer written in Rust! |
homepage | |
repository | |
max_upload_size | |
id | 194449 |
size | 7,782 |
#[derive(Debug, PartialEq)]
enum Token {
    Number,
    Word { word: String, line: usize, character: usize },
}
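The Debug and PartialEq derives are what make the test below work: PartialEq lets tokens be compared with assert_eq!, and Debug lets the macro print any mismatching values. A quick illustration (this extra test is not part of the crate, just a sketch):

#[test]
fn token_derive_test() {
    // Tokens can be built and compared directly thanks to the derives above.
    let number = Token::Number;
    let word = Token::Word { word: String::from("test"), line: 1, character: 1 };
    assert_ne!(number, word); // PartialEq enables ==, != and the assert_eq!/assert_ne! macros
    println!("{:?}", word);   // Debug enables {:?} formatting
}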
#[test]
fn str_test() {
    // The result of the lex function is a Result<Vec<Token>, String>!
    let ts: Vec<Token> = lex(
        // The &str to be lexed!
        "test 12 21 text\n w2\n3",
        // A Vec of Rules; just use the rules! macro!
        // rules! expects a type (Token) and a list of rules:
        // &str (regex) => |string, line, character| Option<Token>
        rules!(Token;
            r"\s+" => |_, _, _| None,
            r"\d+" => |_, _, _| Some(Token::Number),
            r"[^\s\d]+" => |s, l, c| Some(Token::Word { word: s.to_string(), line: l, character: c }),
        )
    ).expect("Ok([...])");
    // test if equal!
    assert_eq!(
        vec![
            Token::Word { word: String::from("test"), line: 1, character: 1 },
            Token::Number,
            Token::Number,
            Token::Word { word: String::from("text"), line: 1, character: 12 },
            Token::Word { word: String::from("w"), line: 2, character: 2 },
            Token::Number,
            Token::Number,
        ],
        ts
    );
}
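Since lex returns a Result<Vec<Token>, String>, callers can also match on the result instead of calling expect. A minimal sketch, assuming the Err(String) variant carries a readable message (for example when part of the input matches none of the rules); lex_or_report is a hypothetical helper name, not part of the crate:

fn lex_or_report(input: &str) -> Vec<Token> {
    match lex(
        input,
        rules!(Token;
            r"\s+" => |_, _, _| None,
            r"\d+" => |_, _, _| Some(Token::Number),
            r"[^\s\d]+" => |s, l, c| Some(Token::Word { word: s.to_string(), line: l, character: c }),
        ),
    ) {
        Ok(tokens) => tokens,
        Err(message) => {
            // Log the error message and fall back to an empty token stream.
            eprintln!("lex error: {}", message);
            Vec::new()
        }
    }
}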
This software is available under the MIT license.