Crates.io | lib-lexin |
lib.rs | lib-lexin |
version | 0.3.6 |
source | src |
created_at | 2023-09-12 19:45:09.27179 |
updated_at | 2023-11-11 14:29:10.751205 |
description | A simple lexer library |
homepage | |
repository | https://github.com/proxin187/lib-lexin |
max_upload_size | |
id | 970940 |
size | 14,745 |
A simple lexer library.
lib-lexin is a small lexer library created to let you quickly lex almost anything.
This example shows how easy it is to lex a file:
use lib_lexin::{Lexer, Section};
let mut lexer = Lexer::new(
&[ // keywords
"fn",
"return"
],
&[ // section
Section::new("string", "\"", "\""),
],
&[ // symbols
('+', "Plus"),
],
);
lexer.load_file("[FILE]");
let tokens = lexer.tokenize()?;
Lexer::new
pub fn new(keywords: &[&'a str], sections: &[Section], symbols: &[(char, &'a str)]) -> Lexer<'a>
Lexer::load_file
pub fn load_file(&mut self, filename: &str) -> Result<(), Box<dyn std::error::Error>>
Lexer::load_str
pub fn load_str(&mut self, string: &str)
Lexer::tokenize
pub fn tokenize(&mut self) -> Result<Vec<Token>, Box<dyn std::error::Error>>
Section::new
pub fn new(name: &str, start: &str, end: &str) -> Section
Token::is_keyword
pub fn is_keyword(&self, keyword: &str) -> Result<(), Box<dyn std::error::Error>>
Token::is_section
pub fn is_section(&self, name: &str) -> Result<String, Box<dyn std::error::Error>>
Token::is_ident
pub fn is_ident(&self) -> Result<String, Box<dyn std::error::Error>>
Token::is_integer
pub fn is_integer(&self) -> Result<usize, Box<dyn std::error::Error>>
Token::is_float
pub fn is_float(&self) -> Result<f64, Box<dyn std::error::Error>>
Token::is_symbol
pub fn is_symbol(&self, name: &str) -> Result<(), Box<dyn std::error::Error>>
Sections are always escaped
Contributors' names and contact info
Currently there is no license; this may change in the future.