//! This example shows how to use a file as input to a lexer, rather than a
//! hardcoded string.
//!
//! This lexer simply captures as many standard identifiers as it can, and
//! ignores everything else. For instance, given a file containing `let x = 42;`,
//! it would emit tokens for `let` and `x` and skip the rest.

#![allow(missing_docs)]

use std::{
    fs,
    io::{stdin, stdout, BufRead, Write},
    path::Path,
};

use alkale::{span::Spanned, token::Token, LexerResult, SourceCodeScanner};

pub fn main() {
    // Buffer for the input path
    let mut str_path = String::new();

    println!(
        "Current path: {:?}",
        std::env::current_dir().expect("No current dir exists.")
    );
    println!("Leave input blank to use default path.");
    print!("Input path to file: ");
    stdout().lock().flush().expect("Could not flush stdout.");

    // Read path input
    stdin()
        .lock()
        .read_line(&mut str_path)
        .expect("Could not read stdin.");
    let trimmed = str_path.trim();

    // Get path from string, falling back to the default example file.
    let path = if trimmed.is_empty() {
        Path::new("./examples/file/file.txt")
    } else {
        Path::new(trimmed)
    };

    println!();
    println!("Path: {path:?}");

    // Read the entire file into memory; panic on any read failure.
    let source = fs::read_to_string(path).expect("Could not read file");

    // Create a scanner over the file contents.
    let context = SourceCodeScanner::new(&source);
    let mut result = LexerResult::<&str, ()>::new();

    // Lexer logic: consume standard identifiers, skip everything else.
    while context.has_next() {
        if let Some(Spanned { data, span }) = context.try_consume_standard_identifier() {
            result.push_token(Token::new(data, span));
        } else {
            context.skip();
        }
    }

    // Print result
    println!();
    println!("{:#?}", result.finalize());
}