// Earley parser for a line-oriented calculator, in the style of the code generated by the
// `gramatica` crate: `main` reads stdin line by line, appends a newline, and parses and
// evaluates each line as an arithmetic expression.
extern crate gramatica;

use std::cmp::Ordering;
use std::io::BufRead;
use gramatica::{Associativity, EarleyKind, State, Parser, ParsingTablesTrait, AmbiguityInfo};

fn parse_from_stdin() {
    let stdin = std::io::stdin();
    for rline in stdin.lock().lines() {
        let line = rline.unwrap() + "\n";
        println!("line={}", line);
        match Parser::<Token, ParsingTables>::parse(&line, None) {
            Err(x) => println!("error parsing: {:?}", x),
            Ok(x) => println!("parsed correctly: {:?}", x),
        }
    }
}

fn main() {
    parse_from_stdin();
}

// One variant per terminal and non-terminal; `Num` and `Expression` carry their value.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    DummyStart,
    Num(f64),
    Plus,
    Minus,
    Star,
    Slash,
    Caret,
    LPar,
    RPar,
    NewLine,
    Input,
    Line,
    Expression(f64),
}

impl Default for Token {
    fn default() -> Self { Token::DummyStart }
}

struct ParsingTables {}

impl ParsingTablesTrait<Token> for ParsingTables {
    // Index of the start symbol (Input).
    fn initial() -> usize { 10 }

    // Tokenizer: try each terminal's regular expression at the current source position
    // and return the matched length together with the corresponding token.
    fn match_some(parser: &mut Parser<Token, Self>) -> Option<(usize, Token)> {
        let source = &parser.source[parser.source_index..];
        match parser.re("[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?", source) {
            None => (),
            Some((size, string)) => return Some((size, Token::Num(string.parse::<f64>().unwrap()))),
        };
        match parser.re("\\+", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::Plus)),
        };
        match parser.re("-", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::Minus)),
        };
        match parser.re("\\*", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::Star)),
        };
        match parser.re("/", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::Slash)),
        };
        match parser.re("\\^", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::Caret)),
        };
        match parser.re("\\(", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::LPar)),
        };
        match parser.re("\\)", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::RPar)),
        };
        match parser.re("\\n", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::NewLine)),
        };
        // Remaining whitespace is consumed and mapped to the dummy token.
        match parser.re("\\s+", source) {
            None => (),
            Some((size, _)) => return Some((size, Token::DummyStart)),
        };
        None
    }//match_some
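    // Grammar encoded by `predict` and `compute_value` below (symbol indices follow `to_usize`):
    //   terminals:      1 Num   2 Plus(+)   3 Minus(-)   4 Star(*)   5 Slash(/)   6 Caret(^)
    //                   7 LPar  8 RPar      9 NewLine    (0 is the DummyStart token)
    //   non-terminals: 10 Input  11 Line  12 Expression
    //
    //   rule  0: (implicit start) -> Input
    //   rule  1: Input      -> (empty)
    //   rule  2: Input      -> Input Line
    //   rule  3: Line       -> NewLine
    //   rule  4: Line       -> Expression NewLine    (prints the value)
    //   rule  5: Expression -> Num
    //   rule  6: Expression -> Expression + Expression
    //   rule  7: Expression -> Expression - Expression
    //   rule  8: Expression -> Expression * Expression
    //   rule  9: Expression -> Expression / Expression
    //   rule 10: Expression -> - Expression           (unary minus)
    //   rule 11: Expression -> Expression ^ Expression
    //   rule 12: Expression -> ( Expression )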
    // Earley prediction step: for the non-terminal `token`, add one item per production
    // that has it on the left-hand side (see the rule list above).
    fn predict(parser: &mut Parser<Token, Self>, index: usize, state_index: usize, token: usize) {
        match token {
            // Input
            10 => {
                parser.sets[index].predict(State{rule: 1, left: 10, right: vec![], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 0], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 2, left: 10, right: vec![10,11], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 2], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
            }
            // Line
            11 => {
                parser.sets[index].predict(State{rule: 3, left: 11, right: vec![9], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 1], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 4, left: 11, right: vec![12,9], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 2], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
            }
            // Expression
            12 => {
                parser.sets[index].predict(State{rule: 5, left: 12, right: vec![1], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 1], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 6, left: 12, right: vec![12,2,12], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 3], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 7, left: 12, right: vec![12,3,12], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 3], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 8, left: 12, right: vec![12,4,12], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 3], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 9, left: 12, right: vec![12,5,12], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 3], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 10, left: 12, right: vec![3,12], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 2], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 11, left: 12, right: vec![12,6,12], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 3], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
                parser.sets[index].predict(State{rule: 12, left: 12, right: vec![7,12,8], position: 0, original_set: index, kind: EarleyKind::Predict(state_index), values: vec![Token::DummyStart; 3], computed_value: Token::DummyStart, ambiguity_info: AmbiguityInfo::default()});
            }
            _ => panic!(""),
        }
    }//predict
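    // Semantic actions: `compute_value` folds each completed right-hand side into a value,
    // doing the arithmetic for the Expression rules and printing the result for rule 4.
    // Ambiguity between the operator rules is resolved by `table_priority` and
    // `table_associativity` further below: judging from those tables, `^` (rule 11) binds
    // tightest and is right-associative, `*` and `/` (rules 8, 9) bind tighter than `+`,
    // binary `-` and unary `-` (rules 6, 7, 10), and all of those are left-associative.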
    fn compute_value(state: &mut State<Token>) {
        state.computed_value = match state.rule {
            // rule 0: implicit start rule, propagate the Input value.
            0 => state.values[0].clone(),
            // rule 1: Input -> (empty)
            1 => Token::Input,
            // rule 2: Input -> Input Line
            2 => match (&state.values[0], &state.values[1]) {
                (&Token::Input, &Token::Line) => Token::Input,
                _ => panic!(""),
            },
            // rule 3: Line -> NewLine
            3 => match &state.values[0] {
                &Token::NewLine => Token::Line,
                _ => panic!(""),
            },
            // rule 4: Line -> Expression NewLine (print the evaluated expression)
            4 => match (&state.values[0], &state.values[1]) {
                (&Token::Expression(value), &Token::NewLine) => {
                    println!("{}", value);
                    Token::Line
                },
                _ => panic!(""),
            },
            // rule 5: Expression -> Num
            5 => match &state.values[0] {
                &Token::Num(value) => Token::Expression(value),
                _ => panic!(""),
            },
            // rule 6: Expression -> Expression + Expression
            6 => match (&state.values[0], &state.values[1], &state.values[2]) {
                (&Token::Expression(l), &Token::Plus, &Token::Expression(r)) => Token::Expression(l + r),
                _ => panic!(""),
            },
            // rule 7: Expression -> Expression - Expression
            7 => match (&state.values[0], &state.values[1], &state.values[2]) {
                (&Token::Expression(l), &Token::Minus, &Token::Expression(r)) => Token::Expression(l - r),
                _ => panic!(""),
            },
            // rule 8: Expression -> Expression * Expression
            8 => match (&state.values[0], &state.values[1], &state.values[2]) {
                (&Token::Expression(l), &Token::Star, &Token::Expression(r)) => Token::Expression(l * r),
                _ => panic!(""),
            },
            // rule 9: Expression -> Expression / Expression
            9 => match (&state.values[0], &state.values[1], &state.values[2]) {
                (&Token::Expression(l), &Token::Slash, &Token::Expression(r)) => Token::Expression(l / r),
                _ => panic!(""),
            },
            // rule 10: Expression -> - Expression (unary minus)
            10 => match (&state.values[0], &state.values[1]) {
                (&Token::Minus, &Token::Expression(value)) => Token::Expression(-value),
                _ => panic!(""),
            },
            // rule 11: Expression -> Expression ^ Expression
            11 => match (&state.values[0], &state.values[1], &state.values[2]) {
                (&Token::Expression(l), &Token::Caret, &Token::Expression(r)) => Token::Expression(l.powf(r)),
                _ => panic!(""),
            },
            // rule 12: Expression -> ( Expression )
            12 => match (&state.values[0], &state.values[1], &state.values[2]) {
                (&Token::LPar, &Token::Expression(value), &Token::RPar) => Token::Expression(value),
                _ => panic!(""),
            },
            _ => panic!(""),
        }
    }//compute_value

    // Symbols 1..=9 are terminals; 0 (DummyStart) and 10..=12 are non-terminals.
    fn table_terminal(token_index: usize) -> bool {
        match token_index {
            1|2|3|4|5|6|7|8|9 => true,
            0|10|11|12 => false,
            _ => panic!("table_terminal"),
        }
    }//table_terminal

    // Relative priority of the Expression rules (6..=11), used to pick a parse when two
    // operator rules compete for the same span.
    fn table_priority(a: usize, b: usize) -> Option<Ordering> {
        match (a, b) {
            (6,6) => Some(Ordering::Equal), (6,7) => Some(Ordering::Equal), (6,8) => Some(Ordering::Greater), (6,9) => Some(Ordering::Greater), (6,10) => Some(Ordering::Equal), (6,11) => Some(Ordering::Greater),
            (7,6) => Some(Ordering::Equal), (7,7) => Some(Ordering::Equal), (7,8) => Some(Ordering::Greater), (7,9) => Some(Ordering::Greater), (7,10) => Some(Ordering::Equal), (7,11) => Some(Ordering::Greater),
            (8,6) => Some(Ordering::Less), (8,7) => Some(Ordering::Less), (8,8) => Some(Ordering::Equal), (8,9) => Some(Ordering::Equal), (8,10) => Some(Ordering::Less), (8,11) => Some(Ordering::Greater),
            (9,6) => Some(Ordering::Less), (9,7) => Some(Ordering::Less), (9,8) => Some(Ordering::Equal), (9,9) => Some(Ordering::Equal), (9,10) => Some(Ordering::Less), (9,11) => Some(Ordering::Greater),
            (10,6) => Some(Ordering::Equal), (10,7) => Some(Ordering::Equal), (10,8) => Some(Ordering::Greater), (10,9) => Some(Ordering::Greater), (10,10) => Some(Ordering::Equal), (10,11) => Some(Ordering::Greater),
            (11,6) => Some(Ordering::Less), (11,7) => Some(Ordering::Less), (11,8) => Some(Ordering::Less), (11,9) => Some(Ordering::Less), (11,10) => Some(Ordering::Less), (11,11) => Some(Ordering::Equal),
            _ => None,
        }
    }//table_priority

    // Associativity of the Expression rules: every operator is left-associative except
    // `^` (rule 11), which is right-associative.
    fn table_associativity(rule: usize) -> Option<Associativity> {
        match rule {
            6|7|8|9|10 => Some(Associativity::Left),
            11 => Some(Associativity::Right),
            _ => None,
        }
    }//table_associativity

    // Map each token variant to its symbol index.
    fn to_usize(token: &Token) -> usize {
        match token {
            &Token::DummyStart => 0,
            &Token::Num(_) => 1,
            &Token::Plus => 2,
            &Token::Minus => 3,
            &Token::Star => 4,
            &Token::Slash => 5,
            &Token::Caret => 6,
            &Token::LPar => 7,
            &Token::RPar => 8,
            &Token::NewLine => 9,
            &Token::Input => 10,
            &Token::Line => 11,
            &Token::Expression(_) => 12,
        }
    }//to_usize
}//impl
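// A minimal sanity-test sketch, not part of the generated output. It assumes that
// `Parser::parse` returns `Err` for input the grammar cannot derive, as the error branch
// in `parse_from_stdin` suggests, and it mirrors `parse_from_stdin` by terminating every
// line with '\n' (rule 4 is `Expression NewLine`).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn accepts_simple_expressions() {
        // Two well-formed lines; rule 2 (Input -> Input Line) allows several lines per input.
        let input = "2*(3+4)\n1+2*3\n".to_string();
        assert!(Parser::<Token, ParsingTables>::parse(&input, None).is_ok());
    }

    #[test]
    fn rejects_ungrammatical_input() {
        // A dangling operator cannot be derived from the Expression rules.
        let input = "2*+\n".to_string();
        assert!(Parser::<Token, ParsingTables>::parse(&input, None).is_err());
    }
}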