#![allow(clippy::needless_update)]

use df_ls_core::Reference;
use df_ls_debug_structure::*;
use df_ls_diagnostics::lsp_types::*;
use df_ls_lexical_analysis::test_utils::LexerTestBuilder;
use df_ls_syntax_analysis::test_utils::SyntaxTestBuilder;

/// Regression test for issue #22
///
/// The `MAIN` token is never closed: the lexer should report a single
/// `missing_end_bracket` while the full token structure is still recovered.
#[test]
fn missing_close_brackets_object() {
    SyntaxTestBuilder::from_lexer_test_builder(
        LexerTestBuilder::test_source(
            "h
            [MAIN:TYPE1
                [TYPE1:DOG]
                [ITEM:T1]
            ",
        )
        .add_test_lexer_diagnostics_codes(vec!["missing_end_bracket"])
        .add_test_lexer_diagnostics_ranges(vec![Range {
            start: Position { line: 1, character: 23 },
            end: Position { line: 1, character: 23 },
        }]),
    )
    .add_test_structure(DebugRaw {
        header: "h".to_owned(),
        token_structure: vec![MainToken {
            type_1: vec![Type1Token {
                reference: Some(Reference("DOG".to_owned())),
                list: vec![Reference("T1".to_owned())],
                ..Default::default()
            }],
            ..Default::default()
        }],
    })
    .add_test_syntax_diagnostics_codes(vec![])
    .add_test_syntax_diagnostics_ranges(vec![])
    .run_test();
}

/// Regression test for issue #22
///
/// The opening bracket of `MAIN` is missing: the lexer reports
/// `unexpected_end_bracket` and the syntax analysis rejects the remaining
/// tokens, leaving an empty structure.
#[test]
fn missing_open_brackets_object() {
    SyntaxTestBuilder::from_lexer_test_builder(
        LexerTestBuilder::test_source(
            "h
            MAIN:TYPE1]

            [TYPE1:DOG]
                [ITEM:T1]
            ",
        )
        .add_test_lexer_diagnostics_codes(vec!["unexpected_end_bracket"])
        .add_test_lexer_diagnostics_ranges(vec![Range {
            start: Position { line: 1, character: 22 },
            end: Position { line: 1, character: 23 },
        }]),
    )
    .add_test_structure(DebugRaw {
        header: "".to_owned(),
        token_structure: vec![],
    })
    .add_test_syntax_diagnostics_codes(vec!["token_not_expected", "unchecked_code"])
    .add_test_syntax_diagnostics_ranges(vec![
        Range {
            start: Position { line: 3, character: 12 },
            end: Position { line: 3, character: 23 },
        },
        Range {
            start: Position { line: 3, character: 23 },
            end: Position { line: 4, character: 25 },
        },
    ])
    .run_test();
}

/// Regression test for issue #23
///
/// A cluster of stray and unmatched brackets precedes the real tokens: the
/// lexer reports a diagnostic for every malformed bracket and the syntax
/// analysis leaves the structure empty.
#[test]
fn extra_brackets_object() {
    SyntaxTestBuilder::from_lexer_test_builder(
        LexerTestBuilder::test_source(
            "h

            [[]
            ]
            [[
            ][[][

            [MAIN:TYPE1]

                [TYPE1:DOG]
                [ITEM:T1]
            ",
        )
        .add_test_lexer_diagnostics_codes(vec![
            "missing_token_name",
            "missing_end_bracket",
            "missing_token_name",
            "unexpected_end_bracket",
            "missing_token_name",
            "missing_end_bracket",
            "missing_token_name",
            "missing_end_bracket",
            "unexpected_end_bracket",
            "missing_token_name",
            "missing_end_bracket",
            "missing_token_name",
            "missing_token_name",
            "missing_end_bracket",
        ])
        .add_test_lexer_diagnostics_ranges(vec![
            Range {
                start: Position { line: 2, character: 13 },
                end: Position { line: 2, character: 13 },
            },
            Range {
                start: Position { line: 2, character: 13 },
                end: Position { line: 2, character: 13 },
            },
            Range {
                start: Position { line: 2, character: 14 },
                end: Position { line: 2, character: 14 },
            },
            Range {
                start: Position { line: 3, character: 12 },
                end: Position { line: 3, character: 13 },
            },
            Range {
                start: Position { line: 4, character: 13 },
                end: Position { line: 4, character: 13 },
            },
            Range {
                start: Position { line: 4, character: 13 },
                end: Position { line: 4, character: 13 },
            },
            Range {
                start: Position { line: 4, character: 14 },
                end: Position { line: 4, character: 14 },
            },
            Range {
                start: Position { line: 4, character: 14 },
                end: Position { line: 4, character: 14 },
            },
            Range {
                start: Position { line: 5, character: 12 },
                end: Position { line: 5, character: 13 },
            },
            Range {
                start: Position { line: 5, character: 14 },
                end: Position { line: 5, character: 14 },
            },
            Range {
                start: Position { line: 5, character: 14 },
                end: Position { line: 5, character: 14 },
            },
            Range {
                start: Position { line: 5, character: 15 },
                end: Position { line: 5, character: 15 },
            },
            Range {
                start: Position { line: 5, character: 17 },
                end: Position { line: 5, character: 17 },
            },
            Range {
                start: Position { line: 5, character: 17 },
                end: Position { line: 5, character: 17 },
            },
        ]),
    )
    .add_test_structure(DebugRaw {
        header: "".to_owned(),
        token_structure: vec![],
    })
    .add_test_syntax_diagnostics_codes(vec!["token_not_expected", "unchecked_code"])
    .add_test_syntax_diagnostics_ranges(vec![
        Range {
            start: Position { line: 2, character: 12 },
            end: Position { line: 2, character: 13 },
        },
        Range {
            start: Position { line: 2, character: 13 },
            end: Position { line: 10, character: 25 },
        },
    ])
    .run_test();
}

/// Regression test for issue #23
///
/// An extra opening bracket inside the `ITEM` token: the lexer reports
/// `missing_end_bracket` and the syntax analysis marks the malformed token as
/// unknown and the remainder of the line as unchecked.
#[test]
fn extra_open_bracket_tokens() {
    SyntaxTestBuilder::from_lexer_test_builder(
        LexerTestBuilder::test_source(
            "h

            [MAIN:TYPE1]
                [TYPE1:DOG]
                [ITE[M:T1]
            ",
        )
        .add_test_lexer_diagnostics_codes(vec!["missing_end_bracket"])
        .add_test_lexer_diagnostics_ranges(vec![Range {
            start: Position { line: 4, character: 20 },
            end: Position { line: 4, character: 20 },
        }]),
    )
    .add_test_structure(DebugRaw {
        header: "h".to_owned(),
        token_structure: vec![MainToken {
            type_1: vec![Type1Token {
                reference: Some(Reference("DOG".to_owned())),
                ..Default::default()
            }],
            ..Default::default()
        }],
    })
    .add_test_syntax_diagnostics_codes(vec!["unknown_token", "unchecked_code"])
    .add_test_syntax_diagnostics_ranges(vec![
        Range {
            start: Position { line: 4, character: 16 },
            end: Position { line: 4, character: 20 },
        },
        Range {
            start: Position { line: 4, character: 20 },
            end: Position { line: 4, character: 26 },
        },
    ])
    .run_test();
}

/// Regression test for issue #23
///
/// An extra closing bracket inside the `ITEM` token: the lexer reports
/// `unexpected_end_bracket` and the syntax analysis marks the malformed token
/// as unknown and the remainder of the line as unchecked.
#[test]
fn extra_close_bracket_tokens() {
    SyntaxTestBuilder::from_lexer_test_builder(
        LexerTestBuilder::test_source(
            "h

            [MAIN:TYPE1]
                [TYPE1:DOG]
                [ITE]M:T1]
            ",
        )
        .add_test_lexer_diagnostics_codes(vec!["unexpected_end_bracket"])
        .add_test_lexer_diagnostics_ranges(vec![Range {
            start: Position { line: 4, character: 25 },
            end: Position { line: 4, character: 26 },
        }]),
    )
    .add_test_structure(DebugRaw {
        header: "h".to_owned(),
        token_structure: vec![MainToken {
            type_1: vec![Type1Token {
                reference: Some(Reference("DOG".to_owned())),
                ..Default::default()
            }],
            ..Default::default()
        }],
    })
    .add_test_syntax_diagnostics_codes(vec!["unknown_token", "unchecked_code"])
    .add_test_syntax_diagnostics_ranges(vec![
        Range {
            start: Position { line: 4, character: 16 },
            end: Position { line: 4, character: 21 },
        },
        Range {
            start: Position { line: 4, character: 21 },
            end: Position { line: 4, character: 26 },
        },
    ])
    .run_test();
}