use df_ls_lexical_analysis::test_utils::LexerTestBuilder;

/// Lexing tokens that carry exactly one argument of each kind:
/// reference (`REF`), character (`'c'`), string, and integer.
#[test]
fn test_argument_types_1() {
    LexerTestBuilder::test_source(
        "header [REF:REF] [REF:'c'] char [REF:string] [REF:9] int ",
    )
    .add_test_s_exp(
        "(raw_file (header: `header`) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_reference: `REF`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_character: `'c'`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_string: `string`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_integer: `9`) ) (]) ) (comment) (EOF: ``) ) ",
    )
    // No lexer diagnostics expected for well-formed input.
    .add_test_lexer_diagnostics_codes(vec![])
    .add_test_lexer_diagnostics_ranges(vec![])
    .run_test();
}

/// Same argument kinds as `test_argument_types_1`, but two arguments per
/// token — also covers a negative integer (`-6`).
#[test]
fn test_argument_types_2() {
    LexerTestBuilder::test_source(
        "header [REF:REF:REF] [REF:'c':'p'] char [REF:string:string2] [REF:9:-6] int ",
    )
    .add_test_s_exp(
        "(raw_file (header: `header`) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_reference: `REF`) (:) (token_argument_reference: `REF`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_character: `'c'`) (:) (token_argument_character: `'p'`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_string: `string`) (:) (token_argument_string: `string2`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_integer: `9`) (:) (token_argument_integer: `-6`) ) (]) ) (comment) (EOF: ``) ) ",
    )
    // No lexer diagnostics expected for well-formed input.
    .add_test_lexer_diagnostics_codes(vec![])
    .add_test_lexer_diagnostics_ranges(vec![])
    .run_test();
}

/// Three arguments per token — also covers a symbol character (`'&'`),
/// a string argument containing a space (`large String`), and a
/// multi-digit integer (`93315`).
#[test]
fn test_argument_types_3() {
    LexerTestBuilder::test_source(
        "header [REF:REF:REF:OTHER] [REF:'c':'p':'&'] char [REF:string:string2:large String] [REF:9:-6:93315] int ",
    )
    .add_test_s_exp(
        "(raw_file (header: `header`) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_reference: `REF`) (:) 
(token_argument_reference: `REF`) (:) (token_argument_reference: `OTHER`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_character: `'c'`) (:) (token_argument_character: `'p'`) (:) (token_argument_character: `'&'`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_string: `string`) (:) (token_argument_string: `string2`) (:) (token_argument_string: `large String`) ) (]) ) (comment) (token ([) (token_name: `REF`) (token_arguments (:) (token_argument_integer: `9`) (:) (token_argument_integer: `-6`) (:) (token_argument_integer: `93315`) ) (]) ) (comment) (EOF: ``) ) ",
    )
    // No lexer diagnostics expected for well-formed input.
    .add_test_lexer_diagnostics_codes(vec![])
    .add_test_lexer_diagnostics_ranges(vec![])
    .run_test();
}