use df_ls_lexical_analysis::test_utils::LexerTestBuilder;

/// Snapshot test: the lexer must classify every bracketed `[A:...]` argument
/// below as a single `token_argument_reference`, across naming permutations —
/// plain names (`REF`), trailing digits (`REF2`), leading digits (`2_REF`,
/// `326REF`), multiple underscore-separated segments (`325_235_32_REF`), and
/// ARG-lookalikes that must NOT be treated specially (`ARG2Y`, `YARG2`,
/// `ARG2_2`, `ARG02`, `ARG000000`).
///
/// The expected output is given as an s-expression of the lexed tree; the
/// `(comment)` nodes interleaved between `(token ...)` nodes correspond to the
/// text/whitespace outside the brackets (the header line and the separators
/// between tokens). The two empty `vec![]`s assert that lexing this source
/// produces no diagnostic codes and no diagnostic ranges at all.
///
/// NOTE(review): in this view the test-source and s-exp string literals appear
/// collapsed onto one line; the original literals presumably contain real
/// newlines separating the raw tokens — TODO confirm against the repository
/// before editing the string contents. They are kept byte-identical here.
#[test]
fn test_token_argument_reference_permutations() {
    LexerTestBuilder::test_source(
        // Raw source under test: a header comment, then one token per entry.
        "Token Argument Reference [A:REF] [A:REF2] [A:2_REF] [A:326REF] [A:REF_REF] [A:325_235_32_REF] [A:REF_WITH_LONG_NAME] [A:2_A] Variations on Arg N [A:ARG2Y] [A:YARG2] [A:ARG2_2] [A:ARG02] [A:ARG000000] ",
    )
    .add_test_s_exp(
        // Expected lex tree: every `[A:<name>]` becomes
        // (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `<name>`) ) (]) )
        // with a (comment) node for the surrounding non-token text.
        "(raw_file (header: `Token Argument Reference`) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `REF`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `REF2`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `2_REF`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `326REF`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `REF_REF`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `325_235_32_REF`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `REF_WITH_LONG_NAME`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `2_A`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `ARG2Y`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `YARG2`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `ARG2_2`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `ARG02`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_reference: `ARG000000`) ) (]) ) (comment) (EOF: ``) ) ",
    )
    // No lexer diagnostics of any kind are expected for this input.
    .add_test_lexer_diagnostics_codes(vec![])
    .add_test_lexer_diagnostics_ranges(vec![])
    .run_test();
}