use df_ls_lexical_analysis::test_utils::LexerTestBuilder;

/// Exercises the accepted shapes of `token_argument_string`: punctuation,
/// digits, underscores, quotes, spaces, and embedded `!ARGn` references,
/// which the lexer must split out into `token_argument_bang_arg_n_sequence`
/// nodes. No lexer diagnostics are expected for any of these inputs.
#[test]
fn test_token_argument_string_permutations() {
    LexerTestBuilder::test_source(
        "Token Argument String
[A:a]
[A:string]
[A:string_string]
[A:>=<.-,+*)('&%$]
[A:String]
[A:UPPER_a]
[A:a_UPPER]
[A:UPw]
[A:02m]
[A:2m]
[A:A chain of 5 meters]
[A:After many YEARS]
[A:You will have !ARG1]
[A:But !ARG2 might stop you.]
[A:The combined power of !ARG3 and !ARG4 makes you !ARG5]
[A:!ARG1 !ARG2 !ARG3]
[A:!]
[A:ARG3 is not accepted in strings]
[A:ARG3!!!]
[A:!!!ARG3]
[A:_] (underscore)
[A: ] (space)
Variations on number
[A:0246a]
[A:a0246]
[A:0246_0415a]
[A:0246a123]
Variations on character
[A:'a'a]
[A:a'a']
[A:'''a]
[A:'a ']
Variations on Arg N
[A:ARG02a]
[A:aARG02]
Variations on Reference
[A:2_2]
[A:1_]
[A:!ARG1_REF]
[A:!ARG2_2_REF]
[A:2_!ARG2_REF]
[A:REF_!ARG1_REF]
[A:2_!ARG2]
[A:A_!ARG2]
[A:!ARG2_2]
[A:!ARG1]
[A:!ARG0]
[A:!ARG01]
[A:!ARG10]
[A:REF_!ARG1_REF!ARG2REF]
[A:!ARG1!ARG2]
[A:!ARG1_!ARG2]
",
    )
    .add_test_s_exp(
        "(raw_file
    (header: `Token Argument String`)
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `string`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `string_string`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `>=<.-,+*)('&%$`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `String`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `UPPER_a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `a_UPPER`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `UPw`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `02m`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `2m`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `A chain of 5 meters`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `After many YEARS`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `You will have `) (token_argument_bang_arg_n: `!ARG1`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `But `) (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: ` might stop you.`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `The combined power of `) (token_argument_bang_arg_n: `!ARG3`) (token_argument_string: ` and `) (token_argument_bang_arg_n: `!ARG4`) (token_argument_string: ` makes you `) (token_argument_bang_arg_n: `!ARG5`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG1`) (token_argument_string: ` `) (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: ` `) (token_argument_bang_arg_n: `!ARG3`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `!`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `ARG3 is not accepted in strings`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `ARG3!!!`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `!!`) (token_argument_bang_arg_n: `!ARG3`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `_`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: ` `) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `0246a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `a0246`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `0246_0415a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `0246a123`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `'a'a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `a'a'`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `'''a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `'a '`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `ARG02a`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `aARG02`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `2_2`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `1_`) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG1`) (token_argument_string: `_REF`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: `_2_REF`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `2_`) (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: `_REF`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `REF_`) (token_argument_bang_arg_n: `!ARG1`) (token_argument_string: `_REF`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `2_`) (token_argument_bang_arg_n: `!ARG2`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `A_`) (token_argument_bang_arg_n: `!ARG2`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: `_2`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG1`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG0`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG0`) (token_argument_string: `1`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG10`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_string: `REF_`) (token_argument_bang_arg_n: `!ARG1`) (token_argument_string: `_REF`) (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: `REF`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG1`) (token_argument_bang_arg_n: `!ARG2`) ) ) (]) )
    (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_bang_arg_n_sequence (token_argument_bang_arg_n: `!ARG1`) (token_argument_string: `_`) (token_argument_bang_arg_n: `!ARG2`) ) ) (]) )
    (comment) (EOF: ``)
)
",
    )
    .add_test_lexer_diagnostics_codes(vec![])
    .add_test_lexer_diagnostics_ranges(vec![])
    .run_test();
}