use df_ls_lexical_analysis::test_utils::LexerTestBuilder;

/// Verifies that the lexer classifies every permutation of token-argument
/// syntax seen in the fixture: lowercase arguments lex as
/// `token_argument_string`, uppercase single letters as
/// `token_argument_reference`, consecutive `:` as `token_argument_empty`,
/// plus character (`'c'`), integer (`10`), pipe-separated
/// (`pipe|value`) and `!ARGn` substitution (`s!ARG2string`) forms.
#[test]
fn test_token_arguments_permutations() {
    // Raw source under test. Text outside `[...]` lexes as comment nodes,
    // which is why the expected tree interleaves `(comment)` entries.
    let raw_source = "Token Arguments [A:a:b:c] [A:a:b:c:d:e:f:g:h:i:j:k:l:m:n:o:p:q:r:s:t:u:v:w:x:y:z:A:B:C:D:E:F:G:H:I:J:K:L:M:N:O:P:Q:R:S:T:U:V:W:X:Y:Z] [A::] [A:::::a:string] [A:string:'c':10:REF:pipe|value:s!ARG2string] ";

    // Expected lexer output as an S-expression. Compared verbatim by the
    // test builder, so this literal (including its embedded newline and
    // trailing spaces) must stay byte-exact.
    let expected_tree = "(raw_file (header: `Token Arguments`) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `a`) (:) (token_argument_string: `b`) (:) (token_argument_string: `c`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `a`) (:) (token_argument_string: `b`) (:) (token_argument_string: `c`) (:) (token_argument_string: `d`) (:) (token_argument_string: `e`) (:) (token_argument_string: `f`) (:) (token_argument_string: `g`) (:) (token_argument_string: `h`) (:) (token_argument_string: `i`) (:) (token_argument_string: `j`) (:) (token_argument_string: `k`) (:) (token_argument_string: `l`) (:) (token_argument_string: `m`) (:) (token_argument_string: `n`) (:) (token_argument_string: `o`) (:) (token_argument_string: `p`) (:) (token_argument_string: `q`) (:) (token_argument_string: `r`) (:) (token_argument_string: `s`) (:) (token_argument_string: `t`) (:) (token_argument_string: `u`) (:) (token_argument_string: `v`) (:) (token_argument_string: `w`) (:) (token_argument_string: `x`) (:) (token_argument_string: `y`) (:) (token_argument_string: `z`) (:) (token_argument_reference: `A`) (:) (token_argument_reference: `B`) (:) (token_argument_reference: `C`) (:) (token_argument_reference: `D`) (:) (token_argument_reference: `E`) (:) (token_argument_reference: `F`) (:) (token_argument_reference: `G`) (:) (token_argument_reference: `H`) (:) (token_argument_reference: `I`) (:) (token_argument_reference: `J`) (:) (token_argument_reference: `K`) (:) (token_argument_reference: `L`) (:) (token_argument_reference: `M`) (:) (token_argument_reference: `N`) (:) 
(token_argument_reference: `O`) (:) (token_argument_reference: `P`) (:) (token_argument_reference: `Q`) (:) (token_argument_reference: `R`) (:) (token_argument_reference: `S`) (:) (token_argument_reference: `T`) (:) (token_argument_reference: `U`) (:) (token_argument_reference: `V`) (:) (token_argument_reference: `W`) (:) (token_argument_reference: `X`) (:) (token_argument_reference: `Y`) (:) (token_argument_reference: `Z`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_empty: ``) (:) (token_argument_empty: ``) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_empty: ``) (:) (token_argument_empty: ``) (:) (token_argument_empty: ``) (:) (token_argument_empty: ``) (:) (token_argument_string: `a`) (:) (token_argument_string: `string`) ) (]) ) (comment) (token ([) (token_name: `A`) (token_arguments (:) (token_argument_string: `string`) (:) (token_argument_character: `'c'`) (:) (token_argument_integer: `10`) (:) (token_argument_reference: `REF`) (:) (token_argument_pipe_arguments (token_argument_string: `pipe`) (|) (token_argument_string: `value`) ) (:) (token_argument_bang_arg_n_sequence (token_argument_string: `s`) (token_argument_bang_arg_n: `!ARG2`) (token_argument_string: `string`) ) ) (]) ) (comment) (EOF: ``) ) ";

    LexerTestBuilder::test_source(raw_source)
        .add_test_s_exp(expected_tree)
        // Well-formed input: no lexer diagnostics of any kind are expected.
        .add_test_lexer_diagnostics_codes(vec![])
        .add_test_lexer_diagnostics_ranges(vec![])
        .run_test();
}