use df_ls_lexical_analysis::test_utils::LexerTestBuilder;

/// Checks how the lexer classifies token argument values. Text outside
/// `[...]` is lexed as a comment, so the trailing word on each line of the
/// source below documents the expected argument type (`string`, `ref`,
/// `int`, `char`, or `empty`).
#[test]
fn test_token_values() {
    LexerTestBuilder::test_source(
        "descriptor_color_standard

[NAME:Amber] string
[NAME:A mber] string
[NAME:AMBER] ref
[NAME:AMBER_] ref
[NAME:AMBER ] string
[NAME:AMBER-] string
[NAME:AMBER:] ref + empty arg
[NAME:AMBER ] string
[NAME:AMBER a] string
[NAME:AMBER A] string
[NAME:AMBER___A] ref
[NAME:AMbER___A] string
[NAME:AMbER] string
[NAME:9AMbER] string
[NAME:9AMBER] ref
[NAME:9AMBER ] string
[NAME:9AMB2ER] ref
[NAME:AMB2ER] ref
[NAME:52] int
[NAME:-52] int
[NAME:42] int
[NAME:-42] int
[NAME:2556888] int
[NAME:000] int
[NAME:0] int
[NAME:'c'] char
[NAME:'\\n'] string
[NAME:'k'] char
[NAME:'R'] char
[NAME:'9'] char
[NAME:'7'] char
[NAME:'['] char
[NAME:':'] string + string
[NAME:'*'] char
[NAME:'''] char
[NAME:'\"'] char
[NAME:\"\"\"] string
[NAME:'@'] char
[NAME:POP] ref
[NAME:POP9] ref
[NAME:99P9] ref
[NAME:99 P9] string
[NAME:9p P9] string
[NAME:9p_P9] string
[NAME:9P_P9] ref
[NAME:9_P9] ref
[NAME:99_P9] ref
[NAME:9_9_P9] ref
[NAME:9__P9] ref
[NAME:9_9] string
",
    )
    .add_test_s_exp(
        "(raw_file
    (header: `descriptor_color_standard`)
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `Amber`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `A mber`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `AMBER`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `AMBER_`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMBER `) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMBER-`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `AMBER`) (:) (token_argument_empty: ``) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMBER `) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMBER a`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMBER A`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `AMBER___A`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMbER___A`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `AMbER`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `9AMbER`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `9AMBER`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `9AMBER `) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `9AMB2ER`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `AMB2ER`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `52`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `-52`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `42`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `-42`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `2556888`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `000`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_integer: `0`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'c'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `'\\n'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'k'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'R'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'9'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'7'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'['`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `'`) (:) (token_argument_string: `'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'*'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'''`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'\"'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `\"\"\"`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_character: `'@'`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `POP`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `POP9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `99P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `99 P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `9p P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `9p_P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `9P_P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `9_P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `99_P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `9_9_P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_reference: `9__P9`) ) (]) )
    (comment)
    (token ([) (token_name: `NAME`) (token_arguments (:) (token_argument_string: `9_9`) ) (]) )
    (comment)
    (EOF: ``)
)
",
    )
    // No lexer diagnostics are expected: every argument lexes cleanly as
    // one of the types above.
    .add_test_lexer_diagnostics_codes(vec![])
    .add_test_lexer_diagnostics_ranges(vec![])
    .run_test();
}
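
// The expectations above pin down a classification precedence for argument
// values. The sketch below is illustrative only; it is NOT part of the
// `df_ls_lexical_analysis` API. It restates the rules this test appears to
// encode: empty argument, then integer (`-?[0-9]+`), then character (exactly
// one char between single quotes), then reference (`[A-Z0-9_]+` containing
// at least one letter), otherwise string.
fn classify_argument_sketch(arg: &str) -> &'static str {
    if arg.is_empty() {
        return "empty";
    }
    // Integer: optional leading minus, then one or more ASCII digits.
    let digits = arg.strip_prefix('-').unwrap_or(arg);
    if !digits.is_empty() && digits.bytes().all(|b| b.is_ascii_digit()) {
        return "integer";
    }
    // Character: a single char in single quotes, e.g. `'c'`, `'['`, `'''`.
    let chars: Vec<char> = arg.chars().collect();
    if chars.len() == 3 && chars[0] == '\'' && chars[2] == '\'' {
        return "character";
    }
    // Reference: uppercase letters, digits, and underscores, with at least
    // one letter; `9_9` has no letter and therefore falls through to string.
    if arg
        .bytes()
        .all(|b| b.is_ascii_uppercase() || b.is_ascii_digit() || b == b'_')
        && arg.bytes().any(|b| b.is_ascii_uppercase())
    {
        return "reference";
    }
    "string"
}

#[test]
fn test_classify_argument_sketch() {
    // Spot-check the sketch against a few of the labelled cases above.
    assert_eq!(classify_argument_sketch("AMBER"), "reference");
    assert_eq!(classify_argument_sketch("AMbER"), "string");
    assert_eq!(classify_argument_sketch("-52"), "integer");
    assert_eq!(classify_argument_sketch("'c'"), "character");
    assert_eq!(classify_argument_sketch("9_9"), "string");
    assert_eq!(classify_argument_sketch("9__P9"), "reference");
}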