# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2021"
name = "chinese_segmenter"
version = "1.0.1"
authors = ["Preston Wang-Stosur-Bassett"]
description = "Tokenize Chinese sentences using a dictionary-driven largest first matching approach."
readme = "README.md"
keywords = [
    "chinese",
    "hanzi",
    "segment",
    "tokenize",
]
categories = [
    "text-processing",
    "localization",
    "internationalization",
    "value-formatting",
]
license = "MIT"
repository = "https://github.com/sotch-pr35mac/chinese_segmenter"
resolver = "2"

[dependencies.character_converter]
version = "2.1.2"
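
# ---------------------------------------------------------------------------
# Illustration only (not part of the manifest): the description above refers
# to a dictionary-driven largest first matching approach. The Rust sketch
# below, kept in comments so this file remains valid TOML, shows one generic
# way such greedy longest-match segmentation can be written. The function and
# dictionary names are hypothetical and are not the crate's actual API.
#
#     use std::collections::HashSet;
#
#     /// Greedily emit the longest dictionary entry starting at each position;
#     /// falls back to a single character when nothing in the dictionary matches.
#     fn largest_first_segment(text: &str, dict: &HashSet<&str>) -> Vec<String> {
#         let chars: Vec<char> = text.chars().collect();
#         let mut tokens = Vec::new();
#         let mut i = 0;
#         while i < chars.len() {
#             // Try the longest remaining candidate first, shrinking until a match.
#             let mut end = chars.len();
#             loop {
#                 let candidate: String = chars[i..end].iter().collect();
#                 if dict.contains(candidate.as_str()) || end == i + 1 {
#                     tokens.push(candidate);
#                     i = end;
#                     break;
#                 }
#                 end -= 1;
#             }
#         }
#         tokens
#     }
# ---------------------------------------------------------------------------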