# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2018"
name = "rust-bert"
version = "0.23.0"
authors = ["Guillaume Becquin"]
build = "build.rs"
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Ready-to-use NLP pipelines and language models"
documentation = "https://docs.rs/rust-bert"
readme = "README.md"
keywords = [
    "nlp",
    "deep-learning",
    "machine-learning",
    "transformers",
    "translation",
]
license = "Apache-2.0"
repository = "https://github.com/guillaume-be/rust-bert"

[package.metadata.docs.rs]
features = ["doc-only"]

[profile.bench]
opt-level = 3

[lib]
name = "rust_bert"
crate-type = ["lib"]
path = "src/lib.rs"

[[bin]]
name = "convert-tensor"
path = "src/convert-tensor.rs"
doc = false

[[example]]
name = "async-sentiment"
path = "examples/async-sentiment.rs"

[[example]]
name = "buffer_resource"
path = "examples/buffer_resource.rs"

[[example]]
name = "codebert"
path = "examples/codebert.rs"

[[example]]
name = "conversation"
path = "examples/conversation.rs"

[[example]]
name = "generation_gpt2"
path = "examples/generation_gpt2.rs"

[[example]]
name = "generation_gpt2_hf_tokenizers"
path = "examples/generation_gpt2_hf_tokenizers.rs"
required-features = ["hf-tokenizers"]

[[example]]
name = "generation_gpt_neo"
path = "examples/generation_gpt_neo.rs"

[[example]]
name = "generation_gptj"
path = "examples/generation_gptj.rs"

[[example]]
name = "generation_reformer"
path = "examples/generation_reformer.rs"

[[example]]
name = "generation_xlnet"
path = "examples/generation_xlnet.rs"

[[example]]
name = "keyword_extraction"
path = "examples/keyword_extraction.rs"

[[example]]
name = "masked_language"
path = "examples/masked_language.rs"

[[example]]
name = "named_entities_recognition"
path = "examples/named_entities_recognition.rs"

[[example]]
name = "natural_language_inference_deberta"
path = "examples/natural_language_inference_deberta.rs"

[[example]]
name = "onnx-masked-lm"
path = "examples/onnx-masked-lm.rs"
required-features = ["onnx"]

[[example]]
name = "onnx-question-answering"
path = "examples/onnx-question-answering.rs"
required-features = ["onnx"]

[[example]]
name = "onnx-sequence-classification"
path = "examples/onnx-sequence-classification.rs"
required-features = ["onnx"]

[[example]]
name = "onnx-text-generation"
path = "examples/onnx-text-generation.rs"
required-features = ["onnx"]

[[example]]
name = "onnx-token-classification"
path = "examples/onnx-token-classification.rs"
required-features = ["onnx"]

[[example]]
name = "onnx-translation"
path = "examples/onnx-translation.rs"
required-features = ["onnx"]

[[example]]
name = "part_of_speech_tagging"
path = "examples/part_of_speech_tagging.rs"

[[example]]
name = "question_answering"
path = "examples/question_answering.rs"

[[example]]
name = "question_answering_bert"
path = "examples/question_answering_bert.rs"

[[example]]
name = "question_answering_longformer"
path = "examples/question_answering_longformer.rs"

[[example]]
name = "question_answering_squad"
path = "examples/question_answering_squad.rs"

[[example]]
name = "sentence_embeddings"
path = "examples/sentence_embeddings.rs"

[[example]]
name = "sentence_embeddings_local"
path = "examples/sentence_embeddings_local.rs"

[[example]]
name = "sentiment_analysis"
path = "examples/sentiment_analysis.rs"

[[example]]
name = "sentiment_analysis_fnet"
path = "examples/sentiment_analysis_fnet.rs"

[[example]]
name = "sentiment_analysis_sst2"
path = "examples/sentiment_analysis_sst2.rs"

[[example]]
name = "sequence_classification"
path = "examples/sequence_classification.rs"

[[example]]
name = "sequence_classification_multilabel"
path = "examples/sequence_classification_multilabel.rs"

[[example]]
name = "summarization_bart"
path = "examples/summarization_bart.rs"

[[example]]
name = "summarization_pegasus"
path = "examples/summarization_pegasus.rs"

[[example]]
name = "summarization_prophetnet"
path = "examples/summarization_prophetnet.rs"

[[example]]
name = "summarization_t5"
path = "examples/summarization_t5.rs"

[[example]]
name = "token_classification"
path = "examples/token_classification.rs"

[[example]]
name = "translation_builder"
path = "examples/translation_builder.rs"

[[example]]
name = "translation_m2m100"
path = "examples/translation_m2m100.rs"

[[example]]
name = "translation_marian"
path = "examples/translation_marian.rs"

[[example]]
name = "translation_mbart"
path = "examples/translation_mbart.rs"

[[example]]
name = "translation_t5"
path = "examples/translation_t5.rs"

[[example]]
name = "zero_shot_classification"
path = "examples/zero_shot_classification.rs"

[[test]]
name = "albert"
path = "tests/albert.rs"

[[test]]
name = "bart"
path = "tests/bart.rs"

[[test]]
name = "bert"
path = "tests/bert.rs"

[[test]]
name = "deberta"
path = "tests/deberta.rs"

[[test]]
name = "deberta_v2"
path = "tests/deberta_v2.rs"

[[test]]
name = "distilbert"
path = "tests/distilbert.rs"

[[test]]
name = "distilgpt2"
path = "tests/distilgpt2.rs"

[[test]]
name = "electra"
path = "tests/electra.rs"

[[test]]
name = "fnet"
path = "tests/fnet.rs"

[[test]]
name = "gpt2"
path = "tests/gpt2.rs"

[[test]]
name = "gpt_j"
path = "tests/gpt_j.rs"

[[test]]
name = "gpt_neo"
path = "tests/gpt_neo.rs"

[[test]]
name = "hf_tokenizers"
path = "tests/hf_tokenizers.rs"

[[test]]
name = "longformer"
path = "tests/longformer.rs"

[[test]]
name = "longt5"
path = "tests/longt5.rs"

[[test]]
name = "m2m100"
path = "tests/m2m100.rs"

[[test]]
name = "marian"
path = "tests/marian.rs"

[[test]]
name = "mbart"
path = "tests/mbart.rs"

[[test]]
name = "mobilebert"
path = "tests/mobilebert.rs"

[[test]]
name = "nllb"
path = "tests/nllb.rs"

[[test]]
name = "onnx"
path = "tests/onnx.rs"

[[test]]
name = "openai_gpt"
path = "tests/openai_gpt.rs"

[[test]]
name = "pegasus"
path = "tests/pegasus.rs"

[[test]]
name = "prophetnet"
path = "tests/prophetnet.rs"

[[test]]
name = "reformer"
path = "tests/reformer.rs"

[[test]]
name = "roberta"
path = "tests/roberta.rs"

[[test]]
name = "sentence_embeddings"
path = "tests/sentence_embeddings.rs"

[[test]]
name = "t5"
path = "tests/t5.rs"

[[test]]
name = "xlnet"
path = "tests/xlnet.rs"

[[bench]]
name = "generation_benchmark"
path = "benches/generation_benchmark.rs"
harness = false

[[bench]]
name = "squad_benchmark"
path = "benches/squad_benchmark.rs"
harness = false

[[bench]]
name = "sst2_benchmark"
path = "benches/sst2_benchmark.rs"
harness = false

[[bench]]
name = "summarization_benchmark"
path = "benches/summarization_benchmark.rs"
harness = false

[[bench]]
name = "tensor_operations_benchmark"
path = "benches/tensor_operations_benchmark.rs"
harness = false

[[bench]]
name = "token_classification_benchmark"
path = "benches/token_classification_benchmark.rs"
harness = false

[[bench]]
name = "translation_benchmark"
path = "benches/translation_benchmark.rs"
harness = false

[dependencies.cached-path]
version = "0.6"
optional = true
default-features = false

[dependencies.dirs]
version = "5"
optional = true

[dependencies.half]
version = "2"

[dependencies.lazy_static]
version = "1"
optional = true

[dependencies.ndarray]
version = "0.15"
optional = true

[dependencies.ordered-float]
version = "4.2.0"

[dependencies.ort]
version = "1.16.3"
features = ["half"]
optional = true
default-features = false

[dependencies.regex]
version = "1.10"

[dependencies.rust_tokenizers]
version = "8.1.1"

[dependencies.serde]
version = "1"
features = ["derive"]

[dependencies.serde_json]
version = "1"

[dependencies.tch]
version = "0.17.0"
features = ["download-libtorch"]

[dependencies.thiserror]
version = "1"

[dependencies.tokenizers]
version = "0.20"
features = ["onig"]
optional = true
default-features = false

[dependencies.uuid]
version = "1"
features = ["v4"]

[dev-dependencies.anyhow]
version = "1"

[dev-dependencies.criterion]
version = "0.5"

[dev-dependencies.csv]
version = "1"

[dev-dependencies.itertools]
version = "0.13.0"

[dev-dependencies.ort]
version = "1.16.3"
features = ["load-dynamic"]

[dev-dependencies.tempfile]
version = "3"

[dev-dependencies.tokio]
version = "1.35"
features = [
    "sync",
    "rt-multi-thread",
    "macros",
]

[dev-dependencies.tracing-subscriber]
version = "0.3"
features = [
    "env-filter",
    "fmt",
]
default-features = false

[features]
all-tests = []
default = [
    "remote",
    "default-tls",
]
default-tls = ["cached-path/default-tls"]
doc-only = ["tch/doc-only"]
download-libtorch = ["tch/download-libtorch"]
hf-tokenizers = ["tokenizers"]
onnx = [
    "ort",
    "ndarray",
]
remote = [
    "cached-path",
    "dirs",
    "lazy_static",
]
rustls-tls = ["cached-path/rustls-tls"]