commit 14ad6e0c95da0a362540b3ee443fb60cfc3e60c0
Author: Zack Newman <zack@philomathiclife.com>
Date: Tue, 7 Oct 2025 21:12:41 -0600
init
Diffstat:
| A | .gitignore | | | 2 | ++ |
| A | Cargo.toml | | | 204 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | LICENSE-APACHE | | | 177 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | LICENSE-MIT | | | 20 | ++++++++++++++++++++ |
| A | README.md | | | 244 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | src/args.rs | | | 1215 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | src/cargo.rs | | | 689 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | src/main.rs | | | 289 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | src/manifest.rs | | | 2471 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
| A | src/rustup.rs | | | 1212 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
10 files changed, 6523 insertions(+), 0 deletions(-)
diff --git a/.gitignore b/.gitignore
@@ -0,0 +1,2 @@
+Cargo.lock
+target/**
diff --git a/Cargo.toml b/Cargo.toml
@@ -0,0 +1,204 @@
+[package]
+authors = ["Zack Newman <zack@philomathiclife.com>"]
+categories = ["command-line-utilities", "development-tools::testing", "rust-patterns"]
+description = "Continuous integration for Clippy, unit tests, and doc tests for all possible features."
+documentation = "https://git.philomathiclife.com/ci-cargo/file/README.md.html"
+edition = "2024"
+keywords = ["cargo", "ci", "rust"]
+license = "MIT OR Apache-2.0"
+name = "ci-cargo"
+readme = "README.md"
+repository = "https://git.philomathiclife.com/repos/ci-cargo/"
+rust-version = "1.88.0"
+version = "0.1.0"
+
+[lints.rust]
+ambiguous_negative_literals = { level = "deny", priority = -1 }
+closure_returning_async_block = { level = "deny", priority = -1 }
+deprecated_safe = { level = "deny", priority = -1 }
+deref_into_dyn_supertrait = { level = "deny", priority = -1 }
+ffi_unwind_calls = { level = "deny", priority = -1 }
+future_incompatible = { level = "deny", priority = -1 }
+#fuzzy_provenance_casts = { level = "deny", priority = -1 }
+impl_trait_redundant_captures = { level = "deny", priority = -1 }
+keyword_idents = { level = "deny", priority = -1 }
+let_underscore = { level = "deny", priority = -1 }
+linker_messages = { level = "deny", priority = -1 }
+#lossy_provenance_casts = { level = "deny", priority = -1 }
+macro_use_extern_crate = { level = "deny", priority = -1 }
+meta_variable_misuse = { level = "deny", priority = -1 }
+missing_copy_implementations = { level = "deny", priority = -1 }
+missing_debug_implementations = { level = "deny", priority = -1 }
+missing_docs = { level = "deny", priority = -1 }
+#multiple_supertrait_upcastable = { level = "deny", priority = -1 }
+#must_not_suspend = { level = "deny", priority = -1 }
+non_ascii_idents = { level = "deny", priority = -1 }
+#non_exhaustive_omitted_patterns = { level = "deny", priority = -1 }
+nonstandard_style = { level = "deny", priority = -1 }
+redundant_imports = { level = "deny", priority = -1 }
+redundant_lifetimes = { level = "deny", priority = -1 }
+refining_impl_trait = { level = "deny", priority = -1 }
+rust_2018_compatibility = { level = "deny", priority = -1 }
+rust_2018_idioms = { level = "deny", priority = -1 }
+rust_2021_compatibility = { level = "deny", priority = -1 }
+rust_2024_compatibility = { level = "deny", priority = -1 }
+single_use_lifetimes = { level = "deny", priority = -1 }
+#supertrait_item_shadowing_definition = { level = "deny", priority = -1 }
+#supertrait_item_shadowing_usage = { level = "deny", priority = -1 }
+trivial_casts = { level = "deny", priority = -1 }
+trivial_numeric_casts = { level = "deny", priority = -1 }
+unit_bindings = { level = "deny", priority = -1 }
+unknown_or_malformed_diagnostic_attributes = { level = "deny", priority = -1 }
+unnameable_types = { level = "deny", priority = -1 }
+#unqualified_local_imports = { level = "deny", priority = -1 }
+unreachable_pub = { level = "deny", priority = -1 }
+unsafe_code = { level = "deny", priority = -1 }
+unstable_features = { level = "deny", priority = -1 }
+unused = { level = "deny", priority = -1 }
+unused_crate_dependencies = { level = "deny", priority = -1 }
+unused_import_braces = { level = "deny", priority = -1 }
+unused_lifetimes = { level = "deny", priority = -1 }
+unused_qualifications = { level = "deny", priority = -1 }
+unused_results = { level = "deny", priority = -1 }
+variant_size_differences = { level = "deny", priority = -1 }
+warnings = { level = "deny", priority = -1 }
+#ambiguous_negative_literals = "allow"
+#closure_returning_async_block = "allow"
+#deprecated_safe = "allow"
+#deref_into_dyn_supertrait = "allow"
+#ffi_unwind_calls = "allow"
+#future_incompatible = "allow"
+##fuzzy_provenance_casts = "allow"
+#impl_trait_redundant_captures = "allow"
+#keyword_idents = "allow"
+#let_underscore = "allow"
+#linker_messages = "allow"
+##lossy_provenance_casts = "allow"
+#macro_use_extern_crate = "allow"
+#meta_variable_misuse = "allow"
+#missing_copy_implementations = "allow"
+#missing_debug_implementations = "allow"
+#missing_docs = "allow"
+##multiple_supertrait_upcastable = "allow"
+##must_not_suspend = "allow"
+#non_ascii_idents = "allow"
+##non_exhaustive_omitted_patterns = "allow"
+#nonstandard_style = "allow"
+#redundant_imports = "allow"
+#redundant_lifetimes = "allow"
+#refining_impl_trait = "allow"
+#rust_2018_compatibility = "allow"
+#rust_2018_idioms = "allow"
+#rust_2021_compatibility = "allow"
+#rust_2024_compatibility = "allow"
+#single_use_lifetimes = "allow"
+##supertrait_item_shadowing_definition = "allow"
+##supertrait_item_shadowing_usage = "allow"
+#trivial_casts = "allow"
+#trivial_numeric_casts = "allow"
+#unit_bindings = "allow"
+#unknown_or_malformed_diagnostic_attributes = "allow"
+#unnameable_types = "allow"
+##unqualified_local_imports = "allow"
+#unreachable_pub = "allow"
+#unsafe_code = "allow"
+#unstable_features = "allow"
+#unused = "allow"
+#unused_crate_dependencies = "allow"
+#unused_import_braces = "allow"
+#unused_lifetimes = "allow"
+#unused_qualifications = "allow"
+#unused_results = "allow"
+#variant_size_differences = "allow"
+#warnings = "allow"
+#ambiguous_associated_items = "allow"
+#ambiguous_glob_imports = "allow"
+#arithmetic_overflow = "allow"
+#binary_asm_labels = "allow"
+#bindings_with_variant_name = "allow"
+#conflicting_repr_hints = "allow"
+#dangerous_implicit_autorefs = "allow"
+##default_overrides_default_fields = "allow"
+#elided_lifetimes_in_associated_constant = "allow"
+#enum_intrinsics_non_enums = "allow"
+#explicit_builtin_cfgs_in_flags = "allow"
+#ill_formed_attribute_input = "allow"
+#incomplete_include = "allow"
+#ineffective_unstable_trait_impl = "allow"
+#invalid_atomic_ordering = "allow"
+#invalid_doc_attributes = "allow"
+#invalid_from_utf8_unchecked = "allow"
+#invalid_null_arguments = "allow"
+#invalid_reference_casting = "allow"
+#invalid_type_param_default = "allow"
+#let_underscore_lock = "allow"
+#long_running_const_eval = "allow"
+#macro_expanded_macro_exports_accessed_by_absolute_paths = "allow"
+#mutable_transmutes = "allow"
+#named_asm_labels = "allow"
+#no_mangle_const_items = "allow"
+#overflowing_literals = "allow"
+#patterns_in_fns_without_body = "allow"
+#proc_macro_derive_resolution_fallback = "allow"
+#pub_use_of_private_extern_crate = "allow"
+#soft_unstable = "allow"
+##test_unstable_lint = "allow"
+#text_direction_codepoint_in_comment = "allow"
+#text_direction_codepoint_in_literal = "allow"
+#unconditional_panic = "allow"
+#undropped_manually_drops = "allow"
+#unknown_crate_types = "allow"
+#useless_deprecated = "allow"
+
+[lints.clippy]
+cargo = { level = "deny", priority = -1 }
+complexity = { level = "deny", priority = -1 }
+correctness = { level = "deny", priority = -1 }
+nursery = { level = "deny", priority = -1 }
+pedantic = { level = "deny", priority = -1 }
+perf = { level = "deny", priority = -1 }
+restriction = { level = "deny", priority = -1 }
+style = { level = "deny", priority = -1 }
+suspicious = { level = "deny", priority = -1 }
+# Noisy, opinionated, and likely don't prevent bugs or improve APIs.
+arbitrary_source_item_ordering = "allow"
+blanket_clippy_restriction_lints = "allow"
+implicit_return = "allow"
+min_ident_chars = "allow"
+missing_trait_methods = "allow"
+pathbuf_init_then_push = "allow"
+pub_with_shorthand = "allow"
+ref_patterns = "allow"
+redundant_pub_crate = "allow"
+return_and_then = "allow"
+single_call_fn = "allow"
+single_char_lifetime_names = "allow"
+unseparated_literal_suffix = "allow"
+#cargo = "allow"
+#complexity = "allow"
+#correctness = "allow"
+#deprecated = "allow"
+#nursery = "allow"
+#pedantic = "allow"
+#perf = "allow"
+#restriction = "allow"
+#style = "allow"
+#suspicious = "allow"
+
+[package.metadata.docs.rs]
+default-target = "x86_64-unknown-linux-gnu"
+targets = [
+ "aarch64-apple-darwin"
+]
+
+[dependencies]
+toml = { version = "0.9.7", default-features = false, features = ["parse"] }
+
+[target.'cfg(target_os = "openbsd")'.dependencies]
+priv_sep = { version = "3.0.0-alpha.2.1", default-features = false }
+
+[profile.release]
+codegen-units = 1
+lto = true
+panic = 'abort'
+strip = true
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/LICENSE-MIT b/LICENSE-MIT
@@ -0,0 +1,20 @@
+Copyright © 2025 Zack Newman
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+“Software”), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
@@ -0,0 +1,244 @@
+CI app for Rust code
+====================
+
+[<img alt="git" src="https://git.philomathiclife.com/badges/ci-cargo.svg" height="20">](https://git.philomathiclife.com/ci-cargo/log.html)
+[<img alt="crates.io" src="https://img.shields.io/crates/v/ci-cargo.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/ci-cargo)
+
+`ci-cargo` is a CLI application that runs [`cargo`](https://doc.rust-lang.org/cargo/index.html) with
+`clippy`, `t --tests`, and `t --doc` for all possible combinations of features defined in `Cargo.toml`.
+
+The toolchain(s) used depend on platform support for [`rustup`](https://rust-lang.github.io/rustup/), the existence
+of `rust-toolchain.toml`, the defined MSRV (if there is one), and whether the `--rustup-home` option is passed.
+Specifically, for platforms that don't support `rustup` and when `--rustup-home` is _not_ passed, the default
+toolchain is used (i.e., `cargo` is invoked as is without specifying a toolchain). For platforms that do support
+`rustup` or when `--rustup-home` is passed, `cargo +stable` is used to run the command(s) unless
+`rust-toolchain.toml` exists in the same directory as `Cargo.toml`, in which case `cargo` is invoked as is.
+Additionally, if `Cargo.toml` defines an MSRV that is semantically lower than the `stable` or default toolchain that
+was used, then `cargo +<MSRV>` is also used to run the command(s) on platforms that support `rustup` or when
+`--rustup-home` was passed.
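+
+For example, on a platform that supports `rustup`, a crate with no `rust-toolchain.toml` and
+`rust-version = "1.88.0"` would have each command run with `cargo +stable` and, provided `1.88.0` is lower than the
+current stable release, again with `cargo +1.88.0`.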
+
+`ci-cargo` avoids superfluous combinations of features. For example, if feature `foo` depends on feature `bar` and
+`bar` depends on feature `fizz`, then no combination of features that contains `foo` and `bar`, `foo` and `fizz`, or
+`bar` and `fizz` will be tested.
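+
+As a minimal illustration of this rule, consider a hypothetical `Cargo.toml` with:
+
+```toml
+[features]
+foo = ["bar"]
+bar = ["fizz"]
+fizz = []
+```
+
+Only the single-feature sets `foo`, `bar`, and `fizz` would be run, since any larger set would pair a feature with
+one it already implies.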
+
+When a command errors, `ci-cargo` terminates. Upon termination (successful or not), `ci-cargo` writes all _unique_
+messages that the invoked commands wrote to `stderr` to its own `stderr`, followed by the offending command in the
+case of an error. Nothing is written to `stdout` unless `--summary` was passed.
+
+## Why is this useful?
+
+The number of possible configurations grows exponentially with the number of features in `Cargo.toml`. This can
+easily cause a crate to not be tested with certain combinations of features. Instead of manually invoking `cargo`
+with each possible combination of features, `ci-cargo` handles it automatically. Additionally, it ensures the build
+works on both the stable (or default) toolchain _and_ the stated MSRV (if one is defined).
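+
+For instance, a crate with just eight independent features already has up to 2^8 = 256 possible feature sets to
+check.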
+
+## Commands
+
+* `<none>`: `cargo clippy` and `cargo t` are invoked for each combination of features.
+* `help`/`h`: Prints help message.
+* `version`/`v`: Prints version info.
+* `clippy`/`c`: `cargo clippy` is invoked for each combination of features.
+* `tests`/`t`: `cargo t --tests` is invoked for each combination of features.
+* `doc-tests`/`d`: `cargo t --doc` is invoked for each combination of features.
+
+## Options
+
+* `--all-targets`: `cargo clippy --all-targets` is invoked for each combination of features.
+* `--allow-implied-features`: Features implied by optional dependencies are allowed; by default, features must be
+  explicitly defined (e.g., `foo = ["dep:bar"]`) or an error will occur.
+* `--cargo-home <PATH>`: Sets the storage directory used by `cargo`.
+* `--cargo-path <PATH>`: Sets the directory to search for `cargo`. Defaults to `cargo`.
+* `--color`: `--color always` is passed to the above commands; otherwise, `--color never` is passed.
+* `--deny-warnings`: `cargo clippy -- -Dwarnings` is invoked for each combination of features.
+* `--dir <PATH>`: Changes the working directory to the passed path before executing. Without this, the current
+ directory and all ancestor directories are searched for `Cargo.toml` before changing the working directory to its
+ location.
+* `--ignore-compile-errors`: [`compile_error`](https://doc.rust-lang.org/core/macro.compile_error.html)s are ignored
+ and don't lead to termination.
+* `--ignored`: `cargo t --tests -- --ignored` is invoked for each combination of features.
+* `--include-ignored`: `cargo t --tests -- --include-ignored` is invoked for each combination of features.
+* `--rustup-home <PATH>`: Sets the storage directory used by `rustup`.
+* `--summary`: Writes the toolchain(s) used and the combinations of features run on to `stdout` on success.
+
+Any unique sequence of the above options is allowed to be passed after the command so long as the following
+conditions are met:
+
+* No options are allowed for the `help`/`h` or `version`/`v` commands.
+* `--all-targets` and `--deny-warnings` are allowed iff `clippy`/`c` or no command is passed.
+* `--ignored` is allowed iff `tests`/`t` or no command is passed and `--include-ignored` is not passed.
+* `--include-ignored` is allowed iff `tests`/`t` or no command is passed and `--ignored` is not passed.
+
+## `ci-cargo` in action
+
+```bash
+[zack@laptop example]$ cat Cargo.toml
+[package]
+authors = ["Johann Carl Friedrich Gauß <gauss@invalid.com>"]
+categories = ["mathematics"]
+description = "Example."
+documentation = "https://example.com/"
+edition = "2024"
+keywords = ["example"]
+license = "MIT OR Apache-2.0"
+name = "example"
+readme = "README.md"
+repository = "https://example.com/"
+rust-version = "1.89.0"
+version = "0.1.0"
+
+[lints.rust]
+ambiguous_negative_literals = { level = "deny", priority = -1 }
+closure_returning_async_block = { level = "deny", priority = -1 }
+deprecated_safe = { level = "deny", priority = -1 }
+deref_into_dyn_supertrait = { level = "deny", priority = -1 }
+ffi_unwind_calls = { level = "deny", priority = -1 }
+future_incompatible = { level = "deny", priority = -1 }
+impl_trait_redundant_captures = { level = "deny", priority = -1 }
+keyword_idents = { level = "deny", priority = -1 }
+let_underscore = { level = "deny", priority = -1 }
+linker_messages = { level = "deny", priority = -1 }
+macro_use_extern_crate = { level = "deny", priority = -1 }
+meta_variable_misuse = { level = "deny", priority = -1 }
+missing_copy_implementations = { level = "deny", priority = -1 }
+missing_debug_implementations = { level = "deny", priority = -1 }
+missing_docs = { level = "deny", priority = -1 }
+non_ascii_idents = { level = "deny", priority = -1 }
+nonstandard_style = { level = "deny", priority = -1 }
+redundant_imports = { level = "deny", priority = -1 }
+redundant_lifetimes = { level = "deny", priority = -1 }
+refining_impl_trait = { level = "deny", priority = -1 }
+rust_2018_compatibility = { level = "deny", priority = -1 }
+rust_2018_idioms = { level = "deny", priority = -1 }
+rust_2021_compatibility = { level = "deny", priority = -1 }
+rust_2024_compatibility = { level = "deny", priority = -1 }
+single_use_lifetimes = { level = "deny", priority = -1 }
+trivial_casts = { level = "deny", priority = -1 }
+trivial_numeric_casts = { level = "deny", priority = -1 }
+unit_bindings = { level = "deny", priority = -1 }
+unknown-or-malformed-diagnostic-attributes = { level = "deny", priority = -1 }
+unnameable_types = { level = "deny", priority = -1 }
+unreachable_pub = { level = "deny", priority = -1 }
+unsafe_code = { level = "deny", priority = -1 }
+unstable_features = { level = "deny", priority = -1 }
+unused = { level = "deny", priority = -1 }
+unused_crate_dependencies = { level = "deny", priority = -1 }
+unused_import_braces = { level = "deny", priority = -1 }
+unused_lifetimes = { level = "deny", priority = -1 }
+unused_qualifications = { level = "deny", priority = -1 }
+unused_results = { level = "deny", priority = -1 }
+variant_size_differences = { level = "deny", priority = -1 }
+warnings = { level = "deny", priority = -1 }
+
+[lints.clippy]
+cargo = { level = "deny", priority = -1 }
+complexity = { level = "deny", priority = -1 }
+correctness = { level = "deny", priority = -1 }
+nursery = { level = "deny", priority = -1 }
+pedantic = { level = "deny", priority = -1 }
+perf = { level = "deny", priority = -1 }
+restriction = { level = "deny", priority = -1 }
+style = { level = "deny", priority = -1 }
+suspicious = { level = "deny", priority = -1 }
+blanket_clippy_restriction_lints = "allow"
+implicit_return = "allow"
+
+[dependencies]
+buzz = { version = "0.1.0", default-features = false, optional = true }
+
+[features]
+buzz = ["dep:buzz"]
+default = ["foo"]
+foo = []
+bar = ["fizz"]
+fizz = []
+[zack@laptop example]$ ci-cargo --summary
+Toolchains used: cargo +stable and cargo +1.89.0
+Features used:
+buzz,fizz,foo
+fizz,foo
+bar,buzz,foo
+buzz,foo
+bar,foo
+foo
+buzz,default,fizz
+default,fizz
+buzz,fizz
+fizz
+bar,buzz,default
+buzz,default
+bar,default
+default
+bar,buzz
+buzz
+bar
+
+[zack@laptop example]$ ci-cargo clippy --deny-warnings --ignore-compile-errors
+[zack@laptop ~]$ ci-cargo t --allow-implied-features --cargo-home ~/.cargo/ --cargo-path ~/.cargo/bin --dir ~/example/ --ignored --rustup-home ~/.rustup/
+[zack@laptop ~]$ ci-cargo v
+ci-cargo 0.1.0
+[zack@laptop example]$ ci-cargo --summary d
+d is an unknown argument. See ci-cargo help for more information.
+```
+
+## Limitations
+
+There is a hard limit on the number of features allowed. Specifically, the number of features can't exceed the
+number of bits that make up a pointer; however, practical limits will almost always be a factor long before hitting
+such a hard limit due to the exponential effect features have.
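+
+On a 64-bit target, for instance, the hard limit is 64 features; but even 20 features can already mean on the order
+of a million feature combinations, so the practical limit is reached far sooner.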
+
+No attempt is made to distinguish packages from workspaces; thus running `ci-cargo` in the root directory of a
+workspace may not work as intended.
+
+Cyclic and redundant features are forbidden. For example, the below snippets from `Cargo.toml` files will cause an
+error:
+
+```toml
+[features]
+# Loops are forbidden by `cargo`, so this is not an additional limitation.
+a = ["a"]
+[features]
+# Cyclic features are disallowed even though `cargo` allows them.
+a = ["b"]
+b = ["a"]
+[features]
+# `a` should more concisely be assigned `["b"]` since `b` already depends on `c`.
+a = ["b", "c"]
+b = ["c"]
+c = []
+```
+
+## Minimum Supported Rust Version (MSRV)
+
+This will frequently be updated to be the same as stable. Specifically, any time stable is updated and that
+update has "useful" features or compilation no longer succeeds (e.g., due to new compiler lints), then MSRV
+will be updated.
+
+MSRV changes will correspond to a SemVer patch version bump pre-`1.0.0`; otherwise a minor version bump.
+
+## SemVer Policy
+
+* All on-by-default features of this library are covered by SemVer
+* MSRV is considered exempt from SemVer as noted above
+
+## License
+
+Licensed under either of
+
+* Apache License, Version 2.0 ([LICENSE-APACHE](https://www.apache.org/licenses/LICENSE-2.0))
+* MIT license ([LICENSE-MIT](https://opensource.org/licenses/MIT))
+
+at your option.
+
+## Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you,
+as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
+
+Before any PR is sent, `ci-cargo --all-targets --include-ignored` should be run on itself. Additionally
+`cargo +nightly doc` should be run to ensure documentation can be built.
+
+### Status
+
+The crate is only tested on the `x86_64-unknown-linux-gnu`, `x86_64-unknown-openbsd`, and `aarch64-apple-darwin`
+targets, but it should work on most platforms.
diff --git a/src/args.rs b/src/args.rs
@@ -0,0 +1,1215 @@
+use super::{
+ cargo::{CargoErr, Clippy, Options, TestKind, Tests, Toolchain},
+ manifest::PowerSet,
+};
+use std::{
+ ffi::OsString,
+ io::{Error, StderrLock, Write as _},
+ path::PathBuf,
+};
+/// Help message.
+pub(crate) const HELP_MSG: &str = "Continuous integration of all features using cargo
+
+Usage: ci-cargo [COMMAND] [OPTIONS]
+
+Commands:
+ <none> cargo clippy and cargo t
+ help, h This message
+ version, v Prints version info
+ clippy, c cargo clippy
+ tests, t cargo t --tests
+ doc-tests, d cargo t --doc
+
+Options:
+ --all-targets --all-targets is passed to cargo clippy
+ --allow-implied-features Allow implied features from optional dependencies
+ --cargo-home <PATH> Set the storage directory used by cargo
+ --cargo-path <PATH> Set the path cargo is in. Defaults to cargo
+ --color --color always is passed to each command; otherwise --color never is passed
+ --deny-warnings -Dwarnings is passed to cargo clippy
+ --dir <PATH> Set the working directory
+ --ignore-compile-errors compile_error!s are ignored
+ --ignored --ignored is passed to cargo t --tests
+ --include-ignored --include-ignored is passed to cargo t --tests
+ --rustup-home <PATH> Set the storage directory used by rustup
+ --summary Writes the toolchain(s) used and the combinations of features run on
+
+Any unique sequence of the above options is allowed so long as the following
+conditions are met:
+
+* no options are allowed for the help/h or version/v commands
+* --all-targets and --deny-warnings are allowed iff clippy/c or no command is passed
+* --ignored is allowed iff tests/t or no command is passed and --include-ignored
+ is not passed
+* --include-ignored is allowed iff tests/t or no command is passed and --ignored
+ is not passed
+
+ci-cargo will run the appropriate command(s) for all possible combinations of features.
+If an error occurs, ci-cargo will terminate writing the error(s) and the offending command
+to stderr. If successful and --summary was passed, then the toolchain(s) used and the
+combinations of features run will be written to stdout; otherwise stdout is never written
+to.
+";
+/// `"help"`.
+const HELP: &str = "help";
+/// `"h"`.
+const H: &str = "h";
+/// `"version"`.
+const VERSION: &str = "version";
+/// `"v"`.
+const V: &str = "v";
+/// `"clippy"`.
+const CLIPPY: &str = "clippy";
+/// `"c"`.
+const C: &str = "c";
+/// `"tests"`.
+const TESTS: &str = "tests";
+/// `"t"`.
+const T: &str = "t";
+/// `"doc-tests"`.
+const DOC_TESTS: &str = "doc-tests";
+/// `"d"`.
+const D: &str = "d";
+/// `"--all-targets"`.
+const ALL_TARGETS: &str = "--all-targets";
+/// `"--allow-implied-features"`.
+const ALLOW_IMPLIED_FEATURES: &str = "--allow-implied-features";
+/// `"--cargo-home"`.
+const CARGO_HOME: &str = "--cargo-home";
+/// `"--cargo-path"`.
+const CARGO_PATH: &str = "--cargo-path";
+/// `"--color"`.
+const COLOR: &str = "--color";
+/// `"--deny-warnings"`.
+const DENY_WARNINGS: &str = "--deny-warnings";
+/// `"--dir"`.
+const DIR: &str = "--dir";
+/// `"--ignore-compile-errors"`.
+const IGNORE_COMPILE_ERRORS: &str = "--ignore-compile-errors";
+/// `"--ignored"`.
+const IGNORED: &str = "--ignored";
+/// `"--include-ignored"`.
+const INCLUDE_IGNORED: &str = "--include-ignored";
+/// `"--rustup-home"`.
+const RUSTUP_HOME: &str = "--rustup-home";
+/// `"--summary"`.
+const SUMMARY: &str = "--summary";
+/// Error returned when parsing arguments passed to the application.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum ArgsErr {
+ /// Error when no arguments exist.
+ NoArgs,
+ /// Error when an unknown argument is passed. The contained [`OsString`] is the value of the unknown command
+ /// or option.
+ UnknownArg(OsString),
+ /// Error when an option is passed more than once. The contained [`OsString`] is the duplicate argument.
+ DuplicateOption(OsString),
+ /// Error when `help` is passed followed by a non-empty sequence of arguments.
+ HelpWithArgs,
+ /// Error when `version` is passed followed by a non-empty sequence of arguments.
+ VersionWithArgs,
+ /// Error when `--dir` is passed with no file path to the directory `ci-cargo` should run in.
+ MissingDirPath,
+ /// Error when `--cargo-path` is passed with no file path to the directory `cargo` should be located in.
+ MissingCargoPath,
+ /// Error when `--cargo-home` is passed with no file path to the storage directory `cargo` uses.
+ MissingCargoHome,
+ /// Error when `--rustup-home` is passed with no file path to the storage directory `rustup` uses.
+ MissingRustupHome,
+ /// Error when `--all-targets` is passed for `tests` or `doc-tests`.
+ AllTargetsTests,
+ /// Error when `--deny-warnings` is passed for `tests` or `doc-tests`.
+ DenyWarningsTests,
+ /// Error when `--ignored` or `--include-ignored` is passed for `clippy` or `doc-tests`.
+ IgnoredClippyDoc,
+ /// Error when `--ignored` and `--include-ignored` are passed.
+ IgnoredIncludeIgnored,
+}
+impl ArgsErr {
+ /// Writes `self` to `stderr`.
+ pub(crate) fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ const FINAL_SENTENCE: &str = " See ci-cargo help for more information.";
+ match self {
+ Self::NoArgs => writeln!(
+ stderr,
+ "No arguments exist including the name of the process itself.{FINAL_SENTENCE}"
+ ),
+ Self::UnknownArg(arg) => {
+ writeln!(
+ stderr,
+ "{} is an unknown argument.{FINAL_SENTENCE}",
+ arg.display()
+ )
+ }
+ Self::DuplicateOption(arg) => {
+ writeln!(
+ stderr,
+ "{} was passed more than once.{FINAL_SENTENCE}",
+ arg.display()
+ )
+ }
+ Self::HelpWithArgs => {
+ writeln!(
+ stderr,
+ "{HELP} was passed with one or more arguments.{FINAL_SENTENCE}",
+ )
+ }
+ Self::VersionWithArgs => {
+ writeln!(
+ stderr,
+ "{VERSION} was passed with one or more arguments.{FINAL_SENTENCE}",
+ )
+ }
+ Self::MissingDirPath => {
+ writeln!(
+ stderr,
+ "{DIR} was passed without a path to the directory ci-cargo should run in.{FINAL_SENTENCE}"
+ )
+ }
+ Self::MissingCargoPath => {
+ writeln!(
+ stderr,
+ "{CARGO_PATH} was passed without a path to the directory cargo is located in.{FINAL_SENTENCE}"
+ )
+ }
+ Self::MissingCargoHome => {
+ writeln!(
+ stderr,
+ "{CARGO_HOME} was passed without a path to the cargo storage directory.{FINAL_SENTENCE}"
+ )
+ }
+ Self::MissingRustupHome => {
+ writeln!(
+ stderr,
+ "{RUSTUP_HOME} was passed without a path to the rustup storage directory.{FINAL_SENTENCE}"
+ )
+ }
+ Self::AllTargetsTests => {
+ writeln!(
+ stderr,
+ "{ALL_TARGETS} was passed with {TESTS} or {DOC_TESTS}.{FINAL_SENTENCE}"
+ )
+ }
+ Self::DenyWarningsTests => {
+ writeln!(
+ stderr,
+ "{DENY_WARNINGS} was passed with {TESTS} or {DOC_TESTS}.{FINAL_SENTENCE}"
+ )
+ }
+ Self::IgnoredClippyDoc => {
+ writeln!(
+ stderr,
+ "{IGNORED} or {INCLUDE_IGNORED} was passed with {CLIPPY} or {DOC_TESTS}.{FINAL_SENTENCE}"
+ )
+ }
+ Self::IgnoredIncludeIgnored => {
+ writeln!(
+ stderr,
+ "{IGNORED} and {INCLUDE_IGNORED} were both passed.{FINAL_SENTENCE}"
+ )
+ }
+ }
+ }
+}
+/// Options to use for `cargo`.
+#[expect(
+ clippy::struct_excessive_bools,
+ reason = "not a problem. arguable false positive based on its use"
+)]
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) struct Opts {
+ /// The directory to run `ci-cargo` in.
+ pub exec_dir: Option<PathBuf>,
+ /// Storage directory for `rustup`.
+ pub rustup_home: Option<PathBuf>,
+ /// Path to `cargo`.
+ pub cargo_path: PathBuf,
+ /// Storage directory for `cargo`.
+ pub cargo_home: Option<PathBuf>,
+ /// `true` iff color should be outputted.
+ pub color: bool,
+ /// `true` iff implied features should be allowed and tested.
+ pub allow_implied_features: bool,
+ /// `true` iff `compile_error`s should be ignored.
+ pub ignore_compile_errors: bool,
+ /// `true` iff the toolchains used and combinations of features run on should be written
+ /// to `stdout` upon success.
+ pub summary: bool,
+}
+/// Controls if `cargo t --tests -- --ignored` or `cargo t --tests -- --include-ignored` should be run.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+#[derive(Clone, Copy)]
+pub(crate) enum Ignored {
+ /// Don't run any `ignore` tests.
+ None,
+ /// Only run `ignore` tests.
+ Only,
+ /// Run all tests.
+ Include,
+}
+/// `cargo` command(s) we should run.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum Cmd {
+ /// Execute all `cargo` commands.
+ ///
+ /// The first `bool` is `true` iff `--all-targets` was passed,
+ /// the second `bool` is `true` iff `--deny-warnings` was passed, and
+ /// the `Ignored` represents if `--ignored` or `--include-ignored` were
+ /// passed.
+ All(bool, bool, Ignored),
+ /// `cargo clippy`.
+ ///
+ /// The first `bool` is `true` iff `--all-targets` was passed,
+ /// and the second `bool` is `true` iff `--deny-warnings` was passed.
+ Clippy(bool, bool),
+ /// `cargo t --tests`.
+ Tests(Ignored),
+ /// `cargo t --doc`.
+ DocTests,
+}
+impl Cmd {
+ /// Runs the appropriate `cargo` command for all features in `power_set`.
+ ///
+ /// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
+ /// later used.
+ pub(crate) fn run<'a>(
+ self,
+ options: Options<'a, '_>,
+ msrv: Option<&'a str>,
+ power_set: &mut PowerSet<'_>,
+ ) -> Result<(), Box<CargoErr>> {
+ match self {
+ Self::All(all_targets, deny_warning, ignored_tests) => Self::run_all(
+ msrv,
+ options,
+ all_targets,
+ deny_warning,
+ ignored_tests,
+ power_set,
+ ),
+ Self::Clippy(all_targets, deny_warnings) => {
+ Self::run_clippy(msrv, options, all_targets, deny_warnings, power_set)
+ }
+ Self::Tests(ignored_tests) => {
+ Self::run_unit_tests(msrv, options, ignored_tests, power_set)
+ }
+ Self::DocTests => Self::run_doc_tests(msrv, options, power_set),
+ }
+ }
+ /// Runs `cargo clippy` and `cargo t` for all features in `power_set`.
+ ///
+ /// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
+ /// later used.
+ #[expect(
+ clippy::panic_in_result_fn,
+ reason = "want to crash when there is a bug"
+ )]
+ fn run_all<'a>(
+ msrv: Option<&'a str>,
+ mut options: Options<'a, '_>,
+ all_targets: bool,
+ deny_warnings: bool,
+ ignored_tests: Ignored,
+ power_set: &mut PowerSet<'_>,
+ ) -> Result<(), Box<CargoErr>> {
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ match Tests::run(&mut options, TestKind::All(ignored_tests), set) {
+ // Since we are running `cargo t`, a no library error won't happen.
+ Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
+ Err(e) => return Err(e),
+ }
+ }
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ match Tests::run(&mut options, TestKind::All(ignored_tests), set) {
+ // Since we are running `cargo t`, a no library error won't happen.
+ Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
+ Err(e) => return Err(e),
+ }
+ }
+ }
+ Ok(())
+ }
+ /// Runs `cargo clippy` for all features in `power_set`.
+ ///
+ /// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
+ /// later used.
+ fn run_clippy<'a>(
+ msrv: Option<&'a str>,
+ mut options: Options<'a, '_>,
+ all_targets: bool,
+ deny_warnings: bool,
+ power_set: &mut PowerSet<'_>,
+ ) -> Result<(), Box<CargoErr>> {
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ }
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ }
+ }
+ Ok(())
+ }
+ /// Runs `cargo t --tests` for all features in `power_set`.
+ ///
+ /// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
+ /// later used.
+ #[expect(
+ clippy::panic_in_result_fn,
+ reason = "want to crash when there is a bug"
+ )]
+ fn run_unit_tests<'a>(
+ msrv: Option<&'a str>,
+ mut options: Options<'a, '_>,
+ ignored_tests: Ignored,
+ power_set: &mut PowerSet<'_>,
+ ) -> Result<(), Box<CargoErr>> {
+ while let Some(set) = power_set.next_set() {
+ match Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
+ // Since we are running `cargo t --tests`, a no library error won't happen.
+ Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
+ Err(e) => return Err(e),
+ }
+ }
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ match Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
+ // Since we are running `cargo t --tests`, a no library error won't happen.
+ Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
+ Err(e) => return Err(e),
+ }
+ }
+ }
+ Ok(())
+ }
+ /// Runs `cargo t --doc` for all features in `power_set`.
+ ///
+ /// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
+ /// later used.
+ #[expect(
+ clippy::panic_in_result_fn,
+ reason = "want to crash when there is a bug"
+ )]
+ fn run_doc_tests<'a>(
+ msrv: Option<&'a str>,
+ mut options: Options<'a, '_>,
+ power_set: &mut PowerSet<'_>,
+ ) -> Result<(), Box<CargoErr>> {
+ while let Some(set) = power_set.next_set() {
+ match Tests::run(&mut options, TestKind::Doc, set) {
+ Ok(no_library) => {
+ if no_library {
+ // We don't want to continue invoking `cargo t --doc` once we know this is not a library
+ // target.
+ return Ok(());
+ }
+ }
+ Err(e) => return Err(e),
+ }
+ }
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ match Tests::run(&mut options, TestKind::Doc, set) {
+ // If there is no library target, then we would have been informed above.
+ Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
+ Err(e) => return Err(e),
+ }
+ }
+ }
+ Ok(())
+ }
+}
+/// `ci-cargo` command to run.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum MetaCmd {
+ /// Write help to stdout.
+ Help,
+ /// Write version info to stdout.
+ Version,
+ /// Execute `cargo` command(s).
+ Cargo(Cmd, Opts),
+}
+/// Helper to store options from the command line.
+#[expect(
+ clippy::struct_excessive_bools,
+ reason = "used exclusively in the recursive function MetaCmd::from_args::extract_options"
+)]
+#[derive(Default)]
+struct ArgOpts {
+ /// `--all-targets`.
+ all_targets: bool,
+ /// `--allow-implied-features`.
+ allow_implied_features: bool,
+ /// `--cargo-home` along with the path.
+ cargo_home: Option<PathBuf>,
+ /// `--cargo-path` along with the path.
+ cargo_path: Option<PathBuf>,
+ /// `--color`.
+ color: bool,
+ /// `--deny-warnings`.
+ deny_warnings: bool,
+ /// `--dir` along with the path.
+ dir: Option<PathBuf>,
+ /// `--ignore-compile-errors`.
+ ignore_compile_errors: bool,
+ /// `--ignored`.
+ ignored: bool,
+ /// `--include-ignored`.
+ include_ignored: bool,
+ /// `--rustup-home` along with the path.
+ rustup_home: Option<PathBuf>,
+ /// `--summary`.
+ summary: bool,
+}
+impl ArgOpts {
+ /// Returns `Ignored` based on [`Self::ignored`] and [`Self::include_ignored`].
+ const fn ignored(&self) -> Ignored {
+ if self.ignored {
+ Ignored::Only
+ } else if self.include_ignored {
+ Ignored::Include
+ } else {
+ Ignored::None
+ }
+ }
+}
+/// `"cargo"`.
+const CARGO: &str = "cargo";
+/// Returns `"cargo"`.
+fn cargo_path() -> PathBuf {
+ CARGO.to_owned().into()
+}
+impl From<ArgOpts> for Opts {
+ fn from(value: ArgOpts) -> Self {
+ Self {
+ exec_dir: value.dir,
+ rustup_home: value.rustup_home,
+ cargo_path: value.cargo_path.unwrap_or_else(cargo_path),
+ cargo_home: value.cargo_home,
+ color: value.color,
+ allow_implied_features: value.allow_implied_features,
+ ignore_compile_errors: value.ignore_compile_errors,
+ summary: value.summary,
+ }
+ }
+}
+impl MetaCmd {
+ /// Recursively extracts options from `args`.
+ ///
+ /// This must only be called from [`Self::from_args`].
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ #[expect(clippy::else_if_without_else, reason = "more concise")]
+ #[expect(
+ clippy::too_many_lines,
+ reason = "a lot of options to extract, so expected. 104 lines isn't too bad either"
+ )]
+ fn extract_options<T: Iterator<Item = OsString>>(
+ opts: &mut ArgOpts,
+ arg: Option<OsString>,
+ mut args: T,
+ ) -> Result<(), ArgsErr> {
+ arg.map_or_else(
+ || Ok(()),
+ |val| {
+ if let Some(val_str) = val.to_str() {
+ match val_str {
+ ALL_TARGETS => {
+ if opts.all_targets {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.all_targets = true;
+ }
+ ALLOW_IMPLIED_FEATURES => {
+ if opts.allow_implied_features {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.allow_implied_features = true;
+ }
+ CARGO_HOME => {
+ if opts.cargo_home.is_some() {
+ return Err(ArgsErr::DuplicateOption(val));
+ } else if let Some(path) = args.next() {
+ opts.cargo_home = Some(path.into());
+ } else {
+ return Err(ArgsErr::MissingCargoHome);
+ }
+ }
+ CARGO_PATH => {
+ if opts.cargo_path.is_some() {
+ return Err(ArgsErr::DuplicateOption(val));
+ } else if let Some(p) = args.next() {
+ // This won't overflow since `p.len() + 6 < usize::MAX`: `p.len()` is at most
+ // `isize::MAX`, and `isize::MAX + 6 < usize::MAX` because `usize` is at least
+ // 16 bits wide.
+ let mut path = PathBuf::with_capacity(CARGO.len() + 1 + p.len());
+ path.push(p);
+ path.push(CARGO);
+ opts.cargo_path = Some(path);
+ } else {
+ return Err(ArgsErr::MissingCargoPath);
+ }
+ }
+ COLOR => {
+ if opts.color {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.color = true;
+ }
+ DENY_WARNINGS => {
+ if opts.deny_warnings {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.deny_warnings = true;
+ }
+ DIR => {
+ if opts.dir.is_some() {
+ return Err(ArgsErr::DuplicateOption(val));
+ } else if let Some(path) = args.next() {
+ opts.dir = Some(path.into());
+ } else {
+ return Err(ArgsErr::MissingDirPath);
+ }
+ }
+ IGNORE_COMPILE_ERRORS => {
+ if opts.ignore_compile_errors {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.ignore_compile_errors = true;
+ }
+ IGNORED => {
+ if opts.ignored {
+ return Err(ArgsErr::DuplicateOption(val));
+ } else if opts.include_ignored {
+ return Err(ArgsErr::IgnoredIncludeIgnored);
+ }
+ opts.ignored = true;
+ }
+ INCLUDE_IGNORED => {
+ if opts.include_ignored {
+ return Err(ArgsErr::DuplicateOption(val));
+ } else if opts.ignored {
+ return Err(ArgsErr::IgnoredIncludeIgnored);
+ }
+ opts.include_ignored = true;
+ }
+ RUSTUP_HOME => {
+ if opts.rustup_home.is_some() {
+ return Err(ArgsErr::DuplicateOption(val));
+ } else if let Some(path) = args.next() {
+ opts.rustup_home = Some(path.into());
+ } else {
+ return Err(ArgsErr::MissingRustupHome);
+ }
+ }
+ SUMMARY => {
+ if opts.summary {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.summary = true;
+ }
+ _ => return Err(ArgsErr::UnknownArg(val)),
+ }
+ Self::extract_options(opts, args.next(), args)
+ } else {
+ Err(ArgsErr::UnknownArg(val))
+ }
+ },
+ )
+ }
+ /// Returns data we need by reading the supplied CLI arguments.
+ pub(crate) fn from_args<T: Iterator<Item = OsString>>(mut args: T) -> Result<Self, ArgsErr> {
+ args.next().ok_or(ArgsErr::NoArgs).and_then(|_| {
+ args.next().map_or_else(
+ || {
+ Ok(Self::Cargo(
+ Cmd::All(false, false, Ignored::None),
+ Opts {
+ exec_dir: None,
+ rustup_home: None,
+ cargo_path: cargo_path(),
+ cargo_home: None,
+ color: false,
+ allow_implied_features: false,
+ ignore_compile_errors: false,
+ summary: false,
+ },
+ ))
+ },
+ |arg| {
+ if let Some(arg_str) = arg.to_str() {
+ match arg_str {
+ H | HELP => {
+ if args.next().is_none() {
+ Ok(Self::Help)
+ } else {
+ Err(ArgsErr::HelpWithArgs)
+ }
+ }
+ V | VERSION => {
+ if args.next().is_none() {
+ Ok(Self::Version)
+ } else {
+ Err(ArgsErr::VersionWithArgs)
+ }
+ }
+ C | CLIPPY => {
+ let mut opts = ArgOpts::default();
+ Self::extract_options(&mut opts, args.next(), &mut args).and_then(
+ |()| {
+ if opts.ignored || opts.include_ignored {
+ Err(ArgsErr::IgnoredClippyDoc)
+ } else {
+ Ok(Self::Cargo(
+ Cmd::Clippy(opts.all_targets, opts.deny_warnings),
+ opts.into(),
+ ))
+ }
+ },
+ )
+ }
+ T | TESTS => {
+ let mut opts = ArgOpts::default();
+ Self::extract_options(&mut opts, args.next(), &mut args).and_then(
+ |()| {
+ if opts.all_targets {
+ Err(ArgsErr::AllTargetsTests)
+ } else if opts.deny_warnings {
+ Err(ArgsErr::DenyWarningsTests)
+ } else {
+ let ignored = opts.ignored();
+ Ok(Self::Cargo(Cmd::Tests(ignored), opts.into()))
+ }
+ },
+ )
+ }
+ D | DOC_TESTS => {
+ let mut opts = ArgOpts::default();
+ Self::extract_options(&mut opts, args.next(), &mut args).and_then(
+ |()| {
+ if opts.all_targets {
+ Err(ArgsErr::AllTargetsTests)
+ } else if opts.deny_warnings {
+ Err(ArgsErr::DenyWarningsTests)
+ } else if opts.ignored || opts.include_ignored {
+ Err(ArgsErr::IgnoredClippyDoc)
+ } else {
+ Ok(Self::Cargo(Cmd::DocTests, opts.into()))
+ }
+ },
+ )
+ }
+ _ => {
+ let mut opts = ArgOpts::default();
+ Self::extract_options(&mut opts, Some(arg), &mut args).map(|()| {
+ Self::Cargo(
+ Cmd::All(
+ opts.all_targets,
+ opts.deny_warnings,
+ opts.ignored(),
+ ),
+ opts.into(),
+ )
+ })
+ }
+ }
+ } else {
+ Err(ArgsErr::UnknownArg(arg))
+ }
+ },
+ )
+ })
+ }
+}
+#[cfg(test)]
+mod tests {
+ use super::{ArgsErr, Cmd, Ignored, MetaCmd, Opts, OsString, PathBuf};
+ #[cfg(unix)]
+ use std::os::unix::ffi::OsStringExt as _;
+ #[expect(
+ clippy::cognitive_complexity,
+ clippy::too_many_lines,
+ reason = "want to test for a lot of things"
+ )]
+ #[expect(
+ clippy::iter_on_empty_collections,
+ clippy::iter_on_single_items,
+ reason = "want to test for it"
+ )]
+ #[test]
+ fn arg_parsing() {
+ assert_eq!(MetaCmd::from_args([].into_iter()), Err(ArgsErr::NoArgs));
+ // We always ignore the first argument.
+ assert_eq!(
+ MetaCmd::from_args([OsString::new()].into_iter()),
+ Ok(MetaCmd::Cargo(
+ Cmd::All(false, false, Ignored::None),
+ Opts {
+ exec_dir: None,
+ rustup_home: None,
+ cargo_path: "cargo".to_owned().into(),
+ cargo_home: None,
+ color: false,
+ allow_implied_features: false,
+ ignore_compile_errors: false,
+ summary: false,
+ }
+ )),
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), OsString::new()].into_iter()),
+ Err(ArgsErr::UnknownArg(OsString::new()))
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), OsString::new()].into_iter()),
+ Err(ArgsErr::UnknownArg(OsString::new()))
+ );
+ // Invalid UTF-8 errors gracefully.
+ #[cfg(unix)]
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), OsString::from_vec(vec![255])].into_iter()),
+ Err(ArgsErr::UnknownArg(OsString::from_vec(vec![255])))
+ );
+ // Whitespace is not ignored.
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), " clippy".to_owned().into()].into_iter()),
+ Err(ArgsErr::UnknownArg(" clippy".to_owned().into()))
+ );
+ // We parse in a case-sensitive way.
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "Clippy".to_owned().into()].into_iter()),
+ Err(ArgsErr::UnknownArg("Clippy".to_owned().into()))
+ );
+ // We require options to be after the command (if one was passed).
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "--summary".to_owned().into(),
+ "clippy".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::UnknownArg("clippy".to_owned().into()))
+ );
+ // `ArgsErr::DuplicateOption` has higher priority than disallowed option errors.
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "t".to_owned().into(),
+ "--deny-warnings".to_owned().into(),
+ "--deny-warnings".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::DuplicateOption(
+ "--deny-warnings".to_owned().into()
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "h".to_owned().into(),
+ "--summary".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::HelpWithArgs)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "version".to_owned().into(),
+ "foo".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::VersionWithArgs)
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "--cargo-path".to_owned().into()].into_iter()),
+ Err(ArgsErr::MissingCargoPath)
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "--cargo-home".to_owned().into()].into_iter()),
+ Err(ArgsErr::MissingCargoHome)
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "--rustup-home".to_owned().into()].into_iter()),
+ Err(ArgsErr::MissingRustupHome)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "t".to_owned().into(),
+ "--all-targets".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::AllTargetsTests)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "d".to_owned().into(),
+ "--all-targets".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::AllTargetsTests)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "tests".to_owned().into(),
+ "--deny-warnings".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::DenyWarningsTests)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "doc-tests".to_owned().into(),
+ "--deny-warnings".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::DenyWarningsTests)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "c".to_owned().into(),
+ "--ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::IgnoredClippyDoc)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "d".to_owned().into(),
+ "--ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::IgnoredClippyDoc)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "clippy".to_owned().into(),
+ "--include-ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::IgnoredClippyDoc)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "doc-tests".to_owned().into(),
+ "--include-ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::IgnoredClippyDoc)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "tests".to_owned().into(),
+ "--ignored".to_owned().into(),
+ "--include-ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::IgnoredIncludeIgnored)
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "--ignored".to_owned().into(),
+ "--include-ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Err(ArgsErr::IgnoredIncludeIgnored)
+ );
+        // When paths are passed, no attempt is made to interpret them. The file name `cargo` is
+        // unconditionally appended to the value passed to `--cargo-path`.
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "--all-targets".to_owned().into(),
+ "--allow-implied-features".to_owned().into(),
+ "--cargo-home".to_owned().into(),
+ "--ignored".to_owned().into(),
+ "--cargo-path".to_owned().into(),
+ "cargo".to_owned().into(),
+ "--color".to_owned().into(),
+ "--deny-warnings".to_owned().into(),
+ "--dir".to_owned().into(),
+ OsString::new(),
+ "--ignore-compile-errors".to_owned().into(),
+ "--include-ignored".to_owned().into(),
+ "--rustup-home".to_owned().into(),
+ OsString::new(),
+ "--summary".to_owned().into(),
+ ]
+ .into_iter()
+ ),
+ Ok(MetaCmd::Cargo(
+ Cmd::All(true, true, Ignored::Include),
+ Opts {
+ exec_dir: Some(PathBuf::new()),
+ rustup_home: Some(PathBuf::new()),
+ cargo_home: Some("--ignored".to_owned().into()),
+ cargo_path: "cargo/cargo".to_owned().into(),
+ color: true,
+ allow_implied_features: true,
+ ignore_compile_errors: true,
+ summary: true,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "c".to_owned().into(),
+ "--all-targets".to_owned().into(),
+ "--allow-implied-features".to_owned().into(),
+ "--cargo-home".to_owned().into(),
+ "--ignored".to_owned().into(),
+ "--cargo-path".to_owned().into(),
+ "cargo".to_owned().into(),
+ "--color".to_owned().into(),
+ "--deny-warnings".to_owned().into(),
+ "--dir".to_owned().into(),
+ OsString::new(),
+ "--ignore-compile-errors".to_owned().into(),
+ "--rustup-home".to_owned().into(),
+ "a".to_owned().into(),
+ "--summary".to_owned().into(),
+ ]
+ .into_iter()
+ ),
+ Ok(MetaCmd::Cargo(
+ Cmd::Clippy(true, true,),
+ Opts {
+ exec_dir: Some(PathBuf::new()),
+ rustup_home: Some("a".to_owned().into()),
+ cargo_home: Some("--ignored".to_owned().into()),
+ cargo_path: "cargo/cargo".to_owned().into(),
+ color: true,
+ allow_implied_features: true,
+ ignore_compile_errors: true,
+ summary: true,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "clippy".to_owned().into(),
+ "--all-targets".to_owned().into(),
+ "--deny-warnings".to_owned().into(),
+ ]
+ .into_iter()
+ ),
+ Ok(MetaCmd::Cargo(
+ Cmd::Clippy(true, true,),
+ Opts {
+ exec_dir: None,
+ rustup_home: None,
+ cargo_home: None,
+ cargo_path: "cargo".to_owned().into(),
+ color: false,
+ allow_implied_features: false,
+ ignore_compile_errors: false,
+ summary: false,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "tests".to_owned().into(),
+ "--allow-implied-features".to_owned().into(),
+ "--cargo-home".to_owned().into(),
+ "--ignored".to_owned().into(),
+ "--cargo-path".to_owned().into(),
+ "cargo".to_owned().into(),
+ "--color".to_owned().into(),
+ "--dir".to_owned().into(),
+ OsString::new(),
+ "--ignore-compile-errors".to_owned().into(),
+ "--ignored".to_owned().into(),
+ "--rustup-home".to_owned().into(),
+ OsString::new(),
+ "--summary".to_owned().into(),
+ ]
+ .into_iter()
+ ),
+ Ok(MetaCmd::Cargo(
+ Cmd::Tests(Ignored::Only),
+ Opts {
+ exec_dir: Some(PathBuf::new()),
+ rustup_home: Some(PathBuf::new()),
+ cargo_home: Some("--ignored".to_owned().into()),
+ cargo_path: "cargo/cargo".to_owned().into(),
+ color: true,
+ allow_implied_features: true,
+ ignore_compile_errors: true,
+ summary: true,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "t".to_owned().into(),].into_iter()),
+ Ok(MetaCmd::Cargo(
+ Cmd::Tests(Ignored::None),
+ Opts {
+ exec_dir: None,
+ rustup_home: None,
+ cargo_home: None,
+ cargo_path: "cargo".to_owned().into(),
+ color: false,
+ allow_implied_features: false,
+ ignore_compile_errors: false,
+ summary: false,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "t".to_owned().into(),
+ "--include-ignored".to_owned().into()
+ ]
+ .into_iter()
+ ),
+ Ok(MetaCmd::Cargo(
+ Cmd::Tests(Ignored::Include),
+ Opts {
+ exec_dir: None,
+ rustup_home: None,
+ cargo_home: None,
+ cargo_path: "cargo".to_owned().into(),
+ color: false,
+ allow_implied_features: false,
+ ignore_compile_errors: false,
+ summary: false,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args(
+ [
+ OsString::new(),
+ "doc-tests".to_owned().into(),
+ "--allow-implied-features".to_owned().into(),
+ "--cargo-home".to_owned().into(),
+ "--ignored".to_owned().into(),
+ "--cargo-path".to_owned().into(),
+ "cargo".to_owned().into(),
+ "--color".to_owned().into(),
+ "--dir".to_owned().into(),
+ OsString::new(),
+ "--ignore-compile-errors".to_owned().into(),
+ "--rustup-home".to_owned().into(),
+ OsString::new(),
+ "--summary".to_owned().into(),
+ ]
+ .into_iter()
+ ),
+ Ok(MetaCmd::Cargo(
+ Cmd::DocTests,
+ Opts {
+ exec_dir: Some(PathBuf::new()),
+ rustup_home: Some(PathBuf::new()),
+ cargo_home: Some("--ignored".to_owned().into()),
+ cargo_path: "cargo/cargo".to_owned().into(),
+ color: true,
+ allow_implied_features: true,
+ ignore_compile_errors: true,
+ summary: true,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "d".to_owned().into(),].into_iter()),
+ Ok(MetaCmd::Cargo(
+ Cmd::DocTests,
+ Opts {
+ exec_dir: None,
+ rustup_home: None,
+ cargo_home: None,
+ cargo_path: "cargo".to_owned().into(),
+ color: false,
+ allow_implied_features: false,
+ ignore_compile_errors: false,
+ summary: false,
+ }
+ ))
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "h".to_owned().into(),].into_iter()),
+ Ok(MetaCmd::Help)
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "help".to_owned().into(),].into_iter()),
+ Ok(MetaCmd::Help)
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "v".to_owned().into(),].into_iter()),
+ Ok(MetaCmd::Version)
+ );
+ assert_eq!(
+ MetaCmd::from_args([OsString::new(), "version".to_owned().into(),].into_iter()),
+ Ok(MetaCmd::Version)
+ );
+ }
+}
diff --git a/src/cargo.rs b/src/cargo.rs
@@ -0,0 +1,689 @@
+use super::{args::Ignored, manifest};
+use std::{
+ collections::HashSet,
+ io::{Error, StderrLock, Write as _},
+ path::{Path, PathBuf},
+ process::{Command, Stdio},
+};
+/// Error returned from [`Toolchain::get_version`].
+pub(crate) enum ToolchainErr {
+ /// [`Command::output`] erred with the contained `Error` for the contained `Command`.
+ CommandFail(Command, Error),
+ /// [`Command::output`] was successful for the contained `Command`, but it didn't return a status code.
+ ///
+ /// The contained `String` is the potentially empty content written to `stderr`.
+ CommandNoStatus(Command, String),
+ /// [`Command::output`] was successful for the contained `Command`, but it returned an error status code
+ /// represented by the contained `i32`.
+ ///
+ /// The contained `String` is the potentially empty content written to `stderr`.
+ CommandErr(Command, String, i32),
+ /// [`Command::output`] was successful for the contained `Command`, but the data it wrote to `stdout` was not
+ /// valid UTF-8.
+ StdoutNotUtf8(Command),
+ /// [`Command::output`] was successful for the contained `Command`, but the data it wrote to `stderr` was not
+ /// valid UTF-8.
+ StderrNotUtf8(Command),
+ /// [`Command::output`] was successful for the contained `Command`, but the non-empty data it wrote to
+ /// `stdout` was unexpected.
+ ///
+ /// The contained `String` is the unexpected data.
+ UnexpectedOutput(Command, String),
+ /// The parsed MSRV value is newer than `stable` or the default toolchain.
+ MsrvTooHigh,
+ /// Error when `cargo +<MSRV> -V` returns a version not compatible with the defined MSRV.
+ ///
+ /// The contained `Version` is the installed MSRV.
+ MsrvNotCompatibleWithInstalledMsrv(Version),
+}
+impl ToolchainErr {
+ /// Writes `self` to `stderr`.
+ pub(crate) fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ match self {
+ Self::CommandFail(cmd, err) => stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args().try_fold((), |(), arg| {
+ stderr.write_all(b" ").and_then(|()| {
+ stderr
+ .write_all(arg.to_string_lossy().as_bytes())
+ })
+ }).and_then(|()| writeln!(stderr, " erred: {err}"))
+ }),
+ Self::CommandNoStatus(cmd, err) => stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args().try_fold((), |(), arg| {
+ stderr.write_all(b" ").and_then(|()| {
+ stderr
+ .write_all(arg.to_string_lossy().as_bytes())
+ })
+ }).and_then(|()| {
+ if err.is_empty() {
+ writeln!(stderr, " did not return a status code and didn't write anything to stderr.")
+ } else {
+ writeln!(stderr, " did not return a status code but wrote the following to stderr: {err}")
+ }
+ })
+ }),
+ Self::CommandErr(cmd, err, status) => stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args().try_fold((), |(), arg| {
+ stderr.write_all(b" ").and_then(|()| {
+ stderr
+ .write_all(arg.to_string_lossy().as_bytes())
+ })
+ }).and_then(|()| {
+ if err.is_empty() {
+ writeln!(stderr, " returned status code {status} but didn't write anything to stderr.")
+ } else {
+ writeln!(stderr, " returned status code {status} and wrote the following to stderr: {err}")
+ }
+ })
+ }),
+ Self::StdoutNotUtf8(cmd) => stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args().try_fold((), |(), arg| {
+ stderr.write_all(b" ").and_then(|()| {
+ stderr
+ .write_all(arg.to_string_lossy().as_bytes())
+ })
+ }).and_then(|()| writeln!(stderr, " wrote invalid UTF-8 to stdout."))
+ }),
+ Self::StderrNotUtf8(cmd) => stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args().try_fold((), |(), arg| {
+ stderr.write_all(b" ").and_then(|()| {
+ stderr
+ .write_all(arg.to_string_lossy().as_bytes())
+ })
+ }).and_then(|()| writeln!(stderr, " wrote invalid UTF-8 to stderr."))
+ }),
+ Self::UnexpectedOutput(cmd, output) => stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args().try_fold((), |(), arg| {
+ stderr.write_all(b" ").and_then(|()| {
+ stderr
+ .write_all(arg.to_string_lossy().as_bytes())
+ })
+ }).and_then(|()| writeln!(stderr, " wrote the following unexpected data to stdout: {output}"))
+ }),
+ Self::MsrvTooHigh => writeln!(stderr, "MSRV is higher than cargo stable."),
+ Self::MsrvNotCompatibleWithInstalledMsrv(version)=> writeln!(stderr, "cargo +<MSRV> -V returned '{}.{}.{}' which is inconsistent with the defined MSRV.", version.major, version.minor, version.patch),
+ }
+ }
+}
+/// Error returned when running any `cargo` command.
+pub(crate) enum CargoErr {
+ /// [`Command::output`] erred with the contained `Error` for the contained `Command`.
+ CommandFail(Command, Error),
+ /// [`Command::output`] was successful for the contained `Command`, but it didn't return a status code.
+ ///
+ /// The first contained `String` is the potentially empty content written to `stderr`, and the second
+ /// `String` is the potentially empty content written to `stdout`.
+ CommandNoStatus(Command, String, String),
+ /// [`Command::output`] was successful for the contained `Command`, but it returned an error status code.
+ ///
+ /// The first contained `String` is the potentially empty content written to `stderr`, and the second
+ /// `String` is the potentially empty content written to `stdout`.
+ CommandErr(Command, String, String),
+ /// [`Command::output`] was successful for the contained `Command`, but the data it wrote to `stdout` was not
+ /// valid UTF-8.
+ StdoutNotUtf8(Command),
+ /// [`Command::output`] was successful for the contained `Command`, but the data it wrote to `stderr` was not
+ /// valid UTF-8.
+ StderrNotUtf8(Command),
+ /// [`Command::output`] was successful for the contained `Command`, but a `compile_error` occurred for the
+ /// `"default"` feature.
+ CompileErrDefault(Command),
+ /// [`Command::output`] was successful for the contained `Command`, but a `compile_error` occurred when
+ /// no features were enabled and when there isn't a `"default"` feature defined.
+ CompileErrNoFeatures(Command),
+}
+impl CargoErr {
+ /// Writes `self` to `stderr`.
+ pub(crate) fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ match self {
+ Self::CommandFail(cmd, err) => writeln!(stderr, "{err}").map(|()| cmd),
+ Self::CommandNoStatus(cmd, err, out) => if out.is_empty() {
+ Ok(())
+ } else {
+ writeln!(stderr, "{out}")
+ }
+ .and_then(|()| {
+ if err.is_empty() {
+ writeln!(stderr, "Missing status code and nothing written to stderr.")
+ } else {
+ writeln!(
+ stderr,
+ "Missing status code, but the following was written to stderr: {err}"
+ )
+ }
+ .map(|()| cmd)
+ }),
+ Self::CommandErr(cmd, err, out) => if out.is_empty() {
+ Ok(())
+ } else {
+ writeln!(stderr, "{out}")
+ }
+ .and_then(|()| {
+ if err.is_empty() {
+ Ok(cmd)
+ } else {
+ writeln!(stderr, "{err}").map(|()| cmd)
+ }
+ }),
+ Self::StdoutNotUtf8(cmd) => {
+ writeln!(stderr, "Invalid UTF-8 written to stdout.").map(|()| cmd)
+ }
+ Self::StderrNotUtf8(cmd) => {
+ writeln!(stderr, "Invalid UTF-8 written to stderr.").map(|()| cmd)
+ }
+ Self::CompileErrDefault(cmd) => {
+ writeln!(stderr, "compile_error! raised on default feature.").map(|()| cmd)
+ }
+ Self::CompileErrNoFeatures(cmd) => writeln!(
+ stderr,
+ "compile_error! raised with no features when a default feature does not exist."
+ )
+ .map(|()| cmd),
+ }
+ .and_then(|cmd| {
+ stderr
+ .write_all(cmd.get_program().to_string_lossy().as_bytes())
+ .and_then(|()| {
+ cmd.get_args()
+ .try_fold((), |(), arg| {
+ stderr
+ .write_all(b" ")
+ .and_then(|()| stderr.write_all(arg.to_string_lossy().as_bytes()))
+ })
+ .and_then(|()| writeln!(stderr))
+ })
+ })
+ }
+}
+/// Compiler version.
+#[cfg_attr(test, derive(PartialEq))]
+pub(crate) struct Version {
+ /// Major version.
+ pub major: u64,
+ /// Minor version.
+ pub minor: u64,
+ /// Patch version.
+ pub patch: u64,
+}
+/// `"RUSTUP_HOME"`.
+const RUSTUP_HOME: &str = "RUSTUP_HOME";
+/// `"CARGO_HOME"`.
+const CARGO_HOME: &str = "CARGO_HOME";
+/// `"-q"`.
+const DASH_Q: &str = "-q";
+/// Toolchain to use.
+#[derive(Clone, Copy)]
+pub(crate) enum Toolchain<'a> {
+ /// `cargo +stable`.
+ Stable,
+ /// `cargo`.
+ Default,
+ /// `cargo +<MSRV>`.
+ Msrv(&'a str),
+}
+impl Toolchain<'_> {
+    /// Extracts the compiler version from `stdout`.
+ ///
+ /// This must only be called by [`Self::get_version`].
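+    ///
+    /// `cargo -V` typically writes output of the form `cargo 1.88.0 (<hash> <date>)`; only the leading
+    /// `cargo <major>.<minor>.<patch>` portion is parsed. The three components must be decimal integers
+    /// without leading 0s, and anything after the patch number must be separated from it by a space.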
+ #[expect(unsafe_code, reason = "comments justify correctness")]
+ fn parse_stdout(cmd: Command, stdout: Vec<u8>) -> Result<Version, Box<ToolchainErr>> {
+ /// `"cargo "`.
+ const CARGO: &[u8; 6] = b"cargo ";
+ if let Ok(utf8) = String::from_utf8(stdout) {
+ utf8.as_bytes()
+ .split_at_checked(CARGO.len())
+ .and_then(|(pref, rem)| {
+ if pref == CARGO {
+ let mut iter = rem.split(|b| *b == b'.');
+ if let Some(fst) = iter.next()
+ // SAFETY:
+ // Original input was a `str`, and we split on a single-byte
+ // UTF-8 code unit.
+ && let Ok(major) = manifest::parse_int(unsafe {
+ str::from_utf8_unchecked(fst)
+ })
+ && let Some(snd) = iter.next()
+ // SAFETY:
+ // Original input was a `str`, and we split on a single-byte
+ // UTF-8 code unit.
+ && let Ok(minor) = manifest::parse_int(unsafe {
+ str::from_utf8_unchecked(snd)
+ })
+ && let Some(lst) = iter.next()
+ && iter.next().is_none()
+ && let Some(lst_fst) = lst.split(|b| *b == b' ').next()
+ // SAFETY:
+ // Original input was a `str`, and we split on a single-byte
+ // UTF-8 code unit.
+ && let Ok(patch) = manifest::parse_int(unsafe {
+ str::from_utf8_unchecked(lst_fst)
+ })
+ {
+ Some(Version {
+ major,
+ minor,
+ patch,
+ })
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ })
+ .ok_or_else(|| Box::new(ToolchainErr::UnexpectedOutput(cmd, utf8)))
+ } else {
+ Err(Box::new(ToolchainErr::StdoutNotUtf8(cmd)))
+ }
+ }
+ /// Returns the version.
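+    ///
+    /// For example, `Toolchain::Stable` runs `<cargo_path> +stable -V`, while `Toolchain::Msrv("+1.88.0")` runs
+    /// `<cargo_path> +1.88.0 -V`; `RUSTUP_HOME` and `CARGO_HOME` are set for the child process only when the
+    /// corresponding paths are passed.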
+ pub(crate) fn get_version(
+ self,
+ rustup_home: Option<&Path>,
+ cargo_path: &Path,
+ cargo_home: Option<&Path>,
+ ) -> Result<Version, Box<ToolchainErr>> {
+ let mut cmd = Command::new(cargo_path);
+ if let Some(env) = rustup_home {
+ _ = cmd.env(RUSTUP_HOME, env);
+ }
+ if let Some(env) = cargo_home {
+ _ = cmd.env(CARGO_HOME, env);
+ }
+ match self {
+ Self::Stable => {
+ _ = cmd.arg(PLUS_STABLE);
+ }
+ Self::Default => {}
+ Self::Msrv(val) => {
+ _ = cmd.arg(val);
+ }
+ }
+ match cmd
+ .arg("-V")
+ .stderr(Stdio::piped())
+ .stdin(Stdio::null())
+ .stdout(Stdio::piped())
+ .output()
+ {
+ Ok(output) => {
+ if let Some(status_code) = output.status.code() {
+ if status_code == 0i32 {
+ Self::parse_stdout(cmd, output.stdout)
+ } else if let Ok(err) = String::from_utf8(output.stderr) {
+ Err(Box::new(ToolchainErr::CommandErr(cmd, err, status_code)))
+ } else {
+ Err(Box::new(ToolchainErr::StderrNotUtf8(cmd)))
+ }
+ } else if let Ok(err) = String::from_utf8(output.stderr) {
+ Err(Box::new(ToolchainErr::CommandNoStatus(cmd, err)))
+ } else {
+ Err(Box::new(ToolchainErr::StderrNotUtf8(cmd)))
+ }
+ }
+ Err(e) => Err(Box::new(ToolchainErr::CommandFail(cmd, e))),
+ }
+ }
+}
+/// `"+stable"`.
+const PLUS_STABLE: &str = "+stable";
+/// `"--color"`.
+const DASH_DASH_COLOR: &str = "--color";
+/// `"always"`.
+const ALWAYS: &str = "always";
+/// `"never"`.
+const NEVER: &str = "never";
+/// `"--no-default-features"`.
+const DASH_DASH_NO_DEFAULT_FEATURES: &str = "--no-default-features";
+/// `"--features"`.
+const DASH_DASH_FEATURES: &str = "--features";
+/// `"--"`.
+const DASH_DASH: &str = "--";
+/// `"default"`.
+const DEFAULT: &str = "default";
+/// Common options to pass to [`Clippy::run`] and [`Tests::run`].
+pub(crate) struct Options<'toolchain, 'errs> {
+ /// The `cargo` toolchain to use.
+ pub toolchain: Toolchain<'toolchain>,
+ /// The path to the `rustup` storage directory.
+ pub rustup_home: Option<PathBuf>,
+ /// The path to `cargo`.
+ pub cargo_path: PathBuf,
+ /// The path to the `cargo` storage directory.
+ pub cargo_home: Option<PathBuf>,
+ /// `true` iff color should be written to `stdout` and `stderr`.
+ pub color: bool,
+ /// `true` iff `compile_error`s should be ignored.
+ pub ignore_compile_errors: bool,
+    /// `true` iff a feature named `"default"` does not exist.
+ pub default_feature_does_not_exist: bool,
+ /// Hash set of non-terminating errors to be written at the very end.
+ pub non_terminating_errors: &'errs mut HashSet<String>,
+}
+/// Executes `cmd`.
+///
+/// Returns `true` iff `doc_only` is `true` and `cargo` reports that no library targets exist.
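+///
+/// `cargo` conventionally exits with status 101 when compilation or a test fails, so that status is inspected
+/// for `compile_error!` output and, when `doc_only` is `true`, for the missing-library-target message.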
+fn execute_command(
+ mut cmd: Command,
+ options: &mut Options<'_, '_>,
+ features: &str,
+ doc_only: bool,
+) -> Result<bool, Box<CargoErr>> {
+ match cmd.stdout(Stdio::piped()).output() {
+ Ok(output) => {
+ if let Some(code) = output.status.code() {
+ match code {
+ 0i32 => {
+ if !output.stderr.is_empty() {
+ _ = options.non_terminating_errors.insert(
+ match String::from_utf8(output.stderr) {
+ Ok(err) => err,
+ Err(e) => e.to_string(),
+ },
+ );
+ }
+ Ok(false)
+ }
+ 101i32 => {
+ /// `"compile_error!"` as a byte string.
+ const COMPILE_ERROR: &[u8; 14] = b"compile_error!";
+ /// `"no library targets found in package"` as a byte string.
+ const NO_LIB_TARG: &[u8; 35] = b"no library targets found in package";
+ if output
+ .stderr
+ .windows(COMPILE_ERROR.len())
+ .any(|window| window == COMPILE_ERROR)
+ {
+ if options.ignore_compile_errors {
+ if features == DEFAULT {
+ Err(Box::new(CargoErr::CompileErrDefault(cmd)))
+ } else if options.default_feature_does_not_exist
+ && features.is_empty()
+ {
+ Err(Box::new(CargoErr::CompileErrNoFeatures(cmd)))
+ } else {
+ Ok(false)
+ }
+ } else if let Ok(err) = String::from_utf8(output.stderr) {
+ if let Ok(stdout) = String::from_utf8(output.stdout) {
+ Err(Box::new(CargoErr::CommandErr(cmd, err, stdout)))
+ } else {
+ Err(Box::new(CargoErr::StdoutNotUtf8(cmd)))
+ }
+ } else {
+ Err(Box::new(CargoErr::StderrNotUtf8(cmd)))
+ }
+ } else if doc_only
+ && output
+ .stderr
+ .windows(NO_LIB_TARG.len())
+ .any(|window| window == NO_LIB_TARG)
+ {
+ Ok(true)
+ } else if let Ok(err) = String::from_utf8(output.stderr) {
+ if let Ok(stdout) = String::from_utf8(output.stdout) {
+ Err(Box::new(CargoErr::CommandErr(cmd, err, stdout)))
+ } else {
+ Err(Box::new(CargoErr::StdoutNotUtf8(cmd)))
+ }
+ } else {
+ Err(Box::new(CargoErr::StderrNotUtf8(cmd)))
+ }
+ }
+ _ => {
+ if let Ok(err) = String::from_utf8(output.stderr) {
+ if let Ok(stdout) = String::from_utf8(output.stdout) {
+ Err(Box::new(CargoErr::CommandErr(cmd, err, stdout)))
+ } else {
+ Err(Box::new(CargoErr::StdoutNotUtf8(cmd)))
+ }
+ } else {
+ Err(Box::new(CargoErr::StderrNotUtf8(cmd)))
+ }
+ }
+ }
+ } else if let Ok(err) = String::from_utf8(output.stderr) {
+ if let Ok(stdout) = String::from_utf8(output.stdout) {
+ Err(Box::new(CargoErr::CommandNoStatus(cmd, err, stdout)))
+ } else {
+ Err(Box::new(CargoErr::StdoutNotUtf8(cmd)))
+ }
+ } else {
+ Err(Box::new(CargoErr::StderrNotUtf8(cmd)))
+ }
+ }
+ Err(e) => Err(Box::new(CargoErr::CommandFail(cmd, e))),
+ }
+}
+/// `cargo clippy`.
+pub(crate) struct Clippy;
+impl Clippy {
+ /// Execute `cargo clippy`.
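+    ///
+    /// For example, with the stable toolchain, `all_targets` and `deny_warnings` set, and a non-empty `features`
+    /// string, this runs
+    /// `<cargo_path> +stable clippy -q --all-targets --color <always|never> --no-default-features --features <features> -- -Dwarnings`.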
+ #[expect(
+ clippy::panic_in_result_fn,
+ reason = "want to crash when there is a bug"
+ )]
+ pub(crate) fn run(
+ options: &mut Options<'_, '_>,
+ all_targets: bool,
+ deny_warnings: bool,
+ features: &str,
+ ) -> Result<(), Box<CargoErr>> {
+ let mut c = Command::new(options.cargo_path.as_path());
+ _ = c.stderr(Stdio::piped()).stdin(Stdio::null());
+ if let Some(ref env) = options.rustup_home {
+ _ = c.env(RUSTUP_HOME, env);
+ }
+ if let Some(ref env) = options.cargo_home {
+ _ = c.env(CARGO_HOME, env);
+ }
+ match options.toolchain {
+ Toolchain::Stable => {
+ _ = c.arg(PLUS_STABLE);
+ }
+ Toolchain::Default => {}
+ Toolchain::Msrv(ref msrv) => {
+ _ = c.arg(msrv);
+ }
+ }
+ _ = c.arg("clippy").arg(DASH_Q);
+ if all_targets {
+ _ = c.arg("--all-targets");
+ }
+ _ = c
+ .arg(DASH_DASH_COLOR)
+ .arg(if options.color { ALWAYS } else { NEVER })
+ .arg(DASH_DASH_NO_DEFAULT_FEATURES);
+ if !features.is_empty() {
+ _ = c.arg(DASH_DASH_FEATURES).arg(features);
+ }
+ if deny_warnings {
+ _ = c.arg(DASH_DASH).arg("-Dwarnings");
+ }
+ execute_command(c, options, features, false)
+ .map(|no_features| assert!(!no_features, "there is a bug in cargo::execute_command"))
+ }
+}
+/// What kind of test to run.
+#[derive(Clone, Copy)]
+pub(crate) enum TestKind {
+ /// Both unit/integration and doc tests.
+ All(Ignored),
+    /// Only unit/integration tests.
+ Unit(Ignored),
+ /// Only doc tests.
+ Doc,
+}
+/// `cargo t --tests/--doc`.
+pub(crate) struct Tests;
+impl Tests {
+ /// Execute `cargo t`.
+ ///
+ /// Returns `true` iff only doc tests were run and a no-library error was returned.
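+    ///
+    /// For example, `TestKind::Unit(Ignored::Include)` with the default toolchain and a non-empty `features`
+    /// string runs
+    /// `<cargo_path> t -q --color <always|never> --no-default-features --features <features> --tests -- --color <always|never> --include-ignored`.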
+ pub(crate) fn run(
+ options: &mut Options<'_, '_>,
+ kind: TestKind,
+ features: &str,
+ ) -> Result<bool, Box<CargoErr>> {
+ /// `"--ignored"`.
+ const DASH_DASH_IGNORED: &str = "--ignored";
+ /// `"--include-ignored"`.
+ const DASH_DASH_INCLUDE_IGNORED: &str = "--include-ignored";
+ let mut c = Command::new(options.cargo_path.as_path());
+ _ = c.stderr(Stdio::piped()).stdin(Stdio::null());
+ if let Some(ref env) = options.rustup_home {
+ _ = c.env(RUSTUP_HOME, env);
+ }
+ if let Some(ref env) = options.cargo_home {
+ _ = c.env(CARGO_HOME, env);
+ }
+ match options.toolchain {
+ Toolchain::Stable => {
+ _ = c.arg(PLUS_STABLE);
+ }
+ Toolchain::Default => {}
+ Toolchain::Msrv(ref msrv) => {
+ _ = c.arg(msrv);
+ }
+ }
+ _ = c
+ .arg("t")
+ .arg(DASH_Q)
+ .arg(DASH_DASH_COLOR)
+ .arg(if options.color { ALWAYS } else { NEVER })
+ .arg(DASH_DASH_NO_DEFAULT_FEATURES);
+ if !features.is_empty() {
+ _ = c.arg(DASH_DASH_FEATURES).arg(features);
+ }
+ let mut doc_only = false;
+ match kind {
+ TestKind::All(ignore) => {
+ _ = c.arg(DASH_DASH).arg(DASH_DASH_COLOR).arg(if options.color {
+ ALWAYS
+ } else {
+ NEVER
+ });
+ match ignore {
+ Ignored::None => {}
+ Ignored::Only => {
+ _ = c.arg(DASH_DASH_IGNORED);
+ }
+ Ignored::Include => {
+ _ = c.arg(DASH_DASH_INCLUDE_IGNORED);
+ }
+ }
+ }
+ TestKind::Unit(ignore) => {
+ _ = c
+ .arg("--tests")
+ .arg(DASH_DASH)
+ .arg(DASH_DASH_COLOR)
+ .arg(if options.color { ALWAYS } else { NEVER });
+ match ignore {
+ Ignored::None => {}
+ Ignored::Only => {
+ _ = c.arg(DASH_DASH_IGNORED);
+ }
+ Ignored::Include => {
+ _ = c.arg(DASH_DASH_INCLUDE_IGNORED);
+ }
+ }
+ }
+ TestKind::Doc => {
+ doc_only = true;
+ _ = c
+ .arg("--doc")
+ .arg(DASH_DASH)
+ .arg(DASH_DASH_COLOR)
+ .arg(if options.color { ALWAYS } else { NEVER });
+ }
+ }
+ execute_command(c, options, features, doc_only)
+ }
+}
+#[cfg(test)]
+mod tests {
+ use super::{Command, Toolchain, ToolchainErr, Version};
+ #[expect(clippy::cognitive_complexity, reason = "a lot of tests")]
+ #[test]
+ fn toolchain_parse() {
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), vec![255]), Err(e) if matches!(*e, ToolchainErr::StdoutNotUtf8(_)))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), Vec::new()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v.is_empty()))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2.3.".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2.3."))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2.3a".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2.3a"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b" cargo 1.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == " cargo 1.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"Cargo 1.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "Cargo 1.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.00.0".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.00.0"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2.03".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2.03"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo -1.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo -1.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo1.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo1.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo\t1.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo\t1.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1..3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1..3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1."))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 111111111111111111111111.2.3".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 111111111111111111111111.2.3"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2.3.4".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2.3.4"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 1.2.3-nightly".to_vec()), Err(e) if matches!(*e, ToolchainErr::UnexpectedOutput(_, ref v) if v == "cargo 1.2.3-nightly"))
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 18446744073709551615.18446744073709551615.18446744073709551615".to_vec()), Ok(v) if v == Version { major: u64::MAX, minor: u64::MAX, patch: u64::MAX, })
+ );
+ assert!(
+ matches!(Toolchain::parse_stdout(Command::new(""), b"cargo 0.0.0 asdflk 0023n0=lk0932(!@#V)\x00".to_vec()), Ok(v) if v == Version { major: 0, minor: 0, patch: 0, })
+ );
+ }
+}
diff --git a/src/main.rs b/src/main.rs
@@ -0,0 +1,289 @@
+//! Consult [`README.md`](https://crates.io/crates/ci-cargo).
+extern crate alloc;
+/// Functionality related to parsing CLI arguments.
+mod args;
+/// Functionality related to running `cargo`.
+mod cargo;
+/// Functionality related to `Cargo.toml` parsing.
+mod manifest;
+/// Contains a `const bool` that is `true` iff `rustup` is supported by the platform.
+mod rustup;
+#[cfg(target_os = "openbsd")]
+use alloc::ffi::CString;
+use args::{ArgsErr, HELP_MSG, MetaCmd};
+use cargo::{CargoErr, Options, Toolchain, ToolchainErr};
+use core::ffi::CStr;
+use manifest::{Manifest, ManifestErr, TooManyFeaturesErr};
+#[cfg(target_os = "openbsd")]
+use priv_sep::{Errno, Permissions, Promise, Promises};
+use std::{
+ collections::HashSet,
+ env, fs,
+ io::{self, BufWriter, Error, Write as _},
+ path::{Path, PathBuf},
+ process::ExitCode,
+};
+/// Application error.
+enum E {
+ /// Error related to the passed arguments.
+ Args(ArgsErr),
+ /// `Cargo.toml` does not exist.
+ NoCargoToml,
+ /// Error looking for `Cargo.toml`.
+ CargoTomlIo(Error),
+ /// Error reading `Cargo.toml`.
+ CargoTomlRead(Error),
+ /// Error looking for `rustup-toolchain.toml`.
+ RustupToolchainTomlIo(Error),
+ /// Error related to extracting the necessary data from `Cargo.toml`.
+ Manifest(ManifestErr),
+ /// Error from `Msrv::compare_to_other`.
+ Toolchain(Box<ToolchainErr>),
+ /// Error from OpenBSD `pledge`.
+ #[cfg(target_os = "openbsd")]
+ Pledge(Errno),
+ /// Error from OpenBSD `unveil`.
+ #[cfg(target_os = "openbsd")]
+ Unveil(Errno),
+ /// Error on OpenBSD when the `Path` for `cargo` can't be converted into a `CString`.
+ #[cfg(target_os = "openbsd")]
+ CargoPathCStr,
+    /// Variant returned when there are too many features to generate the power set.
+ TooManyFeatures,
+ /// Unable to write non-terminating messages to stderr.
+ StdErr,
+ /// Unable to write the help message to stdout.
+ Help(Error),
+ /// Unable to write the version message to stdout.
+ Version(Error),
+ /// `cargo` erred.
+ Cargo(Box<CargoErr>),
+ /// Unable to write the summary message to stdout.
+ Summary(Error),
+}
+impl E {
+ /// Writes `self` to `stderr` before returning [`ExitCode::FAILURE`].
+ fn into_exit_code(self) -> ExitCode {
+ let mut stderr = io::stderr().lock();
+ match self {
+ Self::Args(e) => e.write(stderr),
+ Self::NoCargoToml => writeln!(
+ stderr,
+ "Cargo.toml doesn't exist in the current nor ancestor directories."
+ ),
+ Self::CargoTomlIo(err) => {
+ writeln!(stderr, "There was an error looking for Cargo.toml: {err}.")
+ }
+ Self::CargoTomlRead(err) => {
+ writeln!(stderr, "There was an error reading Cargo.toml: {err}.")
+ }
+ Self::RustupToolchainTomlIo(err) => {
+ writeln!(
+ stderr,
+ "There was an error looking for the existence of rust-toolchain.toml: {err}."
+ )
+ }
+ Self::Manifest(e) => e.write(stderr),
+ Self::Toolchain(e) => e.write(stderr),
+ #[cfg(target_os = "openbsd")]
+ Self::Pledge(e) => writeln!(stderr, "pledge(2) erred: {e}."),
+ #[cfg(target_os = "openbsd")]
+ Self::Unveil(e) => writeln!(stderr, "unveil(2) erred: {e}."),
+ #[cfg(target_os = "openbsd")]
+ Self::CargoPathCStr => writeln!(
+ stderr,
+ "unable to convert the path passed for --cargo-path into a C string"
+ ),
+ Self::TooManyFeatures => TooManyFeaturesErr::write(stderr),
+ Self::StdErr => Ok(()),
+ Self::Help(err) => writeln!(
+ stderr,
+ "There was an error writing ci-cargo help to stdout: {err}."
+ ),
+ Self::Version(err) => writeln!(
+ stderr,
+ "There was an error writing ci-cargo version to stdout: {err}."
+ ),
+ Self::Cargo(e) => e.write(stderr),
+ Self::Summary(err) => writeln!(
+ stderr,
+ "There was an error writing the summary to stdout: {err}."
+ ),
+ }
+ .map_or(ExitCode::FAILURE, |()| ExitCode::FAILURE)
+ }
+}
+/// No-op.
+#[cfg(not(target_os = "openbsd"))]
+#[expect(clippy::unnecessary_wraps, reason = "unify OpenBSD with non-OpenBSD")]
+const fn priv_init<Never>() -> Result<(), Never> {
+ Ok(())
+}
+/// Returns the initial set of `Promises` we pledged.
+#[cfg(target_os = "openbsd")]
+fn priv_init() -> Result<Promises, E> {
+ let proms = Promises::new([
+ Promise::Exec,
+ Promise::Proc,
+ Promise::Rpath,
+ Promise::Stdio,
+ Promise::Unveil,
+ ]);
+ proms.pledge().map_err(E::Pledge).and_then(|()| {
+ Permissions::READ
+ .unveil(c"/")
+ .map_err(E::Unveil)
+ .map(|()| proms)
+ })
+}
+/// `"Cargo.toml"` as a `CStr`.
+const CARGO_CSTR: &CStr = c"Cargo.toml";
+/// `"Cargo.toml"`.
+const CARGO: &str = match CARGO_CSTR.to_str() {
+ Ok(val) => val,
+ Err(_) => panic!("Cargo.toml is not a valid str"),
+};
+/// `"rust-toolchain.toml"` as a `CStr`.
+const RUST_TOOLCHAIN_CSTR: &CStr = c"rust-toolchain.toml";
+/// `"rust-toolchain.toml"`.
+const RUST_TOOLCHAIN: &str = match RUST_TOOLCHAIN_CSTR.to_str() {
+ Ok(val) => val,
+ Err(_) => panic!("rust-toolchain.toml is not a valid str"),
+};
+/// No-op.
+#[cfg(not(target_os = "openbsd"))]
+#[expect(clippy::unnecessary_wraps, reason = "unify OpenBSD with non-OpenBSD")]
+const fn unveil_next<Never>() -> Result<(), Never> {
+ Ok(())
+}
+/// Removes all file permissions before allowing only read permissions on `CARGO_CSTR`
+/// and `RUST_TOOLCHAIN_CSTR`.
+#[cfg(target_os = "openbsd")]
+fn unveil_next() -> Result<(), E> {
+ Permissions::NONE
+ .unveil(c"/")
+ .and_then(|()| {
+ Permissions::READ
+ .unveil(CARGO_CSTR)
+ .and_then(|()| Permissions::READ.unveil(RUST_TOOLCHAIN_CSTR))
+ })
+ .map_err(E::Unveil)
+}
+/// No-op.
+#[cfg(not(target_os = "openbsd"))]
+#[expect(clippy::unnecessary_wraps, reason = "unify OpenBSD with non-OpenBSD")]
+const fn priv_sep_final(_: &mut (), _: &Path) -> Result<(), E> {
+ Ok(())
+}
+/// Removes read permissions from `CARGO_CSTR` and `RUST_TOOLCHAIN_CSTR` before allowing execute permissions on
+/// `cargo_path`. Lastly, removes the `rpath` and `unveil` promises from the pledge.
+#[cfg(target_os = "openbsd")]
+fn priv_sep_final(proms: &mut Promises, cargo_path: &Path) -> Result<(), E> {
+ Permissions::NONE
+ .unveil(CARGO_CSTR)
+ .and_then(|()| Permissions::NONE.unveil(RUST_TOOLCHAIN_CSTR))
+ .map_err(E::Unveil)
+ .and_then(|()| {
+ if cargo_path.is_absolute() {
+ CString::new(cargo_path.as_os_str().as_encoded_bytes())
+ .map_err(|_e| E::CargoPathCStr)
+ .and_then(|path_c| Permissions::EXECUTE.unveil(&path_c).map_err(E::Unveil))
+ } else {
+ Permissions::EXECUTE.unveil(c"/").map_err(E::Unveil)
+ }
+ .and_then(|()| {
+ proms
+ .remove_promises_then_pledge([Promise::Rpath, Promise::Unveil])
+ .map_err(E::Pledge)
+ })
+ })
+}
+/// Checks if `Cargo.toml` exists in `cur_dir`; if not, it recursively checks the ancestor
+/// directories.
+///
+/// We make this recursive so that in the rare (impossible?) case that traversal becomes circular, a stack
+/// overflow occurs.
+fn set_env(mut cur_dir: PathBuf) -> Result<bool, Error> {
+ match fs::exists(CARGO) {
+ Ok(exists) => {
+ if exists {
+ Ok(true)
+ } else if cur_dir.pop() {
+ env::set_current_dir(cur_dir.as_path()).and_then(|()| set_env(cur_dir))
+ } else {
+ Ok(false)
+ }
+ }
+ Err(e) => Err(e),
+ }
+}
+/// Current version of this crate.
+const VERSION: &str = "ci-cargo 0.1.0\n";
+fn main() -> ExitCode {
+ priv_init().and_then(|mut proms| MetaCmd::from_args(env::args_os()).map_err(E::Args).and_then(|meta_cmd| {
+ match meta_cmd {
+ MetaCmd::Help => io::stdout().lock().write_all(HELP_MSG.as_bytes()).map_err(E::Help),
+ MetaCmd::Version => io::stdout().lock().write_all(VERSION.as_bytes()).map_err(E::Version),
+ MetaCmd::Cargo(cmd, opts) => opts.exec_dir.map_or_else(
+ || env::current_dir().map_err(E::CargoTomlIo).and_then(|dir| set_env(dir).map_err(E::CargoTomlIo).and_then(|exists| if exists { Ok(()) } else { Err(E::NoCargoToml) })),
+ |path| env::set_current_dir(path).map_err(E::CargoTomlIo),
+ ).and_then(|()| {
+ unveil_next().and_then(|()| fs::read_to_string(CARGO).map_err(E::CargoTomlRead).and_then(|toml| Manifest::from_toml(toml, opts.allow_implied_features).map_err(E::Manifest).and_then(|man| fs::exists(RUST_TOOLCHAIN).map_err(E::RustupToolchainTomlIo).and_then(|rustup_toolchain_exists| priv_sep_final(&mut proms, &opts.cargo_path).and_then(|()| {
+ match man.msrv() {
+ None => Ok((None, if rustup_toolchain_exists || (!rustup::SUPPORTED && opts.rustup_home.is_none()) {
+ Toolchain::Default
+ } else {
+ Toolchain::Stable
+ })),
+ Some(val) => if rustup::SUPPORTED || opts.rustup_home.is_some() {
+ val.compare_to_other(rustup_toolchain_exists, opts.rustup_home.as_deref(), &opts.cargo_path, opts.cargo_home.as_deref()).map_err(E::Toolchain).map(|msrv_string| (msrv_string, if rustup_toolchain_exists { Toolchain::Default } else { Toolchain::Stable }))
+ } else {
+ Ok((None, Toolchain::Default))
+ },
+ }.and_then(|(msrv_string, toolchain)| {
+ let default_feature_does_not_exist = !man.features().contains_default();
+ man.features().power_set().map_err(|_e| E::TooManyFeatures).and_then(|mut power_set| {
+ let mut non_term_errs = HashSet::new();
+ cmd.run(Options { toolchain, rustup_home: opts.rustup_home, cargo_path: opts.cargo_path, cargo_home: opts.cargo_home, color: opts.color, ignore_compile_errors: opts.ignore_compile_errors, default_feature_does_not_exist, non_terminating_errors: &mut non_term_errs, }, msrv_string.as_deref(), &mut power_set).map_err(E::Cargo).and_then(|()| {
+ if non_term_errs.is_empty() {
+ Ok(())
+ } else {
+ // `StderrLock` is not buffered.
+ let mut stderr = BufWriter::new(io::stderr().lock());
+ non_term_errs.into_iter().try_fold((), |(), msg| stderr.write_all(msg.as_bytes())).and_then(|()| stderr.flush()).map_err(|_e| E::StdErr)
+ }
+ }).and_then(|()| {
+ if opts.summary {
+ let mut stdout = io::stdout().lock();
+ if matches!(toolchain, Toolchain::Stable) {
+ if let Some(ref msrv_val) = msrv_string {
+ writeln!(stdout, "Toolchains used: cargo +stable and cargo {msrv_val}")
+ } else {
+ writeln!(stdout, "Toolchain used: cargo +stable")
+ }
+ } else if let Some(ref msrv_val) = msrv_string {
+ writeln!(stdout, "Toolchains used: cargo and cargo {msrv_val}")
+ } else {
+ writeln!(stdout, "Toolchain used: cargo")
+ }.and_then(|()| {
+ writeln!(stdout, "Features used:").and_then(|()| {
+ power_set.reset();
+ while let Some(features) = power_set.next_set() {
+ if let Err(e) = writeln!(stdout, "{features}") {
+ return Err(e);
+ }
+ }
+ Ok(())
+ })
+ }).map_err(E::Summary)
+ } else {
+ Ok(())
+ }
+ })
+ })
+ })
+ })))))
+ })
+ }
+ })).map_or_else(E::into_exit_code, |()| ExitCode::SUCCESS)
+}
diff --git a/src/manifest.rs b/src/manifest.rs
@@ -0,0 +1,2471 @@
+extern crate alloc;
+use super::cargo::{Toolchain, ToolchainErr};
+use alloc::borrow::Cow;
+use core::cmp::Ordering;
+use std::{
+ io::{Error, StderrLock, Write as _},
+ path::Path,
+};
+use toml::{
+ Spanned,
+ de::{DeArray, DeValue, Error as TomlErr},
+ map::Map,
+};
+/// Error returned from extracting `"package"`.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum PackageErr {
+ /// Variant returned when there is no `"package"` key.
+ Missing,
+ /// Variant returned when `"package"` is not a table in Cargo.toml.
+ InvalidType,
+ /// Variant returned when `package.rust-version` is not a string in Cargo.toml.
+ InvalidMsrvType,
+ /// Variant returned from extracting `package.rust-version`.
+ Msrv,
+}
+impl PackageErr {
+ /// Writes `self` to `stderr`.
+ fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ writeln!(
+ stderr,
+ "{}",
+ match self {
+ Self::Missing => {
+ "Error with Cargo.toml: 'package' is missing."
+ }
+ Self::InvalidType => {
+ "Error with Cargo.toml: 'package' is not a table."
+ }
+ Self::InvalidMsrvType =>
+ "Error with Cargo.toml: 'package.rust-version' is not a string.",
+ Self::Msrv =>
+ "Error with Cargo.toml: 'package.rust-version' is not a valid MSRV. It must be of the form <major>[.<minor>[.<patch>]] where major, minor, and patch are unsigned 64-bit integers without leading 0s.",
+ }
+ )
+ }
+}
+/// Error returned from extracting feature dependencies.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum FeatureDependenciesErr {
+ /// Variant returned when a feature is not an array.
+ ///
+ /// The contained `String` is the name of the feature.
+ InvalidFeatureType(String),
+ /// Variant returned when a feature dependency is not a string.
+ ///
+ /// The contained `String` is the name of the feature.
+ InvalidDependencyType(String),
+ /// Variant returned when a feature dependency is not a feature nor dependency.
+ ///
+ /// The first contained `String` is the name of the feature, and the second `String` is the name of the invalid
+ /// feature dependency.
+ ///
+ /// Note this is only possible when `allow_implied_features` is `false` when passed to
+ /// [`Features::validate_dependencies`].
+ InvalidDependency(String, String),
+ /// Variant returned when a feature is cyclic.
+ ///
+ /// The contained `String` is the name of the feature.
+ CyclicFeature(String),
+ /// Variant returned when a feature dependency is redundant.
+ ///
+ /// The first contained `String` is the name of the feature, and the second `String` is the name of the
+ /// redundant feature dependency.
+ RedundantDependency(String, String),
+}
+impl FeatureDependenciesErr {
+ /// Writes `self` to `stderr`.
+ fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ match self {
+ Self::InvalidFeatureType(name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' is not an array."
+ )
+ }
+ Self::InvalidDependencyType(name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' contains a value that is not a string."
+ ),
+ Self::InvalidDependency(name, dep_name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' contains '{dep_name}' which is neither a feature nor dependency."
+ ),
+ Self::CyclicFeature(name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' is a cyclic feature."
+ ),
+ Self::RedundantDependency(name, dep_name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' contains the redundant dependency '{dep_name}'."
+ ),
+ }
+ }
+}
+/// Error returned from extracting `"features"`.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum FeaturesErr {
+ /// Variant returned when `features` is not a table in Cargo.toml.
+ InvalidType,
+ /// Variant returned when `features` contains a feature with an invalid name.
+ ///
+ /// The contained `String` is the name of the feature.
+ InvalidName(String),
+ /// Variant returned when there is an issue with a feature's dependencies.
+ FeatureDependencies(FeatureDependenciesErr),
+}
+impl FeaturesErr {
+ /// Writes `self` to `stderr`.
+ fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ match self {
+ Self::InvalidType => {
+ writeln!(stderr, "Error with Cargo.toml: 'features' is not a table.")
+ }
+ Self::InvalidName(name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' is not a valid feature name."
+ )
+ }
+ Self::FeatureDependencies(e) => e.write(stderr),
+ }
+ }
+}
+/// Error returned from extracting dependencies.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum DependenciesErr {
+ /// Variant returned when the dependencies is not a table.
+ ///
+ /// The contained `str` is the name of the dependencies (e.g., `"dependencies"`).
+ Type(&'static str),
+ /// Variant returned when a dependency has an invalid name.
+ ///
+ /// The contained `str` is the name of dependencies (e.g., `"dependencies"`), and the `String` is the
+ /// name of the dependency.
+ Name(&'static str, String),
+ /// Variant returned when a dependency is not a string or table.
+ ///
+ /// The contained `str` is the name of dependencies (e.g., `"dependencies"`), and the `String` is the
+ /// name of the dependency.
+ DependencyType(&'static str, String),
+ /// Variant returned when a dependency contains an `"optional"` key whose value is not a Boolean.
+ ///
+ /// The contained `str` is the name of dependencies (e.g., `"dependencies"`), and the `String` is the
+ /// name of the dependency.
+ OptionalType(&'static str, String),
+ /// Variant returned when an optional dependency would cause an implied feature to be created.
+ ///
+ /// Note this is only possible when `allow_implied_features` is `false` when passed to
+ /// [`Features::add_optional_dependencies`].
+ ///
+ /// The contained `str` is the name of dependencies (e.g., `"dependencies"`), and the `String` is the
+ /// name of the dependency.
+ ImpliedFeature(&'static str, String),
+}
+/// Error returned from extracting dependencies to add implied features.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum ImpliedFeaturesErr {
+ /// Variant returned from extracting dependencies.
+ Dependencies(DependenciesErr),
+ /// Variant returned when `target` is not a table in Cargo.toml.
+ TargetType,
+ /// Variant returned when `target` contains a key whose value is not a table.
+ ///
+ /// The contained `String` is the name of the key.
+ TargetPlatformType(String),
+ /// Variant returned when a target platform contains an issue with dependencies.
+ ///
+ /// The contained `String` is the name of the target platform.
+ TagetPlatformDependencies(String, DependenciesErr),
+ /// Variant returned when a feature dependency is not a feature nor dependency.
+ ///
+ /// The first contained `String` is the name of the feature, and the second `String` is the name of the invalid
+ /// feature dependency.
+ ///
+ /// Note this is only possible when `allow_implied_features` is `true` when passed to
+    /// [`Features::validate_dependencies`] since when `false` we verify that the dependency
+ /// is defined as a feature.
+ InvalidDependency(String, String),
+}
+impl ImpliedFeaturesErr {
+ /// Writes `self` to `stderr`.
+ fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ match self {
+ Self::Dependencies(e) => match e {
+ DependenciesErr::Type(name) => {
+ writeln!(stderr, "Error with Cargo.toml: '{name}' is not a table.")
+ }
+ DependenciesErr::Name(name, dep_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: '{name}.{dep_name}' is not a valid dependency name."
+ )
+ }
+ DependenciesErr::DependencyType(name, dep_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: '{name}.{dep_name}' is not a string or table."
+ )
+ }
+ DependenciesErr::OptionalType(name, dep_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: '{name}.{dep_name}.optional' is not a Boolean."
+ )
+ }
+ DependenciesErr::ImpliedFeature(name, dep_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: '{name}.{dep_name}' causes an implied feature to be defined."
+ )
+ }
+ },
+ Self::TargetType => {
+ writeln!(stderr, "Error with Cargo.toml: 'target' is not a table.")
+ }
+ Self::TargetPlatformType(name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: 'target.{name}' is not a table."
+ )
+ }
+ Self::TagetPlatformDependencies(name, e) => match e {
+ DependenciesErr::Type(table_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: 'target.{name}.{table_name}' is not a table."
+ )
+ }
+ DependenciesErr::Name(table_name, dep_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}' is not a valid dependency name."
+ )
+ }
+ DependenciesErr::DependencyType(table_name, dep_name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}' is not a string or table."
+ ),
+ DependenciesErr::OptionalType(table_name, dep_name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}.optional' is not a Boolean."
+ ),
+ DependenciesErr::ImpliedFeature(table_name, dep_name) => {
+ writeln!(
+ stderr,
+ "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}' causes an implied feature to be defined."
+ )
+ }
+ },
+ Self::InvalidDependency(name, dep_name) => writeln!(
+ stderr,
+ "Error with Cargo.toml: 'features.{name}' contains '{dep_name}' which is neither a feature nor dependency."
+ ),
+ }
+ }
+}
+/// Error returned from parsing Cargo.toml.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) enum ManifestErr {
+ /// Variant returned when Cargo.toml is not valid TOML.
+ Toml(TomlErr),
+ /// Variant returned when extracting `package`.
+ Package(PackageErr),
+ /// Variant returned when extracting `features`.
+ Features(FeaturesErr),
+ /// Variant returned when extracting dependencies in order to add implied features.
+ ImpliedFeatures(ImpliedFeaturesErr),
+}
+impl ManifestErr {
+ /// Writes `self` to `stderr`.
+ pub(crate) fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ match self {
+ Self::Toml(e) => write!(stderr, "Cargo.toml is not valid TOML: {e}"),
+ Self::Package(e) => e.write(stderr),
+ Self::Features(e) => e.write(stderr),
+ Self::ImpliedFeatures(e) => e.write(stderr),
+ }
+ }
+}
+/// Error when there are too many features to create the power set.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) struct TooManyFeaturesErr;
+impl TooManyFeaturesErr {
+ /// Writes `self` to `stderr`.
+ pub(crate) fn write(mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ writeln!(
+ stderr,
+ "There are too many features in Cargo.toml. The max number of features allowed is dependent on the host architecture. Specifically, the number of features must not exceed the width of a pointer in bits."
+ )
+ }
+}
+/// Parses `val` as a `u64` in decimal notation without leading 0s.
+///
+/// # Errors
+///
+/// Errors iff `val` is not a valid `u64` in decimal notation without leading 0s.
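+///
+/// # Examples
+///
+/// `parse_int("0")` and `parse_int("42")` succeed while `parse_int("")`, `parse_int("042")`, and
+/// `parse_int("1.2")` err.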
+pub(crate) fn parse_int(val: &str) -> Result<u64, ()> {
+ val.as_bytes().first().ok_or(()).and_then(|fst| {
+ if *fst == b'0' {
+ if val.len() == 1 { Ok(0) } else { Err(()) }
+ } else {
+ val.parse().map_err(|_e| ())
+ }
+ })
+}
+/// MSRV in Cargo.toml.
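+///
+/// Accepted forms are `<major>`, `<major>.<minor>`, and `<major>.<minor>.<patch>` (e.g., `1`, `1.88`, and
+/// `1.88.0`) where each component is an unsigned 64-bit integer without leading 0s.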
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) struct Msrv {
+ /// Major version.
+ major: u64,
+ /// Minor version.
+ minor: Option<u64>,
+ /// Patch version.
+ patch: Option<u64>,
+}
+impl Msrv {
+ /// Extracts `"package"` from `toml` before extracting `"rust-version"` from it.
+ #[expect(unsafe_code, reason = "comments justify their correctness")]
+ fn extract_from_toml(
+ toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ ) -> Result<Option<Self>, PackageErr> {
+ toml.get("package")
+ .ok_or(PackageErr::Missing)
+ .and_then(|pack_span| match *pack_span.get_ref() {
+ DeValue::Table(ref package) => {
+ package
+ .get("rust-version")
+ .map_or(Ok(None), |msrv_span| match *msrv_span.get_ref() {
+ DeValue::String(ref msrv) => {
+ let mut iter = msrv.as_bytes().split(|b| *b == b'.');
+ iter.next().ok_or(PackageErr::Msrv).and_then(|fst| {
+ // SAFETY:
+ // The original input is a `str` and we split on `b'.'` which is a single-byte
+ // UTF-8 code unit; thus we don't have to worry about splitting a multi-byte
+ // UTF-8 code unit.
+ let major_utf8 = unsafe { str::from_utf8_unchecked(fst) };
+ parse_int(major_utf8)
+ .map_err(|()| PackageErr::Msrv)
+ .and_then(|major| {
+ iter.next().map_or_else(
+ || {
+ Ok(Some(Self {
+ major,
+ minor: None,
+ patch: None,
+ }))
+ },
+ |snd| {
+ // SAFETY:
+ // The original input is a `str` and we split on `b'.'` which is
+ // a single-byte UTF-8 code unit; thus we don't have to worry
+ // about splitting a multi-byte UTF-8 code unit.
+ let minor_utf8 = unsafe { str::from_utf8_unchecked(snd) };
+ parse_int(minor_utf8)
+ .map_err(|()| PackageErr::Msrv)
+ .and_then(|minor_val| {
+ iter.next().map_or_else(
+ || {
+ Ok(Some(Self {
+ major,
+ minor: Some(minor_val),
+ patch: None,
+ }))
+ },
+ |lst| {
+ // SAFETY:
+ // The original input is a `str` and we split on
+ // `b'.'` which is a single-byte UTF-8 code
+ // unit; thus we don't have to worry about
+ // splitting a multi-byte UTF-8 code unit.
+ let patch_utf8 =
+ unsafe { str::from_utf8_unchecked(lst) };
+ parse_int(patch_utf8).map_err(|()| PackageErr::Msrv).and_then(
+ |patch_val| {
+ iter.next().map_or_else(
+ || {
+ Ok(Some(Self {
+ major,
+ minor: Some(minor_val),
+ patch: Some(patch_val),
+ }))
+ },
+ |_| Err(PackageErr::Msrv),
+ )
+ },
+ )
+ },
+ )
+ })
+ },
+ )
+ })
+ })
+ }
+ DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_)
+ | DeValue::Table(_) => Err(PackageErr::InvalidMsrvType),
+ })
+ }
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_) => Err(PackageErr::InvalidType),
+ })
+ }
+ /// Returns `Some` containing the MSRV with `'+'` prepended iff the stable or default toolchain is semantically
+ /// greater than `self`; otherwise returns `None`.
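+    ///
+    /// For example, if Cargo.toml declares `rust-version = "1.88"`, the stable toolchain reports `1.90.0`, and
+    /// `cargo +1.88 -V` reports `1.88.0`, then `Ok(Some("+1.88.0".to_owned()))` is returned.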
+ pub(crate) fn compare_to_other(
+ &self,
+ default: bool,
+ rustup_home: Option<&Path>,
+ cargo_path: &Path,
+ cargo_home: Option<&Path>,
+ ) -> Result<Option<String>, Box<ToolchainErr>> {
+ if default {
+ Toolchain::Default
+ } else {
+ Toolchain::Stable
+ }
+ .get_version(rustup_home, cargo_path, cargo_home)
+ .and_then(|stable_dflt_version| {
+ match self.major.cmp(&stable_dflt_version.major) {
+ Ordering::Less => Ok(true),
+ Ordering::Equal => self.minor.map_or_else(
+ || Ok(false),
+ |min| match min.cmp(&stable_dflt_version.minor) {
+ Ordering::Less => Ok(true),
+ Ordering::Equal => self.patch.map_or_else(
+ || Ok(false),
+ |pat| match pat.cmp(&stable_dflt_version.patch) {
+ Ordering::Less => Ok(true),
+ Ordering::Equal => Ok(false),
+ Ordering::Greater => Err(Box::new(ToolchainErr::MsrvTooHigh)),
+ },
+ ),
+ Ordering::Greater => Err(Box::new(ToolchainErr::MsrvTooHigh)),
+ },
+ ),
+ Ordering::Greater => Err(Box::new(ToolchainErr::MsrvTooHigh)),
+ }
+ .and_then(|get_msrv| {
+ if get_msrv {
+ Toolchain::Msrv(&self.minor.map_or_else(
+ || format!("+{}", self.major),
+ |min| {
+ self.patch.map_or_else(
+ || format!("+{}.{min}", self.major),
+ |pat| format!("+{}.{min}.{pat}", self.major),
+ )
+ },
+ ))
+ .get_version(rustup_home, cargo_path, cargo_home)
+ .and_then(|msrv_version| {
+ if msrv_version.major == self.major
+ && self.minor.is_none_or(|minor| {
+ msrv_version.minor == minor
+ && self.patch.is_none_or(|patch| msrv_version.patch == patch)
+ })
+ {
+ Ok(Some(format!(
+ "+{}.{}.{}",
+ msrv_version.major, msrv_version.minor, msrv_version.patch
+ )))
+ } else {
+ Err(Box::new(ToolchainErr::MsrvNotCompatibleWithInstalledMsrv(
+ msrv_version,
+ )))
+ }
+ })
+ } else {
+ Ok(None)
+ }
+ })
+ })
+ }
+}
+/// Returns `true` iff `nodes` is pairwise disconnected.
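+///
+/// For example, given features `a`, `b = ["a"]`, and `c`, the nodes `["b", "c"]` are pairwise disconnected
+/// whereas `["a", "b"]` are not since `b` transitively enables `a`.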
+#[expect(
+ clippy::arithmetic_side_effects,
+ clippy::indexing_slicing,
+ reason = "comment justifies correctness"
+)]
+fn pairwise_disconnected(nodes: &[&str], features: &[(String, Vec<String>)]) -> bool {
+ /// `panic`s with a static message about the existence of a bug in [`Manifest::deserialize`].
+ #[expect(clippy::unreachable, reason = "want to crash when there is a bug")]
+ fn impossible<T>() -> T {
+ unreachable!(
+ "there is a bug in manifest::Manifest::deserialize where feature dependencies are not only features."
+ )
+ }
+    /// Returns `true` iff `feature_deps` contains `feature` directly or indirectly via the dependencies
+    /// of the features it contains.
+ fn contains(
+ feature_deps: &[String],
+ feature: &str,
+ features: &[(String, Vec<String>)],
+ ) -> bool {
+ feature_deps.iter().any(|feat| {
+ feat == feature
+ || contains(
+ &features
+ .iter()
+ .find(|val| val.0 == *feat)
+ .unwrap_or_else(impossible)
+ .1,
+ feature,
+ features,
+ )
+ })
+ }
+ !nodes.iter().enumerate().any(|(idx, feature)| {
+ let feature_info = &features
+ .iter()
+ .find(|val| val.0 == **feature)
+ .unwrap_or_else(impossible)
+ .1;
+ // `idx < nodes.len()`, so overflow is not possible and indexing is fine.
+ nodes[idx + 1..].iter().any(|feat| {
+ contains(feature_info, feat, features)
+ || contains(
+ features
+ .iter()
+ .find(|val| val.0 == **feat)
+ .unwrap_or_else(impossible)
+ .1
+ .as_slice(),
+ feature,
+ features,
+ )
+ })
+ })
+}
+/// Power set of [`Features`] returned from [`Features::power_set`].
+///
+/// Note this is technically not the power set of features since semantically equivalent sets are ignored.
+///
+/// The last set iterated will always be the empty set. If no features in [`Features`] depend on another
+/// feature, then this will always return the entire set of features first; otherwise the entire set will
+/// never be returned. The expected cardinality of the iterated sets decreases as iteration proceeds; thus
+/// while a set _A_ iterated after a set _B_ can still have larger cardinality than _B_, the expected
+/// cardinality of _A_ is smaller.
+///
+/// The reason we attempt, but don't guarantee, that the first set iterated is semantically equivalent to the
+/// original set is to take advantage of the parallel compilation that occurs. Typically the more features one
+/// enables, the more dependencies and functionality are added. By compiling the code that maximizes this first,
+/// we take better advantage of how code is compiled; in contrast, if we compiled fewer features first,
+/// subsequent compilations with more features would still have to happen. We don't guarantee this though since
+/// it slightly complicates the code; instead we iterate based on the _expected_ cardinality in descending
+/// order.
+///
+/// We don't implement `Iterator` since we want to re-use the same `String` that represents the set we are
+/// returning.
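+///
+/// For example, with features `a = ["b"]`, `b = ["c", "d"]`, `c = []`, and `d = []`, the sets iterated are
+/// `c,d`, `d`, `c`, `b`, `a`, and finally the empty set: every candidate set that contains a feature alongside
+/// one of its (transitive) feature dependencies is skipped.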
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) struct PowerSet<'a> {
+ /// The set of features.
+ feats: &'a [(String, Vec<String>)],
+ /// `true` iff there are more sets to iterate.
+ has_remaining: bool,
+ /// `true` iff `feats` has redundant features; thus requiring us to check if a given set should be returned.
+ check_overlap: bool,
+ /// The current element of the power set that we are to return.
+ ///
+ /// This gets decremented as we iterate sets.
+ idx: usize,
+ /// Intermediate buffer we use to check if a set contains redundant features.
+ buffer: Vec<&'a str>,
+ /// The set we return.
+ ///
+ /// This is of the form `"<feat_1>,<feat_2>,...,<feat_n>"`.
+ set: String,
+}
+impl<'a> PowerSet<'a> {
+ /// Max cardinality of a set we allow to take the power set of.
+    // usize::MAX = 2^usize::BITS - 1 >= usize::BITS since usize::BITS >= 16;
+ // thus `usize::BITS as usize` is free from truncation.
+ #[expect(clippy::as_conversions, reason = "comment justifies correctness")]
+ const MAX_SET_LEN: usize = usize::BITS as usize;
+    /// Constructs `Self` based on `features`.
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ fn new(features: &'a Features) -> Result<Self, TooManyFeaturesErr> {
+ let len = features.0.len();
+ if len <= Self::MAX_SET_LEN {
+ let mut buffer = Vec::with_capacity(len);
+ features.0.iter().fold((), |(), key| {
+ buffer.push(key.0.as_ref());
+ });
+ let check_overlap = !pairwise_disconnected(buffer.as_slice(), &features.0);
+ Ok(Self {
+ feats: &features.0,
+ has_remaining: true,
+ check_overlap,
+ // `1 << len` overflows iff `len` is `Self::MAX_SET_LEN`; thus we must treat that
+ // separately.
+ idx: if len == Self::MAX_SET_LEN {
+ usize::MAX
+ } else {
+ // Won't overflow since `len < Self::MAX_SET_LEN`.
+ // Won't underflow since `1 << len >= 1`.
+ (1 << len) - 1
+ },
+ buffer,
+                // This won't overflow since `usize::MAX = 2^usize::BITS - 1`, `usize::BITS >= 16`, and the max
+                // value of `len` is `usize::BITS`:
+                // 16 * usize::BITS < 2^usize::BITS for `usize::BITS > 6`.
+ set: String::with_capacity(len << 4),
+ })
+ } else {
+ Err(TooManyFeaturesErr)
+ }
+ }
+ /// Resets `self` such that iteration returns to the beginning.
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ pub(crate) const fn reset(&mut self) {
+ let len = self.feats.len();
+ // We start on index 2^n - 1.
+ // We don't allow construction when `self.feats.len() > usize::BITS`; thus
+ // 2^n overflows iff `self.feats.len()` is `Self::MAX_SET_LEN`.
+ // We treat that separately.
+ self.idx = if len == Self::MAX_SET_LEN {
+ usize::MAX
+ } else {
+ // We verified that `len <= usize::BITS`; thus
+ // this won't overflow nor underflow since 2^0 = 1.
+ (1 << len) - 1
+ };
+ self.has_remaining = true;
+ }
+ /// Writes the next element into `self.buffer` even if the set contains overlapping features.
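+    ///
+    /// Feature `i` of `self.feats` is included iff bit `i` of `self.idx` is set; e.g., with four features and
+    /// `self.idx == 0b1011`, features `0`, `1`, and `3` are written to `self.buffer`.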
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ fn inner_next_set(&mut self) {
+ self.buffer.clear();
+ self.feats.iter().enumerate().fold((), |(), (i, feat)| {
+ if self.idx & (1 << i) != 0 {
+ self.buffer.push(feat.0.as_str());
+ }
+ });
+ if self.idx == 0 {
+ self.has_remaining = false;
+ } else {
+ // This won't underflow since `idx > 0`.
+ self.idx -= 1;
+ }
+ }
+ /// Transforms the current element into its string form.
+ fn current_set(&mut self) {
+ self.set.clear();
+ self.buffer.iter().fold((), |(), s| {
+ self.set.push_str(s);
+ self.set.push(',');
+ });
+ // We remove the trailing comma. In the event `self.set` is empty, this does nothing.
+ _ = self.set.pop();
+ }
+ /// Returns the next set.
+ ///
+ /// This returns `None` iff there are no more sets to return. It will continue to return `None`
+ /// unless [`Self::reset`] is called.
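+    ///
+    /// A typical caller loops until exhaustion, e.g. `while let Some(set) = power_set.next_set() { /* use set */ }`.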
+ pub(crate) fn next_set(&mut self) -> Option<&str> {
+ if self.has_remaining {
+ if self.check_overlap {
+ while self.has_remaining {
+ self.inner_next_set();
+ if pairwise_disconnected(self.buffer.as_slice(), self.feats) {
+ self.current_set();
+ return Some(&self.set);
+ }
+ }
+ None
+ } else {
+ self.inner_next_set();
+ self.current_set();
+ Some(&self.set)
+ }
+ } else {
+ None
+ }
+ }
+}
+/// Dependency table.
+enum DepTable {
+ /// Library dependencies.
+ Dependencies,
+ /// Build dependencies.
+ BuildDependencies,
+}
+impl DepTable {
+ /// Returns the string representation of `self`.
+ const fn into_str(self) -> &'static str {
+ match self {
+ Self::Dependencies => "dependencies",
+ Self::BuildDependencies => "build-dependencies",
+ }
+ }
+}
+/// `"dep:"`.
+const DEP: &[u8; 4] = b"dep:";
+/// Returns `true` iff `utf8` begins with [`DEP`].
+fn is_feature_dependency_a_dependency(utf8: &[u8]) -> bool {
+ utf8.starts_with(DEP)
+}
+/// Returns `true` iff `name` neither contains a `'/'` nor begins with [`DEP`].
+fn is_feature_dependency_a_feature(name: &str) -> bool {
+ let utf8 = name.as_bytes();
+ !(utf8.contains(&b'/') || is_feature_dependency_a_dependency(utf8))
+}
+/// Features in Cargo.toml.
+///
+/// Note this contains a `Vec` instead of a `HashMap` or `BTreeMap` since we enforce that very few entries exist
+/// due to the exponential nature of generating the power set; thus a `Vec` is the more efficient choice.
+/// The size is so small that we don't even bother sorting and performing binary searches.
+///
+/// The `String` in the tuple represents the name of the feature, and the `Vec` in the tuple represents the
+/// feature dependencies. The feature dependencies will not contain any dependency that contains a `'/'`
+/// but will contain all others. The name of each feature will not contain a `'/'` nor begin with [`DEP`].
+/// Each dependency that is a feature (i.e., does not begin with [`DEP`]) is well-defined (i.e., is a feature
+/// itself) when `false` is passed to [`Self::extract_from_toml`]; when `true` is passed, then feature dependencies
+/// that are features are not verified to be defined in `self` since implied features still have to be added.
+/// Each feature does not contain any cycles nor redundant dependencies.
+///
+/// One must still add implied features caused by any optional dependencies iff `true` is passed to
+/// [`Self::extract_from_toml`]; after which, one needs to remove all dependencies that are not features and verify
+/// all dependencies that are features are defined in `self` in order for [`PowerSet`] to work correctly. A feature
+/// with name `<dependency>` needs to be added with an empty `Vec` of dependencies iff no features exist that have a
+/// dependency whose name is `"dep:<dependency>"` _and_ there doesn't already exist a feature with that name.
+/// An error MUST NOT happen if there is such a feature since different kinds of dependencies can have the
+/// same name. While this will allow for the situation where a feature is defined with the same name as
+/// an implied feature, that won't matter once `cargo` is run since it will error anyway.
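+///
+/// For example, with an optional dependency `foo`, no feature named `foo`, and no feature dependency named
+/// `"dep:foo"` anywhere, a feature `("foo", [])` must be added before [`PowerSet`] will work correctly.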
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) struct Features(Vec<(String, Vec<String>)>);
+impl Features {
+ /// Returns `true` iff `self` contains a feature named `"default"`.
+ pub(crate) fn contains_default(&self) -> bool {
+ self.0.iter().any(|f| f.0 == "default")
+ }
+ /// `panic`s with a static message about the existence of a bug in `validate_dependencies`.
+ ///
+    /// This is used in lieu of `unreachable` in `validate_dependencies`, `check_redundant_dependencies`,
+    /// and `extract_feature_dependencies`.
+ #[expect(clippy::unreachable, reason = "want to crash when there is a bug")]
+ fn impossible<T>() -> T {
+ unreachable!("there is a bug in manifest::Features::validate_dependencies.")
+ }
+    /// Verifies the dependencies associated with `feature` are valid.
+ ///
+    /// `dependencies` is assumed to be the value associated with `feature`, and the pair is checked to be
+    /// a key-value pair from `features` iff `allow_implied_features`. This verifies the following:
+    ///
+    /// * `dependencies` is an array that only contains strings.
+    /// * Each string in `dependencies` that neither contains a `'/'` nor begins with [`DEP`] is a key
+    ///   in `features`; this is only verified when `allow_implied_features` is `false`.
+    /// * Neither `feature` nor any dependency in `dependencies` that is a feature is cyclic.
+ ///
+ /// `cycle_detection` MUST contain only `feature` when this is called externally.
+ ///
+ /// This MUST only be called by itself or [`Self::extract_feature_dependencies`].
+ fn validate_dependencies<'a>(
+ feature: &str,
+ dependencies: &'a DeValue<'_>,
+ features: &'a Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ cycle_detection: &mut Vec<&'a str>,
+ allow_implied_features: bool,
+ ) -> Result<(), FeatureDependenciesErr> {
+ match *dependencies {
+ DeValue::Array(ref info) => {
+ info.iter()
+ .try_fold((), |(), dep_span| match *dep_span.get_ref() {
+ DeValue::String(ref dep_name) => {
+ if is_feature_dependency_a_feature(dep_name) {
+ if cycle_detection.contains(&dep_name.as_ref()) {
+ Err(FeatureDependenciesErr::CyclicFeature(
+ dep_name.clone().into_owned(),
+ ))
+ } else if let Some(next_feature) = features.get(dep_name.as_ref()) {
+ cycle_detection.push(dep_name);
+ Self::validate_dependencies(
+ dep_name,
+ next_feature.get_ref(),
+ features,
+ cycle_detection,
+ allow_implied_features,
+ )
+ .map(|()| {
+ // We require calling code to add `feature`
+ // before calling this function. We always
+ // add the most recent feature dependency.
+ // Therefore this is not empty.
+ _ = cycle_detection.pop().unwrap_or_else(Self::impossible);
+ })
+ } else if allow_implied_features {
+ // `dep_name` may be an implied feature which we have yet to add.
+ Ok(())
+ } else {
+ Err(FeatureDependenciesErr::InvalidDependency(
+ cycle_detection
+ .pop()
+ // We require calling code to add `feature`
+ // before calling this function. We always
+ // add the most recent feature dependency.
+ // Therefore this is not empty.
+ .unwrap_or_else(Self::impossible)
+ .to_owned(),
+ dep_name.clone().into_owned(),
+ ))
+ }
+ } else {
+ Ok(())
+ }
+ }
+ DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_)
+ | DeValue::Table(_) => Err(FeatureDependenciesErr::InvalidDependencyType(
+ feature.to_owned(),
+ )),
+ })
+ }
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Table(_) => Err(FeatureDependenciesErr::InvalidFeatureType(
+ feature.to_owned(),
+ )),
+ }
+ }
+ /// Verifies there are no redundant dependencies that are features in `dependencies`.
+ ///
+ /// Returns `true` iff there is a redundant dependency.
+ ///
+ /// This must be called _after_ `validate_dependencies` is called on the same arguments.
+ fn check_redundant_dependencies(
+ feature: &str,
+ dependencies: &DeArray<'_>,
+ features: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ allow_implied_features: bool,
+ ) -> bool {
+ dependencies
+ .iter()
+ .any(|dep_span| match *dep_span.get_ref() {
+ DeValue::String(ref dep_name) => {
+ is_feature_dependency_a_feature(dep_name)
+ && (feature == dep_name
+ || features.get(dep_name.as_ref()).map_or_else(
+ || {
+ if allow_implied_features {
+ false
+ } else {
+ // We require `validate_dependencies` to be called
+ // before this function which ensures all features
+ // recursively in the `dependencies` are defined as
+ // features iff `!allow_implied_features`.
+ Self::impossible()
+ }
+ },
+ |next_feature_span| {
+ Self::check_redundant_dependencies(
+ feature,
+ next_feature_span
+ .get_ref()
+ .as_array()
+ // We require `validate_dependencies` to be called
+ // before this function which ensures all feature
+ // dependencies recursively are arrays.
+ .unwrap_or_else(Self::impossible),
+ features,
+ allow_implied_features,
+ )
+ },
+ ))
+ }
+ // We require `validate_dependencies` to be called
+ // before this function which ensures all dependencies
+                // recursively in `dependencies` are strings.
+ DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_)
+ | DeValue::Table(_) => Self::impossible(),
+ })
+ }
+ /// Extracts the feature dependencies associated with `feature`.
+ ///
+    /// `dependencies` is assumed to be the value associated with `feature`, and the pair is checked to be
+    /// a key-value pair from `features` iff `allow_implied_features`. This verifies the following:
+ ///
+ /// * `dependencies` is an array that only contains strings.
+    /// * Each string in `dependencies` that neither contains a `'/'` nor begins with [`DEP`] is a key
+    ///   in `features`; this is only verified when `allow_implied_features` is `false`.
+ /// * There is no redundant feature in `dependencies` where "redundant" means the following:
+ /// * There are no cycles (e.g., feature = \["feature"] or feature = \["a"], a = \["b"], b = \["a"]).
+    ///   * No unnecessary dependencies that are features (e.g., feature = \["a", "a"] or feature = \["a", "b"],
+    ///     a = \["b"], b = \[]).
+    ///   * There are no duplicate dependencies in `dependencies` that begin with [`DEP`].
+ ///
+ /// Note since all dependencies that contain a `'/'` are ignored, there may be duplicates of them.
+ /// Also when checking for redundant features in `dependencies`, _only_ features are considered; thus
+ /// something like the following is allowed: feature = \["dep:a", "a"], a = \["dep:a"]
+ ///
+ /// This must only be called from [`Self::extract_from_toml`].
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ fn extract_feature_dependencies<'a>(
+ feature: &'a str,
+ dependencies: &'a DeValue<'_>,
+ features: &'a Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ cycle_buffer: &mut Vec<&'a str>,
+ allow_implied_features: bool,
+ ) -> Result<Vec<String>, FeatureDependenciesErr> {
+ // `Self::validate_dependencies` requires `cycle_buffer` to contain, and only contain, `feature`.
+ cycle_buffer.clear();
+ cycle_buffer.push(feature);
+ Self::validate_dependencies(feature, dependencies, features, cycle_buffer, allow_implied_features).and_then(|()| {
+ // `validate_dependencies` ensures `dependencies` is an array.
+ let deps = dependencies.as_array().unwrap_or_else(Self::impossible);
+ let mut vec_deps = Vec::with_capacity(deps.len());
+ deps.iter().enumerate().try_fold((), |(), (idx, dep_span)| match *dep_span.get_ref() {
+ DeValue::String(ref dep_name) => {
+ let dep_utf8 = dep_name.as_bytes();
+ if dep_utf8.contains(&b'/') {
+ Ok(())
+ } else if is_feature_dependency_a_dependency(dep_utf8) {
+ if vec_deps.iter().any(|d| d == dep_name) {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ } else {
+ vec_deps.push(dep_name.clone().into_owned());
+ Ok(())
+ }
+ } else if let Some(next_feature_span) = features.get(dep_name.as_ref()) {
+ // `validate_dependencies` ensures all feature
+ // dependencies recursively are arrays.
+ let feat_info = next_feature_span.get_ref().as_array().unwrap_or_else(Self::impossible);
+ // `idx < deps.iter().len()`; thus this won't overflow.
+ deps.iter().skip(idx + 1).try_fold((), |(), next_dep_span| match *next_dep_span.get_ref() {
+ DeValue::String(ref next_dep_name) => {
+ if is_feature_dependency_a_feature(next_dep_name) {
+ if dep_name == next_dep_name {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ } else if Self::check_redundant_dependencies(next_dep_name, feat_info, features, allow_implied_features) {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), next_dep_name.clone().into_owned()))
+ } else {
+ features.get(next_dep_name.as_ref()).map_or_else(
+ || {
+ if allow_implied_features {
+ Ok(())
+ } else {
+ // `validate_dependencies` ensures all features
+ // recursively in the feature dependencies are defined
+ // as features iff `!allow_implied_features`.
+ Self::impossible()
+ }
+ },
+ |next_dep_feature_span| {
+ // `validate_dependencies` ensures all feature
+ // dependencies recursively are arrays.
+ if Self::check_redundant_dependencies(dep_name, next_dep_feature_span.get_ref().as_array().unwrap_or_else(Self::impossible), features, allow_implied_features) {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ } else {
+ Ok(())
+ }
+ }
+ )
+ }
+ } else {
+ Ok(())
+ }
+ }
+ // `validate_dependencies` ensures all dependencies recursively in
+ // `dependencies` are strings.
+ DeValue::Integer(_) | DeValue::Float(_) | DeValue::Boolean(_) | DeValue::Datetime(_) | DeValue::Array(_) | DeValue::Table(_) => Self::impossible(),
+ }).map(|()| vec_deps.push(dep_name.clone().into_owned()))
+ } else if allow_implied_features {
+ vec_deps.push(dep_name.clone().into_owned());
+ Ok(())
+ } else {
+ // `validate_dependencies` ensures all features
+ // recursively in `dependencies` are defined as features
+ // iff `!allow_implied_features`.
+ Self::impossible()
+ }
+ }
+ // `validate_dependencies` ensures all dependencies recursively in `dependencies` are strings.
+ DeValue::Integer(_) | DeValue::Float(_) | DeValue::Boolean(_) | DeValue::Datetime(_) | DeValue::Array(_) | DeValue::Table(_) => Self::impossible(),
+ }).map(|()| vec_deps)
+ })
+ }
+ /// Extracts `"features"` from `toml`.
+ fn extract_from_toml(
+ toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ allow_implied_features: bool,
+ ) -> Result<Self, FeaturesErr> {
+ toml.get("features").map_or_else(
+ || Ok(Self(Vec::new())),
+ |features_span| match *features_span.get_ref() {
+ DeValue::Table(ref features) => {
+ let mut cycle_buffer = Vec::with_capacity(features.len());
+ let mut feats = Vec::with_capacity(features.len());
+ features
+ .iter()
+ .try_fold((), |(), (name_span, feature_span)| {
+ let name = name_span.get_ref();
+ if is_feature_dependency_a_feature(name) {
+ Self::extract_feature_dependencies(
+ name,
+ feature_span.get_ref(),
+ features,
+ &mut cycle_buffer,
+ allow_implied_features,
+ )
+ .map_err(FeaturesErr::FeatureDependencies)
+ .map(|deps| {
+ feats.push((name.clone().into_owned(), deps));
+ })
+ } else {
+ Err(FeaturesErr::InvalidName(name.clone().into_owned()))
+ }
+ })
+ .map(|()| Self(feats))
+ }
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_) => Err(FeaturesErr::InvalidType),
+ },
+ )
+ }
+ /// Extracts optional dependencies and adds their corresponding implied feature to `self` iff
+ /// `allow_implied_features` and it's appropriate to do so.
+ ///
+ /// This must only be called from [`Self::add_implied_features`].
+ fn add_optional_dependencies(
+ &mut self,
+ toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ table: DepTable,
+ allow_implied_features: bool,
+ ) -> Result<(), DependenciesErr> {
+ let table_name = table.into_str();
+ toml.get(table_name)
+ .map_or(Ok(()), |deps_span| match *deps_span.get_ref() {
+ DeValue::Table(ref deps) => deps.iter().try_fold((), |(), dep_span| {
+ let dep_name = dep_span.0.get_ref();
+ if is_feature_dependency_a_feature(dep_name) {
+ match *dep_span.1.get_ref() {
+ DeValue::String(_) => Ok(()),
+ DeValue::Table(ref dep_info) => {
+ dep_info.get("optional").map_or(Ok(()), |opt_span| {
+ match *opt_span.get_ref() {
+ DeValue::Boolean(ref optional) => {
+ if *optional {
+ self.0
+ .iter()
+ .try_fold((), |(), feat| {
+ if feat.0 == *dep_name {
+ // We already have a feature with the same name,
+ // so we don't need to continue.
+ Err(())
+ } else if feat.1.iter().any(|feat_dep| {
+ let dep_name_utf8 = dep_name.as_bytes();
+ let feat_dep_utf8 = feat_dep.as_bytes();
+ dep_name_utf8 == feat_dep_utf8
+ || feat_dep_utf8
+ .split_at_checked(DEP.len())
+ .is_some_and(|(pref, rem)| {
+ pref == DEP
+ && dep_name_utf8 == rem
+ })
+ }) {
+ // The feature dependencies contain
+ // `"dep:<dep_name>"` or `<dep_name>`. Either way,
+ // we don't need to add an implied feature.
+ Err(())
+ } else {
+                                                    // The feature name is not `<dep_name>` and none of
+                                                    // the feature dependencies are named
+                                                    // `<dep_name>` nor `"dep:<dep_name>"`; thus we need
+ // to continue our search.
+ Ok(())
+ }
+ })
+ .map_or(Ok(()), |()| {
+ if allow_implied_features {
+ // There is no feature with the name `<dep_name>` nor
+ // are there any features that contain a feature
+ // dependency named `"dep:<dep_name>"`; thus we must
+ // insert an implied feature.
+ self.0.push((
+ dep_name.clone().into_owned(),
+ Vec::new(),
+ ));
+ Ok(())
+ } else {
+ Err(DependenciesErr::ImpliedFeature(
+ table_name,
+ dep_name.clone().into_owned(),
+ ))
+ }
+ })
+ } else {
+ Ok(())
+ }
+ }
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_)
+ | DeValue::Table(_) => Err(DependenciesErr::OptionalType(
+ table_name,
+ dep_name.clone().into_owned(),
+ )),
+ }
+ })
+ }
+ DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_) => Err(DependenciesErr::DependencyType(
+ table_name,
+ dep_name.clone().into_owned(),
+ )),
+ }
+ } else {
+ Err(DependenciesErr::Name(
+ table_name,
+ dep_name.clone().into_owned(),
+ ))
+ }
+ }),
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_) => Err(DependenciesErr::Type(table_name)),
+ })
+ }
+ /// Adds implied features to `self` based on the optional dependencies in `toml`
+ /// iff `allow_implied_features`.
+ fn add_implied_features(
+ &mut self,
+ toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ allow_implied_features: bool,
+ ) -> Result<(), ImpliedFeaturesErr> {
+ self.add_optional_dependencies(toml, DepTable::Dependencies, allow_implied_features)
+ .map_err(ImpliedFeaturesErr::Dependencies)
+ .and_then(|()| {
+ self.add_optional_dependencies(
+ toml,
+ DepTable::BuildDependencies,
+ allow_implied_features,
+ )
+ .map_err(ImpliedFeaturesErr::Dependencies)
+ .and_then(|()| {
+ toml.get("target")
+ .map_or_else(
+ || Ok(()),
+ |target_span| match *target_span.get_ref() {
+ DeValue::Table(ref target) => {
+ target.iter().try_fold((), |(), target_platform_span| {
+ match *target_platform_span.1.get_ref() {
+ DeValue::Table(ref target_platform) => self
+ .add_optional_dependencies(
+ target_platform,
+ DepTable::Dependencies,
+ allow_implied_features,
+ )
+ .map_err(|e| {
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ target_platform_span
+ .0
+ .get_ref()
+ .clone()
+ .into_owned(),
+ e,
+ )
+ })
+ .and_then(|()| {
+ self.add_optional_dependencies(
+ target_platform,
+ DepTable::BuildDependencies,
+ allow_implied_features,
+ )
+ .map_err(|e| {
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ target_platform_span
+ .0
+ .get_ref()
+ .clone()
+ .into_owned(),
+ e,
+ )
+ })
+ }),
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_) => {
+ Err(ImpliedFeaturesErr::TargetPlatformType(
+ target_platform_span
+ .0
+ .get_ref()
+ .clone()
+ .into_owned(),
+ ))
+ }
+ }
+ })
+ }
+ DeValue::String(_)
+ | DeValue::Integer(_)
+ | DeValue::Float(_)
+ | DeValue::Boolean(_)
+ | DeValue::Datetime(_)
+ | DeValue::Array(_) => Err(ImpliedFeaturesErr::TargetType),
+ },
+ )
+ .and_then(|()| {
+ if allow_implied_features {
+ // We don't have to worry about cyclic features or anything other
+ // than the lack of a feature with the name of the feature
+ // dependency.
+ self.0.iter().try_fold((), |(), feature| {
+ feature.1.iter().try_fold((), |(), dep| {
+ // We didn't save any feature dependencies that contain
+ // `'/'`, so we simply have to check if a dependency
+ // begins with [`DEP`] to skip it.
+ if is_feature_dependency_a_dependency(dep.as_bytes())
+ || self
+ .0
+ .iter()
+ .any(|other_feature| other_feature.0 == *dep)
+ {
+ Ok(())
+ } else {
+ Err(ImpliedFeaturesErr::InvalidDependency(
+ feature.0.clone(),
+ dep.clone(),
+ ))
+ }
+ })
+ })
+ } else {
+                            // When `!allow_implied_features`, [`Self::validate_dependencies`]
+ // verifies non-dependency feature dependencies are defined as
+ // features.
+ Ok(())
+ }
+ })
+ })
+ })
+ }
+ /// Returns the power set of `self` with semantically equivalent sets removed.
+ pub(crate) fn power_set(&self) -> Result<PowerSet<'_>, TooManyFeaturesErr> {
+ PowerSet::new(self)
+ }
+}
+/// MSRV and features in `Cargo.toml`.
+#[cfg_attr(test, derive(Debug, PartialEq))]
+pub(crate) struct Manifest {
+ /// The MSRV.
+ msrv: Option<Msrv>,
+ /// The features.
+ features: Features,
+}
+impl Manifest {
+ /// Returns the defined MSRV iff there was one defined.
+ pub(crate) const fn msrv(&self) -> Option<&Msrv> {
+ self.msrv.as_ref()
+ }
+ /// Returns the defined features.
+ ///
+ /// Note the returned `Features` doesn't have any cyclic features, each feature dependency for a given
+ /// feature is a feature itself, and there are no redundant feature dependencies for a given feature.
+ pub(crate) const fn features(&self) -> &Features {
+ &self.features
+ }
+ /// Returns the data needed from `Cargo.toml`.
+ #[expect(
+ clippy::needless_pass_by_value,
+ reason = "want to drop `val` as soon as possible"
+ )]
+ pub(crate) fn from_toml(
+ val: String,
+ allow_implied_features: bool,
+ ) -> Result<Self, ManifestErr> {
+ Map::parse(val.as_str())
+ .map_err(ManifestErr::Toml)
+ .and_then(|span| {
+ let cargo = span.get_ref();
+ Msrv::extract_from_toml(cargo)
+ .map_err(ManifestErr::Package)
+ .and_then(|msrv| {
+ Features::extract_from_toml(cargo, allow_implied_features)
+ .map_err(ManifestErr::Features)
+ .and_then(|mut features| {
+ features
+ .add_implied_features(cargo, allow_implied_features)
+ .map_err(ManifestErr::ImpliedFeatures)
+ .map(|()| {
+ if allow_implied_features {
+ features.0.iter_mut().fold(
+ (),
+ |(), &mut (_, ref mut feat)| {
+ feat.retain(|f| {
+ // We retain only features. Since we didn't save any
+ // dependencies that contain `'/'`, it's slightly faster to just
+ // check that a feature dependency is not a dependency.
+ !is_feature_dependency_a_dependency(
+ f.as_bytes(),
+ )
+ });
+ },
+ );
+ }
+ Self { msrv, features }
+ })
+ })
+ })
+ })
+ }
+}
+#[cfg(test)]
+mod tests {
+ use super::{
+ DependenciesErr, FeatureDependenciesErr, Features, FeaturesErr, ImpliedFeaturesErr,
+ Manifest, ManifestErr, Msrv, PackageErr, PowerSet, TooManyFeaturesErr,
+ };
+ #[expect(
+ clippy::cognitive_complexity,
+ clippy::too_many_lines,
+ reason = "want to test a lot of things"
+ )]
+ #[test]
+ fn cargo_toml() {
+ assert!(
+ Manifest::from_toml("a".to_owned(), false)
+ .map_or_else(|e| matches!(e, ManifestErr::Toml(_)), |_| false)
+ );
+ assert_eq!(
+ Manifest::from_toml(String::new(), false),
+ Err(ManifestErr::Package(PackageErr::Missing))
+ );
+ assert_eq!(
+ Manifest::from_toml("[' package']".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Missing))
+ );
+ assert_eq!(
+ Manifest::from_toml("['package ']".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Missing))
+ );
+ assert_eq!(
+ Manifest::from_toml("package=2".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::InvalidType))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=2".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::InvalidMsrvType))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"a\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"1.00.0\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"1..0\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"1.\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"01.0.0\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"1.0.0.1\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[package]\nrust-version=\"111111111111111111111111.2.3\"".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[package]\nrust-version=\"1.0.0-nightly\"".to_owned(),
+ false
+ ),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"-1.0.0\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\" 1.0.0\"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"1.0.0 \"".to_owned(), false),
+ Err(ManifestErr::Package(PackageErr::Msrv))
+ );
+ assert_eq!(
+ Manifest::from_toml("features=2\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::InvalidType))
+ );
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"/\"=[]\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::InvalidName(
+ "/".to_owned()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"dep:\"=[]\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::InvalidName(
+ "dep:".to_owned()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"\"=2\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::InvalidFeatureType(String::new())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"\"=[true]\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::InvalidDependencyType(String::new())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"\"=[\"foo\"]\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::InvalidDependency(String::new(), "foo".to_owned())
+ )))
+ );
+ // Feature dependencies can't be implied features when implied features are forbidden.
+ assert_eq!(
+ Manifest::from_toml(
+ "[dependencies]\nfoo={\"optional\"=true}\n[features]\n\"\"=[\"foo\"]\n[package]"
+ .to_owned(),
+ false
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::InvalidDependency(String::new(), "foo".to_owned())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"\"=[\"\"]\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::CyclicFeature(String::new())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"a\"]\na=[\"\"]\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::CyclicFeature(String::new())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"a\"]\na=[\"b\"]\nb=[\"a\"]\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::CyclicFeature("a".to_owned())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"a\"]\na=[\"c\",\"b\"]\nb=[\"a\"]\nc=[]\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::CyclicFeature("a".to_owned())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[]\na=[\"c\",\"b\"]\nb=[\"a\"]\nc=[]\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::CyclicFeature("a".to_owned())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"a\",\"b\"]\na=[\"b\"]\nb=[]\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::RedundantDependency(String::new(), "b".to_owned())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"a\",\"a\"]\na=[]\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::RedundantDependency(String::new(), "a".to_owned())
+ )))
+ );
+ // Duplicate `"dep:"` feature dependencies error.
+ assert_eq!(
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"dep:\",\"dep:\"]\na=[]\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::RedundantDependency(String::new(), "dep:".to_owned())
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml("target=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(ImpliedFeaturesErr::TargetType))
+ );
+ assert_eq!(
+ Manifest::from_toml("dependencies=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Type("dependencies"))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("build-dependencies=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Type("build-dependencies"))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("[dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
+ "dependencies",
+ "dep:".to_owned()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("[dependencies]\n\"/\"=\"\"\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
+ "dependencies",
+ "/".to_owned()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[build-dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
+ "build-dependencies",
+ "dep:".to_owned()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[build-dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
+ "build-dependencies",
+ "/".to_owned()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("[dependencies]\n\"\"=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::DependencyType(
+ "dependencies",
+ String::new()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("[build-dependencies]\n\"\"=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::DependencyType(
+ "build-dependencies",
+ String::new()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::OptionalType(
+ "dependencies",
+ String::new()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[build-dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::OptionalType(
+ "build-dependencies",
+ String::new()
+ ))
+ ))
+ );
+ // Implied features are disallowed iff `!allow_implied_features`.
+ assert_eq!(
+ Manifest::from_toml(
+ "[dependencies]\nfoo={\"optional\"=true}\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::ImpliedFeature(
+ "dependencies",
+ "foo".to_owned()
+ ))
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("[target]\n\"\"=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TargetPlatformType(String::new())
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml("[target.\"\"]\ndependencies=2\n[package]".to_owned(), false),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::Type("dependencies")
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\"]\nbuild-dependencies=2\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::Type("build-dependencies")
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::Name("dependencies", "/".to_owned())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::Name("dependencies", "dep:".to_owned())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".build-dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::Name("build-dependencies", "/".to_owned())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".build-dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::Name("build-dependencies", "dep:".to_owned())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".dependencies]\n\"\"=false\n[package]".to_owned(),
+ false
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::DependencyType("dependencies", String::new())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".build-dependencies]\n\"\"=false\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::DependencyType("build-dependencies", String::new())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::OptionalType("dependencies", String::new())
+ )
+ ))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[target.\"\".build-dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ false,
+ ),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TagetPlatformDependencies(
+ String::new(),
+ DependenciesErr::OptionalType("build-dependencies", String::new())
+ )
+ ))
+ );
+ // An invalid dependency error occurs later when we `allow_implied_features` since
+ // implied features aren't added until after feature extraction.
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"\"=[\"foo\"]\n[package]".to_owned(), true),
+ Err(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::InvalidDependency(String::new(), "foo".to_owned())
+ ))
+ );
+ // In contrast, above would have erred sooner if `!allow_implied_features`.
+ assert_eq!(
+ Manifest::from_toml("[features]\n\"\"=[\"foo\"]\n[package]".to_owned(), false),
+ Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
+ FeatureDependenciesErr::InvalidDependency(String::new(), "foo".to_owned())
+ )))
+ );
+ // Even if we forbid implied features, we don't error when a feature is defined
+ // with the same name of an implied feature. This is due to simplicity in code
+ // and the fact that `cargo` will error anyway.
+ //
+ // For example once `cargo` is invoked, an error will occur due to duplicate features:
+        // the explicit feature `foo` and the implied feature from the dependency `foo`.
+ assert_eq!(
+ Manifest::from_toml(
+ "[dependencies]\nfoo={\"optional\"=true}\n[features]\nfoo=[]\n[package]".to_owned(),
+ false
+ ),
+ Ok(Manifest {
+ msrv: None,
+ features: Features(vec![("foo".to_owned(), Vec::new())]),
+ })
+ );
+ // Allow empty `package`.
+ assert_eq!(
+ Manifest::from_toml("[package]".to_owned(), false),
+ Ok(Manifest {
+ msrv: None,
+ features: Features(Vec::new()),
+ })
+ );
+ // Allow major-only MSRV.
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"0\"".to_owned(), false),
+ Ok(Manifest {
+ msrv: Some(Msrv {
+ major: 0,
+ minor: None,
+ patch: None,
+ }),
+ features: Features(Vec::new()),
+ })
+ );
+ // Allow escapes.
+ assert_eq!(
+ Manifest::from_toml(
+ "[\"\\u0070ackage\"]\n\"\\u0072ust-version\"=\"0\\u002E\\u0031\"".to_owned(),
+ false
+ ),
+ Ok(Manifest {
+ msrv: Some(Msrv {
+ major: 0,
+ minor: Some(1),
+ patch: None,
+ }),
+ features: Features(Vec::new()),
+ })
+ );
+ assert_eq!(
+ Manifest::from_toml("[package]\nrust-version=\"0.0.0\"".to_owned(), false),
+ Ok(Manifest {
+ msrv: Some(Msrv {
+ major: 0,
+ minor: Some(0),
+ patch: Some(0),
+ }),
+ features: Features(Vec::new()),
+ })
+ );
+ // Ignore non `rust-version` keys in `package`. Ignore keys in the root document except `package`,
+ // `features`, `dependencies`, `build-dependencies`, and `target`. Ignore keys in
+ // `target.<something>` unless the key is `dependencies` or `build-dependencies`. Don't treat
+ // `<something>` special in `target.<something>` other than its being a table.
+ assert_eq!(
+ Manifest::from_toml("dev-dependencies=2\n[package]\nfoo=2\nrust-version=\"18446744073709551615.18446744073709551615.18446744073709551615\"\n[foo]\nbar=false\n[target.\"\".foo]\nbar=2\n[target.foo]\nbar=false\n[target.dependencies]\nfoo=2\n[target.build-dependencies]\nfoo=false\n[target.dev-dependencies]\nfoo=true\n".to_owned(), false),
+ Ok(Manifest {
+ msrv: Some(Msrv {
+ major: u64::MAX,
+ minor: Some(u64::MAX),
+ patch: Some(u64::MAX),
+ }),
+ features: Features(Vec::new()),
+ })
+ );
+ // [package]
+ //
+ // ["\u0064ependencies"]
+ // "\u0000" = "\u0000"
+ // a = { optional = true }
+ //
+ // ["build-\u0064ependencies"]
+ // "\u0000" = { optional = true }
+ //
+ // [dev-dependencies]
+ // buzz = { optional = true }
+ //
+ // [target."".dependencies]
+ // b = { optional = false, foo = 2 }
+ // fizz = { optional = true, foo = 3 }
+ //
+ // [target.a.dependencies]
+ // c = { optional = true }
+ // wuzz = { optional = true }
+ //
+ // [features]
+ // default = ["bar","dep:lk","a/ak", "a/ak"]
+ // bar = ["dep\u003Awuzz"]
+ //
+ // We allow any and all key names unless it's the features table or a dependency table; in which case
+ // key names must not contain `'/'` nor begin with `"dep:"`.
+ //
+ // The order of features is based on the following hierarchy:
+        // * Explicit features: lexicographically sorted
+        // * dependencies: optional only, lexicographically sorted, only if an explicit feature doesn't exist with
+        //   the same name nor any explicit feature contains a dependency named `"dep:<dependency>"` and we allow
+        //   implied features. If such a feature exists, we don't error but simply don't add.
+        // * build-dependencies: read above.
+        // * target.<something>: lexicographically sorted by `<something>`; within `<something>`, `dependencies`
+        //   comes first using the same methodology as item 2, then `build-dependencies`.
+ //
+ // Once the order of features is determined, the only feature dependencies that are retained are those
+ // that don't contain `'/'` nor begin with `"dep:"`. We don't require dependencies to be defined for
+ // feature dependencies that contain `'/'` or begin with `"dep:"`. We don't care about duplicate feature
+ // dependencies that contain `'/'`.
+ //
+ // Based on above, `Features` looks like the following:
+ // 1. (bar, [])
+ // 2. (default, ["bar"])
+ // 3. (a, [])
+ // 4. (\x00, [])
+ // 5. (fizz, [])
+ // 6. (c, [])
+ assert_eq!(
+ Manifest::from_toml(
+ "[\"\\u0064ependencies\"]\n\"\\u0000\"=\"\\u0000\"\na={\"optional\"=true}\n[\"build-\\u0064ependencies\"]\n\"\\u0000\"={\"optional\"=true}\n[target.\"\".dependencies]\nb={\"optional\"=false,foo=2}\nfizz={\"optional\"=true,foo=3}\n[features]\ndefault=[\"bar\",\"dep:lk\",\"a/ak\",\"a/ak\"]\nbar=[\"dep\\u003Awuzz\"]\n[dev-dependencies]\nbuzz={\"optional\"=true}\n[target.a.dependencies]\nc={\"optional\"=true}\nwuzz={\"optional\"=true}\n[package]".to_owned(),
+ true,
+ ),
+ Ok(Manifest {
+ msrv: None,
+ features: Features(vec![("bar".to_owned(), Vec::new()), ("default".to_owned(), vec!["bar".to_owned()]), ("a".to_owned(), Vec::new()), ("\0".to_owned(), Vec::new()), ("fizz".to_owned(), Vec::new()), ("c".to_owned(), Vec::new())]),
+ })
+ );
+ }
+ #[expect(clippy::unreachable, reason = "want to crash when there is a bug")]
+ #[expect(
+ clippy::cognitive_complexity,
+ clippy::too_many_lines,
+ reason = "want to test for a lot of things"
+ )]
+ #[test]
+ fn power_set() {
+ #[cfg(target_pointer_width = "16")]
+ let feat_len_one_too_large = 17;
+ #[cfg(target_pointer_width = "32")]
+ let feat_len_one_too_large = 33;
+ #[cfg(target_pointer_width = "64")]
+ let feat_len_one_too_large = 65;
+ let mut feats = Features(vec![(String::new(), Vec::new()); feat_len_one_too_large]);
+ assert_eq!(PowerSet::new(&feats), Err(TooManyFeaturesErr));
+ #[cfg(target_pointer_width = "16")]
+ let max_feat_len = 16;
+ #[cfg(target_pointer_width = "32")]
+ let max_feat_len = 32;
+ #[cfg(target_pointer_width = "64")]
+ let max_feat_len = 64;
+ feats.0 = vec![(String::new(), Vec::new()); max_feat_len];
+ assert_eq!(
+ PowerSet::new(&feats),
+ Ok(PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: usize::MAX,
+ buffer: vec![""; max_feat_len],
+ set: String::new(),
+ })
+ );
+ feats.0 = Vec::new();
+ assert_eq!(
+ PowerSet::new(&feats),
+ Ok(PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ })
+ );
+ let mut power_set = PowerSet::new(&feats).unwrap_or_else(|_e| {
+ unreachable!("not possible since we just verified PowerSet::new returned Ok")
+ });
+ assert_eq!(power_set.next_set(), Some(""));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ power_set.reset();
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some(""));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ // [features]
+ // a = ["b"]
+ // b = ["c", "d"]
+ // c = []
+ // d = []
+ feats.0 = vec![
+ ("a".to_owned(), vec!["b".to_owned()]),
+ ("b".to_owned(), vec!["c".to_owned(), "d".to_owned()]),
+ ("c".to_owned(), Vec::new()),
+ ("d".to_owned(), Vec::new()),
+ ];
+ assert_eq!(
+ PowerSet::new(&feats),
+ Ok(PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ // At least one feature depends on another, so this will be set to `true`.
+ check_overlap: true,
+ idx: 15,
+ buffer: vec!["a", "b", "c", "d"],
+ set: String::new(),
+ })
+ );
+ power_set = PowerSet::new(&feats).unwrap_or_else(|_e| {
+ unreachable!("not possible since we just verified PowerSet::new returned Ok")
+ });
+ // Order is the following:
+ // 1. a,b,c,d: skipped since a depends on b.
+ // 2. b,c,d: skipped since b depends on c.
+ // 3. a,c,d: skipped since a depends on c (via b).
+ // 4. c,d
+ // 5. a,b,d: skipped since a depends on b.
+ // 6. b,d: skipped since b depends on d.
+ // 7. a,d: skipped since a depends on d (via b).
+ // 8. d
+ // 9. a,b,c: skipped since a depends on b.
+ // 10. b,c: skipped since b depends on c.
+ // 11. a,c: skipped since a depends on c (via b).
+ // 12. c
+ // 13. a,b: skipped since a depends on b.
+ // 14. b
+ // 15. a
+ // 16.
+ assert_eq!(power_set.next_set(), Some("c,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: true,
+ // We started at 15, and we iterated 4 items (skipping 3).
+ idx: 11,
+ buffer: vec!["c", "d"],
+ set: "c,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: true,
+ // We started at 11, and we iterated 4 items (skipping 3).
+ idx: 7,
+ buffer: vec!["d"],
+ set: "d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("c"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: true,
+ // We started at 7, and we iterated 4 items (skipping 3).
+ idx: 3,
+ buffer: vec!["c"],
+ set: "c".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("b"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: true,
+ // We started at 3, and we iterated 2 items (skipping 1).
+ idx: 1,
+ buffer: vec!["b"],
+ set: "b".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: true,
+ // We started at 1, and we iterated 1 item.
+ idx: 0,
+ buffer: vec!["a"],
+ set: "a".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some(""));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: true,
+                // We started at 0, and we iterated 1 item; we don't underflow, instead `has_remaining` is set
+                // to `false`.
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ // Internal state is left unchanged.
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: true,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ // Internal state is left unchanged.
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: true,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ power_set.reset();
+        // `PowerSet::reset` only resets what is necessary and nothing more; in particular, `buffer` and `set` are
+ // left alone.
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: true,
+ idx: 15,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ // Same as above except no feature depends on any other.
+ // [features]
+ // a = []
+ // b = []
+ // c = []
+ // d = []
+ feats.0 = vec![
+ ("a".to_owned(), Vec::new()),
+ ("b".to_owned(), Vec::new()),
+ ("c".to_owned(), Vec::new()),
+ ("d".to_owned(), Vec::new()),
+ ];
+ assert_eq!(
+ PowerSet::new(&feats),
+ Ok(PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 15,
+ buffer: vec!["a", "b", "c", "d"],
+ set: String::new(),
+ })
+ );
+ power_set = PowerSet::new(&feats).unwrap_or_else(|_e| {
+ unreachable!("not possible since we just verified PowerSet::new returned Ok")
+ });
+ // Order is the same as above except nothing is skipped:
+ // 1. a,b,c,d
+ // 2. b,c,d
+ // 3. a,c,d
+ // 4. c,d
+ // 5. a,b,d
+ // 6. b,d
+ // 7. a,d
+ // 8. d
+ // 9. a,b,c
+ // 10. b,c
+ // 11. a,c
+ // 12. c
+ // 13. a,b
+ // 14. b
+ // 15. a
+ // 16.
+ assert_eq!(power_set.next_set(), Some("a,b,c,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 14,
+ buffer: vec!["a", "b", "c", "d"],
+ set: "a,b,c,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("b,c,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 13,
+ buffer: vec!["b", "c", "d"],
+ set: "b,c,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a,c,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 12,
+ buffer: vec!["a", "c", "d"],
+ set: "a,c,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("c,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 11,
+ buffer: vec!["c", "d"],
+ set: "c,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a,b,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 10,
+ buffer: vec!["a", "b", "d"],
+ set: "a,b,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("b,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 9,
+ buffer: vec!["b", "d"],
+ set: "b,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a,d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 8,
+ buffer: vec!["a", "d"],
+ set: "a,d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("d"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 7,
+ buffer: vec!["d"],
+ set: "d".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a,b,c"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 6,
+ buffer: vec!["a", "b", "c"],
+ set: "a,b,c".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("b,c"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 5,
+ buffer: vec!["b", "c"],
+ set: "b,c".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a,c"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 4,
+ buffer: vec!["a", "c"],
+ set: "a,c".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("c"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 3,
+ buffer: vec!["c"],
+ set: "c".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a,b"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 2,
+ buffer: vec!["a", "b"],
+ set: "a,b".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("b"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 1,
+ buffer: vec!["b"],
+ set: "b".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some("a"));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: true,
+ check_overlap: false,
+ idx: 0,
+ buffer: vec!["a"],
+ set: "a".to_owned(),
+ }
+ );
+ assert_eq!(power_set.next_set(), Some(""));
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ assert_eq!(power_set.next_set(), None);
+ assert_eq!(
+ power_set,
+ PowerSet {
+ feats: feats.0.as_slice(),
+ has_remaining: false,
+ check_overlap: false,
+ idx: 0,
+ buffer: Vec::new(),
+ set: String::new(),
+ }
+ );
+ }
+}
diff --git a/src/rustup.rs b/src/rustup.rs
@@ -0,0 +1,1212 @@
+/// The target is supported by `rustup` per <https://rust-lang.github.io/rustup-components-history/>.
+/// Last checked on 2025-10-06T20:57.
+#[cfg(any(
+ all(
+ target_arch = "aarch64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "llvm",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "fuchsia",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "emscripten",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "sim",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "llvm",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "riscv64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "x32",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "riscv32",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "redox",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "uefi",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "eabihf",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "uefi",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "nvptx64",
+ target_vendor = "nvidia",
+ target_os = "cuda",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "unknown",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "eabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "eabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "ohos",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "fortanix",
+ target_os = "unknown",
+ target_env = "sgx",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "fuchsia",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "freebsd",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "mips64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "abi64",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "softfloat",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "apple",
+ target_os = "macos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "illumos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "powerpc64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "apple",
+ target_os = "macos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "riscv64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "eabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "powerpc64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "ohos",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "mips64r6",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "abi64",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "uefi",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "eabihf",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "netbsd",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "freebsd",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "macabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "macos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "macabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "eabi",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "wasi",
+ target_env = "p2",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "mips",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ ),
+ all(
+ target_arch = "mips32r6",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ ),
+ all(
+ target_arch = "sparc64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "big",
+ ),
+ all(
+ target_arch = "riscv64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "softfloat",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "wasi",
+ target_env = "p1",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "s390x",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "big",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm64ec",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "solaris",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "eabihf",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "llvm",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "sparc64",
+ target_vendor = "sun",
+ target_os = "solaris",
+ target_env = "",
+ target_abi = "",
+ target_endian = "big",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "ohos",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "powerpc",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "big",
+ ),
+))]
+pub(crate) const SUPPORTED: bool = true;
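+// Note: the `cfg(not(any(...)))` gate below repeats the same target list so that `SUPPORTED`
+// is also defined (as `false`) on every other target; the two lists presumably must be kept
+// in sync whenever the rustup components history changes.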
+/// The target is _not_ supported by `rustup` per <https://rust-lang.github.io/rustup-components-history/>.
+/// Last checked on 2025-10-06T20:57.
+#[cfg(not(any(
+ all(
+ target_arch = "aarch64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "llvm",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "fuchsia",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "emscripten",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "sim",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "llvm",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "riscv64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "x32",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "riscv32",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "redox",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "uefi",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "eabihf",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "uefi",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "nvptx64",
+ target_vendor = "nvidia",
+ target_os = "cuda",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "unknown",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "eabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "eabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "ohos",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "fortanix",
+ target_os = "unknown",
+ target_env = "sgx",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "fuchsia",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "freebsd",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "mips64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "abi64",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "softfloat",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "apple",
+ target_os = "macos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "illumos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "powerpc64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "apple",
+ target_os = "macos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "riscv64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "eabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "powerpc64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "ohos",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "mips64r6",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "abi64",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "uefi",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "eabihf",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "netbsd",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "freebsd",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "macabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "macos",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "apple",
+ target_os = "ios",
+ target_env = "",
+ target_abi = "macabi",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "eabi",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "wasi",
+ target_env = "p2",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "mips",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ ),
+ all(
+ target_arch = "mips32r6",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ ),
+ all(
+ target_arch = "sparc64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "big",
+ ),
+ all(
+ target_arch = "riscv64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "softfloat",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "wasm32",
+ target_vendor = "unknown",
+ target_os = "wasi",
+ target_env = "p1",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "s390x",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "big",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm64ec",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "msvc",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "pc",
+ target_os = "solaris",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "arm",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "eabihf",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "aarch64",
+ target_vendor = "unknown",
+ target_os = "none",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "musl",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "android",
+ target_env = "",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86",
+ target_vendor = "pc",
+ target_os = "windows",
+ target_env = "gnu",
+ target_abi = "llvm",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "sparc64",
+ target_vendor = "sun",
+ target_os = "solaris",
+ target_env = "",
+ target_abi = "",
+ target_endian = "big",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "ohos",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "x86_64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "loongarch64",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "little",
+ ),
+ all(
+ target_arch = "powerpc",
+ target_vendor = "unknown",
+ target_os = "linux",
+ target_env = "gnu",
+ target_abi = "",
+ target_endian = "big",
+ ),
+)))]
+pub(crate) const SUPPORTED: bool = false;