commit 04147098b15c32571a836acc9e9a171813266451
parent d3707a9b2b6ceccf218799136f60672fa380b545
Author: Zack Newman <zack@philomathiclife.com>
Date: Sat, 11 Oct 2025 00:01:24 -0600
correct msrv use in workspaces. add progress option. more tests. cleanup
Diffstat:
| M | Cargo.toml | | | 2 | +- |
| M | README.md | | | 81 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++------ |
| M | src/args.rs | | | 437 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++------------- |
| M | src/cargo.rs | | | 7 | +++++-- |
| M | src/main.rs | | | 266 | +++++++++++++++++++++++++++++++++++++++++-------------------------------------- |
| M | src/manifest.rs | | | 2057 | ++++++++++++++++++++++++++++++++++++++++++++++++++++--------------------------- |
| M | src/rustup.rs | | | 4 | ++-- |
7 files changed, 1941 insertions(+), 913 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml
@@ -4,7 +4,7 @@ categories = ["command-line-utilities", "development-tools::testing", "rust-patt
description = "Continuous integration for Clippy, unit tests, and doc tests for all possible features."
documentation = "https://crates.io/crates/ci-cargo"
edition = "2024"
-keywords = ["cargo", "ci", "rust"]
+keywords = ["cargo", "ci", "features", "msrv", "rust"]
license = "MIT OR Apache-2.0"
name = "ci-cargo"
readme = "README.md"
diff --git a/README.md b/README.md
@@ -23,7 +23,6 @@ was passed.
When a command errors, `ci-cargo` will terminate; upon termination (successful or not), `ci-cargo` will write all
_unique_ messages that were written to `stderr` to `stderr` followed by the offending command in case of an error.
-Nothing is written to `stdout` unless `--summary` was passed.
## Why is this useful?
@@ -58,6 +57,7 @@ build works on both the stable or default toolchain _and_ the stated MSRV (if on
and don't lead to termination.
* `--ignored`: `cargo t --tests -- --ignored` is invoked for each combination of features.
* `--include-ignored`: `cargo t --tests -- --include-ignored` is invoked for each combination of features.
+* `--progress`: Writes the current progress to `stdout`.
* `--rustup-home <PATH>`: Sets the storage directory used by `rustup`.
* `--summary`: Writes the toolchain(s) used and the combinations of features run on to `stdout` on success.
@@ -152,7 +152,79 @@ default = ["foo"]
foo = []
bar = ["fizz"]
fizz = []
-[zack@laptop example]$ ci-cargo --summary
+[zack@laptop example]$ ci-cargo --all-targets --include-ignored --progress --summary
+Toolchain (1/2): cargo +stable. Features (1/32, 5 skipped): buzz,fizz,foo. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (1/32, 5 skipped): buzz,fizz,foo. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (2/32, 6 skipped): fizz,foo. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (2/32, 6 skipped): fizz,foo. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (3/32, 10 skipped): bar,buzz,foo. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (3/32, 10 skipped): bar,buzz,foo. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (4/32, 10 skipped): buzz,foo. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (4/32, 10 skipped): buzz,foo. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (5/32, 10 skipped): bar,foo. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (5/32, 10 skipped): bar,foo. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (6/32, 10 skipped): foo. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (6/32, 10 skipped): foo. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (7/32, 11 skipped): buzz,default,fizz. Command (1/2): clippy. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (7/32, 11 skipped): buzz,default,fizz. Command (2/2): t. Time running: 0 s.
+Toolchain (1/2): cargo +stable. Features (8/32, 12 skipped): default,fizz. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (8/32, 12 skipped): default,fizz. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (9/32, 13 skipped): buzz,fizz. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (9/32, 13 skipped): buzz,fizz. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (10/32, 14 skipped): fizz. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (10/32, 14 skipped): fizz. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (11/32, 14 skipped): bar,buzz,default. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (11/32, 14 skipped): bar,buzz,default. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (12/32, 14 skipped): buzz,default. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (12/32, 14 skipped): buzz,default. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (13/32, 14 skipped): bar,default. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (13/32, 14 skipped): bar,default. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (14/32, 14 skipped): default. Command (1/2): clippy. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (14/32, 14 skipped): default. Command (2/2): t. Time running: 1 s.
+Toolchain (1/2): cargo +stable. Features (15/32, 14 skipped): bar,buzz. Command (1/2): clippy. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (15/32, 14 skipped): bar,buzz. Command (2/2): t. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (16/32, 14 skipped): buzz. Command (1/2): clippy. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (16/32, 14 skipped): buzz. Command (2/2): t. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (17/32, 14 skipped): bar. Command (1/2): clippy. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (17/32, 14 skipped): bar. Command (2/2): t. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (18/32, 14 skipped): <none>. Command (1/2): clippy. Time running: 2 s.
+Toolchain (1/2): cargo +stable. Features (18/32, 14 skipped): <none>. Command (2/2): t. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (1/32, 5 skipped): buzz,fizz,foo. Command (1/2): clippy. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (1/32, 5 skipped): buzz,fizz,foo. Command (2/2): t. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (2/32, 6 skipped): fizz,foo. Command (1/2): clippy. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (2/32, 6 skipped): fizz,foo. Command (2/2): t. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (3/32, 10 skipped): bar,buzz,foo. Command (1/2): clippy. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (3/32, 10 skipped): bar,buzz,foo. Command (2/2): t. Time running: 2 s.
+Toolchain (2/2): cargo +1.89.0. Features (4/32, 10 skipped): buzz,foo. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (4/32, 10 skipped): buzz,foo. Command (2/2): t. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (5/32, 10 skipped): bar,foo. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (5/32, 10 skipped): bar,foo. Command (2/2): t. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (6/32, 10 skipped): foo. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (6/32, 10 skipped): foo. Command (2/2): t. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (7/32, 11 skipped): buzz,default,fizz. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (7/32, 11 skipped): buzz,default,fizz. Command (2/2): t. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (8/32, 12 skipped): default,fizz. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (8/32, 12 skipped): default,fizz. Command (2/2): t. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (9/32, 13 skipped): buzz,fizz. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (9/32, 13 skipped): buzz,fizz. Command (2/2): t. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (10/32, 14 skipped): fizz. Command (1/2): clippy. Time running: 3 s.
+Toolchain (2/2): cargo +1.89.0. Features (10/32, 14 skipped): fizz. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (11/32, 14 skipped): bar,buzz,default. Command (1/2): clippy. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (11/32, 14 skipped): bar,buzz,default. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (12/32, 14 skipped): buzz,default. Command (1/2): clippy. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (12/32, 14 skipped): buzz,default. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (13/32, 14 skipped): bar,default. Command (1/2): clippy. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (13/32, 14 skipped): bar,default. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (14/32, 14 skipped): default. Command (1/2): clippy. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (14/32, 14 skipped): default. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (15/32, 14 skipped): bar,buzz. Command (1/2): clippy. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (15/32, 14 skipped): bar,buzz. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (16/32, 14 skipped): buzz. Command (1/2): clippy. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (16/32, 14 skipped): buzz. Command (2/2): t. Time running: 4 s.
+Toolchain (2/2): cargo +1.89.0. Features (17/32, 14 skipped): bar. Command (1/2): clippy. Time running: 5 s.
+Toolchain (2/2): cargo +1.89.0. Features (17/32, 14 skipped): bar. Command (2/2): t. Time running: 5 s.
+Toolchain (2/2): cargo +1.89.0. Features (18/32, 14 skipped): <none>. Command (1/2): clippy. Time running: 5 s.
+Toolchain (2/2): cargo +1.89.0. Features (18/32, 14 skipped): <none>. Command (2/2): t. Time running: 5 s.
Toolchains used: cargo +stable and cargo +1.89.0
Features used:
buzz,fizz,foo
@@ -172,7 +244,7 @@ default
bar,buzz
buzz
bar
-
+<none>
[zack@laptop example]$ ci-cargo clippy --deny-warnings --ignore-compile-errors
[zack@laptop ~]$ ci-cargo t --allow-implied-features --cargo-home ~/.cargo/ --cargo-path ~/.cargo/bin --dir ~/example/ --ignored --rustup-home ~/.rustup/
[zack@laptop ~]$ ci-cargo v
@@ -187,9 +259,6 @@ There is a hard limit on the number of features allowed. Specifically the number
number of bits that make up a pointer; however practical limits will almost always be a factor long before hitting
such a hard limit due to the exponential effect features have.
-No attempt is made to distinguish packages from workspaces; thus running `ci-cargo` in the root directory of a
-workspace may not work as intended.
-
Cyclic and redundant features are forbidden. For example the below snippets from `Cargo.toml` files will cause an
error:
diff --git a/src/args.rs b/src/args.rs
@@ -2,10 +2,15 @@ use super::{
cargo::{CargoErr, Clippy, Options, TestKind, Tests, Toolchain},
manifest::PowerSet,
};
+use core::{
+ fmt::{self, Display, Formatter},
+ ops::IndexMut as _,
+};
use std::{
ffi::OsString,
- io::{Error, StderrLock, Write as _},
+ io::{self, Error, StderrLock, StdoutLock, Write as _},
path::PathBuf,
+ time::Instant,
};
/// Help message.
pub(crate) const HELP_MSG: &str = "Continuous integration of all features using cargo
@@ -31,6 +36,7 @@ Options:
--ignore-compile-errors compile_error!s are ignored
--ignored --ignored is passed to cargo t --tests
--include-ignored --include-ignored is passed to cargo t --tests
+ --progress Writes the progress to stdout
--rustup-home <PATH> Set the storage directory used by rustup
--summary Writes the toolchain(s) used and the combinations of features run on
@@ -47,8 +53,8 @@ conditions are met:
ci-cargo will run the appropriate command(s) for all possible combinations of features.
If an error occurs, ci-cargo will terminate writing the error(s) and the offending command
to stderr. If successful and --summary was passed, then the toolchain(s) used and the
-combinations of features run will be written to stdout; otherwise stdout is never written
-to.
+combinations of features run will be written to stdout. If --progress was passed, the current
+progress will be written to stdout before testing each combination of features.
";
/// `"help"`.
const HELP: &str = "help";
@@ -90,6 +96,8 @@ const IGNORE_COMPILE_ERRORS: &str = "--ignore-compile-errors";
const IGNORED: &str = "--ignored";
/// `"--include-ignored"`.
const INCLUDE_IGNORED: &str = "--include-ignored";
+/// `"--progress"`.
+const PROGRESS: &str = "--progress";
/// `"--rustup-home"`.
const RUSTUP_HOME: &str = "--rustup-home";
/// `"--summary"`.
@@ -232,6 +240,8 @@ pub(crate) struct Opts {
pub allow_implied_features: bool,
/// `true` iff `compile_error`s should be ignored.
pub ignore_compile_errors: bool,
+ /// `true` iff progress should be written to `stdout`.
+ pub progress: bool,
/// `true` iff the toolchains used and combinations of features run on should be written
/// to `stdout` upon success.
pub summary: bool,
@@ -247,6 +257,115 @@ pub(crate) enum Ignored {
/// Run all tests.
Include,
}
+/// One more than the contained `usize`.
+struct OneMore(usize);
+impl Display for OneMore {
+ #[expect(unsafe_code, reason = "comment justifies correctness")]
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+ if let Some(val) = self.0.checked_add(1) {
+ write!(f, "{val}")
+ } else {
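+ // `checked_add` failed, so `self.0` is `usize::MAX` and the value to display is `usize::MAX + 1`.
+ // Render it by incrementing the last decimal digit of the string form of `usize::MAX`.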
+ let mut val = usize::MAX.to_string();
+ // This won't underflow since the length is at least 1.
+ let idx = val.len() - 1;
+ // SAFETY:
+ // `usize::MAX + 1 = 2^n` for some nonzero `n`; 2^n is even and never ends in 0 (it is not divisible
+ // by 5), so the last digit of `usize::MAX` is 1, 3, 5, or 7 and can be incremented without carrying.
+ // We only mutate the last digit which is guaranteed to be valid ASCII; thus we can increment
+ // the `u8` since digits are consecutive in ASCII.
+ *unsafe { val.as_bytes_mut() }.index_mut(idx) += 1;
+ write!(f, "{val}")
+ }
+ }
+}
+/// Progress tracker for when `--progress` was passed.
+struct Progress<'toolchain> {
+ /// The current toolchain counter.
+ toolchain_counter: &'static str,
+ /// The total toolchains that will be used.
+ toolchain_total: &'static str,
+ /// The current toolchain.
+ toolchain: &'toolchain str,
+ /// The current command counter.
+ cmd_counter: &'static str,
+ /// The total commands that will be used.
+ cmd_total: &'static str,
+ /// The current command.
+ cmd: &'static str,
+ /// The total number of features in the power set.
+ features_total: String,
+ /// The time in which we started.
+ time_started: Instant,
+ /// `stdout` stream.
+ ///
+ /// None iff we encountered any error when writing
+ /// to it.
+ stdout: Option<StdoutLock<'static>>,
+}
+impl Progress<'_> {
+ /// Returns `Self` based on running both clippy and t.
+ fn all(toolchain: Toolchain<'_>, use_msrv: bool, features_total_minus_1: usize) -> Self {
+ Self::inner_new("clippy", "2", toolchain, use_msrv, features_total_minus_1)
+ }
+ /// Returns `Self` based on running clippy.
+ fn clippy(toolchain: Toolchain<'_>, use_msrv: bool, features_total_minus_1: usize) -> Self {
+ Self::inner_new("clippy", "1", toolchain, use_msrv, features_total_minus_1)
+ }
+ /// Returns `Self` based on running tests.
+ fn tests(toolchain: Toolchain<'_>, use_msrv: bool, features_total_minus_1: usize) -> Self {
+ Self::inner_new("t", "1", toolchain, use_msrv, features_total_minus_1)
+ }
+ /// Returns `Self` based on running t --doc.
+ fn doc_tests(toolchain: Toolchain<'_>, use_msrv: bool, features_total_minus_1: usize) -> Self {
+ Self::inner_new("t --doc", "1", toolchain, use_msrv, features_total_minus_1)
+ }
+ /// Returns `Self` based on the passed arguments.
+ fn inner_new(
+ cmd: &'static str,
+ cmd_total: &'static str,
+ toolchain: Toolchain<'_>,
+ use_msrv: bool,
+ features_total_minus_1: usize,
+ ) -> Self {
+ Self {
+ toolchain_counter: "1",
+ toolchain_total: if use_msrv { "2" } else { "1" },
+ toolchain: if matches!(toolchain, Toolchain::Stable) {
+ " +stable"
+ } else {
+ ""
+ },
+ cmd_counter: "1",
+ cmd_total,
+ cmd,
+ features_total: OneMore(features_total_minus_1).to_string(),
+ time_started: Instant::now(),
+ stdout: Some(io::stdout().lock()),
+ }
+ }
+ /// Writes the progress so far to `stdout`.
+ ///
+ /// If writing to `stdout` errors, then `stdout` will never be written to again.
+ fn write_to_stdout(
+ &mut self,
+ features: &str,
+ features_counter_minus_1: usize,
+ features_skipped: usize,
+ ) {
+ if let Some(ref mut std) = self.stdout {
+ // Example:
+ // "Toolchain (1/2): cargo +stable. Features (18/128, 3 skipped): foo,bar. Command (1/2): clippy. Time running: 49 s.");
+ // Note `features_skipped` is at most `usize::MAX` (so it cannot overflow) since the empty set is never skipped.
+ if writeln!(std, "Toolchain ({}/{}): cargo{}. Features ({}/{}, {} skipped): {}. Command ({}/{}): {}. Time running: {} s.", self.toolchain_counter, self.toolchain_total, self.toolchain, OneMore(features_counter_minus_1), self.features_total, features_skipped, if features.is_empty() { "<none>" } else { features }, self.cmd_counter, self.cmd_total, self.cmd, self.time_started.elapsed().as_secs()).is_err() {
+ drop(self.stdout.take());
+ }
+ }
+ }
+}
/// `cargo` command(s) we should run.
#[cfg_attr(test, derive(Debug, PartialEq))]
pub(crate) enum Cmd {
@@ -277,9 +396,11 @@ impl Cmd {
options: Options<'a, '_>,
msrv: Option<&'a str>,
power_set: &mut PowerSet<'_>,
+ progress: bool,
) -> Result<(), Box<CargoErr>> {
match self {
Self::All(all_targets, deny_warning, ignored_tests) => Self::run_all(
+ progress.then(|| Progress::all(options.toolchain, msrv.is_some(), power_set.len())),
msrv,
options,
all_targets,
@@ -287,24 +408,40 @@ impl Cmd {
ignored_tests,
power_set,
),
- Self::Clippy(all_targets, deny_warnings) => {
- Self::run_clippy(msrv, options, all_targets, deny_warnings, power_set)
- }
- Self::Tests(ignored_tests) => {
- Self::run_unit_tests(msrv, options, ignored_tests, power_set)
- }
- Self::DocTests => Self::run_doc_tests(msrv, options, power_set),
+ Self::Clippy(all_targets, deny_warnings) => Self::run_clippy(
+ progress
+ .then(|| Progress::clippy(options.toolchain, msrv.is_some(), power_set.len())),
+ msrv,
+ options,
+ all_targets,
+ deny_warnings,
+ power_set,
+ ),
+ Self::Tests(ignored_tests) => Self::run_unit_tests(
+ progress
+ .then(|| Progress::tests(options.toolchain, msrv.is_some(), power_set.len())),
+ msrv,
+ options,
+ ignored_tests,
+ power_set,
+ ),
+ Self::DocTests => Self::run_doc_tests(
+ progress.then(|| {
+ Progress::doc_tests(options.toolchain, msrv.is_some(), power_set.len())
+ }),
+ msrv,
+ options,
+ power_set,
+ ),
}
}
/// Runs `cargo clippy` and `cargo t` for all features in `power_set`.
///
/// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
/// later used.
- #[expect(
- clippy::panic_in_result_fn,
- reason = "want to crash when there is a bug"
- )]
+ #[expect(clippy::else_if_without_else, reason = "don't want an empty else")]
fn run_all<'a>(
+ mut progress: Option<Progress<'a>>,
msrv: Option<&'a str>,
mut options: Options<'a, '_>,
all_targets: bool,
@@ -312,27 +449,75 @@ impl Cmd {
ignored_tests: Ignored,
power_set: &mut PowerSet<'_>,
) -> Result<(), Box<CargoErr>> {
- while let Some(set) = power_set.next_set() {
- if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
- return Err(e);
+ if let Some(ref mut prog) = progress {
+ let mut feat_counter = 0;
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.cmd_counter = "1";
+ prog.cmd = "clippy";
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ // Note we run tests even if a `compile_error` occurred since it may not occur for tests.
+ prog.cmd_counter = "2";
+ prog.cmd = "t";
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Tests::run(&mut options, TestKind::All(ignored_tests), set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps.
+ feat_counter = feat_counter.wrapping_add(1);
}
- match Tests::run(&mut options, TestKind::All(ignored_tests), set) {
- // Since we are running `cargo t`, a no library error won't happen.
- Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
- Err(e) => return Err(e),
+ if let Some(msrv_val) = msrv {
+ feat_counter = 0;
+ prog.toolchain_counter = "2";
+ prog.toolchain = msrv_val;
+ prog.cmd_counter = "1";
+ prog.cmd = "clippy";
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.cmd_counter = "1";
+ prog.cmd = "clippy";
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ // Note we run tests even if a `compile_error` occurred since it may not occur for tests.
+ prog.cmd_counter = "2";
+ prog.cmd = "t";
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Tests::run(&mut options, TestKind::All(ignored_tests), set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps. Note we reset `feat_counter` to 0 before we
+ // started the loop.
+ feat_counter = feat_counter.wrapping_add(1);
+ }
}
- }
- if let Some(msrv_val) = msrv {
- options.toolchain = Toolchain::Msrv(msrv_val);
- power_set.reset();
+ } else {
while let Some(set) = power_set.next_set() {
if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
return Err(e);
+ // Note we run tests even if a `compile_error` occurred since it may not occur for tests.
+ } else if let Err(e) = Tests::run(&mut options, TestKind::All(ignored_tests), set) {
+ return Err(e);
}
- match Tests::run(&mut options, TestKind::All(ignored_tests), set) {
- // Since we are running `cargo t`, a no library error won't happen.
- Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
- Err(e) => return Err(e),
+ }
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ // Note we run tests even if a `compile_error` occurred since it may not occur for tests.
+ } else if let Err(e) =
+ Tests::run(&mut options, TestKind::All(ignored_tests), set)
+ {
+ return Err(e);
+ }
}
}
}
@@ -343,25 +528,56 @@ impl Cmd {
/// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
/// later used.
fn run_clippy<'a>(
+ mut progress: Option<Progress<'a>>,
msrv: Option<&'a str>,
mut options: Options<'a, '_>,
all_targets: bool,
deny_warnings: bool,
power_set: &mut PowerSet<'_>,
) -> Result<(), Box<CargoErr>> {
- while let Some(set) = power_set.next_set() {
- if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
- return Err(e);
+ if let Some(ref mut prog) = progress {
+ let mut feat_counter = 0;
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps.
+ feat_counter = feat_counter.wrapping_add(1);
}
- }
- if let Some(msrv_val) = msrv {
- options.toolchain = Toolchain::Msrv(msrv_val);
- power_set.reset();
+ if let Some(msrv_val) = msrv {
+ feat_counter = 0;
+ prog.toolchain_counter = "2";
+ prog.toolchain = msrv_val;
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps. Note we reset `feat_counter` to 0 before we
+ // started the loop.
+ feat_counter = feat_counter.wrapping_add(1);
+ }
+ }
+ } else {
while let Some(set) = power_set.next_set() {
if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
return Err(e);
}
}
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Clippy::run(&mut options, all_targets, deny_warnings, set) {
+ return Err(e);
+ }
+ }
+ }
}
Ok(())
}
@@ -369,31 +585,54 @@ impl Cmd {
///
/// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
/// later used.
- #[expect(
- clippy::panic_in_result_fn,
- reason = "want to crash when there is a bug"
- )]
fn run_unit_tests<'a>(
+ mut progress: Option<Progress<'a>>,
msrv: Option<&'a str>,
mut options: Options<'a, '_>,
ignored_tests: Ignored,
power_set: &mut PowerSet<'_>,
) -> Result<(), Box<CargoErr>> {
- while let Some(set) = power_set.next_set() {
- match Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
- // Since we are running `cargo t --tests`, a no library error won't happen.
- Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
- Err(e) => return Err(e),
+ if let Some(ref mut prog) = progress {
+ let mut feat_counter = 0;
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps.
+ feat_counter = feat_counter.wrapping_add(1);
}
- }
- if let Some(msrv_val) = msrv {
- options.toolchain = Toolchain::Msrv(msrv_val);
- power_set.reset();
+ if let Some(msrv_val) = msrv {
+ feat_counter = 0;
+ prog.toolchain_counter = "2";
+ prog.toolchain = msrv_val;
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ if let Err(e) = Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps. Note we reset `feat_counter` to 0 before we
+ // started the loop.
+ feat_counter = feat_counter.wrapping_add(1);
+ }
+ }
+ } else {
while let Some(set) = power_set.next_set() {
- match Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
- // Since we are running `cargo t --tests`, a no library error won't happen.
- Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
- Err(e) => return Err(e),
+ if let Err(e) = Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
+ return Err(e);
+ }
+ }
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ if let Err(e) = Tests::run(&mut options, TestKind::Unit(ignored_tests), set) {
+ return Err(e);
+ }
}
}
}
@@ -403,37 +642,69 @@ impl Cmd {
///
/// Note the [`Toolchain`] in `options` is first used; and if `msrv.is_some()`, then [`Toolchain::Msrv`] is
/// later used.
- #[expect(
- clippy::panic_in_result_fn,
- reason = "want to crash when there is a bug"
- )]
fn run_doc_tests<'a>(
+ mut progress: Option<Progress<'a>>,
msrv: Option<&'a str>,
mut options: Options<'a, '_>,
power_set: &mut PowerSet<'_>,
) -> Result<(), Box<CargoErr>> {
- while let Some(set) = power_set.next_set() {
- match Tests::run(&mut options, TestKind::Doc, set) {
- Ok(no_library) => {
- if no_library {
- // We don't want to continue invoking `cargo t --doc` once we know this is not a library
- // target.
- return Ok(());
+ if let Some(ref mut prog) = progress {
+ let mut feat_counter = 0;
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ match Tests::run(&mut options, TestKind::Doc, set) {
+ Ok(no_library_target) => {
+ if no_library_target {
+ return Ok(());
+ }
}
+ Err(e) => return Err(e),
}
- Err(e) => return Err(e),
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps.
+ feat_counter = feat_counter.wrapping_add(1);
}
- }
- if let Some(msrv_val) = msrv {
- options.toolchain = Toolchain::Msrv(msrv_val);
- power_set.reset();
+ if let Some(msrv_val) = msrv {
+ feat_counter = 0;
+ prog.toolchain_counter = "2";
+ prog.toolchain = msrv_val;
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some((set, skip_count)) = power_set.next_set_with_skip_count() {
+ prog.write_to_stdout(set, feat_counter, skip_count);
+ // If there is no library target, then we would have been informed above.
+ if let Err(e) = Tests::run(&mut options, TestKind::Doc, set) {
+ return Err(e);
+ }
+ // The maximum number possible is `usize::MAX + 1`; however that can only happen at the very
+ // last item, so we don't care this wraps. Note we reset `feat_counter` to 0 before we
+ // started the loop.
+ feat_counter = feat_counter.wrapping_add(1);
+ }
+ }
+ } else {
while let Some(set) = power_set.next_set() {
match Tests::run(&mut options, TestKind::Doc, set) {
- // If there is no library target, then we would have been informed above.
- Ok(no_library) => assert!(!no_library, "there is a bug in cargo::Tests::run"),
+ Ok(no_library_target) => {
+ if no_library_target {
+ // We don't want to continue invoking `cargo t --doc` once we know this is not a library
+ // target.
+ return Ok(());
+ }
+ }
Err(e) => return Err(e),
}
}
+ if let Some(msrv_val) = msrv {
+ options.toolchain = Toolchain::Msrv(msrv_val);
+ power_set.reset();
+ while let Some(set) = power_set.next_set() {
+ // If there is no library target, then we would have been informed above.
+ if let Err(e) = Tests::run(&mut options, TestKind::Doc, set) {
+ return Err(e);
+ }
+ }
+ }
}
Ok(())
}
@@ -475,6 +746,8 @@ struct ArgOpts {
ignored: bool,
/// `--include-ignored`.
include_ignored: bool,
+ /// `--progress`.
+ progress: bool,
/// `--rustup-home` along with the path.
rustup_home: Option<PathBuf>,
/// `--summary`.
@@ -508,6 +781,7 @@ impl From<ArgOpts> for Opts {
color: value.color,
allow_implied_features: value.allow_implied_features,
ignore_compile_errors: value.ignore_compile_errors,
+ progress: value.progress,
summary: value.summary,
}
}
@@ -614,6 +888,12 @@ impl MetaCmd {
}
opts.include_ignored = true;
}
+ PROGRESS => {
+ if opts.progress {
+ return Err(ArgsErr::DuplicateOption(val));
+ }
+ opts.progress = true;
+ }
RUSTUP_HOME => {
if opts.rustup_home.is_some() {
return Err(ArgsErr::DuplicateOption(val));
@@ -639,6 +919,7 @@ impl MetaCmd {
)
}
/// Returns data we need by reading the supplied CLI arguments.
+ #[expect(clippy::too_many_lines, reason = "101 is fine.")]
pub(crate) fn from_args<T: Iterator<Item = OsString>>(mut args: T) -> Result<Self, ArgsErr> {
args.next().ok_or(ArgsErr::NoArgs).and_then(|_| {
args.next().map_or_else(
@@ -653,6 +934,7 @@ impl MetaCmd {
color: false,
allow_implied_features: false,
ignore_compile_errors: false,
+ progress: false,
summary: false,
},
))
@@ -769,6 +1051,7 @@ mod tests {
color: false,
allow_implied_features: false,
ignore_compile_errors: false,
+ progress: false,
summary: false,
}
)),
@@ -989,6 +1272,7 @@ mod tests {
"--include-ignored".to_owned().into(),
"--rustup-home".to_owned().into(),
OsString::new(),
+ "--progress".to_owned().into(),
"--summary".to_owned().into(),
]
.into_iter()
@@ -1003,6 +1287,7 @@ mod tests {
color: true,
allow_implied_features: true,
ignore_compile_errors: true,
+ progress: true,
summary: true,
}
))
@@ -1025,6 +1310,7 @@ mod tests {
"--ignore-compile-errors".to_owned().into(),
"--rustup-home".to_owned().into(),
"a".to_owned().into(),
+ "--progress".to_owned().into(),
"--summary".to_owned().into(),
]
.into_iter()
@@ -1039,6 +1325,7 @@ mod tests {
color: true,
allow_implied_features: true,
ignore_compile_errors: true,
+ progress: true,
summary: true,
}
))
@@ -1063,6 +1350,7 @@ mod tests {
color: false,
allow_implied_features: false,
ignore_compile_errors: false,
+ progress: false,
summary: false,
}
))
@@ -1084,6 +1372,7 @@ mod tests {
"--ignored".to_owned().into(),
"--rustup-home".to_owned().into(),
OsString::new(),
+ "--progress".to_owned().into(),
"--summary".to_owned().into(),
]
.into_iter()
@@ -1098,6 +1387,7 @@ mod tests {
color: true,
allow_implied_features: true,
ignore_compile_errors: true,
+ progress: true,
summary: true,
}
))
@@ -1114,6 +1404,7 @@ mod tests {
color: false,
allow_implied_features: false,
ignore_compile_errors: false,
+ progress: false,
summary: false,
}
))
@@ -1137,6 +1428,7 @@ mod tests {
color: false,
allow_implied_features: false,
ignore_compile_errors: false,
+ progress: false,
summary: false,
}
))
@@ -1157,6 +1449,7 @@ mod tests {
"--ignore-compile-errors".to_owned().into(),
"--rustup-home".to_owned().into(),
OsString::new(),
+ "--progress".to_owned().into(),
"--summary".to_owned().into(),
]
.into_iter()
@@ -1171,6 +1464,7 @@ mod tests {
color: true,
allow_implied_features: true,
ignore_compile_errors: true,
+ progress: true,
summary: true,
}
))
@@ -1187,6 +1481,7 @@ mod tests {
color: false,
allow_implied_features: false,
ignore_compile_errors: false,
+ progress: false,
summary: false,
}
))
diff --git a/src/cargo.rs b/src/cargo.rs
@@ -374,7 +374,7 @@ pub(crate) struct Options<'toolchain, 'errs> {
}
/// Executes `cmd`.
///
-/// Returns `true` iff a no target error occurs and `doc_only` is `true`.
+/// Returns `true` iff a no-library target error occurred and `doc_only` was `true`.
fn execute_command(
mut cmd: Command,
options: &mut Options<'_, '_>,
@@ -471,6 +471,9 @@ fn execute_command(
pub(crate) struct Clippy;
impl Clippy {
/// Execute `cargo clippy`.
+ ///
+ /// Returns `false` iff the command ran successfully. Note this can only return `true` if
+ /// [`Options::ignore_compile_errors`] is `true` since an error would be returned instead.
#[expect(
clippy::panic_in_result_fn,
reason = "want to crash when there is a bug"
@@ -513,7 +516,7 @@ impl Clippy {
_ = c.arg(DASH_DASH).arg("-Dwarnings");
}
execute_command(c, options, features, false)
- .map(|no_features| assert!(!no_features, "there is a bug in cargo::execute_command"))
+ .map(|no_library_target| assert!(!no_library_target, "there is a bug in cargo::execute_command since a no-library target error was returned when running Clippy."))
}
}
/// What kind of test to run.
diff --git a/src/main.rs b/src/main.rs
@@ -12,8 +12,9 @@ mod rustup;
use alloc::ffi::CString;
use args::{ArgsErr, HELP_MSG, MetaCmd};
use cargo::{CargoErr, Options, Toolchain, ToolchainErr};
+#[cfg(target_os = "openbsd")]
use core::ffi::CStr;
-use manifest::{Manifest, ManifestErr, TooManyFeaturesErr};
+use manifest::{Manifest, ManifestErr};
#[cfg(target_os = "openbsd")]
use priv_sep::{Errno, Permissions, Promise, Promises};
use std::{
@@ -27,16 +28,24 @@ use std::{
enum E {
/// Error related to the passed arguments.
Args(ArgsErr),
- /// `Cargo.toml` does not exist.
- NoCargoToml,
+ /// Error getting the current directory.
+ CurDir(Error),
/// Error looking for `Cargo.toml`.
- CargoTomlIo(Error),
+ CargoTomlIo(Error, PathBuf),
+ /// Error when `Cargo.toml` could not be found.
+ ///
+ /// Note this is not returned when `--dir` is passed.
+ CargoTomlDoesNotExist(PathBuf),
+ /// Error canonicalizing `--dir`.
+ CanonicalizePath(Error, PathBuf),
+ /// Error setting the working directory.
+ SetDir(Error, PathBuf),
/// Error reading `Cargo.toml`.
- CargoTomlRead(Error),
- /// Error looking for `rustup-toolchain.toml`.
- RustupToolchainTomlIo(Error),
+ CargoTomlRead(Error, PathBuf),
+ /// Error looking for `rust-toolchain.toml`.
+ RustToolchainTomlIo(Error, PathBuf),
/// Error related to extracting the necessary data from `Cargo.toml`.
- Manifest(ManifestErr),
+ Manifest(Box<ManifestErr>),
/// Error from `Msrv::compare_to_other`.
Toolchain(Box<ToolchainErr>),
/// Error from OpenBSD `pledge`.
@@ -49,7 +58,7 @@ enum E {
#[cfg(target_os = "openbsd")]
CargoPathCStr,
/// Variant returned where there are too many features to generate the power set on.
- TooManyFeatures,
+ TooManyFeatures(PathBuf),
/// Unable to write non-terminating messages to stderr.
StdErr,
/// Unable to write the help message to stdout.
@@ -67,20 +76,38 @@ impl E {
let mut stderr = io::stderr().lock();
match self {
Self::Args(e) => e.write(stderr),
- Self::NoCargoToml => writeln!(
- stderr,
- "Cargo.toml doesn't exist in the current nor ancestor directories."
- ),
- Self::CargoTomlIo(err) => {
- writeln!(stderr, "There was an error looking for Cargo.toml: {err}.")
+ Self::CurDir(err) => {
+ writeln!(
+ stderr,
+ "There was an error getting the working directory: {err}."
+ )
+ }
+ Self::CargoTomlIo(err, p) => {
+ writeln!(stderr, "There was an error looking for Cargo.toml in {} and its ancestor directories: {err}.", p.display())
+ }
+ Self::CargoTomlDoesNotExist(p) => {
+ writeln!(stderr, "Cargo.toml does not exist in {} nor its ancestor directories.", p.display())
+ }
+ Self::CanonicalizePath(err, p) => {
+ writeln!(
+ stderr,
+ "There was an error canonicalizing the path {}: {err}.",
+ p.display()
+ )
+ }
+ Self::SetDir(err, p) => {
+ writeln!(
+ stderr,
+ "There was an error changing the working directory to {}: {err}.", p.display()
+ )
}
- Self::CargoTomlRead(err) => {
- writeln!(stderr, "There was an error reading Cargo.toml: {err}.")
+ Self::CargoTomlRead(err, p) => {
+ writeln!(stderr, "There was an error reading {}: {err}.", p.display())
}
- Self::RustupToolchainTomlIo(err) => {
+ Self::RustToolchainTomlIo(err, p) => {
writeln!(
stderr,
- "There was an error looking for the existence of rust-toolchain.toml: {err}."
+ "There was an error looking for rust-toolchain.toml in {} and its ancestor directories: {err}.", p.display()
)
}
Self::Manifest(e) => e.write(stderr),
@@ -92,9 +119,9 @@ impl E {
#[cfg(target_os = "openbsd")]
Self::CargoPathCStr => writeln!(
stderr,
- "unable to convert the path passed for --cargo-path into a C string"
+ "unable to convert the path passed for --cargo-path into a C string."
),
- Self::TooManyFeatures => TooManyFeaturesErr::write(stderr),
+ Self::TooManyFeatures(p) => writeln!(stderr, "There are too many features defined in {}. The max number of features allowed is the number of bits that make up a pointer.", p.display()),
Self::StdErr => Ok(()),
Self::Help(err) => writeln!(
stderr,
@@ -119,7 +146,7 @@ impl E {
const fn priv_init<Never>() -> Result<(), Never> {
Ok(())
}
-/// Returns the inital set of `Promises` we pledged.
+/// Returns the initial set of `Promises` we pledged in addition to allowing read permissions to the entire file system.
#[cfg(target_os = "openbsd")]
fn priv_init() -> Result<Promises, E> {
let proms = Promises::new([
@@ -136,52 +163,29 @@ fn priv_init() -> Result<Promises, E> {
.map(|()| proms)
})
}
-/// `"Cargo.toml"` as a `CStr`.
-const CARGO_CSTR: &CStr = c"Cargo.toml";
+/// `c"/"`.
+#[cfg(target_os = "openbsd")]
+const ROOT: &CStr = c"/";
/// `"Cargo.toml"`.
-const CARGO: &str = match CARGO_CSTR.to_str() {
- Ok(val) => val,
- Err(_) => panic!("Cargo.toml is not a valid str"),
-};
-/// `"rust-toolchain.toml"` as a `CStr`.
-const RUST_TOOLCHAIN_CSTR: &CStr = c"rust-toolchain.toml";
-/// `"rust-toolchain.toml"`.
-const RUST_TOOLCHAIN: &str = match RUST_TOOLCHAIN_CSTR.to_str() {
- Ok(val) => val,
- Err(_) => panic!("rust-toolchain.toml is not a valid str"),
-};
-/// No-op.
-#[cfg(not(target_os = "openbsd"))]
-#[expect(clippy::unnecessary_wraps, reason = "unify OpenBSD with non-OpenBSD")]
-const fn unveil_next<Never>() -> Result<(), Never> {
- Ok(())
+fn cargo_toml() -> &'static Path {
+ Path::new("Cargo.toml")
}
-/// Remove file permissions before only allowing read permissions to `CARGO_CSTR`
-/// and `RUST_TOOLCHAIN_CSTR`.
-#[cfg(target_os = "openbsd")]
-fn unveil_next() -> Result<(), E> {
- Permissions::NONE
- .unveil(c"/")
- .and_then(|()| {
- Permissions::READ
- .unveil(CARGO_CSTR)
- .and_then(|()| Permissions::READ.unveil(RUST_TOOLCHAIN_CSTR))
- })
- .map_err(E::Unveil)
+/// `"rust-toolchain.toml"`.
+fn rust_toolchain_toml() -> &'static Path {
+ Path::new("rust-toolchain.toml")
}
/// No-op.
#[cfg(not(target_os = "openbsd"))]
#[expect(clippy::unnecessary_wraps, reason = "unify OpenBSD with non-OpenBSD")]
-const fn priv_sep_final(_: &mut (), _: &Path) -> Result<(), E> {
+const fn priv_sep_final<Never>(_: &mut (), _: &Path) -> Result<(), Never> {
Ok(())
}
-/// Remove read permissions to `CARGO_CSTR` and `RUST_TOOLCHAIN_CSTR` before allowing execute permissions to
-/// `cargo_path`. Last remove read and unveil abilities.
+/// Remove read permissions to the entire file system before allowing execute permissions to `cargo_path` or `ROOT`.
+/// Lastly, remove the read and unveil permissions.
#[cfg(target_os = "openbsd")]
fn priv_sep_final(proms: &mut Promises, cargo_path: &Path) -> Result<(), E> {
Permissions::NONE
- .unveil(CARGO_CSTR)
- .and_then(|()| Permissions::NONE.unveil(RUST_TOOLCHAIN_CSTR))
+ .unveil(ROOT)
.map_err(E::Unveil)
.and_then(|()| {
if cargo_path.is_absolute() {
@@ -189,7 +193,7 @@ fn priv_sep_final(proms: &mut Promises, cargo_path: &Path) -> Result<(), E> {
.map_err(|_e| E::CargoPathCStr)
.and_then(|path_c| Permissions::EXECUTE.unveil(&path_c).map_err(E::Unveil))
} else {
- Permissions::EXECUTE.unveil(c"/").map_err(E::Unveil)
+ Permissions::EXECUTE.unveil(ROOT).map_err(E::Unveil)
}
.and_then(|()| {
proms
@@ -198,24 +202,24 @@ fn priv_sep_final(proms: &mut Promises, cargo_path: &Path) -> Result<(), E> {
})
})
}
-/// Checks if `Cargo.toml` exists in `cur_dir`; if not, it recursively checks the ancestor
-/// directories.
+/// Finds `file` in `cur_dir` or its ancestor directories, returning `true` iff `file` exists. Searching is
+/// done from child directories up.
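+/// When `Ok(true)` is returned, `cur_dir` is left as the directory that contains `file`.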
///
/// We make this recursive in the rare (impossible?) case that traversal becomes circular; in which case,
/// we want a stack overflow to occur.
-fn set_env(mut cur_dir: PathBuf) -> Result<bool, Error> {
- match fs::exists(CARGO) {
- Ok(exists) => {
- if exists {
- Ok(true)
- } else if cur_dir.pop() {
- env::set_current_dir(cur_dir.as_path()).and_then(|()| set_env(cur_dir))
- } else {
- Ok(false)
- }
+fn get_path_of_file(cur_dir: &mut PathBuf, file: &Path) -> Result<bool, Error> {
+ cur_dir.push(file);
+ fs::exists(&cur_dir).and_then(|exists| {
+ // Remove `file`.
+ _ = cur_dir.pop();
+ if exists {
+ Ok(true)
+ } else if cur_dir.pop() {
+ get_path_of_file(cur_dir, file)
+ } else {
+ Ok(false)
}
- Err(e) => Err(e),
- }
+ })
}
/// Current version of this crate.
const VERSION: &str = "ci-cargo 0.1.0\n";
@@ -225,65 +229,73 @@ fn main() -> ExitCode {
MetaCmd::Help => io::stdout().lock().write_all(HELP_MSG.as_bytes()).map_err(E::Help),
MetaCmd::Version => io::stdout().lock().write_all(VERSION.as_bytes()).map_err(E::Version),
MetaCmd::Cargo(cmd, opts) => opts.exec_dir.map_or_else(
- || env::current_dir().map_err(E::CargoTomlIo).and_then(|dir| set_env(dir).map_err(E::CargoTomlIo).and_then(|exists| if exists { Ok(()) } else { Err(E::NoCargoToml) })),
- |path| env::set_current_dir(path).map_err(E::CargoTomlIo),
- ).and_then(|()| {
- unveil_next().and_then(|()| fs::read_to_string(CARGO).map_err(E::CargoTomlRead).and_then(|toml| Manifest::from_toml(toml, opts.allow_implied_features).map_err(E::Manifest).and_then(|man| fs::exists(RUST_TOOLCHAIN).map_err(E::RustupToolchainTomlIo).and_then(|rustup_toolchain_exists| priv_sep_final(&mut proms, &opts.cargo_path).and_then(|()| {
- match man.msrv() {
- None => Ok((None, if rustup_toolchain_exists || (!rustup::SUPPORTED && opts.rustup_home.is_none()) {
- Toolchain::Default
- } else {
- Toolchain::Stable
- })),
- Some(val) => if rustup::SUPPORTED || opts.rustup_home.is_some() {
- val.compare_to_other(rustup_toolchain_exists, opts.rustup_home.as_deref(), &opts.cargo_path, opts.cargo_home.as_deref()).map_err(E::Toolchain).map(|msrv_string| (msrv_string, if rustup_toolchain_exists { Toolchain::Default } else { Toolchain::Stable }))
- } else {
- Ok((None, Toolchain::Default))
- },
- }.and_then(|(msrv_string, toolchain)| {
- let default_feature_does_not_exist = !man.features().contains_default();
- man.features().power_set().map_err(|_e| E::TooManyFeatures).and_then(|mut power_set| {
- let mut non_term_errs = HashSet::new();
- cmd.run(Options { toolchain, rustup_home: opts.rustup_home, cargo_path: opts.cargo_path, cargo_home: opts.cargo_home, color: opts.color, ignore_compile_errors: opts.ignore_compile_errors, default_feature_does_not_exist, non_terminating_errors: &mut non_term_errs, }, msrv_string.as_deref(), &mut power_set).map_err(E::Cargo).and_then(|()| {
- if non_term_errs.is_empty() {
- Ok(())
- } else {
- // `StderrLock` is not buffered.
- let mut stderr = BufWriter::new(io::stderr().lock());
- non_term_errs.into_iter().try_fold((), |(), msg| stderr.write_all(msg.as_bytes())).and_then(|()| stderr.flush()).map_err(|_e| E::StdErr)
- }
- }).and_then(|()| {
- if opts.summary {
- let mut stdout = io::stdout().lock();
- if matches!(toolchain, Toolchain::Stable) {
- if let Some(ref msrv_val) = msrv_string {
- writeln!(stdout, "Toolchains used: cargo +stable and cargo {msrv_val}")
- } else {
- writeln!(stdout, "Toolchain used: cargo +stable")
- }
- } else if let Some(ref msrv_val) = msrv_string {
- writeln!(stdout, "Toolchains used: cargo and cargo {msrv_val}")
+ || env::current_dir().map_err(E::CurDir).and_then(|mut path| {
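+ // No --dir was passed: search the working directory and its ancestor directories for Cargo.toml.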
+ let search_start = path.clone();
+ get_path_of_file(&mut path, cargo_toml()).map_err(|e| E::CargoTomlIo(e, search_start.clone())).and_then(|exists| if exists { Ok(path) } else { Err(E::CargoTomlDoesNotExist(search_start)) })
+ }),
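+ // --dir was passed: canonicalize the supplied path.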
+ |path| fs::canonicalize(&path).map_err(|e| E::CanonicalizePath(e, path)),
+ ).and_then(|mut cur_dir| env::set_current_dir(&cur_dir).map_err(|e| E::SetDir(e, cur_dir.clone())).and_then(|()| {
+ cur_dir.push(cargo_toml());
+ fs::read_to_string(&cur_dir).map_err(|e| E::CargoTomlRead(e, cur_dir.clone())).and_then(|toml| Manifest::from_toml(toml, opts.allow_implied_features, &cur_dir).map_err(E::Manifest).and_then(|man| {
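+ // Look for rust-toolchain.toml starting in the directory that contains Cargo.toml and walking up its ancestors.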
+ let mut cargo_toml_path = cur_dir.clone();
+ _ = cargo_toml_path.pop();
+ get_path_of_file(&mut cargo_toml_path, rust_toolchain_toml()).map_err(|e| E::RustToolchainTomlIo(e, cargo_toml_path)).and_then(|rust_toolchain_exists| priv_sep_final(&mut proms, &opts.cargo_path).and_then(|()| {
+ match man.msrv() {
+ None => Ok((None, if rust_toolchain_exists || (!rustup::SUPPORTED && opts.rustup_home.is_none()) {
+ Toolchain::Default
+ } else {
+ Toolchain::Stable
+ })),
+ Some(val) => if rustup::SUPPORTED || opts.rustup_home.is_some() {
+ val.compare_to_other(rust_toolchain_exists, opts.rustup_home.as_deref(), &opts.cargo_path, opts.cargo_home.as_deref()).map_err(E::Toolchain).map(|msrv_string| (msrv_string, if rust_toolchain_exists { Toolchain::Default } else { Toolchain::Stable }))
+ } else {
+ Ok((None, Toolchain::Default))
+ },
+ }.and_then(|(msrv_string, toolchain)| {
+ let default_feature_does_not_exist = !man.features().contains_default();
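+ // Building the power set fails when there are too many features (more than the number of bits that make up a pointer).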
+ man.features().power_set().map_err(|_e| E::TooManyFeatures(cur_dir)).and_then(|mut power_set| {
+ let mut non_term_errs = HashSet::new();
+ cmd.run(Options { toolchain, rustup_home: opts.rustup_home, cargo_path: opts.cargo_path, cargo_home: opts.cargo_home, color: opts.color, ignore_compile_errors: opts.ignore_compile_errors, default_feature_does_not_exist, non_terminating_errors: &mut non_term_errs, }, msrv_string.as_deref(), &mut power_set, opts.progress).map_err(E::Cargo).and_then(|()| {
+ if non_term_errs.is_empty() {
+ Ok(())
} else {
- writeln!(stdout, "Toolchain used: cargo")
- }.and_then(|()| {
- writeln!(stdout, "Features used:").and_then(|()| {
- power_set.reset();
- while let Some(features) = power_set.next_set() {
- if let Err(e) = writeln!(stdout, "{features}") {
- return Err(e);
- }
+ // `StderrLock` is not buffered.
+ let mut stderr = BufWriter::new(io::stderr().lock());
+ non_term_errs.into_iter().try_fold((), |(), msg| stderr.write_all(msg.as_bytes())).and_then(|()| stderr.flush()).map_err(|_e| E::StdErr)
+ }
+ }).and_then(|()| {
+ if opts.summary {
+ let mut stdout = io::stdout().lock();
+ if matches!(toolchain, Toolchain::Stable) {
+ if let Some(ref msrv_val) = msrv_string {
+ writeln!(stdout, "Toolchains used: cargo +stable and cargo {msrv_val}")
+ } else {
+ writeln!(stdout, "Toolchain used: cargo +stable")
}
- Ok(())
- })
- }).map_err(E::Summary)
- } else {
- Ok(())
- }
+ } else if let Some(ref msrv_val) = msrv_string {
+ writeln!(stdout, "Toolchains used: cargo and cargo {msrv_val}")
+ } else {
+ writeln!(stdout, "Toolchain used: cargo")
+ }.and_then(|()| {
+ writeln!(stdout, "Features used:").and_then(|()| {
+ power_set.reset();
+ while let Some(features) = power_set.next_set() {
+ if let Err(e) = writeln!(stdout, "{}", if features.is_empty() { "<none>" } else { features }) {
+ return Err(e);
+ }
+ }
+ Ok(())
+ })
+ }).map_err(E::Summary)
+ } else {
+ Ok(())
+ }
+ })
})
})
- })
- })))))
- })
+ }))
+ }))
+ }))
}
})).map_or_else(E::into_exit_code, |()| ExitCode::SUCCESS)
}
diff --git a/src/manifest.rs b/src/manifest.rs
@@ -2,47 +2,181 @@ use super::cargo::{Toolchain, ToolchainErr};
use alloc::borrow::Cow;
use core::cmp::Ordering;
use std::{
- io::{Error, StderrLock, Write as _},
- path::Path,
+ fs,
+ io::{Error, ErrorKind, StderrLock, Write as _},
+ path::{Path, PathBuf},
};
use toml::{
Spanned,
de::{DeArray, DeValue, Error as TomlErr},
map::Map,
};
-/// Error returned from extracting `"package"`.
+/// `"workspace"`.
+const WORKSPACE: &str = "workspace";
+/// `"package"`.
+const PACKAGE: &str = "package";
+/// `"rust-version"`.
+const RUST_VERSION: &str = "rust-version";
+/// Error returned from extracting `"workspace"`.
#[cfg_attr(test, derive(Debug, PartialEq))]
+#[derive(Clone, Copy)]
+pub(crate) enum WorkspaceErr {
+ /// Variant returned when there is no `"workspace"` key.
+ Missing,
+ /// Variant returned when `"workspace"` is not a table.
+ InvalidType,
+ /// Variant returned when `workspace.package` does not exist.
+ MissingPackage,
+ /// Variant returned when `workspace.package` is not a table.
+ InvalidPackageType,
+ /// Variant returned when `workspace.package.rust-version` does not exist.
+ MissingPackageMsrv,
+ /// Variant returned when `workspace.package.rust-version` is not a string.
+ InvalidPackageMsrvType,
+ /// Variant returned when `workspace.package.rust-version` is not a valid MSRV.
+ Msrv,
+}
+impl WorkspaceErr {
+ /// Writes `self` to `stderr`.
+ fn write(self, mut stderr: StderrLock<'_>, file: &Path) -> Result<(), Error> {
+ match self {
+ Self::Missing => writeln!(
+ stderr,
+ "'{WORKSPACE}' does not exist in {}.",
+ file.display()
+ ),
+ Self::InvalidType => writeln!(
+ stderr,
+ "'{WORKSPACE}' exists but is not a table in {}.",
+ file.display()
+ ),
+ Self::MissingPackage => writeln!(
+ stderr,
+ "'{WORKSPACE}.{PACKAGE}' does not exist in {}.",
+ file.display()
+ ),
+ Self::InvalidPackageType => writeln!(
+ stderr,
+ "'{WORKSPACE}.{PACKAGE}' exists but is not a table in {}.",
+ file.display()
+ ),
+ Self::MissingPackageMsrv => writeln!(
+ stderr,
+ "'{WORKSPACE}.{PACKAGE}.{RUST_VERSION}' does not exist in {}.",
+ file.display()
+ ),
+ Self::InvalidPackageMsrvType => writeln!(
+ stderr,
+ "'{WORKSPACE}.{PACKAGE}.{RUST_VERSION}' exists but is not a string in {}.",
+ file.display()
+ ),
+ Self::Msrv => writeln!(
+ stderr,
+ "'{WORKSPACE}.{PACKAGE}.{RUST_VERSION}' exists but is not a valid MSRV in {}.",
+ file.display()
+ ),
+ }
+ }
+}
+/// Error returned from extracting `"package"`.
+#[cfg_attr(test, derive(Debug))]
pub(crate) enum PackageErr {
/// Variant returned when there is no `"package"` key.
Missing,
- /// Variant returned when `"package"` is not a table in Cargo.toml.
+ /// Variant returned when `"package"` is not a table.
InvalidType,
- /// Variant returned when `package.rust-version` is not a string in Cargo.toml.
+ /// Variant returned when `package.rust-version` is not a string nor table.
InvalidMsrvType,
- /// Variant returned from extracting `package.rust-version`.
+ /// Variant returned when `package.rust-version` is not a valid MSRV.
Msrv,
+ /// Variant returned when `package.rust-version` is a table that doesn't contain the `"workspace"` key.
+ MsrvWorkspaceMissing,
+ /// Variant returned when `package.rust-version.workspace` is not a Boolean with value `true`.
+ MsrvWorkspaceVal,
+ /// Variant returned when `package.workspace` is not a string.
+ InvalidWorkspaceType,
+ /// Variant returned when searching for the workspace file errors.
+ WorkspaceIo(Error),
+ /// Variant returned when there is no workspace `Cargo.toml`.
+ ///
+ /// This is only returned if the package's MSRV is inherited from the workspace, there is no
+ /// `workspace` key in the package's `Cargo.toml`, and there is no `workspace` key in the table
+ /// `package` (i.e., this is only returned when we must search for it by crawling up the directory).
+ WorkspaceDoesNotExist,
+ /// Variant returned when the file located at `package.workspace` could not be read.
+ ///
+ /// This is only returned if the table `package` had a key `workspace` that was a string, or we searched
+ /// for the workspace and found a `Cargo.toml`.
+ WorkspaceRead(Error, PathBuf),
+ /// Variant returned when the file located at `package.workspace` is not valid TOML.
+ ///
+ /// This is only returned if the table `package` had a key `workspace` that was a string, or we searched
+ /// for the workspace and found a `Cargo.toml`.
+ WorkspaceToml(TomlErr, PathBuf),
+ /// Variant returned when `package.rust-version` defers to the workspace MSRV, but there was an error
+ /// extracting the workspace MSRV from the file located at the contained `PathBuf`.
+ Workspace(WorkspaceErr, PathBuf),
}
impl PackageErr {
/// Writes `self` to `stderr`.
- fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
- writeln!(
- stderr,
- "{}",
- match self {
- Self::Missing => {
- "Error with Cargo.toml: 'package' is missing."
- }
- Self::InvalidType => {
- "Error with Cargo.toml: 'package' is not a table."
- }
- Self::InvalidMsrvType =>
- "Error with Cargo.toml: 'package.rust-version' is not a string.",
- Self::Msrv =>
- "Error with Cargo.toml: 'package.rust-version' is not a valid MSRV. It must be of the form <major>[.<minor>[.<patch>]] where major, minor, and patch are unsigned 64-bit integers without leading 0s.",
+ #[expect(clippy::unreachable, reason = "want to crash when there is a bug")]
+ fn write(self, mut stderr: StderrLock<'_>, file: &Path) -> Result<(), Error> {
+ match self {
+ Self::Missing => writeln!(stderr, "'{PACKAGE}' does not exist in {}.", file.display()),
+ Self::InvalidType => writeln!(
+ stderr,
+ "'{PACKAGE}' exists but is not a table in {}.",
+ file.display()
+ ),
+ Self::InvalidMsrvType => writeln!(
+ stderr,
+ "'{PACKAGE}.{RUST_VERSION}' exists but is not a string nor table in {}.",
+ file.display()
+ ),
+ Self::Msrv => writeln!(
+ stderr,
+ "'{PACKAGE}.{RUST_VERSION}' is a string but is not a valid MSRV in {}.",
+ file.display()
+ ),
+ Self::MsrvWorkspaceMissing => writeln!(
+ stderr,
+ "'{PACKAGE}.{RUST_VERSION}' is a table but does not contain the key '{WORKSPACE}' in {}.",
+ file.display()
+ ),
+ Self::MsrvWorkspaceVal => writeln!(
+ stderr,
+ "'{PACKAGE}.{RUST_VERSION}.{WORKSPACE}' exists but is not a Boolean or is not true in {}.",
+ file.display()
+ ),
+ Self::WorkspaceIo(e) => writeln!(
+ stderr,
+ "There was an error looking for the workspace Cargo.toml in {} and its ancestor directories: {e}.",
+ file.parent().unwrap_or_else(|| unreachable!("there is a bug in main. manifest::Manifest::from_toml must be passed the absolute path to the package's Cargo.toml.")).display(),
+ ),
+ Self::WorkspaceDoesNotExist => writeln!(
+ stderr,
+ "There is no workspace Cargo.toml in {} nor its ancestor directories.",
+ file.parent().unwrap_or_else(|| unreachable!("there is a bug in main. manifest::Manifest::from_toml must be passed the absolute path to the package's Cargo.toml.")).display(),
+ ),
+ Self::InvalidWorkspaceType => writeln!(
+ stderr,
+ "'{PACKAGE}.{WORKSPACE}' exists but is not a string in {}.",
+ file.display()
+ ),
+ Self::WorkspaceRead(e, p) => {
+ writeln!(stderr, "There was an issue reading the workspace file {}: {e}.", p.display())
}
- )
+ Self::WorkspaceToml(e, p) => write!(
+ stderr,
+ "Error parsing workspace file {} as TOML: {e}.",
+ p.display()
+ ),
+ Self::Workspace(e, p) => e.write(stderr, &p),
+ }
}
}
+/// `"features"`.
+const FEATURES: &str = "features";
/// Error returned from extracting feature dependencies.
#[cfg_attr(test, derive(Debug, PartialEq))]
pub(crate) enum FeatureDependenciesErr {
@@ -74,29 +208,34 @@ pub(crate) enum FeatureDependenciesErr {
}
impl FeatureDependenciesErr {
/// Writes `self` to `stderr`.
- fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ fn write(self, mut stderr: StderrLock<'_>, file: &Path) -> Result<(), Error> {
match self {
Self::InvalidFeatureType(name) => {
writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' is not an array."
+ "'{FEATURES}.{name}' is not an array in {}.",
+ file.display()
)
}
Self::InvalidDependencyType(name) => writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' contains a value that is not a string."
+ "'{FEATURES}.{name}' contains a value that is not a string in {}.",
+ file.display()
),
Self::InvalidDependency(name, dep_name) => writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' contains '{dep_name}' which is neither a feature nor dependency."
+ "'{FEATURES}.{name}' contains '{dep_name}' which is neither a feature nor dependency in {}.",
+ file.display()
),
Self::CyclicFeature(name) => writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' is a cyclic feature."
+ "'{FEATURES}.{name}' is a cyclic feature in {}.",
+ file.display()
),
Self::RedundantDependency(name, dep_name) => writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' contains the redundant dependency '{dep_name}'."
+ "'{FEATURES}.{name}' contains the redundant dependency '{dep_name}' in {}.",
+ file.display()
),
}
}
@@ -115,18 +254,23 @@ pub(crate) enum FeaturesErr {
}
impl FeaturesErr {
/// Writes `self` to `stderr`.
- fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ fn write(self, mut stderr: StderrLock<'_>, file: &Path) -> Result<(), Error> {
match self {
Self::InvalidType => {
- writeln!(stderr, "Error with Cargo.toml: 'features' is not a table.")
+ writeln!(
+ stderr,
+ "'{FEATURES}' exists but is not a table in {}.",
+ file.display()
+ )
}
Self::InvalidName(name) => {
writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' is not a valid feature name."
+ "'{FEATURES}.{name}' is not a valid feature name in {}.",
+ file.display()
)
}
- Self::FeatureDependencies(e) => e.write(stderr),
+ Self::FeatureDependencies(e) => e.write(stderr, file),
}
}
}
@@ -186,79 +330,102 @@ pub(crate) enum ImpliedFeaturesErr {
/// is defined as a feature.
InvalidDependency(String, String),
}
+/// `"optional"`.
+const OPTIONAL: &str = "optional";
+/// `"target"`.
+const TARGET: &str = "target";
impl ImpliedFeaturesErr {
/// Writes `self` to `stderr`.
- fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
+ fn write(self, mut stderr: StderrLock<'_>, file: &Path) -> Result<(), Error> {
match self {
Self::Dependencies(e) => match e {
DependenciesErr::Type(name) => {
- writeln!(stderr, "Error with Cargo.toml: '{name}' is not a table.")
+ writeln!(
+ stderr,
+ "'{name}' exists but is not a table in {}.",
+ file.display()
+ )
}
DependenciesErr::Name(name, dep_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: '{name}.{dep_name}' is not a valid dependency name."
+ "'{name}.{dep_name}' is not a valid dependency name in {}.",
+ file.display()
)
}
DependenciesErr::DependencyType(name, dep_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: '{name}.{dep_name}' is not a string or table."
+ "'{name}.{dep_name}' exists but is not a string nor table in {}.",
+ file.display()
)
}
DependenciesErr::OptionalType(name, dep_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: '{name}.{dep_name}.optional' is not a Boolean."
+ "'{name}.{dep_name}.{OPTIONAL}' exists but is not a Boolean in {}.",
+ file.display()
)
}
DependenciesErr::ImpliedFeature(name, dep_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: '{name}.{dep_name}' causes an implied feature to be defined."
+ "'{name}.{dep_name}' causes an implied feature to be defined in {}, but implied features were forbidden.",
+ file.display()
)
}
},
Self::TargetType => {
- writeln!(stderr, "Error with Cargo.toml: 'target' is not a table.")
+ writeln!(
+ stderr,
+ "'{TARGET}' exists but is not a table in {}.",
+ file.display()
+ )
}
Self::TargetPlatformType(name) => {
writeln!(
stderr,
- "Error with Cargo.toml: 'target.{name}' is not a table."
+ "'{TARGET}.{name}' exists but is not a table in {}.",
+ file.display()
)
}
Self::TagetPlatformDependencies(name, e) => match e {
DependenciesErr::Type(table_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: 'target.{name}.{table_name}' is not a table."
+ "'{TARGET}.{name}.{table_name}' exists but is not a table in {}.",
+ file.display()
)
}
DependenciesErr::Name(table_name, dep_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}' is not a valid dependency name."
+ "'{TARGET}.{name}.{table_name}.{dep_name}' is not a valid dependency name in {}.",
+ file.display()
)
}
DependenciesErr::DependencyType(table_name, dep_name) => writeln!(
stderr,
- "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}' is not a string or table."
+ "'{TARGET}.{name}.{table_name}.{dep_name}' exists but is not a string nor table in {}.",
+ file.display()
),
DependenciesErr::OptionalType(table_name, dep_name) => writeln!(
stderr,
- "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}.optional' is not a Boolean."
+ "'{TARGET}.{name}.{table_name}.{dep_name}.{OPTIONAL}' exists but is not a Boolean in {}.",
+ file.display()
),
DependenciesErr::ImpliedFeature(table_name, dep_name) => {
writeln!(
stderr,
- "Error with Cargo.toml: 'target.{name}.{table_name}.{dep_name}' causes an implied feature to be defined."
+ "'{TARGET}.{name}.{table_name}.{dep_name}' causes an implied feature to be defined in {}, but implied features were forbidden.",
+ file.display()
)
}
},
Self::InvalidDependency(name, dep_name) => writeln!(
stderr,
- "Error with Cargo.toml: 'features.{name}' contains '{dep_name}' which is neither a feature nor dependency."
+ "'{FEATURES}.{name}' contains '{dep_name}' which is neither a feature nor dependency in {}.",
+ file.display()
),
}
}
@@ -267,37 +434,32 @@ impl ImpliedFeaturesErr {
#[cfg_attr(test, derive(Debug, PartialEq))]
pub(crate) enum ManifestErr {
/// Variant returned when Cargo.toml is not valid TOML.
- Toml(TomlErr),
+ Toml(TomlErr, PathBuf),
/// Variant returned when extracting `package`.
- Package(PackageErr),
+ Package(PackageErr, PathBuf),
/// Variant returned when extracting `features`.
- Features(FeaturesErr),
+ Features(FeaturesErr, PathBuf),
/// Variant returned when extracting dependencies in order to add implied features.
- ImpliedFeatures(ImpliedFeaturesErr),
+ ImpliedFeatures(ImpliedFeaturesErr, PathBuf),
}
impl ManifestErr {
/// Writes `self` to `stderr`.
pub(crate) fn write(self, mut stderr: StderrLock<'_>) -> Result<(), Error> {
match self {
- Self::Toml(e) => write!(stderr, "Cargo.toml is not valid TOML: {e}"),
- Self::Package(e) => e.write(stderr),
- Self::Features(e) => e.write(stderr),
- Self::ImpliedFeatures(e) => e.write(stderr),
+ Self::Toml(e, file) => write!(
+ stderr,
+ "Error parsing package file {} as TOML: {e}.",
+ file.display()
+ ),
+ Self::Package(e, file) => e.write(stderr, &file),
+ Self::Features(e, file) => e.write(stderr, &file),
+ Self::ImpliedFeatures(e, file) => e.write(stderr, &file),
}
}
}
/// Error when there are too many features to create the power set.
#[cfg_attr(test, derive(Debug, PartialEq))]
pub(crate) struct TooManyFeaturesErr;
-impl TooManyFeaturesErr {
- /// Writes `self` to `stderr`.
- pub(crate) fn write(mut stderr: StderrLock<'_>) -> Result<(), Error> {
- writeln!(
- stderr,
- "There are too many features in Cargo.toml. The max number of features allowed is dependent on the host architecture. Specifically, the number of features must not exceed the width of a pointer in bits."
- )
- }
-}
/// Parses `val` as a `u64` in decimal notation without leading 0s.
///
/// # Errors
@@ -323,98 +485,200 @@ pub(crate) struct Msrv {
patch: Option<u64>,
}
impl Msrv {
- /// Extracts `"package"` from `toml` before extracting `"rust-version"` from it.
+ /// Converts `msrv` into `Self` based on a valid MSRV string.
#[expect(unsafe_code, reason = "comments justify their correctness")]
+ fn extract_msrv(msrv: &str) -> Result<Self, ()> {
+ let mut iter = msrv.as_bytes().split(|b| *b == b'.');
+ iter.next().ok_or(()).and_then(|fst| {
+ // SAFETY:
+ // The original input is a `str` and we split on `b'.'` which is a single-byte
+ // UTF-8 code unit; thus we don't have to worry about splitting a multi-byte
+ // UTF-8 code unit.
+ let major_utf8 = unsafe { str::from_utf8_unchecked(fst) };
+ parse_int(major_utf8).and_then(|major| {
+ iter.next().map_or_else(
+ || {
+ Ok(Self {
+ major,
+ minor: None,
+ patch: None,
+ })
+ },
+ |snd| {
+ // SAFETY:
+ // The original input is a `str` and we split on `b'.'` which is
+ // a single-byte UTF-8 code unit; thus we don't have to worry
+ // about splitting a multi-byte UTF-8 code unit.
+ let minor_utf8 = unsafe { str::from_utf8_unchecked(snd) };
+ parse_int(minor_utf8).and_then(|minor_val| {
+ iter.next().map_or_else(
+ || {
+ Ok(Self {
+ major,
+ minor: Some(minor_val),
+ patch: None,
+ })
+ },
+ |lst| {
+ // SAFETY:
+ // The original input is a `str` and we split on
+ // `b'.'` which is a single-byte UTF-8 code
+ // unit; thus we don't have to worry about
+ // splitting a multi-byte UTF-8 code unit.
+ let patch_utf8 = unsafe { str::from_utf8_unchecked(lst) };
+ parse_int(patch_utf8).and_then(|patch_val| {
+ iter.next().map_or_else(
+ || {
+ Ok(Self {
+ major,
+ minor: Some(minor_val),
+ patch: Some(patch_val),
+ })
+ },
+ |_| Err(()),
+ )
+ })
+ },
+ )
+ })
+ },
+ )
+ })
+ })
+ }
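// Illustrative sketch, not part of this diff: the same "<major>[.<minor>[.<patch>]]" parse
// that `extract_msrv` performs above, written as standalone functions with flat control
// flow. `parse_part` mirrors the contract of this crate's `parse_int` (decimal u64, no
// sign, no leading 0s); the names here are hypothetical.
fn parse_part(s: &str) -> Result<u64, ()> {
    if s.is_empty()
        || (s.len() > 1 && s.starts_with('0'))
        || !s.bytes().all(|b| b.is_ascii_digit())
    {
        return Err(());
    }
    s.parse::<u64>().map_err(|_| ())
}
fn parse_msrv(msrv: &str) -> Result<(u64, Option<u64>, Option<u64>), ()> {
    let mut parts = msrv.split('.');
    let major = parse_part(parts.next().ok_or(())?)?;
    let minor = parts.next().map(parse_part).transpose()?;
    let patch = parts.next().map(parse_part).transpose()?;
    // A fourth component (e.g., "1.0.0.1") is rejected, just as in `extract_msrv`.
    if parts.next().is_some() {
        return Err(());
    }
    Ok((major, minor, patch))
}
fn main() {
    assert_eq!(parse_msrv("1.85"), Ok((1, Some(85), None)));
    assert!(parse_msrv("01.0.0").is_err());
    assert!(parse_msrv("1.0.0-nightly").is_err());
}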
+ /// Reads `workspace` from `toml` extracting the MSRV.
+ fn extract_workspace(
+ toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ ) -> Result<Self, WorkspaceErr> {
+ toml.get(WORKSPACE)
+ .ok_or(WorkspaceErr::Missing)
+ .and_then(|work_span| {
+ if let DeValue::Table(ref workspace) = *work_span.get_ref() {
+ workspace
+ .get(PACKAGE)
+ .ok_or(WorkspaceErr::MissingPackage)
+ .and_then(|pack_span| {
+ if let DeValue::Table(ref package) = *pack_span.get_ref() {
+ package
+ .get(RUST_VERSION)
+ .ok_or(WorkspaceErr::MissingPackageMsrv)
+ .and_then(|msrv_span| {
+ if let DeValue::String(ref msrv) = *msrv_span.get_ref() {
+ Self::extract_msrv(msrv)
+ .map_err(|()| WorkspaceErr::Msrv)
+ } else {
+ Err(WorkspaceErr::InvalidPackageMsrvType)
+ }
+ })
+ } else {
+ Err(WorkspaceErr::InvalidPackageType)
+ }
+ })
+ } else {
+ Err(WorkspaceErr::InvalidType)
+ }
+ })
+ }
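// Illustrative sketch, not part of this diff: the lookup path `extract_workspace` walks
// (`workspace.package.rust-version` as a string), shown with the plain `toml` crate's
// `Value` API rather than the span-preserving parser used above. The `toml` dependency
// and the helper name are assumptions made for illustration only.
fn workspace_msrv(manifest: &str) -> Option<String> {
    let value: toml::Value = manifest.parse().ok()?;
    value
        .as_table()?
        .get("workspace")?
        .as_table()?
        .get("package")?
        .as_table()?
        .get("rust-version")?
        .as_str()
        .map(str::to_owned)
}
fn main() {
    let manifest = "[workspace.package]\nrust-version = \"1.85\"\n";
    assert_eq!(workspace_msrv(manifest), Some("1.85".to_owned()));
}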
+ /// Recursively looks for `Cargo.toml` in `cur_dir` and ancestor directories until one is found
+ /// that contains a key named [`WORKSPACE`]. Once found, its MSRV is parsed and returned.
+ ///
+ /// We make this recursive in case the (impossible?) path traversal becomes cyclic, in which
+ /// case we want a stack overflow to occur.
+ ///
+ /// Note that any error other than a file-not-found error causes this to return an error.
+ fn get_workspace_toml(mut cur_dir: PathBuf) -> Result<Self, PackageErr> {
+ cur_dir.push(super::cargo_toml());
+ match fs::read_to_string(&cur_dir) {
+ Ok(file) => Map::parse(&file)
+ .map_err(|e| PackageErr::WorkspaceToml(e, cur_dir.clone()))
+ .and_then(|toml| {
+ let t = toml.into_inner();
+ if t.contains_key(WORKSPACE) {
+ Self::extract_workspace(&t).map_err(|e| PackageErr::Workspace(e, cur_dir))
+ } else {
+ _ = cur_dir.pop();
+ if cur_dir.pop() {
+ Self::get_workspace_toml(cur_dir)
+ } else {
+ Err(PackageErr::WorkspaceDoesNotExist)
+ }
+ }
+ }),
+ Err(e) => {
+ if matches!(e.kind(), ErrorKind::NotFound) {
+ _ = cur_dir.pop();
+ if cur_dir.pop() {
+ Self::get_workspace_toml(cur_dir)
+ } else {
+ Err(PackageErr::WorkspaceIo(e))
+ }
+ } else {
+ Err(PackageErr::WorkspaceIo(e))
+ }
+ }
+ }
+ }
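// Illustrative sketch, not part of this diff: the ancestor crawl `get_workspace_toml`
// performs, reduced to its path handling. A substring check stands in for the TOML parse
// and [`WORKSPACE`] key lookup, and, unlike the real code, every read error is treated as
// a miss; the function name is hypothetical.
use std::path::PathBuf;

fn find_workspace_manifest(mut dir: PathBuf) -> Option<PathBuf> {
    loop {
        dir.push("Cargo.toml");
        if std::fs::read_to_string(&dir).is_ok_and(|toml| toml.contains("[workspace]")) {
            return Some(dir);
        }
        // Drop "Cargo.toml", then move to the parent directory; a failed `pop` means the
        // filesystem root was reached without finding a workspace manifest.
        _ = dir.pop();
        if !dir.pop() {
            return None;
        }
    }
}
fn main() {
    let start = std::env::current_dir().expect("current directory");
    println!("{:?}", find_workspace_manifest(start));
}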
+ /// Extracts `"package"` from `toml` before extracting `"rust-version"` from it.
+ #[expect(
+ clippy::panic_in_result_fn,
+ reason = "want to crash when there is a bug"
+ )]
fn extract_from_toml(
toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
+ cargo_toml: &Path,
) -> Result<Option<Self>, PackageErr> {
- toml.get("package")
+ toml.get(PACKAGE)
.ok_or(PackageErr::Missing)
- .and_then(|pack_span| match *pack_span.get_ref() {
- DeValue::Table(ref package) => {
- package
- .get("rust-version")
- .map_or(Ok(None), |msrv_span| match *msrv_span.get_ref() {
- DeValue::String(ref msrv) => {
- let mut iter = msrv.as_bytes().split(|b| *b == b'.');
- iter.next().ok_or(PackageErr::Msrv).and_then(|fst| {
- // SAFETY:
- // The original input is a `str` and we split on `b'.'` which is a single-byte
- // UTF-8 code unit; thus we don't have to worry about splitting a multi-byte
- // UTF-8 code unit.
- let major_utf8 = unsafe { str::from_utf8_unchecked(fst) };
- parse_int(major_utf8)
- .map_err(|()| PackageErr::Msrv)
- .and_then(|major| {
- iter.next().map_or_else(
- || {
- Ok(Some(Self {
- major,
- minor: None,
- patch: None,
- }))
- },
- |snd| {
- // SAFETY:
- // The original input is a `str` and we split on `b'.'` which is
- // a single-byte UTF-8 code unit; thus we don't have to worry
- // about splitting a multi-byte UTF-8 code unit.
- let minor_utf8 = unsafe { str::from_utf8_unchecked(snd) };
- parse_int(minor_utf8)
- .map_err(|()| PackageErr::Msrv)
- .and_then(|minor_val| {
- iter.next().map_or_else(
- || {
- Ok(Some(Self {
- major,
- minor: Some(minor_val),
- patch: None,
- }))
- },
- |lst| {
- // SAFETY:
- // The original input is a `str` and we split on
- // `b'.'` which is a single-byte UTF-8 code
- // unit; thus we don't have to worry about
- // splitting a multi-byte UTF-8 code unit.
- let patch_utf8 =
- unsafe { str::from_utf8_unchecked(lst) };
- parse_int(patch_utf8).map_err(|()| PackageErr::Msrv).and_then(
- |patch_val| {
- iter.next().map_or_else(
- || {
- Ok(Some(Self {
- major,
- minor: Some(minor_val),
- patch: Some(patch_val),
- }))
- },
- |_| Err(PackageErr::Msrv),
- )
- },
- )
- },
- )
- })
- },
- )
- })
- })
- }
+ .and_then(|pack_span| {
+ if let DeValue::Table(ref package) = *pack_span.get_ref() {
+ package.get(RUST_VERSION).map_or(Ok(None), |msrv_span| {
+ match *msrv_span.get_ref() {
+ DeValue::String(ref msrv) => Self::extract_msrv(msrv)
+ .map_err(|()| PackageErr::Msrv)
+ .map(Some),
+ DeValue::Table(ref msrv) => msrv
+ .get(WORKSPACE)
+ .ok_or(PackageErr::MsrvWorkspaceMissing)
+ .and_then(|work| {
+ if matches!(*work.get_ref(), DeValue::Boolean(b) if b) {
+ package.get(WORKSPACE).map_or_else(
+ || if toml.contains_key(WORKSPACE) {
+ Self::extract_workspace(toml).map_err(|e| PackageErr::Workspace(e, cargo_toml.to_path_buf())).map(Some)
+ } else {
+ let mut search_path = cargo_toml.to_path_buf();
+ assert!(search_path.pop(), "there is a bug in main. manifest::Manifest::from_toml must be passed the absolute path to the package's Cargo.toml.");
+ if search_path.pop() {
+ Self::get_workspace_toml(search_path).map(Some)
+ } else {
+ Err(PackageErr::WorkspaceDoesNotExist)
+ }
+ },
+ |path_span| {
+ if let DeValue::String(ref workspace_path) = *path_span.get_ref() {
+ let mut path = cargo_toml.to_path_buf();
+ assert!(path.pop(), "there is a bug in main. manifest::Manifest::from_toml must be passed the absolute path to the package's Cargo.toml.");
+ path.push(workspace_path.as_ref());
+ path.push(super::cargo_toml());
+ fs::read_to_string(&path).map_err(|e| PackageErr::WorkspaceRead(e, path.clone())).and_then(|workspace_file| Map::parse(&workspace_file).map_err(|e| PackageErr::WorkspaceToml(e, path.clone())).and_then(|workspace_toml| Self::extract_workspace(workspace_toml.get_ref()).map_err(|e| PackageErr::Workspace(e, path)).map(Some)))
+ } else {
+ Err(PackageErr::InvalidWorkspaceType)
+ }
+ },
+ )
+ } else {
+ Err(PackageErr::MsrvWorkspaceVal)
+ }
+ }),
DeValue::Integer(_)
| DeValue::Float(_)
| DeValue::Boolean(_)
| DeValue::Datetime(_)
- | DeValue::Array(_)
- | DeValue::Table(_) => Err(PackageErr::InvalidMsrvType),
- })
+ | DeValue::Array(_) => Err(PackageErr::InvalidMsrvType),
+ }
+ })
+ } else {
+ Err(PackageErr::InvalidType)
}
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_) => Err(PackageErr::InvalidType),
})
}
/// Returns `Some` containing the MSRV with `'+'` prepended iff the stable or default toolchain is semantically
@@ -582,8 +846,28 @@ pub(crate) struct PowerSet<'a> {
///
/// This is of the form `"<feat_1>,<feat_2>,...,<feat_n>"`.
set: String,
+ /// Number of sets skipped due to being semantically equivalent to a smaller set.
+ skipped_sets_counter: usize,
}
impl<'a> PowerSet<'a> {
+ /// Returns the cardinality of the power set.
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ pub(crate) const fn len(&self) -> usize {
+ let len = self.feats.len();
+ // We don't allow construction when `self.feats.len() > usize::BITS`; thus
+ // 2^n overflows iff `self.feats.len()` is `Self::MAX_SET_LEN`.
+ // We treat that separately.
+ if len == Self::MAX_SET_LEN {
+ usize::MAX
+ } else {
+ // We verified that `len <= usize::BITS`; thus
+ // this won't overflow nor underflow since 2^0 = 1.
+ (1 << len) - 1
+ }
+ }
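// Illustrative sketch, not part of this diff: how a bitmask over the feature list selects
// a combination and is rendered as the "<feat_1>,<feat_2>,..." string handed to cargo.
// The iteration order and the skipping of semantically equivalent sets done by `PowerSet`
// are omitted; the function name is hypothetical.
fn set_for_index(feats: &[&str], idx: usize) -> String {
    let mut out = String::new();
    for (bit, feat) in feats.iter().enumerate() {
        if idx & (1 << bit) != 0 {
            if !out.is_empty() {
                out.push(',');
            }
            out.push_str(feat);
        }
    }
    out
}
fn main() {
    let feats = ["foo", "bar", "fizz"];
    assert_eq!(set_for_index(&feats, 0b111), "foo,bar,fizz"); // 2^3 - 1 selects every feature
    assert_eq!(set_for_index(&feats, 0b101), "foo,fizz");
    assert_eq!(set_for_index(&feats, 0), ""); // the empty combination
}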
/// Max cardinality of a set we allow to take the power set of.
// usize::MAX = 2^usize::BITS - 1 >= usize::BITS since usize::MAX >= 0;
// thus `usize::BITS as usize` is free from truncation.
@@ -620,30 +904,17 @@ impl<'a> PowerSet<'a> {
// value of `len` is `usize::BITS`.
// 16 * usize::BITS < 2^usize::BITS for `usize::BITS > 6`.
set: String::with_capacity(len << 4),
+ skipped_sets_counter: 0,
})
} else {
Err(TooManyFeaturesErr)
}
}
/// Resets `self` such that iteration returns to the beginning.
- #[expect(
- clippy::arithmetic_side_effects,
- reason = "comment justifies correctness"
- )]
pub(crate) const fn reset(&mut self) {
- let len = self.feats.len();
- // We start on index 2^n - 1.
- // We don't allow construction when `self.feats.len() > usize::BITS`; thus
- // 2^n overflows iff `self.feats.len()` is `Self::MAX_SET_LEN`.
- // We treat that separately.
- self.idx = if len == Self::MAX_SET_LEN {
- usize::MAX
- } else {
- // We verified that `len <= usize::BITS`; thus
- // this won't overflow nor underflow since 2^0 = 1.
- (1 << len) - 1
- };
+ self.idx = self.len();
self.has_remaining = true;
+ self.skipped_sets_counter = 0;
}
/// Writes the next element into `self.buffer` even if the set contains overlapping features.
#[expect(
@@ -679,20 +950,35 @@ impl<'a> PowerSet<'a> {
/// This returns `None` iff there are no more sets to return. It will continue to return `None`
/// unless [`Self::reset`] is called.
pub(crate) fn next_set(&mut self) -> Option<&str> {
+ self.next_set_with_skip_count().map(|tup| tup.0)
+ }
+ /// Returns the next set along with the number of sets skipped thus far.
+ ///
+ /// This returns `None` iff there are no more sets to return. It will continue to return `None`
+ /// unless [`Self::reset`] is called.
+ #[expect(
+ clippy::arithmetic_side_effects,
+ reason = "comment justifies correctness"
+ )]
+ pub(crate) fn next_set_with_skip_count(&mut self) -> Option<(&str, usize)> {
if self.has_remaining {
if self.check_overlap {
while self.has_remaining {
self.inner_next_set();
if pairwise_disconnected(self.buffer.as_slice(), self.feats) {
self.current_set();
- return Some(&self.set);
+ return Some((&self.set, self.skipped_sets_counter));
}
+ // This maxes at `usize::MAX` since we ensure a power set is not created on a
+ // set with more than `usize::BITS` elements.
+ // It is reset to 0 every time [`Self::reset`] is called as well.
+ self.skipped_sets_counter += 1;
}
None
} else {
self.inner_next_set();
self.current_set();
- Some(&self.set)
+ Some((&self.set, self.skipped_sets_counter))
}
} else {
None
@@ -783,70 +1069,59 @@ impl Features {
cycle_detection: &mut Vec<&'a str>,
allow_implied_features: bool,
) -> Result<(), FeatureDependenciesErr> {
- match *dependencies {
- DeValue::Array(ref info) => {
- info.iter()
- .try_fold((), |(), dep_span| match *dep_span.get_ref() {
- DeValue::String(ref dep_name) => {
- if is_feature_dependency_a_feature(dep_name) {
- if cycle_detection.contains(&dep_name.as_ref()) {
- Err(FeatureDependenciesErr::CyclicFeature(
- dep_name.clone().into_owned(),
- ))
- } else if let Some(next_feature) = features.get(dep_name.as_ref()) {
- cycle_detection.push(dep_name);
- Self::validate_dependencies(
- dep_name,
- next_feature.get_ref(),
- features,
- cycle_detection,
- allow_implied_features,
- )
- .map(|()| {
- // We require calling code to add `feature`
- // before calling this function. We always
- // add the most recent feature dependency.
- // Therefore this is not empty.
- _ = cycle_detection.pop().unwrap_or_else(Self::impossible);
- })
- } else if allow_implied_features {
- // `dep_name` may be an implied feature which we have yet to add.
- Ok(())
- } else {
- Err(FeatureDependenciesErr::InvalidDependency(
- cycle_detection
- .pop()
- // We require calling code to add `feature`
- // before calling this function. We always
- // add the most recent feature dependency.
- // Therefore this is not empty.
- .unwrap_or_else(Self::impossible)
- .to_owned(),
- dep_name.clone().into_owned(),
- ))
- }
- } else {
- Ok(())
- }
+ if let DeValue::Array(ref info) = *dependencies {
+ info.iter().try_fold((), |(), dep_span| {
+ if let DeValue::String(ref dep_name) = *dep_span.get_ref() {
+ if is_feature_dependency_a_feature(dep_name) {
+ if cycle_detection.contains(&dep_name.as_ref()) {
+ Err(FeatureDependenciesErr::CyclicFeature(
+ dep_name.clone().into_owned(),
+ ))
+ } else if let Some(next_feature) = features.get(dep_name.as_ref()) {
+ cycle_detection.push(dep_name);
+ Self::validate_dependencies(
+ dep_name,
+ next_feature.get_ref(),
+ features,
+ cycle_detection,
+ allow_implied_features,
+ )
+ .map(|()| {
+ // We require calling code to add `feature`
+ // before calling this function. We always
+ // add the most recent feature dependency.
+ // Therefore this is not empty.
+ _ = cycle_detection.pop().unwrap_or_else(Self::impossible);
+ })
+ } else if allow_implied_features {
+ // `dep_name` may be an implied feature which we have yet to add.
+ Ok(())
+ } else {
+ // We require calling code to add `feature`
+ // before calling this function. We always
+ // add the most recent feature dependency.
+ // Therefore this is not empty.
+ Err(FeatureDependenciesErr::InvalidDependency(
+ cycle_detection
+ .pop()
+ .unwrap_or_else(Self::impossible)
+ .to_owned(),
+ dep_name.clone().into_owned(),
+ ))
}
- DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_)
- | DeValue::Table(_) => Err(FeatureDependenciesErr::InvalidDependencyType(
- feature.to_owned(),
- )),
- })
- }
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Table(_) => Err(FeatureDependenciesErr::InvalidFeatureType(
+ } else {
+ Ok(())
+ }
+ } else {
+ Err(FeatureDependenciesErr::InvalidDependencyType(
+ feature.to_owned(),
+ ))
+ }
+ })
+ } else {
+ Err(FeatureDependenciesErr::InvalidFeatureType(
feature.to_owned(),
- )),
+ ))
}
}
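// Illustrative sketch, not part of this diff: the cycle check `validate_dependencies`
// performs, reduced to a depth-first walk over a feature -> feature-dependency map using
// the same visit-stack idea as `cycle_detection`. The names and map type are hypothetical.
use std::collections::BTreeMap;

fn has_cycle<'a>(
    feature: &'a str,
    features: &'a BTreeMap<&'a str, Vec<&'a str>>,
    stack: &mut Vec<&'a str>,
) -> bool {
    stack.push(feature);
    let cyclic = features.get(feature).is_some_and(|deps| {
        deps.iter()
            .any(|&dep| stack.contains(&dep) || has_cycle(dep, features, stack))
    });
    _ = stack.pop();
    cyclic
}
fn main() {
    let features = BTreeMap::from([("a", vec!["b"]), ("b", vec!["a"]), ("c", vec![])]);
    assert!(has_cycle("a", &features, &mut Vec::new()));
    assert!(!has_cycle("c", &features, &mut Vec::new()));
}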
/// Verifies there are no redundant dependencies that are features in `dependencies`.
@@ -860,50 +1135,44 @@ impl Features {
features: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
allow_implied_features: bool,
) -> bool {
- dependencies
- .iter()
- .any(|dep_span| match *dep_span.get_ref() {
- DeValue::String(ref dep_name) => {
- is_feature_dependency_a_feature(dep_name)
- && (feature == dep_name
- || features.get(dep_name.as_ref()).map_or_else(
- || {
- if allow_implied_features {
- false
- } else {
+ dependencies.iter().any(|dep_span| {
+ if let DeValue::String(ref dep_name) = *dep_span.get_ref() {
+ is_feature_dependency_a_feature(dep_name)
+ && (feature == dep_name
+ || features.get(dep_name.as_ref()).map_or_else(
+ || {
+ if allow_implied_features {
+ false
+ } else {
+ // We require `validate_dependencies` to be called
+ // before this function which ensures all features
+ // recursively in the `dependencies` are defined as
+ // features iff `!allow_implied_features`.
+ Self::impossible()
+ }
+ },
+ |next_feature_span| {
+ Self::check_redundant_dependencies(
+ feature,
+ next_feature_span
+ .get_ref()
+ .as_array()
// We require `validate_dependencies` to be called
- // before this function which ensures all features
- // recursively in the `dependencies` are defined as
- // features iff `!allow_implied_features`.
- Self::impossible()
- }
- },
- |next_feature_span| {
- Self::check_redundant_dependencies(
- feature,
- next_feature_span
- .get_ref()
- .as_array()
- // We require `validate_dependencies` to be called
- // before this function which ensures all feature
- // dependencies recursively are arrays.
- .unwrap_or_else(Self::impossible),
- features,
- allow_implied_features,
- )
- },
- ))
- }
+ // before this function which ensures all feature
+ // dependencies recursively are arrays.
+ .unwrap_or_else(Self::impossible),
+ features,
+ allow_implied_features,
+ )
+ },
+ ))
+ } else {
// We require `validate_dependencies` to be called
// before this function which ensures all dependencies
// recursively in `dependencies` are strings.
- DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_)
- | DeValue::Table(_) => Self::impossible(),
- })
+ Self::impossible()
+ }
+ })
}
/// Extracts the feature dependencies associated with `feature`.
///
@@ -942,73 +1211,71 @@ impl Features {
// `validate_dependencies` ensures `dependencies` is an array.
let deps = dependencies.as_array().unwrap_or_else(Self::impossible);
let mut vec_deps = Vec::with_capacity(deps.len());
- deps.iter().enumerate().try_fold((), |(), (idx, dep_span)| match *dep_span.get_ref() {
- DeValue::String(ref dep_name) => {
- let dep_utf8 = dep_name.as_bytes();
- if dep_utf8.contains(&b'/') {
+ deps.iter().enumerate().try_fold((), |(), (idx, dep_span)| if let DeValue::String(ref dep_name) = *dep_span.get_ref() {
+ let dep_utf8 = dep_name.as_bytes();
+ if dep_utf8.contains(&b'/') {
+ Ok(())
+ } else if is_feature_dependency_a_dependency(dep_utf8) {
+ if vec_deps.iter().any(|d| d == dep_name) {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ } else {
+ vec_deps.push(dep_name.clone().into_owned());
Ok(())
- } else if is_feature_dependency_a_dependency(dep_utf8) {
- if vec_deps.iter().any(|d| d == dep_name) {
- Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ }
+ } else if let Some(next_feature_span) = features.get(dep_name.as_ref()) {
+ // `validate_dependencies` ensures all feature
+ // dependencies recursively are arrays.
+ let feat_info = next_feature_span.get_ref().as_array().unwrap_or_else(Self::impossible);
+ // `idx < deps.iter().len()`; thus this won't overflow.
+ deps.iter().skip(idx + 1).try_fold((), |(), next_dep_span| if let DeValue::String(ref next_dep_name) = *next_dep_span.get_ref() {
+ if is_feature_dependency_a_feature(next_dep_name) {
+ if dep_name == next_dep_name {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ } else if Self::check_redundant_dependencies(next_dep_name, feat_info, features, allow_implied_features) {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), next_dep_name.clone().into_owned()))
+ } else {
+ features.get(next_dep_name.as_ref()).map_or_else(
+ || {
+ if allow_implied_features {
+ Ok(())
+ } else {
+ // `validate_dependencies` ensures all features
+ // recursively in the feature dependencies are defined
+ // as features iff `!allow_implied_features`.
+ Self::impossible()
+ }
+ },
+ |next_dep_feature_span| {
+ // `validate_dependencies` ensures all feature
+ // dependencies recursively are arrays.
+ if Self::check_redundant_dependencies(dep_name, next_dep_feature_span.get_ref().as_array().unwrap_or_else(Self::impossible), features, allow_implied_features) {
+ Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
+ } else {
+ Ok(())
+ }
+ }
+ )
+ }
} else {
- vec_deps.push(dep_name.clone().into_owned());
Ok(())
}
- } else if let Some(next_feature_span) = features.get(dep_name.as_ref()) {
- // `validate_dependencies` ensures all feature
- // dependencies recursively are arrays.
- let feat_info = next_feature_span.get_ref().as_array().unwrap_or_else(Self::impossible);
- // `idx < deps.iter().len()`; thus this won't overflow.
- deps.iter().skip(idx + 1).try_fold((), |(), next_dep_span| match *next_dep_span.get_ref() {
- DeValue::String(ref next_dep_name) => {
- if is_feature_dependency_a_feature(next_dep_name) {
- if dep_name == next_dep_name {
- Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
- } else if Self::check_redundant_dependencies(next_dep_name, feat_info, features, allow_implied_features) {
- Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), next_dep_name.clone().into_owned()))
- } else {
- features.get(next_dep_name.as_ref()).map_or_else(
- || {
- if allow_implied_features {
- Ok(())
- } else {
- // `validate_dependencies` ensures all features
- // recursively in the feature dependencies are defined
- // as features iff `!allow_implied_features`.
- Self::impossible()
- }
- },
- |next_dep_feature_span| {
- // `validate_dependencies` ensures all feature
- // dependencies recursively are arrays.
- if Self::check_redundant_dependencies(dep_name, next_dep_feature_span.get_ref().as_array().unwrap_or_else(Self::impossible), features, allow_implied_features) {
- Err(FeatureDependenciesErr::RedundantDependency(feature.to_owned(), dep_name.clone().into_owned()))
- } else {
- Ok(())
- }
- }
- )
- }
- } else {
- Ok(())
- }
- }
- // `validate_dependencies` ensures all dependencies recursively in
- // `dependencies` are strings.
- DeValue::Integer(_) | DeValue::Float(_) | DeValue::Boolean(_) | DeValue::Datetime(_) | DeValue::Array(_) | DeValue::Table(_) => Self::impossible(),
- }).map(|()| vec_deps.push(dep_name.clone().into_owned()))
- } else if allow_implied_features {
- vec_deps.push(dep_name.clone().into_owned());
- Ok(())
} else {
- // `validate_dependencies` ensures all features
- // recursively in `dependencies` are defined as features
- // iff `!allow_implied_features`.
+ // `validate_dependencies` ensures all dependencies recursively in `dependencies` are
+ // strings.
Self::impossible()
- }
+ }).map(|()| vec_deps.push(dep_name.clone().into_owned()))
+ } else if allow_implied_features {
+ vec_deps.push(dep_name.clone().into_owned());
+ Ok(())
+ } else {
+ // `validate_dependencies` ensures all features
+ // recursively in `dependencies` are defined as features
+ // iff `!allow_implied_features`.
+ Self::impossible()
}
+ } else {
// `validate_dependencies` ensures all dependencies recursively in `dependencies` are strings.
- DeValue::Integer(_) | DeValue::Float(_) | DeValue::Boolean(_) | DeValue::Datetime(_) | DeValue::Array(_) | DeValue::Table(_) => Self::impossible(),
+ Self::impossible()
}).map(|()| vec_deps)
})
}
@@ -1017,10 +1284,10 @@ impl Features {
toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
allow_implied_features: bool,
) -> Result<Self, FeaturesErr> {
- toml.get("features").map_or_else(
+ toml.get(FEATURES).map_or_else(
|| Ok(Self(Vec::new())),
- |features_span| match *features_span.get_ref() {
- DeValue::Table(ref features) => {
+ |features_span| {
+ if let DeValue::Table(ref features) = *features_span.get_ref() {
let mut cycle_buffer = Vec::with_capacity(features.len());
let mut feats = Vec::with_capacity(features.len());
features
@@ -1044,13 +1311,9 @@ impl Features {
}
})
.map(|()| Self(feats))
+ } else {
+ Err(FeaturesErr::InvalidType)
}
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_) => Err(FeaturesErr::InvalidType),
},
)
}
@@ -1065,77 +1328,70 @@ impl Features {
allow_implied_features: bool,
) -> Result<(), DependenciesErr> {
let table_name = table.into_str();
- toml.get(table_name)
- .map_or(Ok(()), |deps_span| match *deps_span.get_ref() {
- DeValue::Table(ref deps) => deps.iter().try_fold((), |(), dep_span| {
+ toml.get(table_name).map_or(Ok(()), |deps_span| {
+ if let DeValue::Table(ref deps) = *deps_span.get_ref() {
+ deps.iter().try_fold((), |(), dep_span| {
let dep_name = dep_span.0.get_ref();
if is_feature_dependency_a_feature(dep_name) {
match *dep_span.1.get_ref() {
DeValue::String(_) => Ok(()),
DeValue::Table(ref dep_info) => {
- dep_info.get("optional").map_or(Ok(()), |opt_span| {
- match *opt_span.get_ref() {
- DeValue::Boolean(ref optional) => {
- if *optional {
- self.0
- .iter()
- .try_fold((), |(), feat| {
- if feat.0 == *dep_name {
- // We already have a feature with the same name,
- // so we don't need to continue.
- Err(())
- } else if feat.1.iter().any(|feat_dep| {
- feat_dep
- .as_bytes()
- .split_at_checked(DEP.len())
- .is_some_and(|(pref, rem)| {
- pref == DEP
- && dep_name.as_bytes()
- == rem
- })
- }) {
- // The feature dependencies contain `"dep:<dep_name>"`,
- // so we don't need to add an implied feature.
- Err(())
- } else {
- // The feature name is not `<dep_name>` and all of
- // feature dependencies of all features are not named
- // `"dep:<dep_name>"`; thus we need to continue our
- // search.
- Ok(())
- }
- })
- .map_or(Ok(()), |()| {
- if allow_implied_features {
- // There is no feature with the name `<dep_name>` nor
- // are there any features that contain a feature
- // dependency named `"dep:<dep_name>"`; thus we must
- // insert an implied feature.
- self.0.push((
- dep_name.clone().into_owned(),
- Vec::new(),
- ));
- Ok(())
- } else {
- Err(DependenciesErr::ImpliedFeature(
- table_name,
- dep_name.clone().into_owned(),
- ))
- }
- })
- } else {
- Ok(())
- }
+ dep_info.get(OPTIONAL).map_or(Ok(()), |opt_span| {
+ if let DeValue::Boolean(ref optional) = *opt_span.get_ref() {
+ if *optional {
+ self.0
+ .iter()
+ .try_fold((), |(), feat| {
+ if feat.0 == *dep_name {
+ // We already have a feature with the same name,
+ // so we don't need to continue.
+ Err(())
+ } else if feat.1.iter().any(|feat_dep| {
+ feat_dep
+ .as_bytes()
+ .split_at_checked(DEP.len())
+ .is_some_and(|(pref, rem)| {
+ pref == DEP
+ && dep_name.as_bytes() == rem
+ })
+ }) {
+ // The feature dependencies contain `"dep:<dep_name>"`,
+ // so we don't need to add an implied feature.
+ Err(())
+ } else {
+ // The feature name is not `<dep_name>` and all of
+ // feature dependencies of all features are not named
+ // `"dep:<dep_name>"`; thus we need to continue our
+ // search.
+ Ok(())
+ }
+ })
+ .map_or(Ok(()), |()| {
+ if allow_implied_features {
+ // There is no feature with the name `<dep_name>` nor
+ // are there any features that contain a feature
+ // dependency named `"dep:<dep_name>"`; thus we must
+ // insert an implied feature.
+ self.0.push((
+ dep_name.clone().into_owned(),
+ Vec::new(),
+ ));
+ Ok(())
+ } else {
+ Err(DependenciesErr::ImpliedFeature(
+ table_name,
+ dep_name.clone().into_owned(),
+ ))
+ }
+ })
+ } else {
+ Ok(())
}
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_)
- | DeValue::Table(_) => Err(DependenciesErr::OptionalType(
+ } else {
+ Err(DependenciesErr::OptionalType(
table_name,
dep_name.clone().into_owned(),
- )),
+ ))
}
})
}
@@ -1154,131 +1410,56 @@ impl Features {
dep_name.clone().into_owned(),
))
}
- }),
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_) => Err(DependenciesErr::Type(table_name)),
- })
- }
- /// Adds implied features to `self` based on the optional dependencies in `toml`
+ })
+ } else {
+ Err(DependenciesErr::Type(table_name))
+ }
+ })
+ }
+ /// Adds implied features to `self` based on the optional dependencies in `toml`
/// iff `allow_implied_features`.
fn add_implied_features(
&mut self,
toml: &Map<Spanned<Cow<'_, str>>, Spanned<DeValue<'_>>>,
allow_implied_features: bool,
) -> Result<(), ImpliedFeaturesErr> {
- self.add_optional_dependencies(toml, DepTable::Dependencies, allow_implied_features)
- .map_err(ImpliedFeaturesErr::Dependencies)
- .and_then(|()| {
- self.add_optional_dependencies(
- toml,
- DepTable::BuildDependencies,
- allow_implied_features,
- )
- .map_err(ImpliedFeaturesErr::Dependencies)
- .and_then(|()| {
- toml.get("target")
- .map_or_else(
- || Ok(()),
- |target_span| match *target_span.get_ref() {
- DeValue::Table(ref target) => {
- target.iter().try_fold((), |(), target_platform_span| {
- match *target_platform_span.1.get_ref() {
- DeValue::Table(ref target_platform) => self
- .add_optional_dependencies(
- target_platform,
- DepTable::Dependencies,
- allow_implied_features,
- )
- .map_err(|e| {
- ImpliedFeaturesErr::TagetPlatformDependencies(
- target_platform_span
- .0
- .get_ref()
- .clone()
- .into_owned(),
- e,
- )
- })
- .and_then(|()| {
- self.add_optional_dependencies(
- target_platform,
- DepTable::BuildDependencies,
- allow_implied_features,
- )
- .map_err(|e| {
- ImpliedFeaturesErr::TagetPlatformDependencies(
- target_platform_span
- .0
- .get_ref()
- .clone()
- .into_owned(),
- e,
- )
- })
- }),
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_) => {
- Err(ImpliedFeaturesErr::TargetPlatformType(
- target_platform_span
- .0
- .get_ref()
- .clone()
- .into_owned(),
- ))
- }
- }
- })
- }
- DeValue::String(_)
- | DeValue::Integer(_)
- | DeValue::Float(_)
- | DeValue::Boolean(_)
- | DeValue::Datetime(_)
- | DeValue::Array(_) => Err(ImpliedFeaturesErr::TargetType),
- },
- )
- .and_then(|()| {
- if allow_implied_features {
- // We don't have to worry about cyclic features or anything other
- // than the lack of a feature with the name of the feature
- // dependency.
- self.0.iter().try_fold((), |(), feature| {
- feature.1.iter().try_fold((), |(), dep| {
- // We didn't save any feature dependencies that contain
- // `'/'`, so we simply have to check if a dependency
- // begins with [`DEP`] to skip it.
- if is_feature_dependency_a_dependency(dep.as_bytes())
- || self
- .0
- .iter()
- .any(|other_feature| other_feature.0 == *dep)
- {
- Ok(())
- } else {
- Err(ImpliedFeaturesErr::InvalidDependency(
- feature.0.clone(),
- dep.clone(),
- ))
- }
- })
- })
- } else {
- // When `!allowed_implied_features`, [`Self::validate_dependencies`]
- // verifies non-dependency feature dependencies are defined as
- // features.
- Ok(())
- }
- })
- })
- })
+ self.add_optional_dependencies(toml, DepTable::Dependencies, allow_implied_features).map_err(ImpliedFeaturesErr::Dependencies).and_then(|()| self.add_optional_dependencies(toml, DepTable::BuildDependencies, allow_implied_features).map_err(ImpliedFeaturesErr::Dependencies).and_then(|()| toml.get(TARGET).map_or_else(
+ || Ok(()),
+ |target_span| {
+ if let DeValue::Table(ref target) = *target_span.get_ref() {
+ target.iter().try_fold((), |(), target_platform_span| {
+ if let DeValue::Table(ref target_platform) = *target_platform_span.1.get_ref() {
+ self.add_optional_dependencies(target_platform, DepTable::Dependencies, allow_implied_features).map_err(|e| ImpliedFeaturesErr::TagetPlatformDependencies(target_platform_span.0.get_ref().clone().into_owned(), e)).and_then(|()| self.add_optional_dependencies(target_platform, DepTable::BuildDependencies, allow_implied_features).map_err(|e| ImpliedFeaturesErr::TagetPlatformDependencies(target_platform_span.0.get_ref().clone().into_owned(), e)))
+ } else {
+ Err(ImpliedFeaturesErr::TargetPlatformType(target_platform_span.0.get_ref().clone().into_owned()))
+ }
+ })
+ } else {
+ Err(ImpliedFeaturesErr::TargetType)
+ }
+ }
+ ).and_then(|()| {
+ if allow_implied_features {
+ // We don't have to worry about cyclic features or anything other
+ // than the lack of a feature with the name of the feature
+ // dependency.
+ self.0.iter().try_fold((), |(), feature| feature.1.iter().try_fold((), |(), dep| {
+ // We didn't save any feature dependencies that contain
+ // `'/'`, so we simply have to check if a dependency
+ // begins with [`DEP`] to skip it.
+ if is_feature_dependency_a_dependency(dep.as_bytes()) || self.0.iter().any(|other_feature| other_feature.0 == *dep) {
+ Ok(())
+ } else {
+ Err(ImpliedFeaturesErr::InvalidDependency(feature.0.clone(), dep.clone()))
+ }
+ }))
+ } else {
+ // When `!allow_implied_features`, [`Self::validate_dependencies`]
+ // verifies non-dependency feature dependencies are defined as
+ // features.
+ Ok(())
+ }
+ })))
}
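// Illustrative sketch, not part of this diff: the rule `add_optional_dependencies` applies
// when deciding whether an optional dependency becomes an implied feature. It does unless
// an existing feature shares the dependency's name or lists "dep:<name>" among its feature
// dependencies. The helper name is hypothetical.
fn implies_feature(dep: &str, features: &[(String, Vec<String>)]) -> bool {
    !features.iter().any(|(name, feat_deps)| {
        name == dep || feat_deps.iter().any(|d| d.strip_prefix("dep:") == Some(dep))
    })
}
fn main() {
    let features = vec![("foo".to_owned(), vec!["dep:serde".to_owned()])];
    assert!(!implies_feature("serde", &features)); // already covered by "dep:serde"
    assert!(implies_feature("rand", &features)); // would be added as an implied feature
}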
/// Returns the power set of `self` with semantically equivalent sets removed.
pub(crate) fn power_set(&self) -> Result<PowerSet<'_>, TooManyFeaturesErr> {
@@ -1313,20 +1494,28 @@ impl Manifest {
pub(crate) fn from_toml(
val: String,
allow_implied_features: bool,
- ) -> Result<Self, ManifestErr> {
+ cargo_toml: &Path,
+ ) -> Result<Self, Box<ManifestErr>> {
Map::parse(val.as_str())
- .map_err(ManifestErr::Toml)
+ .map_err(|e| Box::new(ManifestErr::Toml(e, cargo_toml.to_path_buf())))
.and_then(|span| {
let cargo = span.get_ref();
- Msrv::extract_from_toml(cargo)
- .map_err(ManifestErr::Package)
+ Msrv::extract_from_toml(cargo, cargo_toml)
+ .map_err(|e| Box::new(ManifestErr::Package(e, cargo_toml.to_path_buf())))
.and_then(|msrv| {
Features::extract_from_toml(cargo, allow_implied_features)
- .map_err(ManifestErr::Features)
+ .map_err(|e| {
+ Box::new(ManifestErr::Features(e, cargo_toml.to_path_buf()))
+ })
.and_then(|mut features| {
features
.add_implied_features(cargo, allow_implied_features)
- .map_err(ManifestErr::ImpliedFeatures)
+ .map_err(|e| {
+ Box::new(ManifestErr::ImpliedFeatures(
+ e,
+ cargo_toml.to_path_buf(),
+ ))
+ })
.map(|()| {
features.0.iter_mut().fold(
(),
@@ -1352,8 +1541,35 @@ impl Manifest {
mod tests {
use super::{
DependenciesErr, FeatureDependenciesErr, Features, FeaturesErr, ImpliedFeaturesErr,
- Manifest, ManifestErr, Msrv, PackageErr, PowerSet, TooManyFeaturesErr,
+ Manifest, ManifestErr, Msrv, PackageErr, Path, PathBuf, PowerSet, TooManyFeaturesErr,
+ WorkspaceErr,
};
+ impl PartialEq for PackageErr {
+ fn eq(&self, other: &Self) -> bool {
+ match *self {
+ Self::Missing => matches!(*other, Self::Missing),
+ Self::InvalidType => matches!(*other, Self::InvalidType),
+ Self::InvalidMsrvType => matches!(*other, Self::InvalidMsrvType),
+ Self::Msrv => matches!(*other, Self::Msrv),
+ Self::MsrvWorkspaceMissing => matches!(*other, Self::MsrvWorkspaceMissing),
+ Self::MsrvWorkspaceVal => matches!(*other, Self::MsrvWorkspaceVal),
+ Self::InvalidWorkspaceType => matches!(*other, Self::InvalidWorkspaceType),
+ Self::WorkspaceIo(ref e) => {
+ matches!(*other, Self::WorkspaceIo(ref e2) if e.kind() == e2.kind())
+ }
+ Self::WorkspaceDoesNotExist => matches!(*other, Self::WorkspaceDoesNotExist),
+ Self::WorkspaceRead(ref e, ref p) => {
+ matches!(*other, Self::WorkspaceRead(ref e2, ref p2) if e.kind() == e2.kind() && p == p2)
+ }
+ Self::WorkspaceToml(ref e, ref p) => {
+ matches!(*other, Self::WorkspaceToml(ref e2, ref p2) if e == e2 && p == p2)
+ }
+ Self::Workspace(e, ref p) => {
+ matches!(*other, Self::Workspace(e2, ref p2) if e == e2 && p == p2)
+ }
+ }
+ }
+ }
#[expect(
clippy::cognitive_complexity,
clippy::too_many_lines,
@@ -1362,447 +1578,810 @@ mod tests {
#[test]
fn cargo_toml() {
assert!(
- Manifest::from_toml("a".to_owned(), false)
- .map_or_else(|e| matches!(e, ManifestErr::Toml(_)), |_| false)
+ Manifest::from_toml("a".to_owned(), false, Path::new(""))
+ .map_or_else(|e| matches!(*e, ManifestErr::Toml(_, _)), |_| false)
);
assert_eq!(
- Manifest::from_toml(String::new(), false),
- Err(ManifestErr::Package(PackageErr::Missing))
+ Manifest::from_toml(String::new(), false, Path::new("")),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Missing,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[' package']".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Missing))
+ Manifest::from_toml("[' package']".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Missing,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("['package ']".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Missing))
+ Manifest::from_toml("['package ']".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Missing,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("package=2".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::InvalidType))
+ Manifest::from_toml("package=2".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::InvalidType,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=2".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::InvalidMsrvType))
+ Manifest::from_toml("[package]\nrust-version=2".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::InvalidMsrvType,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"a\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"a\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"1.00.0\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"1.00.0\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"1..0\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"1..0\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"1.\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"1.\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"01.0.0\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"01.0.0\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"1.0.0.1\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version=\"1.0.0.1\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
Manifest::from_toml(
"[package]\nrust-version=\"111111111111111111111111.2.3\"".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
Manifest::from_toml(
"[package]\nrust-version=\"1.0.0-nightly\"".to_owned(),
- false
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[package]\nrust-version=\"-1.0.0\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[package]\nrust-version=\" 1.0.0\"".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[package]\nrust-version=\"1.0.0 \"".to_owned(),
+ false,
+ Path::new("")
),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Msrv,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"-1.0.0\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version={}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::MsrvWorkspaceMissing,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\" 1.0.0\"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version={workspace=2}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::MsrvWorkspaceVal,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"1.0.0 \"".to_owned(), false),
- Err(ManifestErr::Package(PackageErr::Msrv))
+ Manifest::from_toml(
+ "[package]\nrust-version={workspace=false}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::MsrvWorkspaceVal,
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[package]\nrust-version={workspace=true}\nworkspace=2".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::InvalidWorkspaceType,
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "workspace=2\n[package]\nrust-version={workspace=true}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Workspace(WorkspaceErr::InvalidType, PathBuf::new()),
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[workspace]\n[package]\nrust-version={workspace=true}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Workspace(WorkspaceErr::MissingPackage, PathBuf::new()),
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[workspace]\npackage=2\n[package]\nrust-version={workspace=true}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Workspace(WorkspaceErr::InvalidPackageType, PathBuf::new()),
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[workspace.package]\n[package]\nrust-version={workspace=true}".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Workspace(WorkspaceErr::MissingPackageMsrv, PathBuf::new()),
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[workspace.package]\nrust-version={}\n[package]\nrust-version={workspace=true}"
+ .to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Workspace(WorkspaceErr::InvalidPackageMsrvType, PathBuf::new()),
+ PathBuf::new()
+ )))
+ );
+ assert_eq!(
+ Manifest::from_toml(
+ "[workspace.package]\nrust-version=\"\"\n[package]\nrust-version={workspace=true}"
+ .to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Package(
+ PackageErr::Workspace(WorkspaceErr::Msrv, PathBuf::new()),
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("features=2\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::InvalidType))
+ Manifest::from_toml("features=2\n[package]".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::InvalidType,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("[features]\n\"/\"=[]\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::InvalidName(
- "/".to_owned()
+ Manifest::from_toml(
+ "[features]\n\"/\"=[]\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::InvalidName("/".to_owned()),
+ PathBuf::new()
)))
);
assert_eq!(
- Manifest::from_toml("[features]\n\"dep:\"=[]\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::InvalidName(
- "dep:".to_owned()
+ Manifest::from_toml(
+ "[features]\n\"dep:\"=[]\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::InvalidName("dep:".to_owned()),
+ PathBuf::new()
)))
);
assert_eq!(
- Manifest::from_toml("[features]\n\"\"=2\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::InvalidFeatureType(String::new())
+ Manifest::from_toml(
+ "[features]\n\"\"=2\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::InvalidFeatureType(
+ String::new()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
- Manifest::from_toml("[features]\n\"\"=[true]\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::InvalidDependencyType(String::new())
+ Manifest::from_toml(
+ "[features]\n\"\"=[true]\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::InvalidDependencyType(
+ String::new()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
- Manifest::from_toml("[features]\n\"\"=[\"foo\"]\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::InvalidDependency(String::new(), "foo".to_owned())
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"foo\"]\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::InvalidDependency(
+ String::new(),
+ "foo".to_owned()
+ )),
+ PathBuf::new()
)))
);
// Feature dependencies can't be implied features when implied features are forbidden.
assert_eq!(
Manifest::from_toml(
- "[dependencies]\nfoo={\"optional\"=true}\n[features]\n\"\"=[\"foo\"]\n[package]"
+ "[dependencies]\nfoo={optional=true}\n[features]\n\"\"=[\"foo\"]\n[package]"
.to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::InvalidDependency(String::new(), "foo".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::InvalidDependency(
+ String::new(),
+ "foo".to_owned()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
- Manifest::from_toml("[features]\n\"\"=[\"\"]\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::CyclicFeature(String::new())
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"\"]\n[package]".to_owned(),
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::CyclicFeature(
+ String::new()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[\"a\"]\na=[\"\"]\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::CyclicFeature(String::new())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::CyclicFeature(
+ String::new()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[\"a\"]\na=[\"b\"]\nb=[\"a\"]\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::CyclicFeature("a".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::CyclicFeature(
+ "a".to_owned()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[\"a\"]\na=[\"c\",\"b\"]\nb=[\"a\"]\nc=[]\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::CyclicFeature("a".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::CyclicFeature(
+ "a".to_owned()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[]\na=[\"c\",\"b\"]\nb=[\"a\"]\nc=[]\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::CyclicFeature("a".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::CyclicFeature(
+ "a".to_owned()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[\"a\",\"b\"]\na=[\"b\"]\nb=[]\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::RedundantDependency(String::new(), "b".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::RedundantDependency(
+ String::new(),
+ "b".to_owned()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[\"a\",\"a\"]\na=[]\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::RedundantDependency(String::new(), "a".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::RedundantDependency(
+ String::new(),
+ "a".to_owned()
+ )),
+ PathBuf::new()
)))
);
// Duplicate `"dep:"` feature dependencies error.
assert_eq!(
Manifest::from_toml(
"[features]\n\"\"=[\"dep:\",\"dep:\"]\na=[]\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::RedundantDependency(String::new(), "dep:".to_owned())
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::RedundantDependency(
+ String::new(),
+ "dep:".to_owned()
+ )),
+ PathBuf::new()
)))
);
assert_eq!(
- Manifest::from_toml("target=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(ImpliedFeaturesErr::TargetType))
+ Manifest::from_toml("target=2\n[package]".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TargetType,
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("dependencies=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
- ImpliedFeaturesErr::Dependencies(DependenciesErr::Type("dependencies"))
- ))
+ Manifest::from_toml("dependencies=2\n[package]".to_owned(), false, Path::new("")),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Type("dependencies")),
+ PathBuf::new()
+ )))
);
assert_eq!(
- Manifest::from_toml("build-dependencies=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
- ImpliedFeaturesErr::Dependencies(DependenciesErr::Type("build-dependencies"))
- ))
+ Manifest::from_toml(
+ "build-dependencies=2\n[package]".to_owned(),
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::Dependencies(DependenciesErr::Type("build-dependencies")),
+ PathBuf::new(),
+ )))
);
assert_eq!(
- Manifest::from_toml("[dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
+ Manifest::from_toml(
+ "[dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
"dependencies",
"dep:".to_owned()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
- Manifest::from_toml("[dependencies]\n\"/\"=\"\"\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
+ Manifest::from_toml(
+ "[dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
"dependencies",
"/".to_owned()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[build-dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
"build-dependencies",
"dep:".to_owned()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[build-dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::Name(
"build-dependencies",
"/".to_owned()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
- Manifest::from_toml("[dependencies]\n\"\"=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
+ Manifest::from_toml(
+ "[dependencies]\n\"\"=2\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::DependencyType(
"dependencies",
String::new()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
- Manifest::from_toml("[build-dependencies]\n\"\"=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
+ Manifest::from_toml(
+ "[build-dependencies]\n\"\"=2\n[package]".to_owned(),
+ false,
+ Path::new("")
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::DependencyType(
"build-dependencies",
String::new()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
- "[dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
- false
+ "[dependencies]\n\"\"={optional=2}\n[package]".to_owned(),
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::OptionalType(
"dependencies",
String::new()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
- "[build-dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ "[build-dependencies]\n\"\"={optional=2}\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::OptionalType(
"build-dependencies",
String::new()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
// Implied features are disallowed iff `!allow_implied_features`.
assert_eq!(
Manifest::from_toml(
- "[dependencies]\nfoo={\"optional\"=true}\n[package]".to_owned(),
- false
+ "[dependencies]\nfoo={optional=true}\n[package]".to_owned(),
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::Dependencies(DependenciesErr::ImpliedFeature(
"dependencies",
"foo".to_owned()
- ))
- ))
+ )),
+ PathBuf::new(),
+ )))
);
assert_eq!(
- Manifest::from_toml("[target]\n\"\"=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
- ImpliedFeaturesErr::TargetPlatformType(String::new())
- ))
+ Manifest::from_toml(
+ "[target]\n\"\"=2\n[package]".to_owned(),
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::TargetPlatformType(String::new()),
+ PathBuf::new(),
+ )))
);
assert_eq!(
- Manifest::from_toml("[target.\"\"]\ndependencies=2\n[package]".to_owned(), false),
- Err(ManifestErr::ImpliedFeatures(
+ Manifest::from_toml(
+ "[target.\"\"]\ndependencies=2\n[package]".to_owned(),
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::Type("dependencies")
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\"]\nbuild-dependencies=2\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::Type("build-dependencies")
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\".dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::Name("dependencies", "/".to_owned())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\".dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::Name("dependencies", "dep:".to_owned())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\".build-dependencies]\n\"/\"=\"\"\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::Name("build-dependencies", "/".to_owned())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\".build-dependencies]\n\"dep:\"=\"\"\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::Name("build-dependencies", "dep:".to_owned())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\".dependencies]\n\"\"=false\n[package]".to_owned(),
- false
+ false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::DependencyType("dependencies", String::new())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
"[target.\"\".build-dependencies]\n\"\"=false\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::DependencyType("build-dependencies", String::new())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
- "[target.\"\".dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ "[target.\"\".dependencies]\n\"\"={optional=2}\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::OptionalType("dependencies", String::new())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
assert_eq!(
Manifest::from_toml(
- "[target.\"\".build-dependencies]\n\"\"={\"optional\"=2}\n[package]".to_owned(),
+ "[target.\"\".build-dependencies]\n\"\"={optional=2}\n[package]".to_owned(),
false,
+ Path::new(""),
),
- Err(ManifestErr::ImpliedFeatures(
+ Err(Box::new(ManifestErr::ImpliedFeatures(
ImpliedFeaturesErr::TagetPlatformDependencies(
String::new(),
DependenciesErr::OptionalType("build-dependencies", String::new())
- )
- ))
+ ),
+ PathBuf::new(),
+ )))
);
// An invalid dependency error occurs later when we `allow_implied_features` since
// implied features aren't added until after feature extraction.
assert_eq!(
- Manifest::from_toml("[features]\n\"\"=[\"foo\"]\n[package]".to_owned(), true),
- Err(ManifestErr::ImpliedFeatures(
- ImpliedFeaturesErr::InvalidDependency(String::new(), "foo".to_owned())
- ))
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"foo\"]\n[package]".to_owned(),
+ true,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::ImpliedFeatures(
+ ImpliedFeaturesErr::InvalidDependency(String::new(), "foo".to_owned()),
+ PathBuf::new(),
+ )))
);
// In contrast, above would have erred sooner if `!allow_implied_features`.
assert_eq!(
- Manifest::from_toml("[features]\n\"\"=[\"foo\"]\n[package]".to_owned(), false),
- Err(ManifestErr::Features(FeaturesErr::FeatureDependencies(
- FeatureDependenciesErr::InvalidDependency(String::new(), "foo".to_owned())
+ Manifest::from_toml(
+ "[features]\n\"\"=[\"foo\"]\n[package]".to_owned(),
+ false,
+ Path::new(""),
+ ),
+ Err(Box::new(ManifestErr::Features(
+ FeaturesErr::FeatureDependencies(FeatureDependenciesErr::InvalidDependency(
+ String::new(),
+ "foo".to_owned()
+ )),
+ PathBuf::new()
)))
);
// Even if we forbid implied features, we don't error when a feature is defined
@@ -1813,8 +2392,9 @@ mod tests {
// the explicit feature `foo` and the implied feature from the dependency `foo`.
assert_eq!(
Manifest::from_toml(
- "[dependencies]\nfoo={\"optional\"=true}\n[features]\nfoo=[]\n[package]".to_owned(),
- false
+ "[dependencies]\nfoo={optional=true}\n[features]\nfoo=[]\n[package]".to_owned(),
+ false,
+ Path::new(""),
),
Ok(Manifest {
msrv: None,
@@ -1823,7 +2403,7 @@ mod tests {
);
// Allow empty `package`.
assert_eq!(
- Manifest::from_toml("[package]".to_owned(), false),
+ Manifest::from_toml("[package]".to_owned(), false, Path::new("")),
Ok(Manifest {
msrv: None,
features: Features(Vec::new()),
@@ -1831,7 +2411,11 @@ mod tests {
);
// Allow major-only MSRV.
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"0\"".to_owned(), false),
+ Manifest::from_toml(
+ "[package]\nrust-version=\"0\"".to_owned(),
+ false,
+ Path::new(""),
+ ),
Ok(Manifest {
msrv: Some(Msrv {
major: 0,
@@ -1845,7 +2429,8 @@ mod tests {
assert_eq!(
Manifest::from_toml(
"[\"\\u0070ackage\"]\n\"\\u0072ust-version\"=\"0\\u002E\\u0031\"".to_owned(),
- false
+ false,
+ Path::new(""),
),
Ok(Manifest {
msrv: Some(Msrv {
@@ -1857,7 +2442,11 @@ mod tests {
})
);
assert_eq!(
- Manifest::from_toml("[package]\nrust-version=\"0.0.0\"".to_owned(), false),
+ Manifest::from_toml(
+ "[package]\nrust-version=\"0.0.0\"".to_owned(),
+ false,
+ Path::new(""),
+ ),
Ok(Manifest {
msrv: Some(Msrv {
major: 0,
@@ -1872,7 +2461,7 @@ mod tests {
// `target.<something>` unless the key is `dependencies` or `build-dependencies`. Don't treat
// `<something>` special in `target.<something>` other than its being a table.
assert_eq!(
- Manifest::from_toml("dev-dependencies=2\n[package]\nfoo=2\nrust-version=\"18446744073709551615.18446744073709551615.18446744073709551615\"\n[foo]\nbar=false\n[target.\"\".foo]\nbar=2\n[target.foo]\nbar=false\n[target.dependencies]\nfoo=2\n[target.build-dependencies]\nfoo=false\n[target.dev-dependencies]\nfoo=true\n".to_owned(), false),
+ Manifest::from_toml("dev-dependencies=2\n[package]\nfoo=2\nrust-version=\"18446744073709551615.18446744073709551615.18446744073709551615\"\n[foo]\nbar=false\n[target.\"\".foo]\nbar=2\n[target.foo]\nbar=false\n[target.dependencies]\nfoo=2\n[target.build-dependencies]\nfoo=false\n[target.dev-dependencies]\nfoo=true\n".to_owned(), false, Path::new("")),
Ok(Manifest {
msrv: Some(Msrv {
@@ -1933,8 +2522,9 @@ mod tests {
// 6. (c, [])
assert_eq!(
Manifest::from_toml(
- "[\"\\u0064ependencies\"]\n\"\\u0000\"=\"\\u0000\"\na={\"optional\"=true}\n[\"build-\\u0064ependencies\"]\n\"\\u0000\"={\"optional\"=true}\n[target.\"\".dependencies]\nb={\"optional\"=false,foo=2}\nfizz={\"optional\"=true,foo=3}\n[features]\ndefault=[\"bar\",\"dep:lk\",\"a/ak\",\"a/ak\"]\nbar=[\"dep\\u003Awuzz\"]\n[dev-dependencies]\nbuzz={\"optional\"=true}\n[target.a.dependencies]\nc={\"optional\"=true}\nwuzz={\"optional\"=true}\n[package]".to_owned(),
+ "[\"\\u0064ependencies\"]\n\"\\u0000\"=\"\\u0000\"\na={optional=true}\n[\"build-\\u0064ependencies\"]\n\"\\u0000\"={optional=true}\n[target.\"\".dependencies]\nb={optional=false,foo=2}\nfizz={optional=true,foo=3}\n[features]\ndefault=[\"bar\",\"dep:lk\",\"a/ak\",\"a/ak\"]\nbar=[\"dep\\u003Awuzz\"]\n[dev-dependencies]\nbuzz={optional=true}\n[target.a.dependencies]\nc={optional=true}\nwuzz={optional=true}\n[package]".to_owned(),
true,
+ Path::new(""),
),
Ok(Manifest {
msrv: None,
@@ -1952,8 +2542,9 @@ mod tests {
// bar = ["dep:foo"]
assert_eq!(
Manifest::from_toml(
- "[package]\n[dependencies]\nfoo={\"optional\"=true}\nfizz={\"optional\"=true}\n[features]\nfizz=[\"dep:fizz\"]\nbar=[\"dep:foo\"]".to_owned(),
+ "[package]\n[dependencies]\nfoo={optional=true}\nfizz={optional=true}\n[features]\nfizz=[\"dep:fizz\"]\nbar=[\"dep:foo\"]".to_owned(),
false,
+ Path::new(""),
),
Ok(Manifest {
msrv: None,
@@ -1972,9 +2563,10 @@ mod tests {
// foo = ["bar"]
assert_eq!(
Manifest::from_toml(
- "[package]\n[dependencies]\nbar={\"optional\"=true}\n[features]\nfoo=[\"bar\"]"
+ "[package]\n[dependencies]\nbar={optional=true}\n[features]\nfoo=[\"bar\"]"
.to_owned(),
true,
+ Path::new(""),
),
Ok(Manifest {
msrv: None,
@@ -1999,7 +2591,18 @@ mod tests {
let feat_len_one_too_large = 33;
#[cfg(target_pointer_width = "64")]
let feat_len_one_too_large = 65;
+ #[cfg(not(any(
+ target_pointer_width = "16",
+ target_pointer_width = "32",
+ target_pointer_width = "64"
+ )))]
+ let feat_len_one_too_large = 0;
let mut feats = Features(vec![(String::new(), Vec::new()); feat_len_one_too_large]);
+ #[cfg(any(
+ target_pointer_width = "16",
+ target_pointer_width = "32",
+ target_pointer_width = "64"
+ ))]
assert_eq!(PowerSet::new(&feats), Err(TooManyFeaturesErr));
#[cfg(target_pointer_width = "16")]
let max_feat_len = 16;
@@ -2007,7 +2610,18 @@ mod tests {
let max_feat_len = 32;
#[cfg(target_pointer_width = "64")]
let max_feat_len = 64;
+ #[cfg(not(any(
+ target_pointer_width = "16",
+ target_pointer_width = "32",
+ target_pointer_width = "64"
+ )))]
+ let max_feat_len = 0;
feats.0 = vec![(String::new(), Vec::new()); max_feat_len];
+ #[cfg(any(
+ target_pointer_width = "16",
+ target_pointer_width = "32",
+ target_pointer_width = "64"
+ ))]
assert_eq!(
PowerSet::new(&feats),
Ok(PowerSet {
@@ -2017,6 +2631,7 @@ mod tests {
idx: usize::MAX,
buffer: vec![""; max_feat_len],
set: String::new(),
+ skipped_sets_counter: 0,
})
);
feats.0 = Vec::new();
@@ -2029,6 +2644,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
})
);
let mut power_set = PowerSet::new(&feats).unwrap_or_else(|_e| {
@@ -2044,6 +2660,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2056,6 +2673,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2069,6 +2687,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some(""));
@@ -2081,6 +2700,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2093,6 +2713,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2117,6 +2738,7 @@ mod tests {
idx: 15,
buffer: vec!["a", "b", "c", "d"],
set: String::new(),
+ skipped_sets_counter: 0,
})
);
power_set = PowerSet::new(&feats).unwrap_or_else(|_e| {
@@ -2150,6 +2772,7 @@ mod tests {
idx: 11,
buffer: vec!["c", "d"],
set: "c,d".to_owned(),
+ skipped_sets_counter: 3,
}
);
assert_eq!(power_set.next_set(), Some("d"));
@@ -2163,6 +2786,7 @@ mod tests {
idx: 7,
buffer: vec!["d"],
set: "d".to_owned(),
+ skipped_sets_counter: 6,
}
);
assert_eq!(power_set.next_set(), Some("c"));
@@ -2176,6 +2800,7 @@ mod tests {
idx: 3,
buffer: vec!["c"],
set: "c".to_owned(),
+ skipped_sets_counter: 9,
}
);
assert_eq!(power_set.next_set(), Some("b"));
@@ -2189,6 +2814,7 @@ mod tests {
idx: 1,
buffer: vec!["b"],
set: "b".to_owned(),
+ skipped_sets_counter: 10,
}
);
assert_eq!(power_set.next_set(), Some("a"));
@@ -2202,6 +2828,7 @@ mod tests {
idx: 0,
buffer: vec!["a"],
set: "a".to_owned(),
+ skipped_sets_counter: 10,
}
);
assert_eq!(power_set.next_set(), Some(""));
@@ -2216,6 +2843,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 10,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2229,6 +2857,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 10,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2242,6 +2871,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 10,
}
);
power_set.reset();
@@ -2256,6 +2886,7 @@ mod tests {
idx: 15,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
// Same as above except no feature depends on any other.
@@ -2279,6 +2910,7 @@ mod tests {
idx: 15,
buffer: vec!["a", "b", "c", "d"],
set: String::new(),
+ skipped_sets_counter: 0,
})
);
power_set = PowerSet::new(&feats).unwrap_or_else(|_e| {
@@ -2311,6 +2943,7 @@ mod tests {
idx: 14,
buffer: vec!["a", "b", "c", "d"],
set: "a,b,c,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("b,c,d"));
@@ -2323,6 +2956,7 @@ mod tests {
idx: 13,
buffer: vec!["b", "c", "d"],
set: "b,c,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a,c,d"));
@@ -2335,6 +2969,7 @@ mod tests {
idx: 12,
buffer: vec!["a", "c", "d"],
set: "a,c,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("c,d"));
@@ -2347,6 +2982,7 @@ mod tests {
idx: 11,
buffer: vec!["c", "d"],
set: "c,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a,b,d"));
@@ -2359,6 +2995,7 @@ mod tests {
idx: 10,
buffer: vec!["a", "b", "d"],
set: "a,b,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("b,d"));
@@ -2371,6 +3008,7 @@ mod tests {
idx: 9,
buffer: vec!["b", "d"],
set: "b,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a,d"));
@@ -2383,6 +3021,7 @@ mod tests {
idx: 8,
buffer: vec!["a", "d"],
set: "a,d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("d"));
@@ -2395,6 +3034,7 @@ mod tests {
idx: 7,
buffer: vec!["d"],
set: "d".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a,b,c"));
@@ -2407,6 +3047,7 @@ mod tests {
idx: 6,
buffer: vec!["a", "b", "c"],
set: "a,b,c".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("b,c"));
@@ -2419,6 +3060,7 @@ mod tests {
idx: 5,
buffer: vec!["b", "c"],
set: "b,c".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a,c"));
@@ -2431,6 +3073,7 @@ mod tests {
idx: 4,
buffer: vec!["a", "c"],
set: "a,c".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("c"));
@@ -2443,6 +3086,7 @@ mod tests {
idx: 3,
buffer: vec!["c"],
set: "c".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a,b"));
@@ -2455,6 +3099,7 @@ mod tests {
idx: 2,
buffer: vec!["a", "b"],
set: "a,b".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("b"));
@@ -2467,6 +3112,7 @@ mod tests {
idx: 1,
buffer: vec!["b"],
set: "b".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some("a"));
@@ -2479,6 +3125,7 @@ mod tests {
idx: 0,
buffer: vec!["a"],
set: "a".to_owned(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), Some(""));
@@ -2491,6 +3138,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
assert_eq!(power_set.next_set(), None);
@@ -2503,6 +3151,7 @@ mod tests {
idx: 0,
buffer: Vec::new(),
set: String::new(),
+ skipped_sets_counter: 0,
}
);
}
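
The `PowerSet` expectations above only pin down observable state (`buffer`, `set`, `idx`, `skipped_sets_counter`) and the cap on the number of features at the target's pointer width. As a rough mental model only, and not ci-cargo's actual implementation, iterating the power set of a feature list with a `usize` bitmask and a skip counter could look like the sketch below; the names `features`, `mask`, `set`, and `skipped` are illustrative, and the skip rule is a placeholder.

fn main() {
    // Hypothetical inputs; ci-cargo derives the real feature list from Cargo.toml.
    let features = ["a", "b", "c"];
    // A `usize` bitmask can only index as many features as it has bits, which is
    // consistent with the 16/32/64 caps exercised in the tests above. The strict
    // `<` also keeps the shift below from overflowing in this simplified loop.
    assert!(features.len() < usize::BITS as usize);
    let mut skipped = 0usize;
    // Each value of `mask` in 0..2^n selects one subset of `features`.
    for mask in 0..(1usize << features.len()) {
        let set: Vec<&str> = features
            .iter()
            .enumerate()
            .filter(|&(i, _)| (mask & (1 << i)) != 0)
            .map(|(_, &f)| f)
            .collect();
        // Placeholder skip rule: a real tool might skip subsets made redundant by
        // feature dependencies; here singleton subsets are skipped arbitrarily.
        if set.len() == 1 {
            skipped += 1;
            continue;
        }
        println!("features: {}", set.join(","));
    }
    println!("skipped {skipped} subsets");
}
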
diff --git a/src/rustup.rs b/src/rustup.rs
@@ -1,5 +1,5 @@
/// `rustup` is supported by the target per <https://rust-lang.github.io/rustup-components-history/>.
-/// Last checked on 2025-10-06T20:57.
+/// Last checked on 2025-10-06T20:57Z.
#[cfg(any(
all(
target_arch = "aarch64",
@@ -605,7 +605,7 @@
))]
pub(crate) const SUPPORTED: bool = true;
/// `rustup` is _not_ supported by the target per <https://rust-lang.github.io/rustup-components-history/>.
-/// Last checked on 2025-10-06T20:57.
+/// Last checked on 2025-10-06T20:57Z.
#[cfg(not(any(
all(
target_arch = "aarch64",